From 4202cfe58a749a0ef139f0afedc8564d7524b09b Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Wed, 23 Aug 2023 14:01:01 -0400 Subject: [PATCH 001/262] Ensure that lane SQLAlchemy models are properly registered. (#1341) --- core/lane.py | 2 +- core/model/__init__.py | 3 +++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/core/lane.py b/core/lane.py index 8b719241a0..89b2df9ffc 100644 --- a/core/lane.py +++ b/core/lane.py @@ -2783,7 +2783,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): ) # Only the books on these specific CustomLists will be shown. - customlists = relationship( + customlists: Mapped[List[CustomList]] = relationship( "CustomList", secondary=lambda: lanes_customlists, backref="lane" # type: ignore ) diff --git a/core/model/__init__.py b/core/model/__init__.py index c08d7e3d95..069c48d324 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -584,3 +584,6 @@ def _bulk_operation(self): from .resource import Hyperlink, Representation, Resource, ResourceTransformation from .time_tracking import PlaytimeEntry, PlaytimeSummary from .work import Work, WorkGenre + +# Import order important here to avoid an import cycle. +from core.lane import Lane, LaneGenre # isort:skip From 35258f8d95d8fca82ff205c3d57dee4161f5d8c7 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 23 Aug 2023 15:03:39 -0300 Subject: [PATCH 002/262] Bugfix for OPDS 2 token fulfillment (PP-360) (#1342) * Temporary fix for token auth, by rolling back integration configuration changes. * Fix issue found in local testing. --- api/opds2.py | 15 ++++++++++----- core/opds2_import.py | 10 +++++----- tests/api/test_opds2.py | 16 ++++++++-------- tests/core/test_opds2_import.py | 7 ++++--- 4 files changed, 27 insertions(+), 21 deletions(-) diff --git a/api/opds2.py b/api/opds2.py index a1ca54d399..fa38f3d82b 100644 --- a/api/opds2.py +++ b/api/opds2.py @@ -9,7 +9,7 @@ from api.circulation import CirculationFulfillmentPostProcessor, FulfillmentInfo from api.circulation_exceptions import CannotFulfill from core.lane import Facets -from core.model import ExternalIntegration +from core.model import ConfigurationSetting, ExternalIntegration from core.model.edition import Edition from core.model.identifier import Identifier from core.model.licensing import LicensePoolDeliveryMechanism @@ -109,13 +109,18 @@ def fulfill( if "authentication_token" not in templated.variable_names: return fulfillment - token_auth = licensepool.collection.integration_configuration.settings_dict.get( - ExternalIntegration.TOKEN_AUTH + # TODO: This needs to be refactored to use IntegrationConfiguration, + # but it has been temporarily rolled back, since the IntegrationConfiguration + # code caused problems fulfilling TOKEN_AUTH books in production. + # This should be fixed as part of the work PP-313 to fully remove + # ExternalIntegrations from our collections code. 
+ token_auth = ConfigurationSetting.for_externalintegration( + ExternalIntegration.TOKEN_AUTH, licensepool.collection.external_integration ) - if token_auth is None: + if not token_auth or token_auth.value is None: return fulfillment - token = self.get_authentication_token(patron, token_auth) + token = self.get_authentication_token(patron, token_auth.value) if isinstance(token, ProblemDetail): raise CannotFulfill() diff --git a/core/opds2_import.py b/core/opds2_import.py index 308a60d7ef..11955be837 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -26,7 +26,7 @@ FormField, ) from core.mirror import MirrorUploader -from core.model.configuration import HasExternalIntegration +from core.model.configuration import ConfigurationSetting, HasExternalIntegration from core.model.integration import IntegrationConfiguration from .coverage import CoverageFailure @@ -950,10 +950,10 @@ def _parse_feed_links(self, links: list[core_ast.Link]) -> None: for link in links: if first_or_default(link.rels) == Hyperlink.TOKEN_AUTH: # Save the collection-wide token authentication endpoint - config = self.integration_configuration() - settings = config.settings_dict.copy() - settings[ExternalIntegration.TOKEN_AUTH] = link.href - config.settings_dict = settings + auth_setting = ConfigurationSetting.for_externalintegration( + ExternalIntegration.TOKEN_AUTH, self.collection.external_integration + ) + auth_setting.value = link.href def extract_feed_data( self, feed: str | opds2_ast.OPDS2Feed, feed_url: str | None = None diff --git a/tests/api/test_opds2.py b/tests/api/test_opds2.py index 072a4b3e03..d4a896adb6 100644 --- a/tests/api/test_opds2.py +++ b/tests/api/test_opds2.py @@ -19,7 +19,7 @@ ) from core.lane import Facets, Pagination from core.model.collection import Collection -from core.model.configuration import ExternalIntegration +from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.datasource import DataSource from core.model.patron import Loan from core.model.resource import Hyperlink @@ -180,11 +180,13 @@ def test_fulfill(self, mock_http, db: DatabaseTransactionFixture): protocol=ExternalIntegration.OPDS2_IMPORT ) work = db.work(with_license_pool=True, collection=collection) - DatabaseTransactionFixture.set_settings( - collection.integration_configuration, - ExternalIntegration.TOKEN_AUTH, - "http://example.org/token?userName={patron_id}", + integration: ExternalIntegration = collection.create_external_integration( + ExternalIntegration.OPDS2_IMPORT ) + setting: ConfigurationSetting = ConfigurationSetting.for_externalintegration( + ExternalIntegration.TOKEN_AUTH, integration + ) + setting.value = "http://example.org/token?userName={patron_id}" ff_info = FulfillmentInfo( collection, @@ -252,9 +254,7 @@ def test_fulfill(self, mock_http, db: DatabaseTransactionFixture): ff_info.content_link = ( "http://example.org/11234/fulfill?authToken={authentication_token}" ) - DatabaseTransactionFixture.set_settings( - collection.integration_configuration, ExternalIntegration.TOKEN_AUTH, None - ) + setting.value = None ff_info = processor.fulfill(patron, "", work.license_pools[0], None, ff_info) assert ff_info.content_link_redirect == False diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index 7f44c38352..e1d2c5a8bb 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -5,6 +5,7 @@ from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory from core.model import ( + ConfigurationSetting, 
Contribution, Contributor, DataSource, @@ -441,9 +442,9 @@ def test_auth_token_feed( imported_editions, pools, works, failures = data.importer.import_from_feed( content ) - setting = data.importer.integration_configuration().settings_dict.get( - ExternalIntegration.TOKEN_AUTH + setting = ConfigurationSetting.for_externalintegration( + ExternalIntegration.TOKEN_AUTH, data.collection.external_integration ) # Did the token endpoint get stored correctly? - assert setting == "http://example.org/auth?userName={patron_id}" + assert setting.value == "http://example.org/auth?userName={patron_id}" From a2a11bbb638ae62f2e464d62205bc0f92ac4fd3b Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 24 Aug 2023 13:12:43 -0300 Subject: [PATCH 003/262] Make sure we are closing DB transactions opened in tests. (#1343) --- tests/api/test_controller_scopedsession.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/tests/api/test_controller_scopedsession.py b/tests/api/test_controller_scopedsession.py index 1f80649d48..d07c5b05a5 100644 --- a/tests/api/test_controller_scopedsession.py +++ b/tests/api/test_controller_scopedsession.py @@ -136,11 +136,14 @@ def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): [identifier] = fixture.app.manager._db.query(Identifier).all() assert "1024" == identifier.identifier - # But if we were to use flask_scoped_session to create a - # brand new session, it would not see the Identifier, - # because it's running in a different database session. - new_session = fixture.app.manager._db.session_factory() - assert [] == new_session.query(Identifier).all() + # We use the session context manager here to make sure + # we don't keep a transaction open for this new session + # once we are done with it. + with fixture.app.manager._db.session_factory() as new_session: + # But if we were to use flask_scoped_session to create a + # brand new session, it would not see the Identifier, + # because it's running in a different database session. + assert [] == new_session.query(Identifier).all() # When the index controller runs in the request context, # it doesn't store anything that's associated with the @@ -182,3 +185,7 @@ def test_scoped_session(self, controller_fixture_without_cm: ControllerFixture): # which is the same as self._db, the unscoped database session # used by most other unit tests. assert session1 != session2 + + # Make sure that we close the connections for the scoped sessions. + session1.bind.dispose() + session2.bind.dispose() From d7d3d8766059b2a4a94571ef5c9d0970535a24c8 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 28 Aug 2023 16:42:20 -0300 Subject: [PATCH 004/262] Revert "Temporarily removed the basic-token authentication capabilities (#1249)" (#1275) This reverts commit 308c4db755a9f314c7efc8be4eea2b300546e52b. 
Co-authored-by: Tim DiLauro Co-authored-by: Rishi Diwan --- api/authenticator.py | 6 ++++++ tests/api/test_authenticator.py | 28 +++++++++++++++++++++++++++- 2 files changed, 33 insertions(+), 1 deletion(-) diff --git a/api/authenticator.py b/api/authenticator.py index 2808c0419c..9be8470c1c 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -401,6 +401,12 @@ def register_basic_auth_provider( ): raise CannotLoadConfiguration("Two basic auth providers configured") self.basic_auth_provider = provider + if self.library is not None: + self.access_token_authentication_provider = ( + BasicTokenAuthenticationProvider( + self._db, self.library, self.basic_auth_provider + ) + ) def register_saml_provider( self, diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index 15d1d2198c..34c2a7a5ca 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -21,6 +21,7 @@ from werkzeug.datastructures import Authorization from api.annotations import AnnotationWriter +from api.authentication.access_token import AccessTokenProvider from api.authentication.base import PatronData from api.authentication.basic import ( BarcodeFormats, @@ -922,6 +923,21 @@ def test_authenticated_patron_bearer( assert response == "foo" assert saml.authenticated_patron.call_count == 1 + def test_authenticated_patron_bearer_access_token( + self, db: DatabaseTransactionFixture, mock_basic: MockBasicFixture + ): + basic = mock_basic() + authenticator = LibraryAuthenticator( + _db=db.session, library=db.default_library(), basic_auth_provider=basic + ) + patron = db.patron() + token = AccessTokenProvider.generate_token(db.session, patron, "pass") + auth = Authorization(auth_type="bearer", token=token) + + auth_patron = authenticator.authenticated_patron(db.session, auth) + assert type(auth_patron) == Patron + assert auth_patron.id == patron.id + def test_authenticated_patron_unsupported_mechanism( self, db: DatabaseTransactionFixture ): @@ -957,6 +973,16 @@ def test_get_credential_from_header( ) assert authenticator.get_credential_from_header(credential) is None + authenticator = LibraryAuthenticator( + _db=db.session, + library=db.default_library(), + basic_auth_provider=basic, + ) + patron = db.patron() + token = AccessTokenProvider.generate_token(db.session, patron, "passworx") + credential = Authorization(auth_type="bearer", token=token) + assert authenticator.get_credential_from_header(credential) == "passworx" + def test_create_authentication_document( self, db: DatabaseTransactionFixture, @@ -1077,7 +1103,7 @@ def annotate_authentication_document(library, doc, url_for): # The main thing we need to test is that the # authentication sub-documents are assembled properly and # placed in the right position. - [basic_doc] = doc["authentication"] + [token_doc, basic_doc] = doc["authentication"] expect_basic = basic.authentication_flow_document(db.session) assert expect_basic == basic_doc From 77188cc3012af57935736eeed7379710404ba592 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 31 Aug 2023 08:54:41 -0300 Subject: [PATCH 005/262] Bump tox from 4.10.0 to 4.11.0 (#1346) Bumps [tox](https://github.com/tox-dev/tox) from 4.10.0 to 4.11.0. 
- [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.10.0...4.11.0) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 71 +++++++++++++++++++++++++++++++---------------------- 1 file changed, 42 insertions(+), 29 deletions(-) diff --git a/poetry.lock b/poetry.lock index 76ff1cca63..0ef4e4503c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "alembic" @@ -993,18 +993,21 @@ sgmllib3k = "*" [[package]] name = "filelock" -version = "3.12.2" +version = "3.12.3" description = "A platform independent file lock." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, - {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, + {file = "filelock-3.12.3-py3-none-any.whl", hash = "sha256:f067e40ccc40f2b48395a80fcbd4728262fab54e232e090a4063ab804179efeb"}, + {file = "filelock-3.12.3.tar.gz", hash = "sha256:0ecc1dd2ec4672a10c8550a8182f1bd0c0a5088470ecd5a125e45f49472fac3d"}, ] +[package.dependencies] +typing-extensions = {version = ">=4.7.1", markers = "python_version < \"3.11\""} + [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-timeout (>=2.1)"] [[package]] name = "firebase-admin" @@ -1169,12 +1172,12 @@ files = [ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -1276,8 +1279,8 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < 
\"3.11\""}, {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -2554,13 +2557,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "pluggy" -version = "1.2.0" +version = "1.3.0" description = "plugin and hook calling mechanisms for python" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, - {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, + {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, + {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, ] [package.extras] @@ -3016,13 +3019,13 @@ pywin32 = ">=223" [[package]] name = "pyproject-api" -version = "1.5.3" +version = "1.6.1" description = "API to interact with the python pyproject.toml based projects" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "pyproject_api-1.5.3-py3-none-any.whl", hash = "sha256:14cf09828670c7b08842249c1f28c8ee6581b872e893f81b62d5465bec41502f"}, - {file = "pyproject_api-1.5.3.tar.gz", hash = "sha256:ffb5b2d7cad43f5b2688ab490de7c4d3f6f15e0b819cb588c4b771567c9729eb"}, + {file = "pyproject_api-1.6.1-py3-none-any.whl", hash = "sha256:4c0116d60476b0786c88692cf4e325a9814965e2469c5998b830bba16b183675"}, + {file = "pyproject_api-1.6.1.tar.gz", hash = "sha256:1817dc018adc0d1ff9ca1ed8c60e1623d5aaca40814b953af14a9cf9a5cae538"}, ] [package.dependencies] @@ -3030,8 +3033,8 @@ packaging = ">=23.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} [package.extras] -docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "importlib-metadata (>=6.6)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "setuptools (>=67.8)", "wheel (>=0.40)"] +docs = ["furo (>=2023.8.19)", "sphinx (<7.2)", "sphinx-autodoc-typehints (>=1.24)"] +testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "setuptools (>=68.1.2)", "wheel (>=0.41.2)"] [[package]] name = "pyrsistent" @@ -3220,6 +3223,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3227,8 +3231,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3245,6 +3256,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3252,6 +3264,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3654,7 +3667,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} mypy = {version = ">=0.910", optional = true, markers = "python_version >= \"3\" and extra == \"mypy\""} sqlalchemy2-stubs = {version = "*", optional = true, markers = "extra == \"mypy\""} @@ -3720,30 +3733,30 @@ files = [ [[package]] name = "tox" -version = "4.10.0" +version = "4.11.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.10.0-py3-none-any.whl", hash = "sha256:e4a1b1438955a6da548d69a52350054350cf6a126658c20943261c48ed6d4c92"}, - {file = "tox-4.10.0.tar.gz", hash = "sha256:e041b2165375be690aca0ec4d96360c6906451380520e4665bf274f66112be35"}, + {file = "tox-4.11.0-py3-none-any.whl", hash = "sha256:7f7e5f1b20115560e610b9a11143bbcf48270ec3293f36c0a18be7b287c3b41f"}, + {file = "tox-4.11.0.tar.gz", hash = "sha256:cc665e1e6b095f843b952ea5696f7a64bb64982aff62b62547ef171fa60e21eb"}, ] [package.dependencies] cachetools = ">=5.3.1" chardet = ">=5.2" colorama = ">=0.4.6" -filelock = ">=3.12.2" +filelock = ">=3.12.3" packaging = ">=23.1" platformdirs = ">=3.10" -pluggy = ">=1.2" -pyproject-api = ">=1.5.3" +pluggy = ">=1.3" +pyproject-api = ">=1.6.1" tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} virtualenv = ">=20.24.3" [package.extras] -docs = ["furo (>=2023.7.26)", "sphinx (>=7.1.2)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] -testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=0.3.1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.18)", 
"psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.1)"] +docs = ["furo (>=2023.8.19)", "sphinx (>=7.2.4)", "sphinx-argparse-cli (>=1.11.1)", "sphinx-autodoc-typehints (>=1.24)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"] +testing = ["build[virtualenv] (>=0.10)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.1.1)", "devpi-process (>=1)", "diff-cover (>=7.7)", "distlib (>=0.3.7)", "flaky (>=3.7)", "hatch-vcs (>=0.3)", "hatchling (>=1.18)", "psutil (>=5.9.5)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)", "pytest-xdist (>=3.3.1)", "re-assert (>=1.1)", "time-machine (>=2.12)", "wheel (>=0.41.2)"] [[package]] name = "tox-docker" From 13513bc15cdcaee3339754dec6212b5431fef24d Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 31 Aug 2023 18:49:01 -0300 Subject: [PATCH 006/262] =?UTF-8?q?Remove=20the=20directory=20importer=20s?= =?UTF-8?q?cript=20=F0=9F=94=A5=20(PP-213)=20(#1345)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- api/admin/controller/settings.py | 11 - api/onix.py | 420 ---------------- bin/directory_import | 14 - core/metadata_layer.py | 113 +---- core/model/configuration.py | 2 - scripts.py | 568 --------------------- tests/api/conftest.py | 1 - tests/api/lcp/test_collection.py | 2 +- tests/api/test_onix.py | 92 ---- tests/api/test_scripts.py | 631 ------------------------ tests/core/conftest.py | 1 - tests/core/models/test_configuration.py | 2 +- tests/core/test_metadata.py | 51 +- tests/fixtures/marc_files.py | 16 - 14 files changed, 7 insertions(+), 1917 deletions(-) delete mode 100644 api/onix.py delete mode 100755 bin/directory_import delete mode 100644 tests/api/test_onix.py delete mode 100644 tests/fixtures/marc_files.py diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index 5ab4bda7a5..e70070d4ab 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -465,17 +465,6 @@ def _get_collection_protocols(self, provider_apis): protocols = self._get_integration_protocols( provider_apis, protocol_name_attr="NAME" ) - protocols.append( - { - "name": ExternalIntegration.MANUAL, - "label": _("Manual import"), - "description": _( - "Books will be manually added to the circulation manager, " - "not imported automatically through a protocol." 
- ), - "settings": [], - } - ) return protocols diff --git a/api/onix.py b/api/onix.py deleted file mode 100644 index 5b327cb484..0000000000 --- a/api/onix.py +++ /dev/null @@ -1,420 +0,0 @@ -import logging -from enum import Enum - -import dateutil.parser -from lxml import etree - -from core.classifier import Classifier -from core.metadata_layer import ( - CirculationData, - ContributorData, - IdentifierData, - LinkData, - Metadata, - SubjectData, -) -from core.model import ( - Classification, - Contributor, - EditionConstants, - Hyperlink, - Identifier, - LicensePool, - Representation, - Subject, -) -from core.util.datetime_helpers import to_utc -from core.util.xmlparser import XMLParser - - -class UsageStatus(Enum): - UNLIMITED = "01" - LIMITED = "02" - PROHIBITED = "03" - - -class UsageUnit(Enum): - COPIES = "01" - CHARACTERS = "02" - WORDS = "03" - PAGES = "04" - PERCENTAGE = "05" - DEVICES = "06" - CONCURRENT_USERS = "07" - PERCENTAGE_PER_TIME_PERIOD = "08" - DAYS = "09" - TIMES = "10" - - -class ONIXExtractor: - """Transform an ONIX file into a list of Metadata objects.""" - - # TODO: '20' indicates a semicolon-separated list of freeform tags, - # which could also be useful. - SUBJECT_TYPES = { - "01": Classifier.DDC, - "03": Classifier.LCC, - "04": Classifier.LCSH, - "10": Classifier.BISAC, - "12": Classifier.BIC, - } - - AUDIENCE_TYPES = { - "01": Classifier.AUDIENCE_ADULT, # General/trade for adult audience - "02": Classifier.AUDIENCE_CHILDREN, # (not for educational purpose) - "03": Classifier.AUDIENCE_YOUNG_ADULT, # (not for educational purpose) - "04": Classifier.AUDIENCE_CHILDREN, # Primary and secondary/elementary and high school - "05": Classifier.AUDIENCE_ADULT, # College/higher education - "06": Classifier.AUDIENCE_ADULT, # Professional and scholarly - "07": Classifier.AUDIENCE_ADULT, # ESL - "08": Classifier.AUDIENCE_ADULT, # Adult education - "09": Classifier.AUDIENCE_ADULT, # Second language teaching other than English - } - - CONTRIBUTOR_TYPES = { - "A01": Contributor.AUTHOR_ROLE, - "A02": Contributor.AUTHOR_ROLE, # 'With or as told to' - "A03": Contributor.AUTHOR_ROLE, # Screenplay author - "A04": Contributor.LYRICIST_ROLE, # Libretto author for an opera - "A05": Contributor.LYRICIST_ROLE, - "A06": Contributor.COMPOSER_ROLE, - "A07": Contributor.ILLUSTRATOR_ROLE, # Visual artist who is the primary creator of the work - "A08": Contributor.PHOTOGRAPHER_ROLE, - "A09": Contributor.AUTHOR_ROLE, # 'Created by' - "A10": Contributor.UNKNOWN_ROLE, # 'From an idea by' - "A11": Contributor.DESIGNER_ROLE, - "A12": Contributor.ILLUSTRATOR_ROLE, - "A13": Contributor.PHOTOGRAPHER_ROLE, - "A14": Contributor.AUTHOR_ROLE, # Author of the text for a work that is primarily photos or illustrations - "A15": Contributor.INTRODUCTION_ROLE, # Preface author - "A16": Contributor.UNKNOWN_ROLE, # Prologue author - "A17": Contributor.UNKNOWN_ROLE, # Summary author - "A18": Contributor.UNKNOWN_ROLE, # Supplement author - "A19": Contributor.AFTERWORD_ROLE, # Afterword author - "A20": Contributor.UNKNOWN_ROLE, # Author of notes or annotations - "A21": Contributor.UNKNOWN_ROLE, # Author of commentary on main text - "A22": Contributor.UNKNOWN_ROLE, # Epilogue author - "A23": Contributor.FOREWORD_ROLE, - "A24": Contributor.INTRODUCTION_ROLE, - "A25": Contributor.UNKNOWN_ROLE, # Author/compiler of footnotes - "A26": Contributor.UNKNOWN_ROLE, # Author of memoir accompanying main text - "A27": Contributor.UNKNOWN_ROLE, # Person who carried out experiments reported in the text - "A29": 
Contributor.INTRODUCTION_ROLE, # Author of introduction and notes - "A30": Contributor.UNKNOWN_ROLE, # Writer of computer programs ancillary to the text - "A31": Contributor.LYRICIST_ROLE, # 'Book and lyrics by' - "A32": Contributor.CONTRIBUTOR_ROLE, # 'Contributions by' - "A33": Contributor.UNKNOWN_ROLE, # Appendix author - "A34": Contributor.UNKNOWN_ROLE, # Compiler of index - "A35": Contributor.ARTIST_ROLE, # 'Drawings by' - "A36": Contributor.ARTIST_ROLE, # Cover artist - "A37": Contributor.UNKNOWN_ROLE, # Responsible for preliminary work on which the work is based - "A38": Contributor.UNKNOWN_ROLE, # Author of the first edition who is not an author of the current edition - "A39": Contributor.UNKNOWN_ROLE, # 'Maps by' - "A40": Contributor.ARTIST_ROLE, # 'Inked or colored by' - "A41": Contributor.UNKNOWN_ROLE, # 'Paper engineering by' - "A42": Contributor.UNKNOWN_ROLE, # 'Continued by' - "A43": Contributor.UNKNOWN_ROLE, # Interviewer - "A44": Contributor.UNKNOWN_ROLE, # Interviewee - "A45": Contributor.AUTHOR_ROLE, # Writer of dialogue, captions in a comic book - "A46": Contributor.ARTIST_ROLE, # Inker - "A47": Contributor.ARTIST_ROLE, # Colorist - "A48": Contributor.ARTIST_ROLE, # Letterer - "A51": Contributor.UNKNOWN_ROLE, # 'Research by' - "A99": Contributor.UNKNOWN_ROLE, # 'Other primary creator' - "B01": Contributor.EDITOR_ROLE, - "B02": Contributor.EDITOR_ROLE, # 'Revised by' - "B03": Contributor.UNKNOWN_ROLE, # 'Retold by' - "B04": Contributor.UNKNOWN_ROLE, # 'Abridged by' - "B05": Contributor.ADAPTER_ROLE, - "B06": Contributor.TRANSLATOR_ROLE, - "B07": Contributor.UNKNOWN_ROLE, # 'As told by' - "B08": Contributor.TRANSLATOR_ROLE, # With commentary on the translation - "B09": Contributor.EDITOR_ROLE, # Series editor - "B10": Contributor.TRANSLATOR_ROLE, # 'Edited and translated by' - "B11": Contributor.EDITOR_ROLE, # Editor-in-chief - "B12": Contributor.EDITOR_ROLE, # Guest editor - "B13": Contributor.EDITOR_ROLE, # Volume editor - "B14": Contributor.EDITOR_ROLE, # Editorial board member - "B15": Contributor.EDITOR_ROLE, # 'Editorial coordination by' - "B16": Contributor.EDITOR_ROLE, # Managing editor - "B17": Contributor.EDITOR_ROLE, # Founding editor of a serial publication - "B18": Contributor.EDITOR_ROLE, # 'Prepared for publication by' - "B19": Contributor.EDITOR_ROLE, # Associate editor - "B20": Contributor.EDITOR_ROLE, # Consultant editor - "B21": Contributor.EDITOR_ROLE, # General editor - "B22": Contributor.UNKNOWN_ROLE, # 'Dramatized by' - "B23": Contributor.EDITOR_ROLE, # 'General rapporteur' - "B24": Contributor.EDITOR_ROLE, # Literary editor - "B25": Contributor.COMPOSER_ROLE, # 'Arranged by (music)' - "B26": Contributor.EDITOR_ROLE, # Technical editor - "B27": Contributor.UNKNOWN_ROLE, # Thesis advisor - "B28": Contributor.UNKNOWN_ROLE, # Thesis examiner - "B29": Contributor.EDITOR_ROLE, # Scientific editor - "B30": Contributor.UNKNOWN_ROLE, # Historical advisor - "B31": Contributor.UNKNOWN_ROLE, # Editor of the first edition who is not an editor of the current edition - "B99": Contributor.EDITOR_ROLE, # Other type of adaptation or editing - "C01": Contributor.UNKNOWN_ROLE, # 'Compiled by' - "C02": Contributor.UNKNOWN_ROLE, # 'Selected by' - "C03": Contributor.UNKNOWN_ROLE, # 'Non-text material selected by' - "C04": Contributor.UNKNOWN_ROLE, # 'Curated by' - "C99": Contributor.UNKNOWN_ROLE, # Other type of compilation - "D01": Contributor.PRODUCER_ROLE, - "D02": Contributor.DIRECTOR_ROLE, - "D03": Contributor.MUSICIAN_ROLE, # Conductor - "D04": 
Contributor.UNKNOWN_ROLE, # Choreographer - "D05": Contributor.DIRECTOR_ROLE, # Other type of direction - "E01": Contributor.ACTOR_ROLE, - "E02": Contributor.PERFORMER_ROLE, # Dancer - "E03": Contributor.NARRATOR_ROLE, # 'Narrator' - "E04": Contributor.UNKNOWN_ROLE, # Commentator - "E05": Contributor.PERFORMER_ROLE, # Vocal soloist - "E06": Contributor.PERFORMER_ROLE, # Instrumental soloist - "E07": Contributor.NARRATOR_ROLE, # Reader of recorded text, as in an audiobook - "E08": Contributor.PERFORMER_ROLE, # Name of a musical group in a performing role - "E09": Contributor.PERFORMER_ROLE, # Speaker - "E10": Contributor.UNKNOWN_ROLE, # Presenter - "E99": Contributor.PERFORMER_ROLE, # Other type of performer - "F01": Contributor.PHOTOGRAPHER_ROLE, # 'Filmed/photographed by' - "F02": Contributor.EDITOR_ROLE, # 'Editor (film or video)' - "F99": Contributor.UNKNOWN_ROLE, # Other type of recording - "Z01": Contributor.UNKNOWN_ROLE, # 'Assisted by' - "Z02": Contributor.UNKNOWN_ROLE, # 'Honored/dedicated to' - "Z99": Contributor.UNKNOWN_ROLE, # Other creative responsibility - } - - PRODUCT_CONTENT_TYPES = { - "10": EditionConstants.BOOK_MEDIUM, # Text (eye-readable) - "01": EditionConstants.AUDIO_MEDIUM, # Audiobook - } - - _logger = logging.getLogger(__name__) - - @classmethod - def parse(cls, file, data_source_name, default_medium=None): - metadata_records = [] - - # TODO: ONIX has plain language 'reference names' and short tags that - # may be used interchangably. This code currently only handles short tags, - # and it's not comprehensive. - - parser = XMLParser() - tree = etree.parse(file) - root = tree.getroot() - - for record in root.findall("product"): - title = parser.text_of_optional_subtag( - record, "descriptivedetail/titledetail/titleelement/b203" - ) - if not title: - title_prefix = parser.text_of_optional_subtag( - record, "descriptivedetail/titledetail/titleelement/b030" - ) - title_without_prefix = parser.text_of_optional_subtag( - record, "descriptivedetail/titledetail/titleelement/b031" - ) - if title_prefix and title_without_prefix: - title = title_prefix + " " + title_without_prefix - - medium = parser.text_of_optional_subtag(record, "b385") - - if not medium and default_medium: - medium = default_medium - else: - medium = cls.PRODUCT_CONTENT_TYPES.get( - medium, EditionConstants.BOOK_MEDIUM - ) - - subtitle = parser.text_of_optional_subtag( - record, "descriptivedetail/titledetail/titleelement/b029" - ) - language = ( - parser.text_of_optional_subtag( - record, "descriptivedetail/language/b252" - ) - or "eng" - ) - publisher = parser.text_of_optional_subtag( - record, "publishingdetail/publisher/b081" - ) - imprint = parser.text_of_optional_subtag( - record, "publishingdetail/imprint/b079" - ) - if imprint == publisher: - imprint = None - - publishing_date = parser.text_of_optional_subtag( - record, "publishingdetail/publishingdate/b306" - ) - issued = None - if publishing_date: - issued = dateutil.parser.isoparse(publishing_date) - if issued.tzinfo is None: - cls._logger.warning( - "Publishing date {} does not contain timezone information. 
Assuming UTC.".format( - publishing_date - ) - ) - issued = to_utc(issued) - - identifier_tags = parser._xpath(record, "productidentifier") - identifiers = [] - primary_identifier = None - for tag in identifier_tags: - type = parser.text_of_subtag(tag, "b221") - if type == "02" or type == "15": - primary_identifier = IdentifierData( - Identifier.ISBN, parser.text_of_subtag(tag, "b244") - ) - identifiers.append(primary_identifier) - - subject_tags = parser._xpath(record, "descriptivedetail/subject") - subjects = [] - - weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT - for tag in subject_tags: - type = parser.text_of_subtag(tag, "b067") - if type in cls.SUBJECT_TYPES: - b069 = parser.text_of_optional_subtag(tag, "b069") - - if b069: - subjects.append( - SubjectData(cls.SUBJECT_TYPES[type], b069, weight=weight) - ) - - audience_tags = parser._xpath(record, "descriptivedetail/audience/b204") - audiences = [] - for tag in audience_tags: - if tag.text in cls.AUDIENCE_TYPES: - subjects.append( - SubjectData( - Subject.FREEFORM_AUDIENCE, - cls.AUDIENCE_TYPES[tag.text], - weight=weight, - ) - ) - - # TODO: We don't handle ONIX unnamed and alternatively named contributors. - contributor_tags = parser._xpath(record, "descriptivedetail/contributor") - contributors = [] - for tag in contributor_tags: - type = parser.text_of_subtag(tag, "b035") - if type in cls.CONTRIBUTOR_TYPES: - person_name_display = parser.text_of_optional_subtag(tag, "b036") - person_name_inverted = parser.text_of_optional_subtag(tag, "b037") - corp_name_display = parser.text_of_optional_subtag(tag, "b047") - corp_name_inverted = parser.text_of_optional_subtag(tag, "x443") - bio = parser.text_of_optional_subtag(tag, "b044") - family_name = None - if person_name_display or person_name_inverted: - display_name = person_name_display - sort_name = person_name_inverted - family_name = parser.text_of_optional_subtag(tag, "b040") - elif corp_name_display or corp_name_inverted: - display_name = corp_name_display - # Sort form for corporate name might just be the display name - sort_name = corp_name_inverted or corp_name_display - else: - sort_name = display_name = None - contributors.append( - ContributorData( - sort_name=sort_name, - display_name=display_name, - family_name=family_name, - roles=[cls.CONTRIBUTOR_TYPES[type]], - biography=bio, - ) - ) - - collateral_tags = parser._xpath(record, "collateraldetail/textcontent") - links = [] - for tag in collateral_tags: - type = parser.text_of_subtag(tag, "x426") - # TODO: '03' is the summary in the example I'm testing, but that - # might not be generally true. 
- if type == "03": - text = parser.text_of_subtag(tag, "d104") - links.append( - LinkData( - rel=Hyperlink.DESCRIPTION, - media_type=Representation.TEXT_HTML_MEDIA_TYPE, - content=text, - ) - ) - - usage_constraint_tags = parser._xpath( - record, "descriptivedetail/epubusageconstraint" - ) - licenses_owned = LicensePool.UNLIMITED_ACCESS - - if usage_constraint_tags: - cls._logger.debug( - "Found {} EpubUsageConstraint tags".format( - len(usage_constraint_tags) - ) - ) - - for usage_constraint_tag in usage_constraint_tags: - usage_status = parser.text_of_subtag(usage_constraint_tag, "x319") - - cls._logger.debug(f"EpubUsageStatus: {usage_status}") - - if usage_status == UsageStatus.PROHIBITED.value: - raise Exception("The content is prohibited") - elif usage_status == UsageStatus.LIMITED.value: - usage_limit_tags = parser._xpath( - record, "descriptivedetail/epubusageconstraint/epubusagelimit" - ) - - cls._logger.debug( - f"Found {len(usage_limit_tags)} EpubUsageLimit tags" - ) - - if not usage_limit_tags: - continue - - [usage_limit_tag] = usage_limit_tags - - usage_unit = parser.text_of_subtag(usage_limit_tag, "x321") - - cls._logger.debug(f"EpubUsageUnit: {usage_unit}") - - if ( - usage_unit == UsageUnit.COPIES.value - or usage_status == UsageUnit.CONCURRENT_USERS.value - ): - quantity_limit = parser.text_of_subtag(usage_limit_tag, "x320") - - cls._logger.debug(f"Quantity: {quantity_limit}") - - if licenses_owned == LicensePool.UNLIMITED_ACCESS: - licenses_owned = 0 - - licenses_owned += int(quantity_limit) - - metadata_records.append( - Metadata( - data_source=data_source_name, - title=title, - subtitle=subtitle, - language=language, - medium=medium, - publisher=publisher, - imprint=imprint, - issued=issued, - primary_identifier=primary_identifier, - identifiers=identifiers, - subjects=subjects, - contributors=contributors, - links=links, - circulation=CirculationData( - data_source_name, - primary_identifier, - licenses_owned=licenses_owned, - licenses_available=licenses_owned, - licenses_reserved=0, - patrons_in_hold_queue=0, - ), - ) - ) - - return metadata_records diff --git a/bin/directory_import b/bin/directory_import deleted file mode 100755 index 51baf60bdb..0000000000 --- a/bin/directory_import +++ /dev/null @@ -1,14 +0,0 @@ -#!/usr/bin/env python -"""Import books into a collection from local disk storage.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) - -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from api.lcp import mirror # noqa: autoflake -from scripts import DirectoryImportScript - -DirectoryImportScript().run() diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 15c4ef54e4..9290b68d0f 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -9,17 +9,15 @@ import csv import datetime import logging -import re from collections import defaultdict from typing import List, Optional from dateutil.parser import parse -from pymarc import MARCReader from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ from .analytics import Analytics -from .classifier import NO_NUMBER, NO_VALUE, Classifier +from .classifier import NO_NUMBER, NO_VALUE from .model import ( Classification, Collection, @@ -46,9 +44,9 @@ from .model.configuration import ExternalIntegrationLink from .model.licensing import LicenseFunctions, LicenseStatus from .util import LanguageCodes -from 
.util.datetime_helpers import strptime_utc, to_utc, utc_now +from .util.datetime_helpers import to_utc, utc_now from .util.median import median -from .util.personal_names import display_name_to_sort_name, name_tidy +from .util.personal_names import display_name_to_sort_name class ReplacementPolicy: @@ -2380,108 +2378,3 @@ def _date_field(self, row, field_name): self.log.warning('Could not parse date "%s"' % value) value = None return value - - -class MARCExtractor: - - """Transform a MARC file into a list of Metadata objects. - - This is not totally general, but it's a good start. - """ - - # Common things found in a MARC record after the name of the author - # which we sould like to remove. - END_OF_AUTHOR_NAME_RES = [ - re.compile(r",\s+[0-9]+-"), # Birth year - re.compile(r",\s+active "), - re.compile(r",\s+graf,"), - re.compile(r",\s+author."), - ] - - @classmethod - def name_cleanup(cls, name): - # Turn 'Dante Alighieri, 1265-1321, author.' - # into 'Dante Alighieri'. - for regex in cls.END_OF_AUTHOR_NAME_RES: - match = regex.search(name) - if match: - name = name[: match.start()] - break - name = name_tidy(name) - return name - - @classmethod - def parse_year(cls, value): - """Handle a publication year that may not be in the right format.""" - for format in ("%Y", "%Y."): - try: - return strptime_utc(value, format) - except ValueError: - continue - return None - - @classmethod - def parse(cls, file, data_source_name, default_medium_type=None): - reader = MARCReader(file) - metadata_records = [] - - for record in reader: - title = record.title - if title.endswith(" /"): - title = title[: -len(" /")] - issued_year = cls.parse_year(record.pubyear) - publisher = record.publisher - if publisher.endswith(","): - publisher = publisher[:-1] - - links = [] - summary = record.notes[0]["a"] - - if summary: - summary_link = LinkData( - rel=Hyperlink.DESCRIPTION, - media_type=Representation.TEXT_PLAIN, - content=summary, - ) - links.append(summary_link) - - isbn = record["020"]["a"].split(" ")[0] - primary_identifier = IdentifierData(Identifier.ISBN, isbn) - - subjects = [ - SubjectData( - Classifier.FAST, - subject["a"], - ) - for subject in record.subjects - ] - - author = record.author - if author: - author = cls.name_cleanup(author) - author_names = [author] - else: - author_names = ["Anonymous"] - contributors = [ - ContributorData( - sort_name=author, - roles=[Contributor.AUTHOR_ROLE], - ) - for author in author_names - ] - - metadata_records.append( - Metadata( - data_source=data_source_name, - title=title, - language="eng", - medium=Edition.BOOK_MEDIUM, - publisher=publisher, - issued=issued_year, - primary_identifier=primary_identifier, - subjects=subjects, - contributors=contributors, - links=links, - ) - ) - return metadata_records diff --git a/core/model/configuration.py b/core/model/configuration.py index 264d7c3dd2..96df379648 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -182,7 +182,6 @@ class ExternalIntegration(Base): ODL = "ODL" ODL2 = "ODL 2.0" LCP = DataSourceConstants.LCP - MANUAL = DataSourceConstants.MANUAL PROQUEST = DataSourceConstants.PROQUEST # These protocols were used on the Content Server when mirroring @@ -201,7 +200,6 @@ class ExternalIntegration(Base): AXIS_360, GUTENBERG, ENKI, - MANUAL, ] # Some integrations with LICENSE_GOAL imply that the data and diff --git a/scripts.py b/scripts.py index 33ae1124bd..f088686dfb 100644 --- a/scripts.py +++ b/scripts.py @@ -24,7 +24,6 @@ from api.marc import LibraryAnnotator as 
MARCLibraryAnnotator from api.novelist import NoveListAPI from api.nyt import NYTBestSellerAPI -from api.onix import ONIXExtractor from api.opds_for_distributors import ( OPDSForDistributorsImporter, OPDSForDistributorsImportMonitor, @@ -36,35 +35,21 @@ from core.lane import Facets, FeaturedFacets, Lane, Pagination from core.log import LogConfiguration from core.marc import MARCExporter -from core.metadata_layer import ( - CirculationData, - FormatData, - LinkData, - MARCExtractor, - ReplacementPolicy, -) -from core.mirror import MirrorUploader from core.model import ( LOCK_ID_DB_INIT, CachedMARCFile, CirculationEvent, - Collection, ConfigurationSetting, Contribution, DataSource, - DeliveryMechanism, Edition, - EditionConstants, ExternalIntegration, Hold, - Hyperlink, Identifier, Library, LicensePool, Loan, Patron, - Representation, - RightsStatus, SessionManager, get_one, pg_advisory_lock, @@ -72,7 +57,6 @@ from core.model.configuration import ExternalIntegrationLink from core.opds import AcquisitionFeed from core.scripts import ( - CollectionType, IdentifierInputScript, LaneSweeperScript, LibraryInputScript, @@ -1177,558 +1161,6 @@ class OPDSForDistributorsReaperScript(OPDSImportScript): PROTOCOL = OPDSForDistributorsImporter.NAME -class DirectoryImportScript(TimestampScript): - """Import some books into a collection, based on a file containing - metadata and directories containing ebook and cover files. - """ - - name = "Import new titles from a directory on disk" - - @classmethod - def arg_parser(cls, _db): - parser = argparse.ArgumentParser() - parser.add_argument( - "--collection-name", - help="Titles will be imported into a collection with this name. The collection will be created if it does not already exist.", - required=True, - ) - parser.add_argument( - "--collection-type", - help="Collection type. Valid values are: OPEN_ACCESS (default), PROTECTED_ACCESS, LCP.", - type=CollectionType, - choices=list(CollectionType), - default=CollectionType.OPEN_ACCESS, - ) - parser.add_argument( - "--data-source-name", - help="All data associated with this import activity will be recorded as originating with this data source. The data source will be created if it does not already exist.", - required=True, - ) - parser.add_argument( - "--metadata-file", - help="Path to a file containing MARC or ONIX 3.0 metadata for every title in the collection", - required=True, - ) - parser.add_argument( - "--metadata-format", - help='Format of the metadata file ("marc" or "onix")', - default="marc", - ) - parser.add_argument( - "--cover-directory", - help="Directory containing a full-size cover image for every title in the collection.", - ) - parser.add_argument( - "--ebook-directory", - help="Directory containing an EPUB or PDF file for every title in the collection.", - required=True, - ) - RS = RightsStatus - rights_uris = ", ".join(RS.OPEN_ACCESS) - parser.add_argument( - "--rights-uri", - help="A URI explaining the rights status of the works being uploaded. Acceptable values: %s" - % rights_uris, - required=True, - ) - parser.add_argument( - "--dry-run", - help="Show what would be imported, but don't actually do the import.", - action="store_true", - ) - parser.add_argument( - "--default-medium-type", - help="Default medium type used in the case when it's not explicitly specified in a metadata file. 
" - "Valid values are: {}.".format( - ", ".join(EditionConstants.FULFILLABLE_MEDIA) - ), - type=str, - choices=EditionConstants.FULFILLABLE_MEDIA, - ) - - return parser - - def do_run(self, cmd_args=None): - parser = self.arg_parser(self._db) - parsed = parser.parse_args(cmd_args) - collection_name = parsed.collection_name - collection_type = parsed.collection_type - data_source_name = parsed.data_source_name - metadata_file = parsed.metadata_file - metadata_format = parsed.metadata_format - cover_directory = parsed.cover_directory - ebook_directory = parsed.ebook_directory - rights_uri = parsed.rights_uri - dry_run = parsed.dry_run - default_medium_type = parsed.default_medium_type - - return self.run_with_arguments( - collection_name=collection_name, - collection_type=collection_type, - data_source_name=data_source_name, - metadata_file=metadata_file, - metadata_format=metadata_format, - cover_directory=cover_directory, - ebook_directory=ebook_directory, - rights_uri=rights_uri, - dry_run=dry_run, - default_medium_type=default_medium_type, - ) - - def run_with_arguments( - self, - collection_name, - collection_type, - data_source_name, - metadata_file, - metadata_format, - cover_directory, - ebook_directory, - rights_uri, - dry_run, - default_medium_type=None, - ): - if dry_run: - self.log.warning( - "This is a dry run. No files will be uploaded and nothing will change in the database." - ) - - collection, mirrors = self.load_collection( - collection_name, collection_type, data_source_name - ) - - if not collection or not mirrors: - return - - self.timestamp_collection = collection - - if dry_run: - mirrors = None - - self_hosted_collection = collection_type in ( - CollectionType.OPEN_ACCESS, - CollectionType.PROTECTED_ACCESS, - ) - replacement_policy = ReplacementPolicy.from_license_source(self._db) - replacement_policy.mirrors = mirrors - metadata_records = self.load_metadata( - metadata_file, metadata_format, data_source_name, default_medium_type - ) - for metadata in metadata_records: - _, licensepool = self.work_from_metadata( - collection, - collection_type, - metadata, - replacement_policy, - cover_directory, - ebook_directory, - rights_uri, - ) - - licensepool.self_hosted = True if self_hosted_collection else False - - if not dry_run: - self._db.commit() - - def load_collection(self, collection_name, collection_type, data_source_name): - """Locate a Collection with the given name. - - If the collection is found, it will be associated - with the given data source and configured with existing - covers and books mirror configurations. - - :param collection_name: Name of the Collection. - :type collection_name: string - - :param collection_type: Type of the collection: open access/proteceted access. - :type collection_name: CollectionType - - :param data_source_name: Associate this data source with - the Collection if it does not already have a data source. - A DataSource object will be created if necessary. - :type data_source_name: string - - :return: A 2-tuple (Collection, list of MirrorUploader instances) - :rtype: Tuple[Collection, List[MirrorUploader]] - """ - collection, is_new = Collection.by_name_and_protocol( - self._db, - collection_name, - ExternalIntegration.LCP - if collection_type == CollectionType.LCP - else ExternalIntegration.MANUAL, - ) - - if is_new: - self.log.error( - "An existing collection must be used and should be set up before running this script." 
- ) - return None, None - - mirrors = dict(covers_mirror=None, books_mirror=None) - - types = [ - ExternalIntegrationLink.COVERS, - ExternalIntegrationLink.OPEN_ACCESS_BOOKS - if collection_type == CollectionType.OPEN_ACCESS - else ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS, - ] - for type in types: - mirror_for_type = MirrorUploader.for_collection(collection, type) - if not mirror_for_type: - self.log.error( - "An existing %s mirror integration should be assigned to the collection before running the script." - % type - ) - return None, None - mirrors[type] = mirror_for_type - - data_source = DataSource.lookup( - self._db, data_source_name, autocreate=True, offers_licenses=True - ) - settings = collection.integration_configuration.settings_dict.copy() - settings[Collection.DATA_SOURCE_NAME_SETTING] = data_source.name - - return collection, mirrors - - def load_metadata( - self, metadata_file, metadata_format, data_source_name, default_medium_type - ): - """Read a metadata file and convert the data into Metadata records.""" - metadata_records = [] - - if metadata_format == "marc": - extractor = MARCExtractor() - elif metadata_format == "onix": - extractor = ONIXExtractor() - - with open(metadata_file) as f: - metadata_records.extend( - extractor.parse(f, data_source_name, default_medium_type) - ) - return metadata_records - - def work_from_metadata( - self, collection, collection_type, metadata, policy, *args, **kwargs - ): - """Creates a Work instance from metadata - - :param collection: Target collection - :type collection: Collection - - :param collection_type: Collection's type: open access/protected access - :type collection_type: CollectionType - - :param metadata: Book's metadata - :type metadata: Metadata - - :param policy: Replacement policy - :type policy: ReplacementPolicy - - :return: A 2-tuple of (Work object, LicensePool object) - :rtype: Tuple[core.model.work.Work, LicensePool] - """ - self.annotate_metadata(collection_type, metadata, policy, *args, **kwargs) - - if not metadata.circulation: - # We cannot actually provide access to the book so there - # is no point in proceeding with the import. 
- return - - edition, new = metadata.edition(self._db) - metadata.apply(edition, collection, replace=policy) - [pool] = [x for x in edition.license_pools if x.collection == collection] - if new: - self.log.info("Created new edition for %s", edition.title) - else: - self.log.info("Updating existing edition for %s", edition.title) - - work, ignore = pool.calculate_work() - if work: - work.set_presentation_ready() - self.log.info(f"FINALIZED {work.title}/{work.author}/{work.sort_author}") - return work, pool - - def annotate_metadata( - self, - collection_type, - metadata, - policy, - cover_directory, - ebook_directory, - rights_uri, - ): - """Add a CirculationData and possibly an extra LinkData to `metadata` - - :param collection_type: Collection's type: open access/protected access - :type collection_type: CollectionType - - :param metadata: Book's metadata - :type metadata: Metadata - - :param policy: Replacement policy - :type policy: ReplacementPolicy - - :param cover_directory: Directory containing book covers - :type cover_directory: string - - :param ebook_directory: Directory containing books - :type ebook_directory: string - - :param rights_uri: URI explaining the rights status of the works being uploaded - :type rights_uri: string - """ - identifier, ignore = metadata.primary_identifier.load(self._db) - data_source = metadata.data_source(self._db) - mirrors = policy.mirrors - - circulation_data = self.load_circulation_data( - collection_type, - identifier, - data_source, - ebook_directory, - mirrors, - metadata.title, - rights_uri, - ) - if not circulation_data: - # There is no point in contining. - return - - if metadata.circulation: - circulation_data.licenses_owned = metadata.circulation.licenses_owned - circulation_data.licenses_available = ( - metadata.circulation.licenses_available - ) - circulation_data.licenses_reserved = metadata.circulation.licenses_reserved - circulation_data.patrons_in_hold_queue = ( - metadata.circulation.patrons_in_hold_queue - ) - circulation_data.licenses = metadata.circulation.licenses - - metadata.circulation = circulation_data - - # If a cover image is available, add it to the Metadata - # as a link. 
- cover_link = None - if cover_directory: - cover_link = self.load_cover_link( - identifier, data_source, cover_directory, mirrors - ) - if cover_link: - metadata.links.append(cover_link) - else: - logging.info( - "Proceeding with import even though %r has no cover.", identifier - ) - - def load_circulation_data( - self, - collection_type, - identifier, - data_source, - ebook_directory, - mirrors, - title, - rights_uri, - ): - """Loads an actual copy of a book from disk - - :param collection_type: Collection's type: open access/protected access - :type collection_type: CollectionType - - :param identifier: Book's identifier - :type identifier: core.model.identifier.Identifier, - - :param data_source: DataSource object - :type data_source: DataSource - - :param ebook_directory: Directory containing books - :type ebook_directory: string - - :param mirrors: Dictionary containing mirrors for books and their covers - :type mirrors: Dict[string, MirrorUploader] - - :param title: Book's title - :type title: string - - :param rights_uri: URI explaining the rights status of the works being uploaded - :type rights_uri: string - - :return: A CirculationData that contains the book as an open-access - download, or None if no such book can be found - :rtype: CirculationData - """ - ignore, book_media_type, book_content = self._locate_file( - identifier.identifier, - ebook_directory, - Representation.COMMON_EBOOK_EXTENSIONS, - "ebook file", - ) - if not book_content: - # We couldn't find an actual copy of the book, so there is - # no point in proceeding. - return - - book_mirror = ( - mirrors[ - ExternalIntegrationLink.OPEN_ACCESS_BOOKS - if collection_type == CollectionType.OPEN_ACCESS - else ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS - ] - if mirrors - else None - ) - - # Use the S3 storage for books. - if book_mirror: - book_url = book_mirror.book_url( - identifier, - "." + Representation.FILE_EXTENSIONS[book_media_type], - open_access=collection_type == CollectionType.OPEN_ACCESS, - data_source=data_source, - title=title, - ) - else: - # This is a dry run and we won't be mirroring anything. - book_url = ( - identifier.identifier - + "." - + Representation.FILE_EXTENSIONS[book_media_type] - ) - - book_link_rel = ( - Hyperlink.OPEN_ACCESS_DOWNLOAD - if collection_type == CollectionType.OPEN_ACCESS - else Hyperlink.GENERIC_OPDS_ACQUISITION - ) - book_link = LinkData( - rel=book_link_rel, - href=book_url, - media_type=book_media_type, - content=book_content, - ) - formats = [ - FormatData( - content_type=book_media_type, - drm_scheme=DeliveryMechanism.LCP_DRM - if collection_type == CollectionType.LCP - else DeliveryMechanism.NO_DRM, - link=book_link, - ) - ] - circulation_data = CirculationData( - data_source=data_source.name, - primary_identifier=identifier, - links=[book_link], - formats=formats, - default_rights_uri=rights_uri, - ) - return circulation_data - - def load_cover_link(self, identifier, data_source, cover_directory, mirrors): - """Load an actual book cover from disk. - - :return: A LinkData containing a cover of the book, or None - if no book cover can be found. - """ - cover_filename, cover_media_type, cover_content = self._locate_file( - identifier.identifier, - cover_directory, - Representation.COMMON_IMAGE_EXTENSIONS, - "cover image", - ) - - if not cover_content: - return None - cover_filename = ( - identifier.identifier - + "." - + Representation.FILE_EXTENSIONS[cover_media_type] - ) - - # Use an S3 storage mirror for specifically for covers. 
- if mirrors and mirrors[ExternalIntegrationLink.COVERS]: - cover_url = mirrors[ExternalIntegrationLink.COVERS].cover_image_url( - data_source, identifier, cover_filename - ) - else: - # This is a dry run and we won't be mirroring anything. - cover_url = cover_filename - - cover_link = LinkData( - rel=Hyperlink.IMAGE, - href=cover_url, - media_type=cover_media_type, - content=cover_content, - ) - return cover_link - - @classmethod - def _locate_file( - cls, - base_filename, - directory, - extensions, - file_type="file", - mock_filesystem_operations=None, - ): - """Find an acceptable file in the given directory. - - :param base_filename: A string to be used as the base of the filename. - - :param directory: Look for a file in this directory. - - :param extensions: Any of these extensions for the file is - acceptable. - - :param file_type: Human-readable description of the type of - file we're looking for. This is used only in a log warning if - no file can be found. - - :param mock_filesystem_operations: A test may pass in a - 2-tuple of functions to replace os.path.exists and the 'open' - function. - - :return: A 3-tuple. (None, None, None) if no file can be - found; otherwise (filename, media_type, contents). - """ - if mock_filesystem_operations: - exists_f, open_f = mock_filesystem_operations - else: - exists_f = os.path.exists - open_f = open - - success_path = None - media_type = None - attempts = [] - for extension in extensions: - for ext in (extension, extension.upper()): - if not ext.startswith("."): - ext = "." + ext - filename = base_filename + ext - path = os.path.join(directory, filename) - attempts.append(path) - if exists_f(path): - media_type = Representation.MEDIA_TYPE_FOR_EXTENSION.get( - ext.lower() - ) - content = None - with open_f(path, "rb") as fh: - content = fh.read() - return filename, media_type, content - - # If we went through that whole loop without returning, - # we have failed. - logging.warning( - "Could not find %s for %s. Looked in: %s", - file_type, - base_filename, - ", ".join(attempts), - ) - return None, None, None - - class LaneResetScript(LibraryInputScript): """Reset a library's lanes based on language configuration or estimates of the library's current collection.""" diff --git a/tests/api/conftest.py b/tests/api/conftest.py index fc1309236c..20d39e22d0 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -31,7 +31,6 @@ "tests.fixtures.files", "tests.fixtures.flask", "tests.fixtures.library", - "tests.fixtures.marc_files", "tests.fixtures.odl", "tests.fixtures.opds2_files", "tests.fixtures.opds_files", diff --git a/tests/api/lcp/test_collection.py b/tests/api/lcp/test_collection.py index 8ccd26977f..2aea735dc2 100644 --- a/tests/api/lcp/test_collection.py +++ b/tests/api/lcp/test_collection.py @@ -293,7 +293,7 @@ def test_patron_activity_returns_correct_result(self, lcp_api_fixture): # 2. 
Loan from a different collection other_collection = lcp_api_fixture.db.collection( - protocol=ExternalIntegration.MANUAL + protocol=ExternalIntegration.LCP ) other_external_identifier = "2" other_license_pool = lcp_api_fixture.db.licensepool( diff --git a/tests/api/test_onix.py b/tests/api/test_onix.py deleted file mode 100644 index 9e80ed80b0..0000000000 --- a/tests/api/test_onix.py +++ /dev/null @@ -1,92 +0,0 @@ -from io import BytesIO - -import pytest - -from api.onix import ONIXExtractor -from core.classifier import Classifier -from core.metadata_layer import CirculationData -from core.model import Classification, Edition, Identifier, LicensePool -from core.util.datetime_helpers import datetime_utc - -from ..fixtures.api_onix_files import ONIXFilesFixture - - -class TestONIXExtractor: - def test_parser(self, api_onix_files_fixture: ONIXFilesFixture): - """Parse an ONIX file into Metadata objects.""" - - file = api_onix_files_fixture.sample_data("onix_example.xml") - metadata_records = ONIXExtractor().parse(BytesIO(file), "MIT Press") - - assert 2 == len(metadata_records) - - record = metadata_records[0] - assert "Safe Spaces, Brave Spaces" == record.title - assert "Diversity and Free Expression in Education" == record.subtitle - assert "Palfrey, John" == record.contributors[0].sort_name - assert "John Palfrey" == record.contributors[0].display_name - assert "Palfrey" == record.contributors[0].family_name - assert "Head of School at Phillips Academy" in record.contributors[0].biography - assert "The MIT Press" == record.publisher - assert None == record.imprint - assert "9780262343664" == record.primary_identifier.identifier - assert Identifier.ISBN == record.primary_identifier.type - assert "eng" == record.language - assert datetime_utc(2017, 10, 6) == record.issued - subjects = record.subjects - assert 7 == len(subjects) - assert "EDU015000" == subjects[0].identifier - assert Classifier.AUDIENCE_ADULT == subjects[-1].identifier - assert Classifier.BISAC == subjects[0].type - assert Classification.TRUSTED_DISTRIBUTOR_WEIGHT == subjects[0].weight - assert Edition.BOOK_MEDIUM == record.medium - assert 2017 == record.issued.year - - assert 1 == len(record.links) - assert ( - "the essential democratic values of diversity and free expression" - in record.links[0].content - ) - - record = metadata_records[1] - assert Edition.AUDIO_MEDIUM == record.medium - assert "The Test Corporation" == record.contributors[0].display_name - assert "Test Corporation, The" == record.contributors[0].sort_name - - @pytest.mark.parametrize( - "name,file_name,licenses_number", - [ - ("limited_usage_status", "onix_3_usage_constraints_example.xml", 20), - ( - "unlimited_usage_status", - "onix_3_usage_constraints_with_unlimited_usage_status.xml", - LicensePool.UNLIMITED_ACCESS, - ), - ( - "wrong_usage_unit", - "onix_3_usage_constraints_example_with_day_usage_unit.xml", - LicensePool.UNLIMITED_ACCESS, - ), - ], - ) - def test_parse_parses_correctly_onix_3_usage_constraints( - self, name, file_name, licenses_number, api_onix_files_fixture: ONIXFilesFixture - ): - # Arrange - file = api_onix_files_fixture.sample_data(file_name) - - # Act - metadata_records = ONIXExtractor().parse( - BytesIO(file), "ONIX 3 Usage Constraints Example" - ) - - # Assert - assert len(metadata_records) == 1 - - [metadata_record] = metadata_records - - assert (metadata_record.circulation is not None) == True - assert isinstance(metadata_record.circulation, CirculationData) == True - assert isinstance(metadata_record.circulation, 
CirculationData) == True - assert metadata_record.circulation.licenses_owned == licenses_number - assert metadata_record.circulation.licenses_available == licenses_number diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index 80a615dede..db2e62a92c 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -1,6 +1,5 @@ from __future__ import annotations -import contextlib import datetime import logging from io import StringIO @@ -24,29 +23,18 @@ ) from core.lane import Facets, FeaturedFacets, Pagination, WorkList from core.marc import MARCExporter -from core.metadata_layer import IdentifierData, Metadata, ReplacementPolicy -from core.mirror import MirrorUploader from core.model import ( LOCK_ID_DB_INIT, CachedMARCFile, ConfigurationSetting, Credential, DataSource, - DeliveryMechanism, - EditionConstants, ExternalIntegration, - Hyperlink, - Identifier, - LicensePool, - Representation, - RightsStatus, SessionManager, create, ) from core.model.configuration import ExternalIntegrationLink from core.opds import AcquisitionFeed -from core.s3 import MockS3Uploader -from core.scripts import CollectionType from core.util.datetime_helpers import datetime_utc, utc_now from core.util.flask_util import OPDSFeedResponse, Response from scripts import ( @@ -55,7 +43,6 @@ CacheMARCFiles, CacheOPDSGroupFeedPerLane, CacheRepresentationPerLane, - DirectoryImportScript, GenerateShortTokenScript, InstanceInitializationScript, LanguageListScript, @@ -68,7 +55,6 @@ if TYPE_CHECKING: from tests.fixtures.authenticator import SimpleAuthIntegrationFixture from tests.fixtures.database import DatabaseTransactionFixture - from tests.fixtures.sample_covers import SampleCoversFixture from tests.fixtures.search import ExternalSearchFixture @@ -819,623 +805,6 @@ def test_languages(self, db: DatabaseTransactionFixture): assert ["tgl 1 (Tagalog)"] == output -class MockDirectoryImportScript(DirectoryImportScript): - """Mock a filesystem to make it easier to test DirectoryInputScript.""" - - def __init__(self, _db, mock_filesystem={}): - super().__init__(_db) - self.mock_filesystem = mock_filesystem - self._locate_file_args = None - - def _locate_file(self, identifier, directory, extensions, file_type): - self._locate_file_args = (identifier, directory, extensions, file_type) - return self.mock_filesystem.get(directory, (None, None, None)) - - -class TestDirectoryImportScript: - def test_do_run(self, db: DatabaseTransactionFixture): - # Calling do_run with command-line arguments parses the - # arguments and calls run_with_arguments. 
- - class Mock(DirectoryImportScript): - def run_with_arguments(self, *args, **kwargs): - self.ran_with = kwargs - - script = Mock(db.session) - script.do_run( - cmd_args=[ - "--collection-name=coll1", - "--data-source-name=ds1", - "--metadata-file=metadata", - "--metadata-format=marc", - "--cover-directory=covers", - "--ebook-directory=ebooks", - "--rights-uri=rights", - "--dry-run", - f"--default-medium-type={EditionConstants.AUDIO_MEDIUM}", - ] - ) - assert { - "collection_name": "coll1", - "collection_type": CollectionType.OPEN_ACCESS, - "data_source_name": "ds1", - "metadata_file": "metadata", - "metadata_format": "marc", - "cover_directory": "covers", - "ebook_directory": "ebooks", - "rights_uri": "rights", - "dry_run": True, - "default_medium_type": EditionConstants.AUDIO_MEDIUM, - } == script.ran_with - - def test_run_with_arguments(self, db: DatabaseTransactionFixture): - - metadata1 = object() - metadata2 = object() - collection = db.default_collection() - mirrors = object() - work = object() - licensepool = LicensePool() - - class Mock(DirectoryImportScript): - """Mock the methods called by run_with_arguments.""" - - def __init__(self, _db): - super(DirectoryImportScript, self).__init__(_db) - self.load_collection_calls = [] - self.load_metadata_calls = [] - self.work_from_metadata_calls = [] - - def load_collection(self, *args): - self.load_collection_calls.append(args) - return collection, mirrors - - def load_metadata(self, *args, **kwargs): - self.load_metadata_calls.append(args) - return [metadata1, metadata2] - - def work_from_metadata(self, *args): - self.work_from_metadata_calls.append(args) - return work, licensepool - - # First, try a dry run. - - # Make a change to a model object so we can track when the - # session is committed. - db.default_collection().name = "changed" - - script = Mock(db.session) - basic_args = [ - "collection name", - CollectionType.OPEN_ACCESS, - "data source name", - "metadata file", - "marc", - "cover directory", - "ebook directory", - "rights URI", - ] - script.run_with_arguments( - *(basic_args + [True] + [EditionConstants.BOOK_MEDIUM]) - ) - - # load_collection was called with the collection and data source names. - assert [ - ("collection name", CollectionType.OPEN_ACCESS, "data source name") - ] == script.load_collection_calls - - # load_metadata was called with the metadata file and data source name. - assert [ - ("metadata file", "marc", "data source name", EditionConstants.BOOK_MEDIUM) - ] == script.load_metadata_calls - - # work_from_metadata was called twice, once on each metadata - # object. - [ - (coll1, t1, o1, policy1, c1, e1, r1), - (coll2, t2, o2, policy2, c2, e2, r2), - ] = script.work_from_metadata_calls - - assert coll1 == db.default_collection() - assert coll1 == coll2 - - assert o1 == metadata1 - assert o2 == metadata2 - - assert c1 == "cover directory" - assert c1 == c2 - - assert e1 == "ebook directory" - assert e1 == e2 - - assert "rights URI" == r1 - assert r1 == r2 - - # Since this is a dry run, the ReplacementPolicy has no mirror - # set. - for policy in (policy1, policy2): - assert None == policy.mirrors - assert True == policy.links - assert True == policy.formats - assert True == policy.contributions - assert True == policy.rights - - # Now try it not as a dry run. - script = Mock(db.session) - script.run_with_arguments(*(basic_args + [False])) - - # This time, the ReplacementPolicy has a mirror set - # appropriately. 
- [ - (coll1, t1, o1, policy1, c1, e1, r1), - (coll1, t2, o2, policy2, c2, e2, r2), - ] = script.work_from_metadata_calls - for policy in policy1, policy2: - assert mirrors == policy.mirrors - - # timestamp_collection has been set to the Collection that will be - # used when a Timestamp is created for this script. - assert db.default_collection() == script.timestamp_collection - - def test_load_collection_setting_mirrors(self, db: DatabaseTransactionFixture): - # Calling load_collection does not create a new collection. - script = DirectoryImportScript(db.session) - collection, mirrors = script.load_collection( - "New collection", CollectionType.OPEN_ACCESS, "data source name" - ) - assert None == collection - assert None == mirrors - - existing_collection = db.collection( - name="some collection", protocol=ExternalIntegration.MANUAL - ) - - collection, mirrors = script.load_collection( - "some collection", CollectionType.OPEN_ACCESS, "data source name" - ) - - # No covers or books mirrors were created beforehand for this collection - # so nothing is returned. - assert None == collection - assert None == mirrors - - # Both mirrors need to set up or else nothing is returned. - storage1 = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="name", - password="password", - ) - external_integration_link = db.external_integration_link( - integration=existing_collection.external_integration, - other_integration=storage1, - purpose=ExternalIntegrationLink.COVERS, - ) - - collection, mirrors = script.load_collection( - "some collection", CollectionType.OPEN_ACCESS, "data source name" - ) - assert None == collection - assert None == mirrors - - # Create another storage and assign it for the books mirror - storage2 = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="name", - password="password", - ) - external_integration_link = db.external_integration_link( - integration=existing_collection.external_integration, - other_integration=storage2, - purpose=ExternalIntegrationLink.OPEN_ACCESS_BOOKS, - ) - - collection, mirrors = script.load_collection( - "some collection", CollectionType.OPEN_ACCESS, "data source name" - ) - assert collection == existing_collection - assert isinstance(mirrors[ExternalIntegrationLink.COVERS], MirrorUploader) - assert isinstance( - mirrors[ExternalIntegrationLink.OPEN_ACCESS_BOOKS], MirrorUploader - ) - - def test_work_from_metadata( - self, db: DatabaseTransactionFixture, sample_covers_fixture: SampleCoversFixture - ): - # Validate the ability to create a new Work from appropriate metadata. - - class Mock(MockDirectoryImportScript): - """In this test we need to verify that annotate_metadata - was called but did nothing. 
- """ - - def annotate_metadata(self, collection_type, metadata, *args, **kwargs): - metadata.annotated = True - return super().annotate_metadata( - collection_type, metadata, *args, **kwargs - ) - - identifier = IdentifierData(Identifier.GUTENBERG_ID, "1003") - identifier_obj, ignore = identifier.load(db.session) - metadata = Metadata( - DataSource.GUTENBERG, primary_identifier=identifier, title="A book" - ) - metadata.annotated = False # type: ignore - datasource = DataSource.lookup(db.session, DataSource.GUTENBERG) - policy = ReplacementPolicy.from_license_source(db.session) - mirrors = dict(books_mirror=MockS3Uploader(), covers_mirror=MockS3Uploader()) - mirror_type_books = ExternalIntegrationLink.OPEN_ACCESS_BOOKS - mirror_type_covers = ExternalIntegrationLink.COVERS - policy.mirrors = mirrors - - # Here, work_from_metadata calls annotate_metadata, but does - # not actually import anything because there are no files 'on - # disk' and thus no way to actually get the book. - collection = db.default_collection() - collection_type = CollectionType.OPEN_ACCESS - shared_args = ( - collection_type, - metadata, - policy, - "cover directory", - "ebook directory", - RightsStatus.CC0, - ) - # args = (collection, *shared_args) - script = Mock(db.session) - assert None == script.work_from_metadata(collection, *shared_args) - assert True == metadata.annotated # type: ignore - - # Now let's try it with some files 'on disk'. - with open( - sample_covers_fixture.sample_cover_path("test-book-cover.png"), "rb" - ) as fh: - image = fh.read() - mock_filesystem = { - "cover directory": ("cover.jpg", Representation.JPEG_MEDIA_TYPE, image), - "ebook directory": ( - "book.epub", - Representation.EPUB_MEDIA_TYPE, - "I'm an EPUB.", - ), - } - script = MockDirectoryImportScript(db.session, mock_filesystem=mock_filesystem) # type: ignore - work, licensepool_for_work = script.work_from_metadata(collection, *shared_args) - - # Get the edition that was created for this book. It should have - # already been created by `script.work_from_metadata`. - edition, is_new_edition = metadata.edition(db.session) - assert False == is_new_edition - - # We have created a book. It has a cover image, which has a - # thumbnail. - assert "A book" == work.title - assert ( - work.cover_full_url - == "https://test-cover-bucket.s3.amazonaws.com/Gutenberg/Gutenberg%20ID/1003/1003.jpg" - ) - assert ( - work.cover_thumbnail_url - == "https://test-cover-bucket.s3.amazonaws.com/scaled/300/Gutenberg/Gutenberg%20ID/1003/1003.png" - ) - assert 1 == len(work.license_pools) - assert 1 == len(edition.license_pools) - assert 1 == len( - [lp for lp in edition.license_pools if lp.collection == collection] - ) - [pool] = work.license_pools - assert licensepool_for_work == pool - assert ( - pool.open_access_download_url - == "https://test-content-bucket.s3.amazonaws.com/Gutenberg/Gutenberg%20ID/1003/A%20book.epub" - ) - assert RightsStatus.CC0 == pool.delivery_mechanisms[0].rights_status.uri - - # The two mock S3Uploaders have records of 'uploading' all these files - # to S3. The "books" mirror has the epubs and the "covers" mirror - # contains all the images. - [epub] = mirrors[mirror_type_books].uploaded - [full, thumbnail] = mirrors[mirror_type_covers].uploaded - assert epub.url == pool.open_access_download_url - assert full.url == work.cover_full_url - assert thumbnail.url == work.cover_thumbnail_url - - # The EPUB Representation was cleared out after the upload, to - # save database space. - assert b"I'm an EPUB." 
== mirrors[mirror_type_books].content[0] - assert None == epub.content - - # Now attempt to get a work for a different collection, but with - # the same metadata. - # Even though there will be two license pools associated with the - # work's presentation edition, the call should be successful. - collection2 = db.collection("second collection") - work2, licensepool_for_work2 = script.work_from_metadata( - collection2, *shared_args - ) - - # The presentation edition should be the same for both works. - edition2 = work2.presentation_edition - assert edition == edition2 - - # The licensepool from which the work is calculated should be - # associated with collection2. - assert licensepool_for_work2.collection == collection2 - - # The work and its presentation edition should both have two licensepools, - # one for each collection. - assert 2 == len(work2.license_pools) - assert 2 == len(edition2.license_pools) - assert 1 == len( - [lp for lp in edition2.license_pools if lp.collection == collection2] - ) - - def test_annotate_metadata(self, db: DatabaseTransactionFixture): - """Verify that annotate_metadata calls load_circulation_data - and load_cover_link appropriately. - """ - - # First, test an unsuccessful annotation. - class MockNoCirculationData(DirectoryImportScript): - """Do nothing when load_circulation_data is called. Explode if - load_cover_link is called. - """ - - def load_circulation_data(self, *args): - self.load_circulation_data_args = args - return None - - def load_cover_link(self, *args): - raise Exception("Explode!") - - collection_type = CollectionType.OPEN_ACCESS - gutenberg = DataSource.lookup(db.session, DataSource.GUTENBERG) - identifier = IdentifierData(Identifier.GUTENBERG_ID, "11111") - identifier_obj, ignore = identifier.load(db.session) - metadata = Metadata( - title=db.fresh_str(), data_source=gutenberg, primary_identifier=identifier - ) - mirrors = object() - policy = ReplacementPolicy(mirrors=mirrors) - cover_directory = object() - ebook_directory = object() - rights_uri = object() - - script = MockNoCirculationData(db.session) - args = ( - collection_type, - metadata, - policy, - cover_directory, - ebook_directory, - rights_uri, - ) - script.annotate_metadata(*args) - - # load_circulation_data was called. - assert ( - collection_type, - identifier_obj, - gutenberg, - ebook_directory, - mirrors, - metadata.title, - rights_uri, - ) == script.load_circulation_data_args - - # But because load_circulation_data returned None, - # metadata.circulation_data was not modified and - # load_cover_link was not called (which would have raised an - # exception). - assert None == metadata.circulation - - # Test a successful annotation with no cover image. - class MockNoCoverLink(DirectoryImportScript): - """Return an object when load_circulation_data is called. - Do nothing when load_cover_link is called. - """ - - def load_circulation_data(self, *args): - return "Some circulation data" - - def load_cover_link(self, *args): - self.load_cover_link_args = args - return None - - script = MockNoCoverLink(db.session) # type: ignore - script.annotate_metadata(*args) - - # The Metadata object was annotated with the return value of - # load_circulation_data. - assert "Some circulation data" == metadata.circulation - - # load_cover_link was called. - assert ( - identifier_obj, - gutenberg, - cover_directory, - mirrors, - ) == script.load_cover_link_args # type: ignore - - # But since it provided no cover link, metadata.links was empty. 
- assert [] == metadata.links - - # Finally, test a completely successful annotation. - class MockWithCoverLink(DirectoryImportScript): - """Mock success for both load_circulation_data - and load_cover_link. - """ - - def load_circulation_data(self, *args): - return "Some circulation data" - - def load_cover_link(self, *args): - return "A cover link" - - metadata.circulation = None - script = MockWithCoverLink(db.session) # type: ignore - script.annotate_metadata(*args) - - assert "Some circulation data" == metadata.circulation - assert ["A cover link"] == metadata.links - - def test_load_circulation_data(self, db: DatabaseTransactionFixture): - # Create a directory import script with an empty mock filesystem. - script = MockDirectoryImportScript(db.session, {}) - - identifier = db.identifier(Identifier.GUTENBERG_ID, "2345") - gutenberg = DataSource.lookup(db.session, DataSource.GUTENBERG) - mirrors = dict(books_mirror=MockS3Uploader(), covers_mirror=None) - args = ( - CollectionType.OPEN_ACCESS, - identifier, - gutenberg, - "ebooks", - mirrors, - "Name of book", - "rights URI", - ) - - # There is nothing on the mock filesystem, so in this case - # load_circulation_data returns None. - assert None == script.load_circulation_data(*args) - - # But we tried. - assert ( - "2345", - "ebooks", - Representation.COMMON_EBOOK_EXTENSIONS, - "ebook file", - ) == script._locate_file_args - - # Try another script that has a populated mock filesystem. - mock_filesystem = { - "ebooks": ("book.epub", Representation.EPUB_MEDIA_TYPE, "I'm an EPUB.") - } - script = MockDirectoryImportScript(db.session, mock_filesystem) - - # Now _locate_file finds something on the mock filesystem, and - # load_circulation_data loads it into a fully populated - # CirculationData object. - circulation = script.load_circulation_data(*args) - assert identifier == circulation.primary_identifier(db.session) - assert gutenberg == circulation.data_source(db.session) - assert "rights URI" == circulation.default_rights_uri - - # The CirculationData has an open-access link associated with it. - [link] = circulation.links - assert Hyperlink.OPEN_ACCESS_DOWNLOAD == link.rel - assert ( - link.href - == "https://test-content-bucket.s3.amazonaws.com/Gutenberg/Gutenberg%20ID/2345/Name%20of%20book.epub" - ) - assert Representation.EPUB_MEDIA_TYPE == link.media_type - assert "I'm an EPUB." == link.content - - # This open-access link will be made available through a - # delivery mechanism described by this FormatData. - [format] = circulation.formats - assert link == format.link - assert link.media_type == format.content_type - assert DeliveryMechanism.NO_DRM == format.drm_scheme - - def test_load_cover_link(self, db: DatabaseTransactionFixture): - # Create a directory import script with an empty mock filesystem. - script = MockDirectoryImportScript(db.session, {}) - - identifier = db.identifier(Identifier.GUTENBERG_ID, "2345") - gutenberg = DataSource.lookup(db.session, DataSource.GUTENBERG) - mirrors = dict(covers_mirror=MockS3Uploader(), books_mirror=None) - args = (identifier, gutenberg, "covers", mirrors) - - # There is nothing on the mock filesystem, so in this case - # load_cover_link returns None. - assert None == script.load_cover_link(*args) - - # But we tried. - assert ( - "2345", - "covers", - Representation.COMMON_IMAGE_EXTENSIONS, - "cover image", - ) == script._locate_file_args - - # Try another script that has a populated mock filesystem. 
- mock_filesystem = { - "covers": ("acover.jpeg", Representation.JPEG_MEDIA_TYPE, "I'm an image.") - } - script = MockDirectoryImportScript(db.session, mock_filesystem) - link = script.load_cover_link(*args) - assert Hyperlink.IMAGE == link.rel - assert ( - link.href - == "https://test-cover-bucket.s3.amazonaws.com/Gutenberg/Gutenberg%20ID/2345/2345.jpg" - ) - assert Representation.JPEG_MEDIA_TYPE == link.media_type - assert "I'm an image." == link.content - - def test_locate_file(self): - """Test the ability of DirectoryImportScript._locate_file - to find files on a mock filesystem. - """ - # Create a mock filesystem with a single file. - mock_filesystem = {"directory/thefile.JPEG": "The contents"} - - def mock_exists(path): - return path in mock_filesystem - - @contextlib.contextmanager - def mock_open(path, mode="r"): - yield StringIO(mock_filesystem[path]) - - mock_filesystem_operations = mock_exists, mock_open - - def assert_not_found(base_filename, directory, extensions): - """Verify that the given set of arguments to - _locate_file() does not find anything. - """ - result = DirectoryImportScript._locate_file( - base_filename, - directory, - extensions, - file_type="some file", - mock_filesystem_operations=mock_filesystem_operations, - ) - assert (None, None, None) == result - - def assert_found(base_filename, directory, extensions): - """Verify that the given set of arguments to _locate_file() - finds and loads the single file on the mock filesystem.. - """ - result = DirectoryImportScript._locate_file( - base_filename, - directory, - extensions, - file_type="some file", - mock_filesystem_operations=mock_filesystem_operations, - ) - assert ( - "thefile.JPEG", - Representation.JPEG_MEDIA_TYPE, - "The contents", - ) == result - - # As long as the file and directory match we have some flexibility - # regarding the extensions we look for. - assert_found("thefile", "directory", [".jpeg"]) - assert_found("thefile", "directory", [".JPEG"]) - assert_found("thefile", "directory", ["jpeg"]) - assert_found("thefile", "directory", ["JPEG"]) - assert_found("thefile", "directory", [".another-extension", ".jpeg"]) - - # But file, directory, and (flexible) extension must all match. 
- assert_not_found("anotherfile", "directory", [".jpeg"]) - assert_not_found("thefile", "another_directory", [".jpeg"]) - assert_not_found("thefile", "directory", [".another-extension"]) - assert_not_found("thefile", "directory", []) - - class TestNovelistSnapshotScript: def mockNoveListAPI(self, *args, **kwargs): self.called_with = (args, kwargs) diff --git a/tests/core/conftest.py b/tests/core/conftest.py index 8c3ecb2987..4f494afddc 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -3,7 +3,6 @@ "tests.fixtures.csv_files", "tests.fixtures.database", "tests.fixtures.library", - "tests.fixtures.marc_files", "tests.fixtures.opds2_files", "tests.fixtures.opds_files", "tests.fixtures.overdrive", diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index 552a766221..0f90faaa2a 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -755,7 +755,7 @@ def test_delete( db = example_externalintegration_fixture.database_fixture integration1 = db.external_integration( - ExternalIntegration.MANUAL, + ExternalIntegration.LCP, ExternalIntegration.LICENSE_GOAL, libraries=[db.default_library()], ) diff --git a/tests/core/test_metadata.py b/tests/core/test_metadata.py index a5fe92375b..0c3cd1b3d0 100644 --- a/tests/core/test_metadata.py +++ b/tests/core/test_metadata.py @@ -6,14 +6,13 @@ import pytest from core.analytics import Analytics -from core.classifier import NO_NUMBER, NO_VALUE, Classifier +from core.classifier import NO_NUMBER, NO_VALUE from core.metadata_layer import ( CirculationData, ContributorData, CSVMetadataImporter, IdentifierData, LinkData, - MARCExtractor, MeasurementData, Metadata, ReplacementPolicy, @@ -37,11 +36,10 @@ ) from core.model.configuration import ExternalIntegrationLink from core.s3 import MockS3Uploader -from core.util.datetime_helpers import datetime_utc, strptime_utc, utc_now +from core.util.datetime_helpers import datetime_utc, utc_now from tests.core.mock import DummyHTTPClient, LogCaptureHandler from tests.fixtures.csv_files import CSVFilesFixture from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.marc_files import MARCFilesFixture from tests.fixtures.sample_covers import SampleCoversFixture @@ -1936,48 +1934,3 @@ def test_success(self, db: DatabaseTransactionFixture): # with the identifier of the audiobook equivalent_identifiers = [x.output for x in identifier.equivalencies] assert [book.primary_identifier] == equivalent_identifiers - - -class TestMARCExtractor: - def test_parse_year(self): - m = MARCExtractor.parse_year - nineteen_hundred = strptime_utc("1900", "%Y") - assert nineteen_hundred == m("1900") - assert nineteen_hundred == m("1900.") - assert None == m("not a year") - - def test_parser(self, marc_files_fixture: MARCFilesFixture): - """Parse a MARC file into Metadata objects.""" - - file = marc_files_fixture.sample_data("ils_plympton_01.mrc") - metadata_records = MARCExtractor.parse(file, "Plympton") - - assert 36 == len(metadata_records) - - record = metadata_records[1] - assert "Strange Case of Dr Jekyll and Mr Hyde" == record.title - assert "Stevenson, Robert Louis" == record.contributors[0].sort_name - assert "Recovering the Classics" in record.publisher - assert "9781682280041" == record.primary_identifier.identifier - assert Identifier.ISBN == record.primary_identifier.type - subjects = record.subjects - assert 2 == len(subjects) - for s in subjects: - assert Classifier.FAST == s.type - assert "Canon" in 
subjects[0].identifier - assert Edition.BOOK_MEDIUM == record.medium - assert 2015 == record.issued.year - assert "eng" == record.language - - assert 1 == len(record.links) - assert ( - "Utterson and Enfield are worried about their friend" - in record.links[0].content - ) - - def test_name_cleanup(self): - """Test basic name cleanup techniques.""" - m = MARCExtractor.name_cleanup - assert "Dante Alighieri" == m("Dante Alighieri, 1265-1321, author.") - assert "Stevenson, Robert Louis" == m("Stevenson, Robert Louis.") - assert "Wells, H.G." == m("Wells, H.G.") diff --git a/tests/fixtures/marc_files.py b/tests/fixtures/marc_files.py deleted file mode 100644 index 3259a467e2..0000000000 --- a/tests/fixtures/marc_files.py +++ /dev/null @@ -1,16 +0,0 @@ -import pytest - -from tests.fixtures.files import FilesFixture - - -class MARCFilesFixture(FilesFixture): - """A fixture providing access to MARC files.""" - - def __init__(self): - super().__init__("marc") - - -@pytest.fixture() -def marc_files_fixture() -> MARCFilesFixture: - """A fixture providing access to MARC files.""" - return MARCFilesFixture() From 565faeb25a78ee17242a83d6f715a351093efcd0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 09:32:46 -0300 Subject: [PATCH 007/262] Bump alembic from 1.11.3 to 1.12.0 (#1348) Bumps [alembic](https://github.com/sqlalchemy/alembic) from 1.11.3 to 1.12.0. - [Release notes](https://github.com/sqlalchemy/alembic/releases) - [Changelog](https://github.com/sqlalchemy/alembic/blob/main/CHANGES) - [Commits](https://github.com/sqlalchemy/alembic/commits) --- updated-dependencies: - dependency-name: alembic dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0ef4e4503c..543b601682 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "alembic" -version = "1.11.3" +version = "1.12.0" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.7" files = [ - {file = "alembic-1.11.3-py3-none-any.whl", hash = "sha256:d6c96c2482740592777c400550a523bc7a9aada4e210cae2e733354ddae6f6f8"}, - {file = "alembic-1.11.3.tar.gz", hash = "sha256:3db4ce81a9072e1b5aa44c2d202add24553182672a12daf21608d6f62a8f9cf9"}, + {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, + {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, ] [package.dependencies] From ea2fab6a87fcb2c4f6cc342d60c603ab2fcc269e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 09:36:19 -0300 Subject: [PATCH 008/262] Bump actions/checkout from 3 to 4 (#1350) Bumps [actions/checkout](https://github.com/actions/checkout) from 3 to 4. - [Release notes](https://github.com/actions/checkout/releases) - [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md) - [Commits](https://github.com/actions/checkout/compare/v3...v4) --- updated-dependencies: - dependency-name: actions/checkout dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build-base-image.yml | 2 +- .github/workflows/codeql-analysis.yml | 2 +- .github/workflows/lint.yml | 2 +- .github/workflows/mypy.yml | 2 +- .github/workflows/test-build.yml | 10 +++++----- 5 files changed, 9 insertions(+), 9 deletions(-) diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index a20571d62d..55a0fcc22a 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -20,7 +20,7 @@ jobs: packages: write steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 578e3a4003..7ff3741fa9 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -20,7 +20,7 @@ jobs: steps: - name: Checkout repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: submodules: recursive diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index c06bb784e2..9af0a6d96c 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -11,7 +11,7 @@ jobs: contents: read steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index beca9347b7..232d36124a 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -10,7 +10,7 @@ jobs: contents: read steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 🐍 uses: actions/setup-python@v4 diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 9c53a76fab..d840653ec5 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -25,7 +25,7 @@ jobs: if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760 - name: Disable network offload @@ -74,7 +74,7 @@ jobs: if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760 - name: Disable network offload @@ -124,7 +124,7 @@ jobs: baseimage: ${{ steps.baseimage.outputs.tag }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false @@ -253,7 +253,7 @@ jobs: POSTGRES_DB: ${{ env.POSTGRES_DB }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false @@ -311,7 +311,7 @@ jobs: image: ["scripts", "webapp", "exec"] steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: persist-credentials: false fetch-depth: 0 From f5b76834046f390c637e5c5d46ffe79dc83b1db0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 09:38:45 -0300 Subject: [PATCH 009/262] Bump tox from 4.11.0 to 4.11.1 (#1351) Bumps [tox](https://github.com/tox-dev/tox) from 4.11.0 to 4.11.1. 
- [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.11.0...4.11.1) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 543b601682..2d4591c385 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3733,13 +3733,13 @@ files = [ [[package]] name = "tox" -version = "4.11.0" +version = "4.11.1" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.11.0-py3-none-any.whl", hash = "sha256:7f7e5f1b20115560e610b9a11143bbcf48270ec3293f36c0a18be7b287c3b41f"}, - {file = "tox-4.11.0.tar.gz", hash = "sha256:cc665e1e6b095f843b952ea5696f7a64bb64982aff62b62547ef171fa60e21eb"}, + {file = "tox-4.11.1-py3-none-any.whl", hash = "sha256:da761b4a57ee2b92b5ce39f48ff723fc42d185bf2af508effb683214efa662ea"}, + {file = "tox-4.11.1.tar.gz", hash = "sha256:8a8cc94b7269f8e43dfc636eff2da4b33a199a4e575b5b086cc51aae24ac4262"}, ] [package.dependencies] From a7fd50299e635b52b9f7ec2fef40ce99e586d119 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 09:39:36 -0300 Subject: [PATCH 010/262] Bump pytest from 7.4.0 to 7.4.1 (#1353) Bumps [pytest](https://github.com/pytest-dev/pytest) from 7.4.0 to 7.4.1. - [Release notes](https://github.com/pytest-dev/pytest/releases) - [Changelog](https://github.com/pytest-dev/pytest/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pytest-dev/pytest/compare/7.4.0...7.4.1) --- updated-dependencies: - dependency-name: pytest dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2d4591c385..d7d186edd8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3079,13 +3079,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.0" +version = "7.4.1" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, - {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, + {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, + {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, ] [package.dependencies] From 3b1e709c240f71455d14cd08658d2f3b6f1684d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Sep 2023 09:40:21 -0300 Subject: [PATCH 011/262] Bump pre-commit from 3.3.3 to 3.4.0 (#1352) Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.3.3 to 3.4.0. 
- [Release notes](https://github.com/pre-commit/pre-commit/releases) - [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) - [Commits](https://github.com/pre-commit/pre-commit/compare/v3.3.3...v3.4.0) --- updated-dependencies: - dependency-name: pre-commit dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d7d186edd8..e8b153950c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2572,13 +2572,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.3.3" +version = "3.4.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.3.3-py2.py3-none-any.whl", hash = "sha256:10badb65d6a38caff29703362271d7dca483d01da88f9d7e05d0b97171c136cb"}, - {file = "pre_commit-3.3.3.tar.gz", hash = "sha256:a2256f489cd913d575c145132ae196fe335da32d91a8294b7afe6622335dd023"}, + {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"}, + {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"}, ] [package.dependencies] From 22ba2fae2e8d7de508d0921736693ed12b5333e4 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 5 Sep 2023 19:40:15 +0530 Subject: [PATCH 012/262] PP-356 Scripts CI test for crontab (#1349) * CI based crontab test for the scripts container --- docker/ci/check_service_status.sh | 14 ++++++++++++++ docker/ci/test_scripts.sh | 3 +++ 2 files changed, 17 insertions(+) diff --git a/docker/ci/check_service_status.sh b/docker/ci/check_service_status.sh index 487a7b4bcc..cdea3b8099 100644 --- a/docker/ci/check_service_status.sh +++ b/docker/ci/check_service_status.sh @@ -29,3 +29,17 @@ function check_service_status() echo " OK" fi } + +function check_crontab() { + container="$1" + + # Installing the crontab will reveal any errors and exit with an error code + $(docker exec "$container" /bin/bash -c "crontab /etc/cron.d/circulation") + validate_status=$? 
+ if [[ "$validate_status" != 0 ]]; then + echo " FAIL: crontab is incorrect" + exit 1 + else + echo " OK" + fi +} diff --git a/docker/ci/test_scripts.sh b/docker/ci/test_scripts.sh index 2463a046a5..3693103087 100755 --- a/docker/ci/test_scripts.sh +++ b/docker/ci/test_scripts.sh @@ -14,4 +14,7 @@ wait_for_runit "$container" # Make sure that cron is running in the scripts container check_service_status "$container" /etc/service/cron + +# Ensure the installed crontab has no problems +check_crontab "$container" exit 0 From 66093b2f908b14806bf93044e4d10d2c64a3b10a Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 5 Sep 2023 19:41:45 +0530 Subject: [PATCH 013/262] PP-398 Maintain type coercions during collections settings mutation (#1347) * Maintain type coercions during collections settings mutation * Alembic migration script to coerce license goal settings to the right types * Use pydantic for type coercion --- ...c6fb2b9_type_coerce_collection_settings.py | 79 +++++++++++++ api/admin/controller/collection_settings.py | 4 +- api/admin/controller/settings.py | 4 +- core/model/collection.py | 3 +- .../api/admin/controller/test_collections.py | 51 +++++++- tests/migration/test_20230905_2b672c6fb2b9.py | 110 ++++++++++++++++++ 6 files changed, 245 insertions(+), 6 deletions(-) create mode 100644 alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py create mode 100644 tests/migration/test_20230905_2b672c6fb2b9.py diff --git a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py new file mode 100644 index 0000000000..e571153750 --- /dev/null +++ b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py @@ -0,0 +1,79 @@ +"""Type coerce collection settings + +Revision ID: 2b672c6fb2b9 +Revises: 0df58829fc1a +Create Date: 2023-09-05 06:40:35.739869+00:00 + +""" +import json +from typing import Any, Callable, Dict, Type + +from pydantic import parse_obj_as + +from alembic import op + +# revision identifiers, used by Alembic. 
+
+revision = "2b672c6fb2b9"
+down_revision = "0df58829fc1a"
+branch_labels = None
+depends_on = None
+
+
+def _bool(value):
+    return value in ("true", "True", True)
+
+
+# All the settings types that have non-str types
+ALL_SETTING_TYPES: Dict[str, Type[Any]] = {
+    "verify_certificate": bool,
+    "default_reservation_period": bool,
+    "loan_limit": int,
+    "hold_limit": int,
+    "max_retry_count": int,
+    "ebook_loan_duration": int,
+    "default_loan_duration": int,
+}
+
+
+def _coerce_types(settings: dict) -> None:
+    """Coerce the types, in-place"""
+    setting_type: Callable
+    for setting_name, setting_type in ALL_SETTING_TYPES.items():
+        if setting_name in settings:
+            settings[setting_name] = parse_obj_as(setting_type, settings[setting_name])
+
+
+def upgrade() -> None:
+    connection = op.get_bind()
+    # Fetch all integration settings with the 'licenses' goal
+    results = connection.execute(
+        f"SELECT id, settings from integration_configurations where goal='LICENSE_GOAL';"
+    ).fetchall()
+
+    # For each integration setting, we check if any of the non-str
+    # keys are present in the DB
+    # We then type-coerce that value
+    for settings_id, settings in results:
+        _coerce_types(settings)
+        connection.execute(
+            "UPDATE integration_configurations SET settings=%s where id=%s",
+            json.dumps(settings),
+            settings_id,
+        )
+
+    # Do the same for any Library settings
+    results = connection.execute(
+        f"SELECT parent_id, settings from integration_library_configurations;"
+    ).fetchall()
+
+    for settings_id, settings in results:
+        _coerce_types(settings)
+        connection.execute(
+            "UPDATE integration_library_configurations SET settings=%s where parent_id=%s",
+            json.dumps(settings),
+            settings_id,
+        )
+
+
+def downgrade() -> None:
+    """There is no need to revert the types back to strings"""
diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py
index 8ead576df3..4b1508f1bd 100644
--- a/api/admin/controller/collection_settings.py
+++ b/api/admin/controller/collection_settings.py
@@ -356,10 +356,10 @@ def process_settings(
 
         # validate then apply
         try:
-            settings_class(**collection_settings)
+            validated_settings = settings_class(**collection_settings)
         except ProblemError as ex:
            return ex.problem_detail
-        collection.integration_configuration.settings_dict = collection_settings
+        collection.integration_configuration.settings_dict = validated_settings.dict()
         return None
 
     def _set_external_integration_link(
diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py
index e70070d4ab..7f05d7d73d 100644
--- a/api/admin/controller/settings.py
+++ b/api/admin/controller/settings.py
@@ -382,10 +382,10 @@ def _set_configuration_library(
             config = None
 
         # Validate first
-        protocol_class.library_settings_class()(**info_copy)
+        validated_data = protocol_class.library_settings_class()(**info_copy)
         # Attach the configuration
         config = configuration.for_library(cast(int, library.id), create=True)
-        config.settings_dict = info_copy
+        config.settings_dict = validated_data.dict()
         return config
 
     def _set_integration_library(self, integration, library_info, protocol):
diff --git a/core/model/collection.py b/core/model/collection.py
index 8a5f5a0640..efdb80da37 100644
--- a/core/model/collection.py
+++ b/core/model/collection.py
@@ -325,10 +325,11 @@ def default_loan_period(self, library, medium=EditionConstants.BOOK_MEDIUM):
         that someone who borrows a non-open-access item from this
         collection has it for this number of days.
""" - return ( + value = ( self.default_loan_period_setting(library, medium) or self.STANDARD_DEFAULT_LOAN_PERIOD ) + return value @classmethod def loan_period_key(cls, medium=EditionConstants.BOOK_MEDIUM): diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index 812f63e710..c22d98b754 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -178,7 +178,6 @@ def test_collections_get_collection_protocols( def test_collections_get_collections_with_multiple_collections( self, settings_ctrl_fixture: SettingsControllerFixture ): - old_prior_test_results = HasSelfTests.prior_test_results setattr( HasCollectionSelfTests, @@ -666,6 +665,7 @@ def test_collections_post_edit( ("overdrive_client_key", "user2"), ("overdrive_client_secret", "password"), ("overdrive_website_id", "1234"), + ("max_retry_count", "10"), ( "libraries", json.dumps([{"short_name": "L1", "ils_name": "the_ils"}]), @@ -684,6 +684,11 @@ def test_collections_post_edit( "overdrive_client_key" ) + # Type coercion stays intact + assert 10 == collection.integration_configuration.settings_dict.get( + "max_retry_count" + ) + # A library now has access to the collection. assert [collection] == l1.collections @@ -754,6 +759,50 @@ def test_collections_post_edit( # The collection now has a parent. assert parent == collection.parent + library = settings_ctrl_fixture.ctrl.db.default_library() + collection2 = settings_ctrl_fixture.ctrl.db.collection( + name="Collection 2", protocol=ExternalIntegration.ODL + ) + with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + flask.request.form = ImmutableMultiDict( + [ + ("id", str(collection2.id)), + ("name", "Collection 2"), + ("protocol", ExternalIntegration.ODL), + ("external_account_id", "1234"), + ("username", "user"), + ("password", "password"), + ("data_source", "datasource"), + ("passphrase_hint", "passphrase_hint"), + ("passphrase_hint_url", "http://passphrase_hint_url.com"), + ( + "libraries", + json.dumps( + [ + { + "short_name": library.short_name, + "ebook_loan_duration": "200", + } + ] + ), + ), + ] + ) + response = ( + settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + ) + assert response.status_code == 200 + + settings_ctrl_fixture.ctrl.db.session.refresh(collection2) + assert len(collection2.integration_configuration.library_configurations) == 1 + # The library configuration value was correctly coerced to int + assert ( + collection2.integration_configuration.library_configurations[ + 0 + ].settings_dict.get("ebook_loan_duration") + == 200 + ) + def _base_collections_post_request(self, collection): """A template for POST requests to the collections controller.""" return [ diff --git a/tests/migration/test_20230905_2b672c6fb2b9.py b/tests/migration/test_20230905_2b672c6fb2b9.py new file mode 100644 index 0000000000..371fda8ff8 --- /dev/null +++ b/tests/migration/test_20230905_2b672c6fb2b9.py @@ -0,0 +1,110 @@ +import json +from dataclasses import dataclass +from typing import Any, Dict, Optional, Protocol + +import pytest +from pytest_alembic import MigrationContext +from sqlalchemy.engine import Connection, Engine + +from tests.migration.conftest import CreateLibrary + + +@dataclass +class IntegrationConfiguration: + id: int + settings: Dict[str, Any] + + +class CreateConfiguration(Protocol): + def __call__( + self, connection: Connection, protocol: str, name: str, settings: Dict[str, Any] + ) -> 
IntegrationConfiguration: + ... + + +@pytest.fixture +def create_integration_configuration() -> CreateConfiguration: + def insert_config( + connection: Connection, protocol: str, name: str, settings: Dict[str, Any] + ) -> IntegrationConfiguration: + connection.execute( + "INSERT INTO integration_configurations (goal, protocol, name, settings, self_test_results) VALUES (%s, %s, %s, %s, '{}')", + "LICENSE_GOAL", + protocol, + name, + json.dumps(settings), + ) + return fetch_config(connection, name=name) + + return insert_config + + +def fetch_config( + connection: Connection, name: Optional[str] = None, parent_id: Optional[int] = None +) -> IntegrationConfiguration: + if name is not None: + _id, settings = connection.execute( # type: ignore[misc] + "SELECT id, settings FROM integration_configurations where name=%s", name + ).fetchone() + else: + _id, settings = connection.execute( # type: ignore[misc] + "SELECT parent_id, settings FROM integration_library_configurations where parent_id=%s", + parent_id, + ).fetchone() + return IntegrationConfiguration(_id, settings) + + +MIGRATION_UID = "2b672c6fb2b9" + + +def test_settings_coersion( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_integration_configuration: CreateConfiguration, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_UID) + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + config = create_integration_configuration( + connection, + "Axis 360", + "axis-test-1", + dict( + verify_certificate="true", + loan_limit="20", + key="value", + ), + ) + + library_id = create_library(connection) + + library_settings = dict( + hold_limit="30", + max_retry_count="2", + ebook_loan_duration="10", + default_loan_duration="11", + unchanged="value", + ) + connection.execute( + "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", + library_id, + config.id, + json.dumps(library_settings), + ) + alembic_runner.migrate_up_one() + + axis_config = fetch_config(connection, name="axis-test-1") + assert axis_config.settings["verify_certificate"] == True + assert axis_config.settings["loan_limit"] == 20 + # Unknown settings remain as-is + assert axis_config.settings["key"] == "value" + + odl_config = fetch_config(connection, parent_id=config.id) + assert odl_config.settings["hold_limit"] == 30 + assert odl_config.settings["max_retry_count"] == 2 + assert odl_config.settings["ebook_loan_duration"] == 10 + assert odl_config.settings["default_loan_duration"] == 11 + # Unknown settings remain as-is + assert odl_config.settings["unchanged"] == "value" From 7e66949939d025f1c44f31e1666930c64b10424c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Sep 2023 09:20:14 -0300 Subject: [PATCH 014/262] Bump types-pillow from 10.0.0.2 to 10.0.0.3 (#1354) Bumps [types-pillow](https://github.com/python/typeshed) from 10.0.0.2 to 10.0.0.3. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pillow dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e8b153950c..ed4bcca9f3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3860,13 +3860,13 @@ files = [ [[package]] name = "types-pillow" -version = "10.0.0.2" +version = "10.0.0.3" description = "Typing stubs for Pillow" optional = false python-versions = "*" files = [ - {file = "types-Pillow-10.0.0.2.tar.gz", hash = "sha256:fe09380ab22d412ced989a067e9ee4af719fa3a47ba1b53b232b46514a871042"}, - {file = "types_Pillow-10.0.0.2-py3-none-any.whl", hash = "sha256:29d51a3ce6ef51fabf728a504d33b4836187ff14256b2e86996d55c91ab214b1"}, + {file = "types-Pillow-10.0.0.3.tar.gz", hash = "sha256:ae0c877d363da349bbb82c5463c9e78037290cc07d3714cb0ceaf5d2f7f5c825"}, + {file = "types_Pillow-10.0.0.3-py3-none-any.whl", hash = "sha256:54a49f3c6a3f5e95ebeee396d7773dde22ce2515d594f9c0596c0a983558f0d4"}, ] [[package]] From 1839c4440d60e1344ee547b8d60f274886c122a7 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 6 Sep 2023 18:09:00 +0530 Subject: [PATCH 015/262] Added notification information to the push notifications (#1360) --- core/util/notifications.py | 11 ++++++++--- tests/core/util/test_notifications.py | 13 +++++++++++++ 2 files changed, 21 insertions(+), 3 deletions(-) diff --git a/core/util/notifications.py b/core/util/notifications.py index 4a42b1d903..13083b83ce 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -65,12 +65,15 @@ def send_loan_expiry_message( edition: Edition = loan.license_pool.presentation_edition identifier: Identifier = loan.license_pool.identifier library_short_name = loan.library and loan.library.short_name + title = f"Only {days_to_expiry} {'days' if days_to_expiry != 1 else 'day'} left on your loan!" + body = f"Your loan on {edition.title} is expiring soon" for token in tokens: msg = messaging.Message( token=token.device_token, + notification=dict(title=title, body=body), data=dict( - title=f"Only {days_to_expiry} {'days' if days_to_expiry != 1 else 'day'} left on your loan!", - body=f"Your loan on {edition.title} is expiring soon", + title=title, + body=body, event_type=NotificationConstants.LOAN_EXPIRY_TYPE, loans_endpoint=f"{url}/{loan.library.short_name}/loans", external_identifier=loan.patron.external_identifier, @@ -129,11 +132,13 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: loans_api = f"{url}/{hold.patron.library.short_name}/loans" work: Work = hold.work identifier: Identifier = hold.license_pool.identifier + title = f'Your hold on "{work.title}" is available!' 
for token in tokens: msg = messaging.Message( token=token.device_token, + notification=dict(title=title), data=dict( - title=f'Your hold on "{work.title}" is available!', + title=title, event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, loans_endpoint=loans_api, external_identifier=hold.patron.external_identifier, diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index f5a9b52593..ca337800af 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -52,6 +52,10 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur (), { "token": "atoken", + "notification": dict( + title="Only 1 day left on your loan!", + body=f"Your loan on {work.presentation_edition.title} is expiring soon", + ), "data": dict( title="Only 1 day left on your loan!", body=f"Your loan on {work.presentation_edition.title} is expiring soon", @@ -176,6 +180,9 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): assert messaging.Message.call_args_list == [ mock.call( token="test-token-1", + notification=dict( + title=f'Your hold on "{work1.title}" is available!', + ), data=dict( title=f'Your hold on "{work1.title}" is available!', event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, @@ -189,6 +196,9 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token="test-token-2", + notification=dict( + title=f'Your hold on "{work1.title}" is available!', + ), data=dict( title=f'Your hold on "{work1.title}" is available!', event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, @@ -202,6 +212,9 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token="test-token-3", + notification=dict( + title=f'Your hold on "{work2.title}" is available!', + ), data=dict( title=f'Your hold on "{work2.title}" is available!', event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, From 202a8f604a0314ffa3d943eeaec0de2fc39aab6f Mon Sep 17 00:00:00 2001 From: dbernstein Date: Wed, 6 Sep 2023 11:14:28 -0700 Subject: [PATCH 016/262] Truncate edition.author and edition.sort_author values to prevent from... (#1315) breaking the postgresql indices using these fields. Resolves: https://ebce-lyrasis.atlassian.net/browse/PP-188 --- core/model/edition.py | 28 ++++++++++++ tests/core/models/test_edition.py | 75 +++++++++++++++++++++++++++++++ 2 files changed, 103 insertions(+) diff --git a/core/model/edition.py b/core/model/edition.py index 3e0589d100..214e87a5f8 100644 --- a/core/model/edition.py +++ b/core/model/edition.py @@ -49,6 +49,21 @@ class Edition(Base, EditionConstants): # in a pinch. MAX_FALLBACK_THUMBNAIL_HEIGHT = 500 + # Postgresql doesn't allow indices to exceed 1/3 of a buffer page. + # We saw the following error here: https://ebce-lyrasis.atlassian.net/browse/PP-188: + # + # Index row size 3208 exceeds btree version 4 maximum 2704 for index "ix_editions_author" + # DETAIL: Index row references tuple (48187,9) in relation "editions". + # HINT: Values larger than 1/3 of a buffer page cannot be indexed. + # + # On rare occasions the author (and sort_author) fields can contain a concatenated list of a + # large number of authors which breaks the index and causes failures. What exactly that threshold is + # I am not entirely certain. It appears that 2704 is the size that broke the 1/3 of a buffer page + # limit. However, I'm not sure how the index size is calculated. I experimented + # with different values. 
Author field values exceeding 2700 characters in length produced the aforementioned + # error with an index row size of 2800. Author field values below 2650 characters seemed to be okay. + SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR = 2650 + # This Edition is associated with one particular # identifier--the one used by its data source to identify # it. Through the Equivalency class, it is associated with a @@ -724,6 +739,19 @@ def calculate_author(self): sort_author = " ; ".join(sorted(sort_names)) else: sort_author = self.UNKNOWN_AUTHOR + + def truncate_string(mystr: str): + if len(mystr) > self.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR: + return ( + mystr[: (self.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR - 3)] + + "..." + ) + return mystr + + # Very long author and sort_author strings can cause issues for Postgres indices. See + # comment above the SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR constant for details. + author = truncate_string(author) + sort_author = truncate_string(sort_author) return author, sort_author def choose_cover(self, policy=None): diff --git a/tests/core/models/test_edition.py b/tests/core/models/test_edition.py index 38abdfd4f3..2d107220a8 100644 --- a/tests/core/models/test_edition.py +++ b/tests/core/models/test_edition.py @@ -1,3 +1,6 @@ +import random +import string + from core.model import PresentationCalculationPolicy, get_one_or_create from core.model.constants import MediaTypes from core.model.contributor import Contributor @@ -325,6 +328,78 @@ def test_calculate_presentation_author(self, db: DatabaseTransactionFixture): assert "Kelly Accumulator, Bob A. Bitshifter" == wr.author assert "Accumulator, Kelly ; Bitshifter, Bob" == wr.sort_author + def test_calculate_presentation_very_long_author( + self, db: DatabaseTransactionFixture + ): + authors = [] + + # author names should be unique and not similar to ensure that the + # test mirrors the types of long author lists we'd expect in real data. + def generate_random_author(): + return "".join( + random.choices( + string.ascii_uppercase + string.ascii_lowercase + string.digits, + k=25, + ) + ) + + for i in range(0, 500): + author, ignore = db.contributor( + sort_name=", ".join( + [ + generate_random_author(), + generate_random_author(), + ] + ) + ) + authors.append(author.sort_name) + + untruncated_sort_authors = ", ".join([x for x in sorted(authors)]) + wr = db.edition(authors=authors) + wr.calculate_presentation() + db.session.commit() + + def do_check(original_str: str, truncated_str: str): + assert ( + len(truncated_str) + == Edition.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR + ) + assert truncated_str.endswith("...") + assert not original_str.endswith("...") + assert ( + len(original_str) + > Edition.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR + ) + + do_check(untruncated_sort_authors, wr.sort_author) + # Since we'd expect the sort_author and auth to be equal (since sort_author is assigned to the + # author field by default if no author is specified) we should verify that the author field also + # passes the check. 
+ do_check(untruncated_sort_authors, wr.author) + + def test_calculate_presentation_shortish_author( + self, db: DatabaseTransactionFixture + ): + authors = [] + author, ignore = db.contributor(sort_name=f"AuthorLast, AuthorFirst") + authors.append(author.sort_name) + wr = db.edition(authors=authors) + author, sort_author = wr.calculate_author() + wr.calculate_presentation() + db.session.commit() + + def do_check(original_str: str, calculated_str: str): + assert calculated_str == original_str + assert not calculated_str.endswith("...") + assert ( + len(original_str) + <= Edition.SAFE_AUTHOR_FIELD_LENGTH_TO_AVOID_PG_INDEX_ERROR + ) + assert not original_str.endswith("...") + + do_check(author, wr.author) + do_check(sort_author, wr.sort_author) + def test_set_summary(self, db: DatabaseTransactionFixture): e, pool = db.edition(with_license_pool=True) work = db.work(presentation_edition=e) From f7f8e03babc8632fa3669699241b469311922eee Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 7 Sep 2023 17:21:02 +0530 Subject: [PATCH 017/262] PP-356 Test run scripts during CI (#1359) * test_scripts tests for correctly configured variables as well by running a single script --- docker/ci/check_service_status.sh | 14 ++++++++++++++ docker/ci/test_scripts.sh | 4 ++++ 2 files changed, 18 insertions(+) diff --git a/docker/ci/check_service_status.sh b/docker/ci/check_service_status.sh index cdea3b8099..a76ff6d02d 100644 --- a/docker/ci/check_service_status.sh +++ b/docker/ci/check_service_status.sh @@ -43,3 +43,17 @@ function check_crontab() { echo " OK" fi } + +function run_script() { + container="$1" + script="$2" + + output=$(docker exec "$container" /bin/bash -c "$script") + script_status=$? + if [[ "$script_status" != 0 ]]; then + echo " FAIL: script run failed" + exit 1 + else + echo " OK" + fi +} diff --git a/docker/ci/test_scripts.sh b/docker/ci/test_scripts.sh index 3693103087..d283e87093 100755 --- a/docker/ci/test_scripts.sh +++ b/docker/ci/test_scripts.sh @@ -17,4 +17,8 @@ check_service_status "$container" /etc/service/cron # Ensure the installed crontab has no problems check_crontab "$container" + +# Run a single script to ensure basic settings are correct +# The opds2 import script will only test the DB configuration +run_script "$container" "source ../env/bin/activate && ./opds2_import_monitor" exit 0 From daabfad05000972147d5c079084c95198973632a Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 8 Sep 2023 14:47:14 -0300 Subject: [PATCH 018/262] Fix language code validation (PP-422) (#1362) * Fix language code validation, and add some tests for it. 
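For reviewers, a minimal sketch of the behavior the validator is expected to
have after this change (using a hypothetical two-language stand-in table in
place of the real LanguageCodes.string_to_alpha_3 lookup, and a plain
ValueError in place of the SettingsValidationError/UNKNOWN_LANGUAGE problem
detail): language names and two-letter codes are normalized to alpha-3 codes,
duplicates are dropped while order is preserved, and an unknown code is
rejected.

    # Illustrative sketch only; not the repository implementation.
    from typing import List, Optional

    STAND_IN_ALPHA_3 = {
        "english": "eng", "eng": "eng",
        "french": "fre", "fr": "fre", "fre": "fre",
    }

    def validate_language_codes_sketch(
        value: Optional[List[str]],
    ) -> Optional[List[str]]:
        if value is None:
            return None
        languages: List[str] = []
        for language in value:
            validated = STAND_IN_ALPHA_3.get(language.lower())
            if validated is None:
                # The real validator raises a SettingsValidationError built
                # from the UNKNOWN_LANGUAGE problem detail here.
                raise ValueError(f'"{language}" is not a valid language code.')
            if validated not in languages:
                languages.append(validated)
        return languages

    # Mirrors the expectation exercised by the new tests.
    assert validate_language_codes_sketch(
        ["English", "eng", "fr", "fre", "french"]
    ) == ["eng", "fre"]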
--- core/configuration/library.py | 7 ++- tests/core/configuration/test_library.py | 55 ++++++++++++++++++++++++ 2 files changed, 61 insertions(+), 1 deletion(-) create mode 100644 tests/core/configuration/test_library.py diff --git a/core/configuration/library.py b/core/configuration/library.py index f5330e27de..410417885d 100644 --- a/core/configuration/library.py +++ b/core/configuration/library.py @@ -638,12 +638,17 @@ def validate_language_codes( ) -> Optional[List[str]]: """Verify that collection languages are valid.""" if value is not None: + languages = [] for language in value: - if not LanguageCodes.string_to_alpha_3(language): + validated_language = LanguageCodes.string_to_alpha_3(language) + if validated_language is None: field_label = cls.get_form_field_label(field.name) raise SettingsValidationError( problem_detail=UNKNOWN_LANGUAGE.detailed( f'"{field_label}": "{language}" is not a valid language code.' ) ) + if validated_language not in languages: + languages.append(validated_language) + return languages return value diff --git a/tests/core/configuration/test_library.py b/tests/core/configuration/test_library.py new file mode 100644 index 0000000000..ad00aa2860 --- /dev/null +++ b/tests/core/configuration/test_library.py @@ -0,0 +1,55 @@ +from functools import partial +from typing import Callable, List, Optional + +import pytest + +from core.configuration.library import LibrarySettings +from core.util.problem_detail import ProblemError + +LibrarySettingsFixture = Callable[..., LibrarySettings] + + +@pytest.fixture +def library_settings() -> LibrarySettingsFixture: + # Provide a default library settings object for tests, it just gives + # default values for required fields, so we can construct the settings + # without worrying about the defaults. 
+ return partial( + LibrarySettings, + website="http://library.com", + help_web="http://library.com/help", + ) + + +@pytest.mark.parametrize( + "languages,expected", + [ + (None, None), + ([], []), + (["English"], ["eng"]), + (["English", "eng", "fr", "fre", "french"], ["eng", "fre"]), + ], +) +def test_validate_language_codes( + languages: Optional[List[str]], + expected: Optional[List[str]], + library_settings: LibrarySettingsFixture, +) -> None: + settings = library_settings(large_collection_languages=languages) + assert settings.large_collection_languages == expected + + settings = library_settings(small_collection_languages=languages) + assert settings.small_collection_languages == expected + + settings = library_settings(tiny_collection_languages=languages) + assert settings.tiny_collection_languages == expected + + +def test_validate_language_codes_error( + library_settings: LibrarySettingsFixture, +) -> None: + with pytest.raises(ProblemError) as excinfo: + library_settings(large_collection_languages=["eng", "xyz"]) + + assert excinfo.value.problem_detail.detail is not None + assert '"xyz" is not a valid language code' in excinfo.value.problem_detail.detail From 06dd41ab3f4175ded644b980cc206c5f38895952 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Mon, 11 Sep 2023 16:00:38 +0530 Subject: [PATCH 019/262] Fix incorrectly formatted FCM notification (#1361) --- core/util/notifications.py | 4 ++-- tests/core/util/test_notifications.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/core/util/notifications.py b/core/util/notifications.py index 13083b83ce..5641c0d15c 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -70,7 +70,7 @@ def send_loan_expiry_message( for token in tokens: msg = messaging.Message( token=token.device_token, - notification=dict(title=title, body=body), + notification=messaging.Notification(title=title, body=body), data=dict( title=title, body=body, @@ -136,7 +136,7 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: for token in tokens: msg = messaging.Message( token=token.device_token, - notification=dict(title=title), + notification=messaging.Notification(title=title), data=dict( title=title, event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index ca337800af..c47914ecc4 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -52,7 +52,7 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur (), { "token": "atoken", - "notification": dict( + "notification": messaging.Notification( title="Only 1 day left on your loan!", body=f"Your loan on {work.presentation_edition.title} is expiring soon", ), @@ -180,7 +180,7 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): assert messaging.Message.call_args_list == [ mock.call( token="test-token-1", - notification=dict( + notification=messaging.Notification( title=f'Your hold on "{work1.title}" is available!', ), data=dict( @@ -196,7 +196,7 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token="test-token-2", - notification=dict( + notification=messaging.Notification( title=f'Your hold on "{work1.title}" is available!', ), data=dict( @@ -212,7 +212,7 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( 
token="test-token-3", - notification=dict( + notification=messaging.Notification( title=f'Your hold on "{work2.title}" is available!', ), data=dict( From 183f872ff22f2fad02d8fa4006a793e2a348b387 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Mon, 11 Sep 2023 21:59:01 +0530 Subject: [PATCH 020/262] Fix issue with 2b672c6fb2b9 migration (PP-428) (#1366) - Fixed the type for default_reservation_period - Fixed the primary key queries for the library integration configurations - Additional logging on the migration - Added a join condition so we are only updating library configurations that need to be updated - Only write updated settings to the DB when there have been modifications to the settings - Clean up tests Co-authored-by: Jonathan Green --- ...c6fb2b9_type_coerce_collection_settings.py | 94 +++++++---- tests/migration/test_20230905_2b672c6fb2b9.py | 155 ++++++++++++------ 2 files changed, 169 insertions(+), 80 deletions(-) diff --git a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py index e571153750..0f11714085 100644 --- a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py +++ b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py @@ -6,9 +6,11 @@ """ import json -from typing import Any, Callable, Dict, Type +import logging +from copy import deepcopy +from typing import Any, Dict, Optional, Tuple -from pydantic import parse_obj_as +from pydantic import PositiveInt, ValidationError, parse_obj_as from alembic import op @@ -19,60 +21,90 @@ depends_on = None -def _bool(value): - return value in ("true", "True", True) +log = logging.getLogger(f"palace.migration.{revision}") +log.setLevel(logging.INFO) +log.disabled = False # All the settings types that have non-str types -ALL_SETTING_TYPES: Dict[str, Type[Any]] = { - "verify_certificate": bool, - "default_reservation_period": bool, - "loan_limit": int, - "hold_limit": int, - "max_retry_count": int, - "ebook_loan_duration": int, - "default_loan_duration": int, +ALL_SETTING_TYPES: Dict[str, Any] = { + "verify_certificate": Optional[bool], + "default_reservation_period": Optional[PositiveInt], + "loan_limit": Optional[PositiveInt], + "hold_limit": Optional[PositiveInt], + "max_retry_count": Optional[PositiveInt], + "ebook_loan_duration": Optional[PositiveInt], + "default_loan_duration": Optional[PositiveInt], } -def _coerce_types(settings: dict) -> None: +def _coerce_types(original_settings: Dict[str, Any]) -> Tuple[bool, Dict[str, Any]]: """Coerce the types, in-place""" - setting_type: Callable + modified = False + modified_settings = deepcopy(original_settings) for setting_name, setting_type in ALL_SETTING_TYPES.items(): - if setting_name in settings: - settings[setting_name] = parse_obj_as(setting_type, settings[setting_name]) + if setting_name in original_settings: + # If the setting is an empty string, we set it to None + if original_settings[setting_name] == "": + setting = None + else: + setting = original_settings[setting_name] + + try: + modified = True + modified_settings[setting_name] = parse_obj_as(setting_type, setting) + except ValidationError as e: + log.error( + f"Error while parsing setting {setting_name}. Settings: {original_settings}." 
+ ) + raise e + + return modified, modified_settings def upgrade() -> None: connection = op.get_bind() # Fetch all integration settings with the 'licenses' goal results = connection.execute( - f"SELECT id, settings from integration_configurations where goal='LICENSE_GOAL';" + "SELECT id, settings from integration_configurations where goal='LICENSE_GOAL';" ).fetchall() # For each integration setting, we check id any of the non-str # keys are present in the DB # We then type-coerce that value for settings_id, settings in results: - _coerce_types(settings) - connection.execute( - "UPDATE integration_configurations SET settings=%s where id=%s", - json.dumps(settings), - settings_id, - ) + modified, updated_settings = _coerce_types(settings) + if modified: + log.info( + f"Updating settings for integration_configuration (id:{settings_id}). " + f"Original settings: {settings}. New settings: {updated_settings}." + ) + # If any of the values were modified, we update the DB + connection.execute( + "UPDATE integration_configurations SET settings=%s where id=%s", + json.dumps(updated_settings), + settings_id, + ) # Do the same for any Library settings results = connection.execute( - f"SELECT parent_id, settings from integration_library_configurations;" + "SELECT ilc.parent_id, ilc.library_id, ilc.settings from integration_library_configurations ilc " + "join integration_configurations ic on ilc.parent_id = ic.id where ic.goal='LICENSE_GOAL';" ).fetchall() - for settings_id, settings in results: - _coerce_types(settings) - connection.execute( - "UPDATE integration_library_configurations SET settings=%s where parent_id=%s", - json.dumps(settings), - settings_id, - ) + for parent_id, library_id, settings in results: + modified, updated_settings = _coerce_types(settings) + if modified: + log.info( + f"Updating settings for integration_library_configuration (parent_id:{parent_id}/library_id:{library_id}). " + f"Original settings: {settings}. New settings: {updated_settings}." + ) + connection.execute( + "UPDATE integration_library_configurations SET settings=%s where parent_id=%s and library_id=%s", + json.dumps(updated_settings), + parent_id, + library_id, + ) def downgrade() -> None: diff --git a/tests/migration/test_20230905_2b672c6fb2b9.py b/tests/migration/test_20230905_2b672c6fb2b9.py index 371fda8ff8..fa3e94a605 100644 --- a/tests/migration/test_20230905_2b672c6fb2b9.py +++ b/tests/migration/test_20230905_2b672c6fb2b9.py @@ -1,6 +1,5 @@ import json -from dataclasses import dataclass -from typing import Any, Dict, Optional, Protocol +from typing import Any, Dict import pytest from pytest_alembic import MigrationContext @@ -9,49 +8,52 @@ from tests.migration.conftest import CreateLibrary -@dataclass -class IntegrationConfiguration: - id: int - settings: Dict[str, Any] - - -class CreateConfiguration(Protocol): +class CreateConfiguration: def __call__( - self, connection: Connection, protocol: str, name: str, settings: Dict[str, Any] - ) -> IntegrationConfiguration: - ... 
+ self, + connection: Connection, + goal: str, + protocol: str, + name: str, + settings: Dict[str, Any], + ) -> int: + integration_configuration = connection.execute( + "INSERT INTO integration_configurations (goal, protocol, name, settings, self_test_results) VALUES (%s, %s, %s, %s, '{}') returning id", + goal, + protocol, + name, + json.dumps(settings), + ).fetchone() + assert integration_configuration is not None + assert isinstance(integration_configuration.id, int) + return integration_configuration.id @pytest.fixture def create_integration_configuration() -> CreateConfiguration: - def insert_config( - connection: Connection, protocol: str, name: str, settings: Dict[str, Any] - ) -> IntegrationConfiguration: - connection.execute( - "INSERT INTO integration_configurations (goal, protocol, name, settings, self_test_results) VALUES (%s, %s, %s, %s, '{}')", - "LICENSE_GOAL", - protocol, - name, - json.dumps(settings), - ) - return fetch_config(connection, name=name) + return CreateConfiguration() - return insert_config +def fetch_config(connection: Connection, _id: int) -> Dict[str, Any]: + integration_config = connection.execute( + "SELECT settings FROM integration_configurations where id=%s", _id + ).fetchone() + assert integration_config is not None + assert isinstance(integration_config.settings, dict) + return integration_config.settings -def fetch_config( - connection: Connection, name: Optional[str] = None, parent_id: Optional[int] = None -) -> IntegrationConfiguration: - if name is not None: - _id, settings = connection.execute( # type: ignore[misc] - "SELECT id, settings FROM integration_configurations where name=%s", name - ).fetchone() - else: - _id, settings = connection.execute( # type: ignore[misc] - "SELECT parent_id, settings FROM integration_library_configurations where parent_id=%s", - parent_id, - ).fetchone() - return IntegrationConfiguration(_id, settings) + +def fetch_library_config( + connection: Connection, parent_id: int, library_id: int +) -> Dict[str, Any]: + integration_lib_config = connection.execute( + "SELECT parent_id, settings FROM integration_library_configurations where parent_id=%s and library_id=%s", + parent_id, + library_id, + ).fetchone() + assert integration_lib_config is not None + assert isinstance(integration_lib_config.settings, dict) + return integration_lib_config.settings MIGRATION_UID = "2b672c6fb2b9" @@ -67,18 +69,22 @@ def test_settings_coersion( alembic_runner.migrate_down_one() with alembic_engine.connect() as connection: - config = create_integration_configuration( + config_id = create_integration_configuration( connection, + "LICENSE_GOAL", "Axis 360", "axis-test-1", dict( verify_certificate="true", loan_limit="20", + default_reservation_period="12", key="value", ), ) + # Test 2 library configs, to the same parent library_id = create_library(connection) + library_id2 = create_library(connection) library_settings = dict( hold_limit="30", @@ -90,21 +96,72 @@ def test_settings_coersion( connection.execute( "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", library_id, - config.id, + config_id, json.dumps(library_settings), ) + library_settings = dict( + hold_limit="31", + max_retry_count="3", + ebook_loan_duration="", + default_loan_duration="12", + unchanged="value1", + ) + connection.execute( + "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", + library_id2, + config_id, + json.dumps(library_settings), + ) + + 
other_config_settings = dict( + verify_certificate="true", + loan_limit="20", + default_reservation_period="12", + key="value", + ) + other_config_id = create_integration_configuration( + connection, "PATRON_AUTH_GOAL", "Other", "other-test", other_config_settings + ) + connection.execute( + "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", + library_id2, + other_config_id, + json.dumps(other_config_settings), + ) + alembic_runner.migrate_up_one() - axis_config = fetch_config(connection, name="axis-test-1") - assert axis_config.settings["verify_certificate"] == True - assert axis_config.settings["loan_limit"] == 20 + axis_config = fetch_config(connection, config_id) + assert axis_config["verify_certificate"] == True + assert axis_config["loan_limit"] == 20 + assert axis_config["default_reservation_period"] == 12 + # Unknown settings remain as-is + assert axis_config["key"] == "value" + + odl_config = fetch_library_config( + connection, parent_id=config_id, library_id=library_id + ) + assert odl_config["hold_limit"] == 30 + assert odl_config["max_retry_count"] == 2 + assert odl_config["ebook_loan_duration"] == 10 + assert odl_config["default_loan_duration"] == 11 # Unknown settings remain as-is - assert axis_config.settings["key"] == "value" + assert odl_config["unchanged"] == "value" - odl_config = fetch_config(connection, parent_id=config.id) - assert odl_config.settings["hold_limit"] == 30 - assert odl_config.settings["max_retry_count"] == 2 - assert odl_config.settings["ebook_loan_duration"] == 10 - assert odl_config.settings["default_loan_duration"] == 11 + odl_config2 = fetch_library_config( + connection, parent_id=config_id, library_id=library_id2 + ) + assert odl_config2["hold_limit"] == 31 + assert odl_config2["max_retry_count"] == 3 + assert odl_config2["ebook_loan_duration"] is None + assert odl_config2["default_loan_duration"] == 12 # Unknown settings remain as-is - assert odl_config.settings["unchanged"] == "value" + assert odl_config2["unchanged"] == "value1" + + # Other integration is unchanged + other_config = fetch_config(connection, other_config_id) + assert other_config == other_config_settings + other_library_config = fetch_library_config( + connection, parent_id=other_config_id, library_id=library_id2 + ) + assert other_library_config == other_config_settings From 97d0cedffcd2af7f1e7fd1b3edc71265c19ef660 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Sep 2023 19:25:20 +0000 Subject: [PATCH 021/262] Bump tox from 4.11.1 to 4.11.3 (#1367) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index ed4bcca9f3..c49e1ac527 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3733,13 +3733,13 @@ files = [ [[package]] name = "tox" -version = "4.11.1" +version = "4.11.3" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.11.1-py3-none-any.whl", hash = "sha256:da761b4a57ee2b92b5ce39f48ff723fc42d185bf2af508effb683214efa662ea"}, - {file = "tox-4.11.1.tar.gz", hash = "sha256:8a8cc94b7269f8e43dfc636eff2da4b33a199a4e575b5b086cc51aae24ac4262"}, + {file = "tox-4.11.3-py3-none-any.whl", hash = "sha256:599af5e5bb0cad0148ac1558a0b66f8fff219ef88363483b8d92a81e4246f28f"}, + {file = "tox-4.11.3.tar.gz", hash = "sha256:5039f68276461fae6a9452a3b2c7295798f00a0e92edcd9a3b78ba1a73577951"}, ] [package.dependencies] From 
3db7590fe8f3fa487c41aa33855df787c280d45d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Sep 2023 19:25:35 +0000 Subject: [PATCH 022/262] Bump pytest from 7.4.1 to 7.4.2 (#1365) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c49e1ac527..fb567345aa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3079,13 +3079,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.1" +version = "7.4.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, - {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, + {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, + {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, ] [package.dependencies] From 3a15ac657ef329ed684b4c587431fb294fc530ff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Sep 2023 19:26:07 +0000 Subject: [PATCH 023/262] Bump pyinstrument from 4.5.1 to 4.5.3 (#1363) --- poetry.lock | 118 +++++++++++++++++++++++++++++----------------------- 1 file changed, 66 insertions(+), 52 deletions(-) diff --git a/poetry.lock b/poetry.lock index fb567345aa..42710ed461 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2859,65 +2859,79 @@ files = [ [[package]] name = "pyinstrument" -version = "4.5.1" +version = "4.5.3" description = "Call stack profiler for Python. Shows you why your code is slow!" 
optional = false python-versions = ">=3.7" files = [ - {file = "pyinstrument-4.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8f334250b158010d1e2c70d9d10b880f848e03a917079b366b1e2d8890348d41"}, - {file = "pyinstrument-4.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:55537cd763aee8bce65a201d5ec1aef74677d9ff3ab3391316604ca68740d92a"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3d7933bd83e913e21c4031d5c1aeeb2483147e4037363f43475df9ad962c748"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f0d8f6b6df7ce338af35b213cd89b685b2a7c15569f482476c4e0942700b3e71"}, - {file = "pyinstrument-4.5.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98101d064b7af008189dd6f0bdd01f9be39bc6a4630505dfb13ff6ef51a0c67c"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:46f1607e29f93da16d38be41ad2062a56731ff4efa24e561ac848719e8b8ca41"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e287ebc1a8b00d3a767829c03f210df0824ab2e0f6340e8f63bab6fcef1b3546"}, - {file = "pyinstrument-4.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d15613b8d5d509c29001f2edfadd73d418c2814262433fd1225c4f7893e4010a"}, - {file = "pyinstrument-4.5.1-cp310-cp310-win32.whl", hash = "sha256:04c67f08bac41173bc6b44396c60bf1a1879864d0684a7717b1bb8be27793bd9"}, - {file = "pyinstrument-4.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:dc07267447935d28ee914f955613b04d621e5bb44995f793508d6f0eb3ec2818"}, - {file = "pyinstrument-4.5.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8285cfb25b9ee72766bdac8db8c276755115a6e729cda4571005d1ba58c99dda"}, - {file = "pyinstrument-4.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b58239f4a0fe64f688260be0e5b4a1d19a23b890b284cf6c1c8bd0ead4616f41"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4039210a80015ae0ad2016a3b3311b068f5b334d5f5ce3c54d473f8624db0d35"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b28a4c5926036155062c83e15ca93437dbe2d41dd5feeac96f72d4d16b3431c"}, - {file = "pyinstrument-4.5.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d2c2a9de60712abd2228033e4ac63cdee86783af5288f2d7f8efc365e33425"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bf0fdb17cb245c53826c77e2b95095a8fb5053e49ae8ef18aecbbd184028f9e7"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:65ac43f8a1b74a331b5a4f60985531654a8d71a7698e6be5ac7e8493e7a37f37"}, - {file = "pyinstrument-4.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:61632d287f70d850a517533b9e1bf8da41527ffc4d781d4b65106f64ee33cb98"}, - {file = "pyinstrument-4.5.1-cp311-cp311-win32.whl", hash = "sha256:22ae739152ed2366c654f80aa073579f9d5a93caffa74dcb839a62640ffe429f"}, - {file = "pyinstrument-4.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:c72a33168485172a7c2dbd6c4aa3262c8d2a6154bc0792403d8e0689c6ff5304"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8c3dabcb70b705d1342f52f0c3a00647c8a244d1e6ffe46459c05d4533ffabfc"}, - {file = 
"pyinstrument-4.5.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17d469572d48ee0b78d4ff7ed3972ff40abc70c7dab4777897c843cb03a6ab7b"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66416fa4b3413bc60e6b499e60e8d009384c85cd03535f82337dce55801c43f"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c888fca16c3ae04a6d7b5a29ee0c12f9fa23792fab695117160c48c3113428f"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:861fe8c41ac7e54a57ed6ef63268c2843fbc695012427a3d19b2eb1307d9bc61"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0bf91cd5d6c80ff25fd1a136545a5cf752522190b6e6f3806559c352f18d0e73"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b16afb5e67d4d901ef702160e85e04001183b7cdea7e38c8dfb37e491986ccff"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-win32.whl", hash = "sha256:f12312341c505e7441e5503b7c77974cff4156d072f0e7f9f822a6b5fdafbc20"}, - {file = "pyinstrument-4.5.1-cp37-cp37m-win_amd64.whl", hash = "sha256:06d96b442a1ae7c267aa34450b028d80559c4f968b10e4d3ce631b0a6ccea6ef"}, - {file = "pyinstrument-4.5.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c6234094ff0ea7d51e7d4699f192019359bf12d5bbe9e1c9c5d1983562162d58"}, - {file = "pyinstrument-4.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f025522edc35831af34bcdbe300b272b432d2afd9811eb780e326116096cbff5"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0a091c575367af427e80829ec414f69a8398acdd68ddfaeb335598071329b44"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ec169cd288f230cbc6a1773384f20481b0a14d2d7cceecf1fb65e56835eaa9a"}, - {file = "pyinstrument-4.5.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004745e83c79d0db7ea8787aba476f13d8bb6d00d75b00d8dbd933a9c7ee1685"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:54be442df5039bc7c73e3e86de0093ca82f3e446392bebab29e51a1512c796cb"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:35e5be8621b3381cf10b1f16bbae527cb7902e87b64e0c9706bc244f6fee51b1"}, - {file = "pyinstrument-4.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:50e93fac7e42dba8b3c630ed00808e7664d0d6c6b0c477462e7b061a31be23dc"}, - {file = "pyinstrument-4.5.1-cp38-cp38-win32.whl", hash = "sha256:b0a88bfe24d4efb129ef2ae7e2d50fa29908634e893bf154e29f91655c558692"}, - {file = "pyinstrument-4.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:b8a71ef9c2ad81e5f3d5f92e1d21a0c9b5f9992e94d0bfcfa9020ea88df4e69f"}, - {file = "pyinstrument-4.5.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9882827e681466d1aff931479387ed77e29674c179bc10fc67f1fa96f724dd20"}, - {file = "pyinstrument-4.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:427228a011d5be21ff009dc05fcd512cee86ea2a51687a3300b8b822bad6815b"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50501756570352e78aaf2aee509b5eb6c68706a2f2701dc3a84b066e570c61ca"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6471f47860f1a5807c182be7184839d747e2702625d44ec19a8f652380541020"}, - {file = "pyinstrument-4.5.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59727936e862677e9716b9317e209e5e31aa1da7eb03c65083d9dee8b5fbe0f8"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9341a07885cba57c2a134847aacb629f27b4ce06a4950a4619629d35a6d8619c"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:63c27f2ae8f0501dca4d52b42285be36095f4461dd9e340d32104c2b2df3a731"}, - {file = "pyinstrument-4.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:1bda9b73dde7df63d7606e37340ba0a63ad59053e59eff318f3b67d5a7ea5579"}, - {file = "pyinstrument-4.5.1-cp39-cp39-win32.whl", hash = "sha256:300ed27714c43ae2feb7572e9b3ca39660fb89b3b298e94ad24b64609f823d3c"}, - {file = "pyinstrument-4.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:f2d8e4a9a8167c2a47874d72d6ab0a4266ed484e9ae30f35a515f8594b224b51"}, - {file = "pyinstrument-4.5.1.tar.gz", hash = "sha256:b55a93be883c65650515319455636d32ab32692b097faa1e07f8cd9d4e0eeaa9"}, + {file = "pyinstrument-4.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:94f82899486441f0b31c53c4250cb65a9f20036cacb6fb75315069a7b1e3703b"}, + {file = "pyinstrument-4.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e295571bec2bfc1cfbb1ddd66aa5d06c54cf67179c46f0bbdcf709e8130533fd"}, + {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d2b2e9c0e6b6cf444716829a00855796a7f80b5bcabe07ddb29dd5c238e5014"}, + {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3741e001a2b06be9dc435329f14507b571b273aca8b243b8d2cffd786de1b205"}, + {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92d450301957fa328391ab3da13a26249268233ea0fd1542613c148b8a635950"}, + {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4d6bccf4da8c13065c4096e4669ce483d1614698a279419090b9374f0b96328f"}, + {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:612c99a6fbad1bcabae0fe7571f5ede0ecd577d1d4a975d19fcfa281997f7075"}, + {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bb637628274e819faec00532cada45d0da8ae4f4033baa84f9cdce559911a4a4"}, + {file = "pyinstrument-4.5.3-cp310-cp310-win32.whl", hash = "sha256:5490c4ddd0f946de2c503c22e1099b34b241d9f4ac80f27b3dc7e484818b734b"}, + {file = "pyinstrument-4.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:48372e82347281c843f9cd710fc848cb5869634e225d5bffcc627673e7554ac9"}, + {file = "pyinstrument-4.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5739550f6a631250aac0b01778882d3e77b3e4ed5c01f4112769ec023cac345d"}, + {file = "pyinstrument-4.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7f83b475f90764beb9a44505539f19c005ca31526f35358cde0a02b140c09c4e"}, + {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118440b4c6a925f811d97fba02e99066fca8090710fa51c6873834dd37b39040"}, + {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ce3adea93d6f4ff54893428b49f1b771f9aa7294a79d812a207c7dd9cbe8161"}, + {file = 
"pyinstrument-4.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a5851bf8c253d37b415388a1511239a3486249d87a0436d47317480d1e9557b"}, + {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3bb877bd2bc9bf492257891e585287f65c6374a1511e64f888a1ad112c18103b"}, + {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b9d7254b729571151070a61c7f6c86d02320d62145b9f664a96258fcc26ad1a"}, + {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f294980f636193fdb70f671d17fc98fd4f3624aef1ce061b36be14f53bbe84b4"}, + {file = "pyinstrument-4.5.3-cp311-cp311-win32.whl", hash = "sha256:c04e101c32102091280ac759578d991a3a71a41fe357c651cd78b8bbe9879daf"}, + {file = "pyinstrument-4.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:d4cf26f0f813db178eb36db8fa0ae48cd600b7e3c0447beddd8e7e7cec26e992"}, + {file = "pyinstrument-4.5.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:82d49865f6aef776ab914b9f09c26ad6279397d8fd26a79a3008c1becab4d88c"}, + {file = "pyinstrument-4.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d4efe0cdccdd44514a6ae7c061dd88d221dd77ae7d7bfd2d743c1f51f90fa3e1"}, + {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ec7d794ad206a2ad905160308cc27ad3a985691e99c31e79cfd8de53b75455"}, + {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342923b5c7654c73bcd263733b1e9d2b990c2af60d429badcc7cfd5a21bb384b"}, + {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c0d73fd1c7de4b8fca509b7c292709dbe1990527601c7d2307d4f9aca110df"}, + {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2be28ec4efa59dd9539bd803381c768a2f2453b6de201e102bf02e17a3efd3f2"}, + {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ae906be229fa5ce649016206baa5d20f6a49bb7b6c7643d019f8024e2d11d66"}, + {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d06ef692650f24feb3817869e6519ac117c3358bfe6474c0ded2cbca53c69a5f"}, + {file = "pyinstrument-4.5.3-cp312-cp312-win32.whl", hash = "sha256:f27742fa4b40c2fde105c24b190fa7d54e76195bc4c8d8a4fc5fa1af663468d3"}, + {file = "pyinstrument-4.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:ba6b864a8234f3faf1a3a52587368975d9aca6944a06a68114eb1153501679b4"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efa3140c8813056c5af939f39d750461bb917a0ba96b76cd0171c033939ae0bc"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70c7542c8edfbaee7d2263b07997e668daf6c73e8386abdd1b1a243e88c29da3"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df3e9fdea7f4a2a39a4403044c06efd5d00674807b9f8c104d24f5bf1412e33f"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64aefe67e6ad5a8254f36e0cadaa06f873539d34a3e18b883b8fa7278752f541"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1949f4c4f92ea674415c74a6e5d2105b92175019b03b4808bb61d9a777baffc"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = 
"sha256:376924b603278f9df034a8b4a4826ef708abb99acd161b65b66e8b62d596b7c9"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:59caa57aa868098cbe81c842aeac24efef861a9fb1a1f34aa227b6d57b497e57"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-win32.whl", hash = "sha256:2b9da8eb4f947aba804f61cc311f466105161deebbe49b0a651c20cc0bd804b9"}, + {file = "pyinstrument-4.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9c08df4e0b3615df56affdb0898f89c3a964779b344b11f9edae4b5b7ac6d033"}, + {file = "pyinstrument-4.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:944c8a1e8451b9114cff42a0d7d59e482bbf060ccc3ef927d351f8d383f52678"}, + {file = "pyinstrument-4.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:80f89f8f296005eb1f8616cd602ffbdf9efcc069e145a35f35654270c2b7641f"}, + {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520c67144da37e93dc03445f8138ef5a9af6f68f89baacb658731d886763f018"}, + {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad9677beb345b3a3fe9967e90dfbbcf458f73ae8fc522fdbfda5bab75a1e5014"}, + {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e77f1a708a895f25300f7dc9b4fd5b34218ecc9c7084733d5ebb849e3ff5af99"}, + {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:057feb33230caead5bfe25d488060d07065a1bf7f19f5b2004e661a38dddc9e3"}, + {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:31e4a23672cfb8f9864bebea6246182d9398a9131606dc53bce124955258705f"}, + {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:087ff4e3faca326da071bc73214d73b98c9d7ebea53e70fbe1c033bb6c75f847"}, + {file = "pyinstrument-4.5.3-cp38-cp38-win32.whl", hash = "sha256:e7ab85c0090fd21b5c7910ef01da37be25b574db2cbdc7584e4e2371cb1f13b0"}, + {file = "pyinstrument-4.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:2ef3c856d0ab98372e08e444f6a81efc93dc160d867e3aee1bf4702bd779535d"}, + {file = "pyinstrument-4.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f168dfa4328c25c0c3444b62cc8445ac7c0dbbb6cdaf79022267571e12d78d3c"}, + {file = "pyinstrument-4.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7241f588e770bfe642cd19e2c8b7560a9cf9e0c2998c0a70ee0ea6333d7404b3"}, + {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:093d1119e20fc68a9f991a1de0bc046fb29e996298d0442c928415738b2546ae"}, + {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afa5fdcd65ae4d2c11871da01576c3c2c19f70135f6b107cb7550a334441b4f8"}, + {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5f5d219b5f52b33462179ecf33ad8651672bc9410f6f6dfd3edf2095acae42"}, + {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c9dc5c501ca01c8649a967442d52eedaee63c52fcdc0fd4fb69974bc4d678978"}, + {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:37d25f3aafb4f24080dd4b0966d9a022f660735f8136b7234ec2c7b8ceab14c4"}, + {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33ee99f15ac13d02a0319c2d7671dd2ccc19c615c167a9f5fbba43b50c225102"}, + {file = "pyinstrument-4.5.3-cp39-cp39-win32.whl", hash = 
"sha256:f467f9308a613fec0be43fa49469ad2f2c99e62e801802e8d59d938acc4acda9"}, + {file = "pyinstrument-4.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:b027951df515c896243145239e91aeb63b19b642d0f4d5ff702a9393dd4736fa"}, + {file = "pyinstrument-4.5.3.tar.gz", hash = "sha256:0885b01a901231d071cb182de33012e9b8cbd958fb048236ee2a6e760c6c6e21"}, ] [package.extras] -jupyter = ["ipython"] +bin = ["click", "nox"] +docs = ["furo (==2021.6.18b36)", "myst-parser (==0.15.1)", "sphinx (==4.2.0)", "sphinxcontrib-programoutput (==0.17)"] +examples = ["django", "numpy"] +test = ["flaky", "greenlet (>=3.0.0a1)", "ipython", "pytest", "pytest-asyncio (==0.12.0)", "sphinx-autobuild (==2021.3.14)", "trio"] +types = ["typing-extensions"] [[package]] name = "pyjwt" From a2e5fc901f6f98eda1b9c0f82b68293c2c3ad3d1 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 12 Sep 2023 10:11:39 -0300 Subject: [PATCH 024/262] Add timeout to our weekly docker build. (#1368) --- .github/workflows/build-base-image.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index 55a0fcc22a..5c9fbbdd7a 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -15,6 +15,7 @@ jobs: docker-build-baseimage: name: Build Base Image runs-on: ubuntu-latest + timeout-minutes: 60 permissions: contents: read packages: write From f079d2daff467aa400ca290eb46073609f35cef2 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 12 Sep 2023 10:24:28 -0300 Subject: [PATCH 025/262] =?UTF-8?q?Remove=20locally=20hosted=20LCP=20Colle?= =?UTF-8?q?ctions=20=F0=9F=94=A5=20(PP-393)=20(#1344)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * 🔥 Local LCP content * Remove lcp-encrypt from base image * Remove self_hosted property from license_pools * Add migration --- ...1f_remove_self_hosted_from_licensepools.py | 31 ++ api/admin/controller/admin_search.py | 1 - api/admin/dashboard_stats.py | 6 +- api/base_controller.py | 25 +- api/circulation.py | 55 +- api/controller.py | 5 - api/integration/registry/license_providers.py | 2 - api/lcp/__init__.py | 0 api/lcp/collection.py | 368 ------------- api/lcp/controller.py | 138 ----- api/lcp/encrypt.py | 521 ------------------ api/lcp/factory.py | 26 - api/lcp/importer.py | 32 -- api/lcp/mirror.py | 170 ------ api/lcp/server.py | 357 ------------ api/lcp/utils.py | 49 -- api/routes.py | 53 -- api/s3_analytics_provider.py | 5 +- core/lane.py | 4 - core/model/collection.py | 2 - core/model/configuration.py | 1 - core/model/constants.py | 1 - core/model/licensing.py | 3 - core/model/listeners.py | 1 - core/model/work.py | 20 +- core/opds.py | 7 +- docker/Dockerfile.baseimage | 9 - .../test_admin_search_controller.py | 13 - tests/api/lcp/__init__.py | 0 tests/api/lcp/lcp_strings.py | 138 ----- tests/api/lcp/test_collection.py | 355 ------------ tests/api/lcp/test_controller.py | 155 ------ tests/api/lcp/test_encrypt.py | 156 ------ tests/api/lcp/test_importer.py | 36 -- tests/api/lcp/test_mirror.py | 81 --- tests/api/lcp/test_server.py | 319 ----------- tests/api/test_circulationapi.py | 33 +- tests/api/test_controller_base.py | 26 - tests/core/models/test_collection.py | 24 +- tests/core/models/test_listeners.py | 4 - tests/core/models/test_work.py | 30 - tests/core/test_external_search.py | 1 - tests/core/test_lane.py | 9 +- tests/core/test_opds.py | 18 - tests/core/test_s3_analytics_provider.py | 2 +- tests/fixtures/database.py | 9 - 46 files changed, 59 
insertions(+), 3242 deletions(-) create mode 100644 alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py delete mode 100644 api/lcp/__init__.py delete mode 100644 api/lcp/collection.py delete mode 100644 api/lcp/controller.py delete mode 100644 api/lcp/encrypt.py delete mode 100644 api/lcp/factory.py delete mode 100644 api/lcp/importer.py delete mode 100644 api/lcp/mirror.py delete mode 100644 api/lcp/server.py delete mode 100644 api/lcp/utils.py delete mode 100644 tests/api/lcp/__init__.py delete mode 100644 tests/api/lcp/lcp_strings.py delete mode 100644 tests/api/lcp/test_collection.py delete mode 100644 tests/api/lcp/test_controller.py delete mode 100644 tests/api/lcp/test_encrypt.py delete mode 100644 tests/api/lcp/test_importer.py delete mode 100644 tests/api/lcp/test_mirror.py delete mode 100644 tests/api/lcp/test_server.py diff --git a/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py b/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py new file mode 100644 index 0000000000..2b2d0406a8 --- /dev/null +++ b/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py @@ -0,0 +1,31 @@ +"""Remove self_hosted from licensepools + +Revision ID: 1c566151741f +Revises: 2b672c6fb2b9 +Create Date: 2023-08-31 16:13:54.935093+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "1c566151741f" +down_revision = "2b672c6fb2b9" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.drop_index("ix_licensepools_self_hosted", table_name="licensepools") + op.drop_column("licensepools", "self_hosted") + + +def downgrade() -> None: + op.add_column( + "licensepools", + sa.Column("self_hosted", sa.BOOLEAN(), autoincrement=False, nullable=False), + ) + op.create_index( + "ix_licensepools_self_hosted", "licensepools", ["self_hosted"], unique=False + ) diff --git a/api/admin/controller/admin_search.py b/api/admin/controller/admin_search.py index cc7c7c79b9..e2aa1fddbc 100644 --- a/api/admin/controller/admin_search.py +++ b/api/admin/controller/admin_search.py @@ -47,7 +47,6 @@ def _unzip(cls, values: List[Tuple[str, int]]) -> dict: def _search_field_values_cached(self, collection_ids: List[int]) -> dict: licenses_filter = or_( LicensePool.open_access == True, - LicensePool.self_hosted == True, LicensePool.licenses_owned != 0, ) diff --git a/api/admin/dashboard_stats.py b/api/admin/dashboard_stats.py index f5a9973c74..07df5a1c08 100644 --- a/api/admin/dashboard_stats.py +++ b/api/admin/dashboard_stats.py @@ -34,7 +34,6 @@ class Statistics: LicensePool.open_access == False, ) OPEN_ACCESS_FILTER = LicensePool.open_access == True - SELF_HOSTED_FILTER = LicensePool.self_hosted == True AT_LEAST_ONE_LENDABLE_FILTER = or_( UNLIMITED_LICENSE_FILTER, OPEN_ACCESS_FILTER, @@ -67,7 +66,10 @@ def _gather_collection_stats(self, collection: Collection) -> CollectionInventor metered_license_title_count = _count(self.METERED_LICENSE_FILTER) unlimited_license_title_count = _count(self.UNLIMITED_LICENSE_FILTER) open_access_title_count = _count(self.OPEN_ACCESS_FILTER) - self_hosted_title_count = _count(self.SELF_HOSTED_FILTER) + # TODO: We no longer support self-hosted books, so this should always be 0. + # this value is still included in the response for backwards compatibility, + # but should be removed in a future release. 
+ self_hosted_title_count = 0 at_least_one_loanable_count = _count(self.AT_LEAST_ONE_LENDABLE_FILTER) licenses_owned_count, licenses_available_count = map( diff --git a/api/base_controller.py b/api/base_controller.py index 6e48345e70..55a9830440 100644 --- a/api/base_controller.py +++ b/api/base_controller.py @@ -3,7 +3,7 @@ from flask_babel import lazy_gettext as _ from werkzeug.datastructures import Authorization -from core.model import Library, Loan, Patron, get_one +from core.model import Library, Patron from core.util.problem_detail import ProblemDetail from .circulation_exceptions import * @@ -104,29 +104,6 @@ def authenticate(self): data = self.manager.authentication_for_opds_document return Response(data, 401, headers) - def library_through_external_loan_identifier(self, loan_external_identifier): - """Look up the library the user is trying to access using a loan's external identifier. - We assume that the external identifier is globally unique which is true, for example, - in the case of using Readium LCP. - - :param loan_external_identifier: External identifier of the patron's loan - :type loan_external_identifier: basestring - - :return: Library the patron is trying to access - :rtype: Library - """ - self.manager.reload_settings_if_changed() - - loan = get_one(self._db, Loan, external_identifier=loan_external_identifier) - - if loan is None: - return LOAN_NOT_FOUND - - library = loan.patron.library - flask.request.library = library - - return library - def library_for_request(self, library_short_name): """Look up the library the user is trying to access. diff --git a/api/circulation.py b/api/circulation.py index 1b4bf43f5e..078bea0613 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -37,13 +37,11 @@ ConfigurationFormItemType, FormField, ) -from core.mirror import MirrorUploader from core.model import ( CirculationEvent, Collection, DataSource, DeliveryMechanism, - ExternalIntegrationLink, Hold, Library, LicensePool, @@ -943,39 +941,6 @@ def can_revoke_hold(self, licensepool: LicensePool, hold: Hold) -> bool: return True return False - def _try_to_sign_fulfillment_link( - self, licensepool: LicensePool, fulfillment: FulfillmentInfo - ) -> FulfillmentInfo: - """Tries to sign the fulfillment URL (only works in the case when the collection has mirrors set up) - - :param licensepool: License pool - :param fulfillment: Fulfillment info - - :return: Fulfillment info with a possibly signed URL - """ - mirror_types = [ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS] - mirror = next( - iter( - [ - MirrorUploader.for_collection(licensepool.collection, mirror_type) - for mirror_type in mirror_types - ] - ) - ) - - if mirror: - signed_url = mirror.sign_url(fulfillment.content_link) - - self.log.info( - "Fulfillment link {} has been signed and translated into {}".format( - fulfillment.content_link, signed_url - ) - ) - - fulfillment.content_link = signed_url - - return fulfillment - def _collect_event( self, patron: Optional[Patron], @@ -1103,11 +1068,7 @@ def borrow( now = utc_now() api = self.api_for_license_pool(licensepool) - if ( - licensepool.open_access - or licensepool.self_hosted - or (not api and licensepool.unlimited_access) - ): + if licensepool.open_access or (not api and licensepool.unlimited_access): # We can 'loan' open-access content ourselves just by # putting a row in the database. 
__transaction = self._db.begin_nested() @@ -1538,11 +1499,7 @@ def fulfill( api = self.api_for_license_pool(licensepool) - if ( - licensepool.open_access - or licensepool.self_hosted - or (not api and licensepool.unlimited_access) - ): + if licensepool.open_access or (not api and licensepool.unlimited_access): # We ignore the vendor-specific arguments when doing # open-access fulfillment, because we just don't support # partial fulfillment of open-access content. @@ -1555,10 +1512,6 @@ def fulfill( patron, pin, licensepool, delivery_mechanism, fulfillment ) - if licensepool.self_hosted: - fulfillment = self._try_to_sign_fulfillment_link( - licensepool, fulfillment - ) else: if not api: raise CannotFulfill() @@ -1669,7 +1622,7 @@ def revoke_loan( ) if loan is not None: api = self.api_for_license_pool(licensepool) - if not (api is None or licensepool.open_access or licensepool.self_hosted): + if not (api is None or licensepool.open_access): try: api.checkin(patron, pin, licensepool) except NotCheckedOut as e: @@ -1703,7 +1656,7 @@ def release_hold( license_pool=licensepool, on_multiple="interchangeable", ) - if not licensepool.open_access and not licensepool.self_hosted: + if not licensepool.open_access: api = self.api_for_license_pool(licensepool) if api is None: raise TypeError(f"No api for licensepool: {licensepool}") diff --git a/api/controller.py b/api/controller.py index 420408384c..743697e886 100644 --- a/api/controller.py +++ b/api/controller.py @@ -455,10 +455,6 @@ def setup_one_time_controllers(self): self.patron_auth_token = PatronAuthTokenController(self) self.playtime_entries = PlaytimeEntriesController(self) - from api.lcp.controller import LCPController - - self.lcp_controller = LCPController(self) - def setup_configuration_dependent_controllers(self): """Set up all the controllers that depend on the current site configuration. 
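Note on the api/circulation.py hunks above: borrow() and fulfill() used to treat open-access, self-hosted, and unlimited-access pools as locally fulfillable; with self-hosted pools removed, the test reduces to two conditions. A minimal sketch of the resulting predicate, with a hypothetical helper name (the patch itself simply inlines this expression at each call site):

def fulfillable_without_vendor_api(licensepool, api):
    # After this patch a pool can be loaned or fulfilled by just writing a row
    # to the database when it is open access, or when it is unlimited access
    # and no vendor API is configured for its collection.
    return licensepool.open_access or (not api and licensepool.unlimited_access)
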
@@ -762,7 +758,6 @@ def apply_borrowing_policy(self, patron, license_pool): and license_pool.licenses_available == 0 and not license_pool.open_access and not license_pool.unlimited_access - and not license_pool.self_hosted ): return FORBIDDEN_BY_POLICY.detailed( _("Library policy prohibits the placement of holds."), status_code=403 diff --git a/api/integration/registry/license_providers.py b/api/integration/registry/license_providers.py index ef5c539f90..eee96900e9 100644 --- a/api/integration/registry/license_providers.py +++ b/api/integration/registry/license_providers.py @@ -32,7 +32,6 @@ def __init__(self) -> None: from api.axis import Axis360API from api.bibliotheca import BibliothecaAPI from api.enki import EnkiAPI - from api.lcp.collection import LCPAPI from api.odilo import OdiloAPI from api.odl import ODLAPI from api.odl2 import ODL2API @@ -47,7 +46,6 @@ def __init__(self) -> None: self.register(OPDSForDistributorsAPI, canonical=OPDSForDistributorsAPI.NAME) self.register(ODLAPI, canonical=ODLAPI.NAME) self.register(ODL2API, canonical=ODL2API.NAME) - self.register(LCPAPI, canonical=LCPAPI.NAME) class OpenAccessLicenseProvidersRegistry(IntegrationRegistry["OPDSImporter"]): diff --git a/api/lcp/__init__.py b/api/lcp/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/api/lcp/collection.py b/api/lcp/collection.py deleted file mode 100644 index f238ad66cc..0000000000 --- a/api/lcp/collection.py +++ /dev/null @@ -1,368 +0,0 @@ -import datetime -import json -from io import BytesIO - -from flask import send_file -from sqlalchemy import or_ - -from api.circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo -from api.lcp.encrypt import LCPEncryptionSettings -from api.lcp.hash import HasherFactory -from api.lcp.server import LCPServer, LCPServerSettings -from core.integration.base import HasLibraryIntegrationConfiguration -from core.integration.settings import BaseSettings -from core.lcp.credential import LCPCredentialFactory -from core.model import ( - Collection, - DeliveryMechanism, - ExternalIntegration, - LicensePool, - Loan, - get_one, -) -from core.model.configuration import HasExternalIntegration -from core.util.datetime_helpers import utc_now - - -class LCPFulfilmentInfo(FulfillmentInfo): - """Sends LCP licenses as fulfilment info""" - - def __init__( - self, - identifier, - collection, - data_source_name, - identifier_type, - content_link=None, - content_type=None, - content=None, - content_expires=None, - ): - """Initializes a new instance of LCPFulfilmentInfo class - - :param identifier: Identifier - :type identifier: string - - :param collection: Collection - :type collection: Collection - - :param data_source_name: Data source's name - :type data_source_name: string - - :param identifier_type: Identifier's type - :type identifier_type: string - - :param content_link: Content link - :type content_link: Optional[string] - - :param content_link: Identifier's type - :type content_link: string - - :param content: Identifier's type - :type content: Any - - :param content_expires: Time when the content expires - :type content_expires: Optional[datetime.datetime] - """ - super().__init__( - collection, - data_source_name, - identifier_type, - identifier, - content_link, - content_type, - content, - content_expires, - ) - - @property - def as_response(self): - """Returns LCP license as a Flask response - - :return: LCP license as a Flask response - :rtype: Response - """ - return send_file( - BytesIO(json.dumps(self.content)), - 
mimetype=DeliveryMechanism.LCP_DRM, - as_attachment=True, - attachment_filename=f"{self.identifier}.lcpl", - ) - - -class LCPSettings(LCPEncryptionSettings, LCPServerSettings): - pass - - -class LCPLibrarySettings(BaseSettings): - pass - - -class LCPAPI( - BaseCirculationAPI, HasExternalIntegration, HasLibraryIntegrationConfiguration -): - """Implements LCP workflow""" - - NAME = ExternalIntegration.LCP - SERVICE_NAME = "LCP" - DESCRIPTION = "Manually imported collection protected using Readium LCP DRM" - - @classmethod - def settings_class(cls): - return LCPSettings - - @classmethod - def library_settings_class(cls): - return LCPLibrarySettings - - def label(self): - return self.NAME - - def description(self): - return self.DESCRIPTION - - def __init__(self, db, collection): - """Initializes a new instance of LCPAPI class - - :param db: Database session - :type db: sqlalchemy.orm.session.Session - - :param collection: Book collection - :type collection: Collection - """ - if collection.protocol != ExternalIntegration.LCP: - raise ValueError( - "Collection protocol is {} but must be LCPAPI".format( - collection.protocol - ) - ) - - self._db = db - self._collection_id = collection.id - self._lcp_server_instance = None - - def internal_format(self, delivery_mechanism): - """Look up the internal format for this delivery mechanism or - raise an exception. - - :param delivery_mechanism: A LicensePoolDeliveryMechanism - :type delivery_mechanism: LicensePoolDeliveryMechanism - """ - return delivery_mechanism - - @property - def collection(self): - """Returns an associated Collection object - - :return: Associated Collection object - :rtype: Collection - """ - return Collection.by_id(self._db, id=self._collection_id) - - def external_integration(self, db): - """Returns an external integration associated with this object - - :param db: Database session - :type db: sqlalchemy.orm.session.Session - - :return: External integration associated with this object - :rtype: core.model.configuration.ExternalIntegration - """ - return self.collection.external_integration - - def _create_lcp_server(self): - """Creates a new instance of LCPServer - - :return: New instance of LCPServer - :rtype: LCPServer - """ - - hasher_factory = HasherFactory() - credential_factory = LCPCredentialFactory() - lcp_server = LCPServer( - self.configuration, - hasher_factory, - credential_factory, - ) - - return lcp_server - - @property - def _lcp_server(self): - """Returns an instance of LCPServer - - :return: Instance of LCPServer - :rtype: LCPServer - """ - if self._lcp_server_instance is None: - self._lcp_server_instance = self._create_lcp_server() - - return self._lcp_server_instance - - def checkout(self, patron, pin, licensepool, internal_format): - """Checks out a book on behalf of a patron - - :param patron: A Patron object for the patron who wants to check out the book - :type patron: Patron - - :param pin: The patron's alleged password - :type pin: string - - :param licensepool: Contains lending info as well as link to parent Identifier - :type licensepool: LicensePool - - :param internal_format: Represents the patron's desired book format. 
- :type internal_format: Any - - :return: a LoanInfo object - :rtype: LoanInfo - """ - days = self.collection.default_loan_period(patron.library) - today = utc_now() - expires = today + datetime.timedelta(days=days) - loan = get_one( - self._db, - Loan, - patron=patron, - license_pool=licensepool, - on_multiple="interchangeable", - ) - - if loan: - license = self._lcp_server.get_license( - self._db, loan.external_identifier, patron - ) - else: - license = self._lcp_server.generate_license( - self._db, licensepool.identifier.identifier, patron, today, expires - ) - - loan = LoanInfo( - licensepool.collection, - licensepool.data_source.name, - identifier_type=licensepool.identifier.type, - identifier=licensepool.identifier.identifier, - start_date=today, - end_date=expires, - fulfillment_info=None, - external_identifier=license["id"], - ) - - return loan - - def fulfill( - self, - patron, - pin, - licensepool, - internal_format=None, - part=None, - fulfill_part_url=None, - ): - """Get the actual resource file to the patron. - - :param patron: A Patron object for the patron who wants to check out the book - :type patron: Patron - - :param pin: The patron's alleged password - :type pin: string - - :param licensepool: Contains lending info as well as link to parent Identifier - :type licensepool: LicensePool - - :param internal_format: A vendor-specific name indicating the format requested by the patron - :type internal_format: - - :param part: A vendor-specific identifier indicating that the - patron wants to fulfill one specific part of the book - (e.g. one chapter of an audiobook), not the whole thing - :type part: Any - - :param fulfill_part_url: A function that takes one argument (a - vendor-specific part identifier) and returns the URL to use - when fulfilling that part - :type fulfill_part_url: Any - - :return: a FulfillmentInfo object - :rtype: FulfillmentInfo - """ - loan = get_one( - self._db, - Loan, - patron=patron, - license_pool=licensepool, - on_multiple="interchangeable", - ) - license = self._lcp_server.get_license( - self._db, loan.external_identifier, patron - ) - fulfillment_info = LCPFulfilmentInfo( - licensepool.identifier.identifier, - licensepool.collection, - licensepool.data_source.name, - licensepool.identifier.type, - content_link=None, - content_type=DeliveryMechanism.LCP_DRM, - content=license, - content_expires=None, - ) - - return fulfillment_info - - def patron_activity(self, patron, pin): - """Returns patron's loans - - :param patron: A Patron object for the patron who wants to check out the book - :type patron: Patron - - :param pin: The patron's alleged password - :type pin: string - - :return: List of patron's loans - :rtype: List[LoanInfo] - """ - now = utc_now() - loans = ( - self._db.query(Loan) - .join(LicensePool) - .join(Collection) - .filter( - Collection.id == self._collection_id, - Loan.patron == patron, - or_(Loan.start is None, Loan.start <= now), - or_(Loan.end is None, Loan.end > now), - ) - ) - - loan_info_objects = [] - - for loan in loans: - licensepool = get_one(self._db, LicensePool, id=loan.license_pool_id) - - loan_info_objects.append( - LoanInfo( - collection=self.collection, - data_source_name=licensepool.data_source.name, - identifier_type=licensepool.identifier.type, - identifier=licensepool.identifier.identifier, - start_date=loan.start, - end_date=loan.end, - fulfillment_info=None, - external_identifier=loan.external_identifier, - ) - ) - - return loan_info_objects - - # TODO: Implement place_hold and release_hold 
(https://jira.nypl.org/browse/SIMPLY-3013) - def release_hold(self, patron, pin, licensepool): - raise NotImplementedError() - - def place_hold(self, patron, pin, licensepool, notification_email_address): - raise NotImplementedError() - - def checkin(self, patron, pin, licensepool): - raise NotImplementedError() - - def update_availability(self, licensepool): - pass diff --git a/api/lcp/controller.py b/api/lcp/controller.py deleted file mode 100644 index c254ca6bff..0000000000 --- a/api/lcp/controller.py +++ /dev/null @@ -1,138 +0,0 @@ -import logging - -import flask - -from api.admin.problem_details import MISSING_COLLECTION -from api.controller import CirculationManagerController -from api.lcp.factory import LCPServerFactory -from core.lcp.credential import LCPCredentialFactory, LCPUnhashedPassphrase -from core.model import Collection, ExternalIntegration, Session -from core.util.problem_detail import ProblemDetail - - -class LCPController(CirculationManagerController): - """Contains API endpoints related to LCP workflow""" - - def __init__(self, manager): - """Initializes a new instance of LCPController class - - :param manager: CirculationManager object - :type manager: CirculationManager - """ - super().__init__(manager) - - self._logger = logging.getLogger(__name__) - self._credential_factory = LCPCredentialFactory() - self._lcp_server_factory = LCPServerFactory() - - def _get_patron(self): - """Returns a patron associated with the request (if any) - - :return: Patron associated with the request (if any) - :rtype: core.model.patron.Patron - """ - self._logger.info( - "Started fetching an authenticated patron associated with the request" - ) - - patron = self.authenticated_patron_from_request() - - self._logger.info( - "Finished fetching an authenticated patron associated with the request: {}".format( - patron - ) - ) - - return patron - - def _get_lcp_passphrase(self, patron) -> LCPUnhashedPassphrase: - """Returns a patron's LCP passphrase - - :return: Patron's LCP passphrase - :rtype: string - """ - db = Session.object_session(patron) - - self._logger.info("Started fetching a patron's LCP passphrase") - - lcp_passphrase = self._credential_factory.get_patron_passphrase(db, patron) - - self._logger.info( - f"Finished fetching a patron's LCP passphrase: {lcp_passphrase}" - ) - - return lcp_passphrase - - def _get_lcp_collection(self, patron, collection_name): - """Returns an LCP collection for a specified library - NOTE: We assume that there is only ONE LCP collection per library - - :param patron: Patron object - :type patron: core.model.patron.Patron - - :param collection_name: Name of the collection - :type collection_name: string - - :return: LCP collection - :rtype: core.model.collection.Collection - """ - db = Session.object_session(patron) - lcp_collection, _ = Collection.by_name_and_protocol( - db, collection_name, ExternalIntegration.LCP - ) - - if not lcp_collection or lcp_collection not in patron.library.collections: - return MISSING_COLLECTION - - return lcp_collection - - def get_lcp_passphrase(self): - """Returns an LCP passphrase for the authenticated patron - - :return: Flask response containing the LCP passphrase for the authenticated patron - :rtype: Response - """ - self._logger.info("Started fetching a patron's LCP passphrase") - - patron = self._get_patron() - lcp_passphrase = self._get_lcp_passphrase(patron) - - self._logger.info( - "Finished fetching a patron's LCP passphrase: {}".format( - lcp_passphrase.text - ) - ) - - response = flask.jsonify({"passphrase": 
lcp_passphrase.text}) - - return response - - def get_lcp_license(self, collection_name, license_id): - """Returns an LCP license with the specified ID - - :param collection_name: Name of the collection - :type collection_name: string - - :param license_id: License ID - :type license_id: string - - :return: Flask response containing the LCP license with the specified ID - :rtype: string - """ - self._logger.info(f"Started fetching license # {license_id}") - - patron = self._get_patron() - lcp_collection = self._get_lcp_collection(patron, collection_name) - - if isinstance(lcp_collection, ProblemDetail): - return lcp_collection - - lcp_api = self.circulation.api_for_collection.get(lcp_collection.id) - lcp_server = self._lcp_server_factory.create(lcp_api) - - db = Session.object_session(patron) - lcp_license = lcp_server.get_license(db, license_id, patron) - - self._logger.info(f"Finished fetching license # {license_id}: {lcp_license}") - - return flask.jsonify(lcp_license) diff --git a/api/lcp/encrypt.py b/api/lcp/encrypt.py deleted file mode 100644 index dfb158800e..0000000000 --- a/api/lcp/encrypt.py +++ /dev/null @@ -1,521 +0,0 @@ -import json -import logging -import os -import re -import subprocess -from json import JSONEncoder -from typing import Optional - -from flask_babel import lazy_gettext as _ - -from api.lcp import utils -from core.exceptions import BaseError -from core.integration.settings import ( - BaseSettings, - ConfigurationFormItem, - ConfigurationFormItemType, - FormField, -) -from core.model.integration import IntegrationConfiguration - - -class LCPEncryptionException(BaseError): - """Raised in the case of any errors occurring during LCP encryption process""" - - -class LCPEncryptionConstants: - DEFAULT_LCPENCRYPT_LOCATION = "/go/bin/lcpencrypt" - DEFAULT_LCPENCRYPT_DOCKER_IMAGE = "readium/lcpencrypt" - - -class LCPEncryptionSettings(BaseSettings): - lcpencrypt_location: str = FormField( - default=LCPEncryptionConstants.DEFAULT_LCPENCRYPT_LOCATION, - form=ConfigurationFormItem( - label=_("lcpencrypt's location"), - description=_( - "Full path to the local lcpencrypt binary. " - "The default value is {}".format( - LCPEncryptionConstants.DEFAULT_LCPENCRYPT_LOCATION - ) - ), - type=ConfigurationFormItemType.TEXT, - required=False, - ), - ) - - lcpencrypt_output_directory: Optional[str] = FormField( - form=ConfigurationFormItem( - label=_("lcpencrypt's output directory"), - description=_( - "Full path to the directory where lcpencrypt stores encrypted content. 
" - "If not set encrypted books will be stored in lcpencrypt's working directory" - ), - type=ConfigurationFormItemType.TEXT, - required=False, - ) - ) - - -class LCPEncryptionResult: - """Represents an output sent by lcpencrypt""" - - CONTENT_ID = "content-id" - CONTENT_ENCRYPTION_KEY = "content-encryption-key" - PROTECTED_CONTENT_LOCATION = "protected-content-location" - PROTECTED_CONTENT_LENGTH = "protected-content-length" - PROTECTED_CONTENT_SHA256 = "protected-content-sha256" - PROTECTED_CONTENT_DISPOSITION = "protected-content-disposition" - PROTECTED_CONTENT_TYPE = "protected-content-type" - - def __init__( - self, - content_id, - content_encryption_key, - protected_content_location, - protected_content_disposition, - protected_content_type, - protected_content_length, - protected_content_sha256, - ): - """Initializes a new instance of LCPEncryptorResult class - - :param: content_id: Content identifier - :type content_id: Optional[string] - - :param: content_encryption_key: Content encryption key - :type content_encryption_key: Optional[string] - - :param: protected_content_location: Complete file path of the encrypted content - :type protected_content_location: Optional[string] - - :param: protected_content_disposition: File name of the encrypted content - :type protected_content_disposition: Optional[string] - - :param: protected_content_type: Media type of the encrypted content - :type protected_content_type: Optional[string] - - :param: protected_content_length: Size of the encrypted content - :type protected_content_length: Optional[string] - - :param: protected_content_sha256: Hash of the encrypted content - :type protected_content_sha256: Optional[string] - """ - self._content_id = content_id - self._content_encryption_key = content_encryption_key - self._protected_content_location = protected_content_location - self._protected_content_disposition = protected_content_disposition - self._protected_content_type = protected_content_type - self._protected_content_length = protected_content_length - self._protected_content_sha256 = protected_content_sha256 - - @property - def content_id(self): - """Returns a content encryption key - - :return: Content encryption key - :rtype: Optional[string] - """ - return self._content_id - - @property - def content_encryption_key(self): - """Returns a content identifier - - :return: Content identifier - :rtype: Optional[string] - """ - return self._content_encryption_key - - @property - def protected_content_location(self): - """Returns a complete file path of the encrypted content - - :return: Complete file path of the encrypted content - :rtype: Optional[string] - """ - return self._protected_content_location - - @property - def protected_content_disposition(self): - """Returns a file name of the encrypted content - - :return: File name of the encrypted content - :rtype: Optional[string] - """ - return self._protected_content_disposition - - @property - def protected_content_type(self): - """Returns a media type of the encrypted content - - :return: Media type of the encrypted content - :rtype: Optional[string] - """ - return self._protected_content_type - - @property - def protected_content_length(self): - """Returns a size of the encrypted content - - :return: Size of the encrypted content - :rtype: Optional[string] - """ - return self._protected_content_length - - @property - def protected_content_sha256(self): - """Returns a hash of the encrypted content - - :return: Hash of the encrypted content - :rtype: Optional[string] - """ - return 
self._protected_content_sha256 - - @classmethod - def from_dict(cls, result_dict): - """Creates an LCPEncryptorResult object from a Python dictionary - - :param result_dict: Python dictionary containing an lcpencrypt output - :type result_dict: Dict - - :return: LCPEncryptorResult object - :rtype: LCPEncryptionResult - """ - content_id = result_dict.get(cls.CONTENT_ID) - content_encryption_key = result_dict.get(cls.CONTENT_ENCRYPTION_KEY) - protected_content_location = result_dict.get(cls.PROTECTED_CONTENT_LOCATION) - protected_content_length = result_dict.get(cls.PROTECTED_CONTENT_LENGTH) - protected_content_sha256 = result_dict.get(cls.PROTECTED_CONTENT_SHA256) - protected_content_disposition = result_dict.get( - cls.PROTECTED_CONTENT_DISPOSITION - ) - protected_content_type = result_dict.get(cls.PROTECTED_CONTENT_TYPE) - - return cls( - content_id=content_id, - content_encryption_key=content_encryption_key, - protected_content_location=protected_content_location, - protected_content_disposition=protected_content_disposition, - protected_content_type=protected_content_type, - protected_content_length=protected_content_length, - protected_content_sha256=protected_content_sha256, - ) - - def __eq__(self, other): - """Compares two LCPEncryptorResult objects - - :param other: LCPEncryptorResult object - :type other: LCPEncryptionResult - - :return: Boolean value indicating whether two items are equal - :rtype: bool - """ - if not isinstance(other, LCPEncryptionResult): - return False - - return ( - self.content_id == other.content_id - and self.content_encryption_key == other.content_encryption_key - and self.protected_content_location == other.protected_content_location - and self.protected_content_length == other.protected_content_length - and self.protected_content_sha256 == other.protected_content_sha256 - and self.protected_content_disposition - == other.protected_content_disposition - and self.protected_content_type == other.protected_content_type - ) - - def __repr__(self): - """Returns a string representation of a LCPEncryptorResult object - - :return: string representation of a LCPEncryptorResult object - :rtype: string - """ - return ( - "".format( - self.content_id, - self.content_encryption_key, - self.protected_content_location, - self.protected_content_length, - self.protected_content_sha256, - self.protected_content_disposition, - self.protected_content_type, - ) - ) - - -class LCPEncryptorResultJSONEncoder(JSONEncoder): - """Serializes LCPEncryptorResult as a JSON object""" - - def default(self, result): - """Serializers a Subject object to JSON - - :param result: LCPEncryptorResult object - :type result: LCPEncryptionResult - - :return: String containing JSON representation of the LCPEncryptorResult object - :rtype: string - """ - if not isinstance(result, LCPEncryptionResult): - raise ValueError("result must have type LCPEncryptorResult") - - result = { - "content-id": result.content_id, - "content-encryption-key": result.content_encryption_key, - "protected-content-location": result.protected_content_location, - "protected-content-length": result.protected_content_length, - "protected-content-sha256": result.protected_content_sha256, - "protected-content-disposition": result.protected_content_disposition, - "protected-content-type": result.protected_content_type, - } - - return result - - -class LCPEncryptor: - """Wrapper around lcpencrypt tool containing logic to run it locally and in a Docker container""" - - class Parameters: - """Parses input parameters for 
lcpencrypt""" - - def __init__( - self, - file_path: str, - identifier: str, - configuration: IntegrationConfiguration, - ): - """Initializes a new instance of Parameters class - - :param file_path: File path to the book to be encrypted - - :param identifier: Book's identifier - - :param configuration: IntegrationConfiguration instance - """ - self._lcpencrypt_location = configuration.settings_dict.get( - "lcpencrypt_location" - ) - self._input_file_path = str(file_path) - self._content_id = str(identifier) - - output_directory = configuration.settings_dict.get( - "lcpencrypt_output_directory" - ) - - self._output_file_path = None - - if output_directory: - _, input_extension = os.path.splitext(file_path) - target_extension = utils.get_target_extension(input_extension) - output_file_path = os.path.join( - output_directory, - identifier + target_extension - if target_extension not in identifier - else identifier, - ) - - self._output_file_path = output_file_path - - @property - def lcpencrypt_location(self): - """Returns location of lcpencrypt binary - - :return: Location of lcpencrypt binary - :rtype: string - """ - return self._lcpencrypt_location - - @property - def input_file_path(self): - """Returns path of the input file - - :return: Path of the input file - :rtype: string - """ - return self._input_file_path - - @property - def content_id(self): - """Returns content ID - - :return: Content ID - :rtype: string - """ - return self._content_id - - @property - def output_file_path(self): - """Returns path of the output file - - :return: Path of the output file - :rtype: string - """ - return self._output_file_path - - def to_array(self): - """Returns parameters in an array - - :return: Parameters in an array - :rtype: List - """ - parameters = [ - self._lcpencrypt_location, - "-input", - self._input_file_path, - "-contentid", - self._content_id, - ] - - if self._output_file_path: - parameters.extend(["-output", self._output_file_path]) - - return parameters - - OUTPUT_REGEX = re.compile(r"(\{.+\})?(.+)", re.DOTALL) - - def __init__(self, configuration: IntegrationConfiguration): - """Initializes a new instance of LCPEncryptor class - - :param configuration: The integration configuration of the collection - """ - self._logger = logging.getLogger(__name__) - self.configuration = configuration - - def _lcpencrypt_exists_locally(self): - """Returns a Boolean value indicating whether lcpencrypt exists locally""" - return os.path.isfile( - self.configuration.settings_dict.get("lcpencrypt_location") - ) - - def _parse_output(self, output): - """Parses lcpencrypt's output - - :param output: lcpencrypt's output - :type output: string - - :return: Encryption result - :rtype: LCPEncryptionResult - """ - bracket_index = output.find("{") - - if bracket_index > 0: - output = output[bracket_index:] - - match = self.OUTPUT_REGEX.match(output) - - if not match: - raise LCPEncryptionException("Output has a wrong format") - - match_groups = match.groups() - - if not match_groups: - raise LCPEncryptionException("Output has a wrong format") - - if not match_groups[0]: - raise LCPEncryptionException(match_groups[1].strip()) - - json_output = match_groups[0] - json_result = json.loads(json_output) - result = LCPEncryptionResult.from_dict(json_result) - - if ( - not result.protected_content_length - or not result.protected_content_sha256 - or not result.content_encryption_key - ): - raise LCPEncryptionException("Encryption failed") - - return result - - def _run_lcpencrypt_locally( - self, file_path: str, 
identifier: str - ) -> LCPEncryptionResult: - """Runs lcpencrypt using a local binary - - :param file_path: File path to the book to be encrypted - :type file_path: string - - :param identifier: Book's identifier - :type identifier: string - - :return: Encryption result - :rtype: LCPEncryptionResult - """ - self._logger.info( - "Started running a local lcpencrypt binary. File path: {}. Identifier: {}".format( - file_path, identifier - ) - ) - - parameters = LCPEncryptor.Parameters(file_path, identifier, self.configuration) - - try: - if parameters.output_file_path: - self._logger.info( - "Creating a directory tree for {}".format( - parameters.output_file_path - ) - ) - - output_directory = os.path.dirname(parameters.output_file_path) - - if not os.path.exists(output_directory): - os.makedirs(output_directory) - - self._logger.info( - "Directory tree {} has been successfully created".format( - output_directory - ) - ) - - self._logger.info( - "Running lcpencrypt using the following parameters: {}".format( - parameters.to_array() - ) - ) - - output = subprocess.check_output(parameters.to_array()) - result = self._parse_output(output) - except Exception as exception: - self._logger.exception( - "An unhandled exception occurred during running a local lcpencrypt binary" - ) - - raise LCPEncryptionException(str(exception), inner_exception=exception) - - self._logger.info( - "Finished running a local lcpencrypt binary. File path: {}. Identifier: {}. Result: {}".format( - file_path, identifier, result - ) - ) - - return result - - def encrypt(self, db, file_path, identifier): - """Encrypts a book - - :param db: Database session - :type db: sqlalchemy.orm.session.Session - - :param file_path: File path to the book to be encrypted - :type file_path: string - - :param identifier: Book's identifier - :type identifier: string - - :return: Encryption result - :rtype: LCPEncryptionResult - """ - if self._lcpencrypt_exists_locally(): - result = self._run_lcpencrypt_locally(file_path, identifier) - - return result - else: - raise NotImplementedError() diff --git a/api/lcp/factory.py b/api/lcp/factory.py deleted file mode 100644 index 3c4f785abf..0000000000 --- a/api/lcp/factory.py +++ /dev/null @@ -1,26 +0,0 @@ -from api.lcp.hash import HasherFactory -from api.lcp.server import LCPServer -from core.lcp.credential import LCPCredentialFactory - - -class LCPServerFactory: - """Creates a new instance of LCPServer""" - - def create(self, integration_association) -> LCPServer: - """Creates a new instance of LCPServer - - :param integration_association: Association with an external integration - :type integration_association: core.model.configuration.HasExternalIntegration - - :return: New instance of LCPServer - :rtype: LCPServer - """ - hasher_factory = HasherFactory() - credential_factory = LCPCredentialFactory() - lcp_server = LCPServer( - integration_association.configuration, - hasher_factory, - credential_factory, - ) - - return lcp_server diff --git a/api/lcp/importer.py b/api/lcp/importer.py deleted file mode 100644 index 2bedfbbbef..0000000000 --- a/api/lcp/importer.py +++ /dev/null @@ -1,32 +0,0 @@ -class LCPImporter: - """Class implementing LCP import workflow""" - - def __init__(self, lcp_encryptor, lcp_server): - """Initializes a new instance of LCPImporter class - - :param lcp_encryptor: LCPEncryptor object - :type lcp_encryptor: encrypt.LCPEncryptor - - :param lcp_server: LCPServer object - :type lcp_server: server.LCPServer - """ - self._lcp_encryptor = lcp_encryptor - self._lcp_server = 
lcp_server - - def import_book(self, db, file_path, identifier): - """Encrypts a book and sends a notification to the LCP server - - :param db: Database session - :type db: sqlalchemy.orm.session.Session - - :param file_path: File path to the book to be encrypted - :type file_path: string - - :param identifier: Book's identifier - :type identifier: string - - :return: Encryption result - :rtype: LCPEncryptionResult - """ - encrypted_content = self._lcp_encryptor.encrypt(db, file_path, identifier) - self._lcp_server.add_content(db, encrypted_content) diff --git a/api/lcp/mirror.py b/api/lcp/mirror.py deleted file mode 100644 index 729daa5e36..0000000000 --- a/api/lcp/mirror.py +++ /dev/null @@ -1,170 +0,0 @@ -import tempfile - -from flask_babel import lazy_gettext as _ -from sqlalchemy.orm import Session - -from api.lcp.encrypt import LCPEncryptor -from api.lcp.hash import HasherFactory -from api.lcp.importer import LCPImporter -from api.lcp.server import LCPServer, LCPServerSettings -from core.lcp.credential import LCPCredentialFactory -from core.mirror import MirrorUploader -from core.model import Collection, ExternalIntegration -from core.model.collection import HasExternalIntegrationPerCollection -from core.model.configuration import ConfigurationAttributeType, ConfigurationMetadata -from core.s3 import MinIOUploader, MinIOUploaderConfiguration, S3UploaderConfiguration - - -class LCPMirrorConfiguration(S3UploaderConfiguration): - endpoint_url = ConfigurationMetadata( - key=MinIOUploaderConfiguration.endpoint_url.key, - label=_("Endpoint URL"), - description=_("S3 endpoint URL"), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - -class LCPMirror(MinIOUploader, HasExternalIntegrationPerCollection): - """Implements LCP import workflow: - 1. Encrypts unencrypted books using lcpencrypt - 2. Sends encrypted books to the LCP License Server - 3. 
LCP License Server generates license metadata and uploads encrypted books to the encrypted_repository - """ - - NAME = ExternalIntegration.LCP - SETTINGS = [ - S3UploaderConfiguration.access_key.to_settings(), - S3UploaderConfiguration.secret_key.to_settings(), - S3UploaderConfiguration.protected_access_content_bucket.to_settings(), - S3UploaderConfiguration.s3_region.to_settings(), - S3UploaderConfiguration.s3_addressing_style.to_settings(), - S3UploaderConfiguration.s3_presigned_url_expiration.to_settings(), - S3UploaderConfiguration.url_template.to_settings(), - LCPMirrorConfiguration.endpoint_url.to_settings(), - ] - - def __init__(self, integration): - """Initializes a new instance of LCPMirror class - - :param integration: External integration containing mirror's properties - :type integration: ExternalIntegration - """ - super().__init__(integration) - - self._lcp_importer_instance = None - - def _create_lcp_importer(self, collection): - """Creates a new instance of LCPImporter - - :param collection: Collection object - :type collection: Collection - - :return: New instance of LCPImporter - :rtype: LCPImporter - """ - configuration = collection.integration_configuration - hasher_factory = HasherFactory() - credential_factory = LCPCredentialFactory() - lcp_encryptor = LCPEncryptor(configuration) - lcp_server = LCPServer( - lambda: LCPServerSettings(**configuration.settings_dict), - hasher_factory, - credential_factory, - ) - lcp_importer = LCPImporter(lcp_encryptor, lcp_server) - - return lcp_importer - - def collection_external_integration(self, collection): - """Returns an external integration associated with the collection - - :param collection: Collection - :type collection: core.model.Collection - - :return: External integration associated with the collection - :rtype: core.model.configuration.ExternalIntegration - """ - db = Session.object_session(collection) - external_integration = ( - db.query(ExternalIntegration) - .join(Collection) - .filter(Collection.id == collection.id) - .one() - ) - - return external_integration - - def cover_image_root(self, bucket, data_source, scaled_size=None): - raise NotImplementedError() - - def marc_file_root(self, bucket, library): - raise NotImplementedError() - - def book_url( - self, - identifier, - extension=".epub", - open_access=False, - data_source=None, - title=None, - ): - """Returns the path to the hosted EPUB file for the given identifier.""" - bucket = self.get_bucket( - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY - if open_access - else S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY - ) - root = self.content_root(bucket) - book_url = root + self.key_join([identifier.identifier]) - - return book_url - - def cover_image_url(self, data_source, identifier, filename, scaled_size=None): - raise NotImplementedError() - - def marc_file_url(self, library, lane, end_time, start_time=None): - raise NotImplementedError() - - def mirror_one(self, representation, mirror_to, collection=None): - """Uploads an encrypted book to the encrypted_repository via LCP License Server - - :param representation: Book's representation - :type representation: Representation - - :param mirror_to: Mirror URL - :type mirror_to: string - - :param collection: Collection - :type collection: Optional[Collection] - """ - db = Session.object_session(representation) - bucket = self.get_bucket(S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY) - content_root = self.content_root(bucket) - identifier = mirror_to.replace(content_root, "") - lcp_importer = 
self._create_lcp_importer(collection) - - # First, we need to copy unencrypted book's content to a temporary file - with tempfile.NamedTemporaryFile( - suffix=representation.extension(representation.media_type) - ) as temporary_file: - temporary_file.write(representation.content_fh().read()) - temporary_file.flush() - - # Secondly, we execute import: - # 1. Encrypt the temporary file containing the unencrypted book using lcpencrypt - # 2. Send the encrypted book to the LCP License Server - # 3. LCP License Server generates license metadata - # 4. LCP License Server uploads the encrypted book to the encrypted_repository (S3 or EFS) - lcp_importer.import_book(db, temporary_file.name, identifier) - - # Thirdly, we remove unencrypted content from the database - transaction = db.begin_nested() - representation.content = None - transaction.commit() - - def do_upload(self, representation): - raise NotImplementedError() - - -MirrorUploader.IMPLEMENTATION_REGISTRY[LCPMirror.NAME] = LCPMirror diff --git a/api/lcp/server.py b/api/lcp/server.py deleted file mode 100644 index 18fe56fcbb..0000000000 --- a/api/lcp/server.py +++ /dev/null @@ -1,357 +0,0 @@ -from __future__ import annotations - -import json -import os -import urllib.parse -from typing import TYPE_CHECKING, Callable, Optional - -import requests -from flask_babel import lazy_gettext as _ -from pydantic import PositiveInt -from requests.auth import HTTPBasicAuth - -from api.lcp import utils -from api.lcp.encrypt import LCPEncryptionResult, LCPEncryptorResultJSONEncoder -from api.lcp.hash import HasherFactory, HashingAlgorithm -from core.integration.settings import ( - BaseSettings, - ConfigurationFormItem, - ConfigurationFormItemType, - FormField, -) -from core.lcp.credential import ( - LCPCredentialFactory, - LCPHashedPassphrase, - LCPUnhashedPassphrase, -) - -if TYPE_CHECKING: - pass - - -class LCPServerConstants: - DEFAULT_PAGE_SIZE = 100 - DEFAULT_PASSPHRASE_HINT = ( - "If you do not remember your passphrase, please contact your administrator" - ) - DEFAULT_ENCRYPTION_ALGORITHM = HashingAlgorithm.SHA256.value - - -class LCPServerSettings(BaseSettings): - lcpserver_url: str = FormField( - form=ConfigurationFormItem( - label=_("LCP License Server's URL"), - description=_("URL of the LCP License Server"), - type=ConfigurationFormItemType.TEXT, - required=True, - ) - ) - - lcpserver_user: str = FormField( - form=ConfigurationFormItem( - label=_("LCP License Server's user"), - description=_("Name of the user used to connect to the LCP License Server"), - type=ConfigurationFormItemType.TEXT, - required=True, - ) - ) - - lcpserver_password: str = FormField( - form=ConfigurationFormItem( - label=_("LCP License Server's password"), - description=_( - "Password of the user used to connect to the LCP License Server" - ), - type=ConfigurationFormItemType.TEXT, - required=True, - ) - ) - - lcpserver_input_directory: str = FormField( - form=ConfigurationFormItem( - label=_("LCP License Server's input directory"), - description=_( - "Full path to the directory containing encrypted books. 
" - "This directory should be the same as lcpencrypt's output directory" - ), - type=ConfigurationFormItemType.TEXT, - required=True, - ) - ) - - lcpserver_page_size: Optional[PositiveInt] = FormField( - default=LCPServerConstants.DEFAULT_PAGE_SIZE, - form=ConfigurationFormItem( - label=_("LCP License Server's page size"), - description=_("Number of licences returned by the server"), - type=ConfigurationFormItemType.NUMBER, - required=False, - ), - ) - - provider_name: str = FormField( - form=ConfigurationFormItem( - label=_("LCP service provider's identifier"), - description=_("URI that identifies the provider in an unambiguous way"), - type=ConfigurationFormItemType.TEXT, - required=True, - ) - ) - - passphrase_hint: Optional[str] = FormField( - default=LCPServerConstants.DEFAULT_PASSPHRASE_HINT, - form=ConfigurationFormItem( - label=_("Passphrase hint"), - description=_("Hint proposed to the user for selecting their passphrase"), - type=ConfigurationFormItemType.TEXT, - required=False, - ), - ) - - encryption_algorithm: Optional[str] = FormField( - default=LCPServerConstants.DEFAULT_ENCRYPTION_ALGORITHM, - form=ConfigurationFormItem( - label=_("Passphrase encryption algorithm"), - description=_("Algorithm used for encrypting the passphrase"), - type=ConfigurationFormItemType.SELECT, - required=False, - options=ConfigurationFormItemType.options_from_enum(HashingAlgorithm), - ), - ) - - max_printable_pages: Optional[PositiveInt] = FormField( - form=ConfigurationFormItem( - label=_("Maximum number or printable pages"), - description=_( - "Maximum number of pages that can be printed over the lifetime of the license" - ), - type=ConfigurationFormItemType.NUMBER, - required=False, - ), - ) - - max_copiable_pages: Optional[PositiveInt] = FormField( - form=ConfigurationFormItem( - label=_("Maximum number or copiable characters"), - description=_( - "Maximum number of characters that can be copied to the clipboard" - ), - type=ConfigurationFormItemType.NUMBER, - required=False, - ), - ) - - -class LCPServer: - """Wrapper around LCP License Server's API""" - - def __init__( - self, - get_configuration: Callable[[], BaseSettings], - hasher_factory: HasherFactory, - credential_factory: LCPCredentialFactory, - ): - """Initializes a new instance of LCPServer class - - :param get_configuration: Factory responsible for providing configuration objects from the database - :param hasher_factory: Factory responsible for creating Hasher implementations - :param credential_factory: Factory responsible for creating Hasher implementations - """ - self.get_configuration = get_configuration - self._hasher_factory = hasher_factory - self._credential_factory = credential_factory - self._hasher_instance = None - - def _get_hasher(self): - """Returns a Hasher instance - - :return: Hasher instance - :rtype: hash.Hasher - """ - if self._hasher_instance is None: - self._hasher_instance = self._hasher_factory.create( - self.get_configuration().encryption_algorithm - ) - - return self._hasher_instance - - def _create_partial_license(self, db, patron, license_start=None, license_end=None): - """Creates a partial LCP license used an input by the LCP License Server for generation of LCP licenses - - :param patron: Patron object - :type patron: Patron - - :param license_start: Date and time when the license begins - :type license_start: Optional[datetime.datetime] - - :param license_end: Date and time when the license ends - :type license_end: Optional[datetime.datetime] - - :return: Partial LCP license - :rtype: Dict - """ 
- hasher = self._get_hasher() - unhashed_passphrase: LCPUnhashedPassphrase = ( - self._credential_factory.get_patron_passphrase(db, patron) - ) - hashed_passphrase: LCPHashedPassphrase = unhashed_passphrase.hash(hasher) - self._credential_factory.set_hashed_passphrase(db, patron, hashed_passphrase) - - config = self.get_configuration() - partial_license = { - "provider": config.provider_name, - "encryption": { - "user_key": { - "text_hint": config.passphrase_hint, - "hex_value": hashed_passphrase.hashed, - } - }, - } - - if patron: - partial_license["user"] = { - "id": self._credential_factory.get_patron_id(db, patron) - } - - rights_fields = [ - license_start, - license_end, - config.max_printable_pages, - config.max_copiable_pages, - ] - - if any( - [ - rights_field is not None and rights_field != "" - for rights_field in rights_fields - ] - ): - partial_license["rights"] = {} - - if license_start: - partial_license["rights"]["start"] = utils.format_datetime(license_start) - if license_end: - partial_license["rights"]["end"] = utils.format_datetime(license_end) - if config.max_printable_pages is not None and config.max_printable_pages != "": - partial_license["rights"]["print"] = int(config.max_printable_pages) - if config.max_copiable_pages is not None and config.max_copiable_pages != "": - partial_license["rights"]["copy"] = int(config.max_copiable_pages) - - return partial_license - - @staticmethod - def _send_request(configuration, method, path, payload, json_encoder=None): - """Sends a request to the LCP License Server - - :param path: URL path part - :type path: string - - :param payload: Dictionary containing request's payload (should be JSON compatible) - :type payload: Union[Dict, object] - - :param json_encoder: JSON encoder - :type json_encoder: JSONEncoder - - :return: Dictionary containing LCP License Server's response - :rtype: Dict - """ - json_payload = json.dumps(payload, cls=json_encoder) - url = urllib.parse.urljoin(configuration.lcpserver_url, path) - response = requests.request( - method, - url, - data=json_payload, - headers={"Content-Type": "application/json"}, - auth=HTTPBasicAuth( - configuration.lcpserver_user, configuration.lcpserver_password - ), - ) - - response.raise_for_status() - - return response - - def add_content(self, db, encrypted_content): - """Notifies LCP License Server about new encrypted content - - :param db: Database session - :type db: sqlalchemy.orm.session.Session - - :param encrypted_content: LCPEncryptionResult object containing information about encrypted content - :type encrypted_content: LCPEncryptionResult - """ - config = self.get_configuration() - content_location = os.path.join( - config.lcpserver_input_directory, - encrypted_content.protected_content_disposition, - ) - payload = LCPEncryptionResult( - content_id=encrypted_content.content_id, - content_encryption_key=encrypted_content.content_encryption_key, - protected_content_location=content_location, - protected_content_disposition=encrypted_content.protected_content_disposition, - protected_content_type=encrypted_content.protected_content_type, - protected_content_length=encrypted_content.protected_content_length, - protected_content_sha256=encrypted_content.protected_content_sha256, - ) - path = f"/contents/{encrypted_content.content_id}" - - self._send_request(config, "put", path, payload, LCPEncryptorResultJSONEncoder) - - def generate_license(self, db, content_id, patron, license_start, license_end): - """Generates a new LCP license - - :param db: Database session - :type 
db: sqlalchemy.orm.session.Session - - :param content_id: Unique content ID - :type content_id: string - - :param patron: Patron object - :type patron: Patron - - :param license_start: Unique patron ID - :type license_start: string - - :param license_start: Date and time when the license begins - :type license_start: datetime.datetime - - :param license_end: Date and time when the license ends - :type license_end: datetime.datetime - - :return: LCP license - :rtype: Dict - """ - partial_license_payload = self._create_partial_license( - db, patron, license_start, license_end - ) - path = f"contents/{content_id}/license" - response = self._send_request( - self.get_configuration(), "post", path, partial_license_payload - ) - - return response.json() - - def get_license(self, db, license_id, patron): - """Returns an existing license - - :param db: Database session - :type db: sqlalchemy.orm.session.Session - - :param license_id: License's ID - :type license_id: int - - :param patron: Patron object - :type patron: Patron - - :return: Existing license - :rtype: string - """ - partial_license_payload = self._create_partial_license(db, patron) - path = f"licenses/{license_id}" - - response = self._send_request( - self.get_configuration(), "post", path, partial_license_payload - ) - - return response.json() diff --git a/api/lcp/utils.py b/api/lcp/utils.py deleted file mode 100644 index 89a37153c9..0000000000 --- a/api/lcp/utils.py +++ /dev/null @@ -1,49 +0,0 @@ -from core.lcp.exceptions import LCPError - - -def format_datetime(datetime_value): - """Converts a datetime value into a string using the format which Go understands - - :param datetime_value: Datetime value - :type datetime_value: datetime.datetime - - :return: String representation of the datetime value - :rtype: string - """ - datetime_string_value = datetime_value.strftime("%Y-%m-%dT%H:%M:%S") - - # NOTE: Go can parse only strings where the timezone contains a colon (e.g., -07:00) - # Unfortunately, Python doesn't support such format and we have to do it manually - # We assume that all the dates are in UTC - datetime_string_value += "+00:00" - - return datetime_string_value - - -def get_target_extension(input_extension): - if input_extension == ".epub": - target_extension = ".epub" - elif input_extension == ".pdf": - target_extension = ".lcpdf" - elif input_extension == ".lpf": - target_extension = ".audiobook" - elif input_extension == ".audiobook": - target_extension = ".audiobook" - else: - raise LCPError(f'Unknown extension "{input_extension}"') - - return target_extension - - -def bind_method(instance, func, as_name=None): - """Bind the function *func* to *instance*, with either provided name *as_name* - or the existing name of *func*. The provided *func* should accept the - instance as the first argument, i.e. "self". - """ - if as_name is None: - as_name = func.__name__ - - bound_method = func.__get__(instance, instance.__class__) - setattr(instance, as_name, bound_method) - - return bound_method diff --git a/api/routes.py b/api/routes.py index 840bd4812a..ed07b6c1c7 100644 --- a/api/routes.py +++ b/api/routes.py @@ -140,42 +140,6 @@ def decorated(*args, **kwargs): return decorated -def has_library_through_external_loan_identifier( - parameter_name="external_loan_identifier", -): - """Decorator to get a library using the loan's external identifier. 
- - :param parameter_name: Name of the parameter holding the loan's external identifier - :type parameter_name: string - - :return: Decorated function - :rtype: Callable - """ - - def decorator(func): - @wraps(func) - def wrapper(*args, **kwargs): - if parameter_name in kwargs: - external_loan_identifier = kwargs[parameter_name] - else: - external_loan_identifier = None - - library = ( - app.manager.index_controller.library_through_external_loan_identifier( - external_loan_identifier - ) - ) - - if isinstance(library, ProblemDetail): - return library.response - else: - return func(*args, **kwargs) - - return wrapper - - return decorator - - def allows_library(f): """Decorator similar to @has_library but if there is no library short name, then don't set the request library. @@ -650,23 +614,6 @@ def saml_callback(): ) -@app.route("//lcp/licenses//hint") -@app.route("//lcp/licenses//hint") -@has_library_through_external_loan_identifier(parameter_name="license_id") -@requires_auth -@returns_problem_detail -def lcp_passphrase(collection_name, license_id): - return app.manager.lcp_controller.get_lcp_passphrase() - - -@app.route("//lcp/licenses/") -@has_library_through_external_loan_identifier(parameter_name="license_id") -@requires_auth -@returns_problem_detail -def lcp_license(collection_name, license_id): - return app.manager.lcp_controller.get_lcp_license(collection_name, license_id) - - # Loan notifications for ODL distributors, eg. Feedbooks @library_route("/odl_notify/", methods=["GET", "POST"]) @has_library diff --git a/api/s3_analytics_provider.py b/api/s3_analytics_provider.py index d3182e32d2..f294cc3a5d 100644 --- a/api/s3_analytics_provider.py +++ b/api/s3_analytics_provider.py @@ -151,7 +151,10 @@ def _create_event_object( "patrons_in_hold_queue": license_pool.patrons_in_hold_queue if license_pool else None, - "self_hosted": license_pool.self_hosted if license_pool else None, + # TODO: We no longer support self-hosted books, so this should always be False. + # this value is still included in the response for backwards compatibility, + # but should be removed in a future release. + "self_hosted": False, "title": work.title if work else None, "author": work.author if work else None, "series": work.series if work else None, diff --git a/core/lane.py b/core/lane.py index 89b2df9ffc..ff57db7bd5 100644 --- a/core/lane.py +++ b/core/lane.py @@ -812,7 +812,6 @@ def modify_database_query(self, _db, qu): available_now = or_( LicensePool.open_access == True, - LicensePool.self_hosted == True, LicensePool.unlimited_access, LicensePool.licenses_available > 0, ) @@ -822,13 +821,10 @@ def modify_database_query(self, _db, qu): elif self.availability == self.AVAILABLE_ALL: availability_clause = or_( LicensePool.open_access == True, - LicensePool.self_hosted == True, LicensePool.licenses_owned > 0, LicensePool.unlimited_access, ) elif self.availability == self.AVAILABLE_OPEN_ACCESS: - # TODO: self-hosted content could be allowed here - # depending on what exactly the wording is. availability_clause = LicensePool.open_access == True elif self.availability == self.AVAILABLE_NOT_NOW: # The book must be licensed but currently unavailable. 
diff --git a/core/model/collection.py b/core/model/collection.py index efdb80da37..df102b004d 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -892,7 +892,6 @@ def restrict_to_ready_deliverable_works( LicensePool.licenses_owned > 0, LicensePool.open_access, LicensePool.unlimited_access, - LicensePool.self_hosted, ) ) @@ -906,7 +905,6 @@ def restrict_to_ready_deliverable_works( or_( LicensePool.licenses_available > 0, LicensePool.open_access, - LicensePool.self_hosted, LicensePool.unlimited_access, ) ) diff --git a/core/model/configuration.py b/core/model/configuration.py index 96df379648..b5713b1e65 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -181,7 +181,6 @@ class ExternalIntegration(Base): FEEDBOOKS = DataSourceConstants.FEEDBOOKS ODL = "ODL" ODL2 = "ODL 2.0" - LCP = DataSourceConstants.LCP PROQUEST = DataSourceConstants.PROQUEST # These protocols were used on the Content Server when mirroring diff --git a/core/model/constants.py b/core/model/constants.py index 6590df0533..230b7f3944 100644 --- a/core/model/constants.py +++ b/core/model/constants.py @@ -40,7 +40,6 @@ class DataSourceConstants: FEEDBOOKS = "FeedBooks" BIBBLIO = "Bibblio" ENKI = "Enki" - LCP = "LCP" PROQUEST = "ProQuest" DEPRECATED_NAMES = {"3M": BIBLIOTHECA} diff --git a/core/model/licensing.py b/core/model/licensing.py index 6ef35eaaff..788fe095af 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -260,9 +260,6 @@ class LicensePool(Base): licenses_reserved = Column(Integer, default=0) patrons_in_hold_queue = Column(Integer, default=0) - # Set to True for collections imported using MirrorUploaded - self_hosted = Column(Boolean, index=True, nullable=False, default=False) - # This lets us cache the work of figuring out the best open access # link for this LicensePool. _open_access_download_url = Column("open_access_download_url", Unicode) diff --git a/core/model/listeners.py b/core/model/listeners.py index b3e4e5e7ed..2b46cfca7a 100644 --- a/core/model/listeners.py +++ b/core/model/listeners.py @@ -162,7 +162,6 @@ def licensepool_collection_change(target, value, oldvalue, initiator): @event.listens_for(LicensePool.open_access, "set") -@event.listens_for(LicensePool.self_hosted, "set") def licensepool_storage_status_change(target, value, oldvalue, initiator): """A Work may need to have its search document re-indexed if one of its LicensePools changes its open-access status. diff --git a/core/model/work.py b/core/model/work.py index 4621d02932..3dd8f8edef 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -1216,7 +1216,7 @@ def active_license_pool(self, library: Library | None = None) -> LicensePool | N # We have an unlimited source for this book. # There's no need to keep looking. 
break - elif p.unlimited_access or p.self_hosted: + elif p.unlimited_access: active_license_pool = p elif ( edition and edition.title and p.licenses_owned and p.licenses_owned > 0 @@ -1693,24 +1693,15 @@ def _set_value(parent, key, target): if doc.license_pools: for item in doc.license_pools: if not ( - item.open_access - or item.unlimited_access - or item.self_hosted - or item.licenses_owned > 0 + item.open_access or item.unlimited_access or item.licenses_owned > 0 ): continue lc: dict = {} _set_value(item, "licensepools", lc) # lc["availability_time"] = getattr(item, "availability_time").timestamp() - lc["available"] = ( - item.unlimited_access - or item.self_hosted - or item.licenses_available > 0 - ) - lc["licensed"] = ( - item.unlimited_access or item.self_hosted or item.licenses_owned > 0 - ) + lc["available"] = item.unlimited_access or item.licenses_available > 0 + lc["licensed"] = item.unlimited_access or item.licenses_owned > 0 if doc.presentation_edition: lc["medium"] = doc.presentation_edition.medium lc["licensepool_id"] = item.id @@ -1895,7 +1886,6 @@ def explicit_bool(label, t): "available", or_( LicensePool.unlimited_access, - LicensePool.self_hosted, LicensePool.licenses_available > 0, ), ), @@ -1903,7 +1893,6 @@ def explicit_bool(label, t): "licensed", or_( LicensePool.unlimited_access, - LicensePool.self_hosted, LicensePool.licenses_owned > 0, ), ), @@ -1922,7 +1911,6 @@ def explicit_bool(label, t): or_( LicensePool.open_access, LicensePool.unlimited_access, - LicensePool.self_hosted, LicensePool.licenses_owned > 0, ), ) diff --git a/core/opds.py b/core/opds.py index 962eaa2c8d..942bf29114 100644 --- a/core/opds.py +++ b/core/opds.py @@ -1794,7 +1794,6 @@ def license_tags(cls, license_pool, loan, hold): elif ( license_pool.open_access or license_pool.unlimited_access - or license_pool.self_hosted or (license_pool.licenses_available > 0 and license_pool.licenses_owned > 0) ): status = "available" @@ -1811,11 +1810,7 @@ def license_tags(cls, license_pool, loan, hold): tags.append(availability_tag) # Open-access pools do not need to display or . - if ( - license_pool.open_access - or license_pool.unlimited_access - or license_pool.self_hosted - ): + if license_pool.open_access or license_pool.unlimited_access: return tags holds_kw = dict() diff --git a/docker/Dockerfile.baseimage b/docker/Dockerfile.baseimage index 6fe06a4d69..7353686365 100644 --- a/docker/Dockerfile.baseimage +++ b/docker/Dockerfile.baseimage @@ -3,12 +3,6 @@ # image is a long process, and we don't want to wait for it to build every time # we push a change to the code base. -############################################################################### -# This is a builder image that is used to build the lcpencrypt binary. -FROM golang:1.17 AS lcp-builder - -RUN go get -v github.com/readium/readium-lcp-server/lcpencrypt - ############################################################################### # This is the main base image build. It is based on phusion/baseimage, which is # a minimal Ubuntu image. Eventually I'd like to switch to using the official @@ -19,9 +13,6 @@ RUN go get -v github.com/readium/readium-lcp-server/lcpencrypt # https://github.com/phusion/baseimage-docker FROM phusion/baseimage:focal-1.2.0 As baseimage -# Copy LCP binary from builder into image. 
-COPY --from=lcp-builder /go/bin/lcpencrypt /go/bin/lcpencrypt - # Make sure base system is up to date RUN apt-get update && \ apt-get upgrade -y --no-install-recommends -o Dpkg::Options::="--force-confold" && \ diff --git a/tests/api/admin/controller/test_admin_search_controller.py b/tests/api/admin/controller/test_admin_search_controller.py index c13048c7da..6d34f0d19e 100644 --- a/tests/api/admin/controller/test_admin_search_controller.py +++ b/tests/api/admin/controller/test_admin_search_controller.py @@ -143,16 +143,3 @@ def test_different_license_types(self, admin_search_fixture: AdminSearchFixture) ) assert "Horror" in response["genres"] assert "Spanish" in response["languages"] - - # Same goes for self hosted titles - pool.open_access = False - pool.self_hosted = True - with admin_search_fixture.admin_ctrl_fixture.request_context_with_library_and_admin( - "/", - library=admin_search_fixture.admin_ctrl_fixture.ctrl.db.default_library(), - ): - response = ( - admin_search_fixture.manager.admin_search_controller.search_field_values() - ) - assert "Horror" in response["genres"] - assert "Spanish" in response["languages"] diff --git a/tests/api/lcp/__init__.py b/tests/api/lcp/__init__.py deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/tests/api/lcp/lcp_strings.py b/tests/api/lcp/lcp_strings.py deleted file mode 100644 index 25bb83e4f0..0000000000 --- a/tests/api/lcp/lcp_strings.py +++ /dev/null @@ -1,138 +0,0 @@ -EXISTING_BOOK_FILE_PATH = "/books/ebook.epub" -NOT_EXISTING_BOOK_FILE_PATH = "/books/notexistingbook.epub" - -BOOK_IDENTIFIER = "EBOOK" - -CONTENT_ENCRYPTION_KEY = "+RulyN2G8MfAahNEO/Xz0TwBT5xMzvbFFHqqWGPrO3M=" -PROTECTED_CONTENT_LOCATION = ( - "/opt/readium/files/encrypted/1f162bc2-be6f-42a9-8153-96d675418ff1.epub" -) -PROTECTED_CONTENT_DISPOSITION = "1f162bc2-be6f-42a9-8153-96d675418ff1.epub" -PROTECTED_CONTENT_TYPE = "application/epub+zip" -PROTECTED_CONTENT_LENGTH = 798385 -PROTECTED_CONTENT_SHA256 = ( - "e058281cbc11bae29451e5e2c8003efa1164c3f6dde6dcc003c8bb79e2acb88f" -) - - -LCPENCRYPT_NOT_EXISTING_DIRECTORY_RESULT = """Error opening input file, for more information type 'lcpencrypt -help' ; level 30 -open {}: no such file or directory -""".format( - NOT_EXISTING_BOOK_FILE_PATH -) - -LCPENCRYPT_FAILED_ENCRYPTION_RESULT = """{{ - "content-id": "{0}", - "content-encryption-key": null, - "protected-content-location": "{1}", - "protected-content-length": null, - "protected-content-sha256": null, - "protected-content-disposition": "{2}" -}} -Encryption was successful -""".format( - BOOK_IDENTIFIER, PROTECTED_CONTENT_LOCATION, NOT_EXISTING_BOOK_FILE_PATH -) - -LCPENCRYPT_SUCCESSFUL_ENCRYPTION_RESULT = """{{ - "content-id": "{0}", - "content-encryption-key": "{1}", - "protected-content-location": "{2}", - "protected-content-length": {3}, - "protected-content-sha256": "{4}", - "protected-content-disposition": "{5}", - "protected-content-type": "{6}" -}} -Encryption was successful -""".format( - BOOK_IDENTIFIER, - CONTENT_ENCRYPTION_KEY, - PROTECTED_CONTENT_LOCATION, - PROTECTED_CONTENT_LENGTH, - PROTECTED_CONTENT_SHA256, - PROTECTED_CONTENT_DISPOSITION, - PROTECTED_CONTENT_TYPE, -) - -LCPENCRYPT_FAILED_LCPSERVER_NOTIFICATION = """Error notifying the License Server; level 60 -lcp server error 401""" - -LCPENCRYPT_SUCCESSFUL_NOTIFICATION_RESULT = """License Server was notified -{{ - "content-id": "{0}", - "content-encryption-key": "{1}", - "protected-content-location": "{2}", - "protected-content-length": {3}, - "protected-content-sha256": "{4}", - 
"protected-content-disposition": "{5}", - "protected-content-type": "{6}" -}} -Encryption was successful -""".format( - BOOK_IDENTIFIER, - CONTENT_ENCRYPTION_KEY, - PROTECTED_CONTENT_LOCATION, - PROTECTED_CONTENT_LENGTH, - PROTECTED_CONTENT_SHA256, - PROTECTED_CONTENT_DISPOSITION, - PROTECTED_CONTENT_TYPE, -) - - -LCPSERVER_LICENSE = """ -{ - "provider": "http://circulation.manager", - "id": "e99be177-4902-426a-9b96-0872ae877e2f", - "issued": "2020-08-18T15:04:39Z", - "encryption": { - "profile": "http://readium.org/lcp/basic-profile", - "content_key": { - "algorithm": "http://www.w3.org/2001/04/xmlenc#aes256-cbc", - "encrypted_value": "rYjD9ijFELcraQvdeChvvI21ceHwF3XXN6e4tQpoCbDnnekb9UeGZVlocqANwJ28S0QnJPQk0EnDD6KEIS4dzw==" - }, - "user_key": { - "algorithm": "http://www.w3.org/2001/04/xmlenc#sha256", - "text_hint": "Not very helpful hint", - "key_check": "zf2gU5H8+JIYVbJB2AyotuAq+Fc6xQo85bkhqtWqIU4EVzewwv6HdHgUXvRZB+zp1yZdCTlQvbhA4SQv5oydCQ==" - } - }, - "links": [{ - "rel": "hint", - "href": "http://testfrontend:8991/static/hint.html" - }, { - "rel": "publication", - "href": "http://localhost:9000/books/9780231543973", - "type": "application/pdf+lcp", - "title": "9780231543973.lcpdf", - "length": 1703749, - "hash": "6657273fe78fb29472a0027c08254f57e58b61fe435c30978c00aacd55247bfd" - }, { - "rel": "status", - "href": "http://lsdserver:8990/licenses/e99be177-4902-426a-9b96-0872ae877e2f/status", - "type": "application/vnd.readium.license.status.v1.0+json" - }], - "user": { - "id": "1" - }, - "rights": { - "print": 10, - "copy": 2048, - "start": "2020-08-18T15:04:38Z", - "end": "2020-09-08T15:04:38Z" - }, - "signature": { - "certificate": "MIIFpTCCA42gAwIBAgIBATANBgkqhkiG9w0BAQsFADBnMQswCQYDVQQGEwJGUjEOMAwGA1UEBxMFUGFyaXMxDzANBgNVBAoTBkVEUkxhYjESMBAGA1UECxMJTENQIFRlc3RzMSMwIQYDVQQDExpFRFJMYWIgUmVhZGl1bSBMQ1AgdGVzdCBDQTAeFw0xNjAzMjUwMzM3MDBaFw0yNjAzMjMwNzM3MDBaMIGQMQswCQYDVQQGEwJGUjEOMAwGA1UEBxMFUGFyaXMxDzANBgNVBAoTBkVEUkxhYjESMBAGA1UECxMJTENQIFRlc3RzMSIwIAYDVQQDExlUZXN0IHByb3ZpZGVyIGNlcnRpZmljYXRlMSgwJgYJKoZIhvcNAQkBFhlsYXVyZW50LmxlbWV1ckBlZHJsYWIub3JnMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAq/gFXdvKb+EOzsEkHcoSOcPQmNzivzf+9NOJcxWi1/BwuxqAAPv+4LKoLz89U1xx5TE1swL11BsEkIdVYrjl1RiYRa8YV4bb4xyMTm8lm39P16H1fG7Ep8yyoVuN6LT3WT2xHGp2jYU8I2nW78cyYApAWAuiMc3epeIOxC2mKgf1pGnaX9j5l/Rx8hhxULqoHIHpR8e1eVRC7tgAz4Oy5qeLxGoL4S+GK/11eRlDO37whAWaMRbPnJDqqi8Z0Beovf6jmdoUTJdcPZZ9kFdtPsWjPNNHDldPuJBtCd7lupc0K4pClJSqtJKyxs05Yeb1j7kbs/i3grdlUcxz0zOaPN1YzrzOO7GLEWUnIe+LwVXAeUseHedOexITyDQXXCqMoQw/BC6ApGzR0FynC6ojq98tStYGJAGbKBN/9p20CvYf4/hmPU3fFkImWguPIoeJT//0rz+nSynykeEVtORRIcdyOnX2rL03xxBW7qlTlUXOfQk5oLIWXBW9Z2Q63MPWi8jQhSI0jC12iEqCT54xKRHNWKr04at9pJL85M0bDCbBH/jJ+AIbVx02ewtXcWgWTgK9vgSPN5kRCwIGaV9PMS193KHfNpGqV45EKrfP8U2nvNDeyqLqAN5847ABSW7UmA5Kj/x5uGxIWu9MUKjZlT0FpepswFvMMo1InLHANMcCAwEAAaMyMDAwDAYDVR0TAQH/BAIwADALBgNVHQ8EBAMCBaAwEwYDVR0lBAwwCgYIKwYBBQUHAwEwDQYJKoZIhvcNAQELBQADggIBAEGAqzHsCbrfQwlWas3q66FG/xbiOYQxpngA4CZWKQzJJDyOFgWEihW+H6NlSIH8076srpIZByjEGXZfOku4NH4DGNOj6jQ9mEfEwbrvCoEVHQf5YXladXpKqZgEB2FKeJVjC7yplelBtjBpSo23zhG/o3/Bj7zRySL6gUCewn7z/DkxM6AshDE4HKQxjxp7stpESev+0VTL813WXvwzmucr94H1VPrasFyVzQHj4Ib+Id1OAmgfzst0vSZyX6bjAuiN9yrs7wze5cAYTaswWr7GAnAZ/r1Z3PiDp50qaGRhHqJ+lRAhihpFP+ZjsYWRqnxZnDzJkJ6RZAHi2a3VN8x5WhOUMTf3JZcFVheDmA4SaEjAZAHU8zUxx1Fstjc8GJcjTwWxCsVM2aREBKXAYDhPTVLRKt6PyQxB0GxjDZZSvGI9uXn6S5wvjuE4T2TUwbJeGHqJr4FNpXVQ2XNww+sV2QSiAwrlORm8HNXqavj4rqz1PkUySXJ6b7zbjZoiACq4C7zb70tRYDyCfLTYtaTL3UK2Sa9ePSl0Fe6QfcqlGjalrqOo4GI6oqbAIkIXocHHksbLx0mIMSEWQOax+DqXhsl8tNGVwa5EiUSy83Sc0LyYXoWA35q8dugbkeNn
Y94rNG/hYKeci1VHhyg4rqxEeVwfBx121JqQSs+hHGKt", - "value": "pbfPRtb4oDT+1Q8nVrZuFrP/uCFqDG+/+jC3pUJfp+iLU+cBVWNCmciADVuq25UkpNOdiTAre8Xjglz1WVV+2AZjiLEaKjQZN0kjYLFjxSC67vUcHc6g5KpAQQTHSbjed5LAjShJWeVkIGQxQFP1a1o+cky8y1tzzRWoZZjCQHTj2ob621cAYgw39z2mj+oKm/vPIYbCrIlahSvjBMCOkWTOoRNZIuqnapRUv25OB9JQeqJzvotTOQvoxZpFg5q3EEmkZAIW55u6XBRaP9CvIAlDuCzevOVT1CojeyVPlP2nWs8b9oBp77S/SYEK0ZYMWMQ0S4LnAB8CNHdGEmF4+jvhrAAOgwpsiMRH0eMQAGZnzPUSKYIr/RqSd7Mp53nFn4a18dGcBgRxipCnVPafU+B7HwWcvkYBu4idlN3tFH1fjPl18yz0qHa8+RlTIyyw73CGQ8SUAY87BLO8tmBKihP+FePqnPX1Fbp6MprI6K4/GkWZoOe3n1oauVLIe7T0CRsA5rar2loUlIJsfESDj5tFnSh4UOeHA0ewHrzDS2qtdFL7sREZ/CnlDJPr0wuZB+uAyECrWe5FuQpEiSP2vxi9ROvTeZuUhphVghFPvBwunlL3AB/6GXkbnlKSJUAb3wiRNWk3r0ilVu9ORSsdq00IzShHGyy8DMVP+5dXSU4=", - "algorithm": "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256" - } -} -""" - -LCPSERVER_URL = "http://localhost:8989" -LCPSERVER_USER = "lcp" -LCPSERVER_PASSWORD = "secretpassword" -LCPSERVER_INPUT_DIRECTORY = "/opt/readium/encrypted" - -CONTENT_ID = "1" -TEXT_HINT = "Not very helpful hint" -PROVIDER_NAME = "http://circulation.manager" diff --git a/tests/api/lcp/test_collection.py b/tests/api/lcp/test_collection.py deleted file mode 100644 index 2aea735dc2..0000000000 --- a/tests/api/lcp/test_collection.py +++ /dev/null @@ -1,355 +0,0 @@ -import datetime -import json -from unittest.mock import MagicMock, create_autospec, patch - -import pytest -from freezegun import freeze_time - -from api.lcp.collection import LCPAPI, LCPFulfilmentInfo -from api.lcp.server import LCPServer, LCPServerConstants -from core.model import DataSource, ExternalIntegration -from core.model.configuration import HasExternalIntegration -from core.util.datetime_helpers import utc_now -from tests.api.lcp import lcp_strings -from tests.fixtures.database import DatabaseTransactionFixture - - -class LCPAPIFixture: - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - self.lcp_collection = self.db.collection(protocol=ExternalIntegration.LCP) - self.integration = self.lcp_collection.external_integration - - integration_association = create_autospec(spec=HasExternalIntegration) - integration_association.external_integration = MagicMock( - return_value=self.integration - ) - - -@pytest.fixture(scope="function") -def lcp_api_fixture(db: DatabaseTransactionFixture) -> LCPAPIFixture: - return LCPAPIFixture(db) - - -class TestLCPAPI: - @freeze_time("2020-01-01 00:00:00") - def test_checkout_without_existing_loan(self, lcp_api_fixture): - # Arrange - lcp_api = LCPAPI(lcp_api_fixture.db.session, lcp_api_fixture.lcp_collection) - patron = lcp_api_fixture.db.patron() - days = lcp_api_fixture.lcp_collection.default_loan_period(patron.library) - start_date = utc_now() - end_date = start_date + datetime.timedelta(days=days) - data_source = DataSource.lookup( - lcp_api_fixture.db.session, DataSource.LCP, autocreate=True - ) - data_source_name = data_source.name - edition = lcp_api_fixture.db.edition( - data_source_name=data_source_name, identifier_id=lcp_strings.CONTENT_ID - ) - license_pool = lcp_api_fixture.db.licensepool( - edition=edition, - data_source_name=data_source_name, - collection=lcp_api_fixture.lcp_collection, - ) - lcp_license = json.loads(lcp_strings.LCPSERVER_LICENSE) - lcp_server_mock = create_autospec(spec=LCPServer) - lcp_server_mock.generate_license = MagicMock(return_value=lcp_license) - - configuration = lcp_api_fixture.lcp_collection.integration_configuration - - with patch("api.lcp.collection.LCPServer") as lcp_server_constructor: - 
lcp_server_constructor.return_value = lcp_server_mock - - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_url", lcp_strings.LCPSERVER_URL - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_user", lcp_strings.LCPSERVER_USER - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_password", lcp_strings.LCPSERVER_PASSWORD - ) - DatabaseTransactionFixture.set_settings( - configuration, - "lcpserver_input_directory", - lcp_strings.LCPSERVER_INPUT_DIRECTORY, - ) - DatabaseTransactionFixture.set_settings( - configuration, "provider_name", lcp_strings.PROVIDER_NAME - ) - DatabaseTransactionFixture.set_settings( - configuration, "passphrase_hint", lcp_strings.TEXT_HINT - ) - DatabaseTransactionFixture.set_settings( - configuration, - "encryption_algorithm", - LCPServerConstants.DEFAULT_ENCRYPTION_ALGORITHM, - ) - - # Act - loan = lcp_api.checkout(patron, "pin", license_pool, "internal format") - - # Assert - assert loan.collection_id == lcp_api_fixture.lcp_collection.id - assert ( - loan.collection(lcp_api_fixture.db.session) - == lcp_api_fixture.lcp_collection - ) - assert loan.license_pool(lcp_api_fixture.db.session) == license_pool - assert loan.data_source_name == data_source_name - assert loan.identifier_type == license_pool.identifier.type - assert loan.external_identifier == lcp_license["id"] - assert loan.start_date == start_date - assert loan.end_date == end_date - - lcp_server_mock.generate_license.assert_called_once_with( - lcp_api_fixture.db.session, - lcp_strings.CONTENT_ID, - patron, - start_date, - end_date, - ) - - @freeze_time("2020-01-01 00:00:00") - def test_checkout_with_existing_loan(self, lcp_api_fixture): - # Arrange - lcp_api = LCPAPI(lcp_api_fixture.db.session, lcp_api_fixture.lcp_collection) - patron = lcp_api_fixture.db.patron() - days = lcp_api_fixture.lcp_collection.default_loan_period(patron.library) - start_date = utc_now() - end_date = start_date + datetime.timedelta(days=days) - data_source = DataSource.lookup( - lcp_api_fixture.db.session, DataSource.LCP, autocreate=True - ) - data_source_name = data_source.name - edition = lcp_api_fixture.db.edition( - data_source_name=data_source_name, identifier_id=lcp_strings.CONTENT_ID - ) - license_pool = lcp_api_fixture.db.licensepool( - edition=edition, - data_source_name=data_source_name, - collection=lcp_api_fixture.lcp_collection, - ) - lcp_license = json.loads(lcp_strings.LCPSERVER_LICENSE) - lcp_server_mock = create_autospec(spec=LCPServer) - lcp_server_mock.get_license = MagicMock(return_value=lcp_license) - loan_identifier = "e99be177-4902-426a-9b96-0872ae877e2f" - - license_pool.loan_to(patron, external_identifier=loan_identifier) - - configuration = lcp_api_fixture.lcp_collection.integration_configuration - with patch("api.lcp.collection.LCPServer") as lcp_server_constructor: - lcp_server_constructor.return_value = lcp_server_mock - - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_url", lcp_strings.LCPSERVER_URL - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_user", lcp_strings.LCPSERVER_USER - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_password", lcp_strings.LCPSERVER_PASSWORD - ) - DatabaseTransactionFixture.set_settings( - configuration, - "lcpserver_input_directory", - lcp_strings.LCPSERVER_INPUT_DIRECTORY, - ) - DatabaseTransactionFixture.set_settings( - configuration, "provider_name", lcp_strings.PROVIDER_NAME - ) - DatabaseTransactionFixture.set_settings( - 
configuration, "passphrase_hint", lcp_strings.TEXT_HINT - ) - DatabaseTransactionFixture.set_settings( - configuration, - "encryption_algorithm", - LCPServerConstants.DEFAULT_ENCRYPTION_ALGORITHM, - ) - - # Act - loan = lcp_api.checkout(patron, "pin", license_pool, "internal format") - - # Assert - assert loan.collection_id == lcp_api_fixture.lcp_collection.id - assert ( - loan.collection(lcp_api_fixture.db.session) - == lcp_api_fixture.lcp_collection - ) - assert loan.license_pool(lcp_api_fixture.db.session) == license_pool - assert loan.data_source_name == data_source_name - assert loan.identifier_type == license_pool.identifier.type - assert loan.external_identifier == loan_identifier - assert loan.start_date == start_date - assert loan.end_date == end_date - - lcp_server_mock.get_license.assert_called_once_with( - lcp_api_fixture.db.session, loan_identifier, patron - ) - - @freeze_time("2020-01-01 00:00:00") - def test_fulfil(self, lcp_api_fixture): - # Arrange - lcp_api = LCPAPI(lcp_api_fixture.db.session, lcp_api_fixture.lcp_collection) - patron = lcp_api_fixture.db.patron() - days = lcp_api_fixture.lcp_collection.default_loan_period(patron.library) - today = utc_now() - expires = today + datetime.timedelta(days=days) - data_source = DataSource.lookup( - lcp_api_fixture.db.session, DataSource.LCP, autocreate=True - ) - data_source_name = data_source.name - license_pool = lcp_api_fixture.db.licensepool( - edition=None, - data_source_name=data_source_name, - collection=lcp_api_fixture.lcp_collection, - ) - lcp_license = json.loads(lcp_strings.LCPSERVER_LICENSE) - lcp_server_mock = create_autospec(spec=LCPServer) - lcp_server_mock.get_license = MagicMock(return_value=lcp_license) - - configuration = lcp_api_fixture.lcp_collection.integration_configuration - with patch("api.lcp.collection.LCPServer") as lcp_server_constructor: - lcp_server_constructor.return_value = lcp_server_mock - - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_url", lcp_strings.LCPSERVER_URL - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_user", lcp_strings.LCPSERVER_USER - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_password", lcp_strings.LCPSERVER_PASSWORD - ) - DatabaseTransactionFixture.set_settings( - configuration, - "lcpserver_input_directory", - lcp_strings.LCPSERVER_INPUT_DIRECTORY, - ) - - DatabaseTransactionFixture.set_settings( - configuration, "provider_name", lcp_strings.PROVIDER_NAME - ) - DatabaseTransactionFixture.set_settings( - configuration, "passphrase_hint", lcp_strings.TEXT_HINT - ) - DatabaseTransactionFixture.set_settings( - configuration, - "encryption_algorithm", - LCPServerConstants.DEFAULT_ENCRYPTION_ALGORITHM, - ) - - # Act - license_pool.loan_to( - patron, - start=today, - end=expires, - external_identifier=lcp_license["id"], - ) - fulfilment_info = lcp_api.fulfill( - patron, "pin", license_pool, "internal format" - ) - - # Assert - assert isinstance(fulfilment_info, LCPFulfilmentInfo) == True - assert fulfilment_info.collection_id == lcp_api_fixture.lcp_collection.id - assert ( - fulfilment_info.collection(lcp_api_fixture.db.session) - == lcp_api_fixture.lcp_collection - ) - assert ( - fulfilment_info.license_pool(lcp_api_fixture.db.session) == license_pool - ) - assert fulfilment_info.data_source_name == data_source_name - assert fulfilment_info.identifier_type == license_pool.identifier.type - - lcp_server_mock.get_license.assert_called_once_with( - lcp_api_fixture.db.session, lcp_license["id"], patron - ) 
- - def test_patron_activity_returns_correct_result(self, lcp_api_fixture): - # Arrange - lcp_api = LCPAPI(lcp_api_fixture.db.session, lcp_api_fixture.lcp_collection) - - # 1. Correct loan - patron = lcp_api_fixture.db.patron() - days = lcp_api_fixture.lcp_collection.default_loan_period(patron.library) - today = utc_now() - expires = today + datetime.timedelta(days=days) - data_source = DataSource.lookup( - lcp_api_fixture.db.session, DataSource.LCP, autocreate=True - ) - data_source_name = data_source.name - external_identifier = "1" - license_pool = lcp_api_fixture.db.licensepool( - edition=None, - data_source_name=data_source_name, - collection=lcp_api_fixture.lcp_collection, - ) - license_pool.loan_to( - patron, start=today, end=expires, external_identifier=external_identifier - ) - - # 2. Loan from a different collection - other_collection = lcp_api_fixture.db.collection( - protocol=ExternalIntegration.LCP - ) - other_external_identifier = "2" - other_license_pool = lcp_api_fixture.db.licensepool( - edition=None, data_source_name=data_source_name, collection=other_collection - ) - other_license_pool.loan_to( - patron, - start=today, - end=expires, - external_identifier=other_external_identifier, - ) - - # 3. Other patron's loan - other_patron = lcp_api_fixture.db.patron() - other_license_pool = lcp_api_fixture.db.licensepool( - edition=None, data_source_name=data_source_name, collection=other_collection - ) - other_license_pool.loan_to(other_patron, start=today, end=expires) - - # 4. Expired loan - other_license_pool = lcp_api_fixture.db.licensepool( - edition=None, - data_source_name=data_source_name, - collection=lcp_api_fixture.lcp_collection, - ) - other_license_pool.loan_to( - patron, start=today, end=today - datetime.timedelta(days=1) - ) - - # 5. 
Not started loan - other_license_pool = lcp_api_fixture.db.licensepool( - edition=None, - data_source_name=data_source_name, - collection=lcp_api_fixture.lcp_collection, - ) - other_license_pool.loan_to( - patron, - start=today + datetime.timedelta(days=1), - end=today + datetime.timedelta(days=2), - ) - - # Act - loans = lcp_api.patron_activity(patron, "pin") - - # Assert - assert len(loans) == 1 - - loan = loans[0] - assert loan.collection_id == lcp_api_fixture.lcp_collection.id - assert ( - loan.collection(lcp_api_fixture.db.session) - == lcp_api_fixture.lcp_collection - ) - assert loan.license_pool(lcp_api_fixture.db.session) == license_pool - assert loan.data_source_name == data_source_name - assert loan.identifier_type == license_pool.identifier.type - assert loan.external_identifier == external_identifier - assert loan.start_date == today - assert loan.end_date == expires diff --git a/tests/api/lcp/test_controller.py b/tests/api/lcp/test_controller.py deleted file mode 100644 index 6bf2b7d3b6..0000000000 --- a/tests/api/lcp/test_controller.py +++ /dev/null @@ -1,155 +0,0 @@ -import json -from unittest.mock import MagicMock, call, create_autospec, patch - -from flask import request - -from api.lcp.collection import LCPAPI -from api.lcp.controller import LCPController -from api.lcp.factory import LCPServerFactory -from api.lcp.server import LCPServer -from core.external_search import MockExternalSearchIndex -from core.lcp.credential import LCPCredentialFactory, LCPUnhashedPassphrase -from core.model import ExternalIntegration -from core.model.library import Library -from tests.api.lcp import lcp_strings -from tests.api.mockapi.circulation import MockCirculationAPI, MockCirculationManager -from tests.fixtures.api_controller import ControllerFixture - -manager_api_cls = dict( - circulationapi_cls=MockCirculationAPI, - externalsearch_cls=MockExternalSearchIndex, -) - - -class TestLCPController: - def test_get_lcp_passphrase_returns_the_same_passphrase_for_authenticated_patron( - self, controller_fixture: ControllerFixture - ): - # Arrange - expected_passphrase = LCPUnhashedPassphrase( - "1cde00b4-bea9-48fc-819b-bd17c578a22c" - ) - - with patch( - "api.lcp.controller.LCPCredentialFactory" - ) as credential_factory_constructor_mock: - credential_factory = create_autospec(spec=LCPCredentialFactory) - credential_factory.get_patron_passphrase = MagicMock( - return_value=expected_passphrase - ) - credential_factory_constructor_mock.return_value = credential_factory - - patron = controller_fixture.default_patron - manager = MockCirculationManager(controller_fixture.db.session) - controller = LCPController(manager) - controller.authenticated_patron_from_request = MagicMock( # type: ignore - return_value=patron - ) - - url = "http://circulationmanager.org/lcp/hint" - - with controller_fixture.app.test_request_context(url): - request.library: Library = controller_fixture.db.default_library() # type: ignore - - # Act - result1 = controller.get_lcp_passphrase() - result2 = controller.get_lcp_passphrase() - - # Assert - for result in [result1, result2]: - assert result.status_code == 200 - assert ("passphrase" in result.json) == True - assert result.json["passphrase"] == expected_passphrase.text - - credential_factory.get_patron_passphrase.assert_has_calls( - [ - call(controller_fixture.db.session, patron), - call(controller_fixture.db.session, patron), - ] - ) - - def test_get_lcp_license_returns_problem_detail_when_collection_is_missing( - self, controller_fixture - ): - # Arrange - 
missing_collection_name = "missing-collection" - license_id = "e99be177-4902-426a-9b96-0872ae877e2f" - expected_license = json.loads(lcp_strings.LCPSERVER_LICENSE) - lcp_server = create_autospec(spec=LCPServer) - lcp_server.get_license = MagicMock(return_value=expected_license) - library = controller_fixture.db.default_library() - lcp_collection = controller_fixture.db.collection( - LCPAPI.NAME, ExternalIntegration.LCP - ) - library.collections.append(lcp_collection) - - with patch( - "api.lcp.controller.LCPServerFactory" - ) as lcp_server_factory_constructor_mock: - lcp_server_factory = create_autospec(spec=LCPServerFactory) - lcp_server_factory.create = MagicMock(return_value=lcp_server) - lcp_server_factory_constructor_mock.return_value = lcp_server_factory - - patron = controller_fixture.default_patron - manager = MockCirculationManager(controller_fixture.db.session) - controller = LCPController(manager) - controller.authenticated_patron_from_request = MagicMock( - return_value=patron - ) - - url = "http://circulationmanager.org/{}/licenses{}".format( - missing_collection_name, license_id - ) - - with controller_fixture.app.test_request_context(url): - request.library = controller_fixture.db.default_library() - - # Act - result = controller.get_lcp_license(missing_collection_name, license_id) - - # Assert - assert result.status_code == 404 - - def test_get_lcp_license_returns_the_same_license_for_authenticated_patron( - self, controller_fixture - ): - # Arrange - license_id = "e99be177-4902-426a-9b96-0872ae877e2f" - expected_license = json.loads(lcp_strings.LCPSERVER_LICENSE) - lcp_server = create_autospec(spec=LCPServer) - lcp_server.get_license = MagicMock(return_value=expected_license) - library = controller_fixture.db.default_library() - lcp_collection = controller_fixture.db.collection( - LCPAPI.NAME, ExternalIntegration.LCP - ) - library.collections.append(lcp_collection) - - with patch( - "api.lcp.controller.LCPServerFactory" - ) as lcp_server_factory_constructor_mock: - lcp_server_factory = create_autospec(spec=LCPServerFactory) - lcp_server_factory.create = MagicMock(return_value=lcp_server) - lcp_server_factory_constructor_mock.return_value = lcp_server_factory - - patron = controller_fixture.default_patron - manager = MockCirculationManager(controller_fixture.db.session) - controller = LCPController(manager) - controller.authenticated_patron_from_request = MagicMock( - return_value=patron - ) - - url = "http://circulationmanager.org/{}/licenses{}".format( - LCPAPI.NAME, license_id - ) - - with controller_fixture.app.test_request_context(url): - request.library = controller_fixture.db.default_library() - - # Act - result1 = controller.get_lcp_license(LCPAPI.NAME, license_id) - result2 = controller.get_lcp_license(LCPAPI.NAME, license_id) - - # Assert - for result in [result1, result2]: - assert result.status_code == 200 - assert result.json == expected_license diff --git a/tests/api/lcp/test_encrypt.py b/tests/api/lcp/test_encrypt.py deleted file mode 100644 index 684caf8ce6..0000000000 --- a/tests/api/lcp/test_encrypt.py +++ /dev/null @@ -1,156 +0,0 @@ -from unittest.mock import patch - -import pytest -from pyfakefs.fake_filesystem_unittest import Patcher - -from api.lcp.collection import LCPAPI -from api.lcp.encrypt import ( - LCPEncryptionConstants, - LCPEncryptionException, - LCPEncryptionResult, - LCPEncryptor, -) -from core.integration.goals import Goals -from core.model import Identifier -from core.model.integration import IntegrationConfiguration -from tests.api.lcp 
import lcp_strings -from tests.fixtures.database import DatabaseTransactionFixture - - -class LCPEncryptFixture: - db: DatabaseTransactionFixture - integration: IntegrationConfiguration - - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - self.integration = self.db.integration_configuration( - protocol=LCPAPI.NAME, goal=Goals.LICENSE_GOAL - ) - - -@pytest.fixture(scope="function") -def lcp_encrypt_fixture(db: DatabaseTransactionFixture) -> LCPEncryptFixture: - return LCPEncryptFixture(db) - - -class TestLCPEncryptor: - @pytest.mark.parametrize( - "_, file_path, lcpencrypt_output, expected_result, expected_exception, create_file", - [ - ( - "non_existing_directory", - lcp_strings.NOT_EXISTING_BOOK_FILE_PATH, - lcp_strings.LCPENCRYPT_NOT_EXISTING_DIRECTORY_RESULT, - None, - LCPEncryptionException( - lcp_strings.LCPENCRYPT_NOT_EXISTING_DIRECTORY_RESULT.strip() - ), - False, - ), - ( - "failed_encryption", - lcp_strings.NOT_EXISTING_BOOK_FILE_PATH, - lcp_strings.LCPENCRYPT_FAILED_ENCRYPTION_RESULT, - None, - LCPEncryptionException("Encryption failed"), - True, - ), - ( - "successful_encryption", - lcp_strings.EXISTING_BOOK_FILE_PATH, - lcp_strings.LCPENCRYPT_SUCCESSFUL_ENCRYPTION_RESULT, - LCPEncryptionResult( - content_id=lcp_strings.BOOK_IDENTIFIER, - content_encryption_key=lcp_strings.CONTENT_ENCRYPTION_KEY, - protected_content_location=lcp_strings.PROTECTED_CONTENT_LOCATION, - protected_content_disposition=lcp_strings.PROTECTED_CONTENT_DISPOSITION, - protected_content_type=lcp_strings.PROTECTED_CONTENT_TYPE, - protected_content_length=lcp_strings.PROTECTED_CONTENT_LENGTH, - protected_content_sha256=lcp_strings.PROTECTED_CONTENT_SHA256, - ), - None, - True, - ), - ( - "failed_lcp_server_notification", - lcp_strings.EXISTING_BOOK_FILE_PATH, - lcp_strings.LCPENCRYPT_FAILED_LCPSERVER_NOTIFICATION, - None, - LCPEncryptionException( - lcp_strings.LCPENCRYPT_FAILED_LCPSERVER_NOTIFICATION.strip() - ), - True, - ), - ( - "successful_lcp_server_notification", - lcp_strings.EXISTING_BOOK_FILE_PATH, - lcp_strings.LCPENCRYPT_SUCCESSFUL_NOTIFICATION_RESULT, - LCPEncryptionResult( - content_id=lcp_strings.BOOK_IDENTIFIER, - content_encryption_key=lcp_strings.CONTENT_ENCRYPTION_KEY, - protected_content_location=lcp_strings.PROTECTED_CONTENT_LOCATION, - protected_content_disposition=lcp_strings.PROTECTED_CONTENT_DISPOSITION, - protected_content_type=lcp_strings.PROTECTED_CONTENT_TYPE, - protected_content_length=lcp_strings.PROTECTED_CONTENT_LENGTH, - protected_content_sha256=lcp_strings.PROTECTED_CONTENT_SHA256, - ), - None, - True, - ), - ], - ) - def test_local_lcpencrypt( - self, - lcp_encrypt_fixture: LCPEncryptFixture, - _, - file_path, - lcpencrypt_output, - expected_result, - expected_exception, - create_file, - ): - # Arrange - # integration_owner = create_autospec(spec=HasIntegrationConfiguration) - # integration_owner.integration_configuration = MagicMock( - # return_value=lcp_encrypt_fixture.integration - # ) - configuration = lcp_encrypt_fixture.integration - encryptor = LCPEncryptor(configuration) - identifier = Identifier(identifier=lcp_strings.BOOK_IDENTIFIER) - - DatabaseTransactionFixture.set_settings( - configuration, - "lcpencrypt_location", - LCPEncryptionConstants.DEFAULT_LCPENCRYPT_LOCATION, - ) - - with Patcher() as patcher: - assert patcher.fs is not None - patcher.fs.create_file(LCPEncryptionConstants.DEFAULT_LCPENCRYPT_LOCATION) - - if create_file: - patcher.fs.create_file(file_path) - - with patch("subprocess.check_output") as subprocess_check_output_mock: - 
subprocess_check_output_mock.return_value = lcpencrypt_output - - if expected_exception: - with pytest.raises( - expected_exception.__class__ - ) as exception_metadata: - encryptor.encrypt( - lcp_encrypt_fixture.db.session, - file_path, - identifier.identifier, - ) - - # Assert - assert exception_metadata.value == expected_exception - else: - # Assert - result = encryptor.encrypt( - lcp_encrypt_fixture.db.session, - file_path, - identifier.identifier, - ) - assert result == expected_result diff --git a/tests/api/lcp/test_importer.py b/tests/api/lcp/test_importer.py deleted file mode 100644 index 3a054fc2f8..0000000000 --- a/tests/api/lcp/test_importer.py +++ /dev/null @@ -1,36 +0,0 @@ -from unittest.mock import MagicMock, create_autospec - -import sqlalchemy - -from api.lcp.encrypt import LCPEncryptionResult, LCPEncryptor -from api.lcp.importer import LCPImporter -from api.lcp.server import LCPServer - - -class TestLCPImporter: - def test_import_book(self): - # Arrange - file_path = "/opt/readium/raw_books/book.epub" - identifier = "123456789" - encrypted_content = LCPEncryptionResult( - content_id="1", - content_encryption_key="12345", - protected_content_location="/opt/readium/files/encrypted", - protected_content_disposition="encrypted_book", - protected_content_type="application/epub+zip", - protected_content_length=12345, - protected_content_sha256="12345", - ) - lcp_encryptor = create_autospec(spec=LCPEncryptor) - lcp_encryptor.encrypt = MagicMock(return_value=encrypted_content) - lcp_server = create_autospec(spec=LCPServer) - lcp_server.add_content = MagicMock() - importer = LCPImporter(lcp_encryptor, lcp_server) - db = create_autospec(spec=sqlalchemy.orm.session.Session) - - # Act - importer.import_book(db, file_path, identifier) - - # Assert - lcp_encryptor.encrypt.assert_called_once_with(db, file_path, identifier) - lcp_server.add_content.assert_called_once_with(db, encrypted_content) diff --git a/tests/api/lcp/test_mirror.py b/tests/api/lcp/test_mirror.py deleted file mode 100644 index 9f7ba84c51..0000000000 --- a/tests/api/lcp/test_mirror.py +++ /dev/null @@ -1,81 +0,0 @@ -from unittest.mock import ANY, create_autospec, patch - -import pytest - -from api.lcp.importer import LCPImporter -from api.lcp.mirror import LCPMirror -from core.model import ( - Collection, - DataSource, - ExternalIntegration, - Identifier, - Representation, -) -from core.s3 import MinIOUploaderConfiguration, S3UploaderConfiguration -from tests.fixtures.database import DatabaseTransactionFixture - - -class LCPMirrorFixture: - db: DatabaseTransactionFixture - lcp_collection: Collection - lcp_mirror: LCPMirror - - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - - settings = { - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "encrypted-books", - MinIOUploaderConfiguration.ENDPOINT_URL: "http://minio", - } - integration = self.db.external_integration( - ExternalIntegration.LCP, - goal=ExternalIntegration.STORAGE_GOAL, - settings=settings, - ) - self.lcp_collection = self.db.collection(protocol=ExternalIntegration.LCP) - self.lcp_mirror = LCPMirror(integration) - - -@pytest.fixture(scope="function") -def lcp_mirror_fixture(db: DatabaseTransactionFixture) -> LCPMirrorFixture: - return LCPMirrorFixture(db) - - -class TestLCPMirror: - def test_book_url(self, lcp_mirror_fixture: LCPMirrorFixture): - # Arrange - data_source = DataSource.lookup( - lcp_mirror_fixture.db.session, DataSource.LCP, autocreate=True - ) - identifier = Identifier(identifier="12345", type=Identifier.ISBN) - - # Act 
- result = lcp_mirror_fixture.lcp_mirror.book_url( - identifier, data_source=data_source - ) - - # Assert - assert result == "http://encrypted-books.minio/12345" - - def test_mirror_one(self, lcp_mirror_fixture: LCPMirrorFixture): - # Arrange - expected_identifier = "12345" - mirror_url = "http://encrypted-books.minio/" + expected_identifier - lcp_importer = create_autospec(spec=LCPImporter) - representation, _ = lcp_mirror_fixture.db.representation( - media_type=Representation.EPUB_MEDIA_TYPE, content="12345" - ) - - # Act - with patch("api.lcp.mirror.LCPImporter") as lcp_importer_constructor: - lcp_importer_constructor.return_value = lcp_importer - lcp_mirror_fixture.lcp_mirror.mirror_one( - representation, - mirror_to=mirror_url, - collection=lcp_mirror_fixture.lcp_collection, - ) - - # Assert - lcp_importer.import_book.assert_called_once_with( - lcp_mirror_fixture.db.session, ANY, expected_identifier - ) diff --git a/tests/api/lcp/test_server.py b/tests/api/lcp/test_server.py deleted file mode 100644 index ddbe544ef7..0000000000 --- a/tests/api/lcp/test_server.py +++ /dev/null @@ -1,319 +0,0 @@ -from __future__ import annotations - -import datetime -import json -import os -import urllib.parse -from typing import Literal -from unittest.mock import MagicMock - -import pytest -import requests_mock - -from api.lcp import utils -from api.lcp.encrypt import LCPEncryptionResult -from api.lcp.hash import HasherFactory -from api.lcp.server import LCPServer, LCPServerConstants, LCPServerSettings -from core.lcp.credential import LCPCredentialFactory, LCPUnhashedPassphrase -from core.model.collection import Collection -from core.model.configuration import ExternalIntegration -from tests.api.lcp import lcp_strings -from tests.fixtures.database import DatabaseTransactionFixture - - -class LCPServerFixture: - db: DatabaseTransactionFixture - lcp_collection: Collection - integration: ExternalIntegration - hasher_factory: HasherFactory - credential_factory: LCPCredentialFactory - lcp_server: LCPServer - - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - self.lcp_collection = self.db.collection(protocol=ExternalIntegration.LCP) - self.configuration = self.lcp_collection.integration_configuration - DatabaseTransactionFixture.set_settings( - self.configuration, "lcpserver_input_directory", "/tmp" - ) - self.hasher_factory = HasherFactory() - self.credential_factory = LCPCredentialFactory() - self.lcp_server = LCPServer( - lambda: LCPServerSettings(**self.configuration.settings_dict), - self.hasher_factory, - self.credential_factory, - ) - - -@pytest.fixture(scope="function") -def lcp_server_fixture(db: DatabaseTransactionFixture) -> LCPServerFixture: - return LCPServerFixture(db) - - -class TestLCPServer: - @pytest.mark.parametrize( - "_, input_directory", - [ - ("non_empty_input_directory", "/tmp/encrypted_books"), - ], - ) - def test_add_content( - self, - lcp_server_fixture: LCPServerFixture, - _: Literal["empty_input_directory", "non_empty_input_directory"], - input_directory: Literal["", "/tmp/encrypted_books"], - ): - # Arrange - lcp_server = LCPServer( - lambda: LCPServerSettings(**lcp_server_fixture.configuration.settings_dict), - lcp_server_fixture.hasher_factory, - lcp_server_fixture.credential_factory, - ) - encrypted_content = LCPEncryptionResult( - content_id=lcp_strings.CONTENT_ID, - content_encryption_key="12345", - protected_content_location="/opt/readium/files/encrypted", - protected_content_disposition="encrypted_book", - protected_content_type="application/epub+zip", 
- protected_content_length=12345, - protected_content_sha256="12345", - ) - expected_protected_content_disposition = os.path.join( - input_directory, encrypted_content.protected_content_disposition - ) - - configuration = lcp_server_fixture.configuration - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_url", lcp_strings.LCPSERVER_URL - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_user", lcp_strings.LCPSERVER_USER - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_password", lcp_strings.LCPSERVER_PASSWORD - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_input_directory", input_directory - ) - DatabaseTransactionFixture.set_settings( - configuration, "provider_name", lcp_strings.PROVIDER_NAME - ) - DatabaseTransactionFixture.set_settings( - configuration, "passphrase_hint", lcp_strings.TEXT_HINT - ) - DatabaseTransactionFixture.set_settings( - configuration, - "encryption_algorithm", - LCPServerConstants.DEFAULT_ENCRYPTION_ALGORITHM, - ) - - with requests_mock.Mocker() as request_mock: - url = urllib.parse.urljoin( - lcp_strings.LCPSERVER_URL, f"/contents/{lcp_strings.CONTENT_ID}" - ) - request_mock.put(url) - - # Act - lcp_server.add_content(lcp_server_fixture.db.session, encrypted_content) - - # Assert - assert request_mock.called == True - - json_request = json.loads(request_mock.last_request.text) - assert json_request["content-id"] == encrypted_content.content_id - assert ( - json_request["content-encryption-key"] - == encrypted_content.content_encryption_key - ) - assert ( - json_request["protected-content-location"] - == expected_protected_content_disposition - ) - assert ( - json_request["protected-content-disposition"] - == encrypted_content.protected_content_disposition - ) - assert ( - json_request["protected-content-type"] - == encrypted_content.protected_content_type - ) - assert ( - json_request["protected-content-length"] - == encrypted_content.protected_content_length - ) - assert ( - json_request["protected-content-sha256"] - == encrypted_content.protected_content_sha256 - ) - - @pytest.mark.parametrize( - "_, license_start, license_end, max_printable_pages, max_copiable_pages", - [ - ("none_rights", None, None, None, None), - ( - "license_start", - datetime.datetime(2020, 1, 1, 00, 00, 00), - None, - None, - None, - ), - ( - "license_end", - None, - datetime.datetime(2020, 12, 31, 23, 59, 59), - None, - None, - ), - ("max_printable_pages", None, None, 10, None), - ("max_printable_pages_empty_max_copiable_pages", None, None, 10, ""), - ("empty_max_printable_pages", None, None, "", None), - ("max_copiable_pages", None, None, None, 1024), - ("empty_max_printable_pages_max_copiable_pages", None, None, "", 1024), - ("empty_max_copiable_pages", None, None, None, ""), - ( - "dates", - datetime.datetime(2020, 1, 1, 00, 00, 00), - datetime.datetime(2020, 12, 31, 23, 59, 59), - None, - None, - ), - ( - "full_rights", - datetime.datetime(2020, 1, 1, 00, 00, 00), - datetime.datetime(2020, 12, 31, 23, 59, 59), - 10, - 1024, - ), - ], - ) - def test_generate_license( - self, - lcp_server_fixture: LCPServerFixture, - _: Literal[ - "none_rights", - "license_start", - "license_end", - "max_printable_pages", - "max_printable_pages_empty_max_copiable_pages", - "empty_max_printable_pages", - "max_copiable_pages", - "empty_max_printable_pages_max_copiable_pages", - "empty_max_copiable_pages", - "dates", - "full_rights", - ], - license_start: datetime.datetime | None, - license_end: 
datetime.datetime | None, - max_printable_pages: Literal[10, ""] | None, - max_copiable_pages: Literal["", 1024] | None, - ): - # Arrange - patron = lcp_server_fixture.db.patron() - expected_patron_id = "52a190d1-cd69-4794-9d7a-1ec50392697f" - expected_patron_passphrase = LCPUnhashedPassphrase( - "52a190d1-cd69-4794-9d7a-1ec50392697a" - ) - expected_patron_key = lcp_server_fixture.hasher_factory.create( - LCPServerConstants.DEFAULT_ENCRYPTION_ALGORITHM - ).hash(expected_patron_passphrase.text) - - configuration = lcp_server_fixture.configuration - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_url", lcp_strings.LCPSERVER_URL - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_user", lcp_strings.LCPSERVER_USER - ) - DatabaseTransactionFixture.set_settings( - configuration, "lcpserver_password", lcp_strings.LCPSERVER_PASSWORD - ) - DatabaseTransactionFixture.set_settings( - configuration, "provider_name", lcp_strings.PROVIDER_NAME - ) - DatabaseTransactionFixture.set_settings( - configuration, "passphrase_hint", lcp_strings.TEXT_HINT - ) - DatabaseTransactionFixture.set_settings( - configuration, - "encryption_algorithm", - LCPServerConstants.DEFAULT_ENCRYPTION_ALGORITHM, - ) - DatabaseTransactionFixture.set_settings( - configuration, "max_printable_pages", max_printable_pages - ) - DatabaseTransactionFixture.set_settings( - configuration, "max_copiable_pages", max_copiable_pages - ) - - lcp_server_fixture.credential_factory.get_patron_id = MagicMock( # type: ignore - return_value=expected_patron_id - ) - lcp_server_fixture.credential_factory.get_patron_passphrase = MagicMock( # type: ignore - return_value=expected_patron_passphrase - ) - - with requests_mock.Mocker() as request_mock: - url = urllib.parse.urljoin( - lcp_strings.LCPSERVER_URL, - f"/contents/{lcp_strings.CONTENT_ID}/license", - ) - request_mock.post(url, json=lcp_strings.LCPSERVER_LICENSE) - - # Act - license = lcp_server_fixture.lcp_server.generate_license( - lcp_server_fixture.db.session, - lcp_strings.CONTENT_ID, - patron, - license_start, - license_end, - ) - - # Assert - assert request_mock.called == True - assert license == lcp_strings.LCPSERVER_LICENSE - - json_request = json.loads(request_mock.last_request.text) - assert json_request["provider"] == lcp_strings.PROVIDER_NAME - assert json_request["user"]["id"] == expected_patron_id - assert ( - json_request["encryption"]["user_key"]["text_hint"] - == lcp_strings.TEXT_HINT - ) - assert ( - json_request["encryption"]["user_key"]["hex_value"] - == expected_patron_key - ) - - if license_start is not None: - assert json_request["rights"]["start"] == utils.format_datetime( - license_start - ) - if license_end is not None: - assert json_request["rights"]["end"] == utils.format_datetime( - license_end - ) - if max_printable_pages is not None and max_printable_pages != "": - assert json_request["rights"]["print"] == max_printable_pages - if max_copiable_pages is not None and max_copiable_pages != "": - assert json_request["rights"]["copy"] == max_copiable_pages - - all_rights_fields_are_empty = all( - [ - rights_field is None or rights_field == "" - for rights_field in [ - license_start, - license_end, - max_printable_pages, - max_copiable_pages, - ] - ] - ) - if all_rights_fields_are_empty: - assert ("rights" in json_request) == False - - lcp_server_fixture.credential_factory.get_patron_id.assert_called_once_with( - lcp_server_fixture.db.session, patron - ) - 
lcp_server_fixture.credential_factory.get_patron_passphrase.assert_called_once_with( - lcp_server_fixture.db.session, patron - ) diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index d3070b0771..cb42636d3d 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -172,21 +172,6 @@ def test_borrow_sends_analytics_event(self, circulation_api: CirculationAPIFixtu loan, hold, is_new = self.borrow(circulation_api) assert 3 == circulation_api.analytics.count - def test_borrowing_of_self_hosted_book_succeeds( - self, circulation_api: CirculationAPIFixture - ): - # Arrange - circulation_api.pool.self_hosted = True - - # Act - loan, hold, is_new = self.borrow(circulation_api) - - # Assert - assert True == is_new - assert circulation_api.pool == loan.license_pool - assert circulation_api.patron == loan.patron - assert hold is None - def test_borrowing_of_unlimited_access_book_succeeds( self, circulation_api: CirculationAPIFixture ): @@ -1132,14 +1117,9 @@ def yes_we_can(*args, **kwargs): result = try_to_fulfill() assert fulfillment == result - @pytest.mark.parametrize( - "open_access, self_hosted", [(True, False), (False, True), (False, False)] - ) - def test_revoke_loan( - self, circulation_api: CirculationAPIFixture, open_access, self_hosted - ): + @pytest.mark.parametrize("open_access", [True, False]) + def test_revoke_loan(self, circulation_api: CirculationAPIFixture, open_access): circulation_api.pool.open_access = open_access - circulation_api.pool.self_hosted = self_hosted circulation_api.patron.last_loan_activity_sync = utc_now() circulation_api.pool.loan_to(circulation_api.patron) @@ -1157,14 +1137,9 @@ def test_revoke_loan( assert 1 == circulation_api.analytics.count assert CirculationEvent.CM_CHECKIN == circulation_api.analytics.event_type - @pytest.mark.parametrize( - "open_access, self_hosted", [(True, False), (False, True), (False, False)] - ) - def test_release_hold( - self, circulation_api: CirculationAPIFixture, open_access, self_hosted - ): + @pytest.mark.parametrize("open_access", [True, False]) + def test_release_hold(self, circulation_api: CirculationAPIFixture, open_access): circulation_api.pool.open_access = open_access - circulation_api.pool.self_hosted = self_hosted circulation_api.patron.last_loan_activity_sync = utc_now() circulation_api.pool.on_hold_to(circulation_api.patron) diff --git a/tests/api/test_controller_base.py b/tests/api/test_controller_base.py index 8f8d4e9b8b..b80e51c4f5 100644 --- a/tests/api/test_controller_base.py +++ b/tests/api/test_controller_base.py @@ -460,7 +460,6 @@ def test_apply_borrowing_policy_succeeds_for_unlimited_access_books( ) [pool] = work.license_pools pool.open_access = False - pool.self_hosted = False pool.unlimited_access = True # Act @@ -471,31 +470,6 @@ def test_apply_borrowing_policy_succeeds_for_unlimited_access_books( # Assert assert problem is None - def test_apply_borrowing_policy_succeeds_for_self_hosted_books( - self, circulation_fixture: CirculationControllerFixture - ): - with circulation_fixture.request_context_with_library("/"): - # Arrange - patron = circulation_fixture.controller.authenticated_patron( - circulation_fixture.valid_credentials - ) - work = circulation_fixture.db.work( - with_license_pool=True, with_open_access_download=False - ) - [pool] = work.license_pools - pool.licenses_available = 0 - pool.licenses_owned = 0 - pool.open_access = False - pool.self_hosted = True - - # Act - problem = circulation_fixture.controller.apply_borrowing_policy( - 
patron, pool - ) - - # Assert - assert problem is None - def test_apply_borrowing_policy_when_holds_prohibited( self, circulation_fixture: CirculationControllerFixture, diff --git a/tests/core/models/test_collection.py b/tests/core/models/test_collection.py index c053371431..b85247f201 100644 --- a/tests/core/models/test_collection.py +++ b/tests/core/models/test_collection.py @@ -843,9 +843,7 @@ def test_restrict_to_ready_deliverable_works( ): """A partial test of restrict_to_ready_deliverable_works. - This test covers the following cases: - 1. The bit that excludes audiobooks from certain data sources. - 2. Makes sure that self-hosted books and books with unlimited access are not get filtered out that come. + This test covers the bit that excludes audiobooks from certain data sources. The other cases are tested indirectly in lane.py, but could use a more explicit test here. """ @@ -869,20 +867,6 @@ def test_restrict_to_ready_deliverable_works( ) feedbooks_audiobook.presentation_edition.medium = Edition.AUDIO_MEDIUM - DataSource.lookup(db.session, DataSource.LCP, autocreate=True) - self_hosted_lcp_book = db.work( - data_source_name=DataSource.LCP, - title="Self-hosted LCP book", - with_license_pool=True, - self_hosted=True, - ) - unlimited_access_book = db.work( - data_source_name=DataSource.LCP, - title="Self-hosted LCP book", - with_license_pool=True, - unlimited_access=True, - ) - def expect(qu, works): """Modify the query `qu` by calling restrict_to_ready_deliverable_works(), then verify that @@ -912,8 +896,6 @@ def expect(qu, works): overdrive_ebook, overdrive_audiobook, feedbooks_audiobook, - self_hosted_lcp_book, - unlimited_access_book, ], ) # Putting a data source in the list excludes its audiobooks, but @@ -924,12 +906,10 @@ def expect(qu, works): [ overdrive_ebook, feedbooks_audiobook, - self_hosted_lcp_book, - unlimited_access_book, ], ) setting.value = json.dumps([DataSource.OVERDRIVE, DataSource.FEEDBOOKS]) - expect(qu, [overdrive_ebook, self_hosted_lcp_book, unlimited_access_book]) + expect(qu, [overdrive_ebook]) def test_delete(self, example_collection_fixture: ExampleCollectionFixture): """Verify that Collection.delete will only operate on collections diff --git a/tests/core/models/test_listeners.py b/tests/core/models/test_listeners.py index 230d537c50..2fc0c6e7c8 100644 --- a/tests/core/models/test_listeners.py +++ b/tests/core/models/test_listeners.py @@ -246,10 +246,6 @@ class TestListeners: "works_when_open_access_property_changes", functools.partial(_set_property, property_name="open_access"), ), - ( - "works_when_self_hosted_property_changes", - functools.partial(_set_property, property_name="self_hosted"), - ), ], ) def test_licensepool_storage_status_change( diff --git a/tests/core/models/test_work.py b/tests/core/models/test_work.py index fdddb39d5d..f15f495d86 100644 --- a/tests/core/models/test_work.py +++ b/tests/core/models/test_work.py @@ -1444,7 +1444,6 @@ def test_unlimited_access_books_are_available_by_default( work = db.work(presentation_edition=edition) pool.open_access = False - pool.self_hosted = False pool.unlimited_access = True # Make sure all of this will show up in a database query. @@ -1459,35 +1458,6 @@ def test_unlimited_access_books_are_available_by_default( assert licensepools[0]["open_access"] == False assert licensepools[0]["available"] == True - def test_self_hosted_books_are_available_by_default( - self, db: DatabaseTransactionFixture - ): - # Set up an edition and work. 
- edition, pool = db.edition( - authors=[ - db.fresh_str(), - db.fresh_str(), - ], - with_license_pool=True, - ) - work = db.work(presentation_edition=edition) - - pool.licenses_owned = 0 - pool.licenses_available = 0 - pool.self_hosted = True - - # Make sure all of this will show up in a database query. - db.session.flush() - - search_doc = work.to_search_document() - - # Each LicensePool for the Work is listed in - # the 'licensepools' section. - licensepools = search_doc["licensepools"] - assert 1 == len(licensepools) - assert licensepools[0]["open_access"] == False - assert licensepools[0]["available"] == True - def test_target_age_string(self, db: DatabaseTransactionFixture): work = db.work() work.target_age = NumericRange(7, 8, "[]") diff --git a/tests/core/test_external_search.py b/tests/core/test_external_search.py index 93dceda06e..5ca325cc7e 100644 --- a/tests/core/test_external_search.py +++ b/tests/core/test_external_search.py @@ -662,7 +662,6 @@ def _populate_works( with_license_pool=True, collection=result.tiny_collection, ) - result.tiny_book.license_pools[0].self_hosted = True # Both collections contain 'The Adventures of Sherlock # Holmes", but each collection licenses the book through a diff --git a/tests/core/test_lane.py b/tests/core/test_lane.py index 039570c12d..10224a46d4 100644 --- a/tests/core/test_lane.py +++ b/tests/core/test_lane.py @@ -919,9 +919,6 @@ def test_modify_database_query(self, db: DatabaseTransactionFixture): # reasons why a book might or might not be 'available'. open_access = db.work(with_open_access_download=True, title="open access") open_access.quality = 1 - self_hosted = db.work( - with_license_pool=True, self_hosted=True, title="self hosted" - ) unlimited_access = db.work( with_license_pool=True, unlimited_access=True, title="unlimited access" ) @@ -949,11 +946,11 @@ def test_modify_database_query(self, db: DatabaseTransactionFixture): for availability, expect in [ ( Facets.AVAILABLE_NOW, - [open_access, available, self_hosted, unlimited_access], + [open_access, available, unlimited_access], ), ( Facets.AVAILABLE_ALL, - [open_access, available, not_available, self_hosted, unlimited_access], + [open_access, available, not_available, unlimited_access], ), (Facets.AVAILABLE_NOT_NOW, [not_available]), ]: @@ -970,7 +967,7 @@ def test_modify_database_query(self, db: DatabaseTransactionFixture): for collection, expect in [ ( Facets.COLLECTION_FULL, - [open_access, available, self_hosted, unlimited_access], + [open_access, available, unlimited_access], ), (Facets.COLLECTION_FEATURED, [open_access]), ]: diff --git a/tests/core/test_opds.py b/tests/core/test_opds.py index 9a4ec75306..6a595d81d0 100644 --- a/tests/core/test_opds.py +++ b/tests/core/test_opds.py @@ -1993,7 +1993,6 @@ def test_license_tags_show_unlimited_access_books( # Arrange edition, pool = db.edition(with_license_pool=True) pool.open_access = False - pool.self_hosted = False pool.unlimited_access = True # Act @@ -2021,23 +2020,6 @@ def test_unlimited_access_pool_loan(self, db: DatabaseTransactionFixture): assert "since" in tag.attrib assert "until" not in tag.attrib - def test_license_tags_show_self_hosted_books(self, db: DatabaseTransactionFixture): - - # Arrange - edition, pool = db.edition(with_license_pool=True) - pool.self_hosted = True - pool.open_access = False - pool.licenses_available = 0 - pool.licenses_owned = 0 - - # Act - tags = AcquisitionFeed.license_tags(pool, None, None) - - # Assert - assert 1 == len(tags) - assert "status" in tags[0].attrib - assert "available" == 
tags[0].attrib["status"] - def test_single_entry(self, db: DatabaseTransactionFixture): session = db.session diff --git a/tests/core/test_s3_analytics_provider.py b/tests/core/test_s3_analytics_provider.py index e0696ff23f..e07e0b45a7 100644 --- a/tests/core/test_s3_analytics_provider.py +++ b/tests/core/test_s3_analytics_provider.py @@ -298,7 +298,7 @@ def test_analytics_data_with_associated_license_pool_is_correctly_stored_in_s3( assert license_pool.licenses_available == event["licenses_available"] assert license_pool.licenses_reserved == event["licenses_reserved"] assert license_pool.patrons_in_hold_queue == event["patrons_in_hold_queue"] - assert license_pool.self_hosted == event["self_hosted"] + assert False == event["self_hosted"] assert work.title == event["title"] assert work.series == event["series"] assert work.series_position == event["series_position"] diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 647caf9412..ec1362ac24 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -343,7 +343,6 @@ def work( presentation_edition=None, collection=None, data_source_name=None, - self_hosted=False, unlimited_access=False, ): """Create a Work. @@ -378,16 +377,12 @@ def work( data_source_name=data_source_name, series=series, collection=collection, - self_hosted=self_hosted, unlimited_access=unlimited_access, ) if with_license_pool: presentation_edition, pool = presentation_edition if with_open_access_download: pool.open_access = True - if self_hosted: - pool.open_access = False - pool.self_hosted = True if unlimited_access: pool.open_access = False pool.unlimited_access = True @@ -446,7 +441,6 @@ def edition( series=None, collection=None, publication_date=None, - self_hosted=False, unlimited_access=False, ): id = identifier_id or self.fresh_str() @@ -486,7 +480,6 @@ def edition( data_source_name=data_source_name, with_open_access_download=with_open_access_download, collection=collection, - self_hosted=self_hosted, unlimited_access=unlimited_access, ) @@ -502,7 +495,6 @@ def licensepool( with_open_access_download=False, set_edition_as_presentation=False, collection=None, - self_hosted=False, unlimited_access=False, ): source = DataSource.lookup(self.session, data_source_name) @@ -518,7 +510,6 @@ def licensepool( data_source=source, collection=collection, availability_time=utc_now(), - self_hosted=self_hosted, unlimited_access=unlimited_access, ) From c704e09d179337fa48263015f6d5e3dff7dc4616 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:15:44 -0300 Subject: [PATCH 026/262] Bump docker/setup-buildx-action from 2 to 3 (#1369) Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 2 to 3. - [Release notes](https://github.com/docker/setup-buildx-action/releases) - [Commits](https://github.com/docker/setup-buildx-action/compare/v2...v3) --- updated-dependencies: - dependency-name: docker/setup-buildx-action dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build-base-image.yml | 2 +- .github/workflows/test-build.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index 5c9fbbdd7a..f2e6c8e5ab 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -33,7 +33,7 @@ jobs: uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Login to GitHub Container Registry uses: docker/login-action@v2 diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index d840653ec5..1636d3ceb1 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -136,7 +136,7 @@ jobs: uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 # If the base image build was changed, we build it first, so we can test # using these changes throughout the rest of the build. If the base image @@ -265,7 +265,7 @@ jobs: uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Build image uses: docker/build-push-action@v4 @@ -324,7 +324,7 @@ jobs: uses: docker/setup-qemu-action@v2 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v2 + uses: docker/setup-buildx-action@v3 - name: Set up Python uses: actions/setup-python@v4 From 4e13467ca54359b838ff95ad14a7f230fa5d0bd6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:17:41 -0300 Subject: [PATCH 027/262] Bump docker/metadata-action from 4 to 5 (#1373) Bumps [docker/metadata-action](https://github.com/docker/metadata-action) from 4 to 5. - [Release notes](https://github.com/docker/metadata-action/releases) - [Upgrade guide](https://github.com/docker/metadata-action/blob/master/UPGRADE.md) - [Commits](https://github.com/docker/metadata-action/compare/v4...v5) --- updated-dependencies: - dependency-name: docker/metadata-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build-base-image.yml | 2 +- .github/workflows/test-build.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index f2e6c8e5ab..75d6d2251c 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -44,7 +44,7 @@ jobs: - name: Generate tags for image id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: ghcr.io/${{ github.repository_owner }}/circ-baseimage # Generate tags for the image diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 1636d3ceb1..7a4612a1a1 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -154,7 +154,7 @@ jobs: # characters are escaped, and the repo owner string is lowercase. 
- name: Generate tags for base image id: baseimage-meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: ghcr.io/${{ github.repository_owner }}/circ-baseimage tags: | @@ -165,7 +165,7 @@ jobs: # We are using docker/metadata-action here for the same reason as above. - name: Generate tag for latest id: baseimage-latest - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: ghcr.io/${{ github.repository_owner }}/circ-baseimage tags: | @@ -354,7 +354,7 @@ jobs: - name: Generate tags for image id: meta - uses: docker/metadata-action@v4 + uses: docker/metadata-action@v5 with: images: ghcr.io/${{ github.repository_owner }}/circ-${{ matrix.image }} tags: | From 6667fb9e691fcbdd96878481b082d1f0004fe771 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 14:32:14 -0300 Subject: [PATCH 028/262] Bump docker/build-push-action from 4 to 5 (#1372) Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 4 to 5. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v4...v5) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-major ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/build-base-image.yml | 2 +- .github/workflows/test-build.yml | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index 75d6d2251c..06be17a987 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -56,7 +56,7 @@ jobs: type=raw,value=latest - name: Build base image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . file: ./docker/Dockerfile.baseimage diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 7a4612a1a1..16fda75bba 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -181,7 +181,7 @@ jobs: # Build the base image, only if needed. - name: Build base image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . file: ./docker/Dockerfile.baseimage @@ -217,7 +217,7 @@ jobs: echo tag="$tag" >> "$GITHUB_OUTPUT" - name: Build common image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . file: ./docker/Dockerfile @@ -268,7 +268,7 @@ jobs: uses: docker/setup-buildx-action@v3 - name: Build image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . file: ./docker/Dockerfile @@ -364,7 +364,7 @@ jobs: type=sha,priority=40 - name: Push image - uses: docker/build-push-action@v4 + uses: docker/build-push-action@v5 with: context: . 
file: ./docker/Dockerfile From 766bec470a97fec4028a5d593d8009324b5e3f6c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 17:44:07 +0000 Subject: [PATCH 029/262] Bump pyjwt from 2.6.0 to 2.8.0 (#1375) --- api/authenticator.py | 4 ++-- poetry.lock | 41 +++++++++-------------------------------- pyproject.toml | 2 +- 3 files changed, 12 insertions(+), 35 deletions(-) diff --git a/api/authenticator.py b/api/authenticator.py index 9be8470c1c..f030bb26dc 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -541,12 +541,12 @@ def create_bearer_token( # Maybe we should use something custom instead. iss=provider_name, ) - return jwt.encode(payload, self.bearer_token_signing_secret, algorithm="HS256") # type: ignore[arg-type] + return jwt.encode(payload, self.bearer_token_signing_secret, algorithm="HS256") def decode_bearer_token(self, token: str) -> Tuple[str, str]: """Extract auth provider name and access token from JSON web token.""" decoded = jwt.decode( - token, self.bearer_token_signing_secret, algorithms=["HS256"] # type: ignore[arg-type] + token, self.bearer_token_signing_secret, algorithms=["HS256"] ) provider_name = decoded["iss"] token = decoded["token"] diff --git a/poetry.lock b/poetry.lock index 42710ed461..4d105794d9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. [[package]] name = "alembic" @@ -1172,12 +1172,12 @@ files = [ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ + {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ + {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -1279,8 +1279,8 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -2023,13 +2023,10 @@ files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = 
"lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, - {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, - {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, - {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -2038,7 +2035,6 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, - {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -2058,7 +2054,6 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, - {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = 
"sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -2068,7 +2063,6 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, - {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -2078,7 +2072,6 @@ files = [ {file = "lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, - {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -2088,7 +2081,6 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, - {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = 
"lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -2099,16 +2091,13 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, - {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, - {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, - {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -2496,7 +2485,6 @@ files = [ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, - {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = 
"sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, @@ -2506,7 +2494,6 @@ files = [ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, - {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, @@ -2935,13 +2922,13 @@ types = ["typing-extensions"] [[package]] name = "pyjwt" -version = "2.6.0" +version = "2.8.0" description = "JSON Web Token implementation in Python" optional = false python-versions = ">=3.7" files = [ - {file = "PyJWT-2.6.0-py3-none-any.whl", hash = "sha256:d83c3d892a77bbb74d3e1a2cfa90afaadb60945205d1095d9221f04466f64c14"}, - {file = "PyJWT-2.6.0.tar.gz", hash = "sha256:69285c7e31fc44f68a1feb309e948e0df53259d579295e6cfe2b1792329f05fd"}, + {file = "PyJWT-2.8.0-py3-none-any.whl", hash = "sha256:59127c392cc44c2da5bb3192169a91f429924e17aff6534d70fdc02ab3e04320"}, + {file = "PyJWT-2.8.0.tar.gz", hash = "sha256:57e28d156e3d5c10088e0c68abb90bfac3df82b40a71bd0daa20c65ccd5c23de"}, ] [package.dependencies] @@ -3237,7 +3224,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3245,15 +3231,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3270,7 +3249,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3278,7 +3256,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3681,7 +3658,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} mypy = {version = ">=0.910", optional = true, markers = "python_version >= \"3\" and extra == \"mypy\""} sqlalchemy2-stubs = {version = "*", optional = true, markers = "extra == \"mypy\""} @@ -4229,4 +4206,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "7600087ac13c7c72ff6fdbec8364c7e04780d06e80a9115c4483cb47ff393d7f" +content-hash = "4cf46205d47e3b72a365ff6f418785517c863a8b2770616ff1cebc61a316a21b" diff --git a/pyproject.toml b/pyproject.toml index d53471d147..83cb0192f1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -191,7 +191,7 @@ pillow = "^10.0" pycryptodome = "^3.18" pydantic = {version = "^1.10.9", extras = ["email"]} pyinstrument = "<4.6" -PyJWT = "2.6.0" +PyJWT = "^2.8" PyLD = "2.0.3" pymarc = "5.1.0" pyOpenSSL = "^23.1.0" From 10fd0bff4248aac4538af4898ec8b74e6b218080 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 17:44:44 +0000 Subject: [PATCH 030/262] Bump docker/setup-qemu-action from 2 to 3 (#1370) --- .github/workflows/build-base-image.yml | 2 +- .github/workflows/test-build.yml | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index 06be17a987..738b997b9b 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -30,7 +30,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 16fda75bba..2927d12f05 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -133,7 +133,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -262,7 +262,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 
- name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 @@ -321,7 +321,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up QEMU - uses: docker/setup-qemu-action@v2 + uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 From e558833a3349316dbc0a4d91a2daa4ccc96a9156 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Sep 2023 19:31:47 +0000 Subject: [PATCH 031/262] Bump docker/login-action from 2 to 3 (#1371) --- .github/workflows/build-base-image.yml | 2 +- .github/workflows/test-build.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build-base-image.yml b/.github/workflows/build-base-image.yml index 738b997b9b..691ee39349 100644 --- a/.github/workflows/build-base-image.yml +++ b/.github/workflows/build-base-image.yml @@ -36,7 +36,7 @@ jobs: uses: docker/setup-buildx-action@v3 - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 2927d12f05..426199d9ad 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -172,7 +172,7 @@ jobs: type=raw,value=latest - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} @@ -346,7 +346,7 @@ jobs: echo "__branch__ = '$(dunamai from git --format {branch})'" >> core/_version.py - name: Login to GitHub Container Registry - uses: docker/login-action@v2 + uses: docker/login-action@v3 with: registry: ghcr.io username: ${{ github.repository_owner }} From 4cfb3becae3373600fce54a9bf4c94a4b2c40ac4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Sep 2023 13:54:07 +0000 Subject: [PATCH 032/262] Bump types-psycopg2 from 2.9.21.11 to 2.9.21.12 (#1380) --- poetry.lock | 39 +++++++++++++++++++++++++++++++-------- 1 file changed, 31 insertions(+), 8 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4d105794d9..b2ac521785 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
[[package]] name = "alembic" @@ -1172,12 +1172,12 @@ files = [ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -1279,8 +1279,8 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -2023,10 +2023,13 @@ files = [ {file = "lxml-4.9.3-cp27-cp27m-macosx_11_0_x86_64.whl", hash = "sha256:b0a545b46b526d418eb91754565ba5b63b1c0b12f9bd2f808c852d9b4b2f9b5c"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:075b731ddd9e7f68ad24c635374211376aa05a281673ede86cbe1d1b3455279d"}, {file = "lxml-4.9.3-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1e224d5755dba2f4a9498e150c43792392ac9b5380aa1b845f98a1618c94eeef"}, + {file = "lxml-4.9.3-cp27-cp27m-win32.whl", hash = "sha256:2c74524e179f2ad6d2a4f7caf70e2d96639c0954c943ad601a9e146c76408ed7"}, + {file = "lxml-4.9.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4f1026bc732b6a7f96369f7bfe1a4f2290fb34dce00d8644bc3036fb351a4ca1"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0781a98ff5e6586926293e59480b64ddd46282953203c76ae15dbbbf302e8bb"}, {file = "lxml-4.9.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:cef2502e7e8a96fe5ad686d60b49e1ab03e438bd9123987994528febd569868e"}, {file = "lxml-4.9.3-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:b86164d2cff4d3aaa1f04a14685cbc072efd0b4f99ca5708b2ad1b9b5988a991"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:42871176e7896d5d45138f6d28751053c711ed4d48d8e30b498da155af39aebd"}, + {file = "lxml-4.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:ae8b9c6deb1e634ba4f1930eb67ef6e6bf6a44b6eb5ad605642b2d6d5ed9ce3c"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:411007c0d88188d9f621b11d252cce90c4a2d1a49db6c068e3c16422f306eab8"}, {file = "lxml-4.9.3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:cd47b4a0d41d2afa3e58e5bf1f62069255aa2fd6ff5ee41604418ca925911d76"}, {file = "lxml-4.9.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:0e2cb47860da1f7e9a5256254b74ae331687b9672dfa780eed355c4c9c3dbd23"}, @@ -2035,6 +2038,7 @@ files = [ {file = "lxml-4.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:97047f0d25cd4bcae81f9ec9dc290ca3e15927c192df17331b53bebe0e3ff96d"}, {file = "lxml-4.9.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:1f447ea5429b54f9582d4b955f5f1985f278ce5cf169f72eea8afd9502973dd5"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:57d6ba0ca2b0c462f339640d22882acc711de224d769edf29962b09f77129cbf"}, + {file = "lxml-4.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:9767e79108424fb6c3edf8f81e6730666a50feb01a328f4a016464a5893f835a"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:71c52db65e4b56b8ddc5bb89fb2e66c558ed9d1a74a45ceb7dcb20c191c3df2f"}, {file = "lxml-4.9.3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:d73d8ecf8ecf10a3bd007f2192725a34bd62898e8da27eb9d32a58084f93962b"}, {file = "lxml-4.9.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0a3d3487f07c1d7f150894c238299934a2a074ef590b583103a45002035be120"}, @@ -2054,6 +2058,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-macosx_11_0_x86_64.whl", hash = "sha256:64f479d719dc9f4c813ad9bb6b28f8390360660b73b2e4beb4cb0ae7104f1c12"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:dd708cf4ee4408cf46a48b108fb9427bfa00b9b85812a9262b5c668af2533ea5"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c31c7462abdf8f2ac0577d9f05279727e698f97ecbb02f17939ea99ae8daa98"}, + {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e3cd95e10c2610c360154afdc2f1480aea394f4a4f1ea0a5eacce49640c9b190"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:4930be26af26ac545c3dffb662521d4e6268352866956672231887d18f0eaab2"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4aec80cde9197340bc353d2768e2a75f5f60bacda2bab72ab1dc499589b3878c"}, {file = "lxml-4.9.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:14e019fd83b831b2e61baed40cab76222139926b1fb5ed0e79225bc0cae14584"}, @@ -2063,6 +2068,7 @@ files = [ {file = "lxml-4.9.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bef4e656f7d98aaa3486d2627e7d2df1157d7e88e7efd43a65aa5dd4714916cf"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:46f409a2d60f634fe550f7133ed30ad5321ae2e6630f13657fb9479506b00601"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:4c28a9144688aef80d6ea666c809b4b0e50010a2aca784c97f5e6bf143d9f129"}, + {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:141f1d1a9b663c679dc524af3ea1773e618907e96075262726c7612c02b149a4"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:53ace1c1fd5a74ef662f844a0413446c0629d151055340e9893da958a374f70d"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:17a753023436a18e27dd7769e798ce302963c236bc4114ceee5b25c18c52c693"}, {file = "lxml-4.9.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:7d298a1bd60c067ea75d9f684f5f3992c9d6766fadbc0bcedd39750bf344c2f4"}, @@ -2072,6 +2078,7 @@ files = [ {file = 
"lxml-4.9.3-cp37-cp37m-win_amd64.whl", hash = "sha256:120fa9349a24c7043854c53cae8cec227e1f79195a7493e09e0c12e29f918e52"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4d2d1edbca80b510443f51afd8496be95529db04a509bc8faee49c7b0fb6d2cc"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:8d7e43bd40f65f7d97ad8ef5c9b1778943d02f04febef12def25f7583d19baac"}, + {file = "lxml-4.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:71d66ee82e7417828af6ecd7db817913cb0cf9d4e61aa0ac1fde0583d84358db"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:6fc3c450eaa0b56f815c7b62f2b7fba7266c4779adcf1cece9e6deb1de7305ce"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:65299ea57d82fb91c7f019300d24050c4ddeb7c5a190e076b5f48a2b43d19c42"}, {file = "lxml-4.9.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eadfbbbfb41b44034a4c757fd5d70baccd43296fb894dba0295606a7cf3124aa"}, @@ -2081,6 +2088,7 @@ files = [ {file = "lxml-4.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:92af161ecbdb2883c4593d5ed4815ea71b31fafd7fd05789b23100d081ecac96"}, {file = "lxml-4.9.3-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:9bb6ad405121241e99a86efff22d3ef469024ce22875a7ae045896ad23ba2340"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8ed74706b26ad100433da4b9d807eae371efaa266ffc3e9191ea436087a9d6a7"}, + {file = "lxml-4.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fbf521479bcac1e25a663df882c46a641a9bff6b56dc8b0fafaebd2f66fb231b"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:303bf1edce6ced16bf67a18a1cf8339d0db79577eec5d9a6d4a80f0fb10aa2da"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:5515edd2a6d1a5a70bfcdee23b42ec33425e405c5b351478ab7dc9347228f96e"}, {file = "lxml-4.9.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:690dafd0b187ed38583a648076865d8c229661ed20e48f2335d68e2cf7dc829d"}, @@ -2091,13 +2099,16 @@ files = [ {file = "lxml-4.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:4dd9a263e845a72eacb60d12401e37c616438ea2e5442885f65082c276dfb2b2"}, {file = "lxml-4.9.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6689a3d7fd13dc687e9102a27e98ef33730ac4fe37795d5036d18b4d527abd35"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:f6bdac493b949141b733c5345b6ba8f87a226029cbabc7e9e121a413e49441e0"}, + {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:05186a0f1346ae12553d66df1cfce6f251589fea3ad3da4f3ef4e34b2d58c6a3"}, {file = "lxml-4.9.3-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c2006f5c8d28dee289f7020f721354362fa304acbaaf9745751ac4006650254b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:5c245b783db29c4e4fbbbfc9c5a78be496c9fea25517f90606aa1f6b2b3d5f7b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4fb960a632a49f2f089d522f70496640fdf1218f1243889da3822e0a9f5f3ba7"}, + {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:50670615eaf97227d5dc60de2dc99fb134a7130d310d783314e7724bf163f75d"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:9719fe17307a9e814580af1f5c6e05ca593b12fb7e44fe62450a5384dbf61b4b"}, {file = "lxml-4.9.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3331bece23c9ee066e0fb3f96c61322b9e0f54d775fccefff4c38ca488de283a"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:ed667f49b11360951e201453fc3967344d0d0263aa415e1619e85ae7fd17b4e0"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8b77946fd508cbf0fccd8e400a7f71d4ac0e1595812e66025bac475a8e811694"}, + {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:e4da8ca0c0c0aea88fd46be8e44bd49716772358d648cce45fe387f7b92374a7"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fe4bda6bd4340caa6e5cf95e73f8fea5c4bfc55763dd42f1b50a94c1b4a2fbd4"}, {file = "lxml-4.9.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f3df3db1d336b9356dd3112eae5f5c2b8b377f3bc826848567f10bfddfee77e9"}, {file = "lxml-4.9.3.tar.gz", hash = "sha256:48628bd53a426c9eb9bc066a923acaa0878d1e86129fd5359aee99285f4eed9c"}, @@ -2485,6 +2496,7 @@ files = [ {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, + {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, @@ -2494,6 +2506,7 @@ files = [ {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, + {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, @@ -3224,6 +3237,7 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -3231,8 +3245,15 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, + {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -3249,6 +3270,7 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -3256,6 +3278,7 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -3658,7 +3681,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\")"} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} mypy = {version = ">=0.910", optional = true, markers = "python_version >= \"3\" and extra == \"mypy\""} sqlalchemy2-stubs = {version = "*", optional = true, markers = "extra == \"mypy\""} @@ -3862,13 +3885,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.11" +version = "2.9.21.12" description = "Typing stubs for psycopg2" optional = false python-versions = "*" files = [ - {file = "types-psycopg2-2.9.21.11.tar.gz", hash = "sha256:d5077eacf90e61db8c0b8eea2fdc9d4a97d7aaa16865fb4bd7034a7571520b4d"}, - {file = "types_psycopg2-2.9.21.11-py3-none-any.whl", hash = "sha256:7a323d7744bc8a882fb5a6f63448e903fc70d3dc0d6da9ec1f9c6c4dc10a7102"}, + {file = "types-psycopg2-2.9.21.12.tar.gz", hash = "sha256:a4bd86dd2a22a7e221f6a3681cc182cb3d76be67cb40d60da12e64547713c6fd"}, + {file = "types_psycopg2-2.9.21.12-py3-none-any.whl", hash = "sha256:5b84ccb7265713dd17e5529d57f2bf18f1fd455b6da674575b7e7b5d80501732"}, ] [[package]] From 202a1ab635aca5d999738fdcd7512618f4d3e010 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT 
<90382027+RishiDiwanTT@users.noreply.github.com>
Date: Fri, 15 Sep 2023 11:55:40 +0530
Subject: [PATCH 033/262] PP-149 Refactor Acquisition feeds and Annotators
 (#1308)

* OPDS 1.2 and 2.0 refactor, starting with an acquisition page feed.

This aims to remove all XML directives from within the feed and annotator
workflows, and also to simplify the workflow, making it more linear and
less circular. Only a Serializer should write XML to the feed. Serializers
are picked on the basis of the Accept header sent to the endpoint.
Currently only the /feed, /group and /loans endpoints react to the Accept
header; the rest are still OPDS 1.2.
---
 api/admin/controller/custom_lists.py | 12 +-
 api/admin/controller/work_editor.py | 8 +-
 api/annotations.py | 2 +-
 api/circulation.py | 5 -
 api/controller.py | 119 +-
 core/app_server.py | 13 +-
 core/feed/acquisition.py | 926 +++++++++
 core/feed/admin.py | 70 +
 core/feed/annotator/admin.py | 105 +
 core/feed/annotator/base.py | 389 ++++
 core/feed/annotator/circulation.py | 1564 ++++++++++++++
 core/feed/annotator/loan_and_hold.py | 125 ++
 core/feed/annotator/verbose.py | 110 +
 core/feed/base.py | 13 +
 core/feed/navigation.py | 92 +
 core/feed/opds.py | 96 +
 core/feed/serializer/base.py | 32 +
 core/feed/serializer/opds.py | 363 ++++
 core/feed/serializer/opds2.py | 213 ++
 core/feed/types.py | 240 +++
 core/feed/util.py | 27 +
 core/model/constants.py | 1 +
 core/model/licensing.py | 6 +-
 core/model/work.py | 4 +-
 core/opds_schema.py | 2 +
 pyproject.toml | 1 +
 scripts.py | 17 +-
 .../feed/equivalence/test_feed_equivalence.py | 296 +++
 tests/api/feed/fixtures.py | 47 +
 tests/api/feed/test_admin.py | 285 +++
 tests/api/feed/test_annotators.py | 469 +++++
 tests/api/feed/test_library_annotator.py | 1795 +++++++++++++++++
 .../api/feed/test_loan_and_hold_annotator.py | 287 +++
 tests/api/feed/test_opds2_serializer.py | 215 ++
 tests/api/feed/test_opds_acquisition_feed.py | 1454 +++++++++++++
 tests/api/feed/test_opds_base.py | 57 +
 tests/api/feed/test_opds_serializer.py | 232 +++
 tests/api/test_controller_cm.py | 5 +-
 tests/api/test_controller_crawlfeed.py | 15 +-
 tests/api/test_controller_loan.py | 2 +-
 tests/api/test_controller_opdsfeed.py | 37 +-
 tests/api/test_controller_work.py | 64 +-
 tests/api/test_scripts.py | 12 +-
 tests/core/test_app_server.py | 6 +-
 44 files changed, 9684 insertions(+), 149 deletions(-)
 create mode 100644 core/feed/acquisition.py
 create mode 100644 core/feed/admin.py
 create mode 100644 core/feed/annotator/admin.py
 create mode 100644 core/feed/annotator/base.py
 create mode 100644 core/feed/annotator/circulation.py
 create mode 100644 core/feed/annotator/loan_and_hold.py
 create mode 100644 core/feed/annotator/verbose.py
 create mode 100644 core/feed/base.py
 create mode 100644 core/feed/navigation.py
 create mode 100644 core/feed/opds.py
 create mode 100644 core/feed/serializer/base.py
 create mode 100644 core/feed/serializer/opds.py
 create mode 100644 core/feed/serializer/opds2.py
 create mode 100644 core/feed/types.py
 create mode 100644 core/feed/util.py
 create mode 100644 tests/api/feed/equivalence/test_feed_equivalence.py
 create mode 100644 tests/api/feed/fixtures.py
 create mode 100644 tests/api/feed/test_admin.py
 create mode 100644 tests/api/feed/test_annotators.py
 create mode 100644 tests/api/feed/test_library_annotator.py
 create mode 100644 tests/api/feed/test_loan_and_hold_annotator.py
 create mode 100644 tests/api/feed/test_opds2_serializer.py
 create mode 100644 tests/api/feed/test_opds_acquisition_feed.py
 create mode 100644 tests/api/feed/test_opds_base.py
 create
mode 100644 tests/api/feed/test_opds_serializer.py diff --git a/api/admin/controller/custom_lists.py b/api/admin/controller/custom_lists.py index b2e72c10cf..c117aa215c 100644 --- a/api/admin/controller/custom_lists.py +++ b/api/admin/controller/custom_lists.py @@ -24,6 +24,7 @@ from api.controller import CirculationManagerController from api.problem_details import CANNOT_DELETE_SHARED_LIST from core.app_server import load_pagination_from_request +from core.feed.acquisition import OPDSAcquisitionFeed from core.lane import Lane, WorkList from core.model import ( Collection, @@ -36,10 +37,8 @@ create, get_one, ) -from core.opds import AcquisitionFeed from core.problem_details import INVALID_INPUT, METHOD_NOT_ALLOWED from core.query.customlist import CustomListQueries -from core.util.flask_util import OPDSFeedResponse from core.util.problem_detail import ProblemDetail @@ -351,12 +350,11 @@ def custom_list( annotator = self.manager.annotator(worklist) url_fn = self.url_for_custom_list(library, list) - feed = AcquisitionFeed.from_query( - query, self._db, list.name, url, pagination, url_fn, annotator + feed = OPDSAcquisitionFeed.from_query( + query, self._db, list.name or "", url, pagination, url_fn, annotator ) - annotator.annotate_feed(feed, worklist) - - return OPDSFeedResponse(str(feed), max_age=0) + annotator.annotate_feed(feed) + return feed.as_response(max_age=0) elif flask.request.method == "POST": ctx: Context = flask.request.context.body # type: ignore diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index 6b8f520f31..b8c41014fb 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -12,10 +12,11 @@ from flask_babel import lazy_gettext as _ from PIL import Image, ImageDraw, ImageFont -from api.admin.opds import AdminAnnotator from api.admin.problem_details import * from api.admin.validator import Validator from core.classifier import NO_NUMBER, NO_VALUE, SimplifiedGenreClassifier, genres +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.admin import AdminAnnotator from core.lane import Lane from core.metadata_layer import LinkData, Metadata, ReplacementPolicy from core.mirror import MirrorUploader @@ -37,7 +38,6 @@ get_one_or_create, ) from core.model.configuration import ExternalIntegrationLink -from core.opds import AcquisitionFeed from core.util import LanguageCodes from core.util.datetime_helpers import strptime_utc, utc_now from core.util.problem_detail import ProblemDetail @@ -68,7 +68,9 @@ def details(self, identifier_type, identifier): # single_entry returns an OPDSEntryResponse that will not be # cached, which is perfect. We want the admin interface # to update immediately when an admin makes a change. 
- return AcquisitionFeed.single_entry(self._db, work, annotator) + return OPDSAcquisitionFeed.entry_as_response( + OPDSAcquisitionFeed.single_entry(work, annotator) + ) def roles(self): """Return a mapping from MARC codes to contributor roles.""" diff --git a/api/annotations.py b/api/annotations.py index 2fa5cf6360..d2de031610 100644 --- a/api/annotations.py +++ b/api/annotations.py @@ -1,9 +1,9 @@ import json import os +from flask import url_for from pyld import jsonld -from core.app_server import url_for from core.model import Annotation, Identifier from core.util.datetime_helpers import utc_now diff --git a/api/circulation.py b/api/circulation.py index 078bea0613..9765b0801e 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -1354,11 +1354,6 @@ def enforce_limits(self, patron: Patron, pool: LicensePool) -> None: if api is not None: api.update_availability(pool) - if pool.licenses_available is None: - # We don't know how many licenses are available, so we - # can't tell whether the patron is at their limit. - self.log.warning(f"License pool {pool} has unknown availability.") - return currently_available = pool.licenses_available > 0 if currently_available and at_loan_limit: raise PatronLoanLimitReached(limit=patron.library.settings.loan_limit) diff --git a/api/controller.py b/api/controller.py index 743697e886..6b07889caf 100644 --- a/api/controller.py +++ b/api/controller.py @@ -21,11 +21,12 @@ from sqlalchemy import select from sqlalchemy.orm import eagerload from sqlalchemy.orm.exc import NoResultFound +from werkzeug.datastructures import MIMEAccept from api.authentication.access_token import AccessTokenProvider from api.model.patron_auth import PatronAuthAccessToken from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse -from api.opds2 import OPDS2NavigationsAnnotator, OPDS2PublicationsAnnotator +from api.opds2 import OPDS2NavigationsAnnotator from api.saml.controller import SAMLController from core.analytics import Analytics from core.app_server import ApplicationVersionController @@ -37,6 +38,12 @@ ) from core.entrypoint import EverythingEntryPoint from core.external_search import ExternalSearchIndex, SortKeyPagination +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.circulation import ( + CirculationManagerAnnotator, + LibraryAnnotator, +) +from core.feed.navigation import NavigationFeed from core.lane import ( BaseFacets, Facets, @@ -76,7 +83,7 @@ InvalidTokenTypeError, ) from core.model.discovery_service_registration import DiscoveryServiceRegistration -from core.opds import AcquisitionFeed, NavigationFacets, NavigationFeed +from core.opds import NavigationFacets from core.opds2 import AcquisitonFeedOPDS2 from core.opensearch import OpenSearchDocument from core.query.playtime_entries import PlaytimeEntries @@ -112,11 +119,6 @@ ) from .odl import ODLAPI from .odl2 import ODL2API -from .opds import ( - CirculationManagerAnnotator, - LibraryAnnotator, - LibraryLoanAndHoldAnnotator, -) from .problem_details import * if TYPE_CHECKING: @@ -842,7 +844,7 @@ def appropriate_index_for_patron_type(self): class OPDSFeedController(CirculationManagerController): - def groups(self, lane_identifier, feed_class=AcquisitionFeed): + def groups(self, lane_identifier, feed_class=OPDSAcquisitionFeed): """Build or retrieve a grouped acquisition feed. 
:param lane_identifier: An identifier that uniquely identifiers @@ -910,9 +912,9 @@ def groups(self, lane_identifier, feed_class=AcquisitionFeed): annotator=annotator, facets=facets, search_engine=search_engine, - ) + ).as_response(mime_types=flask.request.accept_mimetypes) - def feed(self, lane_identifier, feed_class=AcquisitionFeed): + def feed(self, lane_identifier, feed_class=OPDSAcquisitionFeed): """Build or retrieve a paginated acquisition feed. :param lane_identifier: An identifier that uniquely identifiers @@ -943,7 +945,7 @@ def feed(self, lane_identifier, feed_class=AcquisitionFeed): annotator = self.manager.annotator(lane, facets=facets) max_age = flask.request.args.get("max_age") - return feed_class.page( + feed = feed_class.page( _db=self._db, title=lane.display_name, url=url, @@ -952,7 +954,10 @@ def feed(self, lane_identifier, feed_class=AcquisitionFeed): facets=facets, pagination=pagination, search_engine=search_engine, + ) + return feed.as_response( max_age=int(max_age) if max_age else None, + mime_types=flask.request.accept_mimetypes, ) def navigation(self, lane_identifier): @@ -987,7 +992,7 @@ def navigation(self, lane_identifier): worklist=lane, annotator=annotator, facets=facets, - ) + ).as_response() def crawlable_library_feed(self): """Build or retrieve a crawlable acquisition feed for the @@ -1043,7 +1048,7 @@ def crawlable_list_feed(self, list_name): return self._crawlable_feed(title=title, url=url, worklist=lane) def _crawlable_feed( - self, title, url, worklist, annotator=None, feed_class=AcquisitionFeed + self, title, url, worklist, annotator=None, feed_class=OPDSAcquisitionFeed ): """Helper method to create a crawlable feed. @@ -1052,7 +1057,7 @@ def _crawlable_feed( :param worklist: A crawlable Lane which controls which works show up in the feed. :param annotator: A custom Annotator to use when generating the feed. - :param feed_class: A drop-in replacement for AcquisitionFeed + :param feed_class: A drop-in replacement for OPDSAcquisitionFeed for use in tests. """ pagination = load_pagination_from_request( @@ -1080,7 +1085,7 @@ def _crawlable_feed( facets=facets, pagination=pagination, search_engine=search_engine, - ) + ).as_response() def _load_search_facets(self, lane): entrypoints = list(flask.request.library.entrypoints) @@ -1098,7 +1103,7 @@ def _load_search_facets(self, lane): default_entrypoint=default_entrypoint, ) - def search(self, lane_identifier, feed_class=AcquisitionFeed): + def search(self, lane_identifier, feed_class=OPDSAcquisitionFeed): """Search for books.""" lane = self.load_lane(lane_identifier) if isinstance(lane, ProblemDetail): @@ -1153,7 +1158,7 @@ def search(self, lane_identifier, feed_class=AcquisitionFeed): # Run a search. 
annotator = self.manager.annotator(lane, facets) info = OpenSearchDocument.search_info(lane) - return feed_class.search( + response = feed_class.search( _db=self._db, title=info["name"], url=make_url(), @@ -1164,6 +1169,9 @@ def search(self, lane_identifier, feed_class=AcquisitionFeed): pagination=pagination, facets=facets, ) + if isinstance(response, ProblemDetail): + return response + return response.as_response(mime_types=flask.request.accept_mimetypes) def _qa_feed( self, feed_factory, feed_title, controller_name, facet_class, worklist_factory @@ -1218,7 +1226,7 @@ def _qa_feed( max_age=CachedFeed.IGNORE_CACHE, ) - def qa_feed(self, feed_class=AcquisitionFeed): + def qa_feed(self, feed_class=OPDSAcquisitionFeed): """Create an OPDS feed containing the information necessary to run a full set of integration tests against this server and the vendors it relies on. @@ -1238,7 +1246,7 @@ def factory(library, facets): worklist_factory=factory, ) - def qa_series_feed(self, feed_class=AcquisitionFeed): + def qa_series_feed(self, feed_class=OPDSAcquisitionFeed): """Create an OPDS feed containing books that belong to _some_ series, without regard to _which_ series. @@ -1297,23 +1305,22 @@ def publications(self): params: FeedRequestParameters = self._parse_feed_request() if params.problem: return params.problem - annotator = OPDS2PublicationsAnnotator( - flask.request.url, params.facets, params.pagination, params.library - ) lane = self.load_lane(None) + annotator = self.manager.annotator(lane, params.facets) max_age = flask.request.args.get("max_age") - feed = AcquisitonFeedOPDS2.publications( + feed = OPDSAcquisitionFeed.page( self._db, + lane.display_name, + flask.request.url, lane, + annotator, params.facets, params.pagination, self.search_engine, - annotator, - max_age=int(max_age) if max_age is not None else None, ) - - return Response( - str(feed), status=200, headers={"Content-Type": annotator.OPDS2_TYPE} + return feed.as_response( + mime_types=MIMEAccept([("application/opds+json", 1)]), # Force the type + max_age=int(max_age) if max_age is not None else None, ) def navigation(self): @@ -1456,7 +1463,17 @@ def sync(self): ) # Then make the feed. - return LibraryLoanAndHoldAnnotator.active_loans_for(self.circulation, patron) + feed = OPDSAcquisitionFeed.active_loans_for(self.circulation, patron) + response = feed.as_response( + max_age=0, + private=True, + mime_types=flask.request.accept_mimetypes, + ) + + last_modified = patron.last_loan_activity_sync + if last_modified: + response.last_modified = last_modified + return response def borrow(self, identifier_type, identifier, mechanism_id=None): """Create a new loan or hold for a book. @@ -1504,7 +1521,7 @@ def borrow(self, identifier_type, identifier, mechanism_id=None): response_kwargs["status"] = 201 else: response_kwargs["status"] = 200 - return LibraryLoanAndHoldAnnotator.single_item_feed( + return OPDSAcquisitionFeed.single_entry_loans_feed( self.circulation, loan_or_hold, **response_kwargs ) @@ -1790,7 +1807,7 @@ def fulfill_part_url(part): if mechanism.delivery_mechanism.is_streaming: # If this is a streaming delivery mechanism, create an OPDS entry # with a fulfillment link to the streaming reader url. 
- feed = LibraryLoanAndHoldAnnotator.single_item_feed( + feed = OPDSAcquisitionFeed.single_entry_loans_feed( self.circulation, loan, fulfillment=fulfillment ) if isinstance(feed, ProblemDetail): @@ -1799,8 +1816,6 @@ def fulfill_part_url(part): return feed if isinstance(feed, Response): return feed - if isinstance(feed, OPDSFeed): # type: ignore - content = str(feed) else: content = etree.tostring(feed) status_code = 200 @@ -1917,7 +1932,9 @@ def revoke(self, license_pool_id): work = pool.work annotator = self.manager.annotator(None) - return AcquisitionFeed.single_entry(self._db, work, annotator) + return OPDSAcquisitionFeed.entry_as_response( + OPDSAcquisitionFeed.single_entry(work, annotator) + ) def detail(self, identifier_type, identifier): if flask.request.method == "DELETE": @@ -1949,7 +1966,7 @@ def detail(self, identifier_type, identifier): item = loan else: item = hold - return LibraryLoanAndHoldAnnotator.single_item_feed(self.circulation, item) + return OPDSAcquisitionFeed.single_entry_loans_feed(self.circulation, item) class AnnotationController(CirculationManagerController): @@ -2042,7 +2059,7 @@ def _lane_details(self, languages, audiences): return languages, audiences def contributor( - self, contributor_name, languages, audiences, feed_class=AcquisitionFeed + self, contributor_name, languages, audiences, feed_class=OPDSAcquisitionFeed ): """Serve a feed of books written by a particular author""" library = flask.request.library @@ -2096,7 +2113,7 @@ def contributor( pagination=pagination, annotator=annotator, search_engine=search_engine, - ) + ).as_response() def permalink(self, identifier_type, identifier): """Serve an entry for a single book. @@ -2129,18 +2146,23 @@ def permalink(self, identifier_type, identifier): item = loan or hold pool = pool or pools[0] - return LibraryLoanAndHoldAnnotator.single_item_feed( + return OPDSAcquisitionFeed.single_entry_loans_feed( self.circulation, item or pool ) else: annotator = self.manager.annotator(lane=None) - return AcquisitionFeed.single_entry( - self._db, work, annotator, max_age=OPDSFeed.DEFAULT_MAX_AGE + return OPDSAcquisitionFeed.entry_as_response( + OPDSAcquisitionFeed.single_entry(work, annotator), + max_age=OPDSFeed.DEFAULT_MAX_AGE, ) def related( - self, identifier_type, identifier, novelist_api=None, feed_class=AcquisitionFeed + self, + identifier_type, + identifier, + novelist_api=None, + feed_class=OPDSAcquisitionFeed, ): """Serve a groups feed of books related to a given book.""" @@ -2185,12 +2207,17 @@ def related( url=url, worklist=lane, annotator=annotator, + pagination=None, facets=facets, search_engine=search_engine, - ) + ).as_response() def recommendations( - self, identifier_type, identifier, novelist_api=None, feed_class=AcquisitionFeed + self, + identifier_type, + identifier, + novelist_api=None, + feed_class=OPDSAcquisitionFeed, ): """Serve a feed of recommendations related to a given book.""" @@ -2242,9 +2269,9 @@ def recommendations( pagination=pagination, annotator=annotator, search_engine=search_engine, - ) + ).as_response() - def series(self, series_name, languages, audiences, feed_class=AcquisitionFeed): + def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFeed): """Serve a feed of books in a given series.""" library = flask.request.library if not series_name: @@ -2281,7 +2308,7 @@ def series(self, series_name, languages, audiences, feed_class=AcquisitionFeed): pagination=pagination, annotator=annotator, search_engine=search_engine, - ) + ).as_response() class 
ProfileController(CirculationManagerController): diff --git a/core/app_server.py b/core/app_server.py index 96f265fe90..5eaa9464b7 100644 --- a/core/app_server.py +++ b/core/app_server.py @@ -16,13 +16,12 @@ import core from api.admin.config import Configuration as AdminUiConfig +from core.feed.acquisition import LookupAcquisitionFeed, OPDSAcquisitionFeed from .lane import Facets, Pagination from .log import LogConfiguration from .model import Identifier -from .opds import AcquisitionFeed, LookupAcquisitionFeed from .problem_details import * -from .util.flask_util import OPDSFeedResponse from .util.opds_writer import OPDSMessage from .util.problem_detail import ProblemDetail @@ -303,16 +302,15 @@ def work_lookup(self, annotator, route_name="lookup", **process_urn_kwargs): if isinstance(handler, ProblemDetail): # In a subclass, self.process_urns may return a ProblemDetail return handler - opds_feed = LookupAcquisitionFeed( - self._db, "Lookup results", this_url, handler.works, annotator, precomposed_entries=handler.precomposed_entries, ) - return OPDSFeedResponse(str(opds_feed)) + opds_feed.generate_feed(annotate=False) + return opds_feed.as_response() def process_urns(self, urns, **process_urn_kwargs): """Process a number of URNs by instantiating a URNLookupHandler @@ -342,15 +340,14 @@ def permalink(self, urn, annotator, route_name="work"): # work) tuples, but an AcquisitionFeed's .works is just a # list of works. works = [work for (identifier, work) in handler.works] - opds_feed = AcquisitionFeed( - self._db, + opds_feed = OPDSAcquisitionFeed( urn, this_url, works, annotator, precomposed_entries=handler.precomposed_entries, ) - return OPDSFeedResponse(str(opds_feed)) + return opds_feed.as_response() class URNLookupHandler: diff --git a/core/feed/acquisition.py b/core/feed/acquisition.py new file mode 100644 index 0000000000..12713e1daf --- /dev/null +++ b/core/feed/acquisition.py @@ -0,0 +1,926 @@ +"""OPDS feeds, they can be serialized to either OPDS 1 or OPDS 2""" +from __future__ import annotations + +import logging +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Generator, + List, + Optional, + Tuple, + Type, +) + +from sqlalchemy.orm import Query, Session + +from api.problem_details import NOT_FOUND_ON_REMOTE +from core.entrypoint import EntryPoint +from core.external_search import ExternalSearchIndex, QueryParseException +from core.facets import FacetConstants +from core.feed.annotator.base import Annotator +from core.feed.annotator.circulation import ( + CirculationManagerAnnotator, + LibraryAnnotator, +) +from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator +from core.feed.opds import BaseOPDSFeed +from core.feed.types import FeedData, Link, WorkEntry +from core.feed.util import strftime +from core.lane import Facets, FacetsWithEntryPoint, Lane, Pagination, SearchFacets +from core.model.constants import LinkRelations +from core.model.edition import Edition +from core.model.identifier import Identifier +from core.model.licensing import LicensePool +from core.model.patron import Hold, Loan, Patron +from core.model.work import Work +from core.opds import UnfulfillableWork +from core.problem_details import INVALID_INPUT +from core.util.datetime_helpers import utc_now +from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse +from core.util.opds_writer import OPDSMessage +from core.util.problem_detail import ProblemDetail + +if TYPE_CHECKING: + from api.circulation import CirculationAPI, FulfillmentInfo + from core.lane import 
WorkList + + +class OPDSAcquisitionFeed(BaseOPDSFeed): + """An Acquisition Feed which is not tied to any particular format. + It is simply responsible for creating different types of feeds.""" + + def __init__( + self, + title: str, + url: str, + works: List[Work], + annotator: CirculationManagerAnnotator, + facets: Optional[FacetsWithEntryPoint] = None, + pagination: Optional[Pagination] = None, + precomposed_entries: Optional[List[OPDSMessage]] = None, + ) -> None: + self.annotator = annotator + self._facets = facets + self._pagination = pagination + super().__init__(title, url, precomposed_entries=precomposed_entries) + for work in works: + entry = self.single_entry(work, self.annotator) + if isinstance(entry, WorkEntry): + self._feed.entries.append(entry) + + def generate_feed(self, annotate: bool = True) -> None: + """Generate the feed metadata and links. + We assume the entries have already been annotated.""" + self._feed.add_metadata("id", text=self.url) + self._feed.add_metadata("title", text=self.title) + self._feed.add_metadata("updated", text=strftime(utc_now())) + self._feed.add_link(href=self.url, rel="self") + if annotate: + self.annotator.annotate_feed(self._feed) + + def add_pagination_links(self, works: List[Work], lane: WorkList) -> None: + """Add pagination links to the feed""" + if not self._pagination: + return None + if len(works) and self._pagination.has_next_page: + self._feed.add_link( + href=self.annotator.feed_url( + lane, self._facets, self._pagination.next_page + ), + rel="next", + ) + + if self._pagination.offset > 0: + self._feed.add_link( + href=self.annotator.feed_url( + lane, self._facets, self._pagination.first_page + ), + rel="first", + ) + + if self._pagination.previous_page: + self._feed.add_link( + href=self.annotator.feed_url( + lane, self._facets, self._pagination.previous_page + ), + rel="previous", + ) + + def add_facet_links(self, lane: WorkList) -> None: + """Add facet links to the feed""" + if self._facets is None: + return None + else: + facets = self._facets + entrypoints = facets.selectable_entrypoints(lane) + if entrypoints: + # A paginated feed may have multiple entry points into the + # same dataset. + def make_link(ep: Type[EntryPoint]) -> str: + return self.annotator.feed_url( + lane, facets=facets.navigate(entrypoint=ep) + ) + + self.add_entrypoint_links( + self._feed, make_link, entrypoints, facets.entrypoint + ) + + # Facet links + facet_links = self.facet_links(self.annotator, self._facets) + for linkdata in facet_links: + self._feed.facet_links.append(linkdata) + + @classmethod + def facet_links( + cls, annotator: CirculationManagerAnnotator, facets: FacetsWithEntryPoint + ) -> Generator[Link, None, None]: + """Create links for this feed's navigational facet groups. + + This does not create links for the entry point facet group, + because those links should only be present in certain + circumstances, and this method doesn't know if those + circumstances apply. You need to decide whether to call + add_entrypoint_links in addition to calling this method. 
+ """ + for group, value, new_facets, selected in facets.facet_groups: + url = annotator.facet_url(new_facets) + if not url: + continue + group_title = Facets.GROUP_DISPLAY_TITLES.get(group) + facet_title = Facets.FACET_DISPLAY_TITLES.get(value) + if not facet_title: + display_lambda = Facets.FACET_DISPLAY_TITLES_DYNAMIC.get(group) + facet_title = display_lambda(new_facets) if display_lambda else None + if not (group_title and facet_title): + # This facet group or facet, is not recognized by the + # system. It may be left over from an earlier version, + # or just weird junk data. + continue + yield cls.facet_link(url, str(facet_title), str(group_title), selected) + + @classmethod + def facet_link( + cls, href: str, title: str, facet_group_name: str, is_active: bool + ) -> Link: + """Build a set of attributes for a facet link. + + :param href: Destination of the link. + :param title: Human-readable description of the facet. + :param facet_group_name: The facet group to which the facet belongs, + e.g. "Sort By". + :param is_active: True if this is the client's currently + selected facet. + + :retusrn: A dictionary of attributes, suitable for passing as + keyword arguments into OPDSFeed.add_link_to_feed. + """ + args = dict(href=href, title=title) + args["rel"] = LinkRelations.FACET_REL + args["facetGroup"] = facet_group_name + if is_active: + args["activeFacet"] = "true" + return Link.create(**args) + + def as_error_response(self, **kwargs: Any) -> OPDSFeedResponse: + """Convert this feed into an OPDSFeedResponse that should be treated + by intermediaries as an error -- that is, treated as private + and not cached. + """ + kwargs["max_age"] = 0 + kwargs["private"] = True + return self.as_response(**kwargs) + + @classmethod + def _create_entry( + cls, + work: Work, + active_licensepool: Optional[LicensePool], + edition: Edition, + identifier: Identifier, + annotator: Annotator, + ) -> WorkEntry: + entry = WorkEntry( + work=work, + edition=edition, + identifier=identifier, + license_pool=active_licensepool, + ) + annotator.annotate_work_entry(entry) + return entry + + ## OPDS1 specifics + @classmethod + def add_entrypoint_links( + cls, + feed: FeedData, + url_generator: Callable[[Type[EntryPoint]], str], + entrypoints: List[Type[EntryPoint]], + selected_entrypoint: Optional[Type[EntryPoint]], + group_name: str = "Formats", + ) -> None: + """Add links to a feed forming an OPDS facet group for a set of + EntryPoints. + + :param feed: A FeedData object. + :param url_generator: A callable that returns the entry point + URL when passed an EntryPoint. + :param entrypoints: A list of all EntryPoints in the facet group. + :param selected_entrypoint: The current EntryPoint, if selected. + """ + if len(entrypoints) == 1 and selected_entrypoint in (None, entrypoints[0]): + # There is only one entry point. Unless the currently + # selected entry point is somehow different, there's no + # need to put any links at all here -- a facet group with + # one one facet might as well not be there. 
+ return + + is_default = True + for entrypoint in entrypoints: + link = cls._entrypoint_link( + url_generator, entrypoint, selected_entrypoint, is_default, group_name + ) + if link is not None: + feed.links.append(link) + is_default = False + + @classmethod + def _entrypoint_link( + cls, + url_generator: Callable[[Type[EntryPoint]], str], + entrypoint: Type[EntryPoint], + selected_entrypoint: Optional[Type[EntryPoint]], + is_default: bool, + group_name: str, + ) -> Optional[Link]: + """Create arguments for add_link_to_feed for a link that navigates + between EntryPoints. + """ + display_title = EntryPoint.DISPLAY_TITLES.get(entrypoint) + if not display_title: + # Shouldn't happen. + return None + + url = url_generator(entrypoint) + is_selected = entrypoint is selected_entrypoint + link = cls.facet_link(url, display_title, group_name, is_selected) + + # Unlike a normal facet group, every link in this facet + # group has an additional attribute marking it as an entry + # point. + # + # In OPDS 2 this can become an additional rel value, + # removing the need for a custom attribute. + link.add_attributes({"facetGroupType": FacetConstants.ENTRY_POINT_REL}) + return link + + def add_breadcrumb_links( + self, lane: WorkList, entrypoint: Optional[Type[EntryPoint]] = None + ) -> None: + """Add information necessary to find your current place in the + site's navigation. + + A link with rel="start" points to the start of the site + + An Entrypoint section describes the current entry point. + + A breadcrumbs section contains a sequence of breadcrumb links. + """ + # Add the top-level link with rel='start' + annotator = self.annotator + top_level_title = annotator.top_level_title() or "Collection Home" + self.add_link(annotator.default_lane_url(), rel="start", title=top_level_title) + + # Add a link to the direct parent with rel="up". + # + # TODO: the 'direct parent' may be the same lane but without + # the entry point specified. Fixing this would also be a good + # opportunity to refactor the code for figuring out parent and + # parent_title. + parent = None + if isinstance(lane, Lane): + parent = lane.parent + if parent and parent.display_name: + parent_title = parent.display_name + else: + parent_title = top_level_title + + if parent: + up_uri = annotator.lane_url(parent) + self.add_link(up_uri, rel="up", title=parent_title) + self.add_breadcrumbs(lane, entrypoint=entrypoint) + + # Annotate the feed with a simplified:entryPoint for the + # current EntryPoint. + self.show_current_entrypoint(entrypoint) + + def add_breadcrumbs( + self, + lane: WorkList, + include_lane: bool = False, + entrypoint: Optional[Type[EntryPoint]] = None, + ) -> None: + """Add list of ancestor links in a breadcrumbs element. + + :param lane: Add breadcrumbs from up to this lane. + :param include_lane: Include `lane` itself in the breadcrumbs. + :param entrypoint: The currently selected entrypoint, if any. + + TODO: The switchover from "no entry point" to "entry point" needs + its own breadcrumb link. + """ + if entrypoint is None: + entrypoint_query = "" + else: + entrypoint_query = "?entrypoint=" + entrypoint.INTERNAL_NAME + + # Breadcrumbs for lanes may be end up being cut off by a + # patron-type-specific root lane. If so, that lane -- not the + # site root -- should become the first breadcrumb. + site_root_lane = None + usable_parentage = [] + if lane is not None: + for ancestor in [lane] + list(lane.parentage): + if isinstance(ancestor, Lane) and ancestor.root_for_patron_type: + # Root lane for a specific patron type. 
The root is + # treated specially, so it should not be added to + # usable_parentage. Any lanes between this lane and the + # library root should not be included at all. + site_root_lane = ancestor + break + + if ancestor != lane or include_lane: + # A lane may appear in its own breadcrumbs + # only if include_lane is True. + usable_parentage.append(ancestor) + + annotator = self.annotator + if lane == site_root_lane or ( + site_root_lane is None + and annotator.lane_url(lane) == annotator.default_lane_url() + ): + # There are no extra breadcrumbs: either we are at the + # site root, or we are at a lane that is the root for a + # specific patron type. + return + + breadcrumbs = [] + + # Add root link. This is either the link to the site root + # or to the root lane for some patron type. + if site_root_lane is None: + root_url = annotator.default_lane_url() + root_title = annotator.top_level_title() + else: + root_url = annotator.lane_url(site_root_lane) + root_title = site_root_lane.display_name + root_link = Link(href=root_url, title=root_title) + breadcrumbs.append(root_link) + + # Add entrypoint selection link + if entrypoint: + breadcrumbs.append( + Link( + href=root_url + entrypoint_query, + title=entrypoint.INTERNAL_NAME, + ) + ) + + # Add links for all usable lanes between `lane` and `site_root_lane` + # (possibly including `lane` itself). + for ancestor in reversed(usable_parentage): + lane_url = annotator.lane_url(ancestor) + if lane_url == root_url: + # Root lane for the entire site. + break + + breadcrumbs.append( + Link( + href=lane_url + entrypoint_query, + title=ancestor.display_name, + ) + ) + + # Append the breadcrumbs to the feed. + self._feed.breadcrumbs = breadcrumbs + + def show_current_entrypoint(self, entrypoint: Optional[Type[EntryPoint]]) -> None: + """Annotate this given feed with a simplified:entryPoint + attribute pointing to the current entrypoint's TYPE_URI. + + This gives clients an overall picture of the type of works in + the feed, and a way to distinguish between one EntryPoint + and another. + + :param entrypoint: An EntryPoint. + """ + if not entrypoint: + return None + + if not entrypoint.URI: + return None + self._feed.entrypoint = entrypoint.URI + + @classmethod + def error_message( + cls, identifier: Identifier, error_status: int, error_message: str + ) -> OPDSMessage: + """Turn an error result into an OPDSMessage suitable for + adding to a feed. 
+ """ + return OPDSMessage(identifier.urn, error_status, error_message) + + # All feed generating classmethods below + # Each classmethod creates a different kind of feed + + @classmethod + def page( + cls, + _db: Session, + title: str, + url: str, + worklist: WorkList, + annotator: CirculationManagerAnnotator, + facets: Optional[FacetsWithEntryPoint], + pagination: Optional[Pagination], + search_engine: Optional[ExternalSearchIndex], + ) -> OPDSAcquisitionFeed: + works = worklist.works( + _db, facets=facets, pagination=pagination, search_engine=search_engine + ) + """A basic paged feed""" + # "works" MAY be a generator, we want a list + if not isinstance(works, list): + works = list(works) + + feed = OPDSAcquisitionFeed( + title, url, works, annotator, facets=facets, pagination=pagination + ) + + feed.generate_feed() + feed.add_pagination_links(works, worklist) + feed.add_facet_links(worklist) + + if isinstance(facets, FacetsWithEntryPoint): + feed.add_breadcrumb_links(worklist, facets.entrypoint) + + return feed + + @classmethod + def active_loans_for( + cls, + circulation: Optional[CirculationAPI], + patron: Patron, + annotator: Optional[LibraryAnnotator] = None, + **response_kwargs: Any, + ) -> OPDSAcquisitionFeed: + """A patron specific feed that only contains the loans and holds of a patron""" + db = Session.object_session(patron) + active_loans_by_work = {} + for loan in patron.loans: + work = loan.work + if work: + active_loans_by_work[work] = loan + + # There might be multiple holds for the same work so we gather all of them and choose the best one. + all_holds_by_work: Dict[Work, List[Hold]] = {} + for hold in patron.holds: + work = hold.work + if not work: + continue + + if work not in all_holds_by_work: + all_holds_by_work[work] = [] + + all_holds_by_work[work].append(hold) + + active_holds_by_work: Dict[Work, Hold] = {} + for work, list_of_holds in all_holds_by_work.items(): + active_holds_by_work[ + work + ] = LibraryLoanAndHoldAnnotator.choose_best_hold_for_work(list_of_holds) + + if not annotator: + annotator = LibraryLoanAndHoldAnnotator( + circulation, None, patron.library, patron + ) + + annotator.active_holds_by_work = active_holds_by_work + annotator.active_loans_by_work = active_loans_by_work + url = annotator.url_for( + "active_loans", library_short_name=patron.library.short_name, _external=True + ) + works = patron.works_on_loan_or_on_hold() + + feed = OPDSAcquisitionFeed("Active loans and holds", url, works, annotator) + feed.generate_feed() + return feed + + @classmethod + def single_entry_loans_feed( + cls, + circulation: Any, + item: LicensePool | Loan, + annotator: LibraryAnnotator | None = None, + fulfillment: FulfillmentInfo | None = None, + **response_kwargs: Any, + ) -> OPDSEntryResponse | ProblemDetail | None: + """A single entry as a standalone feed specific to a patron""" + if not item: + raise ValueError("Argument 'item' must be non-empty") + + if isinstance(item, LicensePool): + license_pool = item + library = circulation.library + elif isinstance(item, (Loan, Hold)): + license_pool = item.license_pool + library = item.library + else: + raise ValueError( + "Argument 'item' must be an instance of {}, {}, or {} classes".format( + Loan, Hold, LicensePool + ) + ) + + if not annotator: + annotator = LibraryLoanAndHoldAnnotator(circulation, None, library) + + log = logging.getLogger(cls.__name__) + + # Sometimes the pool or work may be None + # In those cases we have to protect against the exceptions + try: + work = license_pool.work or 
license_pool.presentation_edition.work + except AttributeError as ex: + log.error(f"Error retrieving a Work Object {ex}") + log.error( + f"Error Data: {license_pool} | {license_pool and license_pool.presentation_edition}" + ) + return NOT_FOUND_ON_REMOTE + + if not work: + return NOT_FOUND_ON_REMOTE + + _db = Session.object_session(item) + active_loans_by_work: Any = {} + active_holds_by_work: Any = {} + active_fulfillments_by_work = {} + item_dictionary = None + + if isinstance(item, Loan): + item_dictionary = active_loans_by_work + elif isinstance(item, Hold): + item_dictionary = active_holds_by_work + + if item_dictionary is not None: + item_dictionary[work] = item + + if fulfillment: + active_fulfillments_by_work[work] = fulfillment + + annotator.active_loans_by_work = active_loans_by_work + annotator.active_holds_by_work = active_holds_by_work + annotator.active_fulfillments_by_work = active_fulfillments_by_work + identifier = license_pool.identifier + + entry = cls.single_entry(work, annotator, even_if_no_license_pool=True) + + if isinstance(entry, WorkEntry) and entry.computed: + return cls.entry_as_response(entry, **response_kwargs) + elif isinstance(entry, OPDSMessage): + return cls.entry_as_response(entry, max_age=0) + + return None + + @classmethod + def single_entry( + cls, + work: Work | Edition | None, + annotator: Annotator, + even_if_no_license_pool: bool = False, + ) -> Optional[WorkEntry | OPDSMessage]: + """Turn a work into an annotated work entry for an acquisition feed.""" + identifier = None + _work: Work + if isinstance(work, Edition): + active_edition = work + identifier = active_edition.primary_identifier + active_license_pool = None + _work = active_edition.work # We always need a work for an entry + else: + if not work: + # We have a license pool but no work. Most likely we don't have + # metadata for this work yet. + return None + _work = work + active_license_pool = annotator.active_licensepool_for(work) + if active_license_pool: + identifier = active_license_pool.identifier + active_edition = active_license_pool.presentation_edition + elif work.presentation_edition: + active_edition = work.presentation_edition + identifier = active_edition.primary_identifier + + # There's no reason to present a book that has no active license pool. 
+ if not identifier: + logging.warning("%r HAS NO IDENTIFIER", work) + return None + + if not active_license_pool and not even_if_no_license_pool: + logging.warning("NO ACTIVE LICENSE POOL FOR %r", work) + return cls.error_message( + identifier, + 403, + "I've heard about this work but have no active licenses for it.", + ) + + if not active_edition: + logging.warning("NO ACTIVE EDITION FOR %r", active_license_pool) + return cls.error_message( + identifier, + 403, + "I've heard about this work but have no metadata for it.", + ) + + try: + return cls._create_entry( + _work, active_license_pool, active_edition, identifier, annotator + ) + except UnfulfillableWork as e: + logging.info( + "Work %r is not fulfillable, refusing to create an .", + work, + ) + return cls.error_message( + identifier, + 403, + "I know about this work but can offer no way of fulfilling it.", + ) + except Exception as e: + logging.error("Exception generating OPDS entry for %r", work, exc_info=e) + return None + + @classmethod + def groups( + cls, + _db: Session, + title: str, + url: str, + worklist: WorkList, + annotator: LibraryAnnotator, + pagination: Optional[Pagination] = None, + facets: Optional[FacetsWithEntryPoint] = None, + search_engine: Optional[ExternalSearchIndex] = None, + search_debug: bool = False, + ) -> OPDSAcquisitionFeed: + """Internal method called by groups() when a grouped feed + must be regenerated. + """ + + # Try to get a set of (Work, WorkList) 2-tuples + # to make a normal grouped feed. + works_and_lanes = [ + x + for x in worklist.groups( + _db=_db, + pagination=pagination, + facets=facets, + search_engine=search_engine, + debug=search_debug, + ) + ] + # Make a typical grouped feed. + all_works = [] + for work, sublane in works_and_lanes: + if sublane == worklist: + # We are looking at the groups feed for (e.g.) + # "Science Fiction", and we're seeing a book + # that is featured within "Science Fiction" itself + # rather than one of the sublanes. + # + # We want to assign this work to a group called "All + # Science Fiction" and point its 'group URI' to + # the linear feed of the "Science Fiction" lane + # (as opposed to the groups feed, which is where we + # are now). + v = dict( + lane=worklist, + label=worklist.display_name_for_all, + link_to_list_feed=True, + ) + else: + # We are looking at the groups feed for (e.g.) + # "Science Fiction", and we're seeing a book + # that is featured within one of its sublanes, + # such as "Space Opera". + # + # We want to assign this work to a group derived + # from the sublane. + v = dict(lane=sublane) + + annotator.lanes_by_work[work].append(v) + all_works.append(work) + + feed = OPDSAcquisitionFeed( + title, url, all_works, annotator, facets=facets, pagination=pagination + ) + feed.generate_feed() + + # Regardless of whether or not the entries in feed can be + # grouped together, we want to apply certain feed-level + # annotations. + + # A grouped feed may link to alternate entry points into + # the data. + if facets: + entrypoints = facets.selectable_entrypoints(worklist) + if entrypoints: + + def make_link(ep: Type[EntryPoint]) -> str: + return annotator.groups_url( + worklist, facets=facets.navigate(entrypoint=ep) + ) + + cls.add_entrypoint_links( + feed._feed, make_link, entrypoints, facets.entrypoint + ) + + # A grouped feed may have breadcrumb links. 
+ feed.add_breadcrumb_links(worklist, facets.entrypoint) + + return feed + + @classmethod + def search( + cls, + _db: Session, + title: str, + url: str, + lane: WorkList, + search_engine: ExternalSearchIndex, + query: str, + annotator: LibraryAnnotator, + pagination: Optional[Pagination] = None, + facets: Optional[FacetsWithEntryPoint] = None, + **response_kwargs: Any, + ) -> OPDSAcquisitionFeed | ProblemDetail: + """Run a search against the given search engine and return + the results as a Flask Response. + + :param _db: A database connection + :param title: The title of the resulting OPDS feed. + :param url: The URL from which the feed will be served. + :param search_engine: An ExternalSearchIndex. + :param query: The search query + :param pagination: A Pagination + :param facets: A Facets + :param annotator: An Annotator + :param response_kwargs: Keyword arguments to pass into the OPDSFeedResponse + constructor. + :return: An ODPSFeedResponse + """ + facets = facets or SearchFacets() + pagination = pagination or Pagination.default() + + try: + results = lane.search( + _db, query, search_engine, pagination=pagination, facets=facets + ) + except QueryParseException as e: + return INVALID_INPUT.detailed(e.detail) + + feed = OPDSAcquisitionFeed( + title, url, results, annotator, facets=facets, pagination=pagination + ) + feed.generate_feed() + feed.add_link( + annotator.default_lane_url(), rel="start", title=annotator.top_level_title() + ) + + # A feed of search results may link to alternate entry points + # into those results. + entrypoints = facets.selectable_entrypoints(lane) + if entrypoints: + + def make_link(ep: Type[EntryPoint]) -> str: + return annotator.search_url( + lane, query, pagination=None, facets=facets.navigate(entrypoint=ep) + ) + + cls.add_entrypoint_links( + feed._feed, + make_link, + entrypoints, + facets.entrypoint, + ) + + feed.add_pagination_links(results, lane) + + # Add "up" link. + feed.add_link( + annotator.lane_url(lane), + rel="up", + title=str(lane.display_name), + ) + + # We do not add breadcrumbs to this feed since you're not + # technically searching the this lane; you are searching the + # library's entire collection, using _some_ of the constraints + # imposed by this lane (notably language and audience). + return feed + + @classmethod + def from_query( + cls, + query: Query[Work], + _db: Session, + feed_name: str, + url: str, + pagination: Pagination, + url_fn: Callable[[int], str], + annotator: CirculationManagerAnnotator, + ) -> OPDSAcquisitionFeed: + """Build a feed representing one page of a given list. Currently used for + creating an OPDS feed for a custom list and not cached. + + TODO: This is used by the circulation manager admin interface. + Investigate changing the code that uses this to use the search + index -- this is inefficient and creates an alternate code path + that may harbor bugs. + + TODO: This cannot currently return OPDSFeedResponse because the + admin interface modifies the feed after it's generated. 
+ + """ + page_of_works = pagination.modify_database_query(_db, query) + pagination.total_size = int(query.count()) + + feed = OPDSAcquisitionFeed( + feed_name, url, page_of_works, annotator, pagination=pagination + ) + feed.generate_feed(annotate=False) + + if pagination.total_size > 0 and pagination.has_next_page: + feed.add_link(url_fn(pagination.next_page.offset), rel="next") + if pagination.offset > 0: + feed.add_link(url_fn(pagination.first_page.offset), rel="first") + if pagination.previous_page: + feed.add_link( + url_fn(pagination.previous_page.offset), + rel="previous", + ) + + return feed + + +class LookupAcquisitionFeed(OPDSAcquisitionFeed): + """Used when the user has requested a lookup of a specific identifier, + which may be different from the identifier used by the Work's + default LicensePool. + """ + + @classmethod + def single_entry(cls, work: Tuple[Identifier, Work], annotator: Annotator) -> WorkEntry | OPDSMessage: # type: ignore[override] + # This comes in as a tuple, which deviates from the typical behaviour + identifier, _work = work + + active_licensepool: Optional[LicensePool] + if identifier.licensed_through: + active_licensepool = identifier.licensed_through[0] + else: + # Use the default active LicensePool for the Work. + active_licensepool = annotator.active_licensepool_for(_work) + + error_status = error_message = None + if not active_licensepool: + error_status = 404 + error_message = "Identifier not found in collection" + elif identifier.work != _work: + error_status = 500 + error_message = ( + 'I tried to generate an OPDS entry for the identifier "%s" using a Work not associated with that identifier.' + % identifier.urn + ) + + if error_status: + return cls.error_message(identifier, error_status, error_message or "") + + if active_licensepool: + edition = active_licensepool.presentation_edition + else: + edition = _work.presentation_edition + try: + return cls._create_entry( + _work, active_licensepool, edition, identifier, annotator + ) + except UnfulfillableWork as e: + logging.info( + "Work %r is not fulfillable, refusing to create an .", _work + ) + return cls.error_message( + identifier, + 403, + "I know about this work but can offer no way of fulfilling it.", + ) diff --git a/core/feed/admin.py b/core/feed/admin.py new file mode 100644 index 0000000000..a4536fa18e --- /dev/null +++ b/core/feed/admin.py @@ -0,0 +1,70 @@ +from typing import Optional + +from sqlalchemy import and_ +from sqlalchemy.orm import Session +from typing_extensions import Self + +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.admin import AdminAnnotator +from core.lane import Pagination +from core.model.licensing import LicensePool + + +class AdminFeed(OPDSAcquisitionFeed): + @classmethod + def suppressed( + cls, + _db: Session, + title: str, + url: str, + annotator: AdminAnnotator, + pagination: Optional[Pagination] = None, + ) -> Self: + _pagination = pagination or Pagination.default() + + q = ( + _db.query(LicensePool) + .filter( + and_( + LicensePool.suppressed == True, + LicensePool.superceded == False, + ) + ) + .order_by(LicensePool.id) + ) + pools = _pagination.modify_database_query(_db, q).all() + + works = [pool.work for pool in pools] + feed = cls(title, url, works, annotator, pagination=_pagination) + feed.generate_feed() + + # Render a 'start' link + top_level_title = annotator.top_level_title() + start_uri = annotator.groups_url(None) + + feed.add_link(start_uri, rel="start", title=top_level_title) + + # Render an 'up' link, same as 
the 'start' link to indicate top-level feed + feed.add_link(start_uri, rel="up", title=top_level_title) + + if len(works) > 0: + # There are works in this list. Add a 'next' link. + feed.add_link( + href=annotator.suppressed_url(_pagination.next_page), + rel="next", + ) + + if _pagination.offset > 0: + feed.add_link( + annotator.suppressed_url(_pagination.first_page), + rel="first", + ) + + previous_page = _pagination.previous_page + if previous_page: + feed.add_link( + annotator.suppressed_url(previous_page), + rel="previous", + ) + + return feed diff --git a/core/feed/annotator/admin.py b/core/feed/annotator/admin.py new file mode 100644 index 0000000000..193b8e70f2 --- /dev/null +++ b/core/feed/annotator/admin.py @@ -0,0 +1,105 @@ +from datetime import datetime +from typing import Optional + +from api.circulation import CirculationAPI +from core.feed.annotator.circulation import LibraryAnnotator +from core.feed.annotator.verbose import VerboseAnnotator +from core.feed.types import FeedData, Link, WorkEntry +from core.lane import Pagination +from core.mirror import MirrorUploader +from core.model import DataSource +from core.model.configuration import ExternalIntegrationLink +from core.model.library import Library + + +class AdminAnnotator(LibraryAnnotator): + def __init__(self, circulation: Optional[CirculationAPI], library: Library) -> None: + super().__init__(circulation, None, library) + + def annotate_work_entry( + self, entry: WorkEntry, updated: Optional[datetime] = None + ) -> None: + super().annotate_work_entry(entry) + if not entry.computed: + return + VerboseAnnotator.add_ratings(entry) + + identifier = entry.identifier + active_license_pool = entry.license_pool + + # Find staff rating and add a tag for it. + for measurement in identifier.measurements: + if ( + measurement.data_source.name == DataSource.LIBRARY_STAFF # type: ignore[attr-defined] + and measurement.is_most_recent + and measurement.value is not None + ): + entry.computed.ratings.append( + self.rating(measurement.quantity_measured, measurement.value) + ) + + if active_license_pool and active_license_pool.suppressed: + entry.computed.other_links.append( + Link( + href=self.url_for( + "unsuppress", + identifier_type=identifier.type, + identifier=identifier.identifier, + _external=True, + ), + rel="http://librarysimplified.org/terms/rel/restore", + ) + ) + else: + entry.computed.other_links.append( + Link( + href=self.url_for( + "suppress", + identifier_type=identifier.type, + identifier=identifier.identifier, + _external=True, + ), + rel="http://librarysimplified.org/terms/rel/hide", + ) + ) + + entry.computed.other_links.append( + Link( + href=self.url_for( + "edit", + identifier_type=identifier.type, + identifier=identifier.identifier, + _external=True, + ), + rel="edit", + ) + ) + + # If there is a storage integration for the collection, changing the cover is allowed. + if active_license_pool: + mirror = MirrorUploader.for_collection( + active_license_pool.collection, ExternalIntegrationLink.COVERS + ) + if mirror: + entry.computed.other_links.append( + Link( + href=self.url_for( + "work_change_book_cover", + identifier_type=identifier.type, + identifier=identifier.identifier, + _external=True, + ), + rel="http://librarysimplified.org/terms/rel/change_cover", + ) + ) + + def suppressed_url(self, pagination: Pagination) -> str: + kwargs = dict(list(pagination.items())) + return self.url_for("suppressed", _external=True, **kwargs) + + def annotate_feed(self, feed: FeedData) -> None: + # Add a 'search' link. 
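+        # (The link added below is typed as an OpenSearch description
+        # document, which is how OPDS clients discover the search template
+        # for this feed.)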
+ search_url = self.url_for("lane_search", languages=None, _external=True) + feed.add_link( + search_url, rel="search", type="application/opensearchdescription+xml" + ) diff --git a/core/feed/annotator/base.py b/core/feed/annotator/base.py new file mode 100644 index 0000000000..4ce50ea4fc --- /dev/null +++ b/core/feed/annotator/base.py @@ -0,0 +1,389 @@ +from __future__ import annotations + +import datetime +import logging +from collections import defaultdict +from decimal import Decimal +from typing import Any, Dict, List, Optional, Set, Tuple +from urllib.parse import quote + +from sqlalchemy.orm import Session, joinedload + +from core.classifier import Classifier +from core.feed.types import ( + Author, + FeedData, + FeedEntryType, + Link, + WorkEntry, + WorkEntryData, +) +from core.feed.util import strftime +from core.model.classification import Subject +from core.model.contributor import Contribution, Contributor +from core.model.datasource import DataSource +from core.model.edition import Edition +from core.model.library import Library +from core.model.licensing import LicensePool +from core.model.resource import Hyperlink +from core.model.work import Work +from core.util.opds_writer import AtomFeed, OPDSFeed + + +class ToFeedEntry: + @classmethod + def authors(cls, edition: Edition) -> Dict[str, List[Author]]: + """Create one or more author (and contributor) objects for the given + Work. + + :param edition: The Edition to use as a reference + for bibliographic information, including the list of + Contributions. + :return: A dict with "authors" and "contributors" as a list of Author objects + """ + authors: Dict[str, List[Author]] = {"authors": [], "contributors": []} + state: Dict[Optional[str], Set[str]] = defaultdict(set) + for contribution in edition.contributions: + info = cls.contributor(contribution, state) + if info is None: + # contributor_tag decided that this contribution doesn't + # need a tag. + continue + key, tag = info + authors[f"{key}s"].append(tag) + + if authors["authors"]: + return authors + + # We have no author information, so we add empty tag + # to avoid the implication (per RFC 4287 4.2.1) that this book + # was written by whoever wrote the OPDS feed. + authors["authors"].append(Author(name="")) + return authors + + @classmethod + def contributor( + cls, contribution: Contribution, state: Dict[Optional[str], Set[str]] + ) -> Optional[Tuple[str, Author]]: + """Build an author (or contributor) object for a Contribution. + + :param contribution: A Contribution. + :param state: A defaultdict of sets, which may be used to keep + track of what happened during previous calls to + contributor for a given Work. + :return: An Author object, or None if creating an Author for this Contribution + would be redundant or of low value. + + """ + contributor = contribution.contributor + role = contribution.role + current_role: str + + if role in Contributor.AUTHOR_ROLES: + current_role = "author" + marc_role = None + elif role is not None: + current_role = "contributor" + marc_role = Contributor.MARC_ROLE_CODES.get(role) + if not marc_role: + # This contribution is not one that we publish as + # a tag. Skip it. + return None + else: + return None + + name = contributor.display_name or contributor.sort_name + name_key = name.lower() + if name_key in state[marc_role]: + # We've already credited this person with this + # MARC role. Returning a tag would be redundant. + return None + + # Okay, we're creating a tag. 
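+        # (For reference: `state` maps a MARC role code -- None for primary
+        # authors -- to the set of lower-cased names already credited with
+        # that role, which is what the duplicate check above relies on.)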
+ properties: Dict[str, Any] = dict() + if marc_role: + properties["role"] = marc_role + entry = Author(name=name, **properties) + + # Record the fact that we credited this person with this role, + # so that we don't do it again on a subsequent call. + state[marc_role].add(name_key) + + return current_role, entry + + @classmethod + def series( + cls, series_name: Optional[str], series_position: Optional[int] | Optional[str] + ) -> Optional[FeedEntryType]: + """Generate a FeedEntryType object for the given name and position.""" + if not series_name: + return None + series_details = dict() + series_details["name"] = series_name + if series_position != None: + series_details["position"] = str(series_position) + series = FeedEntryType.create(**series_details) + return series + + @classmethod + def rating(cls, type_uri: Optional[str], value: float | Decimal) -> FeedEntryType: + """Generate a FeedEntryType object for the given type and value.""" + entry = FeedEntryType.create( + **dict(ratingValue="%.4f" % value, additionalType=type_uri) + ) + return entry + + @classmethod + def samples(cls, edition: Optional[Edition]) -> list[Hyperlink]: + if not edition: + return [] + _db = Session.object_session(edition) + links = ( + _db.query(Hyperlink) + .filter( + Hyperlink.rel == Hyperlink.SAMPLE, + Hyperlink.identifier_id == edition.primary_identifier_id, + ) + .options(joinedload(Hyperlink.resource)) + .all() + ) + return links + + @classmethod + def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: + """Return all relevant classifications of this work. + + :return: A dictionary mapping 'scheme' URLs to dictionaries of + attribute-value pairs. + + Notable attributes: 'term', 'label', 'ratingValue' + """ + if not work: + return {} + + categories = {} + + fiction_term = None + if work.fiction == True: + fiction_term = "Fiction" + elif work.fiction == False: + fiction_term = "Nonfiction" + if fiction_term: + fiction_scheme = Subject.SIMPLIFIED_FICTION_STATUS + categories[fiction_scheme] = [ + dict(term=fiction_scheme + fiction_term, label=fiction_term) + ] + + simplified_genres = [] + for wg in work.work_genres: + simplified_genres.append(wg.genre.name) # type: ignore[attr-defined] + + if simplified_genres: + categories[Subject.SIMPLIFIED_GENRE] = [ + dict(term=Subject.SIMPLIFIED_GENRE + quote(x), label=x) + for x in simplified_genres + ] + + # Add the appeals as a category of schema + # http://librarysimplified.org/terms/appeal + schema_url = AtomFeed.SIMPLIFIED_NS + "appeals/" + appeals: List[Dict[str, Any]] = [] + categories[schema_url] = appeals + for name, value in ( + (Work.CHARACTER_APPEAL, work.appeal_character), + (Work.LANGUAGE_APPEAL, work.appeal_language), + (Work.SETTING_APPEAL, work.appeal_setting), + (Work.STORY_APPEAL, work.appeal_story), + ): + if value: + appeal: Dict[str, Any] = dict(term=schema_url + name, label=name) + weight_field = "ratingValue" + appeal[weight_field] = value + appeals.append(appeal) + + # Add the audience as a category of schema + # http://schema.org/audience + if work.audience: + audience_uri = "http://schema.org/audience" + categories[audience_uri] = [dict(term=work.audience, label=work.audience)] + + # Any book can have a target age, but the target age + # is only relevant for childrens' and YA books. 
+ audiences_with_target_age = ( + Classifier.AUDIENCE_CHILDREN, + Classifier.AUDIENCE_YOUNG_ADULT, + ) + if work.target_age and work.audience in audiences_with_target_age: + uri = Subject.uri_lookup[Subject.AGE_RANGE] + target_age = work.target_age_string + if target_age: + categories[uri] = [dict(term=target_age, label=target_age)] + + return categories + + @classmethod + def content(cls, work: Optional[Work]) -> str: + """Return an HTML summary of this work.""" + summary = "" + if work: + if work.summary_text is not None: + summary = work.summary_text + elif ( + work.summary + and work.summary.representation + and work.summary.representation.content + ): + content = work.summary.representation.content + if isinstance(content, bytes): + content = content.decode("utf-8") + work.summary_text = content + summary = work.summary_text + return summary + + +class Annotator(ToFeedEntry): + def annotate_work_entry( + self, entry: WorkEntry, updated: Optional[datetime.datetime] = None + ) -> None: + """ + Any data that the serializer must consider while generating an "entry" + must be populated in this method. + The serializer may not use all the data populated based on the protocol it is bound to. + """ + if entry.computed: + return + + work = entry.work + edition = entry.edition + identifier = entry.identifier + pool = entry.license_pool + computed = WorkEntryData() + + image_links = [] + other_links = [] + for rel, url in [ + (Hyperlink.IMAGE, work.cover_full_url), + (Hyperlink.THUMBNAIL_IMAGE, work.cover_thumbnail_url), + ]: + if not url: + continue + image_type = "image/png" + if url.endswith(".jpeg") or url.endswith(".jpg"): + image_type = "image/jpeg" + elif url.endswith(".gif"): + image_type = "image/gif" + image_links.append(Link(rel=rel, href=url, type=image_type)) + + samples = self.samples(edition) + for sample in samples: + other_links.append( + Link( + rel=Hyperlink.CLIENT_SAMPLE, + href=sample.resource.url, + type=sample.resource.representation.media_type, + ) + ) + + if edition.medium: + additional_type = Edition.medium_to_additional_type.get(str(edition.medium)) + if not additional_type: + logging.warning("No additionalType for medium %s", edition.medium) + computed.additionalType = additional_type + + computed.title = FeedEntryType(text=(edition.title or OPDSFeed.NO_TITLE)) + + if edition.subtitle: + computed.subtitle = FeedEntryType(text=edition.subtitle) + if edition.sort_title: + computed.sort_title = FeedEntryType(text=edition.sort_title) + + author_entries = self.authors(edition) + computed.contributors = author_entries.get("contributors", []) + computed.authors = author_entries.get("authors", []) + + if edition.series: + computed.series = self.series(edition.series, edition.series_position) + + content = self.content(work) + if content: + computed.summary = FeedEntryType(text=content) + computed.summary.add_attributes(dict(type="html")) + + computed.pwid = edition.permanent_work_id + + categories_by_scheme = self.categories(work) + category_tags = [] + for scheme, categories in list(categories_by_scheme.items()): + for category in categories: + category = dict( + list(map(str, (k, v))) for k, v in list(category.items()) + ) + category_tag = FeedEntryType.create(scheme=scheme, **category) + category_tags.append(category_tag) + computed.categories = category_tags + + if edition.language_code: + computed.language = FeedEntryType(text=edition.language_code) + + if edition.publisher: + computed.publisher = FeedEntryType(text=edition.publisher) + + if edition.imprint: + 
computed.imprint = FeedEntryType(text=edition.imprint) + + if edition.issued or edition.published: + computed.issued = edition.issued or edition.published + + if identifier: + computed.identifier = identifier.urn + + if pool: + data_source = pool.data_source.name + if data_source != DataSource.INTERNAL_PROCESSING: + # INTERNAL_PROCESSING indicates a dummy LicensePool + # created as a stand-in, e.g. by the metadata wrangler. + # This component is not actually distributing the book, + # so it should not have a bibframe:distribution tag. + computed.distribution = FeedEntryType() + computed.distribution.add_attributes(dict(provider_name=data_source)) + + # We use Atom 'published' for the date the book first became + # available to people using this application. + avail = pool.availability_time + if avail: + today = datetime.date.today() + if isinstance(avail, datetime.datetime): + avail_date = avail.date() + else: + avail_date = avail # type: ignore[unreachable] + if avail_date <= today: # Avoid obviously wrong values. + computed.published = FeedEntryType(text=strftime(avail_date)) + + if not updated and entry.work.last_update_time: + # NOTE: This is a default that works in most cases. When + # ordering Opensearch results by last update time, + # `work` is a WorkSearchResult object containing a more + # reliable value that you can use if you want. + updated = entry.work.last_update_time + if updated: + computed.updated = FeedEntryType(text=strftime(updated)) + + computed.image_links = image_links + computed.other_links = other_links + entry.computed = computed + + def annotate_feed(self, feed: FeedData) -> None: + """Any additional metadata or links that should be added to the feed (not each entry) + should be added to the FeedData object in this method. + """ + + def active_licensepool_for( + self, work: Work, library: Library | None = None + ) -> LicensePool | None: + """Which license pool would be/has been used to issue a license for + this work? 
+ """ + if not work: + return None + + return work.active_license_pool(library=library) diff --git a/core/feed/annotator/circulation.py b/core/feed/annotator/circulation.py new file mode 100644 index 0000000000..1538f253b4 --- /dev/null +++ b/core/feed/annotator/circulation.py @@ -0,0 +1,1564 @@ +from __future__ import annotations + +import copy +import datetime +import logging +import urllib.error +import urllib.parse +import urllib.request +from collections import defaultdict +from typing import Any, Dict, List, Optional, Tuple + +from flask import url_for +from sqlalchemy.orm import Session + +from api.adobe_vendor_id import AuthdataUtility +from api.annotations import AnnotationWriter +from api.circulation import BaseCirculationAPI, CirculationAPI +from api.config import Configuration +from api.lanes import DynamicLane +from api.novelist import NoveListAPI +from core.analytics import Analytics +from core.classifier import Classifier +from core.config import CannotLoadConfiguration +from core.entrypoint import EverythingEntryPoint +from core.external_search import WorkSearchResult +from core.feed.annotator.base import Annotator +from core.feed.types import ( + Acquisition, + FeedData, + FeedEntryType, + IndirectAcquisition, + Link, + WorkEntry, +) +from core.feed.util import strftime +from core.lane import Facets, FacetsWithEntryPoint, Lane, Pagination, WorkList +from core.lcp.credential import LCPCredentialFactory, LCPHashedPassphrase +from core.lcp.exceptions import LCPError +from core.model.circulationevent import CirculationEvent +from core.model.collection import Collection +from core.model.edition import Edition +from core.model.formats import FormatPriorities +from core.model.identifier import Identifier +from core.model.integration import IntegrationConfiguration +from core.model.library import Library +from core.model.licensing import ( + DeliveryMechanism, + LicensePool, + LicensePoolDeliveryMechanism, +) +from core.model.patron import Hold, Loan, Patron +from core.model.work import Work +from core.opds import UnfulfillableWork +from core.util.datetime_helpers import from_timestamp +from core.util.opds_writer import OPDSFeed + + +class AcquisitionHelper: + @classmethod + def license_tags( + cls, + license_pool: Optional[LicensePool], + loan: Optional[Loan], + hold: Optional[Hold], + ) -> Optional[Dict[str, Any]]: + acquisition = {} + # Generate a list of licensing tags. These should be inserted + # into a tag. 
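+        # The availability statuses produced below are "available", "ready"
+        # (a hold that has been reserved for the patron), "reserved" (still
+        # waiting in the hold queue) and "unavailable".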
+ status = None + since = None + until = None + + if not license_pool: + return None + default_loan_period = default_reservation_period = None + collection = license_pool.collection + obj: Loan | Hold + if (loan or hold) and not license_pool.open_access: + if loan: + obj = loan + elif hold: + obj = hold + default_loan_period = datetime.timedelta( + collection.default_loan_period(obj.library) + ) + if loan: + status = "available" + since = loan.start + if not loan.license_pool.unlimited_access: + until = loan.until(default_loan_period) + elif hold: + if not license_pool.open_access: + default_reservation_period = datetime.timedelta( + collection.default_reservation_period + ) + until = hold.until(default_loan_period, default_reservation_period) + if hold.position == 0: + status = "ready" + since = None + else: + status = "reserved" + since = hold.start + elif ( + license_pool.open_access + or license_pool.unlimited_access + or (license_pool.licenses_available > 0 and license_pool.licenses_owned > 0) + ): + status = "available" + else: + status = "unavailable" + + acquisition["availability_status"] = status + if since: + acquisition["availability_since"] = strftime(since) + if until: + acquisition["availability_until"] = strftime(until) + + # Open-access pools do not need to display or . + if license_pool.open_access or license_pool.unlimited_access: + return acquisition + + total = license_pool.patrons_in_hold_queue or 0 + + if hold: + if hold.position is None: + # This shouldn't happen, but if it does, assume we're last + # in the list. + position = total + else: + position = hold.position + + if position > 0: + acquisition["holds_position"] = str(position) + if position > total: + # The patron's hold position appears larger than the total + # number of holds. This happens frequently because the + # number of holds and a given patron's hold position are + # updated by different processes. Don't propagate this + # appearance to the client. + total = position + elif position == 0 and total == 0: + # The book is reserved for this patron but they're not + # counted as having it on hold. This is the only case + # where we know that the total number of holds is + # *greater* than the hold position. + total = 1 + acquisition["holds_total"] = str(total) + + acquisition["copies_total"] = str(license_pool.licenses_owned or 0) + acquisition["copies_available"] = str(license_pool.licenses_available or 0) + + return acquisition + + @classmethod + def format_types(cls, delivery_mechanism: DeliveryMechanism) -> List[str]: + """Generate a set of types suitable for passing into + acquisition_link(). + """ + types = [] + # If this is a streaming book, you have to get an OPDS entry, then + # get a direct link to the streaming reader from that. + if delivery_mechanism.is_streaming: + types.append(OPDSFeed.ENTRY_TYPE) + + # If this is a DRM-encrypted book, you have to get through the DRM + # to get the goodies inside. + drm = delivery_mechanism.drm_scheme_media_type + if drm: + types.append(drm) + + # Finally, you get the goodies. 
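+        # (For example, an Adobe-DRM EPUB typically yields something like
+        # ["application/vnd.adobe.adept+xml", "application/epub+zip"], and a
+        # streaming title gets the OPDS entry type prepended to the list.)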
+ media = delivery_mechanism.content_type_media_type + if media: + types.append(media) + + return types + + +class CirculationManagerAnnotator(Annotator): + hidden_content_types: list[str] + + def __init__( + self, + lane: Optional[WorkList], + active_loans_by_work: Optional[Dict[Work, Loan]] = None, + active_holds_by_work: Optional[Dict[Work, Hold]] = None, + active_fulfillments_by_work: Optional[Dict[Work, Any]] = None, + hidden_content_types: Optional[List[str]] = None, + ) -> None: + if lane: + logger_name = "Circulation Manager Annotator for %s" % lane.display_name + else: + logger_name = "Circulation Manager Annotator" + self.log = logging.getLogger(logger_name) + self.lane = lane + self.active_loans_by_work = active_loans_by_work or {} + self.active_holds_by_work = active_holds_by_work or {} + self.active_fulfillments_by_work = active_fulfillments_by_work or {} + self.hidden_content_types = hidden_content_types or [] + self.facet_view = "feed" + + def is_work_entry_solo(self, work: Work) -> bool: + """Return a boolean value indicating whether the work's OPDS catalog entry is served by itself, + rather than as a part of the feed. + + :param work: Work object + :type work: core.model.work.Work + + :return: Boolean value indicating whether the work's OPDS catalog entry is served by itself, + rather than as a part of the feed + :rtype: bool + """ + return any( + work in x # type: ignore[operator] # Mypy gets confused with complex "in" statements + for x in ( + self.active_loans_by_work, + self.active_holds_by_work, + self.active_fulfillments_by_work, + ) + ) + + def _lane_identifier(self, lane: Optional[WorkList]) -> Optional[int]: + if isinstance(lane, Lane): + return lane.id + return None + + def top_level_title(self) -> str: + return "" + + def default_lane_url(self) -> str: + return self.feed_url(None) + + def lane_url(self, lane: WorkList) -> str: + return self.feed_url(lane) + + def url_for(self, *args: Any, **kwargs: Any) -> str: + return url_for(*args, **kwargs) + + def facet_url(self, facets: Facets) -> str: + return self.feed_url(self.lane, facets=facets, default_route=self.facet_view) + + def feed_url( + self, + lane: Optional[WorkList], + facets: Optional[FacetsWithEntryPoint] = None, + pagination: Optional[Pagination] = None, + default_route: str = "feed", + extra_kwargs: Optional[Dict[str, Any]] = None, + ) -> str: + if isinstance(lane, WorkList) and hasattr(lane, "url_arguments"): + route, kwargs = lane.url_arguments + else: + route = default_route + lane_identifier = self._lane_identifier(lane) + kwargs = dict(lane_identifier=lane_identifier) + if facets is not None: + kwargs.update(dict(list(facets.items()))) + if pagination is not None: + kwargs.update(dict(list(pagination.items()))) + if extra_kwargs: + kwargs.update(extra_kwargs) + return self.url_for(route, _external=True, **kwargs) + + def navigation_url(self, lane: Lane) -> str: + return self.url_for( + "navigation_feed", + lane_identifier=self._lane_identifier(lane), + library_short_name=lane.library.short_name, + _external=True, + ) + + def active_licensepool_for( + self, work: Work, library: Optional[Library] = None + ) -> Optional[LicensePool]: + loan = self.active_loans_by_work.get(work) or self.active_holds_by_work.get( + work + ) + if loan: + # The active license pool is the one associated with + # the loan/hold. + return loan.license_pool + else: + # There is no active loan. Use the default logic for + # determining the active license pool. 
+ return super().active_licensepool_for(work, library=library) + + @staticmethod + def _prioritized_formats_for_pool( + licensepool: LicensePool, + ) -> tuple[list[str], list[str]]: + collection: Collection = licensepool.collection + config: IntegrationConfiguration = collection.integration_configuration + + # Consult the configuration information for the integration configuration + # that underlies the license pool's collection. The configuration + # information _might_ contain a set of prioritized DRM schemes and + # content types. + prioritized_drm_schemes: list[str] = ( + config.settings_dict.get(FormatPriorities.PRIORITIZED_DRM_SCHEMES_KEY) or [] + ) + + content_setting: List[str] = ( + config.settings_dict.get(FormatPriorities.PRIORITIZED_CONTENT_TYPES_KEY) + or [] + ) + return prioritized_drm_schemes, content_setting + + @staticmethod + def _deprioritized_lcp_content( + licensepool: LicensePool, + ) -> bool: + collection: Collection = licensepool.collection + config: IntegrationConfiguration = collection.integration_configuration + + # Consult the configuration information for the integration configuration + # that underlies the license pool's collection. The configuration + # information _might_ contain a flag that indicates whether to deprioritize + # LCP content. By default, if no configuration value is specified, then + # the priority of LCP content will be left completely unchanged. + + _prioritize: bool = config.settings_dict.get( + FormatPriorities.DEPRIORITIZE_LCP_NON_EPUBS_KEY, False + ) + return _prioritize + + def visible_delivery_mechanisms( + self, licensepool: LicensePool | None + ) -> list[LicensePoolDeliveryMechanism]: + if not licensepool: + return [] + + ( + prioritized_drm_schemes, + prioritized_content_types, + ) = CirculationManagerAnnotator._prioritized_formats_for_pool(licensepool) + + return FormatPriorities( + prioritized_drm_schemes=prioritized_drm_schemes, + prioritized_content_types=prioritized_content_types, + hidden_content_types=self.hidden_content_types, + deprioritize_lcp_non_epubs=CirculationManagerAnnotator._deprioritized_lcp_content( + licensepool + ), + ).prioritize_for_pool(licensepool) + + def annotate_work_entry( + self, + entry: WorkEntry, + updated: Optional[datetime.datetime] = None, + ) -> None: + work = entry.work + identifier = entry.identifier or work.presentation_edition.primary_identifier + active_license_pool = entry.license_pool or self.active_licensepool_for(work) + # If OpenSearch included a more accurate last_update_time, + # use it instead of Work.last_update_time + updated = entry.work.last_update_time + if isinstance(work, WorkSearchResult): + # Opensearch puts this field in a list, but we've set it up + # so there will be at most one value. + last_updates = getattr(work._hit, "last_update", []) + if last_updates: + # last_update is seconds-since epoch; convert to UTC datetime. + updated = from_timestamp(last_updates[0]) + + # There's a chance that work.last_updated has been + # modified but the change hasn't made it to the search + # engine yet. Even then, we stick with the search + # engine value, because a sorted list is more + # important to the import process than an up-to-date + # 'last update' value. 
+ + super().annotate_work_entry(entry, updated=updated) + active_loan = self.active_loans_by_work.get(work) + active_hold = self.active_holds_by_work.get(work) + active_fulfillment = self.active_fulfillments_by_work.get(work) + + # Now we need to generate a tag for every delivery mechanism + # that has well-defined media types. + link_tags = self.acquisition_links( + active_license_pool, + active_loan, + active_hold, + active_fulfillment, + identifier, + ) + if entry.computed: + for tag in link_tags: + entry.computed.acquisition_links.append(tag) + + def acquisition_links( + self, + active_license_pool: Optional[LicensePool], + active_loan: Optional[Loan], + active_hold: Optional[Hold], + active_fulfillment: Optional[Any], + identifier: Identifier, + can_hold: bool = True, + can_revoke_hold: bool = True, + set_mechanism_at_borrow: bool = False, + direct_fulfillment_delivery_mechanisms: Optional[ + List[LicensePoolDeliveryMechanism] + ] = None, + add_open_access_links: bool = True, + ) -> List[Acquisition]: + """Generate a number of tags that enumerate all acquisition + methods. + + :param direct_fulfillment_delivery_mechanisms: A way to + fulfill each LicensePoolDeliveryMechanism in this list will be + presented as a link with + rel="http://opds-spec.org/acquisition/open-access", indicating + that it can be downloaded with no intermediate steps such as + authentication. + """ + can_borrow = False + can_fulfill = False + can_revoke = False + + if active_loan: + can_fulfill = True + can_revoke = True + elif active_hold: + # We display the borrow link even if the patron can't + # borrow the book right this minute. + can_borrow = True + + can_revoke = can_revoke_hold + elif active_fulfillment: + can_fulfill = True + can_revoke = True + else: + # The patron has no existing relationship with this + # work. Give them the opportunity to check out the work + # or put it on hold. + can_borrow = True + + # If there is something to be revoked for this book, + # add a link to revoke it. + revoke_links = [] + if active_license_pool and can_revoke: + revoke_links.append( + self.revoke_link(active_license_pool, active_loan, active_hold) + ) + + # Add next-step information for every useful delivery + # mechanism. + borrow_links = [] + if can_borrow: + # Borrowing a book gives you an OPDS entry that gives you + # fulfillment links for every visible delivery mechanism. + visible_mechanisms = self.visible_delivery_mechanisms(active_license_pool) + if set_mechanism_at_borrow and active_license_pool: + # The ebook distributor requires that the delivery + # mechanism be set at the point of checkout. This means + # a separate borrow link for each mechanism. + for mechanism in visible_mechanisms: + borrow_links.append( + self.borrow_link( + active_license_pool, mechanism, [mechanism], active_hold + ) + ) + elif active_license_pool: + # The ebook distributor does not require that the + # delivery mechanism be set at the point of + # checkout. This means a single borrow link with + # indirectAcquisition tags for every visible delivery + # mechanism. If a delivery mechanism must be set, it + # will be set at the point of fulfillment. + borrow_links.append( + self.borrow_link( + active_license_pool, None, visible_mechanisms, active_hold + ) + ) + + # Generate the licensing tags that tell you whether the book + # is available. 
+ for link in borrow_links: + if link is not None: + license_tags = AcquisitionHelper.license_tags( + active_license_pool, active_loan, active_hold + ) + if license_tags is not None: + link.add_attributes(license_tags) + + # Add links for fulfilling an active loan. + fulfill_links: List[Optional[Acquisition]] = [] + if can_fulfill: + if active_fulfillment: + # We're making an entry for a specific fulfill link. + type = active_fulfillment.content_type + url = active_fulfillment.content_link + rel = OPDSFeed.ACQUISITION_REL + link_tag = self.acquisition_link( + rel=rel, href=url, types=[type], active_loan=active_loan + ) + fulfill_links.append(link_tag) + + elif active_loan and active_loan.fulfillment and active_license_pool: + # The delivery mechanism for this loan has been + # set. There is one link for the delivery mechanism + # that was locked in, and links for any streaming + # delivery mechanisms. + # + # Since the delivery mechanism has already been locked in, + # we choose not to use visible_delivery_mechanisms -- + # they already chose it and they're stuck with it. + for lpdm in active_license_pool.delivery_mechanisms: + if ( + lpdm is active_loan.fulfillment + or lpdm.delivery_mechanism.is_streaming + ): + fulfill_links.append( + self.fulfill_link( + active_license_pool, + active_loan, + lpdm.delivery_mechanism, + ) + ) + elif active_license_pool is not None: + # The delivery mechanism for this loan has not been + # set. There is one fulfill link for every visible + # delivery mechanism. + for lpdm in self.visible_delivery_mechanisms(active_license_pool): + fulfill_links.append( + self.fulfill_link( + active_license_pool, active_loan, lpdm.delivery_mechanism + ) + ) + + open_access_links: List[Optional[Acquisition]] = [] + if ( + active_license_pool is not None + and direct_fulfillment_delivery_mechanisms is not None + ): + for lpdm in direct_fulfillment_delivery_mechanisms: + # These links use the OPDS 'open-access' link relation not + # because they are open access in the licensing sense, but + # because they are ways to download the book "without any + # requirement, which includes payment and registration." + # + # To avoid confusion, we explicitly add a dc:rights + # statement to each link explaining what the rights are to + # this title. + direct_fulfill = self.fulfill_link( + active_license_pool, + active_loan, + lpdm.delivery_mechanism, + rel=OPDSFeed.OPEN_ACCESS_REL, + ) + if direct_fulfill: + direct_fulfill.add_attributes(self.rights_attributes(lpdm)) + open_access_links.append(direct_fulfill) + + # If this is an open-access book, add an open-access link for + # every delivery mechanism with an associated resource. 
+ # But only if this library allows it, generally this is if + # a library has no patron authentication attached to it + if ( + add_open_access_links + and active_license_pool + and active_license_pool.open_access + ): + for lpdm in active_license_pool.delivery_mechanisms: + if lpdm.resource: + open_access_links.append( + self.open_access_link(active_license_pool, lpdm) + ) + + return [ + x + for x in borrow_links + fulfill_links + open_access_links + revoke_links + if x is not None + ] + + def revoke_link( + self, + active_license_pool: LicensePool, + active_loan: Optional[Loan], + active_hold: Optional[Hold], + ) -> Optional[Acquisition]: + return None + + def borrow_link( + self, + active_license_pool: LicensePool, + borrow_mechanism: Optional[LicensePoolDeliveryMechanism], + fulfillment_mechanisms: List[LicensePoolDeliveryMechanism], + active_hold: Optional[Hold] = None, + ) -> Optional[Acquisition]: + return None + + def fulfill_link( + self, + license_pool: LicensePool, + active_loan: Optional[Loan], + delivery_mechanism: DeliveryMechanism, + rel: str = OPDSFeed.ACQUISITION_REL, + ) -> Optional[Acquisition]: + return None + + def open_access_link( + self, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism + ) -> Acquisition: + kw: Dict[str, Any] = dict(rel=OPDSFeed.OPEN_ACCESS_REL, type="") + + # Start off assuming that the URL associated with the + # LicensePoolDeliveryMechanism's Resource is the URL we should + # send for download purposes. This will be the case unless we + # previously mirrored that URL somewhere else. + href = lpdm.resource.url + + rep = lpdm.resource.representation + if rep: + if rep.media_type: + kw["type"] = rep.media_type + href = rep.public_url + kw["href"] = href + kw.update(self.rights_attributes(lpdm)) + link = Acquisition(**kw) + link.availability_status = "available" + return link + + def rights_attributes( + self, lpdm: Optional[LicensePoolDeliveryMechanism] + ) -> Dict[str, str]: + """Create a dictionary of tag attributes that explain the + rights status of a LicensePoolDeliveryMechanism. + + If nothing is known, the dictionary will be empty. 
+ """ + if not lpdm or not lpdm.rights_status or not lpdm.rights_status.uri: + return {} + rights_attr = "rights" + return {rights_attr: lpdm.rights_status.uri} + + @classmethod + def acquisition_link( + cls, + rel: str, + href: str, + types: Optional[List[str]], + active_loan: Optional[Loan] = None, + ) -> Acquisition: + if types: + initial_type = types[0] + indirect_types = types[1:] + else: + initial_type = None + indirect_types = [] + link = Acquisition( + href=href, + rel=rel, + type=initial_type, + is_loan=True if active_loan else False, + ) + indirect = cls.indirect_acquisition(indirect_types) + + if indirect is not None: + link.indirect_acquisitions = [indirect] + return link + + @classmethod + def indirect_acquisition( + cls, indirect_types: List[str] + ) -> Optional[IndirectAcquisition]: + top_level_parent: Optional[IndirectAcquisition] = None + parent: Optional[IndirectAcquisition] = None + for t in indirect_types: + indirect_link = IndirectAcquisition(type=t) + if parent is not None: + parent.children = [indirect_link] + parent = indirect_link + if top_level_parent is None: + top_level_parent = indirect_link + return top_level_parent + + +class LibraryAnnotator(CirculationManagerAnnotator): + def __init__( + self, + circulation: Optional[CirculationAPI], + lane: Optional[WorkList], + library: Library, + patron: Optional[Patron] = None, + active_loans_by_work: Optional[Dict[Work, Loan]] = None, + active_holds_by_work: Optional[Dict[Work, Hold]] = None, + active_fulfillments_by_work: Optional[Dict[Work, Any]] = None, + facet_view: str = "feed", + top_level_title: str = "All Books", + library_identifies_patrons: bool = True, + facets: Optional[FacetsWithEntryPoint] = None, + ) -> None: + """Constructor. + + :param library_identifies_patrons: A boolean indicating + whether or not this library can distinguish between its + patrons. A library might not authenticate patrons at + all, or it might distinguish patrons from non-patrons in a + way that does not allow it to keep track of individuals. + + If this is false, links that imply the library can + distinguish between patrons will not be included. Depending + on the configured collections, some extra links may be + added, for direct acquisition of titles that would normally + require a loan. 
+ """ + super().__init__( + lane, + active_loans_by_work=active_loans_by_work, + active_holds_by_work=active_holds_by_work, + active_fulfillments_by_work=active_fulfillments_by_work, + hidden_content_types=library.settings.hidden_content_types, + ) + self.circulation = circulation + self.library: Library = library + self.patron = patron + self.lanes_by_work: Dict[Work, List[Any]] = defaultdict(list) + self.facet_view = facet_view + self._adobe_id_cache: Dict[str, Any] = {} + self._top_level_title = top_level_title + self.identifies_patrons = library_identifies_patrons + self.facets = facets or None + + def top_level_title(self) -> str: + return self._top_level_title + + def permalink_for(self, identifier: Identifier) -> Tuple[str, str]: + # TODO: Do not force OPDS types + url = self.url_for( + "permalink", + identifier_type=identifier.type, + identifier=identifier.identifier, + library_short_name=self.library.short_name, + _external=True, + ) + return url, OPDSFeed.ENTRY_TYPE + + def groups_url( + self, lane: Optional[WorkList], facets: Optional[FacetsWithEntryPoint] = None + ) -> str: + lane_identifier = self._lane_identifier(lane) + if facets: + kwargs = dict(list(facets.items())) + else: + kwargs = {} + + return self.url_for( + "acquisition_groups", + lane_identifier=lane_identifier, + library_short_name=self.library.short_name, + _external=True, + **kwargs, + ) + + def default_lane_url(self, facets: Optional[FacetsWithEntryPoint] = None) -> str: + return self.groups_url(None, facets=facets) + + def feed_url( # type: ignore [override] + self, + lane: Optional[WorkList], + facets: Optional[FacetsWithEntryPoint] = None, + pagination: Optional[Pagination] = None, + default_route: str = "feed", + ) -> str: + extra_kwargs = dict() + if self.library: + extra_kwargs["library_short_name"] = self.library.short_name + return super().feed_url(lane, facets, pagination, default_route, extra_kwargs) + + def search_url( + self, + lane: Optional[WorkList], + query: str, + pagination: Optional[Pagination], + facets: Optional[FacetsWithEntryPoint] = None, + ) -> str: + lane_identifier = self._lane_identifier(lane) + kwargs = dict(q=query) + if facets: + kwargs.update(dict(list(facets.items()))) + if pagination: + kwargs.update(dict(list(pagination.items()))) + return self.url_for( + "lane_search", + lane_identifier=lane_identifier, + library_short_name=self.library.short_name, + _external=True, + **kwargs, + ) + + def group_uri( + self, work: Work, license_pool: Optional[LicensePool], identifier: Identifier + ) -> Tuple[Optional[str], str]: + if not work in self.lanes_by_work: + return None, "" + + lanes = self.lanes_by_work[work] + if not lanes: + # I don't think this should ever happen? 
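+            # Fall back to the top-level acquisition groups feed
+            # ('All Books') rather than failing.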
+ lane_name = None + url = self.url_for( + "acquisition_groups", + lane_identifier=None, + library_short_name=self.library.short_name, + _external=True, + ) + title = "All Books" + return url, title + + lane = lanes[0] + self.lanes_by_work[work] = lanes[1:] + lane_name = "" + show_feed = False + + if isinstance(lane, dict): + show_feed = lane.get("link_to_list_feed", show_feed) + title = lane.get("label", lane_name) + lane = lane["lane"] + + if isinstance(lane, str): + return lane, lane_name + + if hasattr(lane, "display_name") and not title: + title = lane.display_name + + if show_feed: + return self.feed_url(lane, self.facets), title + + return self.lane_url(lane, self.facets), title + + def lane_url( + self, lane: Optional[WorkList], facets: Optional[FacetsWithEntryPoint] = None + ) -> str: + # If the lane has sublanes, the URL identifying the group will + # take the user to another set of groups for the + # sublanes. Otherwise it will take the user to a list of the + # books in the lane by author. + + if lane and isinstance(lane, Lane) and lane.sublanes: + url = self.groups_url(lane, facets=facets) + elif lane and (isinstance(lane, Lane) or isinstance(lane, DynamicLane)): + url = self.feed_url(lane, facets) + else: + # This lane isn't part of our lane hierarchy. It's probably + # a WorkList created to represent the top-level. Use the top-level + # url for it. + url = self.default_lane_url(facets=facets) + return url + + def annotate_work_entry( + self, entry: WorkEntry, updated: Optional[datetime.datetime] = None + ) -> None: + super().annotate_work_entry(entry, updated=updated) + + if not entry.computed: + return + + work = entry.work + identifier = entry.identifier or work.presentation_edition.primary_identifier + + permalink_uri, permalink_type = self.permalink_for(identifier) + # TODO: Do not force OPDS types + if permalink_uri: + entry.computed.other_links.append( + Link(href=permalink_uri, rel="alternate", type=permalink_type) + ) + if self.is_work_entry_solo(work): + entry.computed.other_links.append( + Link(rel="self", href=permalink_uri, type=permalink_type) + ) + + # Add a link to each author tag. + self.add_author_links(entry) + + # And a series, if there is one. + if work.series: + self.add_series_link(entry) + + if NoveListAPI.is_configured(self.library): + # If NoveList Select is configured, there might be + # recommendations, too. + entry.computed.other_links.append( + Link( + rel="recommendations", + type=OPDSFeed.ACQUISITION_FEED_TYPE, + title="Recommended Works", + href=self.url_for( + "recommendations", + identifier_type=identifier.type, + identifier=identifier.identifier, + library_short_name=self.library.short_name, + _external=True, + ), + ) + ) + + # Add a link for related books if available. + if self.related_books_available(work, self.library): + entry.computed.other_links.append( + Link( + rel="related", + type=OPDSFeed.ACQUISITION_FEED_TYPE, + title="Recommended Works", + href=self.url_for( + "related_books", + identifier_type=identifier.type, + identifier=identifier.identifier, + library_short_name=self.library.short_name, + _external=True, + ), + ) + ) + + # Add a link to get a patron's annotations for this book. 
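+        # (The annotation link uses the W3C Web Annotation rel
+        # "http://www.w3.org/ns/oa#annotationService" and is only added when
+        # the library can identify individual patrons.)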
+ if self.identifies_patrons: + entry.computed.other_links.append( + Link( + rel="http://www.w3.org/ns/oa#annotationService", + type=AnnotationWriter.CONTENT_TYPE, + href=self.url_for( + "annotations_for_work", + identifier_type=identifier.type, + identifier=identifier.identifier, + library_short_name=self.library.short_name, + _external=True, + ), + ) + ) + + if Analytics.is_configured(self.library): + entry.computed.other_links.append( + Link( + rel="http://librarysimplified.org/terms/rel/analytics/open-book", + href=self.url_for( + "track_analytics_event", + identifier_type=identifier.type, + identifier=identifier.identifier, + event_type=CirculationEvent.OPEN_BOOK, + library_short_name=self.library.short_name, + _external=True, + ), + ) + ) + + # Groups is only from the library annotator + group_uri, group_title = self.group_uri( + entry.work, entry.license_pool, entry.identifier + ) + if group_uri: + entry.computed.other_links.append( + Link(href=group_uri, rel=OPDSFeed.GROUP_REL, title=str(group_title)) + ) + + @classmethod + def related_books_available(cls, work: Work, library: Library) -> bool: + """:return: bool asserting whether related books might exist for a particular Work""" + contributions = work.sort_author and work.sort_author != Edition.UNKNOWN_AUTHOR + + return bool(contributions or work.series or NoveListAPI.is_configured(library)) + + def language_and_audience_key_from_work( + self, work: Work + ) -> Tuple[Optional[str], Optional[str]]: + language_key = work.language + + audiences = None + if work.audience == Classifier.AUDIENCE_CHILDREN: + audiences = [Classifier.AUDIENCE_CHILDREN] + elif work.audience == Classifier.AUDIENCE_YOUNG_ADULT: + audiences = Classifier.AUDIENCES_JUVENILE + elif work.audience == Classifier.AUDIENCE_ALL_AGES: + audiences = [Classifier.AUDIENCE_CHILDREN, Classifier.AUDIENCE_ALL_AGES] + elif work.audience in Classifier.AUDIENCES_ADULT: + audiences = list(Classifier.AUDIENCES_NO_RESEARCH) + elif work.audience == Classifier.AUDIENCE_RESEARCH: + audiences = list(Classifier.AUDIENCES) + else: + audiences = [] + + audience_key = None + if audiences: + audience_strings = [urllib.parse.quote_plus(a) for a in sorted(audiences)] + audience_key = ",".join(audience_strings) + + return language_key, audience_key + + def add_author_links(self, entry: WorkEntry) -> None: + """Add a link to all authors""" + if not entry.computed: + return None + + languages, audiences = self.language_and_audience_key_from_work(entry.work) + for author_entry in entry.computed.authors: + if not (name := getattr(author_entry, "name", None)): + continue + + author_entry.add_attributes( + { + "link": Link( + rel="contributor", + type=OPDSFeed.ACQUISITION_FEED_TYPE, + title=name, + href=self.url_for( + "contributor", + contributor_name=name, + languages=languages, + audiences=audiences, + library_short_name=self.library.short_name, + _external=True, + ), + ), + } + ) + + def add_series_link(self, entry: WorkEntry) -> None: + if not entry.computed: + return None + + series_entry = entry.computed.series + work = entry.work + + if series_entry is None: + # There is no series, and thus nothing to annotate. + # This probably indicates an out-of-date OPDS entry. 
+ work_id = work.id + work_title = work.title + self.log.error( + 'add_series_link() called on work %s ("%s"), which has no Series data in its OPDS WorkEntry.', + work_id, + work_title, + ) + return + + series_name = work.series + languages, audiences = self.language_and_audience_key_from_work(work) + href = self.url_for( + "series", + series_name=series_name, + languages=languages, + audiences=audiences, + library_short_name=self.library.short_name, + _external=True, + ) + series_entry.add_attributes( + { + "link": Link( + rel="series", + type=OPDSFeed.ACQUISITION_FEED_TYPE, + title=series_name, + href=href, + ), + } + ) + + def annotate_feed(self, feed: FeedData) -> None: + if self.patron: + # A patron is authenticated. + self.add_patron(feed) + else: + # No patron is authenticated. Show them how to + # authenticate (or that authentication is not supported). + self.add_authentication_document_link(feed) + + # Add a 'search' link if the lane is searchable. + if self.lane and self.lane.search_target: + search_facet_kwargs = {} + if self.facets is not None: + if self.facets.entrypoint_is_default: + # The currently selected entry point is a default. + # Rather than using it, we want the 'default' behavior + # for search, which is to search everything. + search_facets = self.facets.navigate( + entrypoint=EverythingEntryPoint + ) + else: + search_facets = self.facets + search_facet_kwargs.update(dict(list(search_facets.items()))) + + lane_identifier = self._lane_identifier(self.lane) + search_url = self.url_for( + "lane_search", + lane_identifier=lane_identifier, + library_short_name=self.library.short_name, + _external=True, + **search_facet_kwargs, + ) + search_link = dict( + rel="search", + type="application/opensearchdescription+xml", + href=search_url, + ) + feed.add_link(**search_link) + + if self.identifies_patrons: + # Since this library authenticates patrons it can offer + # a bookshelf and an annotation service. 
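+            # (The shelf link points at the patron's active-loans feed, using
+            # the standard OPDS rel "http://opds-spec.org/shelf".)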
+ shelf_link = dict( + rel="http://opds-spec.org/shelf", + type=OPDSFeed.ACQUISITION_FEED_TYPE, + href=self.url_for( + "active_loans", + library_short_name=self.library.short_name, + _external=True, + ), + ) + feed.add_link(**shelf_link) + + annotations_link = dict( + rel="http://www.w3.org/ns/oa#annotationService", + type=AnnotationWriter.CONTENT_TYPE, + href=self.url_for( + "annotations", + library_short_name=self.library.short_name, + _external=True, + ), + ) + feed.add_link(**annotations_link) + + if self.lane and self.lane.uses_customlists: + name = None + if hasattr(self.lane, "customlists") and len(self.lane.customlists) == 1: + name = self.lane.customlists[0].name + else: + _db = Session.object_session(self.library) + customlist = self.lane.get_customlists(_db) + if customlist: + name = customlist[0].name + + if name: + crawlable_url = self.url_for( + "crawlable_list_feed", + list_name=name, + library_short_name=self.library.short_name, + _external=True, + ) + crawlable_link = dict( + rel="http://opds-spec.org/crawlable", + type=OPDSFeed.ACQUISITION_FEED_TYPE, + href=crawlable_url, + ) + feed.add_link(**crawlable_link) + + self.add_configuration_links(feed) + + def add_configuration_links(self, feed: FeedData) -> None: + _db = Session.object_session(self.library) + + def _add_link(l: Dict[str, Any]) -> None: + feed.add_link(**l) + + library = self.library + if library.settings.terms_of_service: + _add_link( + dict( + rel="terms-of-service", + href=library.settings.terms_of_service, + type="text/html", + ) + ) + + if library.settings.privacy_policy: + _add_link( + dict( + rel="privacy-policy", + href=library.settings.privacy_policy, + type="text/html", + ) + ) + + if library.settings.copyright: + _add_link( + dict( + rel="copyright", + href=library.settings.copyright, + type="text/html", + ) + ) + + if library.settings.about: + _add_link( + dict( + rel="about", + href=library.settings.about, + type="text/html", + ) + ) + + if library.settings.license: + _add_link( + dict( + rel="license", + href=library.settings.license, + type="text/html", + ) + ) + + navigation_urls = self.library.settings.web_header_links + navigation_labels = self.library.settings.web_header_labels + for url, label in zip(navigation_urls, navigation_labels): + d = dict( + href=url, + title=label, + type="text/html", + rel="related", + role="navigation", + ) + _add_link(d) + + for type, value in Configuration.help_uris(self.library): + d = dict(href=value, rel="help") + if type: + d["type"] = type + _add_link(d) + + def acquisition_links( # type: ignore [override] + self, + active_license_pool: Optional[LicensePool], + active_loan: Optional[Loan], + active_hold: Optional[Hold], + active_fulfillment: Optional[Any], + identifier: Identifier, + direct_fulfillment_delivery_mechanisms: Optional[ + List[LicensePoolDeliveryMechanism] + ] = None, + mock_api: Optional[Any] = None, + ) -> List[Acquisition]: + """Generate one or more tags that can be used to borrow, + reserve, or fulfill a book, depending on the state of the book + and the current patron. + + :param active_license_pool: The LicensePool for which we're trying to + generate tags. + :param active_loan: A Loan object representing the current patron's + existing loan for this title, if any. + :param active_hold: A Hold object representing the current patron's + existing hold on this title, if any. + :param active_fulfillment: A LicensePoolDeliveryMechanism object + representing the mechanism, if any, which the patron has chosen + to fulfill this work. 
+ :param feed: The OPDSFeed that will eventually contain these + tags. + :param identifier: The Identifier of the title for which we're + trying to generate tags. + :param direct_fulfillment_delivery_mechanisms: A list of + LicensePoolDeliveryMechanisms for the given LicensePool + that should have fulfillment-type tags generated for + them, even if this method wouldn't normally think that + makes sense. + :param mock_api: A mock object to stand in for the API to the + vendor who provided this LicensePool. If this is not provided, a + live API for that vendor will be used. + """ + direct_fulfillment_delivery_mechanisms = ( + direct_fulfillment_delivery_mechanisms or [] + ) + api = mock_api + if not api and self.circulation and active_license_pool: + api = self.circulation.api_for_license_pool(active_license_pool) + if api: + set_mechanism_at_borrow = ( + api.SET_DELIVERY_MECHANISM_AT == BaseCirculationAPI.BORROW_STEP + ) + if active_license_pool and not self.identifies_patrons and not active_loan: + for lpdm in active_license_pool.delivery_mechanisms: + if api.can_fulfill_without_loan(None, active_license_pool, lpdm): + # This title can be fulfilled without an + # active loan, so we're going to add an acquisition + # link that goes directly to the fulfillment step + # without the 'borrow' step. + direct_fulfillment_delivery_mechanisms.append(lpdm) + else: + # This is most likely an open-access book. Just put one + # borrow link and figure out the rest later. + set_mechanism_at_borrow = False + + return super().acquisition_links( + active_license_pool, + active_loan, + active_hold, + active_fulfillment, + identifier, + can_hold=self.library.settings.allow_holds, + can_revoke_hold=bool( + active_hold + and ( + not self.circulation + or ( + active_license_pool + and self.circulation.can_revoke_hold( + active_license_pool, active_hold + ) + ) + ) + ), + set_mechanism_at_borrow=set_mechanism_at_borrow, + direct_fulfillment_delivery_mechanisms=direct_fulfillment_delivery_mechanisms, + add_open_access_links=(not self.identifies_patrons), + ) + + def revoke_link( + self, + active_license_pool: LicensePool, + active_loan: Optional[Loan], + active_hold: Optional[Hold], + ) -> Optional[Acquisition]: + if not self.identifies_patrons: + return None + url = self.url_for( + "revoke_loan_or_hold", + license_pool_id=active_license_pool.id, + library_short_name=self.library.short_name, + _external=True, + ) + kw: Dict[str, Any] = dict(href=url, rel=OPDSFeed.REVOKE_LOAN_REL) + revoke_link_tag = Acquisition(**kw) + return revoke_link_tag + + def borrow_link( + self, + active_license_pool: LicensePool, + borrow_mechanism: Optional[LicensePoolDeliveryMechanism], + fulfillment_mechanisms: List[LicensePoolDeliveryMechanism], + active_hold: Optional[Hold] = None, + ) -> Optional[Acquisition]: + if not self.identifies_patrons: + return None + identifier = active_license_pool.identifier + if borrow_mechanism: + # Following this link will both borrow the book and set + # its delivery mechanism. + mechanism_id = borrow_mechanism.delivery_mechanism.id + else: + # Following this link will borrow the book but not set + # its delivery mechanism. 
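+            # (The mechanism will instead be chosen later, at the point of
+            # fulfillment, via the opds:indirectAcquisition hints added below.)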
+ mechanism_id = None + borrow_url = self.url_for( + "borrow", + identifier_type=identifier.type, + identifier=identifier.identifier, + mechanism_id=mechanism_id, + library_short_name=self.library.short_name, + _external=True, + ) + rel = OPDSFeed.BORROW_REL + borrow_link = Acquisition( + rel=rel, + href=borrow_url, + type=OPDSFeed.ENTRY_TYPE, + is_hold=True if active_hold else False, + ) + + indirect_acquisitions: List[IndirectAcquisition] = [] + for lpdm in fulfillment_mechanisms: + # We have information about one or more delivery + # mechanisms that will be available at the point of + # fulfillment. To the extent possible, put information + # about these mechanisms into the tag as + # tags. + + # These are the formats mentioned in the indirect + # acquisition. + format_types = AcquisitionHelper.format_types(lpdm.delivery_mechanism) + + # If we can borrow this book, add this delivery mechanism + # to the borrow link as an . + if format_types: + indirect_acquisition = self.indirect_acquisition(format_types) + if indirect_acquisition: + indirect_acquisitions.append(indirect_acquisition) + + if not indirect_acquisitions: + # If there's no way to actually get the book, cancel the creation + # of an OPDS entry altogether. + raise UnfulfillableWork() + + borrow_link.indirect_acquisitions = indirect_acquisitions + return borrow_link + + def fulfill_link( + self, + license_pool: LicensePool, + active_loan: Optional[Loan], + delivery_mechanism: DeliveryMechanism, + rel: str = OPDSFeed.ACQUISITION_REL, + ) -> Optional[Acquisition]: + """Create a new fulfillment link. + + This link may include tags from the OPDS Extensions for DRM. + """ + if not self.identifies_patrons and rel != OPDSFeed.OPEN_ACCESS_REL: + return None + if isinstance(delivery_mechanism, LicensePoolDeliveryMechanism): + logging.warning( + "LicensePoolDeliveryMechanism passed into fulfill_link instead of DeliveryMechanism!" + ) + delivery_mechanism = delivery_mechanism.delivery_mechanism + format_types = AcquisitionHelper.format_types(delivery_mechanism) + if not format_types: + return None + + fulfill_url = self.url_for( + "fulfill", + license_pool_id=license_pool.id, + mechanism_id=delivery_mechanism.id, + library_short_name=self.library.short_name, + _external=True, + ) + + link_tag = self.acquisition_link( + rel=rel, href=fulfill_url, types=format_types, active_loan=active_loan + ) + + children = AcquisitionHelper.license_tags(license_pool, active_loan, None) + if children: + link_tag.add_attributes(children) + + drm_tags = self.drm_extension_tags( + license_pool, active_loan, delivery_mechanism + ) + link_tag.add_attributes(drm_tags) + return link_tag + + def open_access_link( + self, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism + ) -> Acquisition: + link_tag = super().open_access_link(pool, lpdm) + fulfill_url = self.url_for( + "fulfill", + license_pool_id=pool.id, + mechanism_id=lpdm.delivery_mechanism.id, + library_short_name=self.library.short_name, + _external=True, + ) + link_tag.href = fulfill_url + return link_tag + + def drm_extension_tags( + self, + license_pool: LicensePool, + active_loan: Optional[Loan], + delivery_mechanism: Optional[DeliveryMechanism], + ) -> Dict[str, Any]: + """Construct OPDS Extensions for DRM tags that explain how to + register a device with the DRM server that manages this loan. 
+ :param delivery_mechanism: A DeliveryMechanism + """ + if not active_loan or not delivery_mechanism or not self.identifies_patrons: + return {} + + if delivery_mechanism.drm_scheme == DeliveryMechanism.ADOBE_DRM: + # Get an identifier for the patron that will be registered + # with the DRM server. + patron = active_loan.patron + + # Generate a tag that can feed into the + # Vendor ID service. + return self.adobe_id_tags(patron) + + if delivery_mechanism.drm_scheme == DeliveryMechanism.LCP_DRM: + # Generate a tag that can be used for the loan + # in the mobile apps. + + return self.lcp_key_retrieval_tags(active_loan) + + return {} + + def adobe_id_tags( + self, patron_identifier: str | Patron + ) -> Dict[str, FeedEntryType]: + """Construct tags using the DRM Extensions for OPDS standard that + explain how to get an Adobe ID for this patron, and how to + manage their list of device IDs. + :param delivery_mechanism: A DeliveryMechanism + :return: If Adobe Vendor ID delegation is configured, a list + containing a tag. If not, an empty list. + """ + # CirculationManagerAnnotators are created per request. + # Within the context of a single request, we can cache the + # tags that explain how the patron can get an Adobe ID, and + # reuse them across tags. This saves a little time, + # makes tests more reliable, and stops us from providing a + # different Short Client Token for every tag. + if isinstance(patron_identifier, Patron): + cache_key = str(patron_identifier.id) + else: + cache_key = patron_identifier + cached = self._adobe_id_cache.get(cache_key) + if cached is None: + cached = {} + authdata = None + try: + authdata = AuthdataUtility.from_config(self.library) + except CannotLoadConfiguration as e: + logging.error( + "Cannot load Short Client Token configuration; outgoing OPDS entries will not have DRM autodiscovery support", + exc_info=e, + ) + return {} + if authdata: + vendor_id, token = authdata.short_client_token_for_patron( + patron_identifier + ) + drm_licensor = FeedEntryType.create( + vendor=vendor_id, clientToken=FeedEntryType(text=token) + ) + cached = {"licensor": drm_licensor} + + self._adobe_id_cache[cache_key] = cached + else: + cached = copy.deepcopy(cached) + return cached + + def lcp_key_retrieval_tags(self, active_loan: Loan) -> Dict[str, FeedEntryType]: + # In the case of LCP we have to include a patron's hashed passphrase + # inside the acquisition link so client applications can use it to open the LCP license + # without having to ask the user to enter their password + # https://readium.org/lcp-specs/notes/lcp-key-retrieval.html#including-a-hashed-passphrase-in-an-opds-1-catalog + + db = Session.object_session(active_loan) + lcp_credential_factory = LCPCredentialFactory() + + response = {} + + try: + hashed_passphrase: LCPHashedPassphrase = ( + lcp_credential_factory.get_hashed_passphrase(db, active_loan.patron) + ) + response["hashed_passphrase"] = FeedEntryType(text=hashed_passphrase.hashed) + except LCPError: + # The patron's passphrase wasn't generated yet and not present in the database. 
+ pass + + return response + + def add_patron(self, feed: FeedData) -> None: + if not self.patron or not self.identifies_patrons: + return None + patron_details = {} + if self.patron.username: + patron_details["username"] = self.patron.username + if self.patron.authorization_identifier: + patron_details[ + "authorizationIdentifier" + ] = self.patron.authorization_identifier + + patron_tag = FeedEntryType.create(**patron_details) + feed.add_metadata("patron", patron_tag) + + def add_authentication_document_link(self, feed_obj: FeedData) -> None: + """Create a tag that points to the circulation + manager's Authentication for OPDS document + for the current library. + """ + # Even if self.identifies_patrons is false, we include this link, + # because this document is the one that explains there is no + # patron authentication at this library. + feed_obj.add_link( + rel="http://opds-spec.org/auth/document", + href=self.url_for( + "authentication_document", + library_short_name=self.library.short_name, + _external=True, + ), + ) diff --git a/core/feed/annotator/loan_and_hold.py b/core/feed/annotator/loan_and_hold.py new file mode 100644 index 0000000000..efdf42977a --- /dev/null +++ b/core/feed/annotator/loan_and_hold.py @@ -0,0 +1,125 @@ +import copy +from datetime import datetime +from typing import Any, Dict, List, Optional + +from core.feed.types import FeedData, Link, WorkEntry +from core.model.configuration import ExternalIntegration +from core.model.constants import EditionConstants, LinkRelations +from core.model.patron import Hold, Patron + +from .circulation import LibraryAnnotator + + +class LibraryLoanAndHoldAnnotator(LibraryAnnotator): + @staticmethod + def choose_best_hold_for_work(list_of_holds: List[Hold]) -> Hold: + # We don't want holds that are connected to license pools without any licenses owned. Also, we want hold that + # would result in the least wait time for the patron. + + best = list_of_holds[0] + + for hold in list_of_holds: + # We don't want holds with LPs with 0 licenses owned. + if hold.license_pool.licenses_owned == 0: + continue + + # Our current hold's LP owns some licenses but maybe the best one wasn't changed yet. + if best.license_pool.licenses_owned == 0: + best = hold + continue + + # Since these numbers are updated by different processes there might be situation where we don't have + # all data filled out. + hold_position = ( + hold.position or hold.license_pool.patrons_in_hold_queue or 0 + ) + best_position = ( + best.position or best.license_pool.patrons_in_hold_queue or 0 + ) + + # Both the best hold and current hold own some licenses, try to figure out which one is better. + if ( + hold_position / hold.license_pool.licenses_owned + < best_position / best.license_pool.licenses_owned + ): + best = hold + + return best + + def drm_device_registration_feed_tags(self, patron: Patron) -> Dict[str, Any]: + """Return tags that provide information on DRM device deregistration + independent of any particular loan. These tags will go under + the tag. + + This allows us to deregister an Adobe ID, in preparation for + logout, even if there is no active loan that requires one. 
+ """ + tags = copy.deepcopy(self.adobe_id_tags(patron)) + attr = "scheme" + for tag, value in tags.items(): + value.add_attributes( + {attr: "http://librarysimplified.org/terms/drm/scheme/ACS"} + ) + return tags + + @property + def user_profile_management_protocol_link(self) -> Link: + """Create a tag that points to the circulation + manager's User Profile Management Protocol endpoint + for the current patron. + """ + return Link( + rel="http://librarysimplified.org/terms/rel/user-profile", + href=self.url_for( + "patron_profile", + library_short_name=self.library.short_name, + _external=True, + ), + ) + + def annotate_feed(self, feed: FeedData) -> None: + """Annotate the feed with top-level DRM device registration tags + and a link to the User Profile Management Protocol endpoint. + """ + super().annotate_feed(feed) + if self.patron: + tags = self.drm_device_registration_feed_tags(self.patron) + link = self.user_profile_management_protocol_link + if link.href is not None: + feed.add_link(link.href, rel=link.rel) + for name, value in tags.items(): + feed.add_metadata(name, feed_entry=value) + + def annotate_work_entry( + self, entry: WorkEntry, updated: Optional[datetime] = None + ) -> None: + super().annotate_work_entry(entry, updated=updated) + if not entry.computed: + return + active_license_pool = entry.license_pool + work = entry.work + edition = work.presentation_edition + identifier = edition.primary_identifier + # Only OPDS for Distributors should get the time tracking link + # And only if there is an active loan for the work + if ( + edition.medium == EditionConstants.AUDIO_MEDIUM + and active_license_pool + and active_license_pool.collection.protocol + == ExternalIntegration.OPDS_FOR_DISTRIBUTORS + and work in self.active_loans_by_work + ): + entry.computed.other_links.append( + Link( + rel=LinkRelations.TIME_TRACKING, + href=self.url_for( + "track_playtime_events", + identifier_type=identifier.type, + identifier=identifier.identifier, + library_short_name=self.library.short_name, + collection_id=active_license_pool.collection.id, + _external=True, + ), + type="application/json", + ) + ) diff --git a/core/feed/annotator/verbose.py b/core/feed/annotator/verbose.py new file mode 100644 index 0000000000..965f36aa01 --- /dev/null +++ b/core/feed/annotator/verbose.py @@ -0,0 +1,110 @@ +from collections import defaultdict +from datetime import datetime +from typing import Dict, List, Optional + +from sqlalchemy.orm import Session + +from core.feed.annotator.base import Annotator +from core.feed.types import Author, WorkEntry +from core.model import PresentationCalculationPolicy +from core.model.classification import Subject +from core.model.contributor import Contributor +from core.model.edition import Edition +from core.model.identifier import Identifier +from core.model.measurement import Measurement +from core.model.work import Work + + +class VerboseAnnotator(Annotator): + """The default Annotator for machine-to-machine integration. + + This Annotator describes all categories and authors for the book + in great detail. 
+ """ + + opds_cache_field = Work.verbose_opds_entry.name + + def annotate_work_entry( + self, entry: WorkEntry, updated: Optional[datetime] = None + ) -> None: + super().annotate_work_entry(entry, updated=updated) + self.add_ratings(entry) + + @classmethod + def add_ratings(cls, entry: WorkEntry) -> None: + """Add a quality rating to the work.""" + work = entry.work + for type_uri, value in [ + (Measurement.QUALITY, work.quality), + (None, work.rating), + (Measurement.POPULARITY, work.popularity), + ]: + if value and entry.computed: + entry.computed.ratings.append(cls.rating(type_uri, value)) + + @classmethod + def categories( + cls, work: Work, policy: Optional[PresentationCalculationPolicy] = None + ) -> Dict[str, List[Dict[str, str]]]: + """Send out _all_ categories for the work. + + (So long as the category type has a URI associated with it in + Subject.uri_lookup.) + + :param policy: A PresentationCalculationPolicy to + use when deciding how deep to go when finding equivalent + identifiers for the work. + """ + policy = policy or PresentationCalculationPolicy( + equivalent_identifier_cutoff=100 + ) + _db = Session.object_session(work) + by_scheme_and_term = dict() + identifier_ids = work.all_identifier_ids(policy=policy) + classifications = Identifier.classifications_for_identifier_ids( + _db, identifier_ids + ) + for c in classifications: + subject = c.subject + if subject.type in Subject.uri_lookup: + scheme = Subject.uri_lookup[subject.type] + term = subject.identifier + weight_field = "ratingValue" + key = (scheme, term) + if not key in by_scheme_and_term: + value = dict(term=subject.identifier) + if subject.name: + value["label"] = subject.name + value[weight_field] = 0 + by_scheme_and_term[key] = value + by_scheme_and_term[key][weight_field] += c.weight + + # Collapse by_scheme_and_term to by_scheme + by_scheme = defaultdict(list) + for (scheme, term), value in list(by_scheme_and_term.items()): + by_scheme[scheme].append(value) + by_scheme.update(super().categories(work)) + return by_scheme + + @classmethod + def authors(cls, edition: Edition) -> Dict[str, List[Author]]: + """Create a detailed tag for each author.""" + return { + "authors": [ + cls.detailed_author(author) for author in edition.author_contributors + ], + "contributors": [], + } + + @classmethod + def detailed_author(cls, contributor: Contributor) -> Author: + """Turn a Contributor into a detailed tag.""" + author = Author() + author.name = contributor.display_name + author.sort_name = contributor.sort_name + author.family_name = contributor.family_name + author.wikipedia_name = contributor.wikipedia_name + author.viaf = f"http://viaf.org/viaf/{contributor.viaf}" + author.lc = f"http://id.loc.gov/authorities/names/{contributor.lc}" + + return author diff --git a/core/feed/base.py b/core/feed/base.py new file mode 100644 index 0000000000..d731e49244 --- /dev/null +++ b/core/feed/base.py @@ -0,0 +1,13 @@ +from abc import ABC, abstractmethod + +from flask import Response + + +class FeedInterface(ABC): + @abstractmethod + def generate_feed(self) -> None: + ... + + @abstractmethod + def as_response(self) -> Response: + ... 
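
The FeedInterface ABC just added in core/feed/base.py is the contract the rest of the new core/feed package builds on: generate_feed() populates the feed's internal state, and as_response() renders that state as a Flask Response. As a rough sketch only (the EmptyFeed class below is hypothetical and not part of this patch), a concrete implementation is expected to look like this:

    from flask import Response

    from core.feed.base import FeedInterface


    class EmptyFeed(FeedInterface):
        """Hypothetical minimal feed: generate_feed() builds state, as_response() renders it."""

        def generate_feed(self) -> None:
            # Build whatever internal representation as_response() will serialize.
            self._payload = "<feed/>"

        def as_response(self) -> Response:
            return Response(self._payload, content_type="application/atom+xml")

The real implementation is BaseOPDSFeed in core/feed/opds.py, added later in this patch, which delegates serialization to a pluggable serializer.
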
diff --git a/core/feed/navigation.py b/core/feed/navigation.py new file mode 100644 index 0000000000..8c4031cc01 --- /dev/null +++ b/core/feed/navigation.py @@ -0,0 +1,92 @@ +from __future__ import annotations + +from typing import Any, Optional + +from sqlalchemy.orm import Session +from typing_extensions import Self +from werkzeug.datastructures import MIMEAccept + +from core.feed.annotator.circulation import CirculationManagerAnnotator +from core.feed.opds import BaseOPDSFeed +from core.feed.types import DataEntry, DataEntryTypes, Link +from core.feed.util import strftime +from core.lane import Facets, Pagination, WorkList +from core.opds import NavigationFacets +from core.util.datetime_helpers import utc_now +from core.util.flask_util import OPDSFeedResponse +from core.util.opds_writer import OPDSFeed + + +class NavigationFeed(BaseOPDSFeed): + def __init__( + self, + title: str, + url: str, + lane: WorkList, + annotator: CirculationManagerAnnotator, + facets: Optional[Facets] = None, + pagination: Optional[Pagination] = None, + ) -> None: + self.lane = lane + self.annotator = annotator + self._facets = facets + self._pagination = pagination + super().__init__(title, url) + + @classmethod + def navigation( + cls, + _db: Session, + title: str, + url: str, + worklist: WorkList, + annotator: CirculationManagerAnnotator, + facets: Optional[Facets] = None, + ) -> Self: + """The navigation feed with links to a given lane's sublanes.""" + + facets = facets or NavigationFacets.default(worklist) + feed = cls(title, url, worklist, annotator, facets=facets) + feed.generate_feed() + return feed + + def generate_feed(self) -> None: + self._feed.add_metadata("title", text=self.title) + self._feed.add_metadata("id", text=self.url) + self._feed.add_metadata("updated", text=strftime(utc_now())) + self._feed.add_link(href=self.url, rel="self") + if not self.lane.children: + # We can't generate links to children, since this Worklist + # has no children, so we'll generate a link to the + # Worklist's page-type feed instead. 
+ title = "All " + self.lane.display_name + page_url = self.annotator.feed_url(self.lane) + self.add_entry(page_url, title, OPDSFeed.ACQUISITION_FEED_TYPE) + + for child in self.lane.visible_children: + title = child.display_name + if child.children: + child_url = self.annotator.navigation_url(child) + self.add_entry(child_url, title, OPDSFeed.NAVIGATION_FEED_TYPE) + else: + child_url = self.annotator.feed_url(child) + self.add_entry(child_url, title, OPDSFeed.ACQUISITION_FEED_TYPE) + + self.annotator.annotate_feed(self._feed) + + def add_entry( + self, url: str, title: str, type: str = OPDSFeed.NAVIGATION_FEED_TYPE + ) -> None: + """Create an OPDS navigation entry for a URL.""" + entry = DataEntry(type=DataEntryTypes.NAVIGATION, title=title, id=url) + entry.links.append(Link(rel="subsection", href=url, type=type)) + self._feed.data_entries.append(entry) + + def as_response( + self, + mime_types: Optional[MIMEAccept] = None, + **kwargs: Any, + ) -> OPDSFeedResponse: + response = super().as_response(mime_types=mime_types, **kwargs) + response.content_type = OPDSFeed.NAVIGATION_FEED_TYPE + return response diff --git a/core/feed/opds.py b/core/feed/opds.py new file mode 100644 index 0000000000..84fe5246dc --- /dev/null +++ b/core/feed/opds.py @@ -0,0 +1,96 @@ +from __future__ import annotations + +import logging +from typing import Any, Dict, List, Optional, Type + +from werkzeug.datastructures import MIMEAccept + +from core.feed.base import FeedInterface +from core.feed.serializer.base import SerializerInterface +from core.feed.serializer.opds import OPDS1Serializer +from core.feed.serializer.opds2 import OPDS2Serializer +from core.feed.types import FeedData, WorkEntry +from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse +from core.util.opds_writer import OPDSMessage + + +def get_serializer( + mime_types: Optional[MIMEAccept], +) -> SerializerInterface[Any]: + # Ordering matters for poor matches (eg. 
*/*), so we will keep OPDS1 first + serializers: Dict[str, Type[SerializerInterface[Any]]] = { + "application/atom+xml": OPDS1Serializer, + "application/opds+json": OPDS2Serializer, + } + if mime_types: + match = mime_types.best_match( + serializers.keys(), default="application/atom+xml" + ) + return serializers[match]() + # Default + return OPDS1Serializer() + + +class BaseOPDSFeed(FeedInterface): + def __init__( + self, + title: str, + url: str, + precomposed_entries: Optional[List[OPDSMessage]] = None, + ) -> None: + self.url = url + self.title = title + self._precomposed_entries = precomposed_entries or [] + self._feed = FeedData() + self.log = logging.getLogger(self.__class__.__name__) + + def serialize(self, mime_types: Optional[MIMEAccept] = None) -> bytes: + serializer = get_serializer(mime_types) + return serializer.serialize_feed(self._feed) + + def add_link(self, href: str, rel: Optional[str] = None, **kwargs: Any) -> None: + self._feed.add_link(href, rel=rel, **kwargs) + + def as_response( + self, + mime_types: Optional[MIMEAccept] = None, + **kwargs: Any, + ) -> OPDSFeedResponse: + """Serialize the feed using the serializer protocol""" + serializer = get_serializer(mime_types) + return OPDSFeedResponse( + serializer.serialize_feed( + self._feed, precomposed_entries=self._precomposed_entries + ), + content_type=serializer.content_type(), + **kwargs, + ) + + @classmethod + def entry_as_response( + cls, + entry: WorkEntry | OPDSMessage, + mime_types: Optional[MIMEAccept] = None, + **response_kwargs: Any, + ) -> OPDSEntryResponse: + serializer = get_serializer(mime_types) + if isinstance(entry, OPDSMessage): + return OPDSEntryResponse( + response=serializer.to_string(serializer.serialize_opds_message(entry)), + status=entry.status_code, + content_type=serializer.content_type(), + **response_kwargs, + ) + + # A WorkEntry + if not entry.computed: + logging.getLogger().error(f"Entry data has not been generated for {entry}") + raise ValueError(f"Entry data has not been generated") + response = OPDSEntryResponse( + response=serializer.serialize_work_entry(entry.computed), + **response_kwargs, + ) + if isinstance(serializer, OPDS2Serializer): + # Only OPDS2 has the same content type for feed and entry + response.content_type = serializer.content_type() + return response diff --git a/core/feed/serializer/base.py b/core/feed/serializer/base.py new file mode 100644 index 0000000000..9141db8cea --- /dev/null +++ b/core/feed/serializer/base.py @@ -0,0 +1,32 @@ +from abc import ABC, abstractmethod +from typing import Generic, List, Optional, TypeVar + +from core.feed.types import FeedData, WorkEntryData +from core.util.opds_writer import OPDSMessage + +T = TypeVar("T") + + +class SerializerInterface(ABC, Generic[T]): + @classmethod + @abstractmethod + def to_string(cls, data: T) -> bytes: + ... + + @abstractmethod + def serialize_feed( + self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None + ) -> bytes: + ... + + @abstractmethod + def serialize_work_entry(self, entry: WorkEntryData) -> T: + ... + + @abstractmethod + def serialize_opds_message(self, message: OPDSMessage) -> T: + ... + + @abstractmethod + def content_type(self) -> str: + ... 
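
SerializerInterface above defines the generic serialization contract, and get_serializer() in core/feed/opds.py picks an implementation by content negotiation on the client's Accept header, keeping the OPDS 1.2 Atom serializer as the default for poor or missing matches. A minimal usage sketch, assuming the content_type() values of the two serializers added below:

    from werkzeug.datastructures import MIMEAccept

    from core.feed.opds import get_serializer
    from core.util.opds_writer import OPDSFeed

    # A client that prefers OPDS 2 JSON gets the OPDS2Serializer.
    serializer = get_serializer(MIMEAccept([("application/opds+json", 1.0)]))
    assert serializer.content_type() == "application/opds+json"

    # No Accept header (or a poor match such as */*) falls back to the OPDS 1.2 serializer.
    default = get_serializer(None)
    assert default.content_type() == OPDSFeed.ACQUISITION_FEED_TYPE

BaseOPDSFeed.as_response() performs the same negotiation, so the response's Content-Type always matches the serializer that produced the body.
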
diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py new file mode 100644 index 0000000000..d6ff9e49a2 --- /dev/null +++ b/core/feed/serializer/opds.py @@ -0,0 +1,363 @@ +from __future__ import annotations + +import datetime +from functools import partial +from typing import Any, Dict, List, Optional, cast + +from lxml import etree + +from core.feed.serializer.base import SerializerInterface +from core.feed.types import ( + Acquisition, + Author, + DataEntry, + FeedData, + FeedEntryType, + IndirectAcquisition, + WorkEntryData, +) +from core.util.datetime_helpers import utc_now +from core.util.opds_writer import OPDSFeed, OPDSMessage + +TAG_MAPPING = { + "indirectAcquisition": f"{{{OPDSFeed.OPDS_NS}}}indirectAcquisition", + "holds": f"{{{OPDSFeed.OPDS_NS}}}holds", + "copies": f"{{{OPDSFeed.OPDS_NS}}}copies", + "availability": f"{{{OPDSFeed.OPDS_NS}}}availability", + "licensor": f"{{{OPDSFeed.DRM_NS}}}licensor", + "patron": f"{{{OPDSFeed.SIMPLIFIED_NS}}}patron", + "series": f"{{{OPDSFeed.SCHEMA_NS}}}series", +} + +ATTRIBUTE_MAPPING = { + "vendor": f"{{{OPDSFeed.DRM_NS}}}vendor", + "scheme": f"{{{OPDSFeed.DRM_NS}}}scheme", + "username": f"{{{OPDSFeed.SIMPLIFIED_NS}}}username", + "authorizationIdentifier": f"{{{OPDSFeed.SIMPLIFIED_NS}}}authorizationIdentifier", + "rights": f"{{{OPDSFeed.DCTERMS_NS}}}rights", + "ProviderName": f"{{{OPDSFeed.BIBFRAME_NS}}}ProviderName", + "facetGroup": f"{{{OPDSFeed.OPDS_NS}}}facetGroup", + "activeFacet": f"{{{OPDSFeed.OPDS_NS}}}activeFacet", + "ratingValue": f"{{{OPDSFeed.SCHEMA_NS}}}ratingValue", +} + +AUTHOR_MAPPING = { + "name": f"{{{OPDSFeed.ATOM_NS}}}name", + "role": f"{{{OPDSFeed.OPF_NS}}}role", + "sort_name": f"{{{OPDSFeed.SIMPLIFIED_NS}}}sort_name", + "wikipedia_name": f"{{{OPDSFeed.SIMPLIFIED_NS}}}wikipedia_name", +} + + +class OPDS1Serializer(SerializerInterface[etree._Element], OPDSFeed): + """An OPDS 1.2 Atom feed serializer""" + + def __init__(self) -> None: + pass + + def _tag( + self, tag_name: str, *args: Any, mapping: Optional[Dict[str, str]] = None + ) -> etree._Element: + if not mapping: + mapping = TAG_MAPPING + return self.E(mapping.get(tag_name, tag_name), *args) + + def _attr_name( + self, attr_name: str, mapping: Optional[Dict[str, str]] = None + ) -> str: + if not mapping: + mapping = ATTRIBUTE_MAPPING + return mapping.get(attr_name, attr_name) + + def serialize_feed( + self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None + ) -> bytes: + # First we do metadata + serialized = self.E.feed() + + if feed.entrypoint: + serialized.set(f"{{{OPDSFeed.SIMPLIFIED_NS}}}entrypoint", feed.entrypoint) + + for name, metadata in feed.metadata.items(): + element = self._serialize_feed_entry(name, metadata) + serialized.append(element) + + for entry in feed.entries: + if entry.computed: + element = self.serialize_work_entry(entry.computed) + serialized.append(element) + + for data_entry in feed.data_entries: + element = self._serialize_data_entry(data_entry) + serialized.append(element) + + if precomposed_entries: + for precomposed in precomposed_entries: + if isinstance(precomposed, OPDSMessage): + serialized.append(self.serialize_opds_message(precomposed)) + + for link in feed.links: + serialized.append(self._serialize_feed_entry("link", link)) + + if feed.breadcrumbs: + breadcrumbs = OPDSFeed.E._makeelement( + f"{{{OPDSFeed.SIMPLIFIED_NS}}}breadcrumbs" + ) + for link in feed.breadcrumbs: + breadcrumbs.append(self._serialize_feed_entry("link", link)) + serialized.append(breadcrumbs) + + for link in 
feed.facet_links: + serialized.append(self._serialize_feed_entry("link", link)) + + etree.indent(serialized) + return self.to_string(serialized) + + def serialize_work_entry(self, feed_entry: WorkEntryData) -> etree._Element: + entry: etree._Element = OPDSFeed.entry() + + if feed_entry.additionalType: + entry.set( + f"{{{OPDSFeed.SCHEMA_NS}}}additionalType", feed_entry.additionalType + ) + + if feed_entry.title: + entry.append(OPDSFeed.E("title", feed_entry.title.text)) + if feed_entry.subtitle: + entry.append( + OPDSFeed.E( + f"{{{OPDSFeed.SCHEMA_NS}}}alternativeHeadline", + feed_entry.subtitle.text, + ) + ) + if feed_entry.summary: + entry.append(OPDSFeed.E("summary", feed_entry.summary.text)) + if feed_entry.pwid: + entry.append( + OPDSFeed.E(f"{{{OPDSFeed.SIMPLIFIED_NS}}}pwid", feed_entry.pwid) + ) + + if feed_entry.language: + entry.append( + OPDSFeed.E( + f"{{{OPDSFeed.DCTERMS_NS}}}language", feed_entry.language.text + ) + ) + if feed_entry.publisher: + entry.append( + OPDSFeed.E( + f"{{{OPDSFeed.DCTERMS_NS}}}publisher", feed_entry.publisher.text + ) + ) + if feed_entry.imprint: + entry.append( + OPDSFeed.E( + f"{{{OPDSFeed.BIB_SCHEMA_NS}}}publisherImprint", + feed_entry.imprint.text, + ) + ) + if feed_entry.issued: + # Entry.issued is the date the ebook came out, as distinct + # from Entry.published (which may refer to the print edition + # or some original edition way back when). + # + # For Dublin Core 'issued' we use Entry.issued if we have it + # and Entry.published if not. In general this means we use + # issued date for Gutenberg and published date for other + # sources. + # + # For the date the book was added to our collection we use + # atom:published. + # + # Note: feedparser conflates dc:issued and atom:published, so + # it can't be used to extract this information. However, these + # tags are consistent with the OPDS spec. 
+ issued = feed_entry.issued + if isinstance(issued, datetime.datetime) or isinstance( + issued, datetime.date + ): + now = utc_now() + today = datetime.date.today() + issued_already = False + if isinstance(issued, datetime.datetime): + issued_already = issued <= now + elif isinstance(issued, datetime.date): + issued_already = issued <= today + if issued_already: + entry.append( + OPDSFeed.E( + f"{{{OPDSFeed.DCTERMS_NS}}}issued", + issued.isoformat().split("T")[0], + ) + ) + + if feed_entry.identifier: + entry.append(OPDSFeed.E("id", feed_entry.identifier)) + if feed_entry.distribution and ( + provider := getattr(feed_entry.distribution, "provider_name", None) + ): + entry.append( + OPDSFeed.E( + f"{{{OPDSFeed.BIBFRAME_NS}}}distribution", + **{f"{{{OPDSFeed.BIBFRAME_NS}}}ProviderName": provider}, + ) + ) + if feed_entry.published: + entry.append(OPDSFeed.E("published", feed_entry.published.text)) + if feed_entry.updated: + entry.append(OPDSFeed.E("updated", feed_entry.updated.text)) + + if feed_entry.series: + entry.append(self._serialize_series_entry(feed_entry.series)) + + for category in feed_entry.categories: + element = OPDSFeed.category( + scheme=category.scheme, term=category.term, label=category.label # type: ignore[attr-defined] + ) + entry.append(element) + + for rating in feed_entry.ratings: + rating_tag = self._serialize_feed_entry("Rating", rating) + entry.append(rating_tag) + + for author in feed_entry.authors: + entry.append(self._serialize_author_tag("author", author)) + for contributor in feed_entry.contributors: + entry.append(self._serialize_author_tag("contributor", contributor)) + + for link in feed_entry.image_links: + entry.append(OPDSFeed.link(**link.dict())) + + for link in feed_entry.acquisition_links: + element = self._serialize_acquistion_link(link) + entry.append(element) + + for link in feed_entry.other_links: + entry.append(OPDSFeed.link(**link.dict())) + + return entry + + def serialize_opds_message(self, entry: OPDSMessage) -> etree._Element: + return entry.tag + + def _serialize_series_entry(self, series: FeedEntryType) -> etree._Element: + entry = self._tag("series") + if name := getattr(series, "name", None): + entry.set("name", name) + if position := getattr(series, "position", None): + entry.append(self._tag("position", position)) + if link := getattr(series, "link", None): + entry.append(self._serialize_feed_entry("link", link)) + + return entry + + def _serialize_feed_entry( + self, tag: str, feed_entry: FeedEntryType + ) -> etree._Element: + """Serialize a feed entry type in a recursive and blind manner""" + entry: etree._Element = self._tag(tag) + for attrib, value in feed_entry: + if value is None: + continue + if isinstance(value, list): + for item in value: + entry.append(self._serialize_feed_entry(attrib, item)) + elif isinstance(value, FeedEntryType): + entry.append(self._serialize_feed_entry(attrib, value)) + else: + if attrib == "text": + entry.text = value + else: + entry.set( + ATTRIBUTE_MAPPING.get(attrib, attrib), + value if value is not None else "", + ) + return entry + + def _serialize_author_tag(self, tag: str, author: Author) -> etree._Element: + entry: etree._Element = self._tag(tag) + attr = partial(self._attr_name, mapping=AUTHOR_MAPPING) + _tag = partial(self._tag, mapping=AUTHOR_MAPPING) + if author.name: + element = _tag("name") + element.text = author.name + entry.append(element) + if author.role: + entry.set(attr("role"), author.role) + if author.link: + entry.append(self._serialize_feed_entry("link", author.link)) + + # 
Verbose + if author.sort_name: + entry.append(_tag("sort_name", author.sort_name)) + if author.wikipedia_name: + entry.append(_tag("wikipedia_name", author.wikipedia_name)) + if author.viaf: + entry.append(_tag("sameas", author.viaf)) + if author.lc: + entry.append(_tag("sameas", author.lc)) + return entry + + def _serialize_acquistion_link(self, link: Acquisition) -> etree._Element: + element = OPDSFeed.link(**link.link_attribs()) + + def _indirect(item: IndirectAcquisition) -> etree._Element: + tag = self._tag("indirectAcquisition") + tag.set("type", item.type) + for child in item.children: + tag.append(_indirect(child)) + return tag + + for indirect in link.indirect_acquisitions: + element.append(_indirect(indirect)) + + if link.availability_status: + avail_tag = self._tag("availability") + avail_tag.set("status", link.availability_status) + if link.availability_since: + avail_tag.set(self._attr_name("since"), link.availability_since) + if link.availability_until: + avail_tag.set(self._attr_name("until"), link.availability_until) + element.append(avail_tag) + + if link.holds_total is not None: + holds_tag = self._tag("holds") + holds_tag.set(self._attr_name("total"), link.holds_total) + if link.holds_position: + holds_tag.set(self._attr_name("position"), link.holds_position) + element.append(holds_tag) + + if link.copies_total is not None: + copies_tag = self._tag("copies") + copies_tag.set(self._attr_name("total"), link.copies_total) + if link.copies_available: + copies_tag.set(self._attr_name("available"), link.copies_available) + element.append(copies_tag) + + if link.lcp_hashed_passphrase: + element.append( + self._tag("hashed_passphrase", link.lcp_hashed_passphrase.text) + ) + + if link.drm_licensor: + element.append(self._serialize_feed_entry("licensor", link.drm_licensor)) + + return element + + def _serialize_data_entry(self, entry: DataEntry) -> etree._Element: + element = self._tag("entry") + if entry.title: + element.append(self._tag("title", entry.title)) + if entry.id: + element.append(self._tag("id", entry.id)) + for link in entry.links: + link_ele = self._serialize_feed_entry("link", link) + element.append(link_ele) + return element + + @classmethod + def to_string(cls, element: etree._Element) -> bytes: + return cast(bytes, etree.tostring(element)) + + def content_type(self) -> str: + return OPDSFeed.ACQUISITION_FEED_TYPE diff --git a/core/feed/serializer/opds2.py b/core/feed/serializer/opds2.py new file mode 100644 index 0000000000..26f59a3275 --- /dev/null +++ b/core/feed/serializer/opds2.py @@ -0,0 +1,213 @@ +import json +from collections import defaultdict +from typing import Any, Dict, List, Optional + +from core.feed.serializer.base import SerializerInterface +from core.feed.types import ( + Acquisition, + Author, + FeedData, + IndirectAcquisition, + Link, + WorkEntryData, +) +from core.model import Contributor +from core.util.opds_writer import OPDSMessage + +ALLOWED_ROLES = [ + "translator", + "editor", + "artist", + "illustrator", + "letterer", + "penciler", + "colorist", + "inker", + "narrator", +] +MARC_CODE_TO_ROLES = { + code: name.lower() + for name, code in Contributor.MARC_ROLE_CODES.items() + if name.lower() in ALLOWED_ROLES +} + + +class OPDS2Serializer(SerializerInterface[Dict[str, Any]]): + def __init__(self) -> None: + pass + + def serialize_feed( + self, feed: FeedData, precomposed_entries: Optional[List[Any]] = None + ) -> bytes: + serialized: Dict[str, Any] = {"publications": []} + serialized["metadata"] = self._serialize_metadata(feed) + + for 
entry in feed.entries: + if entry.computed: + publication = self.serialize_work_entry(entry.computed) + serialized["publications"].append(publication) + + serialized.update(self._serialize_feed_links(feed)) + + return self.to_string(serialized) + + def _serialize_metadata(self, feed: FeedData) -> Dict[str, Any]: + fmeta = feed.metadata + metadata: Dict[str, Any] = {} + if title := fmeta.get("title"): + metadata["title"] = title.text + if item_count := fmeta.get("items_per_page"): + metadata["itemsPerPage"] = int(item_count.text or 0) + return metadata + + def serialize_opds_message(self, entry: OPDSMessage) -> Dict[str, Any]: + return dict(urn=entry.urn, description=entry.message) + + def serialize_work_entry(self, data: WorkEntryData) -> Dict[str, Any]: + metadata: Dict[str, Any] = {} + if data.additionalType: + metadata["@type"] = data.additionalType + + if data.title: + metadata["title"] = data.title.text + if data.sort_title: + metadata["sortAs"] = data.sort_title.text + + if data.subtitle: + metadata["subtitle"] = data.subtitle.text + if data.identifier: + metadata["identifier"] = data.identifier + if data.language: + metadata["language"] = data.language.text + if data.updated: + metadata["modified"] = data.updated.text + if data.published: + metadata["published"] = data.published.text + if data.summary: + metadata["description"] = data.summary.text + + if data.publisher: + metadata["publisher"] = dict(name=data.publisher.text) + if data.imprint: + metadata["imprint"] = dict(name=data.imprint.text) + + subjects = [] + if data.categories: + for subject in data.categories: + subjects.append( + { + "scheme": subject.scheme, # type: ignore[attr-defined] + "name": subject.label, # type: ignore[attr-defined] + "sortAs": subject.label, # type: ignore[attr-defined] # Same as above, don't think we have an alternate + } + ) + metadata["subject"] = subjects + + if data.series: + name = getattr(data.series, "name", None) + position = int(getattr(data.series, "position", 1)) + if name: + metadata["belongsTo"] = dict(name=name, position=position) + + if len(data.authors): + metadata["author"] = self._serialize_contributor(data.authors[0]) + for contributor in data.contributors: + if role := MARC_CODE_TO_ROLES.get(contributor.role or "", None): + metadata[role] = self._serialize_contributor(contributor) + + images = [self._serialize_link(link) for link in data.image_links] + links = [self._serialize_link(link) for link in data.other_links] + + for acquisition in data.acquisition_links: + links.append(self._serialize_acquisition_link(acquisition)) + + publication = {"metadata": metadata, "links": links, "images": images} + return publication + + def _serialize_link(self, link: Link) -> Dict[str, Any]: + serialized = {"href": link.href, "rel": link.rel} + if link.type: + serialized["type"] = link.type + if link.title: + serialized["title"] = link.title + return serialized + + def _serialize_acquisition_link(self, link: Acquisition) -> Dict[str, Any]: + item = self._serialize_link(link) + + def _indirect(indirect: IndirectAcquisition) -> Dict[str, Any]: + result: Dict[str, Any] = dict(type=indirect.type) + if indirect.children: + result["child"] = [] + for child in indirect.children: + result["child"].append(_indirect(child)) + return result + + props: Dict[str, Any] = {} + if link.availability_status: + state = link.availability_status + if link.is_loan: + state = "ready" + elif link.is_hold: + state = "reserved" + # This only exists in the serializer because there is no case where cancellable is 
false, + # that logic should be in the annotator if it ever occurs + props["actions"] = dict(cancellable=True) + props["availability"] = dict(state=state) + if link.availability_since: + props["availability"]["since"] = link.availability_since + if link.availability_until: + props["availability"]["until"] = link.availability_until + + if link.indirect_acquisitions: + props["indirectAcquisition"] = [] + for indirect in link.indirect_acquisitions: + props["indirectAcquisition"].append(_indirect(indirect)) + + if link.lcp_hashed_passphrase: + props["lcp_hashed_passphrase"] = link.lcp_hashed_passphrase + + if link.drm_licensor: + props["licensor"] = { + "clientToken": getattr(link.drm_licensor, "clientToken"), + "vendor": getattr(link.drm_licensor, "vendor"), + } + + if props: + item["properties"] = props + + return item + + def _serialize_feed_links(self, feed: FeedData) -> Dict[str, Any]: + link_data: Dict[str, List[Dict[str, Any]]] = {"links": [], "facets": []} + for link in feed.links: + link_data["links"].append(self._serialize_link(link)) + + facet_links: Dict[str, Any] = defaultdict(lambda: {"metadata": {}, "links": []}) + for link in feed.facet_links: + group = getattr(link, "facetGroup", None) + if group: + facet_links[group]["links"].append(self._serialize_link(link)) + facet_links[group]["metadata"]["title"] = group + for _, facets in facet_links.items(): + link_data["facets"].append(facets) + + return link_data + + def _serialize_contributor(self, author: Author) -> Dict[str, Any]: + result: Dict[str, Any] = {"name": author.name} + if author.sort_name: + result["sortAs"] = author.sort_name + if author.link: + link = self._serialize_link(author.link) + # OPDS2 does not need "title" in the link + link.pop("title", None) + result["links"] = [link] + return result + + def content_type(self) -> str: + return "application/opds+json" + + @classmethod + def to_string(cls, data: Dict[str, Any]) -> bytes: + return json.dumps(data, indent=2).encode() diff --git a/core/feed/types.py b/core/feed/types.py new file mode 100644 index 0000000000..1b6d1d4000 --- /dev/null +++ b/core/feed/types.py @@ -0,0 +1,240 @@ +from __future__ import annotations + +from dataclasses import dataclass, field +from datetime import date, datetime +from typing import Any, Dict, Generator, List, Optional, Tuple, cast + +from typing_extensions import Self + +from core.model import LicensePool, Work +from core.model.edition import Edition +from core.model.identifier import Identifier + +NO_SUCH_KEY = object() + + +@dataclass +class BaseModel: + def _vars(self) -> Generator[Tuple[str, Any], None, None]: + """Yield attributes as a tuple""" + _attrs = vars(self) + for name, value in _attrs.items(): + if name.startswith("_"): + continue + elif callable(value): + continue + yield name, value + + def dict(self) -> Dict[str, Any]: + """Dataclasses do not return undefined attributes via `asdict` so we must implement this ourselves""" + attrs = {} + for name, value in self: + if isinstance(value, BaseModel): + attrs[name] = value.dict() + else: + attrs[name] = value + return attrs + + def __iter__(self) -> Generator[Tuple[str, Any], None, None]: + """Allow attribute iteration""" + yield from self._vars() + + def get(self, name: str, *default: Any) -> Any: + """Convenience function. 
Mimics getattr""" + value = getattr(self, name, NO_SUCH_KEY) + if value is NO_SUCH_KEY: + if len(default) > 0: + return default[0] + else: + raise AttributeError(f"No attribute '{name}' found in object {self}") + return value + + +@dataclass +class FeedEntryType(BaseModel): + text: Optional[str] = None + + @classmethod + def create(cls, **kwargs: Any) -> Self: + """Create a new object with arbitrary data""" + obj = cls() + obj.add_attributes(kwargs) + return obj + + def add_attributes(self, attrs: Dict[str, Any]) -> None: + for name, data in attrs.items(): + setattr(self, name, data) + + def children(self) -> Generator[Tuple[str, FeedEntryType], None, None]: + """Yield all FeedEntryType attributes""" + for name, value in self: + if isinstance(value, self.__class__): + yield name, value + return + + +@dataclass +class Link(FeedEntryType): + href: Optional[str] = None + rel: Optional[str] = None + type: Optional[str] = None + + # Additional types + role: Optional[str] = None + title: Optional[str] = None + + def dict(self) -> Dict[str, Any]: + """A dict without None values""" + d = super().dict() + santized = {} + for k, v in d.items(): + if v is not None: + santized[k] = v + return santized + + def link_attribs(self) -> Dict[str, Any]: + d = dict(href=self.href) + for key in ["rel", "type"]: + if (value := getattr(self, key, None)) is not None: + d[key] = value + return d + + +@dataclass +class IndirectAcquisition(BaseModel): + type: Optional[str] = None + children: List[IndirectAcquisition] = field(default_factory=list) + + +@dataclass +class Acquisition(Link): + holds_position: Optional[str] = None + holds_total: Optional[str] = None + + copies_available: Optional[str] = None + copies_total: Optional[str] = None + + availability_status: Optional[str] = None + availability_since: Optional[str] = None + availability_until: Optional[str] = None + + rights: Optional[str] = None + + lcp_hashed_passphrase: Optional[FeedEntryType] = None + drm_licensor: Optional[FeedEntryType] = None + + indirect_acquisitions: List[IndirectAcquisition] = field(default_factory=list) + + # Signal if the acquisition is for a loan or a hold for the patron + is_loan: bool = False + is_hold: bool = False + + +@dataclass +class Author(FeedEntryType): + name: Optional[str] = None + sort_name: Optional[str] = None + viaf: Optional[str] = None + role: Optional[str] = None + family_name: Optional[str] = None + wikipedia_name: Optional[str] = None + lc: Optional[str] = None + link: Optional[Link] = None + + +@dataclass +class WorkEntryData(BaseModel): + """All the metadata possible for a work. 
This is not a FeedEntryType because we want strict control.""" + + additionalType: Optional[str] = None + identifier: Optional[str] = None + pwid: Optional[str] = None + issued: Optional[datetime | date] = None + + summary: Optional[FeedEntryType] = None + language: Optional[FeedEntryType] = None + publisher: Optional[FeedEntryType] = None + published: Optional[FeedEntryType] = None + updated: Optional[FeedEntryType] = None + title: Optional[FeedEntryType] = None + sort_title: Optional[FeedEntryType] = None + subtitle: Optional[FeedEntryType] = None + series: Optional[FeedEntryType] = None + imprint: Optional[FeedEntryType] = None + + authors: List[Author] = field(default_factory=list) + contributors: List[Author] = field(default_factory=list) + categories: List[FeedEntryType] = field(default_factory=list) + ratings: List[FeedEntryType] = field(default_factory=list) + distribution: Optional[FeedEntryType] = None + + # Links + acquisition_links: List[Acquisition] = field(default_factory=list) + image_links: List[Link] = field(default_factory=list) + other_links: List[Link] = field(default_factory=list) + + +@dataclass +class WorkEntry(BaseModel): + work: Work + edition: Edition + identifier: Identifier + license_pool: Optional[LicensePool] = None + + # Actual, computed feed data + computed: Optional[WorkEntryData] = None + + def __init__( + self, + work: Optional[Work] = None, + edition: Optional[Edition] = None, + identifier: Optional[Identifier] = None, + license_pool: Optional[LicensePool] = None, + ) -> None: + if None in (work, edition, identifier): + raise ValueError( + "Work, Edition or Identifier cannot be None while initializing an entry" + ) + self.work = cast(Work, work) + self.edition = cast(Edition, edition) + self.identifier = cast(Identifier, identifier) + self.license_pool = license_pool + + +class DataEntryTypes: + NAVIGATION = "navigation" + + +@dataclass +class DataEntry(FeedEntryType): + """Other kinds of information, like entries of a navigation feed""" + + type: Optional[str] = None + title: Optional[str] = None + id: Optional[str] = None + links: List[Link] = field(default_factory=list) + + +@dataclass +class FeedData(BaseModel): + links: List[Link] = field(default_factory=list) + breadcrumbs: List[Link] = field(default_factory=list) + facet_links: List[Link] = field(default_factory=list) + entries: List[WorkEntry] = field(default_factory=list) + data_entries: List[DataEntry] = field(default_factory=list) + metadata: Dict[str, FeedEntryType] = field(default_factory=dict) + entrypoint: Optional[str] = None + + class Config: + arbitrary_types_allowed = True + + def add_link(self, href: str, **kwargs: Any) -> None: + self.links.append(Link(href=href, **kwargs)) + + def add_metadata( + self, name: str, feed_entry: Optional[FeedEntryType] = None, **kwargs: Any + ) -> None: + if not feed_entry: + self.metadata[name] = FeedEntryType(**kwargs) + else: + self.metadata[name] = feed_entry diff --git a/core/feed/util.py b/core/feed/util.py new file mode 100644 index 0000000000..5519f0a5b5 --- /dev/null +++ b/core/feed/util.py @@ -0,0 +1,27 @@ +import datetime +from typing import Union + +import pytz + +TIME_FORMAT_UTC = "%Y-%m-%dT%H:%M:%S+00:00" +TIME_FORMAT_NAIVE = "%Y-%m-%dT%H:%M:%SZ" + + +def strftime(date: Union[datetime.datetime, datetime.date]) -> str: + """ + Format a date for the OPDS feeds. + + 'A Date construct is an element whose content MUST conform to the + "date-time" production in [RFC3339]. 
In addition, an uppercase "T" + character MUST be used to separate date and time, and an uppercase + "Z" character MUST be present in the absence of a numeric time zone + offset.' (https://tools.ietf.org/html/rfc4287#section-3.3) + """ + if isinstance(date, datetime.datetime) and date.tzinfo is not None: + # Convert to UTC to make the formatting easier. + fmt = TIME_FORMAT_UTC + date = date.astimezone(pytz.UTC) + else: + fmt = TIME_FORMAT_NAIVE + + return date.strftime(fmt) diff --git a/core/model/constants.py b/core/model/constants.py index 230b7f3944..66052d9a55 100644 --- a/core/model/constants.py +++ b/core/model/constants.py @@ -230,6 +230,7 @@ class LinkRelations: SHORT_DESCRIPTION = "http://librarysimplified.org/terms/rel/short-description" AUTHOR = "http://schema.org/author" ALTERNATE = "alternate" + FACET_REL = "http://opds-spec.org/facet" # The rel for a link we feed to clients for samples/previews. CLIENT_SAMPLE = "preview" diff --git a/core/model/licensing.py b/core/model/licensing.py index 788fe095af..93ab351e59 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -255,9 +255,9 @@ class LicensePool(Base): open_access = Column(Boolean, index=True) last_checked = Column(DateTime(timezone=True), index=True) - licenses_owned = Column(Integer, default=0, index=True) - licenses_available = Column(Integer, default=0, index=True) - licenses_reserved = Column(Integer, default=0) + licenses_owned: int = Column(Integer, default=0, index=True) + licenses_available: int = Column(Integer, default=0, index=True) + licenses_reserved: int = Column(Integer, default=0) patrons_in_hold_queue = Column(Integer, default=0) # This lets us cache the work of figuring out the best open access diff --git a/core/model/work.py b/core/model/work.py index 3dd8f8edef..063f7cd446 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -7,7 +7,7 @@ from collections import Counter from datetime import date, datetime from decimal import Decimal -from typing import TYPE_CHECKING, Any, List, Union, cast +from typing import TYPE_CHECKING, Any, List, Optional, Union, cast import pytz from sqlalchemy import ( @@ -287,7 +287,7 @@ def sort_author(self): return self.presentation_edition.sort_author or self.presentation_edition.author @property - def language(self): + def language(self) -> Optional[str]: if self.presentation_edition: return self.presentation_edition.language return None diff --git a/core/opds_schema.py b/core/opds_schema.py index 23f24a0b87..05717c1811 100644 --- a/core/opds_schema.py +++ b/core/opds_schema.py @@ -36,6 +36,8 @@ def validate_schema(self, schema_path: str, feed: dict): class OPDS2SchemaValidation(OPDS2ImportMonitor, OPDS2SchemaValidationMixin): def import_one_feed(self, feed): + if type(feed) in (str, bytes): + feed = json.loads(feed) self.validate_schema("core/resources/opds2_schema/feed.schema.json", feed) return [], [] diff --git a/pyproject.toml b/pyproject.toml index 83cb0192f1..0f7a967861 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,7 @@ module = [ "api.circulation", "api.discovery.*", "api.integration.*", + "core.feed.*", "core.integration.*", "core.model.announcements", "core.model.hassessioncache", diff --git a/scripts.py b/scripts.py index f088686dfb..2d3888e05f 100644 --- a/scripts.py +++ b/scripts.py @@ -32,6 +32,7 @@ from api.overdrive import OverdriveAPI from core.entrypoint import EntryPoint from core.external_search import ExternalSearchIndex +from core.feed.acquisition import OPDSAcquisitionFeed from core.lane import Facets, 
FeaturedFacets, Lane, Pagination from core.log import LogConfiguration from core.marc import MARCExporter @@ -55,7 +56,6 @@ pg_advisory_lock, ) from core.model.configuration import ExternalIntegrationLink -from core.opds import AcquisitionFeed from core.scripts import ( IdentifierInputScript, LaneSweeperScript, @@ -483,17 +483,17 @@ def do_generate(self, lane, facets, pagination, feed_class=None): library = lane.get_library(self._db) annotator = self.app.manager.annotator(lane, facets=facets) url = annotator.feed_url(lane, facets=facets, pagination=pagination) - feed_class = feed_class or AcquisitionFeed + feed_class = feed_class or OPDSAcquisitionFeed return feed_class.page( _db=self._db, title=title, url=url, worklist=lane, annotator=annotator, - facets=facets, pagination=pagination, - max_age=0, - ) + facets=facets, + search_engine=None, + ).as_response(max_age=0) class CacheOPDSGroupFeedPerLane(CacheRepresentationPerLane): @@ -512,7 +512,7 @@ def do_generate(self, lane, facets, pagination, feed_class=None): title = lane.display_name annotator = self.app.manager.annotator(lane, facets=facets) url = annotator.groups_url(lane, facets) - feed_class = feed_class or AcquisitionFeed + feed_class = feed_class or OPDSAcquisitionFeed # Since grouped feeds are only cached for lanes that have sublanes, # there's no need to consider the case of a lane with no sublanes, @@ -523,9 +523,10 @@ def do_generate(self, lane, facets, pagination, feed_class=None): url=url, worklist=lane, annotator=annotator, - max_age=0, + pagination=None, facets=facets, - ) + search_engine=None, + ).as_response(max_age=0) def facets(self, lane): """Generate a Facets object for each of the library's enabled diff --git a/tests/api/feed/equivalence/test_feed_equivalence.py b/tests/api/feed/equivalence/test_feed_equivalence.py new file mode 100644 index 0000000000..cf07fd153d --- /dev/null +++ b/tests/api/feed/equivalence/test_feed_equivalence.py @@ -0,0 +1,296 @@ +from __future__ import annotations + +from lxml import etree + +from api.admin.opds import AdminAnnotator as OldAdminAnnotator +from api.admin.opds import AdminFeed as OldAdminFeed +from api.app import app +from api.opds import LibraryAnnotator as OldLibraryAnnotator +from api.opds import LibraryLoanAndHoldAnnotator as OldLibraryLoanAndHoldAnnotator +from core.external_search import MockExternalSearchIndex +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.admin import AdminFeed +from core.feed.annotator.admin import AdminAnnotator +from core.feed.annotator.circulation import LibraryAnnotator +from core.feed.navigation import NavigationFeed +from core.lane import Facets, Pagination +from core.model.work import Work +from core.opds import AcquisitionFeed +from core.opds import NavigationFeed as OldNavigationFeed +from tests.api.feed.test_library_annotator import ( # noqa + LibraryAnnotatorFixture, + annotator_fixture, + patch_url_for, +) +from tests.fixtures.database import DatabaseTransactionFixture + + +def format_tags(tags1, tags2): + result = "" + result += "TAG1\n" + for tag in tags1: + result += f"{tag[1:]}\n" + result += "TAG2\n" + for tag in tags2: + result += f"{tag[1:]}\n" + return result + + +def assert_equal_xmls(xml1: str | etree._Element, xml2: str | etree._Element): + if isinstance(xml1, str) or isinstance(xml1, bytes): + parsed1 = etree.fromstring(xml1) + else: + parsed1 = xml1 + + if isinstance(xml2, str) or isinstance(xml2, bytes): + parsed2 = etree.fromstring(xml2) + else: + parsed2 = xml2 + + # Pull out comparable information + 
tags1 = [(tag, tag.tag, tag.text, tag.attrib) for tag in parsed1[1:]] + tags2 = [(tag, tag.tag, tag.text, tag.attrib) for tag in parsed2[1:]] + # Sort the tags on the information so it's easy to compare sequentially + tags1.sort(key=lambda x: (x[1], x[2] or "", x[3].values())) + tags2.sort(key=lambda x: (x[1], x[2] or "", x[3].values())) + + assert len(tags1) == len(tags2), format_tags(tags1, tags2) + + # Assert every tag is equal + for ix, tag1 in enumerate(tags1): + tag2 = tags2[ix] + # Comparable information should be equivalent + if tag1[1:] == tag2[1:]: + assert_equal_xmls(tag1[0], tag2[0]) + break + else: + assert False, format_tags([tag1], tags2) + + +class TestFeedEquivalence: + def test_page_feed(self, annotator_fixture: LibraryAnnotatorFixture): + db = annotator_fixture.db + lane = annotator_fixture.lane + library = db.default_library() + + work1 = db.work(with_license_pool=True) + work2 = db.work(with_open_access_download=True) + + search_index = MockExternalSearchIndex() + search_index.bulk_update([work1, work2]) + + with app.test_request_context("/"): + new_annotator = LibraryAnnotator(None, lane, library) + new_feed = OPDSAcquisitionFeed.page( + db.session, + lane.display_name, + "http://test-url/", + lane, + new_annotator, + Facets.default(library), + Pagination.default(), + search_index, + ) + + old_annotator = OldLibraryAnnotator(None, lane, library) + old_feed = AcquisitionFeed.page( + db.session, + lane.display_name, + "http://test-url/", + lane, + old_annotator, + Facets.default(library), + Pagination.default(), + search_engine=search_index, + ) + + assert_equal_xmls(str(old_feed), new_feed.serialize()) + + def test_page_feed_with_loan_annotator( + self, annotator_fixture: LibraryAnnotatorFixture + ): + db = annotator_fixture.db + library = db.default_library() + work1 = db.work(with_license_pool=True) + patron = db.patron() + work1.active_license_pool(library).loan_to(patron) + + with app.test_request_context("/"): + new_feed = OPDSAcquisitionFeed.active_loans_for(None, patron).as_response() + old_feed = OldLibraryLoanAndHoldAnnotator.active_loans_for(None, patron) + + assert_equal_xmls(str(old_feed), str(new_feed)) + + def test_groups_feed(self, annotator_fixture: LibraryAnnotatorFixture): + db = annotator_fixture.db + lane = annotator_fixture.lane + de_lane = db.lane(parent=lane, languages=["de"]) + library = db.default_library() + + work1 = db.work(with_license_pool=True) + work2 = db.work(with_open_access_download=True, language="de") + + search_index = MockExternalSearchIndex() + search_index.bulk_update([work1, work2]) + + patron = db.patron() + work1.active_license_pool(library).loan_to(patron) + + with app.test_request_context("/"): + new_annotator = LibraryAnnotator(None, lane, library) + new_feed = OPDSAcquisitionFeed.groups( + db.session, + "Groups", + "http://groups/", + lane, + new_annotator, + Pagination.default(), + Facets.default(library), + search_index, + ) + + old_annotator = OldLibraryAnnotator(None, lane, library) + old_feed = AcquisitionFeed.groups( + db.session, + "Groups", + "http://groups/", + lane, + old_annotator, + pagination=Pagination.default(), + facets=Facets.default(library), + search_engine=search_index, + ) + + assert_equal_xmls(str(old_feed), new_feed.serialize().decode()) + + def test_search_feed(self, annotator_fixture: LibraryAnnotatorFixture): + db = annotator_fixture.db + lane = annotator_fixture.lane + de_lane = db.lane(parent=lane, languages=["de"]) + library = db.default_library() + + work1 = db.work(with_license_pool=True) + 
work2 = db.work(with_open_access_download=True, language="de") + + search_index = MockExternalSearchIndex() + search_index.bulk_update([work1, work2]) + + patron = db.patron() + work1.active_license_pool(library).loan_to(patron) + + with app.test_request_context("/"): + new_annotator = LibraryAnnotator(None, lane, library) + new_feed = OPDSAcquisitionFeed.search( # type: ignore[union-attr] + db.session, + "Search", + "http://search/", + lane, + search_index, + "query", + new_annotator, + Pagination.default(), + Facets.default(library), + ).as_response() + + old_annotator = OldLibraryAnnotator(None, lane, library) + old_feed = AcquisitionFeed.search( + db.session, + "Search", + "http://search/", + lane, + search_index, + "query", + Pagination.default(), + Facets.default(library), + old_annotator, + ) + + assert_equal_xmls(str(old_feed), str(new_feed)) + + def test_from_query_feed(self, annotator_fixture: LibraryAnnotatorFixture): + db = annotator_fixture.db + lane = annotator_fixture.lane + de_lane = db.lane(parent=lane, languages=["de"]) + library = db.default_library() + + work1 = db.work(with_license_pool=True) + work2 = db.work(with_open_access_download=True, language="de") + + search_index = MockExternalSearchIndex() + search_index.bulk_update([work1, work2]) + + patron = db.patron() + work1.active_license_pool(library).loan_to(patron) + + def url_fn(page): + return f"http://pagination?page={page}" + + query = db.session.query(Work) + + with app.test_request_context("/"): + new_annotator = LibraryAnnotator(None, lane, library) + new_feed = OPDSAcquisitionFeed.from_query( + query, + db.session, + "Search", + "http://search/", + Pagination(), + url_fn, + new_annotator, + ) + + old_annotator = OldLibraryAnnotator(None, lane, library) + old_feed = AcquisitionFeed.from_query( + query, + db.session, + "Search", + "http://search/", + Pagination(), + url_fn, + old_annotator, + ) + + assert_equal_xmls(str(old_feed), new_feed.serialize()) + + +class TestAdminAnnotator: + def test_suppressed(self, annotator_fixture: LibraryAnnotatorFixture): + db = annotator_fixture.db + library = db.default_library() + + work1 = db.work(with_open_access_download=True) + pool = work1.active_license_pool() + pool.suppressed = True + + with app.test_request_context("/"): + new_annotator = AdminAnnotator(None, library) + new_feed = AdminFeed.suppressed( + db.session, "", "http://verbose", new_annotator + ) + + old_annotator = OldAdminAnnotator(None, library) + old_feed = OldAdminFeed.suppressed( + db.session, "", "http://verbose", old_annotator + ) + + assert_equal_xmls(str(old_feed), new_feed.serialize()) + + +class TestNavigationFeed: + def test_feed(self, db: DatabaseTransactionFixture): + lane = db.lane() + child1 = db.lane(parent=lane) + child2 = db.lane(parent=lane) + + with app.test_request_context("/"): + new_annotator = LibraryAnnotator(None, lane, db.default_library()) + new_feed = NavigationFeed.navigation( + db.session, "Navigation", "http://navigation", lane, new_annotator + ) + + old_annotator = OldLibraryAnnotator(None, lane, db.default_library()) + old_feed = OldNavigationFeed.navigation( + db.session, "Navigation", "http://navigation", lane, old_annotator + ) + + assert_equal_xmls(str(old_feed), str(new_feed.as_response())) diff --git a/tests/api/feed/fixtures.py b/tests/api/feed/fixtures.py new file mode 100644 index 0000000000..002d806bac --- /dev/null +++ b/tests/api/feed/fixtures.py @@ -0,0 +1,47 @@ +import urllib +from dataclasses import dataclass +from functools import partial +from typing 
import Any, Callable +from unittest.mock import patch + +import pytest +from flask import has_request_context + +from core.feed.annotator.circulation import CirculationManagerAnnotator + + +def _patched_url_for(*args: Any, _original=None, **kwargs: Any) -> str: + """Test mode url_for for the annotators + :param _original: Is the original Annotator.url_for method + """ + if has_request_context() and _original: + # Ignore the patch if we have a request context + return _original(object(), *args, **kwargs) + # Generate a plausible-looking URL that doesn't depend on Flask + # being set up. + host = "host" + url = ("http://%s/" % host) + "/".join(args) + connector = "?" + for k, v in sorted(kwargs.items()): + if v is None: + v = "" + v = urllib.parse.quote(str(v)) + k = urllib.parse.quote(str(k)) + url += connector + f"{k}={v}" + connector = "&" + return url + + +@dataclass +class PatchedUrlFor: + patched_url_for: Callable + + +@pytest.fixture(scope="function") +def patch_url_for(): + """Patch the url_for method for annotators""" + with patch( + "core.feed.annotator.circulation.CirculationManagerAnnotator.url_for", + new=partial(_patched_url_for, _original=CirculationManagerAnnotator.url_for), + ) as patched: + yield PatchedUrlFor(patched) diff --git a/tests/api/feed/test_admin.py b/tests/api/feed/test_admin.py new file mode 100644 index 0000000000..283823e337 --- /dev/null +++ b/tests/api/feed/test_admin.py @@ -0,0 +1,285 @@ +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.admin import AdminFeed +from core.feed.annotator.admin import AdminAnnotator +from core.feed.types import FeedData +from core.lane import Pagination +from core.model.configuration import ExternalIntegration, ExternalIntegrationLink +from core.model.datasource import DataSource +from core.model.measurement import Measurement +from tests.api.feed.fixtures import PatchedUrlFor, patch_url_for # noqa +from tests.fixtures.database import DatabaseTransactionFixture + + +class TestOPDS: + def links(self, feed: FeedData, rel=None): + all_links = feed.links + feed.facet_links + feed.breadcrumbs + links = sorted(all_links, key=lambda x: (x.rel, getattr(x, "title", None))) + r = [] + for l in links: + if not rel or l.rel == rel or (isinstance(rel, list) and l.rel in rel): + r.append(l) + return r + + def test_feed_includes_staff_rating( + self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor + ): + work = db.work(with_open_access_download=True) + lp = work.license_pools[0] + staff_data_source = DataSource.lookup(db.session, DataSource.LIBRARY_STAFF) + lp.identifier.add_measurement( + staff_data_source, Measurement.RATING, 3, weight=1000 + ) + + feed = OPDSAcquisitionFeed( + "test", + "url", + [work], + AdminAnnotator(None, db.default_library()), + ) + feed.generate_feed() + + [entry] = feed._feed.entries + assert entry.computed is not None + assert len(entry.computed.ratings) == 2 + assert 3 == float(entry.computed.ratings[1].ratingValue) # type: ignore[attr-defined] + assert Measurement.RATING == entry.computed.ratings[1].additionalType # type: ignore[attr-defined] + + def test_feed_includes_refresh_link( + self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor + ): + work = db.work(with_open_access_download=True) + lp = work.license_pools[0] + lp.suppressed = False + db.session.commit() + + # If the metadata wrangler isn't configured, the link is left out. 
+ feed = OPDSAcquisitionFeed( + "test", + "url", + [work], + AdminAnnotator(None, db.default_library()), + ) + [entry] = feed._feed.entries + assert entry.computed is not None + assert [] == [ + x + for x in entry.computed.other_links + if x.rel == "http://librarysimplified.org/terms/rel/refresh" + ] + + def test_feed_includes_suppress_link( + self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor + ): + work = db.work(with_open_access_download=True) + lp = work.license_pools[0] + lp.suppressed = False + db.session.commit() + + feed = OPDSAcquisitionFeed( + "test", + "url", + [work], + AdminAnnotator(None, db.default_library()), + ) + [entry] = feed._feed.entries + assert entry.computed is not None + [suppress_link] = [ + x + for x in entry.computed.other_links + if x.rel == "http://librarysimplified.org/terms/rel/hide" + ] + assert suppress_link.href and lp.identifier.identifier in suppress_link.href + unsuppress_links = [ + x + for x in entry.computed.other_links + if x.rel == "http://librarysimplified.org/terms/rel/restore" + ] + assert 0 == len(unsuppress_links) + + lp.suppressed = True + db.session.commit() + + feed = OPDSAcquisitionFeed( + "test", + "url", + [work], + AdminAnnotator(None, db.default_library()), + ) + [entry] = feed._feed.entries + assert entry.computed is not None + [unsuppress_link] = [ + x + for x in entry.computed.other_links + if x.rel == "http://librarysimplified.org/terms/rel/restore" + ] + assert unsuppress_link.href and lp.identifier.identifier in unsuppress_link.href + suppress_links = [ + x + for x in entry.computed.other_links + if x.rel == "http://librarysimplified.org/terms/rel/hide" + ] + assert 0 == len(suppress_links) + + def test_feed_includes_edit_link( + self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor + ): + work = db.work(with_open_access_download=True) + lp = work.license_pools[0] + + feed = OPDSAcquisitionFeed( + "test", + "url", + [work], + AdminAnnotator(None, db.default_library()), + ) + [entry] = feed._feed.entries + assert entry.computed is not None + [edit_link] = [x for x in entry.computed.other_links if x.rel == "edit"] + assert edit_link.href and lp.identifier.identifier in edit_link.href + + def test_feed_includes_change_cover_link( + self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor + ): + work = db.work(with_open_access_download=True) + lp = work.license_pools[0] + library = db.default_library() + + feed = OPDSAcquisitionFeed( + "test", + "url", + [work], + AdminAnnotator(None, library), + ) + [entry] = feed._feed.entries + assert entry.computed is not None + + # Since there's no storage integration, the change cover link isn't included. + assert [] == [ + x + for x in entry.computed.other_links + if x.rel == "http://librarysimplified.org/terms/rel/change_cover" + ] + + # There is now a covers storage integration that is linked to the external + # integration for a collection that the work is in. It will use that + # covers mirror and the change cover link is included. 
+ storage = db.external_integration( + ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL + ) + storage.username = "user" + storage.password = "pass" + + collection = db.collection() + purpose = ExternalIntegrationLink.COVERS + external_integration_link = db.external_integration_link( + integration=collection._external_integration, + other_integration=storage, + purpose=purpose, + ) + library.collections.append(collection) + work = db.work(with_open_access_download=True, collection=collection) + lp = work.license_pools[0] + feed = OPDSAcquisitionFeed( + "test", + "url", + [work], + AdminAnnotator(None, library), + ) + [entry] = feed._feed.entries + assert entry.computed is not None + + [change_cover_link] = [ + x + for x in entry.computed.other_links + if x.rel == "http://librarysimplified.org/terms/rel/change_cover" + ] + assert ( + change_cover_link.href + and lp.identifier.identifier in change_cover_link.href + ) + + def test_suppressed_feed( + self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor + ): + # Test the ability to show a paginated feed of suppressed works. + + work1 = db.work(with_open_access_download=True) + work1.license_pools[0].suppressed = True + + work2 = db.work(with_open_access_download=True) + work2.license_pools[0].suppressed = True + + # This work won't be included in the feed since its + # suppressed pool is superceded. + work3 = db.work(with_open_access_download=True) + work3.license_pools[0].suppressed = True + work3.license_pools[0].superceded = True + + pagination = Pagination(size=1) + annotator = MockAnnotator(db.default_library()) + titles = [work1.title, work2.title] + + def make_page(pagination): + return AdminFeed.suppressed( + _db=db.session, + title="Hidden works", + url=db.fresh_url(), + annotator=annotator, + pagination=pagination, + ) + + feed = make_page(pagination)._feed + assert 1 == len(feed.entries) + assert feed.entries[0].computed.title.text in titles + titles.remove(feed.entries[0].computed.title.text) + [remaining_title] = titles + + # Make sure the links are in place. + [start] = self.links(feed, "start") + assert annotator.groups_url(None) == start.href + assert annotator.top_level_title() == start.title + + [up] = self.links(feed, "up") + assert annotator.groups_url(None) == up.href + assert annotator.top_level_title() == up.title + + [next_link] = self.links(feed, "next") + assert annotator.suppressed_url(pagination.next_page) == next_link.href + + # This was the first page, so no previous link. + assert [] == self.links(feed, "previous") + + # Now get the second page and make sure it has a 'previous' link. + second_page = make_page(pagination.next_page)._feed + [previous] = self.links(second_page, "previous") + assert annotator.suppressed_url(pagination) == previous.href + assert 1 == len(second_page.entries) + assert remaining_title == second_page.entries[0].computed.title.text + + # The third page is empty. + third_page = make_page(pagination.next_page.next_page)._feed + [previous] = self.links(third_page, "previous") + assert annotator.suppressed_url(pagination.next_page) == previous.href + assert 0 == len(third_page.entries) + + +class MockAnnotator(AdminAnnotator): + def __init__(self, library): + super().__init__(None, library) + + def groups_url(self, lane): + if lane: + name = lane.name + else: + name = "" + return "http://groups/%s" % name + + def suppressed_url(self, pagination): + base = "http://suppressed/" + sep = "?" 
+ if pagination: + base += sep + pagination.query_string + return base + + def annotate_feed(self, feed): + super().annotate_feed(feed) diff --git a/tests/api/feed/test_annotators.py b/tests/api/feed/test_annotators.py new file mode 100644 index 0000000000..e4e3b1032c --- /dev/null +++ b/tests/api/feed/test_annotators.py @@ -0,0 +1,469 @@ +from datetime import timedelta + +from core.classifier import Classifier +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.base import Annotator +from core.feed.annotator.circulation import CirculationManagerAnnotator +from core.feed.annotator.verbose import VerboseAnnotator +from core.feed.types import FeedEntryType, Link, WorkEntry +from core.feed.util import strftime +from core.model import tuple_to_numericrange +from core.model.classification import Subject +from core.model.contributor import Contributor +from core.model.datasource import DataSource +from core.model.edition import Edition +from core.model.measurement import Measurement +from core.model.resource import Hyperlink, Resource +from core.model.work import Work +from core.util.datetime_helpers import utc_now +from tests.core.test_opds import TestAnnotatorsFixture, annotators_fixture # noqa +from tests.fixtures.database import ( # noqa + DatabaseTransactionFixture, + DBStatementCounter, +) + + +class TestAnnotators: + def test_all_subjects(self, annotators_fixture: TestAnnotatorsFixture): + data, db, session = ( + annotators_fixture, + annotators_fixture.db, + annotators_fixture.session, + ) + + work = db.work(genre="Fiction", with_open_access_download=True) + edition = work.presentation_edition + identifier = edition.primary_identifier + source1 = DataSource.lookup(session, DataSource.GUTENBERG) + source2 = DataSource.lookup(session, DataSource.OCLC) + + subjects = [ + (source1, Subject.FAST, "fast1", "name1", 1), + (source1, Subject.LCSH, "lcsh1", "name2", 1), + (source2, Subject.LCSH, "lcsh1", "name2", 1), + (source1, Subject.LCSH, "lcsh2", "name3", 3), + ( + source1, + Subject.DDC, + "300", + "Social sciences, sociology & anthropology", + 1, + ), + ] + + for source, subject_type, subject, name, weight in subjects: + identifier.classify(source, subject_type, subject, name, weight=weight) + + # Mock Work.all_identifier_ids (called by VerboseAnnotator.categories) + # so we can track the value that was passed in for `cutoff`. + def mock_all_identifier_ids(policy=None): + work.called_with_policy = policy + # Do the actual work so that categories() gets the + # correct information. + return work.original_all_identifier_ids(policy) + + work.original_all_identifier_ids = work.all_identifier_ids + work.all_identifier_ids = mock_all_identifier_ids + category_tags = VerboseAnnotator.categories(work) + + # When we are generating subjects as part of an OPDS feed, by + # default we set a cutoff of 100 equivalent identifiers. This + # gives us reasonable worst-case performance at the cost of + # not showing every single random subject under which an + # extremely popular book is filed. 
+ assert 100 == work.called_with_policy.equivalent_identifier_cutoff + + ddc_uri = Subject.uri_lookup[Subject.DDC] + rating_value = "ratingValue" + assert [ + { + "term": "300", + rating_value: 1, + "label": "Social sciences, sociology & anthropology", + } + ] == category_tags[ddc_uri] + + fast_uri = Subject.uri_lookup[Subject.FAST] + assert [{"term": "fast1", "label": "name1", rating_value: 1}] == category_tags[ + fast_uri + ] + + lcsh_uri = Subject.uri_lookup[Subject.LCSH] + assert [ + {"term": "lcsh1", "label": "name2", rating_value: 2}, + {"term": "lcsh2", "label": "name3", rating_value: 3}, + ] == sorted(category_tags[lcsh_uri], key=lambda x: x[rating_value]) + + genre_uri = Subject.uri_lookup[Subject.SIMPLIFIED_GENRE] + assert [ + dict(label="Fiction", term=Subject.SIMPLIFIED_GENRE + "Fiction") + ] == category_tags[genre_uri] + + # Age range assertions + work = db.work(fiction=False, audience=Classifier.AUDIENCE_CHILDREN) + work.target_age = tuple_to_numericrange((8, 12)) + categories = Annotator.categories(work) + assert categories[Subject.SIMPLIFIED_FICTION_STATUS] == [ + dict( + term=f"{Subject.SIMPLIFIED_FICTION_STATUS}Nonfiction", + label="Nonfiction", + ) + ] + assert categories[Subject.uri_lookup[Subject.AGE_RANGE]] == [ + dict(term=work.target_age_string, label=work.target_age_string) + ] + + def test_content(self, db: DatabaseTransactionFixture): + work = db.work() + work.summary_text = "A Summary" + assert Annotator.content(work) == "A Summary" + + resrc = Resource() + db.session.add(resrc) + resrc.set_fetched_content("text", "Representation Summary", None) + + work.summary = resrc + work.summary_text = None + # The resource sets the summary + assert Annotator.content(work) == "Representation Summary" + assert work.summary_text == "Representation Summary" + + assert Annotator.content(None) == "" + + def test_appeals(self, annotators_fixture: TestAnnotatorsFixture): + data, db, session = ( + annotators_fixture, + annotators_fixture.db, + annotators_fixture.session, + ) + + work = db.work(with_open_access_download=True) + work.appeal_language = 0.1 + work.appeal_character = 0.2 + work.appeal_story = 0.3 + work.appeal_setting = 0.4 + work.calculate_opds_entries(verbose=True) + + category_tags = VerboseAnnotator.categories(work) + appeal_tags = category_tags[Work.APPEALS_URI] + expect = [ + (Work.APPEALS_URI + Work.LANGUAGE_APPEAL, Work.LANGUAGE_APPEAL, 0.1), + (Work.APPEALS_URI + Work.CHARACTER_APPEAL, Work.CHARACTER_APPEAL, 0.2), + (Work.APPEALS_URI + Work.STORY_APPEAL, Work.STORY_APPEAL, 0.3), + (Work.APPEALS_URI + Work.SETTING_APPEAL, Work.SETTING_APPEAL, 0.4), + ] + actual = [(x["term"], x["label"], x["ratingValue"]) for x in appeal_tags] + assert set(expect) == set(actual) + + def test_detailed_author(self, annotators_fixture: TestAnnotatorsFixture): + data, db, session = ( + annotators_fixture, + annotators_fixture.db, + annotators_fixture.session, + ) + + c, ignore = db.contributor("Familyname, Givenname") + c.display_name = "Givenname Familyname" + c.family_name = "Familyname" + c.wikipedia_name = "Givenname Familyname (Author)" + c.viaf = "100" + c.lc = "n100" + + author = VerboseAnnotator.detailed_author(c) + + assert "Givenname Familyname" == author.name + assert "Familyname, Givenname" == author.sort_name + assert "Givenname Familyname (Author)" == author.wikipedia_name + assert "http://viaf.org/viaf/100" == author.viaf + assert "http://id.loc.gov/authorities/names/n100" == author.lc + + work = db.work(authors=[], with_license_pool=True) + 
work.presentation_edition.add_contributor(c, Contributor.PRIMARY_AUTHOR_ROLE)
+
+        [same_tag] = VerboseAnnotator.authors(work.presentation_edition)["authors"]
+        assert same_tag.dict() == author.dict()
+
+    def test_duplicate_author_names_are_ignored(
+        self, annotators_fixture: TestAnnotatorsFixture
+    ):
+        data, db, session = (
+            annotators_fixture,
+            annotators_fixture.db,
+            annotators_fixture.session,
+        )
+
+        # Ignores duplicate author names
+        work = db.work(with_license_pool=True)
+        duplicate = db.contributor()[0]
+        duplicate.sort_name = work.author
+
+        edition = work.presentation_edition
+        edition.add_contributor(duplicate, Contributor.AUTHOR_ROLE)
+
+        assert 1 == len(Annotator.authors(edition)["authors"])
+
+    def test_all_annotators_mention_every_relevant_author(
+        self, annotators_fixture: TestAnnotatorsFixture
+    ):
+        data, db, session = (
+            annotators_fixture,
+            annotators_fixture.db,
+            annotators_fixture.session,
+        )
+
+        work = db.work(authors=[], with_license_pool=True)
+        edition = work.presentation_edition
+
+        primary_author, ignore = db.contributor()
+        author, ignore = db.contributor()
+        illustrator, ignore = db.contributor()
+        barrel_washer, ignore = db.contributor()
+
+        edition.add_contributor(primary_author, Contributor.PRIMARY_AUTHOR_ROLE)
+        edition.add_contributor(author, Contributor.AUTHOR_ROLE)
+
+        # This contributor is relevant because we have a MARC Role Code
+        # for the role.
+        edition.add_contributor(illustrator, Contributor.ILLUSTRATOR_ROLE)
+
+        # This contributor is not relevant because we have no MARC
+        # Role Code for the role.
+        edition.add_contributor(barrel_washer, "Barrel Washer")
+
+        illustrator_code = Contributor.MARC_ROLE_CODES[Contributor.ILLUSTRATOR_ROLE]
+
+        tags = Annotator.authors(edition)
+        # We made two <author> tags and one <contributor>
+        # tag, for the illustrator.
+ assert 2 == len(tags["authors"]) + assert 1 == len(tags["contributors"]) + assert [None, None, illustrator_code] == [ + x.role for x in (tags["authors"] + tags["contributors"]) + ] + + # Verbose annotator only creates author tags + tags = VerboseAnnotator.authors(edition) + assert 2 == len(tags["authors"]) + assert 0 == len(tags["contributors"]) + assert [None, None] == [x.role for x in (tags["authors"])] + + def test_ratings(self, annotators_fixture: TestAnnotatorsFixture): + data, db, session = ( + annotators_fixture, + annotators_fixture.db, + annotators_fixture.session, + ) + + work = db.work(with_license_pool=True, with_open_access_download=True) + work.quality = 1.0 / 3 + work.popularity = 0.25 + work.rating = 0.6 + work.calculate_opds_entries(verbose=True) + entry = OPDSAcquisitionFeed._create_entry( + work, + work.active_license_pool(), + work.presentation_edition, + work.presentation_edition.primary_identifier, + VerboseAnnotator(), + ) + assert entry.computed is not None + + ratings = [ + ( + getattr(rating, "ratingValue"), + getattr(rating, "additionalType"), + ) + for rating in entry.computed.ratings + ] + expected = [ + ("0.3333", Measurement.QUALITY), + ("0.2500", Measurement.POPULARITY), + ("0.6000", None), + ] + assert set(expected) == set(ratings) + + def test_subtitle(self, annotators_fixture: TestAnnotatorsFixture): + data, db, session = ( + annotators_fixture, + annotators_fixture.db, + annotators_fixture.session, + ) + + work = db.work(with_license_pool=True, with_open_access_download=True) + work.presentation_edition.subtitle = "Return of the Jedi" + work.calculate_opds_entries() + + feed = OPDSAcquisitionFeed( + db.fresh_str(), + db.fresh_url(), + [work], + CirculationManagerAnnotator(None), + )._feed + + computed = feed.entries[0].computed + assert computed is not None + assert computed.subtitle is not None + assert computed.subtitle.text == "Return of the Jedi" + + # If there's no subtitle, the subtitle tag isn't included. + work.presentation_edition.subtitle = None + work.calculate_opds_entries() + feed = OPDSAcquisitionFeed( + db.fresh_str(), + db.fresh_url(), + [work], + CirculationManagerAnnotator(None), + )._feed + + computed = feed.entries[0].computed + assert computed is not None + assert computed.subtitle == None + + def test_series(self, annotators_fixture: TestAnnotatorsFixture): + data, db, session = ( + annotators_fixture, + annotators_fixture.db, + annotators_fixture.session, + ) + + work = db.work(with_license_pool=True, with_open_access_download=True) + work.presentation_edition.series = "Harry Otter and the Lifetime of Despair" + work.presentation_edition.series_position = 4 + work.calculate_opds_entries() + + feed = OPDSAcquisitionFeed( + db.fresh_str(), + db.fresh_url(), + [work], + CirculationManagerAnnotator(None), + )._feed + computed = feed.entries[0].computed + assert computed is not None + + assert computed.series is not None + assert computed.series.name == work.presentation_edition.series # type: ignore[attr-defined] + assert computed.series.position == str( # type: ignore[attr-defined] + work.presentation_edition.series_position + ) + + # The series position can be 0, for a prequel for example. 
+ work.presentation_edition.series_position = 0 + work.calculate_opds_entries() + + feed = OPDSAcquisitionFeed( + db.fresh_str(), + db.fresh_url(), + [work], + CirculationManagerAnnotator(None), + )._feed + computed = feed.entries[0].computed + assert computed is not None + assert computed.series is not None + assert computed.series.name == work.presentation_edition.series # type: ignore[attr-defined] + assert computed.series.position == str( # type: ignore[attr-defined] + work.presentation_edition.series_position + ) + + # If there's no series title, the series tag isn't included. + work.presentation_edition.series = None + work.calculate_opds_entries() + feed = OPDSAcquisitionFeed( + db.fresh_str(), + db.fresh_url(), + [work], + CirculationManagerAnnotator(None), + )._feed + computed = feed.entries[0].computed + assert computed is not None + assert computed.series == None + + # No series name + assert Annotator.series(None, "") == None + + def test_samples(self, annotators_fixture: TestAnnotatorsFixture): + data, db, session = ( + annotators_fixture, + annotators_fixture.db, + annotators_fixture.session, + ) + + work = db.work(with_license_pool=True) + edition = work.presentation_edition + + resource = Resource(url="sampleurl") + session.add(resource) + session.commit() + + sample_link = Hyperlink( + rel=Hyperlink.SAMPLE, + resource_id=resource.id, + identifier_id=edition.primary_identifier_id, + data_source_id=2, + ) + session.add(sample_link) + session.commit() + + with DBStatementCounter(db.database.connection) as counter: + links = Annotator.samples(edition) + count = counter.count + + assert len(links) == 1 + assert links[0].id == sample_link.id + assert links[0].resource.url == "sampleurl" + # accessing resource should not be another query + assert counter.count == count + + # No edition = No samples + assert Annotator.samples(None) == [] + + +class TestAnnotator: + def test_annotate_work_entry(self, db: DatabaseTransactionFixture): + work = db.work(with_license_pool=True) + pool = work.active_license_pool() + edition: Edition = work.presentation_edition + now = utc_now() + + edition.cover_full_url = "http://coverurl.jpg" + edition.cover_thumbnail_url = "http://thumburl.gif" + work.summary_text = "Summary" + edition.language = None + work.last_update_time = now + edition.publisher = "publisher" + edition.imprint = "imprint" + edition.issued = utc_now().date() + + # datetime for > today + pool.availability_time = (utc_now() + timedelta(days=1)).date() + + entry = WorkEntry( + work=work, + edition=edition, + identifier=edition.primary_identifier, + license_pool=pool, + ) + Annotator().annotate_work_entry(entry) + data = entry.computed + assert data is not None + + # Images + assert len(data.image_links) == 2 + assert data.image_links[0] == Link( + href=edition.cover_full_url, rel=Hyperlink.IMAGE, type="image/jpeg" + ) + assert data.image_links[1] == Link( + href=edition.cover_thumbnail_url, + rel=Hyperlink.THUMBNAIL_IMAGE, + type="image/gif", + ) + + # Other values + assert data.imprint == FeedEntryType(text="imprint") + assert data.summary and data.summary.text == "Summary" + assert data.summary and data.summary.get("type") == "html" + assert data.publisher == FeedEntryType(text="publisher") + assert data.issued == edition.issued + + # Missing values + assert data.language == None + assert data.updated == FeedEntryType(text=strftime(now)) diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py new file mode 100644 index 0000000000..7c2cc3179d 
--- /dev/null +++ b/tests/api/feed/test_library_annotator.py @@ -0,0 +1,1795 @@ +import datetime +from collections import defaultdict +from typing import List +from unittest.mock import create_autospec + +import dateutil +import feedparser +import pytest +from freezegun import freeze_time +from lxml import etree + +from api.adobe_vendor_id import AuthdataUtility +from api.circulation import BaseCirculationAPI, CirculationAPI, FulfillmentInfo +from api.lanes import ContributorLane +from api.novelist import NoveListAPI +from core.analytics import Analytics +from core.classifier import ( # type: ignore[attr-defined] + Classifier, + Fantasy, + Urban_Fantasy, +) +from core.entrypoint import AudiobooksEntryPoint, EbooksEntryPoint, EverythingEntryPoint +from core.external_search import MockExternalSearchIndex +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.circulation import LibraryAnnotator +from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator +from core.feed.types import FeedData, WorkEntry +from core.feed.util import strftime +from core.lane import Facets, FacetsWithEntryPoint, Pagination +from core.lcp.credential import LCPCredentialFactory, LCPHashedPassphrase +from core.model import ( + CirculationEvent, + Contributor, + DataSource, + DeliveryMechanism, + ExternalIntegration, + Hyperlink, + PresentationCalculationPolicy, + Representation, + RightsStatus, + Work, +) +from core.opds import UnfulfillableWork +from core.opds_import import OPDSXMLParser +from core.util.datetime_helpers import utc_now +from core.util.flask_util import OPDSFeedResponse +from core.util.opds_writer import OPDSFeed +from tests.api.feed.fixtures import PatchedUrlFor, patch_url_for # noqa +from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.library import LibraryFixture +from tests.fixtures.vendor_id import VendorIDFixture + + +class LibraryAnnotatorFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + self.work = db.work(with_open_access_download=True) + parent = db.lane(display_name="Fiction", languages=["eng"], fiction=True) + self.lane = db.lane(display_name="Fantasy", languages=["eng"]) + self.lane.add_genre(Fantasy.name) + self.lane.parent = parent + self.annotator = LibraryAnnotator( + None, + self.lane, + db.default_library(), + top_level_title="Test Top Level Title", + ) + + # Initialize library with Adobe Vendor ID details + db.default_library().library_registry_short_name = "FAKE" + db.default_library().library_registry_shared_secret = "s3cr3t5" + + # A ContributorLane to test code that handles it differently. + self.contributor, ignore = db.contributor("Someone") + self.contributor_lane = ContributorLane( + db.default_library(), self.contributor, languages=["eng"], audiences=None + ) + + +@pytest.fixture(scope="function") +def annotator_fixture( + db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor +) -> LibraryAnnotatorFixture: + return LibraryAnnotatorFixture(db) + + +class TestLibraryAnnotator: + def test_add_configuration_links( + self, + annotator_fixture: LibraryAnnotatorFixture, + library_fixture: LibraryFixture, + ): + mock_feed = FeedData() + + # Set up configuration settings for links. 
+ library = annotator_fixture.db.default_library() + settings = library_fixture.settings(library) + settings.terms_of_service = "http://terms/" # type: ignore[assignment] + settings.privacy_policy = "http://privacy/" # type: ignore[assignment] + settings.copyright = "http://copyright/" # type: ignore[assignment] + settings.about = "http://about/" # type: ignore[assignment] + settings.license = "http://license/" # type: ignore[assignment] + settings.help_email = "help@me" # type: ignore[assignment] + settings.help_web = "http://help/" # type: ignore[assignment] + + # Set up settings for navigation links. + settings.web_header_links = ["http://example.com/1", "http://example.com/2"] + settings.web_header_labels = ["one", "two"] + + annotator_fixture.annotator.add_configuration_links(mock_feed) + + assert 9 == len(mock_feed.links) + + mock_feed_links = sorted(mock_feed.links, key=lambda x: x.rel or "") + expected_links = [ + (link.href, link.type) for link in mock_feed_links if link.rel != "related" + ] + + # They are the links we'd expect. + assert [ + ("http://about/", "text/html"), + ("http://copyright/", "text/html"), + ("mailto:help@me", None), + ("http://help/", "text/html"), + ("http://license/", "text/html"), + ("http://privacy/", "text/html"), + ("http://terms/", "text/html"), + ] == expected_links + + # There are two navigation links. + navigation_links = [x for x in mock_feed_links if x.rel == "related"] + assert {"navigation"} == {x.role for x in navigation_links} + assert {"http://example.com/1", "http://example.com/2"} == { + x.href for x in navigation_links + } + assert {"one", "two"} == {x.title for x in navigation_links} + + def test_top_level_title(self, annotator_fixture: LibraryAnnotatorFixture): + assert "Test Top Level Title" == annotator_fixture.annotator.top_level_title() + + def test_group_uri_with_flattened_lane( + self, annotator_fixture: LibraryAnnotatorFixture + ): + spanish_lane = annotator_fixture.db.lane( + display_name="Spanish", languages=["spa"] + ) + flat_spanish_lane = dict( + {"lane": spanish_lane, "label": "All Spanish", "link_to_list_feed": True} + ) + spanish_work = annotator_fixture.db.work( + title="Spanish Book", with_license_pool=True, language="spa" + ) + lp = spanish_work.license_pools[0] + annotator_fixture.annotator.lanes_by_work[spanish_work].append( + flat_spanish_lane + ) + + feed_url = annotator_fixture.annotator.feed_url(spanish_lane) + group_uri = annotator_fixture.annotator.group_uri( + spanish_work, lp, lp.identifier + ) + assert (feed_url, "All Spanish") == group_uri + + def test_lane_url(self, annotator_fixture: LibraryAnnotatorFixture): + fantasy_lane_with_sublanes = annotator_fixture.db.lane( + display_name="Fantasy with sublanes", languages=["eng"] + ) + fantasy_lane_with_sublanes.add_genre(Fantasy.name) + + urban_fantasy_lane = annotator_fixture.db.lane(display_name="Urban Fantasy") + urban_fantasy_lane.add_genre(Urban_Fantasy.name) + fantasy_lane_with_sublanes.sublanes.append(urban_fantasy_lane) + + fantasy_lane_without_sublanes = annotator_fixture.db.lane( + display_name="Fantasy without sublanes", languages=["eng"] + ) + fantasy_lane_without_sublanes.add_genre(Fantasy.name) + + default_lane_url = annotator_fixture.annotator.lane_url(None) + assert default_lane_url == annotator_fixture.annotator.default_lane_url() + + facets = FacetsWithEntryPoint(entrypoint=EbooksEntryPoint) + default_lane_url = annotator_fixture.annotator.lane_url(None, facets=facets) + assert default_lane_url == annotator_fixture.annotator.default_lane_url( + 
facets=facets + ) + + groups_url = annotator_fixture.annotator.lane_url(fantasy_lane_with_sublanes) + assert groups_url == annotator_fixture.annotator.groups_url( + fantasy_lane_with_sublanes + ) + + groups_url = annotator_fixture.annotator.lane_url( + fantasy_lane_with_sublanes, facets=facets + ) + assert groups_url == annotator_fixture.annotator.groups_url( + fantasy_lane_with_sublanes, facets=facets + ) + + feed_url = annotator_fixture.annotator.lane_url(fantasy_lane_without_sublanes) + assert feed_url == annotator_fixture.annotator.feed_url( + fantasy_lane_without_sublanes + ) + + feed_url = annotator_fixture.annotator.lane_url( + fantasy_lane_without_sublanes, facets=facets + ) + assert feed_url == annotator_fixture.annotator.feed_url( + fantasy_lane_without_sublanes, facets=facets + ) + + def test_fulfill_link_issues_only_open_access_links_when_library_does_not_identify_patrons( + self, annotator_fixture: LibraryAnnotatorFixture + ): + # This library doesn't identify patrons. + annotator_fixture.annotator.identifies_patrons = False + + # Because of this, normal fulfillment links are not generated. + [pool] = annotator_fixture.work.license_pools + [lpdm] = pool.delivery_mechanisms + assert None == annotator_fixture.annotator.fulfill_link(pool, None, lpdm) + + # However, fulfillment links _can_ be generated with the + # 'open-access' link relation. + link = annotator_fixture.annotator.fulfill_link( + pool, None, lpdm, OPDSFeed.OPEN_ACCESS_REL + ) + assert link is not None + assert OPDSFeed.OPEN_ACCESS_REL == link.rel + + # We freeze the test time here, because this test checks that the client token + # in the feed matches a generated client token. The client token contains an + # expiry date based on the current time, so this test can be flaky in a slow + # integration environment unless we make sure the clock does not change as this + # test is being performed. + @freeze_time("1867-07-01") + def test_fulfill_link_includes_device_registration_tags( + self, + annotator_fixture: LibraryAnnotatorFixture, + vendor_id_fixture: VendorIDFixture, + ): + """Verify that when Adobe Vendor ID delegation is included, the + fulfill link for an Adobe delivery mechanism includes instructions + on how to get a Vendor ID. + """ + vendor_id_fixture.initialize_adobe(annotator_fixture.db.default_library()) + [pool] = annotator_fixture.work.license_pools + identifier = pool.identifier + patron = annotator_fixture.db.patron() + old_credentials = list(patron.credentials) + + loan, ignore = pool.loan_to(patron, start=utc_now()) + adobe_delivery_mechanism, ignore = DeliveryMechanism.lookup( + annotator_fixture.db.session, "text/html", DeliveryMechanism.ADOBE_DRM + ) + other_delivery_mechanism, ignore = DeliveryMechanism.lookup( + annotator_fixture.db.session, "text/html", DeliveryMechanism.OVERDRIVE_DRM + ) + + # The fulfill link for non-Adobe DRM does not + # include the drm:licensor tag. + link = annotator_fixture.annotator.fulfill_link( + pool, loan, other_delivery_mechanism + ) + assert link is not None + for name, child in link: + assert name != "licensor" + + # No new Credential has been associated with the patron. + assert old_credentials == patron.credentials + + # The fulfill link for Adobe DRM includes information + # on how to get an Adobe ID in the drm:licensor tag. + link = annotator_fixture.annotator.fulfill_link( + pool, loan, adobe_delivery_mechanism + ) + licensor = getattr(link, "licensor", None) + assert None != licensor + + # An Adobe ID-specific identifier has been created for the patron. 
+ [adobe_id_identifier] = [ + x for x in patron.credentials if x not in old_credentials + ] + assert ( + AuthdataUtility.ADOBE_ACCOUNT_ID_PATRON_IDENTIFIER + == adobe_id_identifier.type + ) + assert DataSource.INTERNAL_PROCESSING == adobe_id_identifier.data_source.name + assert None == adobe_id_identifier.expires + + # The drm:licensor tag is the one we get by calling + # adobe_id_tags() on that identifier. + assert adobe_id_identifier.credential is not None + expect = annotator_fixture.annotator.adobe_id_tags( + adobe_id_identifier.credential + ) + assert expect.get("licensor") == licensor + + def test_no_adobe_id_tags_when_vendor_id_not_configured( + self, annotator_fixture: LibraryAnnotatorFixture + ): + """When vendor ID delegation is not configured, adobe_id_tags() + returns an empty list. + """ + assert {} == annotator_fixture.annotator.adobe_id_tags("patron identifier") + + def test_adobe_id_tags_when_vendor_id_configured( + self, + annotator_fixture: LibraryAnnotatorFixture, + vendor_id_fixture: VendorIDFixture, + ): + """When vendor ID delegation is configured, adobe_id_tags() + returns a list containing a single tag. The tag contains + the information necessary to get an Adobe ID and a link to the local + DRM Device Management Protocol endpoint. + """ + library = annotator_fixture.db.default_library() + vendor_id_fixture.initialize_adobe(library) + patron_identifier = "patron identifier" + element = annotator_fixture.annotator.adobe_id_tags(patron_identifier) + + assert "licensor" in element + assert vendor_id_fixture.TEST_VENDOR_ID == getattr( + element["licensor"], "vendor", None + ) + + token = getattr(element["licensor"], "clientToken", None) + assert token is not None + # token.text is a token which we can decode, since we know + # the secret. + token_text = token.text + authdata = AuthdataUtility.from_config(library) + assert authdata is not None + decoded = authdata.decode_short_client_token(token_text) + expected_url = library.settings.website + assert (expected_url, patron_identifier) == decoded + + # If we call adobe_id_tags again we'll get a distinct tag + # object that renders to the same data. + same_tag = annotator_fixture.annotator.adobe_id_tags(patron_identifier) + assert same_tag is not element + assert same_tag["licensor"].dict() == element["licensor"].dict() + + # If the Adobe Vendor ID configuration is present but + # incomplete, adobe_id_tags does nothing. + + # Delete one setting from the existing integration to check + # this. + vendor_id_fixture.registration.short_name = None + assert {} == annotator_fixture.annotator.adobe_id_tags("new identifier") + + def test_lcp_acquisition_link_contains_hashed_passphrase( + self, annotator_fixture: LibraryAnnotatorFixture + ): + [pool] = annotator_fixture.work.license_pools + identifier = pool.identifier + patron = annotator_fixture.db.patron() + + hashed_password = LCPHashedPassphrase("hashed password") + + # Setup LCP credentials + lcp_credential_factory = LCPCredentialFactory() + lcp_credential_factory.set_hashed_passphrase( + annotator_fixture.db.session, patron, hashed_password + ) + + loan, ignore = pool.loan_to(patron, start=utc_now()) + lcp_delivery_mechanism, ignore = DeliveryMechanism.lookup( + annotator_fixture.db.session, "text/html", DeliveryMechanism.LCP_DRM + ) + other_delivery_mechanism, ignore = DeliveryMechanism.lookup( + annotator_fixture.db.session, "text/html", DeliveryMechanism.OVERDRIVE_DRM + ) + + # The fulfill link for non-LCP DRM does not include the hashed_passphrase tag. 
+ link = annotator_fixture.annotator.fulfill_link( + pool, loan, other_delivery_mechanism + ) + assert not hasattr(link, "hashed_passphrase") + + # The fulfill link for lcp DRM includes hashed_passphrase + link = annotator_fixture.annotator.fulfill_link( + pool, loan, lcp_delivery_mechanism + ) + hashed_passphrase = getattr(link, "hashed_passphrase", None) + assert hashed_passphrase is not None + assert hashed_passphrase.text == hashed_password.hashed + + def test_default_lane_url(self, annotator_fixture: LibraryAnnotatorFixture): + default_lane_url = annotator_fixture.annotator.default_lane_url() + assert "groups" in default_lane_url + assert str(annotator_fixture.lane.id) not in default_lane_url + + facets = FacetsWithEntryPoint(entrypoint=EbooksEntryPoint) + default_lane_url = annotator_fixture.annotator.default_lane_url(facets=facets) + assert "entrypoint=Book" in default_lane_url + + def test_groups_url(self, annotator_fixture: LibraryAnnotatorFixture): + groups_url_no_lane = annotator_fixture.annotator.groups_url(None) + assert "groups" in groups_url_no_lane + assert str(annotator_fixture.lane.id) not in groups_url_no_lane + + groups_url_fantasy = annotator_fixture.annotator.groups_url( + annotator_fixture.lane + ) + assert "groups" in groups_url_fantasy + assert str(annotator_fixture.lane.id) in groups_url_fantasy + + facets = Facets.default( + annotator_fixture.db.default_library(), order="someorder" + ) + groups_url_facets = annotator_fixture.annotator.groups_url(None, facets=facets) + assert "order=someorder" in groups_url_facets + + def test_feed_url(self, annotator_fixture: LibraryAnnotatorFixture): + # A regular Lane. + feed_url_fantasy = annotator_fixture.annotator.feed_url( + annotator_fixture.lane, + Facets.default(annotator_fixture.db.default_library(), order="order"), + Pagination.default(), + ) + assert "feed" in feed_url_fantasy + assert "order=order" in feed_url_fantasy + assert str(annotator_fixture.lane.id) in feed_url_fantasy + + default_library = annotator_fixture.db.default_library() + assert default_library.name is not None + assert default_library.name in feed_url_fantasy + + # A QueryGeneratedLane. + annotator_fixture.annotator.lane = annotator_fixture.contributor_lane + feed_url_contributor = annotator_fixture.annotator.feed_url( + annotator_fixture.contributor_lane, + Facets.default(annotator_fixture.db.default_library()), + Pagination.default(), + ) + assert annotator_fixture.contributor_lane.ROUTE in feed_url_contributor + assert ( + annotator_fixture.contributor_lane.contributor_key in feed_url_contributor + ) + default_library = annotator_fixture.db.default_library() + assert default_library.name is not None + assert default_library.name in feed_url_contributor + + def test_search_url(self, annotator_fixture: LibraryAnnotatorFixture): + search_url = annotator_fixture.annotator.search_url( + annotator_fixture.lane, + "query", + Pagination.default(), + Facets.default(annotator_fixture.db.default_library(), order="Book"), + ) + assert "search" in search_url + assert "query" in search_url + assert "order=Book" in search_url + assert str(annotator_fixture.lane.id) in search_url + + def test_facet_url(self, annotator_fixture: LibraryAnnotatorFixture): + # A regular Lane. + facets = Facets.default( + annotator_fixture.db.default_library(), collection="main" + ) + facet_url = annotator_fixture.annotator.facet_url(facets) + assert "collection=main" in facet_url + assert str(annotator_fixture.lane.id) in facet_url + + # A QueryGeneratedLane. 
+ annotator_fixture.annotator.lane = annotator_fixture.contributor_lane + + facet_url_contributor = annotator_fixture.annotator.facet_url(facets) + assert "collection=main" in facet_url_contributor + assert annotator_fixture.contributor_lane.ROUTE in facet_url_contributor + assert ( + annotator_fixture.contributor_lane.contributor_key in facet_url_contributor + ) + + def test_alternate_link_is_permalink( + self, annotator_fixture: LibraryAnnotatorFixture + ): + work = annotator_fixture.db.work(with_open_access_download=True) + works = annotator_fixture.db.session.query(Work) + annotator = LibraryAnnotator( + None, + annotator_fixture.lane, + annotator_fixture.db.default_library(), + ) + pool = annotator.active_licensepool_for(work) + + feed = self.get_parsed_feed(annotator_fixture, [work]) + [entry] = feed.entries + assert entry.computed is not None + assert pool is not None + assert entry.computed.identifier == pool.identifier.urn + + [(alternate, type)] = [ + (x.href, x.type) for x in entry.computed.other_links if x.rel == "alternate" + ] + permalink, permalink_type = annotator_fixture.annotator.permalink_for( + pool.identifier + ) + assert alternate == permalink + assert OPDSFeed.ENTRY_TYPE == type + assert permalink_type == type + + # Make sure we are using the 'permalink' controller -- we were using + # 'work' and that was wrong. + assert "/host/permalink" in permalink + + def test_annotate_work_entry(self, annotator_fixture: LibraryAnnotatorFixture): + lane = annotator_fixture.db.lane() + + # Create a Work. + work = annotator_fixture.db.work(with_license_pool=True) + [pool] = work.license_pools + identifier = pool.identifier + edition = pool.presentation_edition + + # Try building an entry for this Work with and without + # patron authentication turned on -- each setting is valid + # but will result in different links being available. + linksets = [] + for auth in (True, False): + annotator = LibraryAnnotator( + None, + lane, + annotator_fixture.db.default_library(), + library_identifies_patrons=auth, + ) + work_entry = WorkEntry( + work=work, + license_pool=pool, + edition=work.presentation_edition, + identifier=work.presentation_edition.primary_identifier, + ) + annotator.annotate_work_entry(work_entry) + + assert work_entry.computed is not None + linksets.append( + { + x.rel + for x in ( + work_entry.computed.other_links + + work_entry.computed.acquisition_links + ) + } + ) + + with_auth, no_auth = linksets + + # Some links are present no matter what. + for expect in ["alternate", "related"]: + assert expect in with_auth + assert expect in no_auth + + # A library with patron authentication offers some additional + # links -- one to borrow the book and one to annotate the + # book. + for expect in [ + "http://www.w3.org/ns/oa#annotationService", + "http://opds-spec.org/acquisition/borrow", + ]: + assert expect in with_auth + assert expect not in no_auth + + # We can also build an entry for a work with no license pool, + # but it will have no borrow link. 
+ work = annotator_fixture.db.work(with_license_pool=False) + edition = work.presentation_edition + identifier = edition.primary_identifier + + annotator = LibraryAnnotator( + None, + lane, + annotator_fixture.db.default_library(), + library_identifies_patrons=True, + ) + work_entry = WorkEntry( + work=work, license_pool=None, edition=edition, identifier=identifier + ) + annotator.annotate_work_entry(work_entry) + assert work_entry.computed is not None + links = { + x.rel + for x in ( + work_entry.computed.other_links + work_entry.computed.acquisition_links + ) + } + + # These links are still present. + for expect in [ + "alternate", + "related", + "http://www.w3.org/ns/oa#annotationService", + ]: + assert expect in links + + # But the borrow link is gone. + assert "http://opds-spec.org/acquisition/borrow" not in links + + # There are no links to create analytics events for this title, + # because the library has no analytics configured. + open_book_rel = "http://librarysimplified.org/terms/rel/analytics/open-book" + assert open_book_rel not in links + + # If analytics are configured, a link is added to + # create an 'open_book' analytics event for this title. + Analytics.GLOBAL_ENABLED = True + work_entry = WorkEntry( + work=work, license_pool=None, edition=edition, identifier=identifier + ) + annotator.annotate_work_entry(work_entry) + assert work_entry.computed is not None + [analytics_link] = [ + x.href for x in work_entry.computed.other_links if x.rel == open_book_rel + ] + expect = annotator.url_for( + "track_analytics_event", + identifier_type=identifier.type, + identifier=identifier.identifier, + event_type=CirculationEvent.OPEN_BOOK, + library_short_name=annotator_fixture.db.default_library().short_name, + _external=True, + ) + assert expect == analytics_link + + # Test sample link with media types + link, _ = edition.primary_identifier.add_link( + Hyperlink.SAMPLE, + "http://example.org/sample", + edition.data_source, + media_type="application/epub+zip", + ) + work_entry = WorkEntry( + work=work, license_pool=None, edition=edition, identifier=identifier + ) + annotator.annotate_work_entry(work_entry) + assert work_entry.computed is not None + [feed_link] = [ + l + for l in work_entry.computed.other_links + if l.rel == Hyperlink.CLIENT_SAMPLE + ] + assert feed_link.href == link.resource.url + assert feed_link.type == link.resource.representation.media_type + + def test_annotate_feed(self, annotator_fixture: LibraryAnnotatorFixture): + lane = annotator_fixture.db.lane() + linksets = [] + for auth in (True, False): + annotator = LibraryAnnotator( + None, + lane, + annotator_fixture.db.default_library(), + library_identifies_patrons=auth, + ) + feed = OPDSAcquisitionFeed("test", "url", [], annotator) + annotator.annotate_feed(feed._feed) + linksets.append([x.rel for x in feed._feed.links]) + + with_auth, without_auth = linksets + + # There's always a a search link, and an auth + # document link. + for rel in ("search", "http://opds-spec.org/auth/document"): + assert rel in with_auth + assert rel in without_auth + + # But there's only a bookshelf link and an annotation link + # when patron authentication is enabled. 
+ for rel in ( + "http://opds-spec.org/shelf", + "http://www.w3.org/ns/oa#annotationService", + ): + assert rel in with_auth + assert rel not in without_auth + + def get_parsed_feed( + self, annotator_fixture: LibraryAnnotatorFixture, works, lane=None, **kwargs + ): + if not lane: + lane = annotator_fixture.db.lane(display_name="Main Lane") + + feed = OPDSAcquisitionFeed( + "url", + "test", + works, + LibraryAnnotator( + None, + lane, + annotator_fixture.db.default_library(), + **kwargs, + ), + facets=FacetsWithEntryPoint(), + pagination=Pagination.default(), + ) + feed.generate_feed() + return feed._feed + + def assert_link_on_entry( + self, entry, link_type=None, rels=None, partials_by_rel=None + ): + """Asserts that a link with a certain 'rel' value exists on a + given feed or entry, as well as its link 'type' value and parts + of its 'href' value. + """ + + def get_link_by_rel(rel): + if isinstance(entry, WorkEntry): + links = entry.computed.other_links + entry.computed.acquisition_links + elif isinstance(entry, List): + links = [e.link for e in entry] + else: + links = [entry.link] + try: + [link] = [x for x in links if x.rel == rel] + except ValueError as e: + raise AssertionError + if link_type: + assert link_type == link.type + return link + + if rels: + [get_link_by_rel(rel) for rel in rels] + + partials_by_rel = partials_by_rel or dict() + for rel, uri_partials in list(partials_by_rel.items()): + link = get_link_by_rel(rel) + if not isinstance(uri_partials, list): + uri_partials = [uri_partials] + for part in uri_partials: + assert part in link.href + + def test_work_entry_includes_open_access_or_borrow_link( + self, annotator_fixture: LibraryAnnotatorFixture + ): + open_access_work = annotator_fixture.db.work(with_open_access_download=True) + licensed_work = annotator_fixture.db.work(with_license_pool=True) + licensed_work.license_pools[0].open_access = False + + feed = self.get_parsed_feed( + annotator_fixture, [open_access_work, licensed_work] + ) + [open_access_entry, licensed_entry] = feed.entries + + self.assert_link_on_entry(open_access_entry, rels=[OPDSFeed.BORROW_REL]) + self.assert_link_on_entry(licensed_entry, rels=[OPDSFeed.BORROW_REL]) + + def test_language_and_audience_key_from_work( + self, annotator_fixture: LibraryAnnotatorFixture + ): + work = annotator_fixture.db.work( + language="eng", audience=Classifier.AUDIENCE_CHILDREN + ) + result = annotator_fixture.annotator.language_and_audience_key_from_work(work) + assert ("eng", "Children") == result + + work = annotator_fixture.db.work( + language="fre", audience=Classifier.AUDIENCE_YOUNG_ADULT + ) + result = annotator_fixture.annotator.language_and_audience_key_from_work(work) + assert ("fre", "All+Ages,Children,Young+Adult") == result + + work = annotator_fixture.db.work( + language="spa", audience=Classifier.AUDIENCE_ADULT + ) + result = annotator_fixture.annotator.language_and_audience_key_from_work(work) + assert ("spa", "Adult,Adults+Only,All+Ages,Children,Young+Adult") == result + + work = annotator_fixture.db.work(audience=Classifier.AUDIENCE_ADULTS_ONLY) + result = annotator_fixture.annotator.language_and_audience_key_from_work(work) + assert ("eng", "Adult,Adults+Only,All+Ages,Children,Young+Adult") == result + + work = annotator_fixture.db.work(audience=Classifier.AUDIENCE_RESEARCH) + result = annotator_fixture.annotator.language_and_audience_key_from_work(work) + assert ( + "eng", + "Adult,Adults+Only,All+Ages,Children,Research,Young+Adult", + ) == result + + work = 
annotator_fixture.db.work(audience=Classifier.AUDIENCE_ALL_AGES) + result = annotator_fixture.annotator.language_and_audience_key_from_work(work) + assert ("eng", "All+Ages,Children") == result + + def test_work_entry_includes_contributor_links( + self, annotator_fixture: LibraryAnnotatorFixture + ): + """ContributorLane links are added to works with contributors""" + work = annotator_fixture.db.work(with_open_access_download=True) + contributor1 = work.presentation_edition.author_contributors[0] + feed = self.get_parsed_feed(annotator_fixture, [work]) + [entry] = feed.entries + + expected_rel_and_partial = dict(contributor="/contributor") + self.assert_link_on_entry( + entry.computed.authors, + link_type=OPDSFeed.ACQUISITION_FEED_TYPE, + partials_by_rel=expected_rel_and_partial, + ) + + # When there are two authors, they each get a contributor link. + work.presentation_edition.add_contributor("Oprah", Contributor.AUTHOR_ROLE) + work.calculate_presentation( + PresentationCalculationPolicy(regenerate_opds_entries=True), + MockExternalSearchIndex(), + ) + [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries + contributor_links = [ + l.link for l in entry.computed.authors if hasattr(l, "link") + ] + assert 2 == len(contributor_links) + contributor_links.sort(key=lambda l: l.href) + for l in contributor_links: + assert l.type == OPDSFeed.ACQUISITION_FEED_TYPE + assert "/contributor" in l.href + assert contributor1.sort_name in contributor_links[0].href + assert "Oprah" in contributor_links[1].href + + # When there's no author, there's no contributor link. + annotator_fixture.db.session.delete(work.presentation_edition.contributions[0]) + annotator_fixture.db.session.delete(work.presentation_edition.contributions[1]) + annotator_fixture.db.session.commit() + work.calculate_presentation( + PresentationCalculationPolicy(regenerate_opds_entries=True), + MockExternalSearchIndex(), + ) + [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries + assert [] == [l.link for l in entry.computed.authors if l.link] + + def test_work_entry_includes_series_link( + self, annotator_fixture: LibraryAnnotatorFixture + ): + """A series lane link is added to the work entry when its in a series""" + work = annotator_fixture.db.work( + with_open_access_download=True, series="Serious Cereals Series" + ) + feed = self.get_parsed_feed(annotator_fixture, [work]) + [entry] = feed.entries + expected_rel_and_partial = dict(series="/series") + self.assert_link_on_entry( + entry.computed.series, + link_type=OPDSFeed.ACQUISITION_FEED_TYPE, + partials_by_rel=expected_rel_and_partial, + ) + + # When there's no series, there's no series link. + work = annotator_fixture.db.work(with_open_access_download=True) + feed = self.get_parsed_feed(annotator_fixture, [work]) + [entry] = feed.entries + assert None == entry.computed.series + + def test_work_entry_includes_recommendations_link( + self, annotator_fixture: LibraryAnnotatorFixture + ): + work = annotator_fixture.db.work(with_open_access_download=True) + + # If NoveList Select isn't configured, there's no recommendations link. + feed = self.get_parsed_feed(annotator_fixture, [work]) + [entry] = feed.entries + assert [] == [ + l for l in entry.computed.other_links if l.rel == "recommendations" + ] + + # There's a recommendation link when configuration is found, though! 
+ NoveListAPI.IS_CONFIGURED = None + annotator_fixture.db.external_integration( + ExternalIntegration.NOVELIST, + goal=ExternalIntegration.METADATA_GOAL, + username="library", + password="sure", + libraries=[annotator_fixture.db.default_library()], + ) + + feed = self.get_parsed_feed(annotator_fixture, [work]) + [entry] = feed.entries + expected_rel_and_partial = dict(recommendations="/recommendations") + self.assert_link_on_entry( + entry, + link_type=OPDSFeed.ACQUISITION_FEED_TYPE, + partials_by_rel=expected_rel_and_partial, + ) + + def test_work_entry_includes_annotations_link( + self, annotator_fixture: LibraryAnnotatorFixture + ): + work = annotator_fixture.db.work(with_open_access_download=True) + identifier_str = work.license_pools[0].identifier.identifier + uri_parts = ["/annotations", identifier_str] + annotation_rel = "http://www.w3.org/ns/oa#annotationService" + rel_with_partials = {annotation_rel: uri_parts} + + feed = self.get_parsed_feed(annotator_fixture, [work]) + [entry] = feed.entries + self.assert_link_on_entry(entry, partials_by_rel=rel_with_partials) + + # If the library does not authenticate patrons, no link to the + # annotation service is provided. + feed = self.get_parsed_feed( + annotator_fixture, [work], library_identifies_patrons=False + ) + [entry] = feed.entries + assert annotation_rel not in [x.rel for x in entry.computed.other_links] + + def test_active_loan_feed( + self, + annotator_fixture: LibraryAnnotatorFixture, + vendor_id_fixture: VendorIDFixture, + ): + vendor_id_fixture.initialize_adobe(annotator_fixture.db.default_library()) + patron = annotator_fixture.db.patron() + patron.last_loan_activity_sync = utc_now() + annotator = LibraryLoanAndHoldAnnotator( + None, + annotator_fixture.lane, + annotator_fixture.db.default_library(), + patron=patron, + ) + + response = OPDSAcquisitionFeed.active_loans_for( + None, patron, annotator + ).as_response() + + # The feed is private and should not be cached. + assert isinstance(response, OPDSFeedResponse) + + # No entries in the feed... + raw = str(response) + feed = feedparser.parse(raw) + assert 0 == len(feed["entries"]) + + # ... but we have a link to the User Profile Management + # Protocol endpoint... + links = feed["feed"]["links"] + [upmp_link] = [ + x + for x in links + if x["rel"] == "http://librarysimplified.org/terms/rel/user-profile" + ] + annotator = LibraryLoanAndHoldAnnotator( + None, None, library=patron.library, patron=patron + ) + expect_url = annotator.url_for( + "patron_profile", + library_short_name=patron.library.short_name, + _external=True, + ) + assert expect_url == upmp_link["href"] + + # ... and we have DRM licensing information. + tree = etree.fromstring(response.get_data(as_text=True)) + parser = OPDSXMLParser() + licensor = parser._xpath1(tree, "//atom:feed/drm:licensor") + + adobe_patron_identifier = AuthdataUtility._adobe_patron_identifier(patron) + + # The DRM licensing information includes the Adobe vendor ID + # and the patron's patron identifier for Adobe purposes. 
+        assert (
+            vendor_id_fixture.TEST_VENDOR_ID
+            == licensor.attrib["{http://librarysimplified.org/terms/drm}vendor"]
+        )
+        [client_token] = licensor
+        assert vendor_id_fixture.registration.short_name is not None
+        expected = vendor_id_fixture.registration.short_name.upper()
+        assert client_token.text.startswith(expected)
+        assert adobe_patron_identifier in client_token.text
+
+        # Unlike other places this tag shows up, we use the
+        # 'scheme' attribute to explicitly state that this
+        # <drm:licensor> tag is talking about an ACS licensing
+        # scheme. Since we're in a <feed> and not a <link> to a
+        # specific book, that context would otherwise be lost.
+        assert (
+            "http://librarysimplified.org/terms/drm/scheme/ACS"
+            == licensor.attrib["{http://librarysimplified.org/terms/drm}scheme"]
+        )
+
+        # Since we're taking a round trip to and from OPDS, which only
+        # represents times with second precision, generate the current
+        # time with second precision to make later comparisons
+        # possible.
+        now = utc_now().replace(microsecond=0)
+        tomorrow = now + datetime.timedelta(days=1)
+
+        # A loan of an open-access book is open-ended.
+        work1 = annotator_fixture.db.work(
+            language="eng", with_open_access_download=True
+        )
+        loan1 = work1.license_pools[0].loan_to(patron, start=now)
+
+        # A loan of some other kind of book has an end point.
+        work2 = annotator_fixture.db.work(language="eng", with_license_pool=True)
+        loan2 = work2.license_pools[0].loan_to(patron, start=now, end=tomorrow)
+        unused = annotator_fixture.db.work(
+            language="eng", with_open_access_download=True
+        )
+
+        # Get the feed.
+        feed_obj = OPDSAcquisitionFeed.active_loans_for(
+            None,
+            patron,
+            LibraryLoanAndHoldAnnotator(
+                None,
+                annotator_fixture.lane,
+                annotator_fixture.db.default_library(),
+                patron=patron,
+            ),
+        ).as_response()
+        raw = str(feed_obj)
+        feed = feedparser.parse(raw)
+
+        # The only entries in the feed are the works currently out on loan
+        # to this patron.
+        assert 2 == len(feed["entries"])
+        e1, e2 = sorted(feed["entries"], key=lambda x: x["title"])
+        assert work1.title == e1["title"]
+        assert work2.title == e2["title"]
+
+        # Make sure that the start and end dates from the loan are present
+        # in an <opds:availability> child of the acquisition link.
+        tree = etree.fromstring(raw)
+        parser = OPDSXMLParser()
+        acquisitions = parser._xpath(
+            tree, "//atom:entry/atom:link[@rel='http://opds-spec.org/acquisition']"
+        )
+        assert 2 == len(acquisitions)
+
+        availabilities = [parser._xpath1(x, "opds:availability") for x in acquisitions]
+
+        # One of these availability tags has 'since' but not 'until'.
+        # The other one has both.
+        [no_until] = [x for x in availabilities if "until" not in x.attrib]
+        assert now == dateutil.parser.parse(no_until.attrib["since"])
+
+        [has_until] = [x for x in availabilities if "until" in x.attrib]
+        assert now == dateutil.parser.parse(has_until.attrib["since"])
+        assert tomorrow == dateutil.parser.parse(has_until.attrib["until"])
+
+    def test_loan_feed_includes_patron(
+        self, annotator_fixture: LibraryAnnotatorFixture
+    ):
+        patron = annotator_fixture.db.patron()
+
+        patron.username = "bellhooks"
+        patron.authorization_identifier = "987654321"
+        feed_obj = OPDSAcquisitionFeed.active_loans_for(
+            None,
+            patron,
+            LibraryLoanAndHoldAnnotator(
+                None, None, annotator_fixture.db.default_library(), patron
+            ),
+        ).as_response()
+        raw = str(feed_obj)
+        feed_details = feedparser.parse(raw)["feed"]
+
+        assert "simplified:authorizationIdentifier" in raw
+        assert "simplified:username" in raw
+        assert (
+            patron.username == feed_details["simplified_patron"]["simplified:username"]
+        )
+        assert (
+            "987654321"
+            == feed_details["simplified_patron"]["simplified:authorizationidentifier"]
+        )
+
+    def test_loans_feed_includes_annotations_link(
+        self, annotator_fixture: LibraryAnnotatorFixture
+    ):
+        patron = annotator_fixture.db.patron()
+        feed_obj = OPDSAcquisitionFeed.active_loans_for(None, patron).as_response()
+        raw = str(feed_obj)
+        feed = feedparser.parse(raw)["feed"]
+        links = feed["links"]
+
+        [annotations_link] = [
+            x
+            for x in links
+            if x["rel"].lower() == "http://www.w3.org/ns/oa#annotationService".lower()
+        ]
+        assert "/annotations" in annotations_link["href"]
+
+    def test_active_loan_feed_ignores_inconsistent_local_data(
+        self, annotator_fixture: LibraryAnnotatorFixture
+    ):
+        patron = annotator_fixture.db.patron()
+
+        work1 = annotator_fixture.db.work(language="eng", with_license_pool=True)
+        loan, ignore = work1.license_pools[0].loan_to(patron)
+        work2 = annotator_fixture.db.work(language="eng", with_license_pool=True)
+        hold, ignore = work2.license_pools[0].on_hold_to(patron)
+
+        # Uh-oh, our local loan data is bad.
+        loan.license_pool.identifier = None
+
+        # Our local hold data is also bad.
+        hold.license_pool = None
+
+        # We can still get a feed...
+        feed_obj = OPDSAcquisitionFeed.active_loans_for(None, patron).as_response()
+
+        # ...but it's empty.
+        assert "<entry>" not in str(feed_obj)
+
+    def test_acquisition_feed_includes_license_information(
+        self, annotator_fixture: LibraryAnnotatorFixture
+    ):
+        work = annotator_fixture.db.work(with_open_access_download=True)
+        pool = work.license_pools[0]
+
+        # These numbers are impossible, but it doesn't matter for
+        # purposes of this test.
+ pool.open_access = False + pool.licenses_owned = 100 + pool.licenses_available = 50 + pool.patrons_in_hold_queue = 25 + + work_entry = WorkEntry( + work=work, + license_pool=pool, + edition=work.presentation_edition, + identifier=work.presentation_edition.primary_identifier, + ) + annotator_fixture.annotator.annotate_work_entry(work_entry) + assert work_entry.computed is not None + [link] = work_entry.computed.acquisition_links + assert link.holds_total == "25" + + assert link.copies_available == "50" + assert link.copies_total == "100" + + def test_loans_feed_includes_fulfill_links( + self, + annotator_fixture: LibraryAnnotatorFixture, + library_fixture: LibraryFixture, + ): + patron = annotator_fixture.db.patron() + + work = annotator_fixture.db.work( + with_license_pool=True, with_open_access_download=False + ) + pool = work.license_pools[0] + pool.open_access = False + mech1 = pool.delivery_mechanisms[0] + mech2 = pool.set_delivery_mechanism( + Representation.PDF_MEDIA_TYPE, + DeliveryMechanism.ADOBE_DRM, + RightsStatus.IN_COPYRIGHT, + None, + ) + streaming_mech = pool.set_delivery_mechanism( + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, + DeliveryMechanism.OVERDRIVE_DRM, + RightsStatus.IN_COPYRIGHT, + None, + ) + + now = utc_now() + loan, ignore = pool.loan_to(patron, start=now) + + feed_obj = OPDSAcquisitionFeed.active_loans_for( + None, + patron, + ).as_response() + raw = str(feed_obj) + + entries = feedparser.parse(raw)["entries"] + assert 1 == len(entries) + + links = entries[0]["links"] + + # Before we fulfill the loan, there are fulfill links for all three mechanisms. + fulfill_links = [ + link for link in links if link["rel"] == "http://opds-spec.org/acquisition" + ] + assert 3 == len(fulfill_links) + + assert { + mech1.delivery_mechanism.drm_scheme_media_type, + mech2.delivery_mechanism.drm_scheme_media_type, + OPDSFeed.ENTRY_TYPE, + } == {link["type"] for link in fulfill_links} + + # If one of the content types is hidden, the corresponding + # delivery mechanism does not have a link. + library = annotator_fixture.db.default_library() + settings = library_fixture.settings(library) + settings.hidden_content_types = [mech1.delivery_mechanism.content_type] + OPDSAcquisitionFeed.active_loans_for(None, patron).as_response() + assert { + mech2.delivery_mechanism.drm_scheme_media_type, + OPDSFeed.ENTRY_TYPE, + } == {link["type"] for link in fulfill_links} + settings.hidden_content_types = [] + + # When the loan is fulfilled, there are only fulfill links for that mechanism + # and the streaming mechanism. 
+ loan.fulfillment = mech1 + + feed_obj = OPDSAcquisitionFeed.active_loans_for(None, patron).as_response() + raw = str(feed_obj) + + entries = feedparser.parse(raw)["entries"] + assert 1 == len(entries) + + links = entries[0]["links"] + + fulfill_links = [ + link for link in links if link["rel"] == "http://opds-spec.org/acquisition" + ] + assert 2 == len(fulfill_links) + + assert { + mech1.delivery_mechanism.drm_scheme_media_type, + OPDSFeed.ENTRY_TYPE, + } == {link["type"] for link in fulfill_links} + + def test_incomplete_catalog_entry_contains_an_alternate_link_to_the_complete_entry( + self, annotator_fixture: LibraryAnnotatorFixture + ): + circulation = create_autospec(spec=CirculationAPI) + circulation.library = annotator_fixture.db.default_library() + work = annotator_fixture.db.work( + with_license_pool=True, with_open_access_download=False + ) + pool = work.license_pools[0] + + annotator = LibraryLoanAndHoldAnnotator( + circulation, annotator_fixture.lane, circulation.library + ) + + feed_obj = OPDSAcquisitionFeed.single_entry_loans_feed( + circulation, pool, annotator + ) + raw = str(feed_obj) + entries = feedparser.parse(raw)["entries"] + assert 1 == len(entries) + + links = entries[0]["links"] + + # We want to make sure that an incomplete catalog entry contains an alternate link to the complete entry. + alternate_links = [ + link + for link in links + if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "alternate" + ] + assert 1 == len(alternate_links) + + def test_complete_catalog_entry_with_fulfillment_link_contains_self_link( + self, annotator_fixture: LibraryAnnotatorFixture + ): + patron = annotator_fixture.db.patron() + circulation = create_autospec(spec=CirculationAPI) + circulation.library = annotator_fixture.db.default_library() + work = annotator_fixture.db.work( + with_license_pool=True, with_open_access_download=False + ) + pool = work.license_pools[0] + loan, _ = pool.loan_to(patron) + + annotator = LibraryLoanAndHoldAnnotator(circulation, None, circulation.library) + feed_obj = OPDSAcquisitionFeed.single_entry_loans_feed( + circulation, loan, annotator + ) + raw = str(feed_obj) + + entries = feedparser.parse(raw)["entries"] + assert 1 == len(entries) + + links = entries[0]["links"] + + # We want to make sure that a complete catalog entry contains an alternate link + # because it's required by some clients (for example, an Android version of SimplyE). + alternate_links = [ + link + for link in links + if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "alternate" + ] + assert 1 == len(alternate_links) + + # We want to make sure that the complete catalog entry contains a self link. + self_links = [ + link + for link in links + if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "self" + ] + assert 1 == len(self_links) + + # We want to make sure that alternate and self links are the same. 
+ assert alternate_links[0]["href"] == self_links[0]["href"] + + def test_complete_catalog_entry_with_fulfillment_info_contains_self_link( + self, annotator_fixture: LibraryAnnotatorFixture + ): + patron = annotator_fixture.db.patron() + circulation = create_autospec(spec=CirculationAPI) + circulation.library = annotator_fixture.db.default_library() + work = annotator_fixture.db.work( + with_license_pool=True, with_open_access_download=False + ) + pool = work.license_pools[0] + loan, _ = pool.loan_to(patron) + fulfillment = FulfillmentInfo( + pool.collection, + pool.data_source.name, + pool.identifier.type, + pool.identifier.identifier, + "http://link", + Representation.EPUB_MEDIA_TYPE, + None, + None, + ) + + annotator = LibraryLoanAndHoldAnnotator(circulation, None, circulation.library) + feed_obj = OPDSAcquisitionFeed.single_entry_loans_feed( + circulation, + loan, + annotator, + fulfillment=fulfillment, + ) + raw = str(feed_obj) + + entries = feedparser.parse(raw)["entries"] + assert 1 == len(entries) + + links = entries[0]["links"] + + # We want to make sure that a complete catalog entry contains an alternate link + # because it's required by some clients (for example, an Android version of SimplyE). + alternate_links = [ + link + for link in links + if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "alternate" + ] + assert 1 == len(alternate_links) + + # We want to make sure that the complete catalog entry contains a self link. + self_links = [ + link + for link in links + if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "self" + ] + assert 1 == len(self_links) + + # We want to make sure that alternate and self links are the same. + assert alternate_links[0]["href"] == self_links[0]["href"] + + def test_fulfill_feed(self, annotator_fixture: LibraryAnnotatorFixture): + patron = annotator_fixture.db.patron() + + work = annotator_fixture.db.work( + with_license_pool=True, with_open_access_download=False + ) + pool = work.license_pools[0] + pool.open_access = False + streaming_mech = pool.set_delivery_mechanism( + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, + DeliveryMechanism.OVERDRIVE_DRM, + RightsStatus.IN_COPYRIGHT, + None, + ) + + now = utc_now() + loan, ignore = pool.loan_to(patron, start=now) + fulfillment = FulfillmentInfo( + pool.collection, + pool.data_source.name, + pool.identifier.type, + pool.identifier.identifier, + "http://streaming_link", + Representation.TEXT_HTML_MEDIA_TYPE + DeliveryMechanism.STREAMING_PROFILE, + None, + None, + ) + + annotator = LibraryLoanAndHoldAnnotator(None, None, patron.library) + feed_obj = OPDSAcquisitionFeed.single_entry_loans_feed( + None, loan, annotator, fulfillment=fulfillment + ) + + entries = feedparser.parse(str(feed_obj))["entries"] + assert 1 == len(entries) + + links = entries[0]["links"] + + # The feed for a single fulfillment only includes one fulfill link. + fulfill_links = [ + link for link in links if link["rel"] == "http://opds-spec.org/acquisition" + ] + assert 1 == len(fulfill_links) + + assert ( + Representation.TEXT_HTML_MEDIA_TYPE + DeliveryMechanism.STREAMING_PROFILE + == fulfill_links[0]["type"] + ) + assert "http://streaming_link" == fulfill_links[0]["href"] + + def test_drm_device_registration_feed_tags( + self, + annotator_fixture: LibraryAnnotatorFixture, + vendor_id_fixture: VendorIDFixture, + ): + """Check that drm_device_registration_feed_tags returns + a generic drm:licensor tag, except with the drm:scheme attribute + set. 
+ """ + vendor_id_fixture.initialize_adobe(annotator_fixture.db.default_library()) + annotator = LibraryLoanAndHoldAnnotator( + None, + None, + annotator_fixture.db.default_library(), + ) + patron = annotator_fixture.db.patron() + feed_tag = annotator.drm_device_registration_feed_tags(patron) + generic_tag = annotator.adobe_id_tags(patron) + + # The feed-level tag has the drm:scheme attribute set. + assert ( + "http://librarysimplified.org/terms/drm/scheme/ACS" + == feed_tag["licensor"].scheme + ) + + # If we remove that attribute, the feed-level tag is the same as the + # generic tag. + assert feed_tag["licensor"].dict() != generic_tag["licensor"].dict() + delattr(feed_tag["licensor"], "scheme") + assert feed_tag["licensor"].dict() == generic_tag["licensor"].dict() + + def test_borrow_link_raises_unfulfillable_work( + self, annotator_fixture: LibraryAnnotatorFixture + ): + edition, pool = annotator_fixture.db.edition(with_license_pool=True) + kindle_mechanism = pool.set_delivery_mechanism( + DeliveryMechanism.KINDLE_CONTENT_TYPE, + DeliveryMechanism.KINDLE_DRM, + RightsStatus.IN_COPYRIGHT, + None, + ) + epub_mechanism = pool.set_delivery_mechanism( + Representation.EPUB_MEDIA_TYPE, + DeliveryMechanism.ADOBE_DRM, + RightsStatus.IN_COPYRIGHT, + None, + ) + data_source_name = pool.data_source.name + identifier = pool.identifier + + annotator = LibraryLoanAndHoldAnnotator( + None, None, annotator_fixture.db.default_library() + ) + + # If there's no way to fulfill the book, borrow_link raises + # UnfulfillableWork. + pytest.raises(UnfulfillableWork, annotator.borrow_link, pool, None, []) + + pytest.raises( + UnfulfillableWork, annotator.borrow_link, pool, None, [kindle_mechanism] + ) + + # If there's a fulfillable mechanism, everything's fine. + link = annotator.borrow_link(pool, None, [epub_mechanism]) + assert link != None + + link = annotator.borrow_link(pool, None, [epub_mechanism, kindle_mechanism]) + assert link != None + + def test_feed_includes_lane_links(self, annotator_fixture: LibraryAnnotatorFixture): + def annotated_links(lane, annotator): + # Create an AcquisitionFeed is using the given Annotator. + # extract its links and return a dictionary that maps link + # relations to URLs. + feed = OPDSAcquisitionFeed("test", "url", [], annotator) + annotator.annotate_feed(feed._feed) + links = feed._feed.links + + d = defaultdict(list) + for link in links: + d[link.rel.lower()].append(link.href) + return d + + # When an EntryPoint is explicitly selected, it shows up in the + # link to the search controller. + facets = FacetsWithEntryPoint(entrypoint=AudiobooksEntryPoint) + lane = annotator_fixture.db.lane() + annotator = LibraryAnnotator( + None, + lane, + annotator_fixture.db.default_library(), + facets=facets, + ) + [url] = annotated_links(lane, annotator)["search"] + assert "/lane_search" in url + assert "entrypoint=%s" % AudiobooksEntryPoint.INTERNAL_NAME in url + assert str(lane.id) in url + + # When the selected EntryPoint is a default, it's not used -- + # instead, we search everything. + assert annotator.facets is not None + annotator.facets.entrypoint_is_default = True + links = annotated_links(lane, annotator) + [url] = links["search"] + assert "entrypoint=%s" % EverythingEntryPoint.INTERNAL_NAME in url + + # This lane isn't based on a custom list, so there's no crawlable link. + assert [] == links["http://opds-spec.org/crawlable"] + + # It's also not crawlable if it's based on multiple lists. 
+ list1, ignore = annotator_fixture.db.customlist() + list2, ignore = annotator_fixture.db.customlist() + lane.customlists = [list1, list2] + links = annotated_links(lane, annotator) + assert [] == links["http://opds-spec.org/crawlable"] + + # A lane based on a single list gets a crawlable link. + lane.customlists = [list1] + links = annotated_links(lane, annotator) + [crawlable] = links["http://opds-spec.org/crawlable"] + assert "/crawlable_list_feed" in crawlable + assert str(list1.name) in crawlable + + def test_acquisition_links( + self, + annotator_fixture: LibraryAnnotatorFixture, + library_fixture: LibraryFixture, + ): + annotator = LibraryLoanAndHoldAnnotator( + None, None, annotator_fixture.db.default_library() + ) + + patron = annotator_fixture.db.patron() + + now = utc_now() + tomorrow = now + datetime.timedelta(days=1) + + # Loan of an open-access book. + work1 = annotator_fixture.db.work(with_open_access_download=True) + loan1, ignore = work1.license_pools[0].loan_to(patron, start=now) + + # Loan of a licensed book. + work2 = annotator_fixture.db.work(with_license_pool=True) + loan2, ignore = work2.license_pools[0].loan_to(patron, start=now, end=tomorrow) + + # Hold on a licensed book. + work3 = annotator_fixture.db.work(with_license_pool=True) + hold, ignore = work3.license_pools[0].on_hold_to( + patron, start=now, end=tomorrow + ) + + # Book with no loans or holds yet. + work4 = annotator_fixture.db.work(with_license_pool=True) + + # Loan of a licensed book without a loan end. + work5 = annotator_fixture.db.work(with_license_pool=True) + loan5, ignore = work5.license_pools[0].loan_to(patron, start=now) + + # Ensure the state variable + assert annotator.identifies_patrons == True + + loan1_links = annotator.acquisition_links( + loan1.license_pool, + loan1, + None, + None, + loan1.license_pool.identifier, + ) + # Fulfill, and revoke. + [revoke, fulfill] = sorted(loan1_links, key=lambda x: x.rel or "") + assert revoke.href and "revoke_loan_or_hold" in revoke.href + assert ( + revoke.rel and "http://librarysimplified.org/terms/rel/revoke" == revoke.rel + ) + assert fulfill.href and "fulfill" in fulfill.href + assert fulfill.rel and "http://opds-spec.org/acquisition" == fulfill.rel + + # Allow direct open-access downloads + # This will also filter out loan revoke links + annotator.identifies_patrons = False + loan1_links = annotator.acquisition_links( + loan1.license_pool, loan1, None, None, loan1.license_pool.identifier + ) + assert len(loan1_links) == 1 + assert {"http://opds-spec.org/acquisition/open-access"} == { + link.rel for link in loan1_links + } + + # Work 2 has no open access links + loan2_links = annotator.acquisition_links( + loan2.license_pool, loan2, None, None, loan2.license_pool.identifier + ) + assert len(loan2_links) == 0 + + # Revert the annotator state + annotator.identifies_patrons = True + + assert strftime(loan1.start) == fulfill.availability_since + assert loan1.end == fulfill.availability_until == None + + loan2_links = annotator.acquisition_links( + loan2.license_pool, loan2, None, None, loan2.license_pool.identifier + ) + # Fulfill and revoke. 
+ [revoke, fulfill] = sorted(loan2_links, key=lambda x: x.rel or "") + assert revoke.href and "revoke_loan_or_hold" in revoke.href + assert "http://librarysimplified.org/terms/rel/revoke" == revoke.rel + assert fulfill.href and "fulfill" in fulfill.href + assert "http://opds-spec.org/acquisition" == fulfill.rel + + assert strftime(loan2.start) == fulfill.availability_since + assert strftime(loan2.end) == fulfill.availability_until + + # If a book is ready to be fulfilled, but the library has + # hidden all of its available content types, the fulfill link does + # not show up -- only the revoke link. + library = annotator_fixture.db.default_library() + settings = library_fixture.settings(library) + available_types = [ + lpdm.delivery_mechanism.content_type + for lpdm in loan2.license_pool.delivery_mechanisms + ] + settings.hidden_content_types = available_types + + # The list of hidden content types is stored in the Annotator + # constructor, so this particular test needs a fresh Annotator. + annotator_with_hidden_types = LibraryLoanAndHoldAnnotator( + None, None, annotator_fixture.db.default_library() + ) + loan2_links = annotator_with_hidden_types.acquisition_links( + loan2.license_pool, loan2, None, None, loan2.license_pool.identifier + ) + [revoke] = loan2_links + assert "http://librarysimplified.org/terms/rel/revoke" == revoke.rel + # Un-hide the content types so the test can continue. + settings.hidden_content_types = [] + + hold_links = annotator.acquisition_links( + hold.license_pool, None, hold, None, hold.license_pool.identifier + ) + # Borrow and revoke. + [revoke, borrow] = sorted(hold_links, key=lambda x: x.rel or "") + assert revoke.href and "revoke_loan_or_hold" in revoke.href + assert "http://librarysimplified.org/terms/rel/revoke" == revoke.rel + assert borrow.href and "borrow" in borrow.href + assert "http://opds-spec.org/acquisition/borrow" == borrow.rel + + work4_links = annotator.acquisition_links( + work4.license_pools[0], + None, + None, + None, + work4.license_pools[0].identifier, + ) + # Borrow only. + [borrow] = work4_links + assert borrow.href and "borrow" in borrow.href + assert "http://opds-spec.org/acquisition/borrow" == borrow.rel + + loan5_links = annotator.acquisition_links( + loan5.license_pool, loan5, None, None, loan5.license_pool.identifier + ) + # Fulfill and revoke. + [revoke, fulfill] = sorted(loan5_links, key=lambda x: x.rel or "") + assert revoke.href and "revoke_loan_or_hold" in revoke.href + assert "http://librarysimplified.org/terms/rel/revoke" == revoke.rel + assert fulfill.href and "fulfill" in fulfill.href + assert "http://opds-spec.org/acquisition" == fulfill.rel + + assert strftime(loan5.start) == fulfill.availability_since + # TODO: This currently fails, it should be uncommented when the CM 21 day loan bug is fixed + # assert loan5.end == availability.until + assert None == loan5.end + + # If patron authentication is turned off for the library, then + # only open-access links are displayed. + annotator.identifies_patrons = False + + [open_access] = annotator.acquisition_links( + loan1.license_pool, loan1, None, None, loan1.license_pool.identifier + ) + assert "http://opds-spec.org/acquisition/open-access" == open_access.rel + + # This may include links with the open-access relation for + # non-open-access works that are available without + # authentication. To get such link, you pass in a list of + # LicensePoolDeliveryMechanisms as + # `direct_fufillment_delivery_mechanisms`. 
+ [lp4] = work4.license_pools + [lpdm4] = lp4.delivery_mechanisms + lpdm4.set_rights_status(RightsStatus.IN_COPYRIGHT) + [not_open_access] = annotator.acquisition_links( + lp4, + None, + None, + None, + lp4.identifier, + direct_fulfillment_delivery_mechanisms=[lpdm4], + ) + + # The link relation is OPDS 'open-access', which just means the + # book can be downloaded with no hassle. + assert "http://opds-spec.org/acquisition/open-access" == not_open_access.rel + + # The dcterms:rights attribute provides a more detailed + # explanation of the book's copyright status -- note that it's + # not "open access" in the typical sense. + rights = not_open_access.rights + assert RightsStatus.IN_COPYRIGHT == rights + + # Hold links are absent even when there are active holds in the + # database -- there is no way to distinguish one patron from + # another so the concept of a 'hold' is meaningless. + hold_links = annotator.acquisition_links( + hold.license_pool, None, hold, None, hold.license_pool.identifier + ) + assert [] == hold_links + + def test_acquisition_links_multiple_links( + self, + annotator_fixture: LibraryAnnotatorFixture, + library_fixture: LibraryFixture, + ): + annotator = LibraryLoanAndHoldAnnotator( + None, None, annotator_fixture.db.default_library() + ) + + # This book has two delivery mechanisms + work = annotator_fixture.db.work(with_license_pool=True) + [pool] = work.license_pools + [mech1] = pool.delivery_mechanisms + mech2 = pool.set_delivery_mechanism( + Representation.PDF_MEDIA_TYPE, + DeliveryMechanism.NO_DRM, + RightsStatus.IN_COPYRIGHT, + None, + ) + + # The vendor API for LicensePools of this type requires that a + # delivery mechanism be chosen at the point of borrowing. + class MockAPI: + SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.BORROW_STEP + + # This means that two different acquisition links will be + # generated -- one for each delivery mechanism. + links = annotator.acquisition_links( + pool, None, None, None, pool.identifier, mock_api=MockAPI() + ) + assert 2 == len(links) + + mech1_param = "mechanism_id=%s" % mech1.delivery_mechanism.id + mech2_param = "mechanism_id=%s" % mech2.delivery_mechanism.id + + # Instead of sorting, which may be wrong if the id is greater than 10 + # due to how double digits are sorted, extract the links associated + # with the expected delivery mechanism. + if links[0].href and mech1_param in links[0].href: + [mech1_link, mech2_link] = links + else: + [mech2_link, mech1_link] = links + + indirects = [] + for link in [mech1_link, mech2_link]: + # Both links should have the same subtags. + assert link.availability_status is not None + assert link.copies_total is not None + assert link.holds_total is not None + assert len(link.indirect_acquisitions) > 0 + indirects.append(link.indirect_acquisitions[0]) + + # The target of the top-level link is different. + assert mech1_link.href and mech1_param in mech1_link.href + assert mech2_link.href and mech2_param in mech2_link.href + + # So is the media type seen in the indirectAcquisition subtag. + [mech1_indirect, mech2_indirect] = indirects + + # The first delivery mechanism (created when the Work was created) + # uses Adobe DRM, so that shows up as the first indirect acquisition + # type. + assert mech1.delivery_mechanism.drm_scheme == mech1_indirect.type + + # The second delivery mechanism doesn't use DRM, so the content + # type shows up as the first (and only) indirect acquisition type. 
+ assert mech2.delivery_mechanism.content_type == mech2_indirect.type + + # If we configure the library to hide one of the content types, + # we end up with only one link -- the one for the delivery + # mechanism that's not hidden. + library = annotator_fixture.db.default_library() + settings = library_fixture.settings(library) + settings.hidden_content_types = [mech1.delivery_mechanism.content_type] + annotator = LibraryLoanAndHoldAnnotator( + None, None, annotator_fixture.db.default_library() + ) + [link] = annotator.acquisition_links( + pool, None, None, None, pool.identifier, mock_api=MockAPI() + ) + assert ( + mech2.delivery_mechanism.content_type == link.indirect_acquisitions[0].type + ) diff --git a/tests/api/feed/test_loan_and_hold_annotator.py b/tests/api/feed/test_loan_and_hold_annotator.py new file mode 100644 index 0000000000..79df7ed502 --- /dev/null +++ b/tests/api/feed/test_loan_and_hold_annotator.py @@ -0,0 +1,287 @@ +from unittest.mock import MagicMock, patch + +from api.app import app +from api.problem_details import NOT_FOUND_ON_REMOTE +from core.classifier import ( # type: ignore[attr-defined] + Classifier, + Fantasy, + Urban_Fantasy, +) +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator +from core.feed.types import WorkEntry, WorkEntryData +from core.lane import WorkList +from core.model import ExternalIntegration, get_one +from core.model.constants import EditionConstants, LinkRelations +from core.model.licensing import LicensePool +from core.model.patron import Loan +from tests.fixtures.database import DatabaseTransactionFixture + + +class TestLibraryLoanAndHoldAnnotator: + def test_single_item_feed(self, db: DatabaseTransactionFixture): + # Test the generation of single-item OPDS feeds for loans (with and + # without fulfillment) and holds. + class MockAnnotator(LibraryLoanAndHoldAnnotator): + def url_for(self, controller, **kwargs): + self.url_for_called_with = (controller, kwargs) + return "a URL" + + def mock_single_entry(work, annotator, *args, **kwargs): + annotator._single_entry_response_called_with = ( + (work, annotator) + args, + kwargs, + ) + w = WorkEntry( + work=work, + license_pool=work.active_license_pool(), + edition=work.presentation_edition, + identifier=work.presentation_edition.primary_identifier, + ) + w.computed = WorkEntryData() + return w + + def test_annotator(item, fulfillment=None): + # Call MockAnnotator.single_item_feed with certain arguments + # and make some general assertions about the return value. + circulation = object() + test_mode = object() + feed_class = object() + annotator = MockAnnotator(MagicMock(), None, db.default_library()) + + with patch.object( + OPDSAcquisitionFeed, "single_entry", new=mock_single_entry + ): + result = OPDSAcquisitionFeed.single_entry_loans_feed( + MagicMock(), + item, + annotator, + fulfillment=fulfillment, + ) + + assert db.default_library() == annotator.library + + # Now let's see what we did with it after calling its + # constructor. + + # The return value of that was the string "a URL". We then + # passed that into _single_entry_response, along with + # `item` and a number of arguments that we made up. + response_call = annotator._single_entry_response_called_with + (_work, _annotator), kwargs = response_call + assert work == _work + assert annotator == _annotator + + # Return the MockAnnotator for further examination. 
+ return annotator + + # Now we're going to call test_annotator a couple times in + # different situations. + work = db.work(with_license_pool=True) + [pool] = work.license_pools + patron = db.patron() + loan, ignore = pool.loan_to(patron) + + # First, let's ask for a single-item feed for a loan. + annotator = test_annotator(loan) + + # Everything tested by test_annotator happened, but _also_, + # when the annotator was created, the Loan was stored in + # active_loans_by_work. + assert {work: loan} == annotator.active_loans_by_work + + # Since we passed in a loan rather than a hold, + # active_holds_by_work is empty. + assert {} == annotator.active_holds_by_work + + # Since we didn't pass in a fulfillment for the loan, + # active_fulfillments_by_work is empty. + assert {} == annotator.active_fulfillments_by_work + + # Now try it again, but give the loan a fulfillment. + fulfillment = object() + annotator = test_annotator(loan, fulfillment) + assert {work: loan} == annotator.active_loans_by_work + assert {work: fulfillment} == annotator.active_fulfillments_by_work + + # Finally, try it with a hold. + hold, ignore = pool.on_hold_to(patron) + annotator = test_annotator(hold) + assert {work: hold} == annotator.active_holds_by_work + assert {} == annotator.active_loans_by_work + assert {} == annotator.active_fulfillments_by_work + + def test_single_item_feed_without_work(self, db: DatabaseTransactionFixture): + """If a licensepool has no work or edition the single_item_feed mustn't raise an exception""" + mock = MagicMock() + # A loan without a pool + annotator = LibraryLoanAndHoldAnnotator(mock, None, db.default_library()) + loan = Loan() + loan.patron = db.patron() + not_found_result = OPDSAcquisitionFeed.single_entry_loans_feed( + mock, + loan, + annotator, + ) + assert not_found_result == NOT_FOUND_ON_REMOTE + + work = db.work(with_license_pool=True) + pool = get_one(db.session, LicensePool, work_id=work.id) + assert isinstance(pool, LicensePool) + # Pool with no work, and the presentation edition has no work either + pool.work_id = None + work.presentation_edition_id = None + db.session.commit() + assert ( + OPDSAcquisitionFeed.single_entry_loans_feed( + mock, + pool, + annotator, + ) + == NOT_FOUND_ON_REMOTE + ) + + # pool with no work and no presentation edition + pool.presentation_edition_id = None + db.session.commit() + assert ( + OPDSAcquisitionFeed.single_entry_loans_feed( + mock, + pool, + annotator, + ) + == NOT_FOUND_ON_REMOTE + ) + + def test_choose_best_hold_for_work(self, db: DatabaseTransactionFixture): + # First create two license pools for the same work so we could create two holds for the same work. + patron = db.patron() + + coll_1 = db.collection(name="Collection 1") + coll_2 = db.collection(name="Collection 2") + + work = db.work() + + pool_1 = db.licensepool( + edition=work.presentation_edition, open_access=False, collection=coll_1 + ) + pool_2 = db.licensepool( + edition=work.presentation_edition, open_access=False, collection=coll_2 + ) + + hold_1, _ = pool_1.on_hold_to(patron) + hold_2, _ = pool_2.on_hold_to(patron) + + # When there is no licenses_owned/available on one license pool the LibraryLoanAndHoldAnnotator should choose + # hold associated with the other license pool. + pool_1.licenses_owned = 0 + pool_1.licenses_available = 0 + + assert hold_2 == LibraryLoanAndHoldAnnotator.choose_best_hold_for_work( + [hold_1, hold_2] + ) + + # Now we have different number of licenses owned across two LPs and the same hold position. 
+ # Hold associated with LP with more owned licenses will be chosen as best. + pool_1.licenses_owned = 2 + + pool_2.licenses_owned = 3 + pool_2.licenses_available = 0 + + hold_1.position = 7 + hold_2.position = 7 + + assert hold_2 == LibraryLoanAndHoldAnnotator.choose_best_hold_for_work( + [hold_1, hold_2] + ) + + def test_annotate_work_entry(self, db: DatabaseTransactionFixture): + library = db.default_library() + patron = db.patron() + identifier = db.identifier() + lane = WorkList() + lane.initialize( + library, + ) + annotator = LibraryLoanAndHoldAnnotator(None, lane, library, patron) + feed = OPDSAcquisitionFeed("title", "url", [], annotator) + + # Annotate time tracking + opds_for_distributors = db.collection( + protocol=ExternalIntegration.OPDS_FOR_DISTRIBUTORS + ) + work = db.work(with_license_pool=True, collection=opds_for_distributors) + edition = work.presentation_edition + edition.medium = EditionConstants.AUDIO_MEDIUM + edition.primary_identifier = identifier + loan, _ = work.active_license_pool().loan_to(patron) + annotator.active_loans_by_work = {work: loan} + + with app.test_request_context("/") as request: + request.library = library # type: ignore [attr-defined] + entry = feed.single_entry(work, annotator) + assert isinstance(entry, WorkEntry) + assert entry and entry.computed is not None + time_tracking_links = list( + filter( + lambda l: l.rel == LinkRelations.TIME_TRACKING, + entry.computed.other_links, + ) + ) + assert len(time_tracking_links) == 1 + assert time_tracking_links[0].href == annotator.url_for( + "track_playtime_events", + identifier_type=identifier.type, + identifier=identifier.identifier, + library_short_name=annotator.library.short_name, + collection_id=opds_for_distributors.id, + _external=True, + ) + + # No active loan means no tracking link + annotator.active_loans_by_work = {} + entry = feed.single_entry(work, annotator) + assert isinstance(entry, WorkEntry) + assert entry and entry.computed is not None + + time_tracking_links = list( + filter( + lambda l: l.rel == LinkRelations.TIME_TRACKING, + entry.computed.other_links, + ) + ) + assert len(time_tracking_links) == 0 + + # Add the loan back in + annotator.active_loans_by_work = {work: loan} + + # Book mediums don't get time tracking + edition.medium = EditionConstants.BOOK_MEDIUM + entry = feed.single_entry(work, annotator) + assert isinstance(entry, WorkEntry) + assert entry and entry.computed is not None + + time_tracking_links = list( + filter( + lambda l: l.rel == LinkRelations.TIME_TRACKING, + entry.computed.other_links, + ) + ) + assert len(time_tracking_links) == 0 + + # Non OPDS for distributor works do not get links either + work = db.work(with_license_pool=True) + edition = work.presentation_edition + edition.medium = EditionConstants.AUDIO_MEDIUM + + entry = feed.single_entry(work, annotator) + assert isinstance(entry, WorkEntry) + assert entry and entry.computed is not None + + time_tracking_links = list( + filter( + lambda l: l.rel == LinkRelations.TIME_TRACKING, + entry.computed.other_links, + ) + ) + assert len(time_tracking_links) == 0 diff --git a/tests/api/feed/test_opds2_serializer.py b/tests/api/feed/test_opds2_serializer.py new file mode 100644 index 0000000000..2b2bfcdf68 --- /dev/null +++ b/tests/api/feed/test_opds2_serializer.py @@ -0,0 +1,215 @@ +import json + +from core.feed.serializer.opds2 import OPDS2Serializer +from core.feed.types import ( + Acquisition, + Author, + FeedData, + FeedEntryType, + IndirectAcquisition, + Link, + WorkEntry, + WorkEntryData, +) +from 
core.model.edition import Edition +from core.model.identifier import Identifier +from core.model.work import Work +from core.util.opds_writer import OPDSMessage + + +class TestOPDS2Serializer: + def test_serialize_feed(self): + feed = FeedData( + metadata=dict( + items_per_page=FeedEntryType(text="20"), + title=FeedEntryType(text="Title"), + ) + ) + w = WorkEntry( + work=Work(), + edition=Edition(), + identifier=Identifier(), + ) + w.computed = WorkEntryData(identifier="identifier", pwid="permanent-id") + feed.entries = [w] + feed.links = [Link(href="http://link", rel="link-rel")] + feed.facet_links = [ + Link.create( + href="http://facet-link", rel="facet-rel", facetGroup="FacetGroup" + ) + ] + + serialized = OPDS2Serializer().serialize_feed(feed) + result = json.loads(serialized) + + assert result["metadata"]["title"] == "Title" + assert result["metadata"]["itemsPerPage"] == 20 + + assert len(result["publications"]) == 1 + assert result["publications"][0] == dict( + metadata={"identifier": "identifier"}, images=[], links=[] + ) + + assert len(result["links"]) == 1 + assert result["links"][0] == dict(href="http://link", rel="link-rel") + + assert len(result["facets"]) == 1 + assert result["facets"][0] == dict( + metadata={"title": "FacetGroup"}, + links=[{"href": "http://facet-link", "rel": "facet-rel"}], + ) + + def test_serialize_work_entry(self): + data = WorkEntryData( + additionalType="type", + title=FeedEntryType(text="The Title"), + sort_title=FeedEntryType(text="Title, The"), + subtitle=FeedEntryType(text="Sub Title"), + identifier="urn:id", + language=FeedEntryType(text="de"), + updated=FeedEntryType(text="2022-02-02"), + published=FeedEntryType(text="2020-02-02"), + summary=FeedEntryType(text="Summary"), + publisher=FeedEntryType(text="Publisher"), + imprint=FeedEntryType(text="Imprint"), + categories=[ + FeedEntryType.create(scheme="scheme", label="label"), + ], + series=FeedEntryType.create(name="Series", position="3"), + image_links=[Link(href="http://image", rel="image-rel")], + acquisition_links=[ + Acquisition(href="http://acquisition", rel="acquisition-rel") + ], + other_links=[Link(href="http://link", rel="rel")], + ) + + serializer = OPDS2Serializer() + + entry = serializer.serialize_work_entry(data) + metadata = entry["metadata"] + + assert metadata["@type"] == data.additionalType + assert metadata["title"] == data.title.text + assert metadata["sortAs"] == data.sort_title.text + assert metadata["subtitle"] == data.subtitle.text + assert metadata["identifier"] == data.identifier + assert metadata["language"] == data.language.text + assert metadata["modified"] == data.updated.text + assert metadata["published"] == data.published.text + assert metadata["description"] == data.summary.text + assert metadata["publisher"] == dict(name=data.publisher.text) + assert metadata["imprint"] == dict(name=data.imprint.text) + assert metadata["subject"] == [ + dict(scheme="scheme", name="label", sortAs="label") + ] + assert metadata["belongsTo"] == dict(name="Series", position=3) + + assert entry["links"] == [ + dict(href="http://link", rel="rel"), + dict(href="http://acquisition", rel="acquisition-rel"), + ] + assert entry["images"] == [dict(href="http://image", rel="image-rel")] + + # Test the different author types + data = WorkEntryData( + authors=[Author(name="author1"), Author(name="author2")], + contributors=[ + Author(name="translator", role="trl"), + Author(name="editor", role="edt"), + Author(name="artist", role="art"), + Author(name="illustrator", role="ill"), + 
Author(name="letterer", role="ctb"), + Author(name="penciller", role="ctb"), + Author(name="colorist", role="clr"), + Author(name="inker", role="ctb"), + Author(name="narrator", role="nrt"), + Author(name="narrator2", role="nrt"), + ], + ) + + entry = serializer.serialize_work_entry(data) + metadata = entry["metadata"] + # Only the first author is considered + assert metadata["author"] == dict(name="author1") + # Of the allowed roles + assert metadata["translator"] == dict(name="translator") + assert metadata["editor"] == dict(name="editor") + assert metadata["artist"] == dict(name="artist") + assert metadata["illustrator"] == dict(name="illustrator") + assert metadata["colorist"] == dict(name="colorist") + # Of letterer, penciller, and inker, only inker is used, since the marc roles overlap + assert metadata["inker"] == dict(name="inker") + # Of repeated roles, only the last entry is picked + assert metadata["narrator"] == dict(name="narrator2") + + def test__serialize_acquisition_link(self): + serializer = OPDS2Serializer() + acquisition = Acquisition( + href="http://acquisition", + rel="acquisition", + availability_status="available", + availability_since="2022-02-02", + availability_until="2222-02-02", + indirect_acquisitions=[ + IndirectAcquisition( + type="indirect1", + children=[ + IndirectAcquisition(type="indirect1-1"), + IndirectAcquisition(type="indirect1-2"), + ], + ), + ], + ) + + result = serializer._serialize_acquisition_link(acquisition) + + assert result["href"] == acquisition.href + assert result["rel"] == acquisition.rel + assert result["properties"] == dict( + availability={ + "since": "2022-02-02", + "until": "2222-02-02", + "state": "available", + }, + indirectAcquisition=[ + { + "type": "indirect1", + "child": [{"type": "indirect1-1"}, {"type": "indirect1-2"}], + } + ], + ) + + # Test availability states + acquisition = Acquisition( + href="http://hold", + rel="hold", + is_hold=True, + availability_status="available", + ) + result = serializer._serialize_acquisition_link(acquisition) + assert result["properties"]["availability"]["state"] == "reserved" + + acquisition = Acquisition( + href="http://loan", + rel="loan", + is_loan=True, + availability_status="available", + ) + result = serializer._serialize_acquisition_link(acquisition) + assert result["properties"]["availability"]["state"] == "ready" + + def test__serialize_contributor(self): + author = Author( + name="Author", + sort_name="Author,", + link=Link(href="http://author", rel="contributor", title="Delete me!"), + ) + result = OPDS2Serializer()._serialize_contributor(author) + assert result["name"] == "Author" + assert result["sortAs"] == "Author," + assert result["links"] == [{"href": "http://author", "rel": "contributor"}] + + def test_serialize_opds_message(self): + assert OPDS2Serializer().serialize_opds_message( + OPDSMessage("URN", 200, "Description") + ) == dict(urn="URN", description="Description") diff --git a/tests/api/feed/test_opds_acquisition_feed.py b/tests/api/feed/test_opds_acquisition_feed.py new file mode 100644 index 0000000000..3f6a098713 --- /dev/null +++ b/tests/api/feed/test_opds_acquisition_feed.py @@ -0,0 +1,1454 @@ +import datetime +import logging +from collections import defaultdict +from typing import Any, Callable, Generator, List, Type +from unittest.mock import MagicMock, patch + +import pytest +from sqlalchemy.orm import Session +from werkzeug.datastructures import MIMEAccept + +from core.entrypoint import ( + AudiobooksEntryPoint, + EbooksEntryPoint, + EntryPoint, + 
EverythingEntryPoint, + MediumEntryPoint, +) +from core.external_search import MockExternalSearchIndex +from core.facets import FacetConstants +from core.feed.acquisition import LookupAcquisitionFeed, OPDSAcquisitionFeed +from core.feed.annotator.base import Annotator +from core.feed.annotator.circulation import ( + AcquisitionHelper, + CirculationManagerAnnotator, + LibraryAnnotator, +) +from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator +from core.feed.annotator.verbose import VerboseAnnotator +from core.feed.navigation import NavigationFeed +from core.feed.opds import BaseOPDSFeed +from core.feed.types import FeedData, Link, WorkEntry, WorkEntryData +from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList +from core.model import DeliveryMechanism, Representation +from core.model.constants import LinkRelations +from core.opds import MockUnfulfillableAnnotator +from core.util.datetime_helpers import utc_now +from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse +from core.util.opds_writer import OPDSFeed, OPDSMessage +from tests.api.feed.fixtures import PatchedUrlFor, patch_url_for # noqa +from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.search import ExternalSearchPatchFixture + + +class TestOPDSFeedProtocol: + def test_entry_as_response(self, db: DatabaseTransactionFixture): + work = db.work() + entry = WorkEntry( + work=work, + edition=work.presentation_edition, + identifier=work.presentation_edition.primary_identifier, + ) + + with pytest.raises(ValueError) as raised: + BaseOPDSFeed.entry_as_response(entry) + assert str(raised.value) == "Entry data has not been generated" + + entry.computed = WorkEntryData() + + response = BaseOPDSFeed.entry_as_response(entry) + assert isinstance(response, OPDSEntryResponse) + # default content type is XML + assert response.content_type == OPDSEntryResponse().content_type + + # Specifically asking for a json type + response = BaseOPDSFeed.entry_as_response( + entry, mime_types=MIMEAccept([("application/opds+json", 0.9)]) + ) + assert isinstance(response, OPDSEntryResponse) + assert response.content_type == "application/opds+json" + + response = BaseOPDSFeed.entry_as_response( + OPDSMessage("URN", 204, "Test OPDS Message") + ) + assert isinstance(response, OPDSEntryResponse) + assert response.status_code == 204 + assert ( + b"Test OPDS Message" + in response.data + ) + + response = BaseOPDSFeed.entry_as_response( + OPDSMessage("URN", 204, "Test OPDS Message"), + mime_types=MIMEAccept([("application/opds+json", 1)]), + ) + assert isinstance(response, OPDSEntryResponse) + assert response.status_code == 204 + assert dict(description="Test OPDS Message", urn="URN") == response.json + + +class MockAnnotator(CirculationManagerAnnotator): + def __init__(self): + self.lanes_by_work = defaultdict(list) + + @classmethod + def lane_url(cls, lane): + if lane and lane.has_visible_children: + return cls.groups_url(lane) + elif lane: + return cls.feed_url(lane) + else: + return "" + + @classmethod + def feed_url(cls, lane, facets=None, pagination=None): + if isinstance(lane, Lane): + base = "http://%s/" % lane.url_name + else: + base = "http://%s/" % lane.display_name + sep = "?" 
+ if facets: + base += sep + facets.query_string + sep = "&" + if pagination: + base += sep + pagination.query_string + return base + + @classmethod + def search_url(cls, lane, query, pagination, facets=None): + if isinstance(lane, Lane): + base = "http://%s/" % lane.url_name + else: + base = "http://%s/" % lane.display_name + sep = "?" + if pagination: + base += sep + pagination.query_string + sep = "&" + if facets: + facet_query_string = facets.query_string + if facet_query_string: + base += sep + facet_query_string + return base + + @classmethod + def groups_url(cls, lane, facets=None): + if lane and isinstance(lane, Lane): + identifier = lane.id + else: + identifier = "" + if facets: + facet_string = "?" + facets.query_string + else: + facet_string = "" + + return f"http://groups/{identifier}{facet_string}" + + @classmethod + def default_lane_url(cls): + return cls.groups_url(None) + + @classmethod + def facet_url(cls, facets): + return "http://facet/" + "&".join( + [f"{k}={v}" for k, v in sorted(facets.items())] + ) + + @classmethod + def navigation_url(cls, lane): + if lane and isinstance(lane, Lane): + identifier = lane.id + else: + identifier = "" + return "http://navigation/%s" % identifier + + @classmethod + def top_level_title(cls): + return "Test Top Level Title" + + +class TestOPDSAcquisitionFeed: + def test_page( + self, + db, + external_search_patch_fixture: ExternalSearchPatchFixture, + ): + session = db.session + + # Verify that AcquisitionFeed.page() returns an appropriate OPDSFeedResponse + + wl = WorkList() + wl.initialize(db.default_library()) + private = object() + response = OPDSAcquisitionFeed.page( + session, + "feed title", + "url", + wl, + CirculationManagerAnnotator(None), + None, + None, + None, + ).as_response(max_age=10, private=private) + + # The result is an OPDSFeedResponse. The 'private' argument, + # unused by page(), was passed along into the constructor. + assert isinstance(response, OPDSFeedResponse) + assert 10 == response.max_age + assert private == response.private + + assert "feed title" in str(response) + + def test_as_response(self, db: DatabaseTransactionFixture): + session = db.session + + # Verify the ability to convert an AcquisitionFeed object to an + # OPDSFeedResponse containing the feed. + feed = OPDSAcquisitionFeed( + "feed title", + "http://url/", + [], + CirculationManagerAnnotator(None), + ) + feed.generate_feed() + + # Some other piece of code set expectations for how this feed should + # be cached. + response = feed.as_response(max_age=101, private=False) + assert 200 == response.status_code + + # We get an OPDSFeedResponse containing the feed in its + # entity-body. + assert isinstance(response, OPDSFeedResponse) + assert "feed title" in str(response) + + # The caching expectations are respected. + assert 101 == response.max_age + assert False == response.private + + def test_as_error_response(self, db: DatabaseTransactionFixture): + session = db.session + + # Verify the ability to convert an AcquisitionFeed object to an + # OPDSFeedResponse that is to be treated as an error message. + feed = OPDSAcquisitionFeed( + "feed title", + "http://url/", + [], + CirculationManagerAnnotator(None), + ) + feed.generate_feed() + + # Some other piece of code set expectations for how this feed should + # be cached. + kwargs = dict(max_age=101, private=False) + + # But we know that something has gone wrong and the feed is + # being served as an error message. 
+        response = feed.as_error_response(**kwargs)
+        assert isinstance(response, OPDSFeedResponse)
+
+        # The content of the feed is unchanged.
+        assert 200 == response.status_code
+        assert "feed title" in str(response)
+
+        # But the max_age and private settings have been overridden.
+        assert 0 == response.max_age
+        assert True == response.private
+
+    def test_add_entrypoint_links(self):
+        """Verify that add_entrypoint_links calls _entrypoint_link
+        on every EntryPoint passed in.
+        """
+
+        class Mock:
+            attrs = dict(href="the response")
+
+            def __init__(self):
+                self.calls = []
+
+            def __call__(self, *args):
+                self.calls.append(args)
+                return Link(**self.attrs)
+
+        mock = Mock()
+        old_entrypoint_link = OPDSAcquisitionFeed._entrypoint_link
+        OPDSAcquisitionFeed._entrypoint_link = mock
+
+        feed = FeedData()
+        entrypoints = [AudiobooksEntryPoint, EbooksEntryPoint]
+        url_generator = object()
+        OPDSAcquisitionFeed.add_entrypoint_links(
+            feed, url_generator, entrypoints, EbooksEntryPoint, "Some entry points"
+        )
+
+        # Two different calls were made to the mock method.
+        c1, c2 = mock.calls
+
+        # The first entry point is not selected.
+        assert c1 == (
+            url_generator,
+            AudiobooksEntryPoint,
+            EbooksEntryPoint,
+            True,
+            "Some entry points",
+        )
+        # The second one is selected.
+        assert c2 == (
+            url_generator,
+            EbooksEntryPoint,
+            EbooksEntryPoint,
+            False,
+            "Some entry points",
+        )
+
+        # Two identical <link> tags were added to the <feed> tag, one
+        # for each call to the mock method.
+        l1, l2 = feed.links
+        for l in l1, l2:
+            assert mock.attrs == l.link_attribs()
+        OPDSAcquisitionFeed._entrypoint_link = old_entrypoint_link
+
+        # If there is only one facet in the facet group, no links are
+        # added.
+        feed = FeedData()
+        mock.calls = []
+        entrypoints = [EbooksEntryPoint]
+        OPDSAcquisitionFeed.add_entrypoint_links(
+            feed, url_generator, entrypoints, EbooksEntryPoint, "Some entry points"
+        )
+        assert [] == mock.calls
+
+    def test_entrypoint_link(self):
+        """Test the _entrypoint_link method's ability to create
+        attributes for <link> tags.
+        """
+        m = OPDSAcquisitionFeed._entrypoint_link
+
+        def g(entrypoint):
+            """A mock URL generator."""
+            return "%s" % (entrypoint.INTERNAL_NAME)
+
+        # If the entry point is not registered, None is returned.
+        assert None == m(g, object(), object(), True, "group")
+
+        # Now make a real set of link attributes.
+        l = m(g, AudiobooksEntryPoint, AudiobooksEntryPoint, False, "Grupe")
+
+        # The link is identified as belonging to an entry point-type
+        # facet group.
+        assert l.rel == LinkRelations.FACET_REL
+        assert getattr(l, "facetGroupType") == FacetConstants.ENTRY_POINT_REL
+        assert "Grupe" == getattr(l, "facetGroup")
+
+        # This facet is the active one in the group.
+        assert "true" == getattr(l, "activeFacet")
+
+        # The URL generator was invoked to create the href.
+        assert l.href == g(AudiobooksEntryPoint)
+
+        # The facet title identifies it as a way to look at audiobooks.
+        assert EntryPoint.DISPLAY_TITLES[AudiobooksEntryPoint] == l.title
+
+        # Now try some variants.
+
+        # Here, the entry point is the default one.
+        l = m(g, AudiobooksEntryPoint, AudiobooksEntryPoint, True, "Grupe")
+
+        # This may affect the URL generated for the facet link.
+        assert l.href == g(AudiobooksEntryPoint)
+
+        # Here, the entry point for which we're generating the link is
+        # not the selected one -- EbooksEntryPoint is.
+        l = m(g, AudiobooksEntryPoint, EbooksEntryPoint, True, "Grupe")
+
+        # This means the 'activeFacet' attribute is not present.
+ assert getattr(l, "activeFacet", None) == None + + def test_license_tags_no_loan_or_hold(self, db: DatabaseTransactionFixture): + edition, pool = db.edition(with_license_pool=True) + tags = AcquisitionHelper.license_tags(pool, None, None) + assert ( + dict( + availability_status="available", + holds_total="0", + copies_total="1", + copies_available="1", + ) + == tags + ) + + def test_license_tags_hold_position(self, db: DatabaseTransactionFixture): + # When a book is placed on hold, it typically takes a while + # for the LicensePool to be updated with the new number of + # holds. This test verifies the normal and exceptional + # behavior used to generate the opds:holds tag in different + # scenarios. + edition, pool = db.edition(with_license_pool=True) + patron = db.patron() + + # If the patron's hold position is less than the total number + # of holds+reserves, that total is used as opds:total. + pool.patrons_in_hold_queue = 3 + hold, is_new = pool.on_hold_to(patron, position=1) + + tags = AcquisitionHelper.license_tags(pool, None, hold) + assert tags is not None + assert "1" == tags["holds_position"] + assert "3" == tags["holds_total"] + + # If the patron's hold position is missing, we assume they + # are last in the list. + hold.position = None + tags = AcquisitionHelper.license_tags(pool, None, hold) + assert tags is not None + assert "3" == tags["holds_position"] + assert "3" == tags["holds_total"] + + # If the patron's current hold position is greater than the + # total recorded number of holds+reserves, their position will + # be used as the value of opds:total. + hold.position = 5 + tags = AcquisitionHelper.license_tags(pool, None, hold) + assert tags is not None + assert "5" == tags["holds_position"] + assert "5" == tags["holds_total"] + + # A patron earlier in the holds queue may see a different + # total number of holds, but that's fine -- it doesn't matter + # very much to that person the precise number of people behind + # them in the queue. + hold.position = 4 + tags = AcquisitionHelper.license_tags(pool, None, hold) + assert tags is not None + assert "4" == tags["holds_position"] + assert "4" == tags["holds_total"] + + # If the patron's hold position is zero (because the book is + # reserved to them), we do not represent them as having a hold + # position (so no opds:position), but they still count towards + # opds:total in the case where the LicensePool's information + # is out of date. 
+        hold.position = 0
+        pool.patrons_in_hold_queue = 0
+        tags = AcquisitionHelper.license_tags(pool, None, hold)
+        assert tags is not None
+        assert "holds_position" not in tags
+        assert "1" == tags["holds_total"]
+
+    def test_license_tags_show_unlimited_access_books(
+        self, db: DatabaseTransactionFixture
+    ):
+        # Arrange
+        edition, pool = db.edition(with_license_pool=True)
+        pool.open_access = False
+        pool.unlimited_access = True
+
+        # Act
+        tags = AcquisitionHelper.license_tags(pool, None, None)
+
+        # Assert
+        assert tags is not None
+        assert 1 == len(tags.keys())
+        assert tags["availability_status"] == "available"
+
+    def test_unlimited_access_pool_loan(self, db: DatabaseTransactionFixture):
+        patron = db.patron()
+        work = db.work(unlimited_access=True, with_license_pool=True)
+        pool = work.active_license_pool()
+        loan, _ = pool.loan_to(patron)
+        tags = AcquisitionHelper.license_tags(pool, loan, None)
+
+        assert tags is not None
+        assert "availability_since" in tags
+        assert "availability_until" not in tags
+
+    def test_single_entry(self, db: DatabaseTransactionFixture):
+        session = db.session
+
+        # Here's a Work with two LicensePools.
+        work = db.work(with_open_access_download=True)
+        original_pool = work.license_pools[0]
+        edition, new_pool = db.edition(
+            with_license_pool=True, with_open_access_download=True
+        )
+        work.license_pools.append(new_pool)
+
+        # The presentation edition of the Work is associated with
+        # the first LicensePool added to it.
+        assert work.presentation_edition == original_pool.presentation_edition
+
+        # This is the edition used when we create an <entry> tag for
+        # this Work.
+        private = object()
+        entry = OPDSAcquisitionFeed.single_entry(
+            work,
+            Annotator(),
+        )
+        assert isinstance(entry, WorkEntry)
+        assert entry.computed is not None
+        assert entry.computed.title is not None
+
+        assert new_pool.presentation_edition.title != entry.computed.title.text
+        assert original_pool.presentation_edition.title == entry.computed.title.text
+
+        # If the edition was issued before 1980, no datetime formatting error
+        # is raised.
+        work.simple_opds_entry = work.verbose_opds_entry = None
+        five_hundred_years = datetime.timedelta(days=(500 * 365))
+        work.presentation_edition.issued = utc_now() - five_hundred_years
+
+        entry = OPDSAcquisitionFeed.single_entry(work, Annotator())
+        assert isinstance(entry, WorkEntry)
+        assert entry.computed is not None
+        assert entry.computed.issued is not None
+
+        assert work.presentation_edition.issued == entry.computed.issued
+
+    def test_error_when_work_has_no_identifier(self, db: DatabaseTransactionFixture):
+        session = db.session
+
+        # We cannot create an OPDS entry for a Work that cannot be associated
+        # with an Identifier.
+ work = db.work(title="Hello, World!", with_license_pool=True) + work.license_pools[0].identifier = None + work.presentation_edition.primary_identifier = None + entry = OPDSAcquisitionFeed.single_entry(work, Annotator()) + assert entry == None + + def test_error_when_work_has_no_licensepool(self, db: DatabaseTransactionFixture): + session = db.session + + work = db.work() + entry = OPDSAcquisitionFeed.single_entry(work, Annotator()) + expect = OPDSAcquisitionFeed.error_message( + work.presentation_edition.primary_identifier, + 403, + "I've heard about this work but have no active licenses for it.", + ) + assert expect == entry + + def test_error_when_work_has_no_presentation_edition( + self, db: DatabaseTransactionFixture + ): + session = db.session + + """We cannot create an OPDS entry (or even an error message) for a + Work that is disconnected from any Identifiers. + """ + work = db.work(title="Hello, World!", with_license_pool=True) + work.license_pools[0].presentation_edition = None + work.presentation_edition = None + entry = OPDSAcquisitionFeed.single_entry(work, Annotator()) + assert None == entry + + def test_exception_during_entry_creation_is_not_reraised( + self, db: DatabaseTransactionFixture + ): + # This feed will raise an exception whenever it's asked + # to create an entry. + class DoomedFeed(OPDSAcquisitionFeed): + @classmethod + def _create_entry(cls, *args, **kwargs): + raise Exception("I'm doomed!") + + work = db.work(with_open_access_download=True) + + # But calling create_entry() doesn't raise an exception, it + # just returns None. + entry = DoomedFeed.single_entry(work, Annotator()) + assert entry == None + + def test_unfilfullable_work(self, db: DatabaseTransactionFixture): + work = db.work(with_open_access_download=True) + [pool] = work.license_pools + response = OPDSAcquisitionFeed.single_entry( + work, + MockUnfulfillableAnnotator(), # type: ignore[arg-type] + ) + assert isinstance(response, OPDSMessage) + expect = OPDSAcquisitionFeed.error_message( + pool.identifier, + 403, + "I know about this work but can offer no way of fulfilling it.", + ) + + assert str(expect) == str(response) + + def test_format_types(self, db: DatabaseTransactionFixture): + session = db.session + + m = AcquisitionHelper.format_types + + epub_no_drm, ignore = DeliveryMechanism.lookup( + session, Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM + ) + assert [Representation.EPUB_MEDIA_TYPE] == m(epub_no_drm) + + epub_adobe_drm, ignore = DeliveryMechanism.lookup( + session, Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM + ) + assert [DeliveryMechanism.ADOBE_DRM, Representation.EPUB_MEDIA_TYPE] == m( + epub_adobe_drm + ) + + overdrive_streaming_text, ignore = DeliveryMechanism.lookup( + session, + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, + DeliveryMechanism.OVERDRIVE_DRM, + ) + assert [ + OPDSFeed.ENTRY_TYPE, + Representation.TEXT_HTML_MEDIA_TYPE + DeliveryMechanism.STREAMING_PROFILE, + ] == m(overdrive_streaming_text) + + audiobook_drm, ignore = DeliveryMechanism.lookup( + session, + Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE, + DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_DRM, + ) + + assert [ + Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE + + DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_PROFILE + ] == m(audiobook_drm) + + # Test a case where there is a DRM scheme but no underlying + # content type. 
+ findaway_manifest, ignore = DeliveryMechanism.lookup( + session, DeliveryMechanism.FINDAWAY_DRM, None + ) + assert [DeliveryMechanism.FINDAWAY_DRM] == m(findaway_manifest) + + def test_add_breadcrumbs(self, db: DatabaseTransactionFixture): + session = db.session + _db = session + + def getElementChildren(feed): + f = feed.feed[0] + children = f + return children + + class MockFeed(OPDSAcquisitionFeed): + def __init__(self): + super().__init__("", "", [], MockAnnotator()) + self.feed = [] + + lane = db.lane(display_name="lane") + sublane = db.lane(parent=lane, display_name="sublane") + subsublane = db.lane(parent=sublane, display_name="subsublane") + subsubsublane = db.lane(parent=subsublane, display_name="subsubsublane") + + top_level = object() + ep = AudiobooksEntryPoint + + def assert_breadcrumbs(expect_breadcrumbs_for, lane, **add_breadcrumbs_kwargs): + # Create breadcrumbs leading up to `lane` and verify that + # there is a breadcrumb for everything in + # `expect_breadcrumbs_for` -- Lanes, EntryPoints, and the + # top-level lane. Verify that the titles and URLs of the + # breadcrumbs match what we expect. + # + # For easier reading, all assertions in this test are + # written as calls to this function. + feed = MockFeed() + annotator = MockAnnotator() + + feed.add_breadcrumbs(lane, **add_breadcrumbs_kwargs) + + if not expect_breadcrumbs_for: + # We are expecting no breadcrumbs at all; + # nothing should have been added to the feed. + assert [] == feed.feed + return + + # At this point we expect at least one breadcrumb. + crumbs = feed._feed.breadcrumbs + + entrypoint_selected = False + entrypoint_query = "?entrypoint=" + + # First, compare the titles of the breadcrumbs to what was + # passed in. This makes test writing much easier. + def title(x): + if x is top_level: + return annotator.top_level_title() + elif x is ep: + return x.INTERNAL_NAME + else: + return x.display_name + + expect_titles = [title(x) for x in expect_breadcrumbs_for] + actual_titles = [getattr(x, "title", None) for x in crumbs] + assert expect_titles == actual_titles + + # Now, compare the URLs of the breadcrumbs. This is + # trickier, mainly because the URLs change once an + # entrypoint is selected. + previous_breadcrumb_url = None + + for i, crumb in enumerate(crumbs): + expect = expect_breadcrumbs_for[i] + actual_url = crumb.href + + if expect is top_level: + # Breadcrumb for the library root. + expect_url = annotator.default_lane_url() + elif expect is ep: + # Breadcrumb for the entrypoint selection. + + # Beyond this point all URLs must propagate the + # selected entrypoint. + entrypoint_selected = True + entrypoint_query += expect.INTERNAL_NAME + + # The URL for this breadcrumb is the URL for the + # previous breadcrumb with the addition of the + # entrypoint selection query. + expect_url = previous_breadcrumb_url + entrypoint_query + else: + # Breadcrumb for a lane. + + # The breadcrumb URL is determined by the + # Annotator. + lane_url = annotator.lane_url(expect) + if entrypoint_selected: + # All breadcrumbs after the entrypoint selection + # must propagate the entrypoint. + expect_url = lane_url + entrypoint_query + else: + expect_url = lane_url + + logging.debug( + "%s: expect=%s actual=%s", expect_titles[i], expect_url, actual_url + ) + assert expect_url == actual_url + + # Keep track of the URL just used, in case the next + # breadcrumb is the same URL but with an entrypoint + # selection appended. 
+                previous_breadcrumb_url = actual_url
+
+        # That was a complicated method, but now our assertions
+        # are very easy to write and understand.
+
+        # At the top level, there are no breadcrumbs whatsoever.
+        assert_breadcrumbs([], None)
+
+        # It doesn't matter if an entrypoint is selected.
+        assert_breadcrumbs([], None, entrypoint=ep)
+
+        # A lane with no entrypoint -- note that the breadcrumbs stop
+        # _before_ the lane in question.
+        assert_breadcrumbs([top_level], lane)
+
+        # If you pass include_lane=True into add_breadcrumbs, the lane
+        # itself is included.
+        assert_breadcrumbs([top_level, lane], lane, include_lane=True)
+
+        # A lane with an entrypoint selected
+        assert_breadcrumbs([top_level, ep], lane, entrypoint=ep)
+        assert_breadcrumbs(
+            [top_level, ep, lane], lane, entrypoint=ep, include_lane=True
+        )
+
+        # One lane level down.
+        assert_breadcrumbs([top_level, lane], sublane)
+        assert_breadcrumbs([top_level, ep, lane], sublane, entrypoint=ep)
+        assert_breadcrumbs(
+            [top_level, ep, lane, sublane], sublane, entrypoint=ep, include_lane=True
+        )
+
+        # Two lane levels down.
+        assert_breadcrumbs([top_level, lane, sublane], subsublane)
+        assert_breadcrumbs([top_level, ep, lane, sublane], subsublane, entrypoint=ep)
+
+        # Three lane levels down.
+        assert_breadcrumbs(
+            [top_level, lane, sublane, subsublane],
+            subsubsublane,
+        )
+
+        assert_breadcrumbs(
+            [top_level, ep, lane, sublane, subsublane], subsubsublane, entrypoint=ep
+        )
+
+        # Make the sublane a root lane for a certain patron type, and
+        # the breadcrumbs will start at that lane -- we won't see
+        # the sublane's parent or the library root.
+        sublane.root_for_patron_type = ["ya"]
+        assert_breadcrumbs([], sublane)
+
+        assert_breadcrumbs([sublane, subsublane], subsubsublane)
+
+        assert_breadcrumbs(
+            [sublane, subsublane, subsubsublane], subsubsublane, include_lane=True
+        )
+
+        # However, if an entrypoint is selected we will see a
+        # breadcrumb for it between the patron root lane and its
+        # child.
+        assert_breadcrumbs([sublane, ep, subsublane], subsubsublane, entrypoint=ep)
+
+        assert_breadcrumbs(
+            [sublane, ep, subsublane, subsubsublane],
+            subsubsublane,
+            entrypoint=ep,
+            include_lane=True,
+        )
+
+    def test_add_breadcrumb_links(self, db: DatabaseTransactionFixture):
+        class MockFeed(OPDSAcquisitionFeed):
+            add_link_calls = []
+            add_breadcrumbs_call = None
+            current_entrypoint = None
+
+            def add_link(self, href, **kwargs):
+                kwargs["href"] = href
+                self.add_link_calls.append(kwargs)
+
+            def add_breadcrumbs(self, lane, entrypoint):
+                self.add_breadcrumbs_call = (lane, entrypoint)
+
+            def show_current_entrypoint(self, entrypoint):
+                self.current_entrypoint = entrypoint
+
+        annotator = MockAnnotator
+        feed = MockFeed("title", "url", [], MockAnnotator())
+
+        lane = db.lane()
+        sublane = db.lane(parent=lane)
+        ep = AudiobooksEntryPoint
+        feed.add_breadcrumb_links(sublane, ep)
+
+        # add_link was called twice, to create the 'start' and
+        # 'up' links.
+        start, up = feed.add_link_calls
+        assert "start" == start["rel"]
+        assert annotator.top_level_title() == start["title"]
+
+        assert "up" == up["rel"]
+        assert lane.display_name == up["title"]
+
+        # The Lane and EntryPoint were passed into add_breadcrumbs.
+        assert (sublane, ep) == feed.add_breadcrumbs_call
+
+        # The EntryPoint was passed into show_current_entrypoint.
+        assert ep == feed.current_entrypoint
+
+    def test_show_current_entrypoint(self, db: DatabaseTransactionFixture):
+        """Calling OPDSAcquisitionFeed.show_current_entrypoint annotates
+        the top-level <feed> tag with information about the currently
+        selected entrypoint, if any.
+        """
+        feed = OPDSAcquisitionFeed(
+            "title",
+            "url",
+            [],
+            CirculationManagerAnnotator(None),
+        )
+
+        # No entry point, no annotation.
+        feed.show_current_entrypoint(None)
+        assert feed._feed.entrypoint is None
+
+        ep = AudiobooksEntryPoint
+        feed.show_current_entrypoint(ep)
+        assert ep.URI == feed._feed.entrypoint
+
+    def test_facet_links_unrecognized_facets(self):
+        # OPDSAcquisitionFeed.facet_links does not produce links for any
+        # facet groups or facets not known to the current version of
+        # the system, because it doesn't know what the links should look
+        # like.
+        class MockAnnotator:
+            def facet_url(self, new_facets):
+                return "url: " + new_facets
+
+        class MockFacets:
+            @property
+            def facet_groups(self):
+                """Yield a facet group+facet 4-tuple that passes the test we're
+                running (which will be turned into a link), and then a
+                bunch that don't (which will be ignored).
+                """
+
+                # Real facet group, real facet
+                yield (
+                    Facets.COLLECTION_FACET_GROUP_NAME,
+                    Facets.COLLECTION_FULL,
+                    "try the featured collection instead",
+                    True,
+                )
+
+                # Real facet group, nonexistent facet
+                yield (
+                    Facets.COLLECTION_FACET_GROUP_NAME,
+                    "no such facet",
+                    "this facet does not exist",
+                    True,
+                )
+
+                # Nonexistent facet group, real facet
+                yield (
+                    "no such group",
+                    Facets.COLLECTION_FULL,
+                    "this facet exists but it's in a nonexistent group",
+                    True,
+                )
+
+                # Nonexistent facet group, nonexistent facet
+                yield (
+                    "no such group",
+                    "no such facet",
+                    "i just don't know",
+                    True,
+                )
+
+        class MockFeed(OPDSAcquisitionFeed):
+            links = []
+
+            @classmethod
+            def facet_link(cls, url, facet_title, group_title, selected):
+                # Return the passed-in objects as is.
+                return (url, facet_title, group_title, selected)
+
+        annotator = MockAnnotator()
+        facets = MockFacets()
+
+        # The only 4-tuple yielded by facet_groups was passed on to us.
+        # The link was run through MockAnnotator.facet_url(),
+        # and the human-readable titles were found using lookups.
+        #
+        # The other three 4-tuples were ignored since we don't know
+        # how to generate human-readable titles for them.
+ [[url, facet, group, selected]] = MockFeed.facet_links(annotator, facets) + assert "url: try the featured collection instead" == url + assert Facets.FACET_DISPLAY_TITLES[Facets.COLLECTION_FULL] == facet + assert Facets.GROUP_DISPLAY_TITLES[Facets.COLLECTION_FACET_GROUP_NAME] == group + assert True == selected + + def test_active_loans_for_with_holds( + self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor + ): + patron = db.patron() + work = db.work(with_license_pool=True) + hold, _ = work.active_license_pool().on_hold_to(patron) + + feed = OPDSAcquisitionFeed.active_loans_for( + None, patron, LibraryAnnotator(None, None, db.default_library()) + ) + assert feed.annotator.active_holds_by_work == {work: hold} + + def test_single_entry_loans_feed_errors(self, db: DatabaseTransactionFixture): + with pytest.raises(ValueError) as raised: + # Mandatory loans item was missing + OPDSAcquisitionFeed.single_entry_loans_feed(None, None) # type: ignore[arg-type] + assert str(raised.value) == "Argument 'item' must be non-empty" + + with pytest.raises(ValueError) as raised: + # Mandatory loans item was incorrect + OPDSAcquisitionFeed.single_entry_loans_feed(None, object()) # type: ignore[arg-type] + assert "Argument 'item' must be an instance of" in str(raised.value) + + # A work and pool that has no edition, will not have an entry + work = db.work(with_open_access_download=True) + pool = work.active_license_pool() + work.presentation_edition = None + pool.presentation_edition = None + response = OPDSAcquisitionFeed.single_entry_loans_feed(MagicMock(), pool) + assert isinstance(response, OPDSEntryResponse) + assert response.status_code == 403 + + def test_single_entry_loans_feed_default_annotator( + self, db: DatabaseTransactionFixture + ): + patron = db.patron() + work = db.work(with_license_pool=True) + pool = work.active_license_pool() + assert pool is not None + loan, _ = pool.loan_to(patron) + + with patch.object(OPDSAcquisitionFeed, "single_entry") as mock: + mock.return_value = None + response = OPDSAcquisitionFeed.single_entry_loans_feed(None, loan) + + assert response == None + assert mock.call_count == 1 + _work, annotator = mock.call_args[0] + assert isinstance(annotator, LibraryLoanAndHoldAnnotator) + assert _work == work + assert annotator.library == db.default_library() + + def test_single_entry_with_edition(self, db: DatabaseTransactionFixture): + work = db.work(with_license_pool=True) + annotator = object() + + with patch.object(OPDSAcquisitionFeed, "_create_entry") as mock: + OPDSAcquisitionFeed.single_entry( + work.presentation_edition, annotator, even_if_no_license_pool=True # type: ignore[arg-type] + ) + + assert mock.call_count == 1 + _work, _pool, _edition, _identifier, _annotator = mock.call_args[0] + assert _work == work + assert _pool == None + assert _edition == work.presentation_edition + assert _identifier == work.presentation_edition.primary_identifier + assert _annotator == annotator + + +class TestEntrypointLinkInsertionFixture: + db: DatabaseTransactionFixture + mock: Any + no_eps: WorkList + entrypoints: List[MediumEntryPoint] + wl: WorkList + lane: Lane + annotator: Type[MockAnnotator] + old_add_entrypoint_links: Callable + + +@pytest.fixture() +def entrypoint_link_insertion_fixture( + db, +) -> Generator[TestEntrypointLinkInsertionFixture, None, None]: + data = TestEntrypointLinkInsertionFixture() + data.db = db + + # Mock for AcquisitionFeed.add_entrypoint_links + class Mock: + def add_entrypoint_links(self, *args): + self.called_with = args + + 
data.mock = Mock() + + # A WorkList with no EntryPoints -- should not call the mock method. + data.no_eps = WorkList() + data.no_eps.initialize(library=db.default_library(), display_name="no_eps") + + # A WorkList with two EntryPoints -- may call the mock method + # depending on circumstances. + data.entrypoints = [AudiobooksEntryPoint, EbooksEntryPoint] # type: ignore[list-item] + data.wl = WorkList() + # The WorkList must have at least one child, or we won't generate + # a real groups feed for it. + data.lane = db.lane() + data.wl.initialize( + library=db.default_library(), + display_name="wl", + entrypoints=data.entrypoints, + children=[data.lane], + ) + + def works(_db, **kwargs): + """Mock WorkList.works so we don't need any actual works + to run the test. + """ + return [] + + data.no_eps.works = works # type: ignore[method-assign, assignment] + data.wl.works = works # type: ignore[method-assign, assignment] + + data.annotator = MockAnnotator + data.old_add_entrypoint_links = OPDSAcquisitionFeed.add_entrypoint_links + OPDSAcquisitionFeed.add_entrypoint_links = data.mock.add_entrypoint_links # type: ignore[method-assign] + yield data + OPDSAcquisitionFeed.add_entrypoint_links = data.old_add_entrypoint_links # type: ignore[method-assign] + + +class TestEntrypointLinkInsertion: + """Verify that the three main types of OPDS feeds -- grouped, + paginated, and search results -- will all include links to the same + feed but through a different entry point. + """ + + def test_groups( + self, + entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture, + external_search_patch_fixture: ExternalSearchPatchFixture, + ): + data, db, session = ( + entrypoint_link_insertion_fixture, + entrypoint_link_insertion_fixture.db, + entrypoint_link_insertion_fixture.db.session, + ) + + # When AcquisitionFeed.groups() generates a grouped + # feed, it will link to different entry points into the feed, + # assuming the WorkList has different entry points. + def run(wl=None, facets=None): + """Call groups() and see what add_entrypoint_links + was called with. + """ + data.mock.called_with = None + search = MockExternalSearchIndex() + feed = OPDSAcquisitionFeed.groups( + session, + "title", + "url", + wl, + MockAnnotator(), + None, + facets, + search, + ) + return data.mock.called_with + + # This WorkList has no entry points, so the mock method is not + # even called. + assert None == run(data.no_eps) + + # A WorkList with entry points does cause the mock method + # to be called. + facets = FeaturedFacets( + minimum_featured_quality=db.default_library().settings.minimum_featured_quality, + entrypoint=EbooksEntryPoint, + ) + feed, make_link, entrypoints, selected = run(data.wl, facets) + + # add_entrypoint_links was passed both possible entry points + # and the selected entry point. + assert data.wl.entrypoints == entrypoints + assert selected == EbooksEntryPoint + + # The make_link function that was passed in calls + # TestAnnotator.groups_url() when passed an EntryPoint. + assert "http://groups/?entrypoint=Book" == make_link(EbooksEntryPoint) + + def test_page( + self, entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture + ): + data, db, session = ( + entrypoint_link_insertion_fixture, + entrypoint_link_insertion_fixture.db, + entrypoint_link_insertion_fixture.db.session, + ) + + # When AcquisitionFeed.page() generates the first page of a paginated + # list, it will link to different entry points into the list, + # assuming the WorkList has different entry points. 
+ + def run(wl=None, facets=None, pagination=None): + """Call page() and see what add_entrypoint_links + was called with. + """ + data.mock.called_with = None + private = object() + OPDSAcquisitionFeed.page( + session, + "title", + "url", + wl, + data.annotator(), + facets, + pagination, + MockExternalSearchIndex(), + ) + + return data.mock.called_with + + # The WorkList has no entry points, so the mock method is not + # even called. + assert None == run(data.no_eps) + + # Let's give the WorkList two possible entry points, and choose one. + facets = Facets.default(db.default_library()).navigate( + entrypoint=EbooksEntryPoint + ) + feed, make_link, entrypoints, selected = run(data.wl, facets) + + # This time, add_entrypoint_links was called, and passed both + # possible entry points and the selected entry point. + assert data.wl.entrypoints == entrypoints + assert selected == EbooksEntryPoint + + # The make_link function that was passed in calls + # TestAnnotator.feed_url() when passed an EntryPoint. The + # Facets object's other facet groups are propagated in this URL. + first_page_url = "http://wl/?available=all&collection=full&collectionName=All&distributor=All&entrypoint=Book&order=author" + assert first_page_url == make_link(EbooksEntryPoint) + + # Pagination information is not propagated through entry point links + # -- you always start at the beginning of the list. + pagination = Pagination(offset=100) + feed, make_link, entrypoints, selected = run(data.wl, facets, pagination) + assert first_page_url == make_link(EbooksEntryPoint) + + def test_search( + self, entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture + ): + data, db, session = ( + entrypoint_link_insertion_fixture, + entrypoint_link_insertion_fixture.db, + entrypoint_link_insertion_fixture.db.session, + ) + + # When OPDSAcquisitionFeed.search() generates the first page of + # search results, it will link to related searches for different + # entry points, assuming the WorkList has different entry points. + def run(wl=None, facets=None, pagination=None): + """Call search() and see what add_entrypoint_links + was called with. + """ + data.mock.called_with = None + OPDSAcquisitionFeed.search( + session, + "title", + "url", + wl, + None, + None, + pagination=pagination, + facets=facets, + annotator=data.annotator(), + ) + return data.mock.called_with + + # Mock search() so it never tries to return anything. + def mock_search(self, *args, **kwargs): + return [] + + data.no_eps.search = mock_search # type: ignore[method-assign, assignment] + data.wl.search = mock_search # type: ignore[method-assign, assignment] + + # This WorkList has no entry points, so the mock method is not + # even called. + assert None == run(data.no_eps) + + # The mock method is called for a WorkList that does have + # entry points. + facets = SearchFacets().navigate(entrypoint=EbooksEntryPoint) + assert isinstance(facets, SearchFacets) + feed, make_link, entrypoints, selected = run(data.wl, facets) + + # Since the SearchFacets has more than one entry point, + # the EverythingEntryPoint is prepended to the list of possible + # entry points. + assert [ + EverythingEntryPoint, + AudiobooksEntryPoint, + EbooksEntryPoint, + ] == entrypoints + + # add_entrypoint_links was passed the three possible entry points + # and the selected entry point. + assert selected == EbooksEntryPoint + + # The make_link function that was passed in calls + # TestAnnotator.search_url() when passed an EntryPoint. 
+ first_page_url = "http://wl/?available=all&collection=full&entrypoint=Book&order=relevance&search_type=default" + assert first_page_url == make_link(EbooksEntryPoint) + + # Pagination information is not propagated through entry point links + # -- you always start at the beginning of the list. + pagination = Pagination(offset=100) + feed, make_link, entrypoints, selected = run(data.wl, facets, pagination) + assert first_page_url == make_link(EbooksEntryPoint) + + +class TestLookupAcquisitionFeed: + @staticmethod + def _feed(session: Session, annotator=VerboseAnnotator, **kwargs): + """Helper method to create a LookupAcquisitionFeed.""" + return LookupAcquisitionFeed( + "Feed Title", + "http://whatever.io", + [], + annotator(), + **kwargs, + ) + + @staticmethod + def _entry( + session: Session, identifier, work, annotator=VerboseAnnotator, **kwargs + ): + """Helper method to create an entry.""" + feed = TestLookupAcquisitionFeed._feed(session, annotator, **kwargs) + entry = feed.single_entry((identifier, work), feed.annotator) + if isinstance(entry, OPDSMessage): + return feed, entry + return feed, entry + + def test_create_entry_uses_specified_identifier( + self, db: DatabaseTransactionFixture + ): + # Here's a Work with two LicensePools. + work = db.work(with_open_access_download=True) + original_pool = work.license_pools[0] + edition, new_pool = db.edition( + with_license_pool=True, with_open_access_download=True + ) + work.license_pools.append(new_pool) + + # We can generate two different OPDS entries for a single work + # depending on which identifier we look up. + ignore, e1 = self._entry(db.session, original_pool.identifier, work) + assert original_pool.identifier.urn == e1.computed.identifier + assert original_pool.presentation_edition.title == e1.computed.title.text + assert new_pool.identifier.urn != e1.computed.identifier + assert new_pool.presentation_edition.title != e1.computed.title.text + + # Different identifier and pool = different information + i = new_pool.identifier + ignore, e2 = self._entry(db.session, i, work) + assert new_pool.identifier.urn == e2.computed.identifier + assert new_pool.presentation_edition.title == e2.computed.title.text + assert original_pool.presentation_edition.title != e2.computed.title.text + assert original_pool.identifier.urn != e2.computed.identifier + + def test_error_on_mismatched_identifier(self, db: DatabaseTransactionFixture): + """We get an error if we try to make it look like an Identifier lookup + retrieved a Work that's not actually associated with that Identifier. + """ + work = db.work(with_open_access_download=True) + + # Here's an identifier not associated with any LicensePool or + # Work. + identifier = db.identifier() + + # It doesn't make sense to make an OPDS feed out of that + # Identifier and a totally random Work. + expect_error = 'I tried to generate an OPDS entry for the identifier "%s" using a Work not associated with that identifier.' + feed, entry = self._entry(db.session, identifier, work) + assert entry == OPDSMessage(identifier.urn, 500, expect_error % identifier.urn) + + # Even if the Identifier does have a Work, if the Works don't + # match, we get the same error. 
+        edition, lp = db.edition(with_license_pool=True)
+        feed, entry = self._entry(db.session, lp.identifier, work)
+        assert entry == OPDSMessage(
+            lp.identifier.urn, 500, expect_error % lp.identifier.urn
+        )
+
+    def test_error_when_work_has_no_licensepool(self, db: DatabaseTransactionFixture):
+        """Under most circumstances, a Work must have at least one
+        LicensePool for a lookup to succeed.
+        """
+
+        # Here's a work with no LicensePools.
+        work = db.work(title="Hello, World!", with_license_pool=False)
+        identifier = work.presentation_edition.primary_identifier
+        feed, entry = self._entry(db.session, identifier, work)
+        # By default, a work is treated as 'not in the collection' if
+        # there is no LicensePool for it.
+        assert isinstance(entry, OPDSMessage)
+        assert 404 == entry.status_code
+        assert "Identifier not found in collection" == entry.message
+
+    def test_unfilfullable_work(self, db: DatabaseTransactionFixture):
+        work = db.work(with_open_access_download=True)
+        [pool] = work.license_pools
+        feed, entry = self._entry(
+            db.session, pool.identifier, work, MockUnfulfillableAnnotator
+        )
+        expect = OPDSAcquisitionFeed.error_message(
+            pool.identifier,
+            403,
+            "I know about this work but can offer no way of fulfilling it.",
+        )
+        assert expect == entry
+
+
+class TestNavigationFeedFixture:
+    db: DatabaseTransactionFixture
+    fiction: Lane
+    fantasy: Lane
+    romance: Lane
+    contemporary_romance: Lane
+
+
+@pytest.fixture()
+def navigation_feed_fixture(
+    db,
+) -> TestNavigationFeedFixture:
+    data = TestNavigationFeedFixture()
+    data.db = db
+    data.fiction = db.lane("Fiction")
+    data.fantasy = db.lane("Fantasy", parent=data.fiction)
+    data.romance = db.lane("Romance", parent=data.fiction)
+    data.contemporary_romance = db.lane("Contemporary Romance", parent=data.romance)
+    return data
+
+
+class TestNavigationFeed:
+    def test_add_entry(self):
+        feed = NavigationFeed("title", "http://navigation", None, None)
+        feed.add_entry("http://example.com", "Example", "text/html")
+        [entry] = feed._feed.data_entries
+        assert "Example" == entry.title
+        [link] = entry.links
+        assert "http://example.com" == link.href
+        assert "text/html" == link.type
+        assert "subsection" == link.rel
+
+    def test_navigation_with_sublanes(
+        self, navigation_feed_fixture: TestNavigationFeedFixture
+    ):
+        data, db, session = (
+            navigation_feed_fixture,
+            navigation_feed_fixture.db,
+            navigation_feed_fixture.db.session,
+        )
+
+        private = object()
+        response = NavigationFeed.navigation(
+            session,
+            "Navigation",
+            "http://navigation",
+            data.fiction,
+            MockAnnotator(),
+        )
+
+        # The media type of this response is different from the
+        # typical OPDSFeedResponse.
+ assert OPDSFeed.NAVIGATION_FEED_TYPE == response.as_response().content_type + + feed = response._feed + + assert "Navigation" == feed.metadata["title"].text + [self_link] = feed.links + assert "http://navigation" == self_link.href + assert "self" == self_link.rel + assert "http://navigation" == feed.metadata["id"].text + [fantasy, romance] = sorted(feed.data_entries, key=lambda x: x.title or "") + + assert data.fantasy.display_name == fantasy.title + assert "http://%s/" % data.fantasy.id == fantasy.id + [fantasy_link] = fantasy.links + assert "http://%s/" % data.fantasy.id == fantasy_link.href + assert "subsection" == fantasy_link.rel + assert OPDSFeed.ACQUISITION_FEED_TYPE == fantasy_link.type + + assert data.romance.display_name == romance.title + assert "http://navigation/%s" % data.romance.id == romance.id + [romance_link] = romance.links + assert "http://navigation/%s" % data.romance.id == romance_link.href + assert "subsection" == romance_link.rel + assert OPDSFeed.NAVIGATION_FEED_TYPE == romance_link.type + + def test_navigation_without_sublanes( + self, navigation_feed_fixture: TestNavigationFeedFixture + ): + data, db, session = ( + navigation_feed_fixture, + navigation_feed_fixture.db, + navigation_feed_fixture.db.session, + ) + + feed = NavigationFeed.navigation( + session, "Navigation", "http://navigation", data.fantasy, MockAnnotator() + ) + parsed = feed._feed + assert "Navigation" == parsed.metadata["title"].text + [self_link] = parsed.links + assert "http://navigation" == self_link.href + assert "self" == self_link.rel + assert "http://navigation" == parsed.metadata["id"].text + [fantasy] = parsed.data_entries + + assert "All " + data.fantasy.display_name == fantasy.title + assert "http://%s/" % data.fantasy.id == fantasy.id + [fantasy_link] = fantasy.links + assert "http://%s/" % data.fantasy.id == fantasy_link.href + assert "subsection" == fantasy_link.rel + assert OPDSFeed.ACQUISITION_FEED_TYPE == fantasy_link.type diff --git a/tests/api/feed/test_opds_base.py b/tests/api/feed/test_opds_base.py new file mode 100644 index 0000000000..ead68ec710 --- /dev/null +++ b/tests/api/feed/test_opds_base.py @@ -0,0 +1,57 @@ +from flask import Request + +from core.feed.opds import get_serializer +from core.feed.serializer.opds import OPDS1Serializer +from core.feed.serializer.opds2 import OPDS2Serializer + + +class TestBaseOPDSFeed: + def test_get_serializer(self): + # The q-value should take priority + request = Request.from_values( + headers=dict( + Accept="application/atom+xml;q=0.8,application/opds+json;q=0.9" + ) + ) + assert isinstance(get_serializer(request.accept_mimetypes), OPDS2Serializer) + + # Multiple additional key-value pairs don't matter + request = Request.from_values( + headers=dict( + Accept="application/atom+xml;profile=opds-catalog;kind=acquisition;q=0.08, application/opds+json;q=0.9" + ) + ) + assert isinstance(get_serializer(request.accept_mimetypes), OPDS2Serializer) + + request = Request.from_values( + headers=dict( + Accept="application/atom+xml;profile=opds-catalog;kind=acquisition" + ) + ) + assert isinstance(get_serializer(request.accept_mimetypes), OPDS1Serializer) + + # The default q-value should be 1, but opds2 specificity is higher + request = Request.from_values( + headers=dict( + Accept="application/atom+xml;profile=feed,application/opds+json;q=0.9" + ) + ) + assert isinstance(get_serializer(request.accept_mimetypes), OPDS2Serializer) + + # The default q-value should sort above 0.9 + request = Request.from_values( + 
headers=dict(Accept="application/opds+json;q=0.9,application/atom+xml") + ) + assert isinstance(get_serializer(request.accept_mimetypes), OPDS1Serializer) + + # Same q-values respect order of definition in the code + request = Request.from_values( + headers=dict( + Accept="application/opds+json;q=0.9,application/atom+xml;q=0.9" + ) + ) + assert isinstance(get_serializer(request.accept_mimetypes), OPDS1Serializer) + + # No valid accept mimetype should default to OPDS1.x + request = Request.from_values(headers=dict(Accept="text/html")) + assert isinstance(get_serializer(request.accept_mimetypes), OPDS1Serializer) diff --git a/tests/api/feed/test_opds_serializer.py b/tests/api/feed/test_opds_serializer.py new file mode 100644 index 0000000000..afe28b71c5 --- /dev/null +++ b/tests/api/feed/test_opds_serializer.py @@ -0,0 +1,232 @@ +import datetime + +import pytz +from lxml import etree + +from core.feed.serializer.opds import OPDS1Serializer +from core.feed.types import ( + Acquisition, + Author, + FeedEntryType, + IndirectAcquisition, + Link, + WorkEntryData, +) +from core.util.opds_writer import OPDSFeed, OPDSMessage + + +class TestOPDSSerializer: + def test__serialize_feed_entry(self): + grandchild = FeedEntryType.create(text="grandchild", attr="gcattr") + child = FeedEntryType.create(text="child", attr="chattr", grandchild=grandchild) + parent = FeedEntryType.create(text="parent", attr="pattr", child=child) + + serialized = OPDS1Serializer()._serialize_feed_entry("parent", parent) + + assert serialized.tag == "parent" + assert serialized.text == "parent" + assert serialized.get("attr") == "pattr" + children = list(serialized) + assert len(children) == 1 + assert children[0].tag == "child" + assert children[0].text == "child" + assert children[0].get("attr") == "chattr" + children = list(children[0]) + assert len(children) == 1 + assert children[0].tag == "grandchild" + assert children[0].text == "grandchild" + assert children[0].get("attr") == "gcattr" + + def test__serialize_author_tag(self): + author = Author( + name="Author", + sort_name="sort_name", + role="role", + link=Link(href="http://author", title="link title"), + viaf="viaf", + family_name="family name", + wikipedia_name="wiki name", + lc="lc", + ) + + element = OPDS1Serializer()._serialize_author_tag("author", author) + + assert element.tag == "author" + assert element.get(f"{{{OPDSFeed.OPF_NS}}}role") == author.role + + expected_child_tags = [ + (f"{{{OPDSFeed.ATOM_NS}}}name", author.name, None), + (f"{{{OPDSFeed.SIMPLIFIED_NS}}}sort_name", author.sort_name, None), + ( + f"{{{OPDSFeed.SIMPLIFIED_NS}}}wikipedia_name", + author.wikipedia_name, + None, + ), + ("sameas", author.viaf, None), + ("sameas", author.lc, None), + ("link", None, dict(href=author.link.href, title=author.link.title)), + ] + + child: etree._Element + for expect in expected_child_tags: + tag, text, attrs = expect + + # element.find is not working for "link" :| + for child in element: + if child.tag == tag: + break + else: + assert False, f"Did not find {expect}" + + # Remove the element so we don't find it again + element.remove(child) + + # Assert the data + assert child.text == text + if attrs: + assert dict(child.attrib) == attrs + + # No more children + assert list(element) == [] + + def test__serialize_acquistion_link(self): + link = Acquisition( + href="http://acquisition", + holds_total="0", + copies_total="1", + availability_status="available", + indirect_acquisitions=[IndirectAcquisition(type="indirect")], + ) + element = 
OPDS1Serializer()._serialize_acquistion_link(link) + assert element.tag == "link" + assert dict(element.attrib) == dict(href=link.href) + + for child in element: + if child.tag == f"{{{OPDSFeed.OPDS_NS}}}indirectAcquisition": + assert child.get("type") == "indirect" + elif child.tag == f"{{{OPDSFeed.OPDS_NS}}}holds": + assert child.get("total") == "0" + elif child.tag == f"{{{OPDSFeed.OPDS_NS}}}copies": + assert child.get("total") == "1" + elif child.tag == f"{{{OPDSFeed.OPDS_NS}}}availability": + assert child.get("status") == "available" + + def test_serialize_work_entry(self): + data = WorkEntryData( + additionalType="type", + identifier="identifier", + pwid="permanent-work-id", + summary=FeedEntryType(text="summary"), + language=FeedEntryType(text="language"), + publisher=FeedEntryType(text="publisher"), + issued=datetime.datetime(2020, 2, 2, tzinfo=pytz.UTC), + published=FeedEntryType(text="published"), + updated=FeedEntryType(text="updated"), + title=FeedEntryType(text="title"), + subtitle=FeedEntryType(text="subtitle"), + series=FeedEntryType.create( + name="series", + link=Link(href="http://series", title="series title", rel="series"), + ), + imprint=FeedEntryType(text="imprint"), + authors=[Author(name="author")], + contributors=[Author(name="contributor")], + categories=[ + FeedEntryType.create(scheme="scheme", term="term", label="label") + ], + ratings=[FeedEntryType(text="rating")], + ) + + element = OPDS1Serializer().serialize_work_entry(data) + + assert ( + element.get(f"{{{OPDSFeed.SCHEMA_NS}}}additionalType") + == data.additionalType + ) + + child = element.xpath(f"id") + assert len(child) == 1 + assert child[0].text == data.identifier + + child = element.findall(f"{{{OPDSFeed.SIMPLIFIED_NS}}}pwid") + assert len(child) == 1 + assert child[0].text == data.pwid + + child = element.xpath("summary") + assert len(child) == 1 + assert child[0].text == data.summary.text + + child = element.findall(f"{{{OPDSFeed.DCTERMS_NS}}}language") + assert len(child) == 1 + assert child[0].text == data.language.text + + child = element.findall(f"{{{OPDSFeed.DCTERMS_NS}}}publisher") + assert len(child) == 1 + assert child[0].text == data.publisher.text + + child = element.findall(f"{{{OPDSFeed.DCTERMS_NS}}}issued") + assert len(child) == 1 + assert child[0].text == data.issued.date().isoformat() + + child = element.findall(f"published") + assert len(child) == 1 + assert child[0].text == data.published.text + + child = element.findall(f"updated") + assert len(child) == 1 + assert child[0].text == data.updated.text + + child = element.findall(f"title") + assert len(child) == 1 + assert child[0].text == data.title.text + + child = element.findall(f"{{{OPDSFeed.SCHEMA_NS}}}alternativeHeadline") + assert len(child) == 1 + assert child[0].text == data.subtitle.text + + child = element.findall(f"{{{OPDSFeed.SCHEMA_NS}}}series") + assert len(child) == 1 + assert child[0].get("name") == getattr(data.series, "name") + link = list(child[0])[0] + assert link.tag == "link" + assert link.get("title") == "series title" + assert link.get("href") == "http://series" + + child = element.findall(f"{{{OPDSFeed.BIB_SCHEMA_NS}}}publisherImprint") + assert len(child) == 1 + assert child[0].text == data.imprint.text + + child = element.findall(f"author") + assert len(child) == 1 + name_tag = list(child[0])[0] + assert name_tag.tag == f"{{{OPDSFeed.ATOM_NS}}}name" + assert name_tag.text == "author" + + child = element.findall(f"contributor") + assert len(child) == 1 + name_tag = list(child[0])[0] + assert name_tag.tag == 
f"{{{OPDSFeed.ATOM_NS}}}name" + assert name_tag.text == "contributor" + + child = element.findall(f"category") + assert len(child) == 1 + assert child[0].get("scheme") == "scheme" + assert child[0].get("term") == "term" + assert child[0].get("label") == "label" + + child = element.findall(f"Rating") + assert len(child) == 1 + assert child[0].text == data.ratings[0].text + + def test_serialize_work_entry_empty(self): + # A no-data work entry + element = OPDS1Serializer().serialize_work_entry(WorkEntryData()) + # This will create an empty tag + assert element.tag == "entry" + assert list(element) == [] + + def test_serialize_opds_message(self): + message = OPDSMessage("URN", 200, "Description") + serializer = OPDS1Serializer() + result = serializer.serialize_opds_message(message) + assert serializer.to_string(result) == serializer.to_string(message.tag) diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index f97796b238..af0a0229d7 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -4,9 +4,12 @@ from api.config import Configuration from api.controller import CirculationManager from api.custom_index import CustomIndexView -from api.opds import CirculationManagerAnnotator, LibraryAnnotator from api.problem_details import * from core.external_search import MockExternalSearchIndex +from core.feed.annotator.circulation import ( + CirculationManagerAnnotator, + LibraryAnnotator, +) from core.lane import Facets, WorkList from core.model import Admin, CachedFeed, ConfigurationSetting, create from core.model.discovery_service_registration import DiscoveryServiceRegistration diff --git a/tests/api/test_controller_crawlfeed.py b/tests/api/test_controller_crawlfeed.py index 0750124a2a..4dee039352 100644 --- a/tests/api/test_controller_crawlfeed.py +++ b/tests/api/test_controller_crawlfeed.py @@ -1,6 +1,7 @@ import json from contextlib import contextmanager from typing import Any +from unittest.mock import MagicMock import feedparser from flask import url_for @@ -11,10 +12,10 @@ CrawlableFacets, DynamicLane, ) -from api.opds import CirculationManagerAnnotator from api.problem_details import NO_SUCH_COLLECTION, NO_SUCH_LIST from core.external_search import MockSearchResult, SortKeyPagination -from core.opds import AcquisitionFeed +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.circulation import CirculationManagerAnnotator from core.problem_details import INVALID_INPUT from core.util.flask_util import Response from core.util.problem_detail import ProblemDetail @@ -30,7 +31,7 @@ def mock_crawlable_feed(self, circulation_fixture: CirculationControllerFixture) controller = circulation_fixture.manager.opds_feeds original = controller._crawlable_feed - def mock(title, url, worklist, annotator=None, feed_class=AcquisitionFeed): + def mock(title, url, worklist, annotator=None, feed_class=OPDSAcquisitionFeed): self._crawlable_feed_called_with = dict( title=title, url=url, @@ -70,7 +71,7 @@ def test_crawlable_library_feed( assert expect_url == kwargs.pop("url") assert library.name == kwargs.pop("title") assert None == kwargs.pop("annotator") - assert AcquisitionFeed == kwargs.pop("feed_class") + assert OPDSAcquisitionFeed == kwargs.pop("feed_class") # A CrawlableCollectionBasedLane has been set up to show # everything in any of the requested library's collections. 
@@ -173,7 +174,7 @@ def test_crawlable_list_feed( assert expect_url == kwargs.pop("url") assert customlist.name == kwargs.pop("title") assert None == kwargs.pop("annotator") - assert AcquisitionFeed == kwargs.pop("feed_class") + assert OPDSAcquisitionFeed == kwargs.pop("feed_class") # A CrawlableCustomListBasedLane was created to fetch only # the works in the custom list. @@ -190,7 +191,9 @@ class MockFeed: @classmethod def page(cls, **kwargs): self.page_called_with = kwargs - return Response("An OPDS feed") + feed = MagicMock() + feed.as_response.return_value = Response("An OPDS feed") + return feed work = circulation_fixture.db.work(with_open_access_download=True) diff --git a/tests/api/test_controller_loan.py b/tests/api/test_controller_loan.py index a8b3664f6c..e0eb5032bf 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/test_controller_loan.py @@ -885,7 +885,7 @@ def test_fulfill_without_single_item_feed(self, loan_fixture: LoanFixture): authenticated = controller.authenticated_patron_from_request() loan_fixture.pool.loan_to(authenticated) with patch( - "api.controller.LibraryLoanAndHoldAnnotator.single_item_feed" + "api.controller.OPDSAcquisitionFeed.single_entry_loans_feed" ) as feed, patch.object(circulation, "fulfill") as fulfill: # Complex setup # The fulfillmentInfo should not be have response type diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/test_controller_opdsfeed.py index 5074db8136..dd81e7b5c5 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/test_controller_opdsfeed.py @@ -8,14 +8,16 @@ from api.controller import CirculationManager from api.lanes import HasSeriesFacets, JackpotFacets, JackpotWorkList -from api.opds import LibraryAnnotator from api.problem_details import REMOTE_INTEGRATION_FAILED from core.app_server import load_facets_from_request from core.entrypoint import AudiobooksEntryPoint, EverythingEntryPoint from core.external_search import SortKeyPagination -from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.circulation import LibraryAnnotator +from core.feed.navigation import NavigationFeed +from core.lane import Facets, FeaturedFacets, Pagination, SearchFacets, WorkList from core.model import CachedFeed, Edition -from core.opds import AcquisitionFeed, NavigationFacets, NavigationFeed +from core.opds import NavigationFacets from core.util.flask_util import Response from tests.fixtures.api_controller import CirculationControllerFixture, WorkSpec from tests.fixtures.library import LibraryFixture @@ -107,9 +109,6 @@ def test_feed( # index. assert 200 == response.status_code - assert ( - "max-age=%d" % Lane.MAX_CACHE_AGE in response.headers["Cache-Control"] - ) feed = feedparser.parse(response.data) assert {x.title for x in circulation_fixture.works} == { x["title"] for x in feed["entries"] @@ -171,7 +170,9 @@ class Mock: @classmethod def page(cls, **kwargs): self.called_with = kwargs - return Response("An OPDS feed") + resp = MagicMock() + resp.as_response.return_value = Response("An OPDS feed") + return resp sort_key = ["sort", "pagination", "key"] with circulation_fixture.request_context_with_library( @@ -228,9 +229,6 @@ def page(cls, **kwargs): "search_engine" ) - # max age - assert 10 == kwargs.pop("max_age") - # No other arguments were passed into page(). assert {} == kwargs @@ -293,7 +291,9 @@ def groups(cls, **kwargs): # the grouped feed controller is activated. 
self.groups_called_with = kwargs self.page_called_with = None - return Response("A grouped feed") + resp = MagicMock() + resp.as_response.return_value = Response("A grouped feed") + return resp @classmethod def page(cls, **kwargs): @@ -301,7 +301,9 @@ def page(cls, **kwargs): # ends up being called instead. self.groups_called_with = None self.page_called_with = kwargs - return Response("A paginated feed") + resp = MagicMock() + resp.as_response.return_value = Response("A paginated feed") + return resp # Earlier we tested an authenticated request for a patron with an # external type. Now try an authenticated request for a patron with @@ -327,7 +329,6 @@ def page(cls, **kwargs): # The Response returned by Mock.groups() has been converted # into a Flask response. - assert 200 == response.status_code assert "A grouped feed" == response.get_data(as_text=True) # While we're in request context, generate the URL we @@ -504,7 +505,9 @@ class Mock: @classmethod def search(cls, **kwargs): self.called_with = kwargs - return "An OPDS feed" + resp = MagicMock() + resp.as_response.return_value = "An OPDS feed" + return resp with circulation_fixture.request_context_with_library( "/?q=t&size=99&after=22&media=Music" @@ -783,7 +786,7 @@ def test_qa_feed(self, circulation_fixture: CirculationControllerFixture): # For the most part, we're verifying that the expected values # are passed in to _qa_feed. - assert AcquisitionFeed.groups == kwargs.pop("feed_factory") # type: ignore + assert OPDSAcquisitionFeed.groups == kwargs.pop("feed_factory") # type: ignore assert JackpotFacets == kwargs.pop("facet_class") # type: ignore assert "qa_feed" == kwargs.pop("controller_name") # type: ignore assert "QA test feed" == kwargs.pop("feed_title") # type: ignore @@ -822,7 +825,7 @@ def test_qa_feed2(self, circulation_fixture: CirculationControllerFixture): # For the most part, we're verifying that the expected values # are passed in to _qa_feed. - assert AcquisitionFeed.groups == kwargs.pop("feed_factory") # type: ignore + assert OPDSAcquisitionFeed.groups == kwargs.pop("feed_factory") # type: ignore assert JackpotFacets == kwargs.pop("facet_class") # type: ignore assert "qa_feed" == kwargs.pop("controller_name") # type: ignore assert "QA test feed" == kwargs.pop("feed_title") # type: ignore @@ -865,7 +868,7 @@ def test_qa_series_feed(self, circulation_fixture: CirculationControllerFixture) # Note that the feed_method is different from the one in qa_feed. # We want to generate an ungrouped feed rather than a grouped one. 
- assert AcquisitionFeed.page == kwargs.pop("feed_factory") # type: ignore + assert OPDSAcquisitionFeed.page == kwargs.pop("feed_factory") # type: ignore assert HasSeriesFacets == kwargs.pop("facet_class") # type: ignore assert "qa_series_feed" == kwargs.pop("controller_name") # type: ignore assert "QA series test feed" == kwargs.pop("feed_title") # type: ignore diff --git a/tests/api/test_controller_work.py b/tests/api/test_controller_work.py index 0712ab298a..acc1a61f19 100644 --- a/tests/api/test_controller_work.py +++ b/tests/api/test_controller_work.py @@ -2,6 +2,7 @@ import json import urllib.parse from typing import Any, Dict +from unittest.mock import MagicMock import feedparser import flask @@ -18,15 +19,16 @@ SeriesLane, ) from api.novelist import MockNoveListAPI -from api.opds import LibraryAnnotator from api.problem_details import NO_SUCH_LANE, NOT_FOUND_ON_REMOTE from core.classifier import Classifier from core.entrypoint import AudiobooksEntryPoint from core.external_search import SortKeyPagination, mock_search_index +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.annotator.circulation import LibraryAnnotator +from core.feed.types import WorkEntry from core.lane import Facets, FeaturedFacets from core.metadata_layer import ContributorData, Metadata from core.model import ( - CachedFeed, DataSource, Edition, Identifier, @@ -37,7 +39,6 @@ tuple_to_numericrange, ) from core.model.work import Work -from core.opds import AcquisitionFeed from core.problem_details import INVALID_INPUT from core.util.datetime_helpers import utc_now from core.util.flask_util import Response @@ -138,11 +139,6 @@ def test_contributor(self, work_fixture: WorkFixture): ] assert 10 == len(facet_links) - # The feed was cached. - cached = work_fixture.db.session.query(CachedFeed).one() - assert CachedFeed.CONTRIBUTOR_TYPE == cached.type - assert "John Bull-eng,spa-Children,Young+Adult" == cached.unique_key - # At this point we don't want to generate real feeds anymore. # We can't do a real end-to-end test without setting up a real # search index, which is obnoxiously slow. @@ -164,7 +160,9 @@ class Mock: @classmethod def page(cls, **kwargs): self.called_with = kwargs - return Response("An OPDS feed") + resp = MagicMock() + resp.as_response.return_value = Response("An OPDS feed") + return resp # Test a basic request with custom faceting, pagination, and a # language and audience restriction. 
This will exercise nearly @@ -291,12 +289,12 @@ def test_permalink(self, work_fixture: WorkFixture): work_fixture.identifier.type, work_fixture.identifier.identifier ) annotator = LibraryAnnotator(None, None, work_fixture.db.default_library()) - expect = AcquisitionFeed.single_entry( - work_fixture.db.session, work_fixture.english_1, annotator - ).data + feed = OPDSAcquisitionFeed.single_entry(work_fixture.english_1, annotator) + assert isinstance(feed, WorkEntry) + expect = OPDSAcquisitionFeed.entry_as_response(feed) assert 200 == response.status_code - assert expect == response.get_data() + assert expect.data == response.get_data() assert OPDSFeed.ENTRY_TYPE == response.headers["Content-Type"] def test_permalink_does_not_return_fulfillment_links_for_authenticated_patrons_without_loans( @@ -334,9 +332,9 @@ def test_permalink_does_not_return_fulfillment_links_for_authenticated_patrons_w work_fixture.db.default_library(), active_loans_by_work=active_loans_by_work, ) - expect = AcquisitionFeed.single_entry( - work_fixture.db.session, work, annotator - ).data + feed = OPDSAcquisitionFeed.single_entry(work, annotator) + assert isinstance(feed, WorkEntry) + expect = OPDSAcquisitionFeed.entry_as_response(feed).data response = work_fixture.manager.work_controller.permalink( identifier_type, identifier @@ -382,9 +380,9 @@ def test_permalink_returns_fulfillment_links_for_authenticated_patrons_with_loan work_fixture.db.default_library(), active_loans_by_work=active_loans_by_work, ) - expect = AcquisitionFeed.single_entry( - work_fixture.db.session, work, annotator - ).data + feed = OPDSAcquisitionFeed.single_entry(work, annotator) + assert isinstance(feed, WorkEntry) + expect = OPDSAcquisitionFeed.entry_as_response(feed).data response = work_fixture.manager.work_controller.permalink( identifier_type, identifier @@ -475,9 +473,9 @@ def test_permalink_returns_fulfillment_links_for_authenticated_patrons_with_fulf work_fixture.db.default_library(), active_loans_by_work=active_loans_by_work, ) - expect = AcquisitionFeed.single_entry( - work_fixture.db.session, work, annotator - ).data + feed = OPDSAcquisitionFeed.single_entry(work, annotator) + assert isinstance(feed, WorkEntry) + expect = OPDSAcquisitionFeed.entry_as_response(feed).data response = work_fixture.manager.work_controller.permalink( identifier_type, identifier @@ -561,7 +559,9 @@ class Mock: @classmethod def page(cls, **kwargs): cls.called_with = kwargs - return Response("A bunch of titles") + resp = MagicMock() + resp.as_response.return_value = Response("A bunch of titles") + return resp kwargs["feed_class"] = Mock with work_fixture.request_context_with_library( @@ -754,7 +754,9 @@ class Mock: @classmethod def groups(cls, **kwargs): cls.called_with = kwargs - return Response("An OPDS feed") + resp = MagicMock() + resp.as_response.return_value = Response("An OPDS feed") + return resp mock_api.setup_method(metadata) with work_fixture.request_context_with_library("/?entrypoint=Audio"): @@ -818,7 +820,7 @@ def groups(cls, **kwargs): **url_kwargs, ) assert kwargs.pop("url") == expect_url - + assert kwargs.pop("pagination") == None # That's it! assert {} == kwargs @@ -910,14 +912,6 @@ def test_series(self, work_fixture: WorkFixture): assert "Sort by" == series_position["opds:facetgroup"] assert "true" == series_position["opds:activefacet"] - # The feed was cached. 
- cached = work_fixture.db.session.query(CachedFeed).one() - assert CachedFeed.SERIES_TYPE == cached.type - assert ( - "Like As If Whatever Mysteries-eng,spa-Children,Young+Adult" - == cached.unique_key - ) - # At this point we don't want to generate real feeds anymore. # We can't do a real end-to-end test without setting up a real # search index, which is obnoxiously slow. @@ -938,7 +932,9 @@ class Mock: @classmethod def page(cls, **kwargs): self.called_with = kwargs - return Response("An OPDS feed") + resp = MagicMock() + resp.as_response.return_value = Response("An OPDS feed") + return resp # Test a basic request with custom faceting, pagination, and a # language and audience restriction. This will exercise nearly diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index db2e62a92c..50846c5697 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -337,7 +337,9 @@ class MockAcquisitionFeed: @classmethod def page(cls, **kwargs): cls.called_with = kwargs - return "here's your feed" + resp = MagicMock() + resp.as_response.return_value = "here's your feed" + return resp # Test our ability to generate a single feed. script = CacheFacetListsPerLane(db.session, testing=True, cmd_args=[]) @@ -355,7 +357,6 @@ def page(cls, **kwargs): assert db.session == args["_db"] # type: ignore assert lane == args["worklist"] # type: ignore assert lane.display_name == args["title"] # type: ignore - assert 0 == args["max_age"] # type: ignore # The Pagination object was passed into # MockAcquisitionFeed.page, and it was also used to make the @@ -415,7 +416,9 @@ class MockAcquisitionFeed: @classmethod def groups(cls, **kwargs): cls.called_with = kwargs - return "here's your feed" + resp = MagicMock() + resp.as_response.return_value = "here's your feed" + return resp # Test our ability to generate a single feed. script = CacheOPDSGroupFeedPerLane(db.session, testing=True, cmd_args=[]) @@ -433,7 +436,6 @@ def groups(cls, **kwargs): assert db.session == args["_db"] # type: ignore assert lane == args["worklist"] # type: ignore assert lane.display_name == args["title"] # type: ignore - assert 0 == args["max_age"] # type: ignore assert pagination == None # The Facets object was passed into @@ -503,6 +505,8 @@ def test_facets( (no_entry_point,) = script.facets(lane) assert None == no_entry_point.entrypoint + # We no longer cache the feeds + @pytest.mark.skip def test_do_run(self, lane_script_fixture: LaneScriptFixture): db = lane_script_fixture.db diff --git a/tests/core/test_app_server.py b/tests/core/test_app_server.py index 9a07373f68..da73365833 100644 --- a/tests/core/test_app_server.py +++ b/tests/core/test_app_server.py @@ -22,10 +22,10 @@ ) from core.config import Configuration from core.entrypoint import AudiobooksEntryPoint, EbooksEntryPoint +from core.feed.annotator.base import Annotator from core.lane import Facets, Pagination, SearchFacets, WorkList from core.log import LogConfiguration from core.model import ConfigurationSetting, Identifier -from core.opds import MockAnnotator from core.problem_details import INVALID_INPUT, INVALID_URN from core.util.opds_writer import OPDSFeed, OPDSMessage from tests.fixtures.database import DatabaseTransactionFixture @@ -265,7 +265,7 @@ def test_work_lookup( work = data.transaction.work(with_license_pool=True) identifier = work.license_pools[0].identifier - annotator = MockAnnotator() + annotator = Annotator() # NOTE: We run this test twice to verify that the controller # doesn't keep any state between requests. 
At one point there # was a bug which would have caused a book to show up twice on @@ -311,7 +311,7 @@ def test_permalink(self, urn_lookup_controller_fixture: URNLookupControllerFixtu work = data.transaction.work(with_license_pool=True) work.license_pools[0].open_access = False identifier = work.license_pools[0].identifier - annotator = MockAnnotator() + annotator = Annotator() with data.app.test_request_context("/?urn=%s" % identifier.urn): response = data.controller.permalink(identifier.urn, annotator) From a75cbdea9a21e9207e44f03e00754c5b63c1937b Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 15 Sep 2023 09:40:07 -0300 Subject: [PATCH 034/262] Remove collection content mirroring (PP-415) (#1358) We are not using the collection content mirroring functionality anywhere in our production CMs and the settings to support this add additional complexity to collection configuration screens. This was done as part of the work for PP-95. --- api/admin/controller/collection_settings.py | 88 +-- api/admin/controller/settings.py | 61 -- api/admin/controller/work_editor.py | 316 ---------- api/admin/opds.py | 18 - api/admin/routes.py | 26 - api/odl2.py | 5 - bin/repair/mirror_resources | 11 - core/feed/annotator/admin.py | 20 - core/metadata_layer.py | 306 +--------- core/model/configuration.py | 56 -- core/model/constants.py | 2 - core/model/resource.py | 38 +- core/opds2_import.py | 4 - core/opds_import.py | 33 +- core/scripts.py | 179 +----- .../api/admin/controller/test_collections.py | 248 -------- tests/api/admin/controller/test_settings.py | 81 --- .../api/admin/controller/test_work_editor.py | 399 ------------ tests/api/admin/test_opds.py | 59 +- tests/api/admin/test_routes.py | 20 - tests/api/feed/test_admin.py | 62 -- tests/api/files/images/blue_small.jpg | Bin 699 -> 0 bytes .../files/images/blue_with_title_author.png | Bin 15239 -> 0 bytes tests/core/models/test_configuration.py | 34 -- tests/core/models/test_resource.py | 52 -- tests/core/test_circulation_data.py | 244 -------- tests/core/test_coverage.py | 29 - tests/core/test_metadata.py | 566 +----------------- tests/core/test_opds_import.py | 425 ------------- tests/core/test_scripts.py | 350 +---------- 30 files changed, 15 insertions(+), 3717 deletions(-) delete mode 100755 bin/repair/mirror_resources delete mode 100644 tests/api/files/images/blue_small.jpg delete mode 100644 tests/api/files/images/blue_with_title_author.png diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index 4b1508f1bd..d5a596d044 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -11,12 +11,9 @@ CANNOT_DELETE_COLLECTION_WITH_CHILDREN, COLLECTION_NAME_ALREADY_IN_USE, INCOMPLETE_CONFIGURATION, - INTEGRATION_GOAL_CONFLICT, MISSING_COLLECTION, MISSING_COLLECTION_NAME, - MISSING_INTEGRATION, MISSING_PARENT, - MISSING_SERVICE, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, @@ -27,13 +24,11 @@ from core.model import ( Collection, ConfigurationSetting, - ExternalIntegration, Library, get_one, get_one_or_create, ) from core.model.admin import Admin -from core.model.configuration import ExternalIntegrationLink from core.model.integration import IntegrationConfiguration from core.util.problem_detail import ProblemDetail, ProblemError @@ -67,12 +62,6 @@ def _get_collection_protocols(self): _settings.append(protocol["settings"][v]) protocol["settings"] = _settings - # If there are storage integrations, add a mirror 
integration - # setting to every protocol's 'settings' block. - mirror_integration_settings = self._mirror_integration_settings() - if mirror_integration_settings: - for protocol in protocols: - protocol["settings"] += mirror_integration_settings return protocols def process_collections(self): @@ -104,9 +93,7 @@ def process_get(self): collection_dict[ "settings" ] = collection_object.integration_configuration.settings_dict - self.load_settings( - protocol["settings"], collection_object, collection_dict["settings"] - ) + self.load_settings(collection_object, collection_dict["settings"]) collection_dict["self_test_results"] = self._get_prior_test_results( collection_object ) @@ -152,29 +139,11 @@ def load_libraries(self, collection_object: Collection, user: Admin) -> List[Dic return libraries - def load_settings(self, protocol_settings, collection_object, collection_settings): + def load_settings(self, collection_object, collection_settings): """Compile the information about the collection that corresponds to the settings externally imposed by the collection's protocol.""" settings = collection_settings - for protocol_setting in protocol_settings: - if not protocol_setting: - continue - key = protocol_setting.get("key") - if not collection_settings or key not in collection_settings: - if key.endswith("mirror_integration_id"): - storage_integration = get_one( - self._db, - ExternalIntegrationLink, - external_integration_id=collection_object.external_integration_id, - # either 'books_mirror' or 'covers_mirror' - purpose=key.rsplit("_", 2)[0], - ) - if storage_integration: - value = str(storage_integration.other_integration_id) - else: - value = self.NO_MIRROR_INTEGRATION - settings[key] = value settings["external_account_id"] = collection_object.external_account_id def find_protocol_class(self, collection_object): @@ -333,16 +302,6 @@ def process_settings( if error: return error collection.external_account_id = value - elif key.endswith("mirror_integration_id") and value: - external_integration_link = self._set_external_integration_link( - self._db, - key, - value, - collection, - ) - - if isinstance(external_integration_link, ProblemDetail): - return external_integration_link elif value is not None: # Only if the key was present in the request should we add it collection_settings[key] = value @@ -362,49 +321,6 @@ def process_settings( collection.integration_configuration.settings_dict = validated_settings.dict() return None - def _set_external_integration_link( - self, - _db, - key, - value, - collection, - ): - """Find or create a ExternalIntegrationLink and either delete it - or update the other external integration it links to. 
- """ - collection_service = get_one( - _db, ExternalIntegration, id=collection.external_integration_id - ) - - storage_service = None - other_integration_id = None - - purpose = key.rsplit("_", 2)[0] - external_integration_link, ignore = get_one_or_create( - _db, - ExternalIntegrationLink, - library_id=None, - external_integration_id=collection_service.id, - purpose=purpose, - ) - if not external_integration_link: - return MISSING_INTEGRATION - - if value == self.NO_MIRROR_INTEGRATION: - _db.delete(external_integration_link) - else: - storage_service = get_one(_db, ExternalIntegration, id=value) - if storage_service: - if storage_service.goal != ExternalIntegration.STORAGE_GOAL: - return INTEGRATION_GOAL_CONFLICT - other_integration_id = storage_service.id - else: - return MISSING_SERVICE - - external_integration_link.other_integration_id = other_integration_id - - return external_integration_link - def process_libraries(self, protocol, collection): """Go through the libraries that the user is trying to associate with this collection; check that each library actually exists, and that the library-related configuration settings diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index 7f05d7d73d..3b75a54431 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -1,6 +1,5 @@ from __future__ import annotations -import copy import json import logging from typing import TYPE_CHECKING, Any, Dict, Optional, Type, cast @@ -47,7 +46,6 @@ get_one_or_create, ) from core.opds_import import OPDSImporter, OPDSImportMonitor -from core.s3 import S3UploaderConfiguration from core.selftest import BaseHasSelfTests from core.util.problem_detail import ProblemDetail @@ -532,65 +530,6 @@ def _get_prior_test_results(self, item, protocol_class=None, *extra_args): return self_test_results - def _mirror_integration_settings(self): - """Create a setting interface for selecting a storage integration to - be used when mirroring items from a collection. 
- """ - integrations = ( - self._db.query(ExternalIntegration) - .filter(ExternalIntegration.goal == ExternalIntegration.STORAGE_GOAL) - .order_by(ExternalIntegration.name) - ) - - if not integrations.all(): - return - - mirror_integration_settings = copy.deepcopy( - ExternalIntegrationLink.COLLECTION_MIRROR_SETTINGS - ) - for integration in integrations: - book_covers_bucket = integration.setting( - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY - ).value - open_access_bucket = integration.setting( - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY - ).value - protected_access_bucket = integration.setting( - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY - ).value - - analytics_bucket = integration.setting( - S3UploaderConfiguration.ANALYTICS_BUCKET_KEY - ).value - - for setting in mirror_integration_settings: - if ( - setting["key"] == ExternalIntegrationLink.COVERS_KEY - and book_covers_bucket - ): - setting["options"].append( - {"key": str(integration.id), "label": integration.name} - ) - elif setting["key"] == ExternalIntegrationLink.OPEN_ACCESS_BOOKS_KEY: - if open_access_bucket: - setting["options"].append( - {"key": str(integration.id), "label": integration.name} - ) - elif ( - setting["key"] == ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS_KEY - ): - if protected_access_bucket: - setting["options"].append( - {"key": str(integration.id), "label": integration.name} - ) - elif setting["key"] == ExternalIntegrationLink.ANALYTICS_KEY: - if protected_access_bucket: - setting["options"].append( - {"key": str(integration.id), "label": integration.name} - ) - - return mirror_integration_settings - def _create_integration(self, protocol_definitions, protocol, goal): """Create a new ExternalIntegration for the given protocol and goal, assuming that doing so is compatible with the protocol's diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index b8c41014fb..fc61840dd5 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -1,25 +1,14 @@ -import base64 import json -import os -import textwrap -import urllib.error -import urllib.parse -import urllib.request -from io import BytesIO import flask from flask import Response from flask_babel import lazy_gettext as _ -from PIL import Image, ImageDraw, ImageFont from api.admin.problem_details import * -from api.admin.validator import Validator from core.classifier import NO_NUMBER, NO_VALUE, SimplifiedGenreClassifier, genres from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.admin import AdminAnnotator from core.lane import Lane -from core.metadata_layer import LinkData, Metadata, ReplacementPolicy -from core.mirror import MirrorUploader from core.model import ( Classification, Contributor, @@ -30,14 +19,12 @@ Hyperlink, Measurement, PresentationCalculationPolicy, - Representation, RightsStatus, Subject, create, get_one, get_one_or_create, ) -from core.model.configuration import ExternalIntegrationLink from core.util import LanguageCodes from core.util.datetime_helpers import strptime_utc, utc_now from core.util.problem_detail import ProblemDetail @@ -645,309 +632,6 @@ def edit_classifications(self, identifier_type, identifier): return Response("", 200) - MINIMUM_COVER_WIDTH = 600 - MINIMUM_COVER_HEIGHT = 900 - TOP = "top" - CENTER = "center" - BOTTOM = "bottom" - TITLE_POSITIONS = [TOP, CENTER, BOTTOM] - - def _validate_cover_image(self, image): - image_width, image_height = image.size - if ( - image_width < self.MINIMUM_COVER_WIDTH - or 
image_height < self.MINIMUM_COVER_HEIGHT - ): - return INVALID_IMAGE.detailed( - _( - "Cover image must be at least %(width)spx in width and %(height)spx in height.", - width=self.MINIMUM_COVER_WIDTH, - height=self.MINIMUM_COVER_HEIGHT, - ) - ) - return True - - def _process_cover_image(self, work, image, title_position): - title = work.presentation_edition.title - author = work.presentation_edition.author - if author == Edition.UNKNOWN_AUTHOR: - author = "" - - if title_position in self.TITLE_POSITIONS: - # Convert image to 'RGB' mode if it's not already, so drawing on it works. - if image.mode != "RGB": - image = image.convert("RGB") - - draw = ImageDraw.Draw(image) - image_width, image_height = image.size - - admin_dir = os.path.dirname(os.path.split(__file__)[0]) - package_dir = os.path.join(admin_dir, "../..") - bold_font_path = os.path.join(package_dir, "resources/OpenSans-Bold.ttf") - regular_font_path = os.path.join( - package_dir, "resources/OpenSans-Regular.ttf" - ) - font_size = image_width // 20 - bold_font = ImageFont.truetype(bold_font_path, font_size) - regular_font = ImageFont.truetype(regular_font_path, font_size) - - padding = image_width / 40 - - max_line_width = 0 - bold_char_width = bold_font.getlength("n") - bold_char_count = image_width / bold_char_width - regular_char_width = regular_font.getlength("n") - regular_char_count = image_width / regular_char_width - title_lines = textwrap.wrap(title, bold_char_count) - author_lines = textwrap.wrap(author, regular_char_count) - for lines, font in [(title_lines, bold_font), (author_lines, regular_font)]: - for line in lines: - line_width = font.getlength(line) - if line_width > max_line_width: - max_line_width = line_width - - ascent, descent = bold_font.getmetrics() - line_height = ascent + descent - - total_text_height = line_height * (len(title_lines) + len(author_lines)) - rectangle_height = total_text_height + line_height - - rectangle_width = max_line_width + 2 * padding - - start_x = (image_width - rectangle_width) / 2 - if title_position == self.BOTTOM: - start_y = image_height - rectangle_height - image_height / 14 - elif title_position == self.CENTER: - start_y = (image_height - rectangle_height) / 2 - else: - start_y = image_height / 14 - - draw.rectangle( - [ - (start_x, start_y), - (start_x + rectangle_width, start_y + rectangle_height), - ], - fill=(255, 255, 255, 255), - ) - - current_y = start_y + line_height / 2 - for lines, font in [(title_lines, bold_font), (author_lines, regular_font)]: - for line in lines: - line_width = font.getlength(line) - draw.text( - (start_x + (rectangle_width - line_width) / 2, current_y), - line, - font=font, - fill=(0, 0, 0, 255), - ) - current_y += line_height - - del draw - - return image - - def preview_book_cover(self, identifier_type, identifier): - """Return a preview of the submitted cover image information.""" - self.require_librarian(flask.request.library) - work = self.load_work(flask.request.library, identifier_type, identifier) - if isinstance(work, ProblemDetail): - return work - - image = self.generate_cover_image(work, identifier_type, identifier, True) - if isinstance(image, ProblemDetail): - return image - - buffer = BytesIO() - image.save(buffer, format="PNG") - b64 = base64.b64encode(buffer.getvalue()) - value = "data:image/png;base64,%s" % b64 - - return Response(value, 200) - - def generate_cover_image(self, work, identifier_type, identifier, preview=False): - image_file = flask.request.files.get("cover_file") - image_url = 
flask.request.form.get("cover_url") - if not image_file and not image_url: - return INVALID_IMAGE.detailed(_("Image file or image URL is required.")) - elif image_url and not Validator()._is_url(image_url, []): - return INVALID_URL.detailed( - _('"%(url)s" is not a valid URL.', url=image_url) - ) - - title_position = flask.request.form.get("title_position") - if image_url and not image_file: - image_file = BytesIO(urllib.request.urlopen(image_url).read()) - - image = Image.open(image_file) - result = self._validate_cover_image(image) - if isinstance(result, ProblemDetail): - return result - - if preview: - image = self._title_position(work, image) - - return image - - def _title_position(self, work, image): - title_position = flask.request.form.get("title_position") - if title_position and title_position in self.TITLE_POSITIONS: - return self._process_cover_image(work, image, title_position) - return image - - def _original_cover_info( - self, image, work, data_source, rights_uri, rights_explanation - ): - original, derivation_settings, cover_href = None, None, None - cover_rights_explanation = rights_explanation - title_position = flask.request.form.get("title_position") - cover_url = flask.request.form.get("cover_url") - if title_position in self.TITLE_POSITIONS: - original_href = cover_url - original_buffer = BytesIO() - image.save(original_buffer, format="PNG") - original_content = original_buffer.getvalue() - if not original_href: - original_href = Hyperlink.generic_uri( - data_source, - work.presentation_edition.primary_identifier, - Hyperlink.IMAGE, - content=original_content, - ) - - image = self._process_cover_image(work, image, title_position) - - original_rights_explanation = None - if rights_uri != RightsStatus.IN_COPYRIGHT: - original_rights_explanation = rights_explanation - original = LinkData( - Hyperlink.IMAGE, - original_href, - rights_uri=rights_uri, - rights_explanation=original_rights_explanation, - content=original_content, - ) - derivation_settings = dict(title_position=title_position) - if rights_uri in RightsStatus.ALLOWS_DERIVATIVES: - cover_rights_explanation = ( - "The original image license allows derivatives." - ) - else: - cover_href = cover_url - - return original, derivation_settings, cover_href, cover_rights_explanation - - def _get_collection_from_pools(self, identifier_type, identifier): - pools = self.load_licensepools( - flask.request.library, identifier_type, identifier - ) - if isinstance(pools, ProblemDetail): - return pools - if not pools: - return NO_LICENSES - collection = pools[0].collection - return collection - - def change_book_cover(self, identifier_type, identifier, mirrors=None): - """Save a new book cover based on the submitted form.""" - self.require_librarian(flask.request.library) - - data_source = DataSource.lookup(self._db, DataSource.LIBRARY_STAFF) - - work = self.load_work(flask.request.library, identifier_type, identifier) - if isinstance(work, ProblemDetail): - return work - - rights_uri = flask.request.form.get("rights_status") - rights_explanation = flask.request.form.get("rights_explanation") - - if not rights_uri: - return INVALID_IMAGE.detailed(_("You must specify the image's license.")) - - collection = self._get_collection_from_pools(identifier_type, identifier) - if isinstance(collection, ProblemDetail): - return collection - - # Look for an appropriate mirror to store this cover image. Since the - # mirror should be used for covers, we don't need a mirror for books. 
- mirrors = mirrors or dict( - covers_mirror=MirrorUploader.for_collection( - collection, ExternalIntegrationLink.COVERS - ), - books_mirror=None, - ) - if not mirrors.get(ExternalIntegrationLink.COVERS): - return INVALID_CONFIGURATION_OPTION.detailed( - _("Could not find a storage integration for uploading the cover.") - ) - - image = self.generate_cover_image(work, identifier_type, identifier) - if isinstance(image, ProblemDetail): - return image - - ( - original, - derivation_settings, - cover_href, - cover_rights_explanation, - ) = self._original_cover_info( - image, work, data_source, rights_uri, rights_explanation - ) - - buffer = BytesIO() - image.save(buffer, format="PNG") - content = buffer.getvalue() - - if not cover_href: - cover_href = Hyperlink.generic_uri( - data_source, - work.presentation_edition.primary_identifier, - Hyperlink.IMAGE, - content=content, - ) - - cover_data = LinkData( - Hyperlink.IMAGE, - href=cover_href, - media_type=Representation.PNG_MEDIA_TYPE, - content=content, - rights_uri=rights_uri, - rights_explanation=cover_rights_explanation, - original=original, - transformation_settings=derivation_settings, - ) - - presentation_policy = PresentationCalculationPolicy( - choose_edition=False, - set_edition_metadata=False, - classify=False, - choose_summary=False, - calculate_quality=False, - choose_cover=True, - regenerate_opds_entries=True, - regenerate_marc_record=True, - update_search_index=False, - ) - - replacement_policy = ReplacementPolicy( - links=True, - # link_content is false because we already have the content. - # We don't want the metadata layer to try to fetch it again. - link_content=False, - mirrors=mirrors, - presentation_calculation_policy=presentation_policy, - ) - - metadata = Metadata(data_source, links=[cover_data]) - metadata.apply( - work.presentation_edition, collection, replace=replacement_policy - ) - - # metadata.apply only updates the edition, so we also need - # to update the work. - work.calculate_presentation(policy=presentation_policy) - - return Response(_("Success"), 200) - def custom_lists(self, identifier_type, identifier): self.require_librarian(flask.request.library) diff --git a/api/admin/opds.py b/api/admin/opds.py index 55171f09a4..2a4df51c95 100644 --- a/api/admin/opds.py +++ b/api/admin/opds.py @@ -2,9 +2,7 @@ from api.opds import LibraryAnnotator from core.lane import Pagination -from core.mirror import MirrorUploader from core.model import DataSource, LicensePool -from core.model.configuration import ExternalIntegrationLink from core.opds import AcquisitionFeed, VerboseAnnotator @@ -66,22 +64,6 @@ def annotate_work_entry( ), ) - # If there is a storage integration for the collection, changing the cover is allowed. 
- mirror = MirrorUploader.for_collection( - active_license_pool.collection, ExternalIntegrationLink.COVERS - ) - if mirror: - feed.add_link_to_entry( - entry, - rel="http://librarysimplified.org/terms/rel/change_cover", - href=self.url_for( - "work_change_book_cover", - identifier_type=identifier.type, - identifier=identifier.identifier, - _external=True, - ), - ) - def suppressed_url(self, pagination): kwargs = dict(list(pagination.items())) return self.url_for("suppressed", _external=True, **kwargs) diff --git a/api/admin/routes.py b/api/admin/routes.py index ccabccf285..41c218cc98 100644 --- a/api/admin/routes.py +++ b/api/admin/routes.py @@ -155,32 +155,6 @@ def work_classifications(identifier_type, identifier): ) -@library_route( - "/admin/works///preview_book_cover", - methods=["POST"], -) -@has_library -@returns_problem_detail -@requires_admin -def work_preview_book_cover(identifier_type, identifier): - return app.manager.admin_work_controller.preview_book_cover( - identifier_type, identifier - ) - - -@library_route( - "/admin/works///change_book_cover", - methods=["POST"], -) -@has_library -@returns_problem_detail -@requires_admin -def work_change_book_cover(identifier_type, identifier): - return app.manager.admin_work_controller.change_book_cover( - identifier_type, identifier - ) - - @library_route( "/admin/works///lists", methods=["GET", "POST"] ) diff --git a/api/odl2.py b/api/odl2.py index fbf32a14d9..b4b8bc0dbd 100644 --- a/api/odl2.py +++ b/api/odl2.py @@ -130,7 +130,6 @@ def __init__( http_get=None, content_modifier=None, map_from_collection=None, - mirrors=None, ): """Initialize a new instance of ODL2Importer class. @@ -161,9 +160,6 @@ def __init__( :param map_from_collection: Identifier mapping :type map_from_collection: Dict - - :param mirrors: A dictionary of different MirrorUploader objects for different purposes - :type mirrors: Dict[MirrorUploader] """ super().__init__( db, @@ -174,7 +170,6 @@ def __init__( http_get, content_modifier, map_from_collection, - mirrors, ) self._logger = logging.getLogger(__name__) diff --git a/bin/repair/mirror_resources b/bin/repair/mirror_resources deleted file mode 100755 index 3613da8de8..0000000000 --- a/bin/repair/mirror_resources +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python -"""Mirror resources that haven't been mirrored yet.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..", "..") -sys.path.append(os.path.abspath(package_dir)) -from core.scripts import MirrorResourcesScript - -MirrorResourcesScript().run() diff --git a/core/feed/annotator/admin.py b/core/feed/annotator/admin.py index 193b8e70f2..8b5e903e29 100644 --- a/core/feed/annotator/admin.py +++ b/core/feed/annotator/admin.py @@ -6,9 +6,7 @@ from core.feed.annotator.verbose import VerboseAnnotator from core.feed.types import FeedData, Link, WorkEntry from core.lane import Pagination -from core.mirror import MirrorUploader from core.model import DataSource -from core.model.configuration import ExternalIntegrationLink from core.model.library import Library @@ -75,24 +73,6 @@ def annotate_work_entry( ) ) - # If there is a storage integration for the collection, changing the cover is allowed. 
- if active_license_pool: - mirror = MirrorUploader.for_collection( - active_license_pool.collection, ExternalIntegrationLink.COVERS - ) - if mirror: - entry.computed.other_links.append( - Link( - href=self.url_for( - "work_change_book_cover", - identifier_type=identifier.type, - identifier=identifier.identifier, - _external=True, - ), - rel="http://librarysimplified.org/terms/rel/change_cover", - ) - ) - def suppressed_url(self, pagination: Pagination) -> str: kwargs = dict(list(pagination.items())) return self.url_for("suppressed", _external=True, **kwargs) diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 9290b68d0f..e73ae1dc8b 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -41,7 +41,6 @@ get_one, get_one_or_create, ) -from .model.configuration import ExternalIntegrationLink from .model.licensing import LicenseFunctions, LicenseStatus from .util import LanguageCodes from .util.datetime_helpers import to_utc, utc_now @@ -63,10 +62,8 @@ def __init__( formats=False, rights=False, link_content=False, - mirrors=None, content_modifier=None, analytics=None, - http_get=None, even_if_not_apparently_updated=False, presentation_calculation_policy=None, ): @@ -78,10 +75,8 @@ def __init__( self.formats = formats self.link_content = link_content self.even_if_not_apparently_updated = even_if_not_apparently_updated - self.mirrors = mirrors self.content_modifier = content_modifier self.analytics = analytics - self.http_get = http_get self.presentation_calculation_policy = ( presentation_calculation_policy or PresentationCalculationPolicy() ) @@ -489,16 +484,6 @@ def __repr__(self): content, ) - def mirror_type(self): - """Returns the type of mirror that should be used for the link.""" - if self.rel in [Hyperlink.IMAGE, Hyperlink.THUMBNAIL_IMAGE]: - return ExternalIntegrationLink.COVERS - - if self.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: - return ExternalIntegrationLink.OPEN_ACCESS_BOOKS - elif self.rel == Hyperlink.GENERIC_OPDS_ACQUISITION: - return ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS - class MeasurementData: def __init__(self, quantity_measured, value, weight=1, taken_at=None): @@ -684,234 +669,7 @@ def apply(self, _db): ) -class MetaToModelUtility: - """ - Contains functionality common to both CirculationData and Metadata. - """ - - log = logging.getLogger("Abstract metadata layer - mirror code") - - def mirror_link(self, model_object, data_source, link, link_obj, policy): - """Retrieve a copy of the given link and make sure it gets - mirrored. If it's a full-size image, create a thumbnail and - mirror that too. - - The model_object can be either a pool or an edition. - """ - if link_obj.rel not in Hyperlink.MIRRORED: - # we only host locally open-source epubs and cover images - if link.href: - # The log message only makes sense if the resource is - # hosted elsewhere. 
- self.log.info("Not mirroring %s: rel=%s", link.href, link_obj.rel) - return - - if link.rights_uri and link.rights_uri == RightsStatus.IN_COPYRIGHT: - self.log.info(f"Not mirroring {link.href}: rights status={link.rights_uri}") - return - - mirror_type = link.mirror_type() - - if mirror_type in policy.mirrors: - mirror = policy.mirrors[mirror_type] - if not mirror: - return - else: - self.log.info("No mirror uploader with key %s found" % mirror_type) - return - - http_get = policy.http_get - - _db = Session.object_session(link_obj) - original_url = link.href - - self.log.info("About to mirror %s" % original_url) - pools = [] - edition = None - title = None - identifier = None - if model_object: - if isinstance(model_object, LicensePool): - pools = [model_object] - identifier = model_object.identifier - - if ( - identifier - and identifier.primarily_identifies - and identifier.primarily_identifies[0] - ): - edition = identifier.primarily_identifies[0] - elif isinstance(model_object, Edition): - pools = model_object.license_pools - identifier = model_object.primary_identifier - edition = model_object - if edition and edition.title: - title = edition.title - else: - title = getattr(self, "title", None) or None - - if (not identifier) or ( - link_obj.identifier and identifier != link_obj.identifier - ): - # insanity found - self.log.warning( - "Tried to mirror a link with an invalid identifier %r" % identifier - ) - return - - max_age = None - if policy.link_content: - # We want to fetch the representation again, even if we - # already have a recent usable copy. If we fetch it and it - # hasn't changed, we'll keep using the one we have. - max_age = 0 - - # This will fetch a representation of the original and - # store it in the database. - representation, is_new = Representation.get( - _db, - link.href, - do_get=http_get, - presumed_media_type=link.media_type, - max_age=max_age, - ) - - # Make sure the (potentially newly-fetched) representation is - # associated with the resource. - link_obj.resource.representation = representation - - # If we couldn't fetch this representation, don't mirror it, - # and if this was an open/protected access link, then suppress the associated - # license pool until someone fixes it manually. - # The license pool to suppress will be either the passed-in model_object (if it's of type pool), - # or the license pool associated with the passed-in model object (if it's of type edition). - if representation.fetch_exception: - if pools and link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: - for pool in pools: - pool.suppressed = True - pool.license_exception = ( - "Fetch exception: %s" % representation.fetch_exception - ) - self.log.error(pool.license_exception) - return - - # If we fetched the representation and it hasn't changed, - # the previously mirrored version is fine. Don't mirror it - # again. - if representation.status_code == 304 and representation.mirror_url: - self.log.info( - "Representation has not changed, assuming mirror at %s is up to date.", - representation.mirror_url, - ) - return - - if representation.status_code // 100 not in (2, 3): - self.log.info( - "Representation %s gave %s status code, not mirroring.", - representation.url, - representation.status_code, - ) - return - - if policy.content_modifier: - policy.content_modifier(representation) - - # The metadata may have some idea about the media type for this - # LinkObject, but it could be wrong. If the representation we - # actually just saw is a mirrorable media type, that takes - # precedence. 
If we were expecting this link to be mirrorable - # but we actually saw something that's not, assume our original - # metadata was right and the server told us the wrong media type. - if representation.media_type and representation.mirrorable_media_type: - link.media_type = representation.media_type - - if not representation.mirrorable_media_type: - if link.media_type: - self.log.info( - "Saw unsupported media type for %s: %s. Assuming original media type %s is correct", - representation.url, - representation.media_type, - link.media_type, - ) - representation.media_type = link.media_type - else: - self.log.info( - "Not mirroring %s: unsupported media type %s", - representation.url, - representation.media_type, - ) - return - - # Determine the best URL to use when mirroring this - # representation. - if ( - link.media_type in Representation.BOOK_MEDIA_TYPES - or link.media_type in Representation.AUDIOBOOK_MEDIA_TYPES - ): - url_title = title or identifier.identifier - extension = representation.extension() - mirror_url = mirror.book_url( - identifier, - data_source=data_source, - title=url_title, - extension=extension, - open_access=link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD, - ) - else: - filename = representation.default_filename( - link_obj, representation.media_type - ) - mirror_url = mirror.cover_image_url(data_source, identifier, filename) - - # Mirror it. - collection = pools[0].collection if pools else None - mirror.mirror_one(representation, mirror_to=mirror_url, collection=collection) - - # If we couldn't mirror an open/protected access link representation, suppress - # the license pool until someone fixes it manually. - if representation.mirror_exception: - if pools and link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: - for pool in pools: - pool.suppressed = True - pool.license_exception = ( - "Mirror exception: %s" % representation.mirror_exception - ) - self.log.error(pool.license_exception) - - if link_obj.rel == Hyperlink.IMAGE: - # Create and mirror a thumbnail. - thumbnail_filename = representation.default_filename( - link_obj, Representation.PNG_MEDIA_TYPE - ) - thumbnail_url = mirror.cover_image_url( - data_source, - identifier, - thumbnail_filename, - Edition.MAX_THUMBNAIL_HEIGHT, - ) - thumbnail, is_new = representation.scale( - max_height=Edition.MAX_THUMBNAIL_HEIGHT, - max_width=Edition.MAX_THUMBNAIL_WIDTH, - destination_url=thumbnail_url, - destination_media_type=Representation.PNG_MEDIA_TYPE, - force=True, - ) - if is_new: - # A thumbnail was created distinct from the original - # image. Mirror it as well. - mirror.mirror_one( - thumbnail, mirror_to=thumbnail_url, collection=collection - ) - - if link_obj.rel in Hyperlink.SELF_HOSTED_BOOKS: - # If we mirrored book content successfully, remove it from - # the database to save space. We do keep images in case we - # ever need to resize them or mirror them elsewhere. - if representation.mirrored_at and not representation.mirror_exception: - representation.content = None - - -class CirculationData(MetaToModelUtility): +class CirculationData: """Information about actual copies of a book that can be delivered to patrons. @@ -1189,8 +947,7 @@ def apply(self, _db, collection, replace=None): data_source = self.data_source(_db) identifier = self.primary_identifier(_db) - # First, make sure all links in self.links are mirrored (if necessary) - # and associated with the book's identifier. + # First, make sure all links in self.links are associated with the book's identifier. 
# TODO: be able to handle the case where the URL to a link changes or # a link disappears. @@ -1206,13 +963,6 @@ def apply(self, _db, collection, replace=None): ) link_objects[link] = link_obj - for link in self.links: - if link.rel in Hyperlink.CIRCULATION_ALLOWED: - link_obj = link_objects[link] - if replace.mirrors: - # We need to mirror this resource. - self.mirror_link(pool, data_source, link, link_obj, replace) - # Next, make sure the DeliveryMechanisms associated # with the book reflect the formats in self.formats. old_lpdms = new_lpdms = [] @@ -1328,7 +1078,7 @@ def _availability_needs_update(self, pool): return self.last_checked >= pool.last_checked -class Metadata(MetaToModelUtility): +class Metadata: """A (potentially partial) set of metadata for a published work.""" @@ -1952,69 +1702,27 @@ def _key(classification): if self.circulation: self.circulation.apply(_db, collection, replace) - # obtains a presentation_edition for the title, which will later be used to get a mirror link. + # obtains a presentation_edition for the title has_image = any([link.rel == Hyperlink.IMAGE for link in self.links]) for link in self.links: link_obj = link_objects[link] if link_obj.rel == Hyperlink.THUMBNAIL_IMAGE and has_image: - # This is a thumbnail but we also have a full-sized image link, - # so we don't need to separately mirror the thumbnail. + # This is a thumbnail but we also have a full-sized image link continue - if replace.mirrors: - # We need to mirror this resource. If it's an image, a - # thumbnail may be provided as a side effect. - self.mirror_link(edition, data_source, link, link_obj, replace) elif link.thumbnail: - # We don't need to mirror this image, but we do need - # to make sure that its thumbnail exists locally and + # We need to make sure that its thumbnail exists locally and # is associated with the original image. self.make_thumbnail(data_source, link, link_obj) - # Make sure the work we just did shows up. This needs to happen after mirroring - # so mirror urls are available. + # Make sure the work we just did shows up. made_changes = edition.calculate_presentation( policy=replace.presentation_calculation_policy ) if made_changes: work_requires_new_presentation_edition = True - # The metadata wrangler doesn't need information from these data sources. - # We don't need to send it information it originally provided, and - # Overdrive makes metadata accessible to everyone without buying licenses - # for the book, so the metadata wrangler can obtain it directly from - # Overdrive. - # TODO: Remove Bibliotheca and Axis 360 from this list. - METADATA_UPLOAD_BLACKLIST = [ - DataSource.METADATA_WRANGLER, - DataSource.OVERDRIVE, - DataSource.BIBLIOTHECA, - DataSource.AXIS_360, - ] - if work_requires_new_presentation_edition and ( - data_source.name not in METADATA_UPLOAD_BLACKLIST - ): - # Create a transient failure CoverageRecord for this edition - # so it will be processed by the MetadataUploadCoverageProvider. - internal_processing = DataSource.lookup(_db, DataSource.INTERNAL_PROCESSING) - - # If there's already a CoverageRecord, don't change it to transient failure. - # TODO: Once the metadata wrangler can handle it, we'd like to re-sync the - # metadata every time there's a change. 
For now, - cr = CoverageRecord.lookup( - edition, - internal_processing, - operation=CoverageRecord.METADATA_UPLOAD_OPERATION, - ) - if not cr: - CoverageRecord.add_for( - edition, - internal_processing, - operation=CoverageRecord.METADATA_UPLOAD_OPERATION, - status=CoverageRecord.TRANSIENT_FAILURE, - ) - # Update the coverage record for this edition and data # source. We omit the collection information, even if we know # which collection this is, because we only changed metadata. diff --git a/core/model/configuration.py b/core/model/configuration.py index b5713b1e65..fcfe36d79e 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -9,7 +9,6 @@ from enum import Enum from typing import TYPE_CHECKING, Any, Iterable, Iterator, List, Optional, TypeVar -from flask_babel import lazy_gettext as _ from sqlalchemy import Column, ForeignKey, Index, Integer, Unicode from sqlalchemy.orm import Mapped, relationship from sqlalchemy.orm.session import Session @@ -64,61 +63,6 @@ class ExternalIntegrationLink(Base): ) purpose = Column(Unicode, index=True) - mirror_settings = [ - { - "key": COVERS_KEY, - "type": COVERS, - "description_type": "cover images", - "label": "Covers Mirror", - }, - { - "key": OPEN_ACCESS_BOOKS_KEY, - "type": OPEN_ACCESS_BOOKS, - "description_type": "free books", - "label": "Open Access Books Mirror", - }, - { - "key": PROTECTED_ACCESS_BOOKS_KEY, - "type": PROTECTED_ACCESS_BOOKS, - "description_type": "self-hosted, commercially licensed books", - "label": "Protected Access Books Mirror", - }, - { - "key": ANALYTICS_KEY, - "type": ANALYTICS, - "description_type": "Analytics", - "label": "Analytics Mirror", - }, - ] - settings = [] - - for mirror_setting in mirror_settings: - mirror_type = mirror_setting["type"] - mirror_description_type = mirror_setting["description_type"] - mirror_label = mirror_setting["label"] - - settings.append( - { - "key": f"{mirror_type.lower()}_integration_id", - "label": _(mirror_label), - "description": _( - "Any {} encountered while importing content from this collection " - "can be mirrored to a server you control.".format( - mirror_description_type - ) - ), - "type": "select", - "options": [ - { - "key": NO_MIRROR_INTEGRATION, - "label": _(f"None - Do not mirror {mirror_description_type}"), - } - ], - } - ) - - COLLECTION_MIRROR_SETTINGS = settings - class ExternalIntegration(Base): diff --git a/core/model/constants.py b/core/model/constants.py index 66052d9a55..4457f45071 100644 --- a/core/model/constants.py +++ b/core/model/constants.py @@ -262,8 +262,6 @@ class LinkRelations: ALTERNATE, SAMPLE, ] - MIRRORED = [OPEN_ACCESS_DOWNLOAD, GENERIC_OPDS_ACQUISITION, IMAGE, THUMBNAIL_IMAGE] - SELF_HOSTED_BOOKS = list(set(CIRCULATION_ALLOWED) & set(MIRRORED)) class MediaTypes: diff --git a/core/model/resource.py b/core/model/resource.py index 173d4dab64..299839862e 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -29,7 +29,6 @@ from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.orm import Mapped, backref, relationship from sqlalchemy.orm.session import Session -from sqlalchemy.sql.expression import or_ from ..util.datetime_helpers import utc_now from ..util.http import HTTP @@ -41,7 +40,7 @@ MediaTypes, ) from .edition import Edition -from .licensing import LicensePool, LicensePoolDeliveryMechanism +from .licensing import LicensePoolDeliveryMechanism if TYPE_CHECKING: from core.model import CachedMARCFile, Work # noqa: autoflake @@ -410,41 +409,6 @@ class Hyperlink(Base, LinkRelations): ) resource: 
Resource - @classmethod - def unmirrored(cls, collection): - """Find all Hyperlinks associated with an item in the - given Collection that could be mirrored but aren't. - TODO: We don't cover the case where an image was mirrored but no - thumbnail was created of it. (We do cover the case where the thumbnail - was created but not mirrored.) - """ - from .identifier import Identifier - - _db = Session.object_session(collection) - qu = ( - _db.query(Hyperlink) - .join(Hyperlink.identifier) - .join(Identifier.licensed_through) - .outerjoin(Hyperlink.resource) - .outerjoin(Resource.representation) - ) - qu = qu.filter(LicensePool.collection_id == collection.id) - qu = qu.filter(Hyperlink.rel.in_(Hyperlink.MIRRORED)) - qu = qu.filter(Hyperlink.data_source == collection.data_source) - qu = qu.filter( - or_( - Representation.id == None, - Representation.mirror_url == None, - ) - ) - # Without this ordering, the query does a table scan looking for - # items that match. With the ordering, they're all at the front. - qu = qu.order_by( - Representation.mirror_url.asc().nullsfirst(), - Representation.id.asc().nullsfirst(), - ) - return qu - @classmethod def generic_uri(cls, data_source, identifier, rel, content=None): """Create a generic URI for the other end of this hyperlink. diff --git a/core/opds2_import.py b/core/opds2_import.py index 11955be837..39edf52071 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -25,7 +25,6 @@ ConfigurationFormItemType, FormField, ) -from core.mirror import MirrorUploader from core.model.configuration import ConfigurationSetting, HasExternalIntegration from core.model.integration import IntegrationConfiguration @@ -165,7 +164,6 @@ def __init__( http_get: Callable | None = None, content_modifier: Callable | None = None, map_from_collection: dict | None = None, - mirrors: dict[str, MirrorUploader] | None = None, ): """Initialize a new instance of OPDS2Importer class. @@ -184,7 +182,6 @@ def __init__( :param content_modifier: A function that may modify-in-place representations (such as images and EPUB documents) as they come in from the network. :param map_from_collection: Identifier mapping - :param mirrors: A dictionary of different MirrorUploader objects for different purposes """ super().__init__( db, @@ -194,7 +191,6 @@ def __init__( http_get, content_modifier, map_from_collection, - mirrors, ) if not isinstance(parser, RWPMManifestParser): diff --git a/core/opds_import.py b/core/opds_import.py index 01673216b4..7fc4dc618b 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -40,7 +40,6 @@ SubjectData, TimestampData, ) -from .mirror import MirrorUploader from .model import ( Collection, CoverageRecord, @@ -57,7 +56,7 @@ Subject, get_one, ) -from .model.configuration import ExternalIntegrationLink, HasExternalIntegration +from .model.configuration import HasExternalIntegration from .monitor import CollectionMonitor from .util.datetime_helpers import datetime_utc, to_utc, utc_now from .util.http import HTTP, BadResponseException @@ -307,7 +306,6 @@ def __init__( http_get=None, content_modifier=None, map_from_collection=None, - mirrors=None, ): """:param collection: LicensePools created by this OPDS import will be associated with the given Collection. If this is None, @@ -321,9 +319,6 @@ def __init__( here. This is only for use when you are importing OPDS metadata without any particular Collection in mind. - :param mirrors: A dictionary of different MirrorUploader objects for - different purposes. 
- :param http_get: Use this method to make an HTTP GET request. This can be replaced with a stub method for testing purposes. @@ -332,8 +327,6 @@ def __init__( come in from the network. :param map_from_collection - - :param mirrors """ self._db = _db self.log = logging.getLogger("OPDS Importer") @@ -357,32 +350,10 @@ def __init__( self.data_source_name = data_source_name self.identifier_mapping = identifier_mapping - # Check to see if a mirror for each purpose was passed in. - # If not, then attempt to create one. - covers_mirror = ( - mirrors.get(ExternalIntegrationLink.COVERS, None) if mirrors else None - ) - books_mirror = ( - mirrors.get(ExternalIntegrationLink.OPEN_ACCESS_BOOKS, None) - if mirrors - else None - ) self.primary_identifier_source = None if collection: - if not covers_mirror: - # If this Collection is configured to mirror the assets it - # discovers, this will create a MirrorUploader for that - # Collection for its purpose. Otherwise, this will return None. - covers_mirror = MirrorUploader.for_collection( - collection, ExternalIntegrationLink.COVERS - ) - if not books_mirror: - books_mirror = MirrorUploader.for_collection( - collection, ExternalIntegrationLink.OPEN_ACCESS_BOOKS - ) self.primary_identifier_source = collection.primary_identifier_source - self.mirrors = dict(covers_mirror=covers_mirror, books_mirror=books_mirror) self.content_modifier = content_modifier # In general, we are cautious when mirroring resources so that @@ -587,9 +558,7 @@ def import_edition_from_metadata(self, metadata): link_content=True, formats=True, even_if_not_apparently_updated=True, - mirrors=self.mirrors, content_modifier=self.content_modifier, - http_get=self.http_get, ) metadata.apply( edition=edition, diff --git a/core/scripts.py b/core/scripts.py index 364fd7bf71..44bb746dd4 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -27,13 +27,7 @@ from .coverage import CollectionCoverageProviderJob, CoverageProviderProgress from .external_search import ExternalSearchIndex, Filter, SearchIndexCoverageProvider from .lane import Lane -from .metadata_layer import ( - LinkData, - MetaToModelUtility, - ReplacementPolicy, - TimestampData, -) -from .mirror import MirrorUploader +from .metadata_layer import TimestampData from .model import ( BaseCoverageRecord, CachedFeed, @@ -44,14 +38,12 @@ DataSource, Edition, ExternalIntegration, - Hyperlink, Identifier, Library, LicensePool, LicensePoolDeliveryMechanism, Patron, PresentationCalculationPolicy, - Representation, SessionManager, Subject, Timestamp, @@ -62,7 +54,6 @@ get_one_or_create, production_session, ) -from .model.configuration import ExternalIntegrationLink from .model.listeners import site_configuration_has_changed from .monitor import CollectionMonitor, ReaperMonitor from .opds_import import OPDSImporter, OPDSImportMonitor @@ -2062,174 +2053,6 @@ def run_monitor(self, collection, force=None): monitor.run() -class MirrorResourcesScript(CollectionInputScript): - """Make sure that all mirrorable resources in a collection have - in fact been mirrored. - """ - - # This object contains the actual logic of mirroring. - MIRROR_UTILITY = MetaToModelUtility() - - @classmethod - def arg_parser(cls): - parser = super().arg_parser() - parser.add_argument( - "--collection-type", - help="Collection type. 
Valid values are: OPEN_ACCESS (default), PROTECTED_ACCESS.", - type=CollectionType, - choices=list(CollectionType), - default=CollectionType.OPEN_ACCESS, - ) - return parser - - def do_run(self, cmd_args=None): - parsed = self.parse_command_line(self._db, cmd_args=cmd_args) - collections = parsed.collections - collection_type = parsed.collection_type - if not collections: - # Assume they mean all collections. - collections = self._db.query(Collection).all() - - # But only process collections that have an associated MirrorUploader. - for collection, policy in self.collections_with_uploader( - collections, collection_type - ): - self.process_collection(collection, policy) - - def collections_with_uploader( - self, collections, collection_type=CollectionType.OPEN_ACCESS - ): - """Filter out collections that have no MirrorUploader. - - :yield: 2-tuples (Collection, ReplacementPolicy). The - ReplacementPolicy is the appropriate one for this script - to use for that Collection. - """ - for collection in collections: - covers = MirrorUploader.for_collection( - collection, ExternalIntegrationLink.COVERS - ) - books_mirror_type = ( - ExternalIntegrationLink.OPEN_ACCESS_BOOKS - if collection_type == CollectionType.OPEN_ACCESS - else ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS - ) - books = MirrorUploader.for_collection(collection, books_mirror_type) - if covers or books: - mirrors = { - ExternalIntegrationLink.COVERS: covers, - books_mirror_type: books, - } - policy = self.replacement_policy(mirrors) - yield collection, policy - else: - self.log.info("Skipping %r as it has no MirrorUploader.", collection) - - @classmethod - def replacement_policy(cls, mirrors): - """Create a ReplacementPolicy for this script that uses the - given mirrors. - """ - return ReplacementPolicy( - mirrors=mirrors, - link_content=True, - even_if_not_apparently_updated=True, - http_get=Representation.cautious_http_get, - ) - - def process_collection(self, collection, policy, unmirrored=None): - """Make sure every mirrorable resource in this collection has - been mirrored. - - :param unmirrored: A replacement for Hyperlink.unmirrored, - for use in tests. - - """ - unmirrored = unmirrored or Hyperlink.unmirrored - for link in unmirrored(collection): - self.process_item(collection, link, policy) - self._db.commit() - - @classmethod - def derive_rights_status(cls, license_pool, resource): - """Make a best guess about the rights status for the given - resource. - - This relies on the information having been available at one point, - but having been stored in the database at a slight remove. - """ - rights_status = None - if not license_pool: - return None - if resource: - lpdm = resource.as_delivery_mechanism_for(license_pool) - # When this Resource was associated with this LicensePool, - # the rights information was recorded in its - # LicensePoolDeliveryMechanism. - if lpdm: - rights_status = lpdm.rights_status - if not rights_status: - # We could not find a LicensePoolDeliveryMechanism for - # this particular resource, but if every - # LicensePoolDeliveryMechanism has the same rights - # status, we can assume it's that one. - statuses = list({x.rights_status for x in license_pool.delivery_mechanisms}) - if len(statuses) == 1: - [rights_status] = statuses - if rights_status: - rights_status = rights_status.uri - return rights_status - - def process_item(self, collection, link_obj, policy): - """Determine the URL that needs to be mirrored and (for books) - the rationale that lets us mirror that URL. Then mirror it. 
- """ - identifier = link_obj.identifier - license_pool, ignore = LicensePool.for_foreign_id( - self._db, - collection.data_source, - identifier.type, - identifier.identifier, - collection=collection, - autocreate=False, - ) - if not license_pool: - # This shouldn't happen. - self.log.warning( - "Could not find LicensePool for %r, skipping it rather than mirroring something we shouldn't." - ) - return - resource = link_obj.resource - - if link_obj.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: - rights_status = self.derive_rights_status(license_pool, resource) - if not rights_status: - self.log.warning( - "Could not unambiguously determine rights status for %r, skipping.", - link_obj, - ) - return - else: - # For resources like book covers, the rights status is - # irrelevant -- we rely on fair use. - rights_status = None - - # Mock up a LinkData that MetaToModelUtility can use to - # mirror this link (or decide not to mirror it). - linkdata = LinkData( - rel=link_obj.rel, href=resource.url, rights_uri=rights_status - ) - - # Mirror the link (or not). - self.MIRROR_UTILITY.mirror_link( - model_object=license_pool, - data_source=collection.data_source, - link=linkdata, - link_obj=link_obj, - policy=policy, - ) - - class CheckContributorNamesInDB(IdentifierInputScript): """Checks that contributor sort_names are display_names in "last name, comma, other names" format. diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index c22d98b754..575b870229 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -10,11 +10,9 @@ CANNOT_DELETE_COLLECTION_WITH_CHILDREN, COLLECTION_NAME_ALREADY_IN_USE, INCOMPLETE_CONFIGURATION, - INTEGRATION_GOAL_CONFLICT, MISSING_COLLECTION, MISSING_COLLECTION_NAME, MISSING_PARENT, - MISSING_SERVICE, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, @@ -29,8 +27,6 @@ create, get_one, ) -from core.model.configuration import ExternalIntegrationLink -from core.s3 import S3UploaderConfiguration from core.selftest import HasSelfTests from tests.fixtures.api_admin import SettingsControllerFixture from tests.fixtures.database import DatabaseTransactionFixture @@ -54,127 +50,6 @@ def test_collections_get_with_no_collections( assert ExternalIntegration.OVERDRIVE in names assert ExternalIntegration.OPDS_IMPORT in names - def test_collections_get_collection_protocols( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - old_prior_test_results = HasSelfTests.prior_test_results - setattr( - HasSelfTests, - "prior_test_results", - settings_ctrl_fixture.mock_prior_test_results, - ) - - l1 = settings_ctrl_fixture.ctrl.db.default_library() - [c1] = l1.collections - - # When there is no storage integration configured, - # the protocols will not offer a 'mirror_integration_id' - # setting for covers or books. - with settings_ctrl_fixture.request_context_with_admin("/"): - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - protocols = response.get("protocols") - for protocol in protocols: - assert all( - [ - not s.get("key").endswith("mirror_integration_id") - for s in protocol["settings"] - if s - ] - ) - - # When storage integrations are configured, each protocol will - # offer a 'mirror_integration_id' setting for covers and books. 
- storage1 = settings_ctrl_fixture.ctrl.db.external_integration( - name="integration 1", - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - settings={ - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "covers", - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "open-access-books", - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "protected-access-books", - }, - ) - storage2 = settings_ctrl_fixture.ctrl.db.external_integration( - name="integration 2", - protocol="Some other protocol", - goal=ExternalIntegration.STORAGE_GOAL, - settings={ - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "covers", - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "open-access-books", - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "protected-access-books", - }, - ) - - with settings_ctrl_fixture.request_context_with_admin("/"): - controller = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller - ) - response = controller.process_collections() - protocols = response.get("protocols") - for protocol in protocols: - mirror_settings = [ - x - for x in protocol["settings"] - if x.get("key").endswith("mirror_integration_id") - ] - - covers_mirror = mirror_settings[0] - open_access_books_mirror = mirror_settings[1] - protected_access_books_mirror = mirror_settings[2] - assert "Covers Mirror" == covers_mirror["label"] - assert "Open Access Books Mirror" == open_access_books_mirror["label"] - assert ( - "Protected Access Books Mirror" - == protected_access_books_mirror["label"] - ) - covers_mirror_option = covers_mirror["options"] - open_books_mirror_option = open_access_books_mirror["options"] - protected_books_mirror_option = protected_access_books_mirror["options"] - - # The first option is to disable mirroring on this - # collection altogether. - no_mirror_covers = covers_mirror_option[0] - no_mirror_open_books = open_books_mirror_option[0] - no_mirror_protected_books = protected_books_mirror_option[0] - assert controller.NO_MIRROR_INTEGRATION == no_mirror_covers["key"] - assert controller.NO_MIRROR_INTEGRATION == no_mirror_open_books["key"] - assert ( - controller.NO_MIRROR_INTEGRATION == no_mirror_protected_books["key"] - ) - - # The other options are to use one of the storage - # integrations to do the mirroring. 
- use_covers_mirror = [ - (x["key"], x["label"]) for x in covers_mirror_option[1:] - ] - use_open_books_mirror = [ - (x["key"], x["label"]) for x in open_books_mirror_option[1:] - ] - use_protected_books_mirror = [ - (x["key"], x["label"]) for x in protected_books_mirror_option[1:] - ] - - # Expect to have two separate mirrors - expect_covers = [ - (str(integration.id), integration.name) - for integration in (storage1, storage2) - ] - assert expect_covers == use_covers_mirror - expect_open_books = [ - (str(integration.id), integration.name) - for integration in (storage1, storage2) - ] - assert expect_open_books == use_open_books_mirror - expect_protected_books = [ - (str(integration.id), integration.name) - for integration in (storage1, storage2) - ] - assert expect_protected_books == use_protected_books_mirror - - setattr(HasSelfTests, "prior_test_results", old_prior_test_results) - def test_collections_get_collections_with_multiple_collections( self, settings_ctrl_fixture: SettingsControllerFixture ): @@ -192,16 +67,6 @@ def test_collections_get_collections_with_multiple_collections( name="Collection 2", protocol=ExternalIntegration.OVERDRIVE, ) - c2_storage = settings_ctrl_fixture.ctrl.db.external_integration( - protocol=ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL - ) - c2_external_integration_link = ( - settings_ctrl_fixture.ctrl.db.external_integration_link( - integration=c2.external_integration, - other_integration=c2_storage, - purpose=ExternalIntegrationLink.COVERS, - ) - ) c2.external_account_id = "1234" DatabaseTransactionFixture.set_settings( @@ -266,24 +131,6 @@ def test_collections_get_collections_with_multiple_collections( settings2 = coll2.get("settings", {}) settings3 = coll3.get("settings", {}) - assert controller.NO_MIRROR_INTEGRATION == settings1.get( - "covers_mirror_integration_id" - ) - assert controller.NO_MIRROR_INTEGRATION == settings1.get( - "books_mirror_integration_id" - ) - # Only added an integration for S3 storage for covers. - assert str(c2_storage.id) == settings2.get("covers_mirror_integration_id") - assert controller.NO_MIRROR_INTEGRATION == settings2.get( - "books_mirror_integration_id" - ) - assert controller.NO_MIRROR_INTEGRATION == settings3.get( - "covers_mirror_integration_id" - ) - assert controller.NO_MIRROR_INTEGRATION == settings3.get( - "books_mirror_integration_id" - ) - assert c1.external_account_id == settings1.get("external_account_id") assert c2.external_account_id == settings2.get("external_account_id") assert c3.external_account_id == settings3.get("external_account_id") @@ -815,101 +662,6 @@ def _base_collections_post_request(self, collection): ("url", "http://axis.test/"), ] - def test_collections_post_edit_mirror_integration( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - # The collection exists. - collection = settings_ctrl_fixture.ctrl.db.collection( - name="Collection 1", protocol=ExternalIntegration.AXIS_360 - ) - - # There is a storage integration not associated with the collection. - storage = settings_ctrl_fixture.ctrl.db.external_integration( - protocol=ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL - ) - - # It's possible to associate the storage integration with the - # collection for either a books or covers mirror. 
- base_request = self._base_collections_post_request(collection) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - base_request + [("books_mirror_integration_id", storage.id)] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.status_code == 200 - - # There is an external integration link to associate the collection's - # external integration with the storage integration for a books mirror. - external_integration_link = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegrationLink, - external_integration_id=collection.external_integration.id, - ) - assert isinstance(external_integration_link, ExternalIntegrationLink) - assert storage.id == external_integration_link.other_integration_id - - # It's possible to unset the mirror integration. - controller = settings_ctrl_fixture.manager.admin_collection_settings_controller - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - base_request - + [ - ( - "books_mirror_integration_id", - str(controller.NO_MIRROR_INTEGRATION), - ) - ] - ) - response = controller.process_collections() - assert response.status_code == 200 - external_integration_link = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegrationLink, - external_integration_id=collection.external_integration.id, - ) - assert None == external_integration_link - - # Providing a nonexistent integration ID gives an error. - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - base_request + [("books_mirror_integration_id", -200)] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == MISSING_SERVICE - - def test_cannot_set_non_storage_integration_as_mirror_integration( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - # The collection exists. - collection = settings_ctrl_fixture.ctrl.db.collection( - name="Collection 1", protocol=ExternalIntegration.AXIS_360 - ) - - # There is a storage integration not associated with the collection, - # which makes it possible to associate storage integrations - # with collections through the collections controller. - storage = settings_ctrl_fixture.ctrl.db.external_integration( - protocol=ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL - ) - - # Trying to set a non-storage integration (such as the - # integration associated with the collection's licenses) as - # the collection's mirror integration gives an error. 
- base_request = self._base_collections_post_request(collection) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - base_request - + [("books_mirror_integration_id", collection.external_integration.id)] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == INTEGRATION_GOAL_CONFLICT - def test_collections_post_edit_library_specific_configuration( self, settings_ctrl_fixture: SettingsControllerFixture ): diff --git a/tests/api/admin/controller/test_settings.py b/tests/api/admin/controller/test_settings.py index 4082c9ce49..a9823c0fd2 100644 --- a/tests/api/admin/controller/test_settings.py +++ b/tests/api/admin/controller/test_settings.py @@ -18,7 +18,6 @@ from core.integration.registry import IntegrationRegistry from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField from core.model import ExternalIntegration -from core.s3 import S3UploaderConfiguration from core.util.problem_detail import ProblemError from tests.fixtures.api_admin import AdminControllerFixture, SettingsControllerFixture @@ -135,86 +134,6 @@ def test_create_integration(self, settings_ctrl_fixture: SettingsControllerFixtu assert False == is_new2 assert DUPLICATE_INTEGRATION == i2 - def test__mirror_integration_settings( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - # If no storage integrations are available, return none - mirror_integration_settings = ( - settings_ctrl_fixture.manager.admin_settings_controller._mirror_integration_settings - ) - - assert None == mirror_integration_settings() - - # Storages created will appear for settings of any purpose - storage1 = settings_ctrl_fixture.ctrl.db.external_integration( - "protocol1", - ExternalIntegration.STORAGE_GOAL, - name="storage1", - settings={ - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "covers", - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "open-access-books", - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "protected-access-books", - }, - ) - - settings = mirror_integration_settings() - - assert settings[0]["key"] == "covers_mirror_integration_id" - assert settings[0]["label"] == "Covers Mirror" - assert ( - settings[0]["options"][0]["key"] - == settings_ctrl_fixture.manager.admin_settings_controller.NO_MIRROR_INTEGRATION - ) - assert settings[0]["options"][1]["key"] == str(storage1.id) - assert settings[1]["key"] == "books_mirror_integration_id" - assert settings[1]["label"] == "Open Access Books Mirror" - assert ( - settings[1]["options"][0]["key"] - == settings_ctrl_fixture.manager.admin_settings_controller.NO_MIRROR_INTEGRATION - ) - assert settings[1]["options"][1]["key"] == str(storage1.id) - assert settings[2]["label"] == "Protected Access Books Mirror" - assert ( - settings[2]["options"][0]["key"] - == settings_ctrl_fixture.manager.admin_settings_controller.NO_MIRROR_INTEGRATION - ) - assert settings[2]["options"][1]["key"] == str(storage1.id) - - storage2 = settings_ctrl_fixture.ctrl.db.external_integration( - "protocol2", - ExternalIntegration.STORAGE_GOAL, - name="storage2", - settings={ - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "covers", - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "open-access-books", - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "protected-access-books", - }, - ) - settings = mirror_integration_settings() - - assert settings[0]["key"] == "covers_mirror_integration_id" - assert settings[0]["label"] == 
"Covers Mirror" - assert ( - settings[0]["options"][0]["key"] - == settings_ctrl_fixture.manager.admin_settings_controller.NO_MIRROR_INTEGRATION - ) - assert settings[0]["options"][1]["key"] == str(storage1.id) - assert settings[0]["options"][2]["key"] == str(storage2.id) - assert settings[1]["key"] == "books_mirror_integration_id" - assert settings[1]["label"] == "Open Access Books Mirror" - assert ( - settings[1]["options"][0]["key"] - == settings_ctrl_fixture.manager.admin_settings_controller.NO_MIRROR_INTEGRATION - ) - assert settings[1]["options"][1]["key"] == str(storage1.id) - assert settings[1]["options"][2]["key"] == str(storage2.id) - assert settings[2]["label"] == "Protected Access Books Mirror" - assert ( - settings[2]["options"][0]["key"] - == settings_ctrl_fixture.manager.admin_settings_controller.NO_MIRROR_INTEGRATION - ) - assert settings[2]["options"][1]["key"] == str(storage1.id) - def test_check_url_unique(self, settings_ctrl_fixture: SettingsControllerFixture): # Verify our ability to catch duplicate integrations for a # given URL. diff --git a/tests/api/admin/controller/test_work_editor.py b/tests/api/admin/controller/test_work_editor.py index a43ff4091c..d5995d284d 100644 --- a/tests/api/admin/controller/test_work_editor.py +++ b/tests/api/admin/controller/test_work_editor.py @@ -1,15 +1,8 @@ -import base64 import json -import logging -import math -import os -import tempfile -from io import BytesIO import feedparser import flask import pytest -from PIL import Image, ImageChops from werkzeug.datastructures import ImmutableMultiDict from api.admin.controller.custom_lists import CustomListsController @@ -17,13 +10,10 @@ from api.admin.problem_details import ( EROTICA_FOR_ADULTS_ONLY, INCOMPATIBLE_GENRE, - INVALID_CONFIGURATION_OPTION, INVALID_DATE_FORMAT, INVALID_EDIT, - INVALID_IMAGE, INVALID_RATING, INVALID_SERIES_POSITION, - INVALID_URL, METADATA_REFRESH_FAILURE, MISSING_CUSTOM_LIST, UNKNOWN_LANGUAGE, @@ -40,15 +30,10 @@ DataSource, Edition, Genre, - Hyperlink, - Representation, - ResourceTransformation, RightsStatus, Subject, create, ) -from core.model.configuration import ExternalIntegrationLink -from core.s3 import MockS3Uploader from core.util.datetime_helpers import datetime_utc from tests.core.mock import ( AlwaysSuccessfulCoverageProvider, @@ -895,390 +880,6 @@ def test_classifications(self, work_fixture: WorkFixture): lp.identifier.identifier, ) - def test_validate_cover_image(self, work_fixture: WorkFixture): - base_path = os.path.split(__file__)[0] - folder = os.path.dirname(base_path) - resource_path = os.path.join(folder, "..", "files", "images") - - path = os.path.join(resource_path, "blue_small.jpg") - too_small = Image.open(path) - - result = work_fixture.manager.admin_work_controller._validate_cover_image( - too_small - ) - assert INVALID_IMAGE.uri == result.uri - assert ( - "Cover image must be at least 600px in width and 900px in height." - == result.detail - ) - - path = os.path.join(resource_path, "blue.jpg") - valid = Image.open(path) - result = work_fixture.manager.admin_work_controller._validate_cover_image(valid) - assert True == result - - @pytest.mark.parametrize( - "original_file_path,processed_file_path,title_position", - [ - # Without a title position, the image won't be changed. - pytest.param("blue.jpg", "blue.jpg", "none", id="no_title_position"), - # Here the title and author are added in the center. Compare the result - # with a pre-generated version. 
- pytest.param( - "blue_with_title_author.png", - "blue.jpg", - "center", - id="center_title_position", - ), - ], - ) - def test_process_cover_image( - self, - work_fixture: WorkFixture, - original_file_path: str, - processed_file_path: str, - title_position: str, - ): - work = work_fixture.ctrl.db.work( - with_license_pool=True, title="Title", authors="Author" - ) - - base_path = os.path.split(__file__)[0] - folder = os.path.dirname(base_path) - resource_path = os.path.join(folder, "..", "files", "images") - - original_path = os.path.join(resource_path, original_file_path) - processed_path = os.path.join(resource_path, processed_file_path) - original = Image.open(original_path) - processed = Image.open(processed_path) - - tmpfile_before = tempfile.NamedTemporaryFile( - prefix="image-before-no-title_", suffix=".png", delete=False - ) - tmpfile_after = tempfile.NamedTemporaryFile( - prefix="image-after-no-title_", suffix=".png", delete=False - ) - logging.info("image before processing (no title): %s", tmpfile_before.name) - logging.info("image after processing (no title): %s", tmpfile_after.name) - - processed = work_fixture.manager.admin_work_controller._process_cover_image( - work, processed, title_position - ) - - original.save(fp=tmpfile_before.name, format="PNG") - processed.save(fp=tmpfile_after.name, format="PNG") - - # RMS difference between the two images should be less than 12. - # This function was taken from https://stackoverflow.com/a/40176818 - histogram = ImageChops.difference(original, processed).histogram() - sum_of_squares = sum( - (value * ((idx % 256) ** 2) for idx, value in enumerate(histogram)) - ) - root_mean_square = math.sqrt( - sum_of_squares / float(original.size[0] * original.size[1]) - ) - - assert root_mean_square < 12 - - # Remove temporary files if we've gotten this far. Assertion failures should leave - # the files intact for manual inspection. - for f in [tmpfile_before, tmpfile_after]: - os.remove(f.name) - - def test_preview_book_cover(self, work_fixture: WorkFixture): - work = work_fixture.ctrl.db.work(with_license_pool=True) - identifier = work.license_pools[0].identifier - - with work_fixture.request_context_with_library_and_admin("/"): - response = work_fixture.manager.admin_work_controller.preview_book_cover( - identifier.type, identifier.identifier - ) - assert INVALID_IMAGE.uri == response.uri - assert "Image file or image URL is required." == response.detail - - with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict( - [ - ("cover_url", "bad_url"), - ] - ) - response = work_fixture.manager.admin_work_controller.preview_book_cover( - identifier.type, identifier.identifier - ) - assert INVALID_URL.uri == response.uri - assert '"bad_url" is not a valid URL.' 
== response.detail - - class TestFileUpload(BytesIO): - headers = {"Content-Type": "image/png"} - - base_path = os.path.split(__file__)[0] - folder = os.path.dirname(base_path) - resource_path = os.path.join(folder, "..", "files", "images") - path = os.path.join(resource_path, "blue.jpg") - original = Image.open(path) - buffer = BytesIO() - original.save(buffer, format="PNG") - image_data = buffer.getvalue() - - with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict([("title_position", "none")]) - flask.request.files = ImmutableMultiDict( - [ - ("cover_file", TestFileUpload(image_data)), # type: ignore[list-item] - ] - ) - response = work_fixture.manager.admin_work_controller.preview_book_cover( - identifier.type, identifier.identifier - ) - assert 200 == response.status_code - assert "data:image/png;base64,%r" % base64.b64encode( - image_data - ) == response.get_data(as_text=True) - - work_fixture.admin.remove_role( - AdminRole.LIBRARIAN, work_fixture.ctrl.db.default_library() - ) - with work_fixture.request_context_with_library_and_admin("/"): - pytest.raises( - AdminNotAuthorized, - work_fixture.manager.admin_work_controller.preview_book_cover, - identifier.type, - identifier.identifier, - ) - - def test_change_book_cover(self, work_fixture: WorkFixture): - # Mock image processing which has been tested in other methods. - process_called_with = [] - - def mock_process(work, image, position): - # Modify the image to ensure it gets a different generic URI. - image.thumbnail((500, 500)) - process_called_with.append((work, image, position)) - return image - - old_process = work_fixture.manager.admin_work_controller._process_cover_image - setattr( - work_fixture.manager.admin_work_controller, - "_process_cover_image", - mock_process, - ) - - work = work_fixture.ctrl.db.work(with_license_pool=True) - identifier = work.license_pools[0].identifier - mirror_type = ExternalIntegrationLink.COVERS - mirrors = dict(covers_mirror=MockS3Uploader(), books_mirror=None) - - with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict( - [ - ("rights_status", RightsStatus.CC_BY), - ("rights_explanation", "explanation"), - ] - ) - response = work_fixture.manager.admin_work_controller.change_book_cover( - identifier.type, identifier.identifier, mirrors - ) - assert INVALID_IMAGE.uri == response.uri - assert "Image file or image URL is required." == response.detail - - with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict( - [ - ("cover_url", "http://example.com"), - ("title_position", "none"), - ] - ) - flask.request.files = ImmutableMultiDict([]) - response = work_fixture.manager.admin_work_controller.change_book_cover( - identifier.type, identifier.identifier - ) - assert INVALID_IMAGE.uri == response.uri - assert "You must specify the image's license." == response.detail - - with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict( - [ - ("cover_url", "bad_url"), - ("title_position", "none"), - ("rights_status", RightsStatus.CC_BY), - ] - ) - response = work_fixture.manager.admin_work_controller.change_book_cover( - identifier.type, identifier.identifier, mirrors - ) - assert INVALID_URL.uri == response.uri - assert '"bad_url" is not a valid URL.' 
== response.detail - - with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict( - [ - ("cover_url", "http://example.com"), - ("title_position", "none"), - ("rights_status", RightsStatus.CC_BY), - ("rights_explanation", "explanation"), - ] - ) - flask.request.files = ImmutableMultiDict([]) - response = work_fixture.manager.admin_work_controller.change_book_cover( - identifier.type, identifier.identifier - ) - assert INVALID_CONFIGURATION_OPTION.uri == response.uri - assert "Could not find a storage integration" in response.detail - - class TestFileUpload(BytesIO): - headers = {"Content-Type": "image/png"} - - base_path = os.path.split(__file__)[0] - folder = os.path.dirname(base_path) - resource_path = os.path.join(folder, "..", "files", "images") - path = os.path.join(resource_path, "blue.jpg") - original = Image.open(path) - buffer = BytesIO() - original.save(buffer, format="PNG") - image_data = buffer.getvalue() - - staff_data_source = DataSource.lookup( - work_fixture.ctrl.db.session, DataSource.LIBRARY_STAFF - ) - - # Upload a new cover image but don't modify it. - with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict( - [ - ("title_position", "none"), - ("rights_status", RightsStatus.CC_BY), - ("rights_explanation", "explanation"), - ] - ) - flask.request.files = ImmutableMultiDict( - [ - ("cover_file", TestFileUpload(image_data)), # type: ignore[list-item] - ] - ) - response = work_fixture.manager.admin_work_controller.change_book_cover( - identifier.type, identifier.identifier, mirrors - ) - assert 200 == response.status_code - - [link] = identifier.links - assert Hyperlink.IMAGE == link.rel - assert staff_data_source == link.data_source - - resource = link.resource - assert identifier.urn in resource.url - assert staff_data_source == resource.data_source - assert RightsStatus.CC_BY == resource.rights_status.uri - assert "explanation" == resource.rights_explanation - - representation = resource.representation - [thumbnail] = resource.representation.thumbnails - - assert resource.url == representation.url - assert Representation.PNG_MEDIA_TYPE == representation.media_type - assert Representation.PNG_MEDIA_TYPE == thumbnail.media_type - assert image_data == representation.content - assert identifier.identifier in representation.mirror_url - assert identifier.identifier in thumbnail.mirror_url - - assert [] == process_called_with - s3_uploader = mirrors[mirror_type] - assert isinstance(s3_uploader, MockS3Uploader) - assert [representation, thumbnail] == s3_uploader.uploaded - assert [ - representation.mirror_url, - thumbnail.mirror_url, - ] == s3_uploader.destinations - - work = work_fixture.ctrl.db.work(with_license_pool=True) - identifier = work.license_pools[0].identifier - - # Upload a new cover image and add the title and author to it. - # Both the original image and the generated image will become resources. 
- with work_fixture.request_context_with_library_and_admin("/"): - flask.request.form = ImmutableMultiDict( - [ - ("title_position", "center"), - ("rights_status", RightsStatus.CC_BY), - ("rights_explanation", "explanation"), - ] - ) - flask.request.files = ImmutableMultiDict( - [ - ("cover_file", TestFileUpload(image_data)), # type: ignore[list-item] - ] - ) - response = work_fixture.manager.admin_work_controller.change_book_cover( - identifier.type, identifier.identifier, mirrors - ) - assert 200 == response.status_code - - [link] = identifier.links - assert Hyperlink.IMAGE == link.rel - assert staff_data_source == link.data_source - - resource = link.resource - assert identifier.urn in resource.url - assert staff_data_source == resource.data_source - assert RightsStatus.CC_BY == resource.rights_status.uri - assert ( - "The original image license allows derivatives." - == resource.rights_explanation - ) - - transformation = ( - work_fixture.ctrl.db.session.query(ResourceTransformation) - .filter(ResourceTransformation.derivative_id == resource.id) - .one() - ) - original_resource = transformation.original - assert resource != original_resource - assert identifier.urn in original_resource.url - assert staff_data_source == original_resource.data_source - assert RightsStatus.CC_BY == original_resource.rights_status.uri - assert "explanation" == original_resource.rights_explanation - assert image_data == original_resource.representation.content - assert None == original_resource.representation.mirror_url - assert "center" == transformation.settings.get("title_position") - assert ( - resource.representation.content - != original_resource.representation.content - ) - assert image_data != resource.representation.content - - assert work == process_called_with[0][0] - assert "center" == process_called_with[0][2] - - assert [] == original_resource.representation.thumbnails - [thumbnail] = resource.representation.thumbnails - assert Representation.PNG_MEDIA_TYPE == thumbnail.media_type - assert image_data != thumbnail.content - assert resource.representation.content != thumbnail.content - assert identifier.identifier in resource.representation.mirror_url - assert identifier.identifier in thumbnail.mirror_url - - assert [resource.representation, thumbnail] == s3_uploader.uploaded[2:] - assert [ - resource.representation.mirror_url, - thumbnail.mirror_url, - ] == s3_uploader.destinations[2:] - - work_fixture.admin.remove_role( - AdminRole.LIBRARIAN, work_fixture.ctrl.db.default_library() - ) - with work_fixture.request_context_with_library_and_admin("/"): - pytest.raises( - AdminNotAuthorized, - work_fixture.manager.admin_work_controller.preview_book_cover, - identifier.type, - identifier.identifier, - ) - - setattr( - work_fixture.manager.admin_work_controller, - "_process_cover_image", - old_process, - ) - def test_custom_lists_get(self, work_fixture: WorkFixture): staff_data_source = DataSource.lookup( work_fixture.ctrl.db.session, DataSource.LIBRARY_STAFF diff --git a/tests/api/admin/test_opds.py b/tests/api/admin/test_opds.py index fc8889befc..9d0b31f4d7 100644 --- a/tests/api/admin/test_opds.py +++ b/tests/api/admin/test_opds.py @@ -3,8 +3,7 @@ from api.admin.opds import AdminAnnotator, AdminFeed from api.opds import AcquisitionFeed from core.lane import Pagination -from core.model import DataSource, ExternalIntegration, Measurement -from core.model.configuration import ExternalIntegrationLink +from core.model import DataSource, Measurement from tests.fixtures.database import 
DatabaseTransactionFixture @@ -130,62 +129,6 @@ def test_feed_includes_edit_link(self, db: DatabaseTransactionFixture): [edit_link] = [x for x in entry["links"] if x["rel"] == "edit"] assert lp.identifier.identifier in edit_link["href"] - def test_feed_includes_change_cover_link(self, db: DatabaseTransactionFixture): - work = db.work(with_open_access_download=True) - lp = work.license_pools[0] - library = db.default_library() - - feed = AcquisitionFeed( - db.session, - "test", - "url", - [work], - AdminAnnotator(None, library, test_mode=True), - ) - [entry] = feedparser.parse(str(feed))["entries"] - - # Since there's no storage integration, the change cover link isn't included. - assert [] == [ - x - for x in entry["links"] - if x["rel"] == "http://librarysimplified.org/terms/rel/change_cover" - ] - - # There is now a covers storage integration that is linked to the external - # integration for a collection that the work is in. It will use that - # covers mirror and the change cover link is included. - storage = db.external_integration( - ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL - ) - storage.username = "user" - storage.password = "pass" - - collection = db.collection() - purpose = ExternalIntegrationLink.COVERS - external_integration_link = db.external_integration_link( - integration=collection._external_integration, - other_integration=storage, - purpose=purpose, - ) - library.collections.append(collection) - work = db.work(with_open_access_download=True, collection=collection) - lp = work.license_pools[0] - feed = AcquisitionFeed( - db.session, - "test", - "url", - [work], - AdminAnnotator(None, library, test_mode=True), - ) - [entry] = feedparser.parse(str(feed))["entries"] - - [change_cover_link] = [ - x - for x in entry["links"] - if x["rel"] == "http://librarysimplified.org/terms/rel/change_cover" - ] - assert lp.identifier.identifier in change_cover_link["href"] - def test_suppressed_feed(self, db: DatabaseTransactionFixture): # Test the ability to show a paginated feed of suppressed works. 
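
The test deleted just above pins down the behavior these removals revolve around: the admin OPDS feed only advertises the "http://librarysimplified.org/terms/rel/change_cover" link when the work's collection has a covers mirror configured via an ExternalIntegrationLink with the COVERS purpose. A minimal sketch of that pre-removal check, for reference only (it reuses the MirrorUploader.for_collection helper that this patch also deletes from the mirror script, and is not the actual AdminAnnotator implementation):

    from core.mirror import MirrorUploader
    from core.model.configuration import ExternalIntegrationLink

    CHANGE_COVER_REL = "http://librarysimplified.org/terms/rel/change_cover"

    def should_offer_change_cover_link(collection) -> bool:
        # A covers mirror exists only when the collection's external
        # integration is linked to a storage integration for the COVERS
        # purpose; without one, the change_cover rel is left out of the feed.
        covers_mirror = MirrorUploader.for_collection(
            collection, ExternalIntegrationLink.COVERS
        )
        return covers_mirror is not None

With the S3 storage integrations removed, no collection can satisfy this check, which is consistent with the change-cover routes and their tests being dropped outright rather than rewritten.
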
diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index f83b62da7f..465aec0e7f 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -325,26 +325,6 @@ def test_classifications(self, fixture: AdminRouteFixture): ) fixture.assert_supported_methods(url, "GET") - def test_preview_book_cover(self, fixture: AdminRouteFixture): - url = "/admin/works//an/identifier/preview_book_cover" - fixture.assert_authenticated_request_calls( - url, - fixture.controller.preview_book_cover, # type: ignore - "", - "an/identifier", - http_method="POST", - ) - - def test_change_book_cover(self, fixture: AdminRouteFixture): - url = "/admin/works//an/identifier/change_book_cover" - fixture.assert_authenticated_request_calls( - url, - fixture.controller.change_book_cover, # type: ignore - "", - "an/identifier", - http_method="POST", - ) - def test_custom_lists(self, fixture: AdminRouteFixture): url = "/admin/works//an/identifier/lists" fixture.assert_authenticated_request_calls( diff --git a/tests/api/feed/test_admin.py b/tests/api/feed/test_admin.py index 283823e337..cd2757880b 100644 --- a/tests/api/feed/test_admin.py +++ b/tests/api/feed/test_admin.py @@ -3,7 +3,6 @@ from core.feed.annotator.admin import AdminAnnotator from core.feed.types import FeedData from core.lane import Pagination -from core.model.configuration import ExternalIntegration, ExternalIntegrationLink from core.model.datasource import DataSource from core.model.measurement import Measurement from tests.api.feed.fixtures import PatchedUrlFor, patch_url_for # noqa @@ -137,67 +136,6 @@ def test_feed_includes_edit_link( [edit_link] = [x for x in entry.computed.other_links if x.rel == "edit"] assert edit_link.href and lp.identifier.identifier in edit_link.href - def test_feed_includes_change_cover_link( - self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor - ): - work = db.work(with_open_access_download=True) - lp = work.license_pools[0] - library = db.default_library() - - feed = OPDSAcquisitionFeed( - "test", - "url", - [work], - AdminAnnotator(None, library), - ) - [entry] = feed._feed.entries - assert entry.computed is not None - - # Since there's no storage integration, the change cover link isn't included. - assert [] == [ - x - for x in entry.computed.other_links - if x.rel == "http://librarysimplified.org/terms/rel/change_cover" - ] - - # There is now a covers storage integration that is linked to the external - # integration for a collection that the work is in. It will use that - # covers mirror and the change cover link is included. 
- storage = db.external_integration( - ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL - ) - storage.username = "user" - storage.password = "pass" - - collection = db.collection() - purpose = ExternalIntegrationLink.COVERS - external_integration_link = db.external_integration_link( - integration=collection._external_integration, - other_integration=storage, - purpose=purpose, - ) - library.collections.append(collection) - work = db.work(with_open_access_download=True, collection=collection) - lp = work.license_pools[0] - feed = OPDSAcquisitionFeed( - "test", - "url", - [work], - AdminAnnotator(None, library), - ) - [entry] = feed._feed.entries - assert entry.computed is not None - - [change_cover_link] = [ - x - for x in entry.computed.other_links - if x.rel == "http://librarysimplified.org/terms/rel/change_cover" - ] - assert ( - change_cover_link.href - and lp.identifier.identifier in change_cover_link.href - ) - def test_suppressed_feed( self, db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor ): diff --git a/tests/api/files/images/blue_small.jpg b/tests/api/files/images/blue_small.jpg deleted file mode 100644 index 3b7885214c88ab04feda8db1a4237915ec955153..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 699 zcmex=_1P|rX?qqI0P zFI~aY%U!`Mz|~!$%)&rZMU_b4?usLlYAdd38%$3nLpnV-q8g zA&i`yoIKn-61=<;Mv5|uMkIs(2N(o77`Pa?m>HEAm;@P_1sVSzVUTBFU}OdQ7UW?l zU}R!uVP#|I;N;>4D%dK(z{JSR%*4XX%F4n5R9y>{XJ8Rz6;d>GWD^cdWLGK_F>0K+ zkVDyN<3Z7&iyu^slZu)+xx~aJB&Af<)HO7&0Dr^+rDGx zu0w~996fgY#K}{aE?>EN?fQ+Iw;n!v{N(Ag=PzEq`uOSdm#^Qx|M>X}>z(JGL-`{vmgtrq9L1*V<3BCp|FxsBZr97#DyCVaw;1KeGpA5 zy2vG_V)9V+BgkuDpAqM=CbE16_ZY%ow-|Vs8G(_+I7O;9NjO%cr$%?;d95c%J}{J(nMJTIOX1J?yy_jO0l$b|I~gN85udypVysaWOm(_k=gxZ-!9QF#m8*nhPBJf%xa z;mjyE?7*IVkiW}+J@2A9BB!7O{)y-M@1uPhiw}-~7XRn+@BW**+xLFbxKQ)i>FTwI z9#3{%2RnQ02dCRMj_r{fEV+5+>|xL!7rRW&xJ;q2lf`8jnAQ3T^DInb*To6!eqVI2 zos|L3PsC!fm@9Q4n2gMIti##?8JTY`XZQUi^ZLjWnZG{o`$Oh;|2@CSoVonyu*~68 zSNF;6OF6Sg=85&;T{3^1`|q^>7UVw<`EM`$7aabJmjC62|3ji=mx=busMEO)YqtA( zx*gYSWgPbD{J)T1|Mq0oFCHzEw=|wNTB}I-$%Goo;deBM_nh13=*qCfElt-ih-C8i z)a?tsa%Qh^4`-3O^wVAAGyC4mZN3Sn^(nM2K3!p2B}%s?(|KXNv@r>Pg7ay-J&A-a z6p#4Xz`tqKBwOk3UpGvoPTNV5V_e_58g7n&Q4z66g#P85a`gQxktM zYlvmXkE1!Js9Vf{3b$9w7y<>asPXrmTN75>6WOh+s-Pr!6-)7aZ3tDN;eegcJSuF(Ji7IHqdoY>;ey~1tR1Gi z$%?D0p<{|!8%jD>A6GaD)5rCgcf`*)KNBaN9Z`jO#!~$*G{nAR?C<4ir=;v(9`Dh; zFi@IiiYr)p>1RH1l6himAF?-D^`;-&;J&gP`LRdk$t!i3^jRFPgVI(PGq6I{N` zK;3R&&=FalVTw*u+3uQ;a`**}?}jGSZX!&ZzejEj+sE6R4Y3cF9hbtKfwMqfLYj7B;*A-Q~`<`V|^$*?apybwyq<%n5f%E445y=Mi)XIOaU zcE@dO3)`f*ZJW8$MS1DPk(F{A3q;gnr1(#(jd${v3jvO1!jZ6$_&N9Ls;bfPL+FjC z@QtV0E%QEuUa;rGXuN{YeLv0wkxS}_WtODnZb$bR!pYmd|$decx z{KO)p{4RFYZq2kB%cekF4aq1Cq~(YIMMeNC1tLoqT9n_ z>tf;B$=Kspqhh*P2?edu*%<7^)YF`ZfaZ{yQwUx6jVI>ZwT+@Z6mR1%YxV$ugx>1xyqu?NeN5I zaF6xB6oVVb7suO)L}FUPYO!N;qmJ7tg@H1+9GIt?s%pbfLej0%S#E|~rv2$8dOWqX zmF@=36i^=#=wsI1}2$bK75&iX?~FxZJq$eA%d zS?GO&Zm%1Hha%lkp+C+kp^n5Jy&Bc@W|`|yk<-;y3fo$wY#)tuQdziWzE0+v||&%p<)vO z+hJC@f&?L%p&R@DXhF?2y+p+hhz10e0RdOoz6&B(n$K)|4v!FWUW_ zqLWX-<I=MS%3mJPXEjO zBHzndH?Ba#K9jYzs2HLXfdssQrcrK|xuX)kF)zRJmhnea4zOZsY!UdzMla*7^>F0o5eA5bWrP z2JR6{cyRGrMB!;9je|kWxoK$;=3F3Z707b~Z*KY@;os%`Jxu@YLH1b<5Ek3c2`=H3 zK+JAY`c$;j5@HJMJSueaZ%xrL!``VM{BI`f%<3(;J@Ewvll!DY^jZ|hA!!t59HX9g zh$F9D$*E##Pz(&Tf@Yo{h$``Y51H6WS`$`@-_;ZDX2?pJ zKo$cfm^zQTx)^t{!nx#SqLwWcd#WOU7jN|phO3+K4KAImAULT)2%5aSoDN#r=q&$l z#5nyRi+ZwHkX#BeHip@=nCveXoy#+Gdn(EI2~8-z& z@ncQfs{7yyLJ*P^ve&3;KE8xa&q-*o;b1g<^mc!bI~8T5#5I-H=A8n8K@cpo$r4|x zA=Yzc^}alFf_W$}mCQ<4#6n%z1ZaOhyQHGu?tE>e8q$+%Nu0cInqLgAKdV=)skUs5 
zxF}h7_`5HV86u5>J0)M!B)2ztIe}HM^;hGCI|!5ezv_UDbsSwbCoFLJa{seTg+UcT zM;P{&u3A@ZT2iOKw*+=OI5HYqpVzm#$==BAlbj6$L}^dsR;BsRu^PcEqqDvpixHgeTJcXS=FLWE zy^Jxg$2Fl(^in)Ap<8vAWfS3oU1=B_jb$XUUDicH?%mSERtg}MM zI#nv4s^|lv&X^5O&pN`9O~iITAKrZDrSACB5$}J(4eB_4sbwCsiUk@VFp1U-sS>mI z;Hi4&*LO8pqcsGbfgJx=dEnKTes7~_-=N!Fai=tkTO46^F`?S_NtE2KQVA)HIPa*g zknL_pdmL6#`HzR3`XyEor%2(Vri%o7yQR$s9>*yKvVSrl6P5MLzkrnG z*&{0(f1`C&VotJs+*I*j$nVs~4qt+5u#4TPeb-@zI%r(F_K=#N`?JTsxfYH?NzPqm z!8s82!oTJf2XEPTIr#vLm7?b_cgD4$J*Q#f@~`)Op+=Ue9j`~0ZYC`-A8NJV=f_Q( zg|xn)17+5OT+WDf?aBo{pyd47v^kY)TyS@*7u~wDoZS{TUK$J}^z70}8$iujXdSjU zpMx>=Yk^`SP%q|7veEWj;Z|2+oMaB3y*lq_6)Stdvm@<`0~se9Q8?2l%BbYKlGiFn zA|;9e(GnQ6vPT+V1W)W(8^X9mz*`5z8T$e*gxWp@`mhf3&q~q*QK|tq2zYvrM^{gC zcTZo?;X@7H!q1{^9d(|IZoq9_uh}7TF8Z;&6Z!^g9J(Ju4=(6ooV{4!JMcEZ#@LvL z*xE&}9iEp=PU~96hx=Q`Y6%*2t3P`dkV2HTX+e4R@CT!#B_g2CSkeiJPA&mjiTpEc z(`<(uLdHV2Izs-8VYrX#S0dM6b()vfRV7G)ExXfV1i)(cPL~v*fp*}ty+lAC%i@nO z#XE9chyB7>n{+yXKYf0uSx>xTC0z-uqD}$ME?t95DP1!yq^Lz8k5~Fx*!JzBtI?rfoSv(Q#6Ecm%0S_&A^U3bfDeW0P0 zvK0usy8t9mhF90-bfMl1$T6mGL;`sB;!{13tRD)V;a@}|Q3naDIsm*_aByn*_}bULs0f%9}>;P6HPNFl8%n3jkN8wE=cEi2VMi|$8Oo@CP)N(^ z)cNoVk;FRv|3r`N|8H)_K%;PWP)6ph#c0WA|3>Nln=Slr*z*6% eSn04-R?Y0j#ksAaNnpTM26V&Wdet9y9{qnnMir3& diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index 0f90faaa2a..f21c2b65b5 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -4,7 +4,6 @@ import pytest import sqlalchemy -from flask_babel import lazy_gettext as _ from sqlalchemy.exc import IntegrityError from core.config import CannotLoadConfiguration, Configuration @@ -413,39 +412,6 @@ def test_duplicate_library_integration_setting( class TestExternalIntegrationLink: - def test_collection_mirror_settings(self): - settings = ExternalIntegrationLink.COLLECTION_MIRROR_SETTINGS - - assert settings[0]["key"] == ExternalIntegrationLink.COVERS_KEY - assert settings[0]["label"] == "Covers Mirror" - assert ( - settings[0]["options"][0]["key"] - == ExternalIntegrationLink.NO_MIRROR_INTEGRATION - ) - assert settings[0]["options"][0]["label"] == _( - "None - Do not mirror cover images" - ) - - assert settings[1]["key"] == ExternalIntegrationLink.OPEN_ACCESS_BOOKS_KEY - assert settings[1]["label"] == "Open Access Books Mirror" - assert ( - settings[1]["options"][0]["key"] - == ExternalIntegrationLink.NO_MIRROR_INTEGRATION - ) - assert settings[1]["options"][0]["label"] == _( - "None - Do not mirror free books" - ) - - assert settings[2]["key"] == ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS_KEY - assert settings[2]["label"] == "Protected Access Books Mirror" - assert ( - settings[2]["options"][0]["key"] - == ExternalIntegrationLink.NO_MIRROR_INTEGRATION - ) - assert settings[2]["options"][0]["label"] == _( - "None - Do not mirror self-hosted, commercially licensed books" - ) - def test_relationships(self, db: DatabaseTransactionFixture): # Create a collection with two storage external integrations. 
collection = db.collection( diff --git a/tests/core/models/test_resource.py b/tests/core/models/test_resource.py index 6ceed67fb6..8f38870115 100644 --- a/tests/core/models/test_resource.py +++ b/tests/core/models/test_resource.py @@ -54,58 +54,6 @@ def test_default_filename(self): assert "cover" == m(Hyperlink.IMAGE) assert "cover-thumbnail" == m(Hyperlink.THUMBNAIL_IMAGE) - def test_unmirrored(self, db: DatabaseTransactionFixture): - ds = DataSource.lookup(db.session, DataSource.GUTENBERG) - overdrive = DataSource.lookup(db.session, DataSource.OVERDRIVE) - - c1 = db.default_collection() - c1.data_source = ds - - # Here's an Identifier associated with a collection. - work = db.work(with_license_pool=True, collection=c1) - [pool] = work.license_pools - i1 = pool.identifier - - # This is a random identifier not associated with the collection. - i2 = db.identifier() - - def m(): - return Hyperlink.unmirrored(c1).all() - - # Identifier is not in the collection. - not_in_collection, ignore = i2.add_link(Hyperlink.IMAGE, db.fresh_url(), ds) - assert [] == m() - - # Hyperlink rel is not mirrorable. - wrong_type, ignore = i1.add_link( - "not mirrorable", db.fresh_url(), ds, "text/plain" - ) - assert [] == m() - - # Hyperlink has no associated representation -- it needs to be - # mirrored, which will create one! - hyperlink, ignore = i1.add_link( - Hyperlink.IMAGE, db.fresh_url(), ds, "image/png" - ) - assert [hyperlink] == m() - - # Representation is already mirrored, so does not show up - # in the unmirrored list. - representation = hyperlink.resource.representation - representation.set_as_mirrored(db.fresh_url()) - assert [] == m() - - # Representation exists in database but is not mirrored -- it needs - # to be mirrored! - representation.mirror_url = None - assert [hyperlink] == m() - - # Hyperlink is associated with a data source other than the - # data source of the collection. It ought to be mirrored, but - # this collection isn't responsible for mirroring it. - hyperlink.data_source = overdrive - assert [] == m() - class TestResource: def test_as_delivery_mechanism_for(self, db: DatabaseTransactionFixture): diff --git a/tests/core/test_circulation_data.py b/tests/core/test_circulation_data.py index 47a95e9ade..be42e1a4e2 100644 --- a/tests/core/test_circulation_data.py +++ b/tests/core/test_circulation_data.py @@ -10,7 +10,6 @@ IdentifierData, LicenseData, LinkData, - Metadata, ReplacementPolicy, SubjectData, ) @@ -23,11 +22,8 @@ RightsStatus, Subject, ) -from core.model.configuration import ExternalIntegrationLink from core.model.licensing import LicenseStatus -from core.s3 import MockS3Uploader from core.util.datetime_helpers import utc_now -from tests.core.mock import DummyHTTPClient from tests.fixtures.database import DatabaseTransactionFixture @@ -734,243 +730,3 @@ def test_format_change_may_change_open_access_status( # The original LPDM has been removed and only the new one remains. assert False == pool.open_access assert 1 == len(pool.delivery_mechanisms) - - -class TestMetaToModelUtility: - def test_open_access_content_mirrored(self, db: DatabaseTransactionFixture): - # Make sure that open access material links are translated to our S3 buckets, and that - # commercial material links are left as is. - # Note: Mirroring tests passing does not guarantee that all code now - # correctly calls on CirculationData, as well as Metadata. This is a risk. 
- - mirrors = dict(books_mirror=MockS3Uploader(), covers_mirror=None) - mirror_type = ExternalIntegrationLink.OPEN_ACCESS_BOOKS - # Here's a book. - edition, pool = db.edition(with_license_pool=True) - - # Here's a link to the content of the book, which will be mirrored. - link_mirrored = LinkData( - rel=Hyperlink.OPEN_ACCESS_DOWNLOAD, - href="http://example.com/", - media_type=Representation.EPUB_MEDIA_TYPE, - content="i am a tiny book", - ) - - # This link will not be mirrored. - link_unmirrored = LinkData( - rel=Hyperlink.DRM_ENCRYPTED_DOWNLOAD, - href="http://example.com/2", - media_type=Representation.EPUB_MEDIA_TYPE, - content="i am a pricy book", - ) - - # Apply the metadata. - policy = ReplacementPolicy(mirrors=mirrors) - - metadata = Metadata( - data_source=edition.data_source, - links=[link_mirrored, link_unmirrored], - ) - metadata.apply(edition, pool.collection, replace=policy) - mirror = mirrors[mirror_type] - assert isinstance(mirror, MockS3Uploader) - # make sure the refactor is done right, and metadata does not upload - assert 0 == len(mirror.uploaded) - - circulation_data = CirculationData( - data_source=edition.data_source, - primary_identifier=edition.primary_identifier, - links=[link_mirrored, link_unmirrored], - ) - circulation_data.apply(db.session, pool.collection, replace=policy) - - # make sure the refactor is done right, and circulation does upload - assert 1 == len(mirror.uploaded) - - # Only the open-access link has been 'mirrored'. - [book] = mirror.uploaded - - # It's remained an open-access link. - assert [Hyperlink.OPEN_ACCESS_DOWNLOAD] == [x.rel for x in book.resource.links] - - # It's been 'mirrored' to the appropriate S3 bucket. - assert book.mirror_url.startswith( - "https://test-content-bucket.s3.amazonaws.com/" - ) - expect = "/{}/{}.epub".format( - edition.primary_identifier.identifier, edition.title - ) - assert book.mirror_url.endswith(expect) - - # make sure the mirrored link is safely on edition - sorted_edition_links = sorted(pool.identifier.links, key=lambda x: x.rel) - unmirrored_representation, mirrored_representation = ( - edlink.resource.representation for edlink in sorted_edition_links - ) - assert mirrored_representation.mirror_url.startswith( - "https://test-content-bucket.s3.amazonaws.com/" - ) - - # make sure the unmirrored link is safely on edition - assert "http://example.com/2" == unmirrored_representation.url - # make sure the unmirrored link has not been translated to an S3 URL - assert None == unmirrored_representation.mirror_url - - def test_mirror_open_access_link_fetch_failure( - self, db: DatabaseTransactionFixture - ): - mirrors = dict(books_mirror=MockS3Uploader()) - h = DummyHTTPClient() - - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - policy = ReplacementPolicy(mirrors=mirrors, http_get=h.do_get) - circulation_data = CirculationData( - data_source=edition.data_source, - primary_identifier=edition.primary_identifier, - ) - - link = LinkData( - rel=Hyperlink.OPEN_ACCESS_DOWNLOAD, - media_type=Representation.EPUB_MEDIA_TYPE, - href=db.fresh_url(), - ) - - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - - h.queue_response(403) - - circulation_data.mirror_link(pool, data_source, link, link_obj, policy) - - representation = link_obj.resource.representation - - # Fetch failed, so we should have a fetch exception but no mirror 
url. - assert representation.fetch_exception != None - assert None == representation.mirror_exception - assert None == representation.mirror_url - assert link.href == representation.url - assert representation.fetched_at != None - assert None == representation.mirrored_at - - # The license pool is suppressed when fetch fails. - assert True == pool.suppressed - assert representation.fetch_exception in pool.license_exception - - def test_mirror_open_access_link_mirror_failure( - self, db: DatabaseTransactionFixture - ): - mirrors = dict(books_mirror=MockS3Uploader(fail=True), covers_mirror=None) - h = DummyHTTPClient() - - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - policy = ReplacementPolicy(mirrors=mirrors, http_get=h.do_get) - - circulation_data = CirculationData( - data_source=edition.data_source, - primary_identifier=edition.primary_identifier, - ) - - link = LinkData( - rel=Hyperlink.OPEN_ACCESS_DOWNLOAD, - media_type=Representation.EPUB_MEDIA_TYPE, - href=db.fresh_url(), - ) - - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - - h.queue_response(200, media_type=Representation.EPUB_MEDIA_TYPE) - - circulation_data.mirror_link(pool, data_source, link, link_obj, policy) - - representation = link_obj.resource.representation - - # The representation was fetched successfully. - assert None == representation.fetch_exception - assert representation.fetched_at != None - - # But mirroing failed. - assert representation.mirror_exception != None - assert None == representation.mirrored_at - assert link.media_type == representation.media_type - assert link.href == representation.url - - # The mirror url was never set. - assert None == representation.mirror_url - - # Book content is still there since it wasn't mirrored. - assert representation.content != None - - # The license pool is suppressed when mirroring fails. - assert True == pool.suppressed - assert representation.mirror_exception in pool.license_exception - - def test_has_open_access_link(self, db: DatabaseTransactionFixture): - identifier = IdentifierData(Identifier.GUTENBERG_ID, "1") - - circulationdata = CirculationData( - DataSource.GUTENBERG, - identifier, - ) - - # No links - assert False == circulationdata.has_open_access_link - - linkdata = LinkData( - rel=Hyperlink.OPEN_ACCESS_DOWNLOAD, - href=db.fresh_url(), - ) - circulationdata.links = [linkdata] - - # Open-access link with no explicit rights URI. - assert True == circulationdata.has_open_access_link - - # Open-access link with contradictory rights URI. - linkdata.rights_uri = RightsStatus.IN_COPYRIGHT - assert False == circulationdata.has_open_access_link - - # Open-access link with consistent rights URI. - linkdata.rights_uri = RightsStatus.GENERIC_OPEN_ACCESS - assert True == circulationdata.has_open_access_link - - def test_availability_needs_update(self, db: DatabaseTransactionFixture): - """Test the logic that controls whether a LicensePool's availability - information should actually be updated. - """ - - identifier = IdentifierData(Identifier.GUTENBERG_ID, "1") - now = utc_now() - yesterday = now - datetime.timedelta(days=1) - recent_data = CirculationData(DataSource.GUTENBERG, identifier) - # CirculationData.last_checked defaults to the current time. 
- assert (recent_data.last_checked - now).total_seconds() < 10 - old_data = CirculationData( - DataSource.GUTENBERG, identifier, last_checked=yesterday - ) - - edition, pool = db.edition(with_license_pool=True) - - # A pool that has never been checked always needs to be updated. - pool.last_checked = None - assert True == recent_data._availability_needs_update(pool) - assert True == old_data._availability_needs_update(pool) - - # A pool that has been checked before only needs to be updated - # if the information is at least as new as what we had before. - pool.last_checked = now - assert True == recent_data._availability_needs_update(pool) - assert False == old_data._availability_needs_update(pool) diff --git a/tests/core/test_coverage.py b/tests/core/test_coverage.py index df495ed480..7e38ea5acd 100644 --- a/tests/core/test_coverage.py +++ b/tests/core/test_coverage.py @@ -42,15 +42,12 @@ Work, WorkCoverageRecord, ) -from core.model.configuration import ExternalIntegrationLink -from core.s3 import MockS3Uploader from core.util.datetime_helpers import datetime_utc, utc_now from tests.core.mock import ( AlwaysSuccessfulBibliographicCoverageProvider, AlwaysSuccessfulCollectionCoverageProvider, AlwaysSuccessfulCoverageProvider, AlwaysSuccessfulWorkCoverageProvider, - DummyHTTPClient, NeverSuccessfulBibliographicCoverageProvider, NeverSuccessfulCoverageProvider, NeverSuccessfulWorkCoverageProvider, @@ -1586,19 +1583,6 @@ def test_set_metadata_incorporates_replacement_policy( edition, pool = db.edition(with_license_pool=True) identifier = edition.primary_identifier - # All images and open-access content will be fetched through this - # 'HTTP client'... - http = DummyHTTPClient() - http.queue_response( - 200, - content="I am an epub.", - media_type=Representation.EPUB_MEDIA_TYPE, - ) - - # ..and will then be uploaded to this 'mirror'. - mirrors = dict(books_mirror=MockS3Uploader()) - mirror_type = ExternalIntegrationLink.OPEN_ACCESS_BOOKS - class Tripwire(PresentationCalculationPolicy): # This class sets a variable if one of its properties is # accessed. @@ -1615,8 +1599,6 @@ def __getattr__(self, name): presentation_calculation_policy = Tripwire() replacement_policy = ReplacementPolicy( - mirrors=mirrors, - http_get=http.do_get, presentation_calculation_policy=presentation_calculation_policy, ) @@ -1652,17 +1634,6 @@ def __getattr__(self, name): identifier, metadata, circulationdata ) - # The open-access download was 'downloaded' and 'mirrored'. - [mirrored] = mirrors[mirror_type].uploaded - assert "http://foo.com/" == mirrored.url - assert mirrored.mirror_url.endswith( - f"/{identifier.identifier}/{edition.title}.epub" - ) - - # The book content was removed from the db after it was - # mirrored successfully. - assert None == mirrored.content - # Our custom PresentationCalculationPolicy was used when # determining whether to recalculate the work's # presentation. 
We know this because the tripwire was diff --git a/tests/core/test_metadata.py b/tests/core/test_metadata.py index 0c3cd1b3d0..995eccd88a 100644 --- a/tests/core/test_metadata.py +++ b/tests/core/test_metadata.py @@ -34,13 +34,10 @@ Work, WorkCoverageRecord, ) -from core.model.configuration import ExternalIntegrationLink -from core.s3 import MockS3Uploader from core.util.datetime_helpers import datetime_utc, utc_now -from tests.core.mock import DummyHTTPClient, LogCaptureHandler +from tests.core.mock import LogCaptureHandler from tests.fixtures.csv_files import CSVFilesFixture from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.sample_covers import SampleCoversFixture class TestIdentifierData: @@ -337,516 +334,6 @@ def test_image_becomes_representation_but_thumbnail_does_not( assert "http://largeimage.com/" == edition.cover_full_url assert None == edition.cover_thumbnail_url - def test_image_scale_and_mirror( - self, - db, - sample_covers_fixture: SampleCoversFixture, - ): - # Make sure that open access material links are translated to our S3 buckets, and that - # commercial material links are left as is. - # Note: mirroring links is now also CirculationData's job. So the unit tests - # that test for that have been changed to call to mirror cover images. - # However, updated tests passing does not guarantee that all code now - # correctly calls on CirculationData, too. This is a risk. - - mirrors = dict(covers_mirror=MockS3Uploader(), books_mirror=None) - edition, pool = db.edition(with_license_pool=True) - content = open( - sample_covers_fixture.sample_cover_path("test-book-cover.png"), "rb" - ).read() - l1 = LinkData( - rel=Hyperlink.IMAGE, - href="http://example.com/", - media_type=Representation.JPEG_MEDIA_TYPE, - content=content, - ) - l2 = LinkData( - rel=Hyperlink.THUMBNAIL_IMAGE, - href="http://example.com/thumb.jpg", - media_type=Representation.JPEG_MEDIA_TYPE, - content=content, - ) - - # When we call metadata.apply, all image links will be scaled and - # 'mirrored'. - policy = ReplacementPolicy(mirrors=mirrors) - metadata = Metadata(links=[l1, l2], data_source=edition.data_source) - metadata.apply(edition, pool.collection, replace=policy) - - # Two Representations were 'mirrored'. - mirror = mirrors[ExternalIntegrationLink.COVERS] - assert isinstance(mirror, MockS3Uploader) - image, thumbnail = mirror.uploaded - - # The image... - [image_link] = image.resource.links - assert Hyperlink.IMAGE == image_link.rel - - # And its thumbnail. - assert image == thumbnail.thumbnail_of - - # The original image is too big to be a thumbnail. - assert 600 == image.image_height - assert 400 == image.image_width - - # The thumbnail is the right height. - assert Edition.MAX_THUMBNAIL_HEIGHT == thumbnail.image_height - assert Edition.MAX_THUMBNAIL_WIDTH == thumbnail.image_width - - # The thumbnail is newly generated from the full-size - # image--the thumbnail that came in from the OPDS feed was - # ignored. - assert thumbnail.url != l2.href - assert thumbnail.content != l2.content - - # Both images have been 'mirrored' to Amazon S3. - assert image.mirror_url.startswith( - "https://test-cover-bucket.s3.amazonaws.com/" - ) - assert image.mirror_url.endswith("cover.jpg") - - # The thumbnail image has been converted to PNG. 
- assert thumbnail.mirror_url.startswith( - "https://test-cover-bucket.s3.amazonaws.com/scaled/300/" - ) - assert thumbnail.mirror_url.endswith("cover.png") - - def test_mirror_thumbnail_only( - self, - db, - sample_covers_fixture: SampleCoversFixture, - ): - # Make sure a thumbnail image is mirrored when there's no cover image. - mirrors = dict(covers_mirror=MockS3Uploader()) - mirror_type = ExternalIntegrationLink.COVERS - edition, pool = db.edition(with_license_pool=True) - thumbnail_content = open( - sample_covers_fixture.sample_cover_path("tiny-image-cover.png"), "rb" - ).read() - l = LinkData( - rel=Hyperlink.THUMBNAIL_IMAGE, - href="http://example.com/thumb.png", - media_type=Representation.PNG_MEDIA_TYPE, - content=thumbnail_content, - ) - - policy = ReplacementPolicy(mirrors=mirrors) - metadata = Metadata(links=[l], data_source=edition.data_source) - metadata.apply(edition, pool.collection, replace=policy) - - # One Representation was 'mirrored'. - [thumbnail] = mirrors[mirror_type].uploaded - - # The image has been 'mirrored' to Amazon S3. - assert thumbnail.mirror_url.startswith( - "https://test-cover-bucket.s3.amazonaws.com/" - ) - assert thumbnail.mirror_url.endswith("thumb.png") - - def test_mirror_open_access_link_fetch_failure( - self, db: DatabaseTransactionFixture - ): - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - m = Metadata(data_source=data_source) - - mirrors = dict(covers_mirror=MockS3Uploader()) - h = DummyHTTPClient() - - policy = ReplacementPolicy(mirrors=mirrors, http_get=h.do_get) - - link = LinkData( - rel=Hyperlink.IMAGE, - media_type=Representation.JPEG_MEDIA_TYPE, - href="http://example.com/", - ) - - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - h.queue_response(403) - - m.mirror_link(edition, data_source, link, link_obj, policy) - - representation = link_obj.resource.representation - - # Fetch failed, so we should have a fetch exception but no mirror url. - assert representation.fetch_exception != None - assert None == representation.mirror_exception - assert None == representation.mirror_url - assert link.href == representation.url - assert representation.fetched_at != None - assert None == representation.mirrored_at - - # the edition's identifier-associated license pool should not be - # suppressed just because fetch failed on getting image. - assert False == pool.suppressed - - # the license pool only gets its license_exception column filled in - # if fetch failed on getting an Hyperlink.OPEN_ACCESS_DOWNLOAD-type epub. 
- assert None == pool.license_exception - - def test_mirror_404_error(self, db: DatabaseTransactionFixture): - mirrors = dict(covers_mirror=MockS3Uploader(), books_mirror=None) - mirror_type = ExternalIntegrationLink.COVERS - h = DummyHTTPClient() - h.queue_response(404) - policy = ReplacementPolicy(mirrors=mirrors, http_get=h.do_get) - - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - - link = LinkData( - rel=Hyperlink.IMAGE, - media_type=Representation.JPEG_MEDIA_TYPE, - href="http://example.com/", - ) - - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - - m = Metadata(data_source=data_source) - - m.mirror_link(edition, data_source, link, link_obj, policy) - - # Since we got a 404 error, the cover image was not mirrored. - assert 404 == link_obj.resource.representation.status_code - assert None == link_obj.resource.representation.mirror_url - mirror = mirrors[mirror_type] - assert isinstance(mirror, MockS3Uploader) - assert [] == mirror.uploaded - - def test_mirror_open_access_link_mirror_failure( - self, - db, - sample_covers_fixture: SampleCoversFixture, - ): - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - m = Metadata(data_source=data_source) - - mirrors = dict(covers_mirror=MockS3Uploader(fail=True)) - h = DummyHTTPClient() - - policy = ReplacementPolicy(mirrors=mirrors, http_get=h.do_get) - - content = open( - sample_covers_fixture.sample_cover_path("test-book-cover.png"), "rb" - ).read() - link = LinkData( - rel=Hyperlink.IMAGE, - media_type=Representation.JPEG_MEDIA_TYPE, - href="http://example.com/", - content=content, - ) - - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - - h.queue_response(200, media_type=Representation.JPEG_MEDIA_TYPE) - - m.mirror_link(edition, data_source, link, link_obj, policy) - - representation = link_obj.resource.representation - - # The representation was fetched successfully. - assert None == representation.fetch_exception - assert representation.fetched_at != None - - # But mirroring failed. - assert representation.mirror_exception != None - assert None == representation.mirrored_at - assert link.media_type == representation.media_type - assert link.href == representation.url - - # The mirror url is not set. - assert None == representation.mirror_url - - # Book content is still there since it wasn't mirrored. - assert representation.content != None - - # the edition's identifier-associated license pool should not be - # suppressed just because fetch failed on getting image. - assert False == pool.suppressed - - # the license pool only gets its license_exception column filled in - # if fetch failed on getting an Hyperlink.OPEN_ACCESS_DOWNLOAD-type epub. 
- assert None == pool.license_exception - - def test_mirror_link_bad_media_type( - self, - db, - sample_covers_fixture: SampleCoversFixture, - ): - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - m = Metadata(data_source=data_source) - - mirrors = dict(covers_mirror=MockS3Uploader()) - h = DummyHTTPClient() - - policy = ReplacementPolicy(mirrors=mirrors, http_get=h.do_get) - - content = open( - sample_covers_fixture.sample_cover_path("test-book-cover.png"), "rb" - ).read() - - # We thought this link was for an image file. - link = LinkData( - rel=Hyperlink.IMAGE, - media_type=Representation.JPEG_MEDIA_TYPE, - href="http://example.com/", - content=content, - ) - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - ) - - # The remote server told us a generic media type. - h.queue_response( - 200, media_type=Representation.OCTET_STREAM_MEDIA_TYPE, content=content - ) - - m.mirror_link(edition, data_source, link, link_obj, policy) - representation = link_obj.resource.representation - - # The representation was fetched and mirrored successfully. - # We assumed the original image media type was correct. - assert None == representation.fetch_exception - assert representation.fetched_at != None - assert None == representation.mirror_exception - assert representation.mirrored_at != None - assert Representation.JPEG_MEDIA_TYPE == representation.media_type - assert link.href == representation.url - assert "Gutenberg" in representation.mirror_url - assert representation.mirror_url.endswith( - "%s/cover.jpg" % edition.primary_identifier.identifier - ) - - # We don't know the media type for this link, but it has a file extension. - link = LinkData( - rel=Hyperlink.IMAGE, href="http://example.com/image.png", content=content - ) - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - ) - h.queue_response( - 200, media_type=Representation.OCTET_STREAM_MEDIA_TYPE, content=content - ) - m.mirror_link(edition, data_source, link, link_obj, policy) - representation = link_obj.resource.representation - - # The representation is still fetched and mirrored successfully. - # We used the media type from the file extension. - assert None == representation.fetch_exception - assert representation.fetched_at != None - assert None == representation.mirror_exception - assert representation.mirrored_at != None - assert Representation.PNG_MEDIA_TYPE == representation.media_type - assert link.href == representation.url - assert "Gutenberg" in representation.mirror_url - assert representation.mirror_url.endswith( - "%s/image.png" % edition.primary_identifier.identifier - ) - - # We don't know the media type of this link, and there's no extension. - link = LinkData( - rel=Hyperlink.IMAGE, href="http://example.com/unknown", content=content - ) - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - ) - h.queue_response( - 200, media_type=Representation.OCTET_STREAM_MEDIA_TYPE, content=content - ) - m.mirror_link(edition, data_source, link, link_obj, policy) - representation = link_obj.resource.representation - - # The representation is fetched, but we don't try to mirror it - # since it doesn't have a mirrorable media type. 
- assert None == representation.fetch_exception - assert representation.fetched_at != None - assert None == representation.mirror_exception - assert None == representation.mirrored_at - assert Representation.OCTET_STREAM_MEDIA_TYPE == representation.media_type - assert link.href == representation.url - assert None == representation.mirror_url - - def test_non_open_access_book_not_mirrored(self, db: DatabaseTransactionFixture): - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - m = Metadata(data_source=data_source) - - mirrors = dict(covers_mirror=MockS3Uploader(fail=True)) - mirror_type = ExternalIntegrationLink.COVERS - h = DummyHTTPClient() - - policy = ReplacementPolicy(mirrors=mirrors, http_get=h.do_get) - - content = "foo" - link = LinkData( - rel=Hyperlink.OPEN_ACCESS_DOWNLOAD, - media_type=Representation.EPUB_MEDIA_TYPE, - href="http://example.com/", - content=content, - rights_uri=RightsStatus.IN_COPYRIGHT, - ) - - identifier = db.identifier() - link_obj, is_new = identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - - # The Hyperlink object makes it look like an open-access book, - # but the context we have from the OPDS feed says that it's - # not. - m.mirror_link(None, data_source, link, link_obj, policy) - - # No HTTP requests were made. - assert [] == h.requests - - # Nothing was uploaded. - assert [] == mirrors[mirror_type].uploaded - - def test_mirror_with_content_modifier(self, db: DatabaseTransactionFixture): - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - m = Metadata(data_source=data_source) - - mirrors = dict(books_mirror=MockS3Uploader()) - mirror_type = ExternalIntegrationLink.OPEN_ACCESS_BOOKS - - def dummy_content_modifier(representation): - representation.content = b"Replaced Content" - - h = DummyHTTPClient() - - policy = ReplacementPolicy( - mirrors=mirrors, content_modifier=dummy_content_modifier, http_get=h.do_get - ) - - link = LinkData( - rel=Hyperlink.OPEN_ACCESS_DOWNLOAD, - media_type=Representation.EPUB_MEDIA_TYPE, - href="http://example.com/test.epub", - content="I'm an epub", - ) - - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - - h.queue_response(200, media_type=Representation.EPUB_MEDIA_TYPE) - - m.mirror_link(edition, data_source, link, link_obj, policy) - - representation = link_obj.resource.representation - - # The representation was fetched successfully. - assert None == representation.fetch_exception - assert representation.fetched_at != None - - # The mirror url is set. - assert "Gutenberg" in representation.mirror_url - assert representation.mirror_url.endswith( - f"{edition.primary_identifier.identifier}/{edition.title}.epub" - ) - - # Content isn't there since it was mirrored. - assert None == representation.content - - # The representation was mirrored, with the modified content. 
- assert [representation] == mirrors[mirror_type].uploaded - assert [b"Replaced Content"] == mirrors[mirror_type].content - - def test_mirror_protected_access_book(self, db: DatabaseTransactionFixture): - edition, pool = db.edition(with_license_pool=True) - - data_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - m = Metadata(data_source=data_source) - - mirror_type = ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS - mirrors = {mirror_type: MockS3Uploader()} - - def dummy_content_modifier(representation): - representation.content = b"Replaced Content" - - h = DummyHTTPClient() - - policy = ReplacementPolicy( - mirrors=mirrors, content_modifier=dummy_content_modifier, http_get=h.do_get - ) - - link = LinkData( - rel=Hyperlink.GENERIC_OPDS_ACQUISITION, - media_type=Representation.EPUB_MEDIA_TYPE, - href="http://example.com/test.epub", - content="I'm an epub", - ) - - link_obj, ignore = edition.primary_identifier.add_link( - rel=link.rel, - href=link.href, - data_source=data_source, - media_type=link.media_type, - content=link.content, - ) - - h.queue_response(200, media_type=Representation.EPUB_MEDIA_TYPE) - - m.mirror_link(edition, data_source, link, link_obj, policy) - - representation = link_obj.resource.representation - - # The representation was fetched successfully. - assert None == representation.fetch_exception - assert representation.fetched_at is not None - - # The mirror url is set. - assert "Gutenberg" in representation.mirror_url - assert representation.mirror_url.endswith( - f"{edition.primary_identifier.identifier}/{edition.title}.epub" - ) - - # Content isn't there since it was mirrored. - assert None == representation.content - - # The representation was mirrored, with the modified content. - assert [representation] == mirrors[mirror_type].uploaded - assert [b"Replaced Content"] == mirrors[mirror_type].content - def test_measurements(self, db: DatabaseTransactionFixture): edition = db.edition() measurement = MeasurementData( @@ -1140,35 +627,6 @@ def test_find_sort_name(self, db: DatabaseTransactionFixture): class TestLinkData: - @pytest.mark.parametrize( - "name,rel,expected_mirror_type", - [ - ("image", Hyperlink.IMAGE, ExternalIntegrationLink.COVERS), - ("thumbnail", Hyperlink.THUMBNAIL_IMAGE, ExternalIntegrationLink.COVERS), - ( - "open_access_book", - Hyperlink.OPEN_ACCESS_DOWNLOAD, - ExternalIntegrationLink.OPEN_ACCESS_BOOKS, - ), - ( - "protected_access_book", - Hyperlink.GENERIC_OPDS_ACQUISITION, - ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS, - ), - ], - ) - def test_mirror_type_returns_correct_mirror_type_for( - self, name, rel, expected_mirror_type - ): - # Arrange - link_data = LinkData(rel, href="dummy") - - # Act - result = link_data.mirror_type() - - # Assert - assert result == expected_mirror_type - def test_guess_media_type(self): rel = Hyperlink.IMAGE @@ -1476,17 +934,6 @@ def test_apply_creates_coverage_records(self, db: DatabaseTransactionFixture): assert 1 == records.count() assert CoverageRecord.SUCCESS == records.all()[0].status - # No metadata upload failure was recorded, because this metadata - # came from Overdrive. - records = ( - db.session.query(CoverageRecord) - .filter(CoverageRecord.identifier_id == edition.primary_identifier.id) - .filter( - CoverageRecord.operation == CoverageRecord.METADATA_UPLOAD_OPERATION - ) - ) - assert 0 == records.count() - # Apply metadata from a different source. 
metadata = Metadata(data_source=DataSource.GUTENBERG, title=db.fresh_str()) @@ -1502,17 +949,6 @@ def test_apply_creates_coverage_records(self, db: DatabaseTransactionFixture): for record in records.all(): assert CoverageRecord.SUCCESS == record.status - # But now there's also a metadata upload failure. - records = ( - db.session.query(CoverageRecord) - .filter(CoverageRecord.identifier_id == edition.primary_identifier.id) - .filter( - CoverageRecord.operation == CoverageRecord.METADATA_UPLOAD_OPERATION - ) - ) - assert 1 == records.count() - assert CoverageRecord.TRANSIENT_FAILURE == records.all()[0].status - def test_update_contributions(self, db: DatabaseTransactionFixture): edition = db.edition() diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 49942226d4..4a4a8369f1 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -37,9 +37,7 @@ Work, WorkCoverageRecord, ) -from core.model.configuration import ExternalIntegrationLink from core.opds_import import OPDSImporter, OPDSImportMonitor, OPDSXMLParser -from core.s3 import MockS3Uploader, S3Uploader, S3UploaderConfiguration from core.util import first_or_default from core.util.datetime_helpers import datetime_utc from core.util.http import BadResponseException @@ -47,7 +45,6 @@ from tests.core.mock import DummyHTTPClient from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.opds_files import OPDSFilesFixture -from tests.fixtures.sample_covers import SampleCoversFixture class DoomedOPDSImporter(OPDSImporter): @@ -1933,428 +1930,6 @@ def test_combine_present_value_extends_dictionary(self): ) -class TestMirroring: - @pytest.fixture() - def http(self): - class DummyHashedHttpClient: - def __init__(self): - self.responses = {} - self.requests = [] - - def queue_response( - self, - url, - response_code, - media_type="text_html", - other_headers=None, - content="", - ): - headers = {} - if media_type: - headers["content-type"] = media_type - if other_headers: - for k, v in other_headers.items(): - headers[k.lower()] = v - self.responses[url] = (response_code, headers, content) - - def do_get(self, url, *args, **kwargs): - self.requests.append(url) - return self.responses.pop(url) - - return DummyHashedHttpClient() - - @pytest.fixture() - def svg(self): - svg = """ - - - - """ - return svg - - @pytest.fixture() - def png(self, sample_covers_fixture: SampleCoversFixture): - with open( - sample_covers_fixture.sample_cover_path("test-book-cover.png"), "rb" - ) as png_file: - png = png_file.read() - return png - - @pytest.fixture() - def epub10441(self): - return { - "url": "http://www.gutenberg.org/ebooks/10441.epub.images", - "response_code": 200, - "content": b"I am 10441.epub.images", - "media_type": Representation.EPUB_MEDIA_TYPE, - } - - @pytest.fixture() - def epub10441_cover(self, svg): - return { - "url": "https://s3.amazonaws.com/book-covers.nypl.org/Gutenberg-Illustrated/10441/cover_10441_9.png", - "response_code": 200, - "content": svg, - "media_type": Representation.SVG_MEDIA_TYPE, - } - - @pytest.fixture() - def epub10557(self): - return { - "url": "http://www.gutenberg.org/ebooks/10557.epub.images", - "response_code": 200, - "content": b"I am 10557.epub.images", - "media_type": Representation.EPUB_MEDIA_TYPE, - } - - @pytest.fixture() - def epub10557_cover_broken(self): - return { - "url": "http://root/broken-cover-image", - "response_code": 404, - "media_type": "text/plain", - } - - @pytest.fixture() - def epub10557_cover_working(self, png): - 
return { - "url": "http://root/working-cover-image", - "response_code": 200, - "content": png, - "media_type": Representation.PNG_MEDIA_TYPE, - } - - def test_importer_gets_appropriate_mirror_for_collection( - self, opds_importer_fixture: OPDSImporterFixture - ): - data, transaction, session = ( - opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, - ) - - # The default collection is not configured to mirror the - # resources it finds for either its books or covers. - collection = transaction.default_collection() - importer = OPDSImporter(session, collection=collection) - assert None == importer.mirrors[ExternalIntegrationLink.OPEN_ACCESS_BOOKS] - assert None == importer.mirrors[ExternalIntegrationLink.COVERS] - - # Let's configure mirrors integration for it. - - # First set up a storage integration. - integration = transaction.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="username", - password="password", - settings={S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "some-covers"}, - ) - # Associate the collection's integration with the storage integration - # for the purpose of 'covers'. - integration_link = transaction.external_integration_link( - integration=collection._external_integration, - other_integration=integration, - purpose=ExternalIntegrationLink.COVERS, - ) - - # Now an OPDSImporter created for this collection has an - # appropriately configured MirrorUploader associated with it for the - # 'covers' purpose. - importer = OPDSImporter(session, collection=collection) - mirrors = importer.mirrors - - assert isinstance(mirrors[ExternalIntegrationLink.COVERS], S3Uploader) - assert "some-covers" == mirrors[ExternalIntegrationLink.COVERS].get_bucket( - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY - ) - assert mirrors[ExternalIntegrationLink.OPEN_ACCESS_BOOKS] == None - - # An OPDSImporter can have two types of mirrors. - integration = transaction.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="username", - password="password", - settings={S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "some-books"}, - ) - # Associate the collection's integration with the storage integration - # for the purpose of 'covers'. - integration_link = transaction.external_integration_link( - integration=collection._external_integration, - other_integration=integration, - purpose=ExternalIntegrationLink.OPEN_ACCESS_BOOKS, - ) - - importer = OPDSImporter(session, collection=collection) - mirrors = importer.mirrors - - assert isinstance( - mirrors[ExternalIntegrationLink.OPEN_ACCESS_BOOKS], S3Uploader - ) - assert "some-books" == mirrors[ - ExternalIntegrationLink.OPEN_ACCESS_BOOKS - ].get_bucket(S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY) - assert "some-covers" == mirrors[ExternalIntegrationLink.COVERS].get_bucket( - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY - ) - - def test_resources_are_mirrored_on_import( - self, - opds_importer_fixture: OPDSImporterFixture, - http, - png, - svg, - epub10441, - epub10557, - epub10441_cover, - epub10557_cover_broken, - epub10557_cover_working, - ): - data, transaction, session = ( - opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, - ) - - http.queue_response(**epub10441) - http.queue_response(**epub10441_cover) - http.queue_response(**epub10557) - # The request to http://root/broken-cover-image - # will result in a 404 error, and the image will not be mirrored. 
- http.queue_response(**epub10557_cover_broken) - http.queue_response(**epub10557_cover_working) - - s3_for_books = MockS3Uploader() - s3_for_covers = MockS3Uploader() - mirrors = dict(books_mirror=s3_for_books, covers_mirror=s3_for_covers) - - importer = OPDSImporter( - session, - collection=transaction.default_collection(), - mirrors=mirrors, - http_get=http.do_get, - ) - - imported_editions, pools, works, failures = importer.import_from_feed( - data.content_server_mini_feed, feed_url="http://root" - ) - - assert 2 == len(pools) - - # Both items were requested - assert epub10441["url"] in http.requests - assert epub10557["url"] in http.requests - - # The import process requested each remote resource in the feed. The thumbnail - # image was not requested, since we never trust foreign thumbnails. The order they - # are requested in is not deterministic, but after requesting the epub the images - # should be requested. - index = http.requests.index(epub10441["url"]) - assert http.requests[index + 1] == epub10441_cover["url"] - - index = http.requests.index(epub10557["url"]) - assert http.requests[index : index + 3] == [ - epub10557["url"], - epub10557_cover_broken["url"], - epub10557_cover_working["url"], - ] - - e_10441 = next( - e for e in imported_editions if e.primary_identifier.identifier == "10441" - ) - e_10557 = next( - e for e in imported_editions if e.primary_identifier.identifier == "10557" - ) - - [ - e_10441_oa_link, - e_10441_image_link, - e_10441_thumbnail_link, - e_10441_description_link, - ] = sorted(e_10441.primary_identifier.links, key=lambda x: x.rel) - [ - e_10557_broken_image_link, - e_10557_working_image_link, - e_10557_oa_link, - ] = sorted(e_10557.primary_identifier.links, key=lambda x: x.resource.url) - - # The thumbnail image is associated with the Identifier, but - # it's not used because it's associated with a representation - # (cover_10441_9.png with media type "image/png") that no - # longer has a resource associated with it. - assert Hyperlink.THUMBNAIL_IMAGE == e_10441_thumbnail_link.rel - hypothetical_full_representation = ( - e_10441_thumbnail_link.resource.representation.thumbnail_of - ) - assert None == hypothetical_full_representation.resource - assert ( - Representation.PNG_MEDIA_TYPE == hypothetical_full_representation.media_type - ) - - # That's because when we actually got cover_10441_9.png, - # it turned out to be an SVG file, not a PNG, so we created a new - # Representation. TODO: Obviously we could do better here. - assert ( - Representation.SVG_MEDIA_TYPE - == e_10441_image_link.resource.representation.media_type - ) - - # The two open-access links were mirrored to S3, as were the - # original SVG image, the working PNG image, and its thumbnail, which we generated. The - # The broken PNG image was not mirrored because our attempt to download - # it resulted in a 404 error. 
- imported_book_representations = { - e_10441_oa_link.resource.representation, - e_10557_oa_link.resource.representation, - } - imported_cover_representations = { - e_10441_image_link.resource.representation, - e_10557_working_image_link.resource.representation, - e_10557_working_image_link.resource.representation.thumbnails[0], - } - - assert imported_book_representations == set(s3_for_books.uploaded) - assert imported_cover_representations == set(s3_for_covers.uploaded) - - assert 2 == len(s3_for_books.uploaded) - assert 3 == len(s3_for_covers.uploaded) - - assert epub10441["content"] in s3_for_books.content - assert epub10557["content"] in s3_for_books.content - - svg_bytes = svg.encode("utf8") - covers_content = s3_for_covers.content[:] - assert svg_bytes in covers_content - covers_content.remove(svg_bytes) - assert png in covers_content - covers_content.remove(png) - - # We don't know what the thumbnail is, but we know it's smaller than the original cover image. - assert len(png) > len(covers_content[0]) - - # Each resource was 'mirrored' to an Amazon S3 bucket. - # - # The "mouse" book was mirrored to a book bucket corresponding to - # Project Gutenberg, its data source. - # - # The images were mirrored to a covers bucket corresponding to the - # open-access content server, _their_ data source. Each image - # has an extension befitting its media type. - # - # The "crow" book was mirrored to a bucket corresponding to - # the open-access content source, the default data source used - # when no distributor was specified for a book. - book1_url = "https://test-content-bucket.s3.amazonaws.com/Gutenberg/Gutenberg%20ID/10441/The%20Green%20Mouse.epub.images" - book1_svg_cover = "https://test-cover-bucket.s3.amazonaws.com/Library%20Simplified%20Open%20Access%20Content%20Server/Gutenberg%20ID/10441/cover_10441_9.svg" - book2_url = "https://test-content-bucket.s3.amazonaws.com/Library%20Simplified%20Open%20Access%20Content%20Server/Gutenberg%20ID/10557/Johnny%20Crow%27s%20Party.epub.images" - book2_png_cover = "https://test-cover-bucket.s3.amazonaws.com/Library%20Simplified%20Open%20Access%20Content%20Server/Gutenberg%20ID/10557/working-cover-image.png" - book2_png_thumbnail = "https://test-cover-bucket.s3.amazonaws.com/scaled/300/Library%20Simplified%20Open%20Access%20Content%20Server/Gutenberg%20ID/10557/working-cover-image.png" - uploaded_urls = {x.mirror_url for x in s3_for_covers.uploaded} - uploaded_book_urls = {x.mirror_url for x in s3_for_books.uploaded} - assert {book1_svg_cover, book2_png_cover, book2_png_thumbnail} == uploaded_urls - assert {book1_url, book2_url} == uploaded_book_urls - - # If we fetch the feed again, and the entries have been updated since the - # cutoff, but the content of the open access links hasn't changed, we won't mirror - # them again. - cutoff = datetime_utc(2013, 1, 2, 16, 56, 40) - - http.queue_response( - epub10441["url"], 304, media_type=Representation.EPUB_MEDIA_TYPE - ) - - http.queue_response( - epub10441_cover["url"], 304, media_type=Representation.SVG_MEDIA_TYPE - ) - - http.queue_response( - epub10557["url"], 304, media_type=Representation.EPUB_MEDIA_TYPE - ) - - imported_editions, pools, works, failures = importer.import_from_feed( - data.content_server_mini_feed - ) - - assert {e_10441, e_10557} == set(imported_editions) - - # Nothing new has been uploaded - assert 2 == len(s3_for_books.uploaded) - - # If the content has changed, it will be mirrored again. 
- epub10441_updated = epub10441.copy() - epub10441_updated["content"] = b"I am a new version of 10441.epub.images" - http.queue_response(**epub10441_updated) - http.queue_response(**epub10441_cover) - epub10557_updated = epub10557.copy() - epub10557_updated["content"] = b"I am a new version of 10557.epub.images" - http.queue_response(**epub10557_updated) - - imported_editions, pools, works, failures = importer.import_from_feed( - data.content_server_mini_feed - ) - - assert {e_10441, e_10557} == set(imported_editions) - assert 4 == len(s3_for_books.uploaded) - assert epub10441_updated["content"] in s3_for_books.content[-2:] - assert svg_bytes == s3_for_covers.content.pop() - assert epub10557_updated["content"] in s3_for_books.content[-2:] - - def test_content_resources_not_mirrored_on_import_if_no_collection( - self, - opds_importer_fixture: OPDSImporterFixture, - http, - svg, - epub10557_cover_broken, - epub10557_cover_working, - epub10441_cover, - ): - data, transaction, session = ( - opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, - ) - - # If you don't provide a Collection to the OPDSImporter, no - # LicensePools are created for the book and content resources - # (like EPUB editions of the book) are not mirrored. Only - # metadata resources (like the book cover) are mirrored. - - # The request to http://root/broken-cover-image - # will result in a 404 error, and the image will not be mirrored. - http.queue_response(**epub10557_cover_broken) - http.queue_response(**epub10557_cover_working) - http.queue_response(**epub10441_cover) - - s3 = MockS3Uploader() - mirrors = dict(covers_mirror=s3) - - importer = OPDSImporter( - session, collection=None, mirrors=mirrors, http_get=http.do_get - ) - - imported_editions, pools, works, failures = importer.import_from_feed( - data.content_server_mini_feed, feed_url="http://root" - ) - - # No LicensePools were created, since no Collection was - # provided. - assert [] == pools - - # The import process requested each remote resource in the - # order they appeared in the OPDS feed. The EPUB resources - # were not requested because no Collection was provided to the - # importer. The thumbnail image was not requested, since we - # were going to make our own thumbnail anyway. 
- assert len(http.requests) == 3 - assert set(http.requests) == { - epub10441_cover["url"], - epub10557_cover_broken["url"], - epub10557_cover_working["url"], - } - - class TestOPDSImportMonitor: def test_constructor(self, db: DatabaseTransactionFixture): session = db.session diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index d8378b51bb..68009f345e 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -18,8 +18,7 @@ from core.config import CannotLoadConfiguration, Configuration, ConfigurationConstants from core.external_search import Filter, MockExternalSearchIndex from core.lane import Lane, WorkList -from core.metadata_layer import LinkData, TimestampData -from core.mirror import MirrorUploader +from core.metadata_layer import TimestampData from core.model import ( CachedFeed, Collection, @@ -28,11 +27,9 @@ CoverageRecord, DataSource, ExternalIntegration, - Hyperlink, Identifier, Library, LicensePool, - RightsStatus, Timestamp, Work, WorkCoverageRecord, @@ -40,20 +37,17 @@ get_one_or_create, ) from core.model.classification import Classification, Subject -from core.model.configuration import ExternalIntegrationLink from core.model.customlist import CustomList from core.model.devicetokens import DeviceToken, DeviceTokenTypes from core.model.patron import Patron from core.monitor import CollectionMonitor, Monitor, ReaperMonitor from core.opds_import import OPDSImportMonitor from core.overdrive import OverdriveAdvantageAccount -from core.s3 import MinIOUploader, MinIOUploaderConfiguration, S3Uploader from core.scripts import ( AddClassificationScript, CheckContributorNamesInDB, CollectionArgumentsScript, CollectionInputScript, - CollectionType, ConfigureCollectionScript, ConfigureIntegrationScript, ConfigureLaneScript, @@ -68,7 +62,6 @@ LibraryInputScript, ListCollectionMetadataIdentifiersScript, LoanNotificationsScript, - MirrorResourcesScript, MockStdin, OPDSImportScript, PatronInputScript, @@ -2006,347 +1999,6 @@ def expected(c): assert "2 collections found.\n" in output -class TestMirrorResourcesScript: - def test_do_run(self, db: DatabaseTransactionFixture): - has_uploader = db.collection() - mock_uploader = object() - - class Mock(MirrorResourcesScript): - - processed = [] - - def collections_with_uploader(self, collections, collection_type): - # Pretend that `has_uploader` is the only Collection - # with an uploader. - for collection in collections: - if collection == has_uploader: - yield collection, mock_uploader - - def process_collection(self, collection, policy): - self.processed.append((collection, policy)) - - script = Mock(db.session) - - # If there are no command-line arguments, process_collection - # is called on every Collection in the system that is okayed - # by collections_with_uploader. - script.do_run(cmd_args=[]) - processed = script.processed.pop() - assert (has_uploader, mock_uploader) == processed - assert [] == script.processed - - # If a Collection is named on the command line, - # process_collection is called on that Collection _if_ it has - # an uploader. 
- args = ["--collection=%s" % db.default_collection().name] - script.do_run(cmd_args=args) - assert [] == script.processed - - script.do_run(cmd_args=["--collection=%s" % has_uploader.name]) - processed = script.processed.pop() - assert (has_uploader, mock_uploader) == processed - - @pytest.mark.parametrize( - "name,collection_type,book_mirror_type,protocol,uploader_class,settings", - [ - ( - "containing_open_access_books_with_s3_uploader", - CollectionType.OPEN_ACCESS, - ExternalIntegrationLink.OPEN_ACCESS_BOOKS, - ExternalIntegration.S3, - S3Uploader, - None, - ), - ( - "containing_protected_access_books_with_s3_uploader", - CollectionType.PROTECTED_ACCESS, - ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS, - ExternalIntegration.S3, - S3Uploader, - None, - ), - ( - "containing_open_access_books_with_minio_uploader", - CollectionType.OPEN_ACCESS, - ExternalIntegrationLink.OPEN_ACCESS_BOOKS, - ExternalIntegration.MINIO, - MinIOUploader, - {MinIOUploaderConfiguration.ENDPOINT_URL: "http://localhost"}, - ), - ( - "containing_protected_access_books_with_minio_uploader", - CollectionType.PROTECTED_ACCESS, - ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS, - ExternalIntegration.MINIO, - MinIOUploader, - {MinIOUploaderConfiguration.ENDPOINT_URL: "http://localhost"}, - ), - ], - ) - def test_collections( - self, - db, - name, - collection_type, - book_mirror_type, - protocol, - uploader_class, - settings, - ): - class Mock(MirrorResourcesScript): - - mock_policy = object() - - @classmethod - def replacement_policy(cls, uploader): - cls.replacement_policy_called_with = uploader - return cls.mock_policy - - script = Mock() - - # The default collection does not have an uploader. - # This new collection does. - has_uploader = db.collection() - mirror = db.external_integration(protocol, ExternalIntegration.STORAGE_GOAL) - - if settings: - for key, value in settings.items(): - mirror.setting(key).value = value - - integration_link = db.external_integration_link( - integration=has_uploader._external_integration, - other_integration=mirror, - purpose=ExternalIntegrationLink.COVERS, - ) - - # Calling collections_with_uploader will do nothing for collections - # that don't have an uploader. It will make a MirrorUploader for - # the other collection, pass it into replacement_policy, - # and yield the result. - result = script.collections_with_uploader( - [ - db.default_collection(), - has_uploader, - db.default_collection(), - ], - collection_type, - ) - - [(collection, policy)] = result - assert has_uploader == collection - assert Mock.mock_policy == policy - # The mirror uploader was associated with a purpose of "covers", so we only - # expect to have one MirrorUploader. - assert Mock.replacement_policy_called_with[book_mirror_type] == None - assert isinstance( - Mock.replacement_policy_called_with[ExternalIntegrationLink.COVERS], - MirrorUploader, - ) - - # Add another storage for books. - another_mirror = db.external_integration( - protocol, ExternalIntegration.STORAGE_GOAL - ) - - integration_link = db.external_integration_link( - integration=has_uploader._external_integration, - other_integration=another_mirror, - purpose=book_mirror_type, - ) - - result = script.collections_with_uploader( - [ - db.default_collection(), - has_uploader, - db.default_collection(), - ], - collection_type, - ) - - [(collection, policy)] = result - assert has_uploader == collection - assert Mock.mock_policy == policy - # There should be two MirrorUploaders, one for each purpose. 
- assert isinstance( - Mock.replacement_policy_called_with[ExternalIntegrationLink.COVERS], - uploader_class, - ) - assert isinstance( - Mock.replacement_policy_called_with[book_mirror_type], uploader_class - ) - - def test_replacement_policy(self): - uploader = object() - p = MirrorResourcesScript.replacement_policy(uploader) - assert uploader == p.mirrors - assert True == p.link_content - assert True == p.even_if_not_apparently_updated - assert False == p.rights - - def test_process_collection(self, db: DatabaseTransactionFixture): - class MockScript(MirrorResourcesScript): - process_item_called_with = [] - - def process_item(self, collection, link, policy): - self.process_item_called_with.append((collection, link, policy)) - - # Mock the Hyperlink.unmirrored method - link1 = object() - link2 = object() - - def unmirrored(collection): - assert collection == db.default_collection() - yield link1 - yield link2 - - script = MockScript(db.session) - policy = object() - script.process_collection(db.default_collection(), policy, unmirrored) - - # Process_collection called unmirrored() and then called process_item - # on every item yielded by unmirrored() - call1, call2 = script.process_item_called_with - assert (db.default_collection(), link1, policy) == call1 - assert (db.default_collection(), link2, policy) == call2 - - def test_derive_rights_status(self, db: DatabaseTransactionFixture): - """Test our ability to determine the rights status of a Resource, - in the absence of immediate information from the server. - """ - m = MirrorResourcesScript.derive_rights_status - work = db.work(with_open_access_download=True) - [pool] = work.license_pools - [lpdm] = pool.delivery_mechanisms - resource = lpdm.resource - - expect = lpdm.rights_status.uri - - # Given the LicensePool, we can figure out the Resource's - # rights status based on what was previously recovered. This lets - # us know whether it's okay to mirror that Resource. - assert expect == m(pool, resource) - - # In theory, a Resource can be associated with several - # LicensePoolDeliveryMechanisms. That's why a LicensePool is - # necessary -- to see which LicensePoolDeliveryMechanism we're - # looking at. - assert None == m(None, resource) - - # If there's no Resource-specific information, but a - # LicensePool has only one rights URI among all of its - # LicensePoolDeliveryMechanisms, then we can assume all Resources - # for that LicensePool use that same set of rights. - w2 = db.work(with_license_pool=True) - [pool2] = w2.license_pools - assert pool2.delivery_mechanisms[0].rights_status.uri == m(pool2, None) - - # If there's more than one possibility, or the LicensePool has - # no LicensePoolDeliveryMechanisms at all, then we just don't - # know. - pool2.set_delivery_mechanism( - content_type="text/plain", drm_scheme=None, rights_uri=RightsStatus.CC_BY_ND - ) - assert None == m(pool2, None) - - pool2.delivery_mechanisms = [] - assert None == m(pool2, None) - - def test_process_item(self, db: DatabaseTransactionFixture): - """Test the code that actually sets up the mirror operation.""" - # Every time process_item() is called, it's either going to ask - # this thing to mirror the item, or it's going to decide not to. 
- class MockMirrorUtility: - def __init__(self): - self.mirrored = [] - - def mirror_link(self, **kwargs): - self.mirrored.append(kwargs) - - mirror = MockMirrorUtility() - - class MockScript(MirrorResourcesScript): - MIRROR_UTILITY = mirror - RIGHTS_STATUS = None - - def derive_rights_status(self, license_pool, resource): - """Always return the same rights status information. - To start out, act like no rights information is available. - """ - self.derive_rights_status_called_with = (license_pool, resource) - return self.RIGHTS_STATUS - - # Resource and Hyperlink are a pain to use for real, so here - # are some cheap mocks. - class MockResource: - def __init__(self, url): - self.url = url - - class MockLink: - def __init__(self, rel, href, identifier): - self.rel = rel - self.resource = MockResource(href) - self.identifier = identifier - - script = MockScript(db.session) - m = script.process_item - - # If we can't tie the Hyperlink to a LicensePool in the given - # Collection, no upload happens. (This shouldn't happen - # because Hyperlink.unmirrored only finds Hyperlinks - # associated with Identifiers licensed through a Collection.) - identifier = db.identifier() - policy = object() - download_link = MockLink( - Hyperlink.OPEN_ACCESS_DOWNLOAD, db.fresh_url(), identifier - ) - db.default_collection().data_source = DataSource.GUTENBERG - m(db.default_collection(), download_link, policy) - assert [] == mirror.mirrored - - # This HyperLink does match a LicensePool, but it's not - # in the collection we're mirroring, so mirroring it might not be - # appropriate. - work = db.work( - with_open_access_download=True, collection=db.default_collection() - ) - pool = work.license_pools[0] - download_link.identifier = pool.identifier - wrong_collection = db.collection() - wrong_collection.data_source = DataSource.GUTENBERG - m(wrong_collection, download_link, policy) - assert [] == mirror.mirrored - - # For "open-access" downloads of actual books, if we can't - # determine the actual rights status of the book, then we - # don't do anything. - m(db.default_collection(), download_link, policy) - assert [] == mirror.mirrored - assert (pool, download_link.resource) == script.derive_rights_status_called_with - - # If we _can_ determine the rights status, a mirror attempt is made. - script.RIGHTS_STATUS = object() - m(db.default_collection(), download_link, policy) - attempt = mirror.mirrored.pop() - assert policy == attempt["policy"] - assert pool.data_source == attempt["data_source"] - assert pool == attempt["model_object"] - assert download_link == attempt["link_obj"] - - link = attempt["link"] - assert isinstance(link, LinkData) - assert download_link.resource.url == link.href - - # For other types of links, we rely on fair use, so the "rights - # status" doesn't matter. 
- script.RIGHTS_STATUS = None - thumb_link = MockLink( - Hyperlink.THUMBNAIL_IMAGE, db.fresh_url(), pool.identifier - ) - m(db.default_collection(), thumb_link, policy) - attempt = mirror.mirrored.pop() - assert thumb_link.resource.url == attempt["link"].href - - class TestRebuildSearchIndexScript: def test_do_run(self, db: DatabaseTransactionFixture): class MockSearchIndex: From ae8d048eb928b8442a233f39166149b5d09dd64d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Sep 2023 17:56:16 +0000 Subject: [PATCH 035/262] Bump pillow from 10.0.0 to 10.0.1 (#1384) --- poetry.lock | 112 ++++++++++++++++++++++++++-------------------------- 1 file changed, 55 insertions(+), 57 deletions(-) diff --git a/poetry.lock b/poetry.lock index b2ac521785..c42e7a7699 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2473,67 +2473,65 @@ uritemplate = ">=3.0.1,<5.0.0" [[package]] name = "pillow" -version = "10.0.0" +version = "10.0.1" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f62406a884ae75fb2f818694469519fb685cc7eaff05d3451a9ebe55c646891"}, - {file = "Pillow-10.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5db32e2a6ccbb3d34d87c87b432959e0db29755727afb37290e10f6e8e62614"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf4392b77bdc81f36e92d3a07a5cd072f90253197f4a52a55a8cec48a12483b"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:520f2a520dc040512699f20fa1c363eed506e94248d71f85412b625026f6142c"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:8c11160913e3dd06c8ffdb5f233a4f254cb449f4dfc0f8f4549eda9e542c93d1"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a74ba0c356aaa3bb8e3eb79606a87669e7ec6444be352870623025d75a14a2bf"}, - {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5d0dae4cfd56969d23d94dc8e89fb6a217be461c69090768227beb8ed28c0a3"}, - {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22c10cc517668d44b211717fd9775799ccec4124b9a7f7b3635fc5386e584992"}, - {file = "Pillow-10.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:dffe31a7f47b603318c609f378ebcd57f1554a3a6a8effbc59c3c69f804296de"}, - {file = "Pillow-10.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:9fb218c8a12e51d7ead2a7c9e101a04982237d4855716af2e9499306728fb485"}, - {file = "Pillow-10.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d35e3c8d9b1268cbf5d3670285feb3528f6680420eafe35cccc686b73c1e330f"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ed64f9ca2f0a95411e88a4efbd7a29e5ce2cea36072c53dd9d26d9c76f753b3"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6eb5502f45a60a3f411c63187db83a3d3107887ad0d036c13ce836f8a36f1d"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c1fbe7621c167ecaa38ad29643d77a9ce7311583761abf7836e1510c580bf3dd"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cd25d2a9d2b36fcb318882481367956d2cf91329f6892fe5d385c346c0649629"}, - {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, - {file = 
"Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, - {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, - {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, - {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, - {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce543ed15570eedbb85df19b0a1a7314a9c8141a36ce089c0a894adbfccb4568"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:685ac03cc4ed5ebc15ad5c23bc555d68a87777586d970c2c3e216619a5476223"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d72e2ecc68a942e8cf9739619b7f408cc7b272b279b56b2c83c6123fcfa5cdff"}, - {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, - {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, - {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, - {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, - {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, - {file = "Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f07ea8d2f827d7d2a49ecf1639ec02d75ffd1b88dcc5b3a61bbb37a8759ad8d"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:040586f7d37b34547153fa383f7f9aed68b738992380ac911447bb78f2abe530"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f88a0b92277de8e3ca715a0d79d68dc82807457dae3ab8699c758f07c20b3c51"}, - {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c7cf14a27b0d6adfaebb3ae4153f1e516df54e47e42dcc073d7b3d76111a8d86"}, - {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3400aae60685b06bb96f99a21e1ada7bc7a413d5f49bce739828ecd9391bb8f7"}, - {file = "Pillow-10.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbc02381779d412145331789b40cc7b11fdf449e5d94f6bc0b080db0a56ea3f0"}, - {file = "Pillow-10.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9211e7ad69d7c9401cfc0e23d49b69ca65ddd898976d660a2fa5904e3d7a9baa"}, - {file = "Pillow-10.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:faaf07ea35355b01a35cb442dd950d8f1bb5b040a7787791a535de13db15ed90"}, - {file = 
"Pillow-10.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f72a021fbb792ce98306ffb0c348b3c9cb967dce0f12a49aa4c3d3fdefa967"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f7c16705f44e0504a3a2a14197c1f0b32a95731d251777dcb060aa83022cb2d"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:76edb0a1fa2b4745fb0c99fb9fb98f8b180a1bbceb8be49b087e0b21867e77d3"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:368ab3dfb5f49e312231b6f27b8820c823652b7cd29cfbd34090565a015e99ba"}, - {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:608bfdee0d57cf297d32bcbb3c728dc1da0907519d1784962c5f0c68bb93e5a3"}, - {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5c6e3df6bdd396749bafd45314871b3d0af81ff935b2d188385e970052091017"}, - {file = "Pillow-10.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:7be600823e4c8631b74e4a0d38384c73f680e6105a7d3c6824fcf226c178c7e6"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:92be919bbc9f7d09f7ae343c38f5bb21c973d2576c1d45600fce4b74bafa7ac0"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8182b523b2289f7c415f589118228d30ac8c355baa2f3194ced084dac2dbba"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:38250a349b6b390ee6047a62c086d3817ac69022c127f8a5dc058c31ccef17f3"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88af2003543cc40c80f6fca01411892ec52b11021b3dc22ec3bc9d5afd1c5334"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c189af0545965fa8d3b9613cfdb0cd37f9d71349e0f7750e1fd704648d475ed2"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce7b031a6fc11365970e6a5686d7ba8c63e4c1cf1ea143811acbb524295eabed"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db24668940f82321e746773a4bc617bfac06ec831e5c88b643f91f122a785684"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:efe8c0681042536e0d06c11f48cebe759707c9e9abf880ee213541c5b46c5bf3"}, - {file = "Pillow-10.0.0.tar.gz", hash = "sha256:9c82b5b3e043c7af0d95792d0d20ccf68f61a1fec6b3530e718b688422727396"}, + {file = "Pillow-10.0.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:8f06be50669087250f319b706decf69ca71fdecd829091a37cc89398ca4dc17a"}, + {file = "Pillow-10.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50bd5f1ebafe9362ad622072a1d2f5850ecfa44303531ff14353a4059113b12d"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6a90167bcca1216606223a05e2cf991bb25b14695c518bc65639463d7db722d"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11c9102c56ffb9ca87134bd025a43d2aba3f1155f508eff88f694b33a9c6d19"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:186f7e04248103482ea6354af6d5bcedb62941ee08f7f788a1c7707bc720c66f"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0462b1496505a3462d0f35dc1c4d7b54069747d65d00ef48e736acda2c8cbdff"}, + {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d889b53ae2f030f756e61a7bff13684dcd77e9af8b10c6048fb2c559d6ed6eaf"}, + {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:552912dbca585b74d75279a7570dd29fa43b6d93594abb494ebb31ac19ace6bd"}, + {file = "Pillow-10.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:787bb0169d2385a798888e1122c980c6eff26bf941a8ea79747d35d8f9210ca0"}, + {file = "Pillow-10.0.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fd2a5403a75b54661182b75ec6132437a181209b901446ee5724b589af8edef1"}, + {file = "Pillow-10.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d7e91b4379f7a76b31c2dda84ab9e20c6220488e50f7822e59dac36b0cd92b1"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e9adb3f22d4c416e7cd79b01375b17159d6990003633ff1d8377e21b7f1b21"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93139acd8109edcdeffd85e3af8ae7d88b258b3a1e13a038f542b79b6d255c54"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:92a23b0431941a33242b1f0ce6c88a952e09feeea9af4e8be48236a68ffe2205"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cbe68deb8580462ca0d9eb56a81912f59eb4542e1ef8f987405e35a0179f4ea2"}, + {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:522ff4ac3aaf839242c6f4e5b406634bfea002469656ae8358644fc6c4856a3b"}, + {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:84efb46e8d881bb06b35d1d541aa87f574b58e87f781cbba8d200daa835b42e1"}, + {file = "Pillow-10.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:898f1d306298ff40dc1b9ca24824f0488f6f039bc0e25cfb549d3195ffa17088"}, + {file = "Pillow-10.0.1-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:bcf1207e2f2385a576832af02702de104be71301c2696d0012b1b93fe34aaa5b"}, + {file = "Pillow-10.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d6c9049c6274c1bb565021367431ad04481ebb54872edecfcd6088d27edd6ed"}, + {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28444cb6ad49726127d6b340217f0627abc8732f1194fd5352dec5e6a0105635"}, + {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de596695a75496deb3b499c8c4f8e60376e0516e1a774e7bc046f0f48cd620ad"}, + {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:2872f2d7846cf39b3dbff64bc1104cc48c76145854256451d33c5faa55c04d1a"}, + {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4ce90f8a24e1c15465048959f1e94309dfef93af272633e8f37361b824532e91"}, + {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ee7810cf7c83fa227ba9125de6084e5e8b08c59038a7b2c9045ef4dde61663b4"}, + {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1be1c872b9b5fcc229adeadbeb51422a9633abd847c0ff87dc4ef9bb184ae08"}, + {file = "Pillow-10.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:98533fd7fa764e5f85eebe56c8e4094db912ccbe6fbf3a58778d543cadd0db08"}, + {file = "Pillow-10.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:764d2c0daf9c4d40ad12fbc0abd5da3af7f8aa11daf87e4fa1b834000f4b6b0a"}, + {file = "Pillow-10.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fcb59711009b0168d6ee0bd8fb5eb259c4ab1717b2f538bbf36bacf207ef7a68"}, + {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:697a06bdcedd473b35e50a7e7506b1d8ceb832dc238a336bd6f4f5aa91a4b500"}, + {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f665d1e6474af9f9da5e86c2a3a2d2d6204e04d5af9c06b9d42afa6ebde3f21"}, + 
{file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:2fa6dd2661838c66f1a5473f3b49ab610c98a128fc08afbe81b91a1f0bf8c51d"}, + {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:3a04359f308ebee571a3127fdb1bd01f88ba6f6fb6d087f8dd2e0d9bff43f2a7"}, + {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:723bd25051454cea9990203405fa6b74e043ea76d4968166dfd2569b0210886a"}, + {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:71671503e3015da1b50bd18951e2f9daf5b6ffe36d16f1eb2c45711a301521a7"}, + {file = "Pillow-10.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:44e7e4587392953e5e251190a964675f61e4dae88d1e6edbe9f36d6243547ff3"}, + {file = "Pillow-10.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:3855447d98cced8670aaa63683808df905e956f00348732448b5a6df67ee5849"}, + {file = "Pillow-10.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed2d9c0704f2dc4fa980b99d565c0c9a543fe5101c25b3d60488b8ba80f0cce1"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5bb289bb835f9fe1a1e9300d011eef4d69661bb9b34d5e196e5e82c4cb09b37"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0d3e54ab1df9df51b914b2233cf779a5a10dfd1ce339d0421748232cea9876"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:2cc6b86ece42a11f16f55fe8903595eff2b25e0358dec635d0a701ac9586588f"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ca26ba5767888c84bf5a0c1a32f069e8204ce8c21d00a49c90dabeba00ce0145"}, + {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f0b4b06da13275bc02adfeb82643c4a6385bd08d26f03068c2796f60d125f6f2"}, + {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bc2e3069569ea9dbe88d6b8ea38f439a6aad8f6e7a6283a38edf61ddefb3a9bf"}, + {file = "Pillow-10.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8b451d6ead6e3500b6ce5c7916a43d8d8d25ad74b9102a629baccc0808c54971"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:32bec7423cdf25c9038fef614a853c9d25c07590e1a870ed471f47fb80b244db"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cf63d2c6928b51d35dfdbda6f2c1fddbe51a6bc4a9d4ee6ea0e11670dd981e"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f6d3d4c905e26354e8f9d82548475c46d8e0889538cb0657aa9c6f0872a37aa4"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:847e8d1017c741c735d3cd1883fa7b03ded4f825a6e5fcb9378fd813edee995f"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7f771e7219ff04b79e231d099c0a28ed83aa82af91fd5fa9fdb28f5b8d5addaf"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459307cacdd4138edee3875bbe22a2492519e060660eaf378ba3b405d1c66317"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b059ac2c4c7a97daafa7dc850b43b2d3667def858a4f112d1aa082e5c3d6cf7d"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6caf3cd38449ec3cd8a68b375e0c6fe4b6fd04edb6c9766b55ef84a6e8ddf2d"}, + {file = "Pillow-10.0.1.tar.gz", hash = "sha256:d72967b06be9300fed5cfbc8b5bafceec48bf7cdc7dab66b1d2549035287191d"}, ] [package.extras] From 69e5203dca1a466823a2c00ca300645906e4f286 Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Fri, 15 Sep 2023 
15:13:48 -0400 Subject: [PATCH 036/262] Add patron "Basic Token" authentication feature switch. (PP-431) (#1383) --- README.md | 11 ++++ api/authentication/basic_token.py | 4 +- api/authenticator.py | 4 +- core/config.py | 26 +++++++++- core/util/__init__.py | 33 ++++++++++++ tests/api/test_authenticator.py | 86 +++++++++++++++++++++---------- tests/api/test_config.py | 38 ++++++++++++++ tests/core/util/test_util.py | 63 ++++++++++++++++++++++ 8 files changed, 236 insertions(+), 29 deletions(-) diff --git a/README.md b/README.md index e379626402..3b6ee6d953 100644 --- a/README.md +++ b/README.md @@ -161,6 +161,17 @@ To let the application know which database to use, set the `SIMPLIFIED_PRODUCTIO export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@localhost:5432/circ" ``` +##### Patron `Basic Token` authentication + +Enables/disables patron "basic token" authentication through setting the designated environment variable to any +(case-insensitive) value of "true"/"yes"/"on"/"1" or "false"/"no"/"off"/"0", respectively. +If the value is the empty string or the variable is not present in the environment, it is disabled by default. +- `SIMPLIFIED_ENABLE_BASIC_TOKEN_AUTH` + +```sh +export SIMPLIFIED_ENABLE_BASIC_TOKEN_AUTH=true +``` + ##### Firebase Cloud Messaging For Firebase Cloud Messaging (FCM) support (e.g., for notifications), `one` (and only one) of the following should be set: diff --git a/api/authentication/basic_token.py b/api/authentication/basic_token.py index 19b59e73c5..6e445c8aeb 100644 --- a/api/authentication/basic_token.py +++ b/api/authentication/basic_token.py @@ -23,6 +23,8 @@ class BasicTokenAuthenticationProvider(AuthenticationProvider): It is a companion to the basic authentication, and has no meaning without it. """ + FLOW_TYPE = "http://thepalaceproject.org/authtype/basic-token" + def __init__( self, _db: Session, @@ -105,7 +107,7 @@ def remote_patron_lookup(self, _db): @property def flow_type(self) -> str: - return "http://thepalaceproject.org/authtype/basic-token" + return self.FLOW_TYPE @classmethod def description(cls) -> str: diff --git a/api/authenticator.py b/api/authenticator.py index f030bb26dc..11d655b0e1 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -401,7 +401,9 @@ def register_basic_auth_provider( ): raise CannotLoadConfiguration("Two basic auth providers configured") self.basic_auth_provider = provider - if self.library is not None: + # TODO: We can remove the configuration test once + # basic token authentication is fully deployed. + if self.library is not None and Configuration.basic_token_auth_is_enabled(): self.access_token_authentication_provider = ( BasicTokenAuthenticationProvider( self._db, self.library, self.basic_auth_provider diff --git a/core/config.py b/core/config.py index 5a9cf07b46..9744a2ece1 100644 --- a/core/config.py +++ b/core/config.py @@ -11,7 +11,7 @@ # from this module, alongside CannotLoadConfiguration. from core.exceptions import IntegrationException -from .util import LanguageCodes +from .util import LanguageCodes, ansible_boolean from .util.datetime_helpers import to_utc, utc_now @@ -40,6 +40,10 @@ class Configuration(ConfigurationConstants): DATABASE_TEST_ENVIRONMENT_VARIABLE = "SIMPLIFIED_TEST_DATABASE" DATABASE_PRODUCTION_ENVIRONMENT_VARIABLE = "SIMPLIFIED_PRODUCTION_DATABASE" + # TODO: We can remove this variable once basic token authentication is fully deployed. + # Patron token authentication enabled switch. 
+ BASIC_TOKEN_AUTH_ENABLED_ENVVAR = "SIMPLIFIED_ENABLE_BASIC_TOKEN_AUTH" + # Environment variables for Firebase Cloud Messaging (FCM) service account key FCM_CREDENTIALS_FILE_ENVIRONMENT_VARIABLE = "SIMPLIFIED_FCM_CREDENTIALS_FILE" FCM_CREDENTIALS_JSON_ENVIRONMENT_VARIABLE = "SIMPLIFIED_FCM_CREDENTIALS_JSON" @@ -207,6 +211,26 @@ def database_url(cls): logging.info("Connecting to database: %s" % url_obj.__to_string__()) return url + # TODO: We can remove this method once basic token authentication is fully deployed. + @classmethod + def basic_token_auth_is_enabled(cls) -> bool: + """Is basic token authentication enabled? + + Return False, if the variable is unset or is an empty string. + Raises CannotLoadConfiguration, if the setting is invalid. + :raise CannotLoadConfiguration: If the setting contains an unsupported value. + """ + try: + return ansible_boolean( + os.environ.get(cls.BASIC_TOKEN_AUTH_ENABLED_ENVVAR), + label=cls.BASIC_TOKEN_AUTH_ENABLED_ENVVAR, + default=False, + ) + except (TypeError, ValueError) as e: + raise CannotLoadConfiguration( + f"Invalid value for {cls.BASIC_TOKEN_AUTH_ENABLED_ENVVAR} environment variable." + ) from e + @classmethod def fcm_credentials(cls) -> Dict[str, str]: """Returns a dictionary containing Firebase Cloud Messaging credentials. diff --git a/core/util/__init__.py b/core/util/__init__.py index eb8cee21e5..c3fd91159e 100644 --- a/core/util/__init__.py +++ b/core/util/__init__.py @@ -582,6 +582,39 @@ def chunks(lst, chunk_size, start_index=0): yield lst[i : i + chunk_size] +def ansible_boolean( + value: Optional[str | bool], + label: Optional[str] = None, + default: Optional[bool] = None, +) -> bool: + """Map Ansible "truthy" and "falsy" values to a Python boolean. + + :param value: The value from which to map. + :param label: Optional name or label associated with the value. + :param default: Default result if value is empty string or None. + """ + _value_label = f"Value of '{label}'" if label else "Value" + if default is not None and not isinstance(default, bool): + raise TypeError("'default' must be a boolean, when specified.") + if isinstance(value, bool): + return value + if value is None or value == "": + if default is not None: + return default + raise ValueError( + f"{_value_label} must be non-null and non-empty if no default is specified." 
+ ) + if not isinstance(value, str): + raise TypeError(f"{_value_label} must be a string or boolean.") + + if value.upper() in ("TRUE", "T", "ON", "YES", "Y", "1"): + return True + if value.upper() in ("FALSE", "F", "OFF", "NO", "N", "0"): + return False + + raise ValueError(f"{_value_label} does not map to True or False.") + + class ValuesMeta(type): """Metaclass to allow operators on simple constants defining classes""" diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index 34c2a7a5ca..eed0516412 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -31,6 +31,7 @@ Keyboards, LibraryIdentifierRestriction, ) +from api.authentication.basic_token import BasicTokenAuthenticationProvider from api.authenticator import ( Authenticator, BaseSAMLAuthenticationProvider, @@ -924,12 +925,18 @@ def test_authenticated_patron_bearer( assert saml.authenticated_patron.call_count == 1 def test_authenticated_patron_bearer_access_token( - self, db: DatabaseTransactionFixture, mock_basic: MockBasicFixture + self, + db: DatabaseTransactionFixture, + mock_basic: MockBasicFixture, ): basic = mock_basic() - authenticator = LibraryAuthenticator( - _db=db.session, library=db.default_library(), basic_auth_provider=basic - ) + # TODO: We can remove this patch once basic token authentication is fully deployed. + with patch.object( + Configuration, "basic_token_auth_is_enabled", return_value=True + ): + authenticator = LibraryAuthenticator( + _db=db.session, library=db.default_library(), basic_auth_provider=basic + ) patron = db.patron() token = AccessTokenProvider.generate_token(db.session, patron, "pass") auth = Authorization(auth_type="bearer", token=token) @@ -951,44 +958,56 @@ def test_authenticated_patron_unsupported_mechanism( assert UNSUPPORTED_AUTHENTICATION_MECHANISM == problem def test_get_credential_from_header( - self, db: DatabaseTransactionFixture, mock_basic: MockBasicFixture + self, + db: DatabaseTransactionFixture, + mock_basic: MockBasicFixture, ): + def get_library_authenticator( + basic_auth_provider: BasicAuthenticationProvider | None, + ) -> LibraryAuthenticator: + # TODO: We can remove this patch once basic token authentication is fully deployed. + with patch.object( + Configuration, "basic_token_auth_is_enabled", return_value=True + ): + return LibraryAuthenticator( + _db=db.session, + library=db.default_library(), + basic_auth_provider=basic_auth_provider, + ) + basic = mock_basic() # We can pull the password out of a Basic Auth credential # if a Basic Auth authentication provider is configured. 
- authenticator = LibraryAuthenticator( - _db=db.session, - library=db.default_library(), - basic_auth_provider=basic, - ) + authenticator = get_library_authenticator(basic_auth_provider=basic) credential = Authorization(auth_type="basic", data=dict(password="foo")) assert "foo" == authenticator.get_credential_from_header(credential) # We can't pull the password out if no basic auth provider - authenticator = LibraryAuthenticator( - _db=db.session, - library=db.default_library(), - basic_auth_provider=None, - ) + authenticator = get_library_authenticator(basic_auth_provider=None) assert authenticator.get_credential_from_header(credential) is None - authenticator = LibraryAuthenticator( - _db=db.session, - library=db.default_library(), - basic_auth_provider=basic, - ) + authenticator = get_library_authenticator(basic_auth_provider=basic) patron = db.patron() token = AccessTokenProvider.generate_token(db.session, patron, "passworx") credential = Authorization(auth_type="bearer", token=token) assert authenticator.get_credential_from_header(credential) == "passworx" + @pytest.mark.parametrize( + "token_auth_enabled, auth_count", + [ + [True, 2], + [False, 1], + ], + ) def test_create_authentication_document( self, db: DatabaseTransactionFixture, mock_basic: MockBasicFixture, announcement_fixture: AnnouncementFixture, library_fixture: LibraryFixture, + token_auth_enabled: bool, + auth_count: int, ): class MockAuthenticator(LibraryAuthenticator): """Mock the _geographic_areas method.""" @@ -1003,11 +1022,16 @@ def _geographic_areas(cls, library): library_settings = library_fixture.settings(library) basic = mock_basic() library.name = "A Fabulous Library" - authenticator = MockAuthenticator( - _db=db.session, - library=library, - basic_auth_provider=basic, - ) + # TODO: We can remove this patch once basic token authentication is fully deployed. + with patch.object( + Configuration, "basic_token_auth_is_enabled" + ) as token_auth_enabled_method: + token_auth_enabled_method.return_value = token_auth_enabled + authenticator = MockAuthenticator( + _db=db.session, + library=library, + basic_auth_provider=basic, + ) def annotate_authentication_document(library, doc, url_for): doc["modified"] = "Kilroy was here" @@ -1103,7 +1127,17 @@ def annotate_authentication_document(library, doc, url_for): # The main thing we need to test is that the # authentication sub-documents are assembled properly and # placed in the right position. - [token_doc, basic_doc] = doc["authentication"] + # TODO: token doc will be here only when correct environment variable set to true. + # If basic token auth is enabled, then there should be two authentication + # mechanisms and the first should be for token auth. + authenticators = doc["authentication"] + assert auth_count > 0 + assert auth_count == len(authenticators) + # TODO: We can remove this `if` block/restructure once basic token authentication is fully deployed. 
+ if token_auth_enabled: + token_doc = authenticators[0] + assert BasicTokenAuthenticationProvider.FLOW_TYPE == token_doc["type"] + basic_doc = authenticators[auth_count - 1] expect_basic = basic.authentication_flow_document(db.session) assert expect_basic == basic_doc diff --git a/tests/api/test_config.py b/tests/api/test_config.py index 76a00f3bb5..ac50a16dc0 100644 --- a/tests/api/test_config.py +++ b/tests/api/test_config.py @@ -1,6 +1,7 @@ import json import os from collections import Counter +from contextlib import nullcontext as does_not_raise from unittest.mock import patch import pytest @@ -196,3 +197,40 @@ def test_fcm_credentials(self, notifications_files_fixture): match=r"Cannot parse value of FCM credential environment variable .* as JSON.", ): Configuration.fcm_credentials() + + @pytest.mark.parametrize( + "env_var_value, expected_result, raises_exception", + [ + ["true", True, False], + ["True", True, False], + [None, False, False], + ["", False, False], + ["false", False, False], + ["False", False, False], + ["3", None, True], + ["X", None, True], + ], + ) + @patch.object(os, "environ", new=dict()) + def test_basic_token_auth_is_enabled( + self, env_var_value, expected_result, raises_exception + ): + env_var = Configuration.BASIC_TOKEN_AUTH_ENABLED_ENVVAR + + # Simulate an unset environment variable with the `None` value. + if env_var_value is None: + del os.environ[env_var] + else: + os.environ[env_var] = env_var_value + + expected_exception = ( + pytest.raises( + CannotLoadConfiguration, + match=f"Invalid value for {env_var} environment variable.", + ) + if raises_exception + else does_not_raise() + ) + + with expected_exception: + assert expected_result == Configuration.basic_token_auth_is_enabled() diff --git a/tests/core/util/test_util.py b/tests/core/util/test_util.py index 2bb66cb2d6..4110d7cad7 100644 --- a/tests/core/util/test_util.py +++ b/tests/core/util/test_util.py @@ -14,6 +14,7 @@ MetadataSimilarity, MoneyUtility, TitleProcessor, + ansible_boolean, english_bigrams, fast_query_count, slugify, @@ -478,3 +479,65 @@ def test_parse( def test_parsing_bad_value_raises_valueerror(self, bad_value): with pytest.raises(ValueError): MoneyUtility.parse(bad_value) + + +class TestAnsibleBoolean: + _truthy_values = ["TRUE", "T", "ON", "YES", "Y", "1"] + _falsy_values = ["FALSE", "F", "OFF", "NO", "N", "0"] + # Values are case-insensitive. 
+ TRUTHY = [True] + _truthy_values + [v.lower() for v in _truthy_values] + FALSY = [False] + _falsy_values + [v.lower() for v in _falsy_values] + MISSING = [None, ""] + + @pytest.mark.parametrize( + "expected_result, example_values, default_value", + [ + [True, TRUTHY, False], + [True, TRUTHY, True], + [True, TRUTHY, None], + [True, MISSING, True], + [False, FALSY, False], + [False, FALSY, True], + [False, FALSY, None], + [False, MISSING, False], + ], + ) + def test_ansible_boolean_true_or_false( + self, expected_result, example_values, default_value + ): + for value in example_values: + assert expected_result == ansible_boolean(value, default=default_value) + assert expected_result == ansible_boolean( + value, default=default_value, label="some label" + ) + + @pytest.mark.parametrize( + "example_value, default_value, expected_exception, expected_message", + [ + ["TRUE", "", TypeError, "'default' must be a boolean, when specified"], + ["TRUE", "X", TypeError, "'default' must be a boolean, when specified"], + ["TRUE", 0, TypeError, "'default' must be a boolean, when specified"], + ["TRUE", "TRUE", TypeError, "'default' must be a boolean, when specified"], + [1, None, TypeError, "must be a string"], + [3.3, None, TypeError, "must be a string"], + ["!", None, ValueError, "does not map to True or False"], + ["x", None, ValueError, "does not map to True or False"], + [ + None, + None, + ValueError, + "must be non-null and non-empty if no default is specified", + ], + [ + "", + None, + ValueError, + "must be non-null and non-empty if no default is specified", + ], + ], + ) + def test_ansible_boolean_exceptions( + self, example_value, default_value, expected_exception, expected_message + ): + with pytest.raises(expected_exception, match=expected_message): + ansible_boolean(example_value, default=default_value) From 358caaa9ec517bfb1fc66b0c5fdfc9053f461975 Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Mon, 18 Sep 2023 09:09:59 -0400 Subject: [PATCH 037/262] Fix for `previous months` helper function. (PP- (#1385) * Add some clarifying tests. * Fix `previous_months` and add docstring. * Improve and clarify tests and docstring. * Remove redundant tests. --- core/util/datetime_helpers.py | 25 ++-- tests/core/util/test_datetime_helpers.py | 143 +++++++++++++++++++++-- 2 files changed, 149 insertions(+), 19 deletions(-) diff --git a/core/util/datetime_helpers.py b/core/util/datetime_helpers.py index a81db41a3f..4d236984cc 100644 --- a/core/util/datetime_helpers.py +++ b/core/util/datetime_helpers.py @@ -1,8 +1,8 @@ import datetime -import math from typing import Optional, Tuple import pytz +from dateutil.relativedelta import relativedelta # datetime helpers # As part of the python 3 conversion, the datetime object went through a @@ -63,12 +63,23 @@ def strptime_utc(date_string, format): return to_utc(datetime.datetime.strptime(date_string, format)) -def previous_months(number_of_months) -> Tuple[datetime.date, datetime.date]: +def previous_months(number_of_months: int) -> Tuple[datetime.date, datetime.date]: + """Calculate date boundaries for matching the specified previous number of months. + + :param number_of_months: The number of months in the interval. + :returns: Date interval boundaries, consisting of a 2-tuple of + `start` and `until` dates. + + These boundaries should be used such that matching dates are on the + half-closed/half-open interval `[start, until)` (i.e., start <= match < until). 
+ Only dates/datetimes greater than or equal to `start` and less than + (NOT less than or equal to) `until` should be considered as matching. + + `start` will be the first day of the designated month. + `until` will be the first day of the current month. + """ now = utc_now() - # Start from the first of number_of_months ago, where 0=12 - expected_year = now.year - math.floor(number_of_months / 12) - expected_month = ((now.month - number_of_months) % 12) or 12 - start = now.replace(year=expected_year, month=expected_month, day=1) - # Until the first of this month + start = now - relativedelta(months=number_of_months) + start = start.replace(day=1) until = now.replace(day=1) return start.date(), until.date() diff --git a/tests/core/util/test_datetime_helpers.py b/tests/core/util/test_datetime_helpers.py index 0230ad2f76..7b63adea61 100644 --- a/tests/core/util/test_datetime_helpers.py +++ b/tests/core/util/test_datetime_helpers.py @@ -138,19 +138,138 @@ def test_strptime_utc_error(self): class TestPreviousMonths: @pytest.mark.parametrize( - "start,until,months", + "current_datetime, expected_start, expected_until, months", [ - (datetime.date(2000, 6, 1), datetime.date(2000, 12, 1), 6), - (datetime.date(1999, 6, 1), datetime.date(2000, 12, 1), 18), - (datetime.date(1990, 6, 1), datetime.date(2000, 12, 1), 126), - (datetime.date(1999, 12, 1), datetime.date(2000, 12, 1), 12), + ( + datetime_utc(2000, 1, 15), + datetime.date(1999, 12, 1), + datetime.date(2000, 1, 1), + 1, + ), + ( + datetime_utc(2000, 1, 15), + datetime.date(1999, 11, 1), + datetime.date(2000, 1, 1), + 2, + ), + ( + datetime_utc(2000, 1, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 1, 1), + 3, + ), + ( + datetime_utc(2000, 2, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 2, 1), + 4, + ), + ( + datetime_utc(2000, 3, 31), + datetime.date(2000, 2, 1), + datetime.date(2000, 3, 1), + 1, + ), + ( + datetime_utc(2000, 3, 31), + datetime.date(1999, 10, 1), + datetime.date(2000, 3, 1), + 5, + ), + ( + datetime_utc(2000, 4, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 4, 1), + 6, + ), + ( + datetime_utc(2000, 5, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 5, 1), + 7, + ), + ( + datetime_utc(2000, 6, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 6, 1), + 8, + ), + ( + datetime_utc(2000, 7, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 7, 1), + 9, + ), + ( + datetime_utc(2000, 8, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 8, 1), + 10, + ), + ( + datetime_utc(2000, 9, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 9, 1), + 11, + ), + ( + datetime_utc(2000, 10, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 10, 1), + 12, + ), + ( + datetime_utc(2000, 11, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 11, 1), + 13, + ), + ( + datetime_utc(2000, 12, 15), + datetime.date(1999, 10, 1), + datetime.date(2000, 12, 1), + 14, + ), + ( + datetime_utc(2000, 12, 15), + datetime.date(2000, 6, 1), + datetime.date(2000, 12, 1), + 6, + ), + ( + datetime_utc(2000, 12, 15), + datetime.date(2000, 6, 1), + datetime.date(2000, 12, 1), + 6, + ), + ( + datetime_utc(2000, 12, 15), + datetime.date(1999, 6, 1), + datetime.date(2000, 12, 1), + 18, + ), + ( + datetime_utc(2000, 12, 15), + datetime.date(1990, 6, 1), + datetime.date(2000, 12, 1), + 126, + ), + ( + datetime_utc(2000, 12, 15), + datetime.date(1999, 12, 1), + datetime.date(2000, 12, 1), + 12, + ), ], ) - def test_boundaries(self, start, until, months): + def 
test_boundaries_at_different_current_times( + self, current_datetime, expected_start, expected_until, months + ): with patch("core.util.datetime_helpers.utc_now") as mock_utc_now: - mock_utc_now.return_value = datetime.datetime( - 2000, 12, 15, 0, 0, 0, 0, tzinfo=pytz.UTC - ) - actual_start, actual_until = previous_months(number_of_months=months) - assert actual_start == start - assert actual_until == until + mock_utc_now.return_value = current_datetime + computed_start, computed_until = previous_months(number_of_months=months) + assert computed_start == expected_start + assert computed_until == expected_until + # Both dates should be the 1st of the month. + assert 1 == computed_start.day + assert 1 == computed_until.day From edcc78eb1fb5e7942c3cfbc620ffdda75243a2c6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Sep 2023 18:38:35 +0000 Subject: [PATCH 038/262] Bump typing-extensions from 4.7.1 to 4.8.0 (#1387) --- poetry.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index c42e7a7699..bc4fb005d5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3977,13 +3977,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] From 38ffd8e7943ffe8830b14c5395dbc37cac7f27d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Sep 2023 18:39:09 +0000 Subject: [PATCH 039/262] Bump types-psycopg2 from 2.9.21.12 to 2.9.21.13 (#1386) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index bc4fb005d5..f33dbc3361 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3883,13 +3883,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.12" +version = "2.9.21.13" description = "Typing stubs for psycopg2" optional = false python-versions = "*" files = [ - {file = "types-psycopg2-2.9.21.12.tar.gz", hash = "sha256:a4bd86dd2a22a7e221f6a3681cc182cb3d76be67cb40d60da12e64547713c6fd"}, - {file = "types_psycopg2-2.9.21.12-py3-none-any.whl", hash = "sha256:5b84ccb7265713dd17e5529d57f2bf18f1fd455b6da674575b7e7b5d80501732"}, + {file = "types-psycopg2-2.9.21.13.tar.gz", hash = "sha256:662e6d7b03d89e3bac6aaf2892a97f2cca287f861e693dcefc96ca2e996642c5"}, + {file = "types_psycopg2-2.9.21.13-py3-none-any.whl", hash = "sha256:3ee7c32918d18c133bd0b4d92db7e1bd4c5f78a2ff217d74d4e3b6a09c95d506"}, ] [[package]] From e35b8476c513d82b0fda3344557ae252002db4c7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Sep 2023 18:39:30 +0000 Subject: [PATCH 040/262] Bump types-pytz from 2023.3.0.1 to 2023.3.1.0 (#1388) --- poetry.lock | 6 
+++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index f33dbc3361..883ed3de09 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3919,13 +3919,13 @@ files = [ [[package]] name = "types-pytz" -version = "2023.3.0.1" +version = "2023.3.1.0" description = "Typing stubs for pytz" optional = false python-versions = "*" files = [ - {file = "types-pytz-2023.3.0.1.tar.gz", hash = "sha256:1a7b8d4aac70981cfa24478a41eadfcd96a087c986d6f150d77e3ceb3c2bdfab"}, - {file = "types_pytz-2023.3.0.1-py3-none-any.whl", hash = "sha256:65152e872137926bb67a8fe6cc9cfd794365df86650c5d5fdc7b167b0f38892e"}, + {file = "types-pytz-2023.3.1.0.tar.gz", hash = "sha256:8e7d2198cba44a72df7628887c90f68a568e1445f14db64631af50c3cab8c090"}, + {file = "types_pytz-2023.3.1.0-py3-none-any.whl", hash = "sha256:a660a38ed86d45970603e4f3b4877c7ba947668386a896fb5d9589c17e7b8407"}, ] [[package]] From 7cad74471eb6ee2f2d94b18637118f843939e9fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Sep 2023 15:40:33 -0300 Subject: [PATCH 041/262] Bump pycryptodome from 3.18.0 to 3.19.0 (#1389) Bumps [pycryptodome](https://github.com/Legrandin/pycryptodome) from 3.18.0 to 3.19.0. - [Release notes](https://github.com/Legrandin/pycryptodome/releases) - [Changelog](https://github.com/Legrandin/pycryptodome/blob/master/Changelog.rst) - [Commits](https://github.com/Legrandin/pycryptodome/compare/v3.18.0...v3.19.0) --- updated-dependencies: - dependency-name: pycryptodome dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 66 ++++++++++++++++++++++++++--------------------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index 883ed3de09..385cbb4b58 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2752,43 +2752,43 @@ files = [ [[package]] name = "pycryptodome" -version = "3.18.0" +version = "3.19.0" description = "Cryptographic library for Python" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodome-3.18.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:d1497a8cd4728db0e0da3c304856cb37c0c4e3d0b36fcbabcc1600f18504fc54"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:928078c530da78ff08e10eb6cada6e0dff386bf3d9fa9871b4bbc9fbc1efe024"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:157c9b5ba5e21b375f052ca78152dd309a09ed04703fd3721dce3ff8ecced148"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:d20082bdac9218649f6abe0b885927be25a917e29ae0502eaf2b53f1233ce0c2"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e8ad74044e5f5d2456c11ed4cfd3e34b8d4898c0cb201c4038fe41458a82ea27"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win32.whl", hash = "sha256:62a1e8847fabb5213ccde38915563140a5b338f0d0a0d363f996b51e4a6165cf"}, - {file = "pycryptodome-3.18.0-cp27-cp27m-win_amd64.whl", hash = "sha256:16bfd98dbe472c263ed2821284118d899c76968db1a6665ade0c46805e6b29a4"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7a3d22c8ee63de22336679e021c7f2386f7fc465477d59675caa0e5706387944"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:78d863476e6bad2a592645072cc489bb90320972115d8995bcfbee2f8b209918"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b6a610f8bfe67eab980d6236fdc73bfcdae23c9ed5548192bb2d530e8a92780e"}, - {file = "pycryptodome-3.18.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:422c89fd8df8a3bee09fb8d52aaa1e996120eafa565437392b781abec2a56e14"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:9ad6f09f670c466aac94a40798e0e8d1ef2aa04589c29faa5b9b97566611d1d1"}, - {file = "pycryptodome-3.18.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:53aee6be8b9b6da25ccd9028caf17dcdce3604f2c7862f5167777b707fbfb6cb"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:10da29526a2a927c7d64b8f34592f461d92ae55fc97981aab5bbcde8cb465bb6"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f21efb8438971aa16924790e1c3dba3a33164eb4000106a55baaed522c261acf"}, - {file = "pycryptodome-3.18.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4944defabe2ace4803f99543445c27dd1edbe86d7d4edb87b256476a91e9ffa4"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:51eae079ddb9c5f10376b4131be9589a6554f6fd84f7f655180937f611cd99a2"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:83c75952dcf4a4cebaa850fa257d7a860644c70a7cd54262c237c9f2be26f76e"}, - {file = "pycryptodome-3.18.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:957b221d062d5752716923d14e0926f47670e95fead9d240fa4d4862214b9b2f"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win32.whl", hash = "sha256:795bd1e4258a2c689c0b1f13ce9684fa0dd4c0e08680dcf597cf9516ed6bc0f3"}, - {file = "pycryptodome-3.18.0-cp35-abi3-win_amd64.whl", hash = "sha256:b1d9701d10303eec8d0bd33fa54d44e67b8be74ab449052a8372f12a66f93fb9"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:cb1be4d5af7f355e7d41d36d8eec156ef1382a88638e8032215c215b82a4b8ec"}, - {file = "pycryptodome-3.18.0-pp27-pypy_73-win32.whl", hash = "sha256:fc0a73f4db1e31d4a6d71b672a48f3af458f548059aa05e83022d5f61aac9c08"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f022a4fd2a5263a5c483a2bb165f9cb27f2be06f2f477113783efe3fe2ad887b"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:363dd6f21f848301c2dcdeb3c8ae5f0dee2286a5e952a0f04954b82076f23825"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12600268763e6fec3cefe4c2dcdf79bde08d0b6dc1813887e789e495cb9f3403"}, - {file = "pycryptodome-3.18.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4604816adebd4faf8810782f137f8426bf45fee97d8427fa8e1e49ea78a52e2c"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:01489bbdf709d993f3058e2996f8f40fee3f0ea4d995002e5968965fa2fe89fb"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3811e31e1ac3069988f7a1c9ee7331b942e605dfc0f27330a9ea5997e965efb2"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4b967bb11baea9128ec88c3d02f55a3e338361f5e4934f5240afcb667fdaec"}, - {file = "pycryptodome-3.18.0-pp39-pypy39_pp73-win_amd64.whl", hash = 
"sha256:9c8eda4f260072f7dbe42f473906c659dcbadd5ae6159dfb49af4da1293ae380"}, - {file = "pycryptodome-3.18.0.tar.gz", hash = "sha256:c9adee653fc882d98956e33ca2c1fb582e23a8af7ac82fee75bd6113c55a0413"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3006c44c4946583b6de24fe0632091c2653d6256b99a02a3db71ca06472ea1e4"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:7c760c8a0479a4042111a8dd2f067d3ae4573da286c53f13cf6f5c53a5c1f631"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:08ce3558af5106c632baf6d331d261f02367a6bc3733086ae43c0f988fe042db"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45430dfaf1f421cf462c0dd824984378bef32b22669f2635cb809357dbaab405"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:a9bcd5f3794879e91970f2bbd7d899780541d3ff439d8f2112441769c9f2ccea"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win32.whl", hash = "sha256:190c53f51e988dceb60472baddce3f289fa52b0ec38fbe5fd20dd1d0f795c551"}, + {file = "pycryptodome-3.19.0-cp27-cp27m-win_amd64.whl", hash = "sha256:22e0ae7c3a7f87dcdcf302db06ab76f20e83f09a6993c160b248d58274473bfa"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:7822f36d683f9ad7bc2145b2c2045014afdbbd1d9922a6d4ce1cbd6add79a01e"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:05e33267394aad6db6595c0ce9d427fe21552f5425e116a925455e099fdf759a"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:829b813b8ee00d9c8aba417621b94bc0b5efd18c928923802ad5ba4cf1ec709c"}, + {file = "pycryptodome-3.19.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:fc7a79590e2b5d08530175823a242de6790abc73638cc6dc9d2684e7be2f5e49"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:542f99d5026ac5f0ef391ba0602f3d11beef8e65aae135fa5b762f5ebd9d3bfb"}, + {file = "pycryptodome-3.19.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:61bb3ccbf4bf32ad9af32da8badc24e888ae5231c617947e0f5401077f8b091f"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d49a6c715d8cceffedabb6adb7e0cbf41ae1a2ff4adaeec9432074a80627dea1"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e249a784cc98a29c77cea9df54284a44b40cafbfae57636dd2f8775b48af2434"}, + {file = "pycryptodome-3.19.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d033947e7fd3e2ba9a031cb2d267251620964705a013c5a461fa5233cc025270"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:84c3e4fffad0c4988aef0d5591be3cad4e10aa7db264c65fadbc633318d20bde"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:139ae2c6161b9dd5d829c9645d781509a810ef50ea8b657e2257c25ca20efe33"}, + {file = "pycryptodome-3.19.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:5b1986c761258a5b4332a7f94a83f631c1ffca8747d75ab8395bf2e1b93283d9"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win32.whl", hash = "sha256:536f676963662603f1f2e6ab01080c54d8cd20f34ec333dcb195306fa7826997"}, + {file = "pycryptodome-3.19.0-cp35-abi3-win_amd64.whl", hash = "sha256:04dd31d3b33a6b22ac4d432b3274588917dcf850cc0c51c84eca1d8ed6933810"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = 
"sha256:8999316e57abcbd8085c91bc0ef75292c8618f41ca6d2b6132250a863a77d1e7"}, + {file = "pycryptodome-3.19.0-pp27-pypy_73-win32.whl", hash = "sha256:a0ab84755f4539db086db9ba9e9f3868d2e3610a3948cbd2a55e332ad83b01b0"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0101f647d11a1aae5a8ce4f5fad6644ae1b22bb65d05accc7d322943c69a74a6"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c1601e04d32087591d78e0b81e1e520e57a92796089864b20e5f18c9564b3fa"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:506c686a1eee6c00df70010be3b8e9e78f406af4f21b23162bbb6e9bdf5427bc"}, + {file = "pycryptodome-3.19.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7919ccd096584b911f2a303c593280869ce1af9bf5d36214511f5e5a1bed8c34"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560591c0777f74a5da86718f70dfc8d781734cf559773b64072bbdda44b3fc3e"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1cc2f2ae451a676def1a73c1ae9120cd31af25db3f381893d45f75e77be2400"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17940dcf274fcae4a54ec6117a9ecfe52907ed5e2e438fe712fe7ca502672ed5"}, + {file = "pycryptodome-3.19.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d04f5f623a280fbd0ab1c1d8ecbd753193ab7154f09b6161b0f857a1a676c15f"}, + {file = "pycryptodome-3.19.0.tar.gz", hash = "sha256:bc35d463222cdb4dbebd35e0784155c81e161b9284e567e7e933d722e533331e"}, ] [[package]] From e20fb7a496de3abe90c23ad51369e8ae5220ecb7 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 18 Sep 2023 15:54:14 -0300 Subject: [PATCH 042/262] Accept POST requests to the /patrons/me/token endpoint. (#1390) --- api/routes.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/routes.py b/api/routes.py index ed07b6c1c7..3a6a172c28 100644 --- a/api/routes.py +++ b/api/routes.py @@ -395,7 +395,7 @@ def delete_patron_devices(): return app.manager.patron_devices.delete_patron_device() -@library_dir_route("/patrons/me/token", methods=["GET"]) +@library_dir_route("/patrons/me/token", methods=["POST"]) @api_spec.validate(resp=SpecResponse(HTTP_200=PatronAuthAccessToken), tags=["patron"]) @has_library @requires_auth From e8e42ca679f1db5d771d1b5232291fe966aa313b Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Mon, 18 Sep 2023 14:59:50 -0400 Subject: [PATCH 043/262] Add `start` and `until` date arguments to playback time reporting. (PP-273) (#1376) * Add `start` and `until` date arguments to playback time reporting. * Make report tests pass. * WIP before previous months bug * Handle out of order dates and parse into UTC. - Dates are parsed into UTC, rather than naive, `datetime`s. - Factored out the query into a helper function to improve readability. * Add tests for new functionality. 
--- core/jobs/playtime_entries.py | 129 +++++++++++++++---- tests/core/jobs/test_playtime_entries.py | 156 ++++++++++++++++++++++- 2 files changed, 256 insertions(+), 29 deletions(-) diff --git a/core/jobs/playtime_entries.py b/core/jobs/playtime_entries.py index 2b271fc3c0..b68378446b 100644 --- a/core/jobs/playtime_entries.py +++ b/core/jobs/playtime_entries.py @@ -1,9 +1,14 @@ +from __future__ import annotations + +import argparse import csv import os from collections import defaultdict from datetime import datetime, timedelta from tempfile import TemporaryFile +from typing import TYPE_CHECKING +import dateutil.parser import pytz from sqlalchemy.sql.functions import sum @@ -15,6 +20,9 @@ from core.util.email import EmailManager from scripts import Script +if TYPE_CHECKING: + from sqlalchemy.orm import Query + class PlaytimeEntriesSummationScript(Script): def do_run(self): @@ -64,42 +72,85 @@ def do_run(self): class PlaytimeEntriesEmailReportsScript(Script): - def do_run(self): - """Send a quarterly report with aggregated playtimes via email""" - # 3 months prior, shifted to the 1st of the month - start, until = previous_months(number_of_months=3) + REPORT_DATE_FORMAT = "%Y-%m-%d" - # Let the database do the math for us - result = ( - self._db.query(PlaytimeSummary) - .with_entities( - PlaytimeSummary.identifier_str, - PlaytimeSummary.collection_name, - PlaytimeSummary.library_name, - PlaytimeSummary.identifier_id, - sum(PlaytimeSummary.total_seconds_played), - ) - .filter( - PlaytimeSummary.timestamp >= start, - PlaytimeSummary.timestamp < until, - ) - .group_by( - PlaytimeSummary.identifier_str, - PlaytimeSummary.collection_name, - PlaytimeSummary.library_name, - PlaytimeSummary.identifier_id, + @classmethod + def arg_parser(cls): + # The default `start` and `until` dates encompass the previous three months. + # We convert them to strings here so that they are handled the same way + # as non-default dates specified as arguments. + default_start, default_until = ( + date.isoformat() for date in previous_months(number_of_months=3) + ) + + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter + ) + parser.add_argument( + "--start", + metavar="YYYY-MM-DD", + default=default_start, + type=dateutil.parser.isoparse, + help="Start date for report in ISO 8601 'yyyy-mm-dd' format.", + ) + parser.add_argument( + "--until", + metavar="YYYY-MM-DD", + default=default_until, + type=dateutil.parser.isoparse, + help="'Until' date for report in ISO 8601 'yyyy-mm-dd' format." + " The report will represent entries from the 'start' date up until," + " but not including, this date.", + ) + return parser + + @classmethod + def parse_command_line(cls, _db=None, cmd_args=None, *args, **kwargs): + parsed = super().parse_command_line(_db=_db, cmd_args=cmd_args, *args, **kwargs) + utc_start = pytz.utc.localize(parsed.start) + utc_until = pytz.utc.localize(parsed.until) + if utc_start >= utc_until: + cls.arg_parser().error( + f"start date ({utc_start.strftime(cls.REPORT_DATE_FORMAT)}) must be before " + f"until date ({utc_until.strftime(cls.REPORT_DATE_FORMAT)})." 
) + return argparse.Namespace( + **{**vars(parsed), **dict(start=utc_start, until=utc_until)} + ) + + def do_run(self): + """Produce a report for the given (or default) date range.""" + parsed = self.parse_command_line() + start = parsed.start + until = parsed.until + + formatted_start_date = start.strftime(self.REPORT_DATE_FORMAT) + formatted_until_date = until.strftime(self.REPORT_DATE_FORMAT) + report_date_label = f"{formatted_start_date} - {formatted_until_date}" + email_subject = ( + f"Playtime Summaries {formatted_start_date} - {formatted_until_date}" + ) + attachment_name = ( + f"playtime-summary-{formatted_start_date}-{formatted_until_date}" ) # Write to a temporary file so we don't overflow the memory - with TemporaryFile("w+", prefix=f"playtimereport{until}", suffix="csv") as temp: + with TemporaryFile( + "w+", prefix=f"playtimereport{formatted_until_date}", suffix="csv" + ) as temp: # Write the data as a CSV writer = csv.writer(temp) writer.writerow( ["date", "urn", "collection", "library", "title", "total seconds"] ) - for urn, collection_name, library_name, identifier_id, total in result: + for ( + urn, + collection_name, + library_name, + identifier_id, + total, + ) in self._fetch_report_records(start=start, until=until): edition = None if identifier_id: edition = get_one( @@ -107,7 +158,7 @@ def do_run(self): ) title = edition and edition.title row = ( - f"{start} - {until}", + report_date_label, urn, collection_name, library_name, @@ -124,11 +175,33 @@ def do_run(self): ) if recipient: EmailManager.send_email( - f"Playtime Summaries {start} - {until}", + email_subject, receivers=[recipient], text="", - attachments={f"playtime-summary-{start}-{until}": temp.read()}, + attachments={attachment_name: temp.read()}, ) else: self.log.error("No reporting email found, logging complete report.") self.log.warning(temp.read()) + + def _fetch_report_records(self, start: datetime, until: datetime) -> Query: + return ( + self._db.query(PlaytimeSummary) + .with_entities( + PlaytimeSummary.identifier_str, + PlaytimeSummary.collection_name, + PlaytimeSummary.library_name, + PlaytimeSummary.identifier_id, + sum(PlaytimeSummary.total_seconds_played), + ) + .filter( + PlaytimeSummary.timestamp >= start, + PlaytimeSummary.timestamp < until, + ) + .group_by( + PlaytimeSummary.identifier_str, + PlaytimeSummary.collection_name, + PlaytimeSummary.library_name, + PlaytimeSummary.identifier_id, + ) + ) diff --git a/tests/core/jobs/test_playtime_entries.py b/tests/core/jobs/test_playtime_entries.py index ae14633f29..0cef68e379 100644 --- a/tests/core/jobs/test_playtime_entries.py +++ b/tests/core/jobs/test_playtime_entries.py @@ -1,8 +1,13 @@ +from __future__ import annotations + +import re from datetime import datetime, timedelta from typing import List from unittest.mock import MagicMock, call, patch +import pytest import pytz +from freezegun import freeze_time from api.model.time_tracking import PlaytimeTimeEntry from core.config import Configuration @@ -15,7 +20,7 @@ from core.model.identifier import Identifier from core.model.library import Library from core.model.time_tracking import PlaytimeEntry, PlaytimeSummary -from core.util.datetime_helpers import previous_months, utc_now +from core.util.datetime_helpers import datetime_utc, previous_months, utc_now from tests.fixtures.database import DatabaseTransactionFixture @@ -307,3 +312,152 @@ def test_no_reporting_email(self, db: DatabaseTransactionFixture): assert script._log.error.call_count == 1 assert script._log.warning.call_count == 1 assert 
"date,urn,collection," in script._log.warning.call_args[0][0] + + @pytest.mark.parametrize( + "current_utc_time, start_arg, expected_start, until_arg, expected_until", + [ + # Default values from two dates within the same month (next two cases). + [ + datetime(2020, 1, 1, 0, 0, 0), + None, + datetime_utc(2019, 10, 1, 0, 0, 0), + None, + datetime_utc(2020, 1, 1, 0, 0, 0), + ], + [ + datetime(2020, 1, 31, 0, 0, 0), + None, + datetime_utc(2019, 10, 1, 0, 0, 0), + None, + datetime_utc(2020, 1, 1, 0, 0, 0), + ], + # `start` specified, `until` defaulted. + [ + datetime(2020, 1, 31, 0, 0, 0), + "2019-06-11", + datetime_utc(2019, 6, 11, 0, 0, 0), + None, + datetime_utc(2020, 1, 1, 0, 0, 0), + ], + # `start` defaulted, `until` specified. + [ + datetime(2020, 1, 31, 0, 0, 0), + None, + datetime_utc(2019, 10, 1, 0, 0, 0), + "2019-11-20", + datetime_utc(2019, 11, 20, 0, 0, 0), + ], + # When both dates are specified, the current datetime doesn't matter. + # Both dates specified, but we test at a specific time here anyway. + [ + datetime(2020, 1, 31, 0, 0, 0), + "2018-07-03", + datetime_utc(2018, 7, 3, 0, 0, 0), + "2019-04-30", + datetime_utc(2019, 4, 30, 0, 0, 0), + ], + # The same dates are specified, but we test at the actual current time. + [ + utc_now(), + "2018-07-03", + datetime_utc(2018, 7, 3, 0, 0, 0), + "2019-04-30", + datetime_utc(2019, 4, 30, 0, 0, 0), + ], + # The same dates are specified, but we test at the actual current time. + [ + utc_now(), + "4099-07-03", + datetime_utc(4099, 7, 3, 0, 0, 0), + "4150-04-30", + datetime_utc(4150, 4, 30, 0, 0, 0), + ], + ], + ) + def test_parse_command_line( + self, + current_utc_time: datetime, + start_arg: str | None, + expected_start: datetime, + until_arg: str | None, + expected_until: datetime, + ): + start_args = ["--start", start_arg] if start_arg else [] + until_args = ["--until", until_arg] if until_arg else [] + cmd_args = start_args + until_args + + with freeze_time(current_utc_time): + parsed = PlaytimeEntriesEmailReportsScript.parse_command_line( + cmd_args=cmd_args + ) + assert expected_start == parsed.start + assert expected_until == parsed.until + assert pytz.UTC == parsed.start.tzinfo + assert pytz.UTC == parsed.until.tzinfo + + @pytest.mark.parametrize( + "current_utc_time, start_arg, expected_start, until_arg, expected_until", + [ + # `start` specified, `until` defaulted. + [ + datetime(2020, 1, 31, 0, 0, 0), + "2020-02-01", + datetime_utc(2020, 2, 1, 0, 0, 0), + None, + datetime_utc(2020, 1, 1, 0, 0, 0), + ], + # `start` defaulted, `until` specified. + [ + datetime(2020, 1, 31, 0, 0, 0), + None, + datetime_utc(2019, 10, 1, 0, 0, 0), + "2019-06-11", + datetime_utc(2019, 6, 11, 0, 0, 0), + ], + # When both dates are specified, the current datetime doesn't matter. + # Both dates specified, but we test at a specific time here anyway. + [ + datetime(2020, 1, 31, 0, 0, 0), + "2019-04-30", + datetime_utc(2019, 4, 30, 0, 0, 0), + "2018-07-03", + datetime_utc(2018, 7, 3, 0, 0, 0), + ], + # The same dates are specified, but we test at the actual current time. + [ + utc_now(), + "2019-04-30", + datetime_utc(2019, 4, 30, 0, 0, 0), + "2018-07-03", + datetime_utc(2018, 7, 3, 0, 0, 0), + ], + # The same dates are specified, but we test at the actual current time. 
+ [ + utc_now(), + "4150-04-30", + datetime_utc(4150, 4, 30, 0, 0, 0), + "4099-07-03", + datetime_utc(4099, 7, 3, 0, 0, 0), + ], + ], + ) + def test_parse_command_line_start_not_before_until( + self, + capsys, + current_utc_time: datetime, + start_arg: str | None, + expected_start: datetime, + until_arg: str | None, + expected_until: datetime, + ): + start_args = ["--start", start_arg] if start_arg else [] + until_args = ["--until", until_arg] if until_arg else [] + cmd_args = start_args + until_args + + with freeze_time(current_utc_time), pytest.raises(SystemExit) as excinfo: + parsed = PlaytimeEntriesEmailReportsScript.parse_command_line( + cmd_args=cmd_args + ) + _, err = capsys.readouterr() + assert 2 == excinfo.value.code + assert re.search(r"start date \(.*\) must be before until date \(.*\).", err) From f01807ee62f27d1dd64881bf76e64fe85e67a94e Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 19 Sep 2023 18:14:37 +0530 Subject: [PATCH 044/262] PP-463 Empty author tag fix (#1391) * Empty author tag fix --- core/feed/annotator/base.py | 8 ++++---- core/feed/serializer/opds.py | 4 +++- tests/api/feed/test_annotators.py | 21 +++++++++++++++++++++ 3 files changed, 28 insertions(+), 5 deletions(-) diff --git a/core/feed/annotator/base.py b/core/feed/annotator/base.py index 4ce50ea4fc..d100cdc2d4 100644 --- a/core/feed/annotator/base.py +++ b/core/feed/annotator/base.py @@ -93,9 +93,10 @@ def contributor( return None name = contributor.display_name or contributor.sort_name - name_key = name.lower() - if name_key in state[marc_role]: - # We've already credited this person with this + name_key = name and name.lower() + if not name_key or name_key in state[marc_role]: + # Either there is no valid name present or + # we've already credited this person with this # MARC role. Returning a tag would be redundant. return None @@ -108,7 +109,6 @@ def contributor( # Record the fact that we credited this person with this role, # so that we don't do it again on a subsequent call. 
state[marc_role].add(name_key) - return current_role, entry @classmethod diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index d6ff9e49a2..db4c67c7dd 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -221,7 +221,9 @@ def serialize_work_entry(self, feed_entry: WorkEntryData) -> etree._Element: entry.append(rating_tag) for author in feed_entry.authors: - entry.append(self._serialize_author_tag("author", author)) + # Author must at a minimum have a name + if author.name: + entry.append(self._serialize_author_tag("author", author)) for contributor in feed_entry.contributors: entry.append(self._serialize_author_tag("contributor", contributor)) diff --git a/tests/api/feed/test_annotators.py b/tests/api/feed/test_annotators.py index e4e3b1032c..8b9dc57a3b 100644 --- a/tests/api/feed/test_annotators.py +++ b/tests/api/feed/test_annotators.py @@ -155,6 +155,27 @@ def test_appeals(self, annotators_fixture: TestAnnotatorsFixture): actual = [(x["term"], x["label"], x["ratingValue"]) for x in appeal_tags] assert set(expect) == set(actual) + def test_authors(self, annotators_fixture: TestAnnotatorsFixture): + db = annotators_fixture.db + edition = db.edition() + [c_orig] = list(edition.contributors) + + c1 = edition.add_contributor("c1", Contributor.AUTHOR_ROLE, _sort_name="c1") + # No name contributor + c_none = edition.add_contributor("c2", Contributor.AUTHOR_ROLE) + c_none.display_name = "" + c_none._sort_name = "" + + authors = Annotator.authors(edition) + # The default, c1 and c_none + assert len(edition.contributions) == 3 + # Only default and c1 are used in the feed, because c_none has no name + assert len(authors["authors"]) == 2 + assert set(map(lambda x: x.name, authors["authors"])) == { + c1.sort_name, + c_orig.sort_name, + } + def test_detailed_author(self, annotators_fixture: TestAnnotatorsFixture): data, db, session = ( annotators_fixture, From cf04c5c69db43e5468a566ad25137dd6fdea3509 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 19 Sep 2023 18:28:21 +0530 Subject: [PATCH 045/262] Ensured the optional attributes of a patron are vetted before being used in a notification (#1392) --- core/util/notifications.py | 68 ++++++++++++++++----------- tests/core/util/test_notifications.py | 10 ++-- 2 files changed, 46 insertions(+), 32 deletions(-) diff --git a/core/util/notifications.py b/core/util/notifications.py index 5641c0d15c..00a49246bb 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -67,22 +67,26 @@ def send_loan_expiry_message( library_short_name = loan.library and loan.library.short_name title = f"Only {days_to_expiry} {'days' if days_to_expiry != 1 else 'day'} left on your loan!" 
body = f"Your loan on {edition.title} is expiring soon" + data = dict( + title=title, + body=body, + event_type=NotificationConstants.LOAN_EXPIRY_TYPE, + loans_endpoint=f"{url}/{loan.library.short_name}/loans", + type=identifier.type, + identifier=identifier.identifier, + library=library_short_name, + days_to_expiry=days_to_expiry, + ) + if loan.patron.external_identifier: + data["external_identifier"] = loan.patron.external_identifier + if loan.patron.authorization_identifier: + data["authorization_identifier"] = loan.patron.authorization_identifier + for token in tokens: msg = messaging.Message( token=token.device_token, notification=messaging.Notification(title=title, body=body), - data=dict( - title=title, - body=body, - event_type=NotificationConstants.LOAN_EXPIRY_TYPE, - loans_endpoint=f"{url}/{loan.library.short_name}/loans", - external_identifier=loan.patron.external_identifier, - authorization_identifier=loan.patron.authorization_identifier, - identifier=identifier.identifier, - type=identifier.type, - library=library_short_name, - days_to_expiry=days_to_expiry, - ), + data=data, ) resp = messaging.send(msg, dry_run=cls.TESTING_MODE, app=cls.fcm_app()) responses.append(resp) @@ -102,15 +106,19 @@ def send_activity_sync_message(cls, patrons: list[Patron]) -> list[str]: for patron in patrons: tokens = cls.notifiable_tokens(patron) loans_api = f"{url}/{patron.library.short_name}/loans" + data = dict( + event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, + loans_endpoint=loans_api, + ) + if patron.external_identifier: + data["external_identifier"] = patron.external_identifier + if patron.authorization_identifier: + data["authorization_identifier"] = patron.authorization_identifier + for token in tokens: msg = messaging.Message( token=token.device_token, - data=dict( - event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, - loans_endpoint=loans_api, - external_identifier=patron.external_identifier, - authorization_identifier=patron.authorization_identifier, - ), + data=data, ) msgs.append(msg) batch: messaging.BatchResponse = messaging.send_all( @@ -133,20 +141,24 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: work: Work = hold.work identifier: Identifier = hold.license_pool.identifier title = f'Your hold on "{work.title}" is available!' 
+ data = dict( + title=title, + event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, + loans_endpoint=loans_api, + identifier=identifier.identifier, + type=identifier.type, + library=hold.patron.library.short_name, + ) + if hold.patron.external_identifier: + data["external_identifier"] = hold.patron.external_identifier + if hold.patron.authorization_identifier: + data["authorization_identifier"] = hold.patron.authorization_identifier + for token in tokens: msg = messaging.Message( token=token.device_token, notification=messaging.Notification(title=title), - data=dict( - title=title, - event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, - loans_endpoint=loans_api, - external_identifier=hold.patron.external_identifier, - authorization_identifier=hold.patron.authorization_identifier, - identifier=identifier.identifier, - type=identifier.type, - library=hold.patron.library.short_name, - ), + data=data, ) msgs.append(msg) batch: messaging.BatchResponse = messaging.send_all( diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index c47914ecc4..54b08efd4c 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -30,7 +30,8 @@ def push_notf_fixture(db: DatabaseTransactionFixture) -> PushNotificationsFixtur class TestPushNotifications: def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixture): db = push_notf_fixture.db - patron = db.patron() + patron = db.patron(external_identifier="xyz1") + patron.authorization_identifier = "abc1" device_token, _ = get_one_or_create( db.session, @@ -78,7 +79,9 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): db = push_notf_fixture.db + # Only patron 1 will get authorization identifiers patron1 = db.patron() + patron1.authorization_identifier = "auth1" patron2 = db.patron() patron3 = db.patron() @@ -132,7 +135,6 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, loans_endpoint="http://localhost/default/loans", external_identifier=patron2.external_identifier, - authorization_identifier=patron2.authorization_identifier, ), ), mock.call( @@ -141,7 +143,6 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, loans_endpoint="http://localhost/default/loans", external_identifier=patron2.external_identifier, - authorization_identifier=patron2.authorization_identifier, ), ), ] @@ -150,7 +151,9 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): db = push_notf_fixture.db + # Only patron1 will get an identifier patron1 = db.patron() + patron1.authorization_identifier = "auth1" patron2 = db.patron() DeviceToken.create( db.session, DeviceTokenTypes.FCM_ANDROID, "test-token-1", patron1 @@ -220,7 +223,6 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, loans_endpoint=loans_api, external_identifier=hold2.patron.external_identifier, - authorization_identifier=hold2.patron.authorization_identifier, identifier=hold2.license_pool.identifier.identifier, type=hold2.license_pool.identifier.type, library=hold2.patron.library.short_name, From e79ab58d392a5084c322c4f9ed57c1ce99476282 Mon Sep 17 00:00:00 2001 From: 
Jonathan Green Date: Tue, 19 Sep 2023 10:00:11 -0300 Subject: [PATCH 046/262] Update OPDS2 token fulfillment using opaque patron identifier (PP-416) (#1374) ## Description Instead of directly sending the patron username or external_identifier to the token auth endpoint, instead use the identifier_to_remote_service function to create an identifier to send to the remote service. ## Motivation and Context In PP-416, OPDS2 with token auth is failing because the patron record doesn't have the required fields (either username or external_identifier). We've seen failures like this in the past as well. By generating an opaque identifier and sending that instead we make sure that we are able to use token auth with any valid patron auth, eliminating one source of errors, and helping to improve patron privacy. --- api/opds2.py | 26 +++++++++++++------------- tests/api/test_opds2.py | 15 +++++++++------ 2 files changed, 22 insertions(+), 19 deletions(-) diff --git a/api/opds2.py b/api/opds2.py index fa38f3d82b..9f26dddf4e 100644 --- a/api/opds2.py +++ b/api/opds2.py @@ -9,7 +9,7 @@ from api.circulation import CirculationFulfillmentPostProcessor, FulfillmentInfo from api.circulation_exceptions import CannotFulfill from core.lane import Facets -from core.model import ConfigurationSetting, ExternalIntegration +from core.model import ConfigurationSetting, DataSource, ExternalIntegration from core.model.edition import Edition from core.model.identifier import Identifier from core.model.licensing import LicensePoolDeliveryMechanism @@ -94,6 +94,10 @@ class TokenAuthenticationFulfillmentProcessor(CirculationFulfillmentPostProcesso def __init__(self, collection) -> None: pass + @classmethod + def logger(cls) -> logging.Logger: + return logging.getLogger(f"{cls.__module__}.{cls.__name__}") + def fulfill( self, patron: Patron, @@ -120,7 +124,9 @@ def fulfill( if not token_auth or token_auth.value is None: return fulfillment - token = self.get_authentication_token(patron, token_auth.value) + token = self.get_authentication_token( + patron, licensepool.data_source, token_auth.value + ) if isinstance(token, ProblemDetail): raise CannotFulfill() @@ -130,24 +136,18 @@ def fulfill( @classmethod def get_authentication_token( - cls, patron: Patron, token_auth_url: str + cls, patron: Patron, datasource: DataSource, token_auth_url: str ) -> ProblemDetail | str: """Get the authentication token for a patron""" - log = logging.getLogger("OPDS2API") - - patron_id = patron.username if patron.username else patron.external_identifier - if patron_id is None: - log.error( - f"Could not authenticate the patron({patron.authorization_identifier}): " - f"both username and external_identifier are None." 
- ) - return INVALID_CREDENTIALS + log = cls.logger() + patron_id = patron.identifier_to_remote_service(datasource) url = URITemplate(token_auth_url).expand(patron_id=patron_id) response = HTTP.get_with_timeout(url) if response.status_code != 200: log.error( - f"Could not authenticate the patron({patron_id}): {str(response.content)}" + f"Could not authenticate the patron (authorization identifier: '{patron.authorization_identifier}' " + f"external identifier: '{patron_id}'): {str(response.content)}" ) return INVALID_CREDENTIALS diff --git a/tests/api/test_opds2.py b/tests/api/test_opds2.py index d4a896adb6..969b290261 100644 --- a/tests/api/test_opds2.py +++ b/tests/api/test_opds2.py @@ -175,7 +175,6 @@ class TestTokenAuthenticationFulfillmentProcessor: @patch("api.opds2.HTTP") def test_fulfill(self, mock_http, db: DatabaseTransactionFixture): patron = db.patron() - patron.username = "username" collection: Collection = db.collection( protocol=ExternalIntegration.OPDS2_IMPORT ) @@ -207,10 +206,14 @@ def test_fulfill(self, mock_http, db: DatabaseTransactionFixture): processor = TokenAuthenticationFulfillmentProcessor(collection) ff_info = processor.fulfill(patron, "", work.license_pools[0], None, ff_info) + patron_id = patron.identifier_to_remote_service( + work.license_pools[0].data_source + ) + assert mock_http.get_with_timeout.call_count == 1 assert ( mock_http.get_with_timeout.call_args[0][0] - == "http://example.org/token?userName=username" + == f"http://example.org/token?userName={patron_id}" ) assert ( @@ -265,9 +268,9 @@ def test_get_authentication_token(self, mock_http, db: DatabaseTransactionFixtur resp.raw = io.BytesIO(b"plaintext-auth-token") mock_http.get_with_timeout.return_value = resp patron = db.patron() - patron.username = "test" + datasource = DataSource.lookup(db.session, "test", autocreate=True) token = TokenAuthenticationFulfillmentProcessor.get_authentication_token( - patron, "http://example.org/token" + patron, datasource, "http://example.org/token" ) assert token == "plaintext-auth-token" @@ -280,9 +283,9 @@ def test_get_authentication_token_errors( resp = Response() resp.status_code = 400 mock_http.get_with_timeout.return_value = resp - + datasource = DataSource.lookup(db.session, "test", autocreate=True) token = TokenAuthenticationFulfillmentProcessor.get_authentication_token( - db.patron(), "http://example.org/token" + db.patron(), datasource, "http://example.org/token" ) assert token == INVALID_CREDENTIALS From e99b463fd1525c888bac5fc3a781c10f3dd92abc Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Wed, 20 Sep 2023 15:52:49 -0400 Subject: [PATCH 047/262] Some patron authenticators delegate their patron lookup. (PP-461) (#1396) * Make distinction between patron authentication and lookup providers. * Fix an existing test. * Patron authentication providers know their lookup provider. 
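The delegation pattern is small enough to sketch in isolation. The snippet below is a minimal, self-contained illustration of the idea rather than the real provider classes: `LookupProvider` and `TokenProvider` are hypothetical stand-ins for `AuthenticationProvider` and `BasicTokenAuthenticationProvider`, and `unique_lookup_providers` mirrors the order-preserving de-duplication used by `unique_patron_lookup_providers` in the diff below.

```python
class LookupProvider:
    """Simplified stand-in for an authentication provider that can look up patrons."""

    @property
    def patron_lookup_provider(self) -> "LookupProvider":
        # By default, a provider performs its own patron lookups.
        return self


class TokenProvider(LookupProvider):
    """Simplified stand-in for a token provider that wraps a basic provider."""

    def __init__(self, basic_provider: LookupProvider) -> None:
        self.basic_provider = basic_provider

    @property
    def patron_lookup_provider(self) -> LookupProvider:
        # Delegate patron lookups to the companion basic-auth provider.
        return self.basic_provider


def unique_lookup_providers(providers):
    """De-duplicate lookup providers while preserving registration order."""
    lookups = (p.patron_lookup_provider for p in providers if p is not None)
    return dict.fromkeys(lookups).keys()


basic = LookupProvider()
token = TokenProvider(basic)
# Both registered providers resolve to the single basic provider,
# so a failing lookup is attempted only once.
assert list(unique_lookup_providers([token, basic])) == [basic]
```

Collapsing the token provider onto its companion basic provider means the admin patron controller never hits the same backend twice for one lookup.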
--- api/admin/controller/patron.py | 5 +++-- api/authentication/base.py | 8 +++++++ api/authentication/basic_token.py | 4 ++++ api/authenticator.py | 27 +++++++++++++++++++++-- tests/api/admin/controller/test_patron.py | 4 ++-- tests/api/test_authenticator.py | 9 ++++++++ 6 files changed, 51 insertions(+), 6 deletions(-) diff --git a/api/admin/controller/patron.py b/api/admin/controller/patron.py index 2865696be7..31204c98c3 100644 --- a/api/admin/controller/patron.py +++ b/api/admin/controller/patron.py @@ -35,13 +35,14 @@ def _load_patrondata(self, authenticator=None): patron_data = PatronData(authorization_identifier=identifier) complete_patron_data = None + patron_lookup_providers = list(authenticator.unique_patron_lookup_providers) - if not authenticator.providers: + if not patron_lookup_providers: return NO_SUCH_PATRON.detailed( _("This library has no authentication providers, so it has no patrons.") ) - for provider in authenticator.providers: + for provider in patron_lookup_providers: complete_patron_data = provider.remote_patron_lookup(patron_data) if complete_patron_data: return complete_patron_data diff --git a/api/authentication/base.py b/api/authentication/base.py index a8481ecfc4..0ffa125f86 100644 --- a/api/authentication/base.py +++ b/api/authentication/base.py @@ -79,6 +79,14 @@ def identifies_individuals(self): # it should override this value and set it to False. ... + @property + def patron_lookup_provider(self): + """Return the provider responsible for patron lookup. + + By default, we'll put ourself forward for this task. + """ + return self + @abstractmethod def authenticated_patron( self, _db: Session, header: dict | str diff --git a/api/authentication/basic_token.py b/api/authentication/basic_token.py index 6e445c8aeb..628210e685 100644 --- a/api/authentication/basic_token.py +++ b/api/authentication/basic_token.py @@ -36,6 +36,10 @@ def __init__( # An access token provider is a companion authentication to the basic providers self.basic_provider = basic_provider + @property + def patron_lookup_provider(self): + return self.basic_provider + def authenticated_patron( self, _db: Session, token: dict | str ) -> Patron | ProblemDetail | None: diff --git a/api/authenticator.py b/api/authenticator.py index 11d655b0e1..02ed93cfe0 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -33,8 +33,6 @@ from core.util.log import elapsed_time_logging from core.util.problem_detail import ProblemDetail, ProblemError -from .authentication.base import AuthenticationProvider -from .authentication.basic import BasicAuthenticationProvider from .config import CannotLoadConfiguration, Configuration from .integration.registry.patron_auth import PatronAuthRegistry from .problem_details import * @@ -430,6 +428,31 @@ def providers(self) -> Iterable[AuthenticationProvider]: yield self.basic_auth_provider yield from self.saml_providers_by_name.values() + def _unique_basic_lookup_providers( + self, auth_providers: Iterable[AuthenticationProvider | None] + ) -> Iterable[AuthenticationProvider]: + providers: filter[AuthenticationProvider] = filter( + None, + (p.patron_lookup_provider for p in auth_providers if p is not None), + ) + # De-dupe, but preserve provider order. + return dict.fromkeys(list(providers)).keys() + + @property + def unique_patron_lookup_providers(self) -> Iterable[AuthenticationProvider]: + """Iterator over unique patron data providers for registered AuthenticationProviders. 
+ + We want a unique list of providers in order to avoid hitting the same + provider multiple times, most likely in the case of failing lookups. + """ + yield from self._unique_basic_lookup_providers( + [ + self.access_token_authentication_provider, + self.basic_auth_provider, + ] + ) + yield from self.saml_providers_by_name.values() + def authenticated_patron( self, _db: Session, auth: Authorization ) -> Patron | ProblemDetail | None: diff --git a/tests/api/admin/controller/test_patron.py b/tests/api/admin/controller/test_patron.py index 11abf3f6e8..d41662c522 100644 --- a/tests/api/admin/controller/test_patron.py +++ b/tests/api/admin/controller/test_patron.py @@ -33,7 +33,7 @@ def test__load_patrondata(self, patron_controller_fixture: PatronControllerFixtu class MockAuthenticator: def __init__(self, providers): - self.providers = providers + self.unique_patron_lookup_providers = providers class MockAuthenticationProvider: def __init__(self, patron_dict): @@ -73,7 +73,7 @@ def remote_patron_lookup(self, patrondata): ) # Authenticator can't find patron with this identifier - authenticator.providers.append(auth_provider) + authenticator.unique_patron_lookup_providers.append(auth_provider) with patron_controller_fixture.request_context_with_library_and_admin("/"): flask.request.form = form response = m(authenticator) diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index eed0516412..8f2dc76ae2 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -937,6 +937,15 @@ def test_authenticated_patron_bearer_access_token( authenticator = LibraryAuthenticator( _db=db.session, library=db.default_library(), basic_auth_provider=basic ) + + token_auth_provider, basic_auth_provider = authenticator.providers + [patron_lookup_provider] = authenticator.unique_patron_lookup_providers + assert ( + cast(BasicTokenAuthenticationProvider, token_auth_provider).basic_provider + == basic_auth_provider + ) + assert patron_lookup_provider == basic_auth_provider + patron = db.patron() token = AccessTokenProvider.generate_token(db.session, patron, "pass") auth = Authorization(auth_type="bearer", token=token) From 37e7004b3e843a57ceccc786b93c74a05536c0b2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Sep 2023 21:13:45 -0300 Subject: [PATCH 048/262] Bump types-requests from 2.31.0.2 to 2.31.0.3 (#1394) Bumps [types-requests](https://github.com/python/typeshed) from 2.31.0.2 to 2.31.0.3. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 385cbb4b58..f6700416f0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3941,13 +3941,13 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.2" +version = "2.31.0.3" description = "Typing stubs for requests" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, - {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, + {file = "types-requests-2.31.0.3.tar.gz", hash = "sha256:d5d7a08965fca12bedf716eaf5430c6e3d0da9f3164a1dba2a7f3885f9ebe3c0"}, + {file = "types_requests-2.31.0.3-py3-none-any.whl", hash = "sha256:938f51653c757716aeca5d72c405c5e2befad8b0d330e3b385ce7f148e1b10dc"}, ] [package.dependencies] From 6953b8d80420d97ae920852eb5d41bd74d3711ce Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Sep 2023 21:14:05 -0300 Subject: [PATCH 049/262] Bump types-pytz from 2023.3.1.0 to 2023.3.1.1 (#1395) Bumps [types-pytz](https://github.com/python/typeshed) from 2023.3.1.0 to 2023.3.1.1. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pytz dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index f6700416f0..7432a84cc4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3919,13 +3919,13 @@ files = [ [[package]] name = "types-pytz" -version = "2023.3.1.0" +version = "2023.3.1.1" description = "Typing stubs for pytz" optional = false python-versions = "*" files = [ - {file = "types-pytz-2023.3.1.0.tar.gz", hash = "sha256:8e7d2198cba44a72df7628887c90f68a568e1445f14db64631af50c3cab8c090"}, - {file = "types_pytz-2023.3.1.0-py3-none-any.whl", hash = "sha256:a660a38ed86d45970603e4f3b4877c7ba947668386a896fb5d9589c17e7b8407"}, + {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"}, + {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"}, ] [[package]] From 64afa287283b3f2716987c96f51cc6e2fff5118b Mon Sep 17 00:00:00 2001 From: Mark Raynsford Date: Thu, 21 Sep 2023 11:04:58 +0000 Subject: [PATCH 050/262] PP65: Opensearch migrations (#1268) * Initial migration API for Opensearch This creates the definitions for schemas, and an API for performing upgrades. Affects: https://ebce-lyrasis.atlassian.net/browse/PP-37 * Add tests for search document types * Implement mappings updates in populate_index() * Implement bulk search document uploads. * Adjust the interface so that search documents are created lazily. * Adjust the API to allow for separate migration and population. * Set index for search documents inside search service. * Properly clean up indexes, as this was affecting test stability. * Ensure that search documents must be written to aliases. 
* Always specify the alias pointer for a search query * Updated all Keyword type fields as filterable_strings * Search index removal and repopulation via search_index_refresh * SearchService object is now base name dependent Since we have no use case of different base names in a single CM * Search version is an implicit class variable now It is not passed through the init, but should be part of the class definition * Removed the burden of migration initialization from ExternalSearchIndex Since instance_initialization does the same job, we depend on it to run before hand Added test cases specifically for these scenarios --------- Co-authored-by: Rishi Diwan --- api/admin/controller/custom_lists.py | 7 +- core/coverage.py | 1 - core/external_search.py | 1220 +++-------------- core/opds.py | 6 +- core/scripts.py | 51 +- core/search/__init__.py | 0 core/search/coverage_remover.py | 33 + core/search/document.py | 255 ++++ core/search/migrator.py | 178 +++ core/search/revision.py | 41 + core/search/revision_directory.py | 52 + core/search/service.py | 370 +++++ core/search/v5.py | 300 ++++ scripts.py | 14 + .../api/admin/controller/test_custom_lists.py | 63 +- tests/api/admin/controller/test_dashboard.py | 2 +- tests/api/admin/controller/test_lanes.py | 1 - .../admin/controller/test_search_services.py | 40 +- .../api/admin/controller/test_work_editor.py | 10 +- .../feed/equivalence/test_feed_equivalence.py | 49 +- tests/api/feed/test_library_annotator.py | 3 - tests/api/feed/test_opds_acquisition_feed.py | 11 +- tests/api/mockapi/circulation.py | 21 +- tests/api/test_controller_cm.py | 4 +- tests/api/test_controller_crawlfeed.py | 12 +- tests/api/test_controller_work.py | 17 +- tests/api/test_lanes.py | 21 +- tests/api/test_opds.py | 11 +- tests/api/test_scripts.py | 95 +- tests/core/mock.py | 12 - tests/core/models/test_work.py | 40 +- tests/core/search/__init__.py | 0 tests/core/search/test_documents.py | 168 +++ tests/core/search/test_migration_states.py | 119 ++ tests/core/search/test_migrator.py | 250 ++++ .../search/test_search_revision_directory.py | 50 + tests/core/search/test_service.py | 158 +++ tests/core/test_external_search.py | 560 +++----- tests/core/test_lane.py | 47 +- tests/core/test_marc.py | 20 +- tests/core/test_opds.py | 134 +- tests/core/test_opds2.py | 59 +- tests/core/test_scripts.py | 173 ++- tests/fixtures/api_controller.py | 6 +- tests/fixtures/search.py | 192 ++- tests/mocks/__init__.py | 0 tests/mocks/search.py | 293 ++++ 47 files changed, 3389 insertions(+), 1780 deletions(-) create mode 100644 core/search/__init__.py create mode 100644 core/search/coverage_remover.py create mode 100644 core/search/document.py create mode 100644 core/search/migrator.py create mode 100644 core/search/revision.py create mode 100644 core/search/revision_directory.py create mode 100644 core/search/service.py create mode 100644 core/search/v5.py create mode 100644 tests/core/search/__init__.py create mode 100644 tests/core/search/test_documents.py create mode 100644 tests/core/search/test_migration_states.py create mode 100644 tests/core/search/test_migrator.py create mode 100644 tests/core/search/test_search_revision_directory.py create mode 100644 tests/core/search/test_service.py create mode 100644 tests/mocks/__init__.py create mode 100644 tests/mocks/search.py diff --git a/api/admin/controller/custom_lists.py b/api/admin/controller/custom_lists.py index c117aa215c..905a857062 100644 --- a/api/admin/controller/custom_lists.py +++ b/api/admin/controller/custom_lists.py @@ -283,7 
+283,12 @@ def _create_or_update_list( if membership_change: # We need to update the search index entries for works that caused a membership change, # so the upstream counts can be calculated correctly. - self.search_engine.bulk_update(works_to_update_in_search) + documents = self.search_engine.create_search_documents_from_works( + works_to_update_in_search + ) + index = self.search_engine.start_updating_search_documents() + index.add_documents(documents) + index.finish() # If this list was used to populate any lanes, those lanes need to have their counts updated. for lane in Lane.affected_by_customlist(list): diff --git a/core/coverage.py b/core/coverage.py index 89a11efa29..b4d344e3fb 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -230,7 +230,6 @@ def operation(self): def run(self): start = utc_now() result = self.run_once_and_update_timestamp() - result = result or CoverageProviderProgress() self.finalize_timestampdata(result, start=start) return result diff --git a/core/external_search.py b/core/external_search.py index 9cb826b207..458c35700f 100644 --- a/core/external_search.py +++ b/core/external_search.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import contextlib import datetime import json @@ -5,11 +7,11 @@ import re import time from collections import defaultdict -from typing import Any, Dict, Optional, Union +from typing import Any, Callable, Dict, Iterable, List, Optional, Union from attr import define from flask_babel import lazy_gettext as _ -from opensearch_dsl import SF, MultiSearch, Search +from opensearch_dsl import SF, Search from opensearch_dsl.query import ( Bool, DisMax, @@ -25,10 +27,9 @@ from opensearch_dsl.query import Query as BaseQuery from opensearch_dsl.query import Range, Regexp, Term, Terms from opensearchpy import OpenSearch -from opensearchpy.exceptions import OpenSearchException, RequestError -from opensearchpy.helpers import bulk as opensearch_bulk from spellchecker import SpellChecker +from core.search.coverage_remover import RemovesSearchCoverage from core.util import Values from core.util.languages import LanguageNames @@ -57,6 +58,15 @@ numericrange_to_tuple, ) from .problem_details import INVALID_INPUT +from .search.migrator import ( + SearchDocumentReceiver, + SearchDocumentReceiverType, + SearchMigrationInProgress, + SearchMigrator, +) +from .search.revision import SearchSchemaRevision +from .search.revision_directory import SearchRevisionDirectory +from .search.service import SearchService, SearchServiceOpensearch1 from .selftest import HasSelfTests from .util.cache import CachedData from .util.datetime_helpers import from_timestamp @@ -78,7 +88,6 @@ def mock_search_index(mock=None): class ExternalSearchIndex(HasSelfTests): - NAME = ExternalIntegration.OPENSEARCH # A test may temporarily set this to a mock of this class. 
@@ -89,17 +98,9 @@ class ExternalSearchIndex(HasSelfTests): WORKS_INDEX_PREFIX_KEY = "works_index_prefix" DEFAULT_WORKS_INDEX_PREFIX = "circulation-works" - TEST_SEARCH_TERM_KEY = "test_search_term" + TEST_SEARCH_TERM_KEY = "a search term" DEFAULT_TEST_SEARCH_TERM = "test" - - SEARCH_VERSION = "search_version" - SEARCH_VERSION_OS1_X = "Opensearch 1.x" - DEFAULT_SEARCH_VERSION = SEARCH_VERSION_OS1_X - - __client = None - CURRENT_ALIAS_SUFFIX = "current" - VERSION_RE = re.compile("-v([0-9]+)$") SETTINGS = [ { @@ -123,70 +124,17 @@ class ExternalSearchIndex(HasSelfTests): "default": DEFAULT_TEST_SEARCH_TERM, "description": _("Self tests will use this value as the search term."), }, - { - "key": SEARCH_VERSION, - "label": _("The search service version"), - "default": DEFAULT_SEARCH_VERSION, - "description": _( - "Which version of the search engine is being used. Changing this value will require a CM restart." - ), - "required": True, - "type": "select", - "options": [ - {"key": SEARCH_VERSION_OS1_X, "label": SEARCH_VERSION_OS1_X}, - ], - }, ] SITEWIDE = True @classmethod - def reset(cls): - """Resets the __client object to None so a new configuration - can be applied during object initialization. - - This method is only intended for use in testing. - """ - cls.__client = None - - @classmethod - def search_integration(cls, _db) -> ExternalIntegration: + def search_integration(cls, _db) -> Optional[ExternalIntegration]: """Look up the ExternalIntegration for Opensearch.""" return ExternalIntegration.lookup( _db, ExternalIntegration.OPENSEARCH, goal=ExternalIntegration.SEARCH_GOAL ) - @classmethod - def works_prefixed(cls, _db, value): - """Prefix the given value with the prefix to use when generating index - and alias names. - - :return: A string "{prefix}-{value}", or None if no prefix is configured. - - """ - integration = cls.search_integration(_db) - if not integration: - return None - setting = integration.setting(cls.WORKS_INDEX_PREFIX_KEY) - prefix = setting.value_or_default(cls.DEFAULT_WORKS_INDEX_PREFIX) - return prefix + "-" + value - - @classmethod - def works_index_name(cls, _db): - """Look up the name of the search index. - - It's possible, but unlikely, that the search index alias will - point to some other index. But if there were no indexes, and a - new one needed to be created, this would be the name of that - index. 
- """ - return cls.works_prefixed(_db, CurrentMapping.version_name()) - - @classmethod - def works_alias_name(cls, _db): - """Look up the name of the search index alias.""" - return cls.works_prefixed(_db, cls.CURRENT_ALIAS_SUFFIX) - @classmethod def load(cls, _db, *args, **kwargs): """Load a generic implementation.""" @@ -194,298 +142,120 @@ def load(cls, _db, *args, **kwargs): return cls.MOCK_IMPLEMENTATION return cls(_db, *args, **kwargs) + _bulk: Callable[..., Any] + _revision: SearchSchemaRevision + _revision_base_name: str + _revision_directory: SearchRevisionDirectory + _search: Search + _search_migrator: SearchMigrator + _search_service: SearchService + _search_read_pointer: str + _test_search_term: str + def __init__( self, _db, - url=None, - works_index=None, - test_search_term=None, - in_testing=False, - mapping=None, - version=None, + url: Optional[str] = None, + test_search_term: Optional[str] = None, + revision_directory: Optional[SearchRevisionDirectory] = None, + version: Optional[int] = None, + custom_client_service: Optional[SearchService] = None, ): """Constructor - :param in_testing: Set this to true if you don't want an - Opensearch client to be created, e.g. because you're - running a unit test of the constructor. - - - :param mapping: A custom Mapping object, for use in unit tests. By - default, the most recent mapping will be instantiated. + :param revision_directory Override the directory of revisions that will be used. If this isn't provided, + the default directory will be used. + :param version The specific revision that will be used. If not specified, the highest version in the + revision directory will be used. """ self.log = logging.getLogger("External search index") - self.works_index = None - self.works_alias = None - integration = None - - self.version = None - integration = self.search_integration(_db) - if not integration: - raise CannotLoadConfiguration("No search integration configured.") - - valid_versions = [self.SEARCH_VERSION_OS1_X] - if version and version not in valid_versions: - raise ValueError( - f"{version} is not a valid search version, must be one of {valid_versions}" - ) - elif version: - self.version = version - else: - self.version = integration.setting(self.SEARCH_VERSION).value_or_default( - self.DEFAULT_SEARCH_VERSION - ) - - self.mapping = mapping or CurrentMapping(self) - - if isinstance(url, ExternalIntegration): - # This is how the self-test initializes this object. - integration = url - url = integration.url + # We can't proceed without a database. if not _db: raise CannotLoadConfiguration( "Cannot load Search configuration without a database.", ) - # initialize the cached data if not already done so - CachedData.initialize(_db) + # Load the search integration. 
+ integration = self.search_integration(_db) + if not integration: + raise CannotLoadConfiguration("No search integration configured.") - if not url or not works_index: + if not url: url = url or integration.url - if not works_index: - works_index = self.works_index_name(_db) test_search_term = integration.setting(self.TEST_SEARCH_TERM_KEY).value - if not url: - raise CannotLoadConfiguration("No URL configured to the search server.") - self.test_search_term = test_search_term or self.DEFAULT_TEST_SEARCH_TERM - - if not in_testing: - if not ExternalSearchIndex.__client: - use_ssl = url.startswith("https://") - self.log.info( - "Connecting to index %s in the search cluster at %s", - works_index, - url, - ) - ExternalSearchIndex.__client = OpenSearch( - url, use_ssl=use_ssl, timeout=20, maxsize=25 - ) - - self.indices = self.__client.indices - self.index = self.__client.index - self.delete = self.__client.delete - self.exists = self.__client.exists - self.put_script = self.__client.put_script - - # Sets self.works_index and self.works_alias values. - # Document upload runs against the works_index. - # Search queries run against works_alias. - if works_index and integration and not in_testing: - try: - self.set_works_index_and_alias(_db) - except RequestError: - # This is almost certainly a problem with our code, - # not a communications error. - raise - except OpenSearchException as e: - raise CannotLoadConfiguration( - "Exception communicating with Search server: %s" % repr(e) - ) - - self.search = Search(using=self.__client, index=self.works_alias) - def bulk(docs, **kwargs): - return opensearch_bulk(self.__client, docs, **kwargs) + self._test_search_term = test_search_term or self.DEFAULT_TEST_SEARCH_TERM - self.bulk = bulk + if not url: + raise CannotLoadConfiguration("No URL configured to the search server.") - def prime_query_values(self, _db): - JSONQuery.data_sources = _db.query(DataSource).all() + # Determine the base name we're going to use for storing revisions. + self._revision_base_name = integration.setting( + ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY + ).value - def set_works_index_and_alias(self, _db): - """Finds or creates the works_index and works_alias based on - the current configuration. - """ - # The index name to use is the one known to be right for this - # version. - self.works_index = self.__client.works_index = self.works_index_name(_db) - if not self.indices.exists(self.works_index): - # That index doesn't actually exist. Set it up. - self.setup_index() + # Create the necessary search client, and the service used by the schema migrator. + if custom_client_service: + self._search_service = custom_client_service else: - # Update the mapping incase there are any new properties - self._update_index_mapping() - - # Make sure the alias points to the most recent index. - self.setup_current_alias(_db) - - # Make sure the stored scripts for the latest mapping exist. 
- self.set_stored_scripts() - - def _update_index_mapping(self, dry_run=False) -> Dict: - """Updates the index mapping with any NEW properties added""" - - def _properties(mapping: Dict) -> Dict: - """We may or may not have doc types depending on the versioning""" - return mapping["properties"] - - current_mapping: Dict = _properties( - self.indices.get_mapping(self.works_index)[self.works_index]["mappings"] - ) - new_mapping = _properties(self.mapping.body()["mappings"]) - puts = {} - for name, v in new_mapping.items(): - split_name = name.split(".")[0] # dot based names become dicts - if split_name not in current_mapping: - puts[name] = v - - if not dry_run and puts: - self.indices.put_mapping( - dict(properties=puts), - index=self.works_index, + use_ssl = url.startswith("https://") + self.log.info("Connecting to the search cluster at %s", url) + new_client = OpenSearch(url, use_ssl=use_ssl, timeout=20, maxsize=25) + self._search_service = SearchServiceOpensearch1( + new_client, self._revision_base_name ) - self.log.info(f"Updated {self.works_index} mapping with {puts}") - return puts - - def setup_current_alias(self, _db): - """Finds or creates the works_alias as named by the current site - settings. - - If the resulting alias exists and is affixed to a different - index or if it can't be generated for any reason, the alias will - not be created or moved. Instead, the search client will use the - the works_index directly for search queries. - """ - alias_name = self.works_alias_name(_db) - alias_is_set = self.indices.exists_alias(name=alias_name) - - def _use_as_works_alias(name): - self.works_alias = self.__client.works_alias = name - - if alias_is_set: - # The alias exists on the Opensearch server, so it must - # point _somewhere. - exists_on_works_index = self.indices.exists_alias( - index=self.works_index, name=alias_name - ) - if exists_on_works_index: - # It points to the index we were expecting it to point to. - # Use it. - _use_as_works_alias(alias_name) - else: - # The alias exists but it points somewhere we didn't - # expect. Rather than changing how the alias works and - # then using the alias, use the index directly instead - # of going through the alias. - _use_as_works_alias(self.works_index) - return - - # Create the alias and search against it. - response = self.indices.put_alias(index=self.works_index, name=alias_name) - if not response.get("acknowledged"): - self.log.error("Alias '%s' could not be created", alias_name) - # Work against the index instead of an alias. - _use_as_works_alias(self.works_index) - return - _use_as_works_alias(alias_name) - - def setup_index(self, new_index=None, **index_settings): - """Create the search index with appropriate mapping. - - This will destroy the search index, and all works will need - to be indexed again. In production, don't use this on an - existing index. Use it to create a new index, then change the - alias to point to the new index. - """ - index_name = new_index or self.works_index - if self.indices.exists(index_name): - self.log.info("Deleting index %s", index_name) - self.indices.delete(index_name) - - self.log.info("Creating index %s", index_name) - body = self.mapping.body() - body.setdefault("settings", {}).update(index_settings) - index = self.indices.create(index=index_name, body=body) - - def set_stored_scripts(self): - for name, definition in self.mapping.stored_scripts(): - # Make sure the name of the script is scoped and versioned. 
- if not name.startswith("simplified."): - name = self.mapping.script_name(name) - - # If only the source code was provided, configure it as a - # Painless script. - if isinstance(definition, (bytes, str)): - definition = dict(script=dict(lang="painless", source=definition)) - - # Put it in the database. - self.put_script(name, definition) - - def transfer_current_alias(self, _db, new_index): - """Force -current alias onto a new index""" - if not self.indices.exists(index=new_index): - raise ValueError("Index '%s' does not exist on this client." % new_index) - - current_base_name = self.base_index_name(self.works_index) - new_base_name = self.base_index_name(new_index) - - if new_base_name != current_base_name: - raise ValueError( - ( - "Index '%s' is not in series with current index '%s'. " - "Confirm the base name (without version number) of both indices" - "is the same." - ) - % (new_index, self.works_index) - ) - - self.works_index = self.__client.works_index = new_index - alias_name = self.works_alias_name(_db) - exists = self.indices.exists_alias(name=alias_name) - if not exists: - # The alias doesn't already exist. Set it. - self.setup_current_alias(_db) - return + # Locate the revision of the search index that we're going to use. + # This will fail fast if the requested version isn't available. + self._revision_directory = ( + revision_directory or SearchRevisionDirectory.create() + ) + if version: + self._revision = self._revision_directory.find(version) + else: + self._revision = self._revision_directory.highest() - # We know the alias already exists. Before we set it to point - # to self.works_index, we may need to remove it from some - # other indices. - other_indices = list(self.indices.get_alias(name=alias_name).keys()) + # initialize the cached data if not already done so + CachedData.initialize(_db) - if self.works_index in other_indices: - # If the alias already points to the works index, - # that's fine -- we want to see if it points to any - # _other_ indices. - other_indices.remove(self.works_index) + # Get references to the read and write pointers. + self._search_read_pointer = self._search_service.read_pointer_name() + self._search_write_pointer = self._search_service.write_pointer_name() - if other_indices: - # The alias exists on one or more other indices. Remove - # the alias altogether, then put it back on the works - # index. 
- self.indices.delete_alias(index="_all", name=alias_name) - self.indices.put_alias(index=self.works_index, name=alias_name) + def search_service(self) -> SearchService: + """Get the underlying search service.""" + return self._search_service - self.works_alias = self.__client.works_alias = alias_name + def start_migration(self) -> Optional[SearchMigrationInProgress]: + """Update to the latest schema, indexing the given works.""" + migrator = SearchMigrator( + revisions=self._revision_directory, + service=self._search_service, + ) + return migrator.migrate( + base_name=self._revision_base_name, version=self._revision.version + ) - def base_index_name(self, index_or_alias): - """Removes version or current suffix from base index name""" + def start_updating_search_documents(self) -> SearchDocumentReceiver: + """Start submitting search documents for whatever is the current write pointer.""" + return SearchDocumentReceiver( + pointer=self._search_write_pointer, service=self._search_service + ) - current_re = re.compile(self.CURRENT_ALIAS_SUFFIX + "$") - base_works_index = re.sub(current_re, "", index_or_alias) - base_works_index = re.sub(self.VERSION_RE, "", base_works_index) + def clear_search_documents(self) -> None: + self._search_service.index_clear_documents(pointer=self._search_write_pointer) - return base_works_index + def prime_query_values(self, _db): + JSONQuery.data_sources = _db.query(DataSource).all() def create_search_doc(self, query_string, filter, pagination, debug): - if filter and filter.search_type == "json": query = JSONQuery(query_string, filter) else: query = Query(query_string, filter) - search = query.build(self.search, pagination) + search = query.build(self._search_service.search_client(), pagination) if debug: search = search.extra(explain=True) @@ -540,8 +310,15 @@ def query_works(self, query_string, filter=None, pagination=None, debug=False): pagination = pagination or Pagination.default() query_data = (query_string, filter, pagination) - [result] = self.query_works_multi([query_data], debug) - return result + query_hits = self.query_works_multi([query_data], debug) + if not query_hits: + return [] + + result_list = list(query_hits) + if not result_list: + return [] + + return result_list[0] def query_works_multi(self, queries, debug=False): """Run several queries simultaneously and return the results @@ -554,21 +331,12 @@ def query_works_multi(self, queries, debug=False): each containing the search results from that (query string, Filter, Pagination) 3-tuple. """ - # If the works alias is not set, all queries return empty. - # - # TODO: Maybe an unset works_alias should raise - # CannotLoadConfiguration in the constructor. Then we wouldn't - # have to worry about this. - if not self.works_alias: - for q in queries: - yield [] - # Create a MultiSearch. - multi = MultiSearch(using=self.__client) + multi = self._search_service.search_multi_client() # Give it a Search object for every query definition passed in # as part of `queries`. 
- for (query_string, filter, pagination) in queries: + for query_string, filter, pagination in queries: search = self.create_search_doc( query_string, filter=filter, pagination=pagination, debug=debug ) @@ -622,118 +390,46 @@ def count_works(self, filter): ) return qu.count() - def bulk_update(self, works, retry_on_batch_failure=True): - """Upload a batch of works to the search index at once.""" - + def create_search_documents_from_works( + self, works: Iterable[Work] + ) -> Iterable[dict]: + """Create search documents for all the given works.""" if not works: # There's nothing to do. Don't bother making any requests # to the search index. - return [], [] + return [] time1 = time.time() needs_add = [] - successes = [] for work in works: needs_add.append(work) # Add/update any works that need adding/updating. docs = Work.to_search_documents(needs_add) - - for doc in docs: - doc["_index"] = self.works_index time2 = time.time() - success_count, errors = self.bulk( - docs, - raise_on_error=False, - raise_on_exception=False, - ) - - # If the entire update failed, try it one more time before - # giving up on the batch. - if len(errors) == len(docs): - if retry_on_batch_failure: - self.log.info("Opensearch bulk update timed out, trying again.") - return self.bulk_update(needs_add, retry_on_batch_failure=False) - else: - docs = [] - - time3 = time.time() self.log.info( "Created %i search documents in %.2f seconds" % (len(docs), time2 - time1) ) - self.log.info( - "Uploaded %i search documents in %.2f seconds" % (len(docs), time3 - time2) - ) - - doc_ids = [d["_id"] for d in docs] - - # We weren't able to create search documents for these works, maybe - # because they don't have presentation editions yet. - def get_error_id(error): - return error.get("data", {}).get("_id", None) or error.get("index", {}).get( - "_id", None - ) - - error_ids = [get_error_id(error) for error in errors] - - missing_works = [ - work - for work in works - if work.id not in doc_ids - and work.id not in error_ids - and work not in successes - ] - - successes.extend( - [work for work in works if work.id in doc_ids and work.id not in error_ids] - ) - - failures = [] - for missing in missing_works: - failures.append((work, "Work not indexed")) - - for error in errors: - - error_id = get_error_id(error) - work = None - works_with_error = [work for work in works if work.id == error_id] - if works_with_error: - work = works_with_error[0] - - exception = error.get("exception", None) - error_message = error.get("error", None) - if not error_message: - error_message = error.get("index", {}).get("error", None) - - failures.append((work, error_message)) - - self.log.info( - "Successfully indexed %i documents, failed to index %i." 
- % (success_count, len(failures)) - ) - - return successes, failures + return docs def remove_work(self, work): """Remove the search document for `work` from the search index.""" - args = dict(index=self.works_index, id=work.id) - args["doc_type"] = "_doc" - - if self.exists(**args): - self.delete(**args) + self._search_service.index_remove_document( + pointer=self._search_read_pointer, id=work.id + ) - def _run_self_tests(self, _db, in_testing=False): + def _run_self_tests(self, _db): # Helper methods for setting up the self-tests: def _search(): return self.create_search_doc( - self.test_search_term, filter=None, pagination=None, debug=True + self._test_search_term, filter=None, pagination=None, debug=True ) def _works(): return self.query_works( - self.test_search_term, filter=None, pagination=None, debug=True + self._test_search_term, filter=None, pagination=None, debug=True ) # The self-tests: @@ -743,36 +439,32 @@ def _search_for_term(): return titles yield self.run_test( - ("Search results for '%s':" % (self.test_search_term)), _search_for_term + ("Search results for '%s':" % self._test_search_term), _search_for_term ) def _get_raw_doc(): search = _search() - if in_testing: - if not len(search): - return str(search) - search = search[0] return json.dumps(search.to_dict(), indent=1) yield self.run_test( - ("Search document for '%s':" % (self.test_search_term)), _get_raw_doc + ("Search document for '%s':" % (self._test_search_term)), _get_raw_doc ) def _get_raw_results(): return [json.dumps(x.to_dict(), indent=1) for x in _works()] yield self.run_test( - ("Raw search results for '%s':" % (self.test_search_term)), _get_raw_results + ("Raw search results for '%s':" % (self._test_search_term)), + _get_raw_results, ) def _count_docs(): - # The mock methods used in testing return a list, so we have to call len() rather than count(). - if in_testing: - return str(len(self.search)) - return str(self.search.count()) + service = self.search_service() + client = service.search_client() + return str(client.count()) yield self.run_test( - ("Total number of search results for '%s':" % (self.test_search_term)), + ("Total number of search results for '%s':" % (self._test_search_term)), _count_docs, ) @@ -795,461 +487,25 @@ def _collections(): yield self.run_test("Total number of documents per collection:", _collections) + def initialize_indices(self) -> bool: + """Attempt to initialize the indices and pointers for a first time run""" + service = self.search_service() + read_pointer = service.read_pointer() + if not read_pointer or service.is_pointer_empty(read_pointer): + # A read pointer does not exist, or points to the empty index + # This means either this is a new deployment or the first time + # the new opensearch code was deployed. + # In both cases doing a migration to the latest version is safe. + migration = self.start_migration() + if migration is not None: + migration.finish() + else: + self.log.warning( + "Read pointer was set to empty, but no migration was available." + ) + return False -class MappingDocument: - """This class knows a lot about how the 'properties' section of an - Opensearch mapping document (or one of its subdocuments) is - created. - """ - - def __init__(self, service: ExternalSearchIndex): - self.service = service - self.properties: Dict[str, Any] = {} - self.subdocuments: Dict[str, Any] = {} - - def add_property(self, name, type, **description): - """Add a field to the list of properties. - - :param name: Name of the field as found in search documents. 
- :param type: Type of the field. This may be a custom type, - so long as a hook method is defined for that type. - :param description: Description of the field. - """ - # TODO: For some fields we could set index: False here, which - # would presumably lead to a smaller index and faster - # updates. However, it might hurt performance of - # searches. When this code is more mature we can do a - # side-by-side comparison. - - defaults = dict(index=True, store=False) - description["type"] = type - for default_name, default_value in list(defaults.items()): - if default_name not in description: - description[default_name] = default_value - - hook_method = getattr(self, type + "_property_hook", None) - if hook_method is not None: - hook_method(description) - # TODO: Cross-check the description for correctness. Do the - # things it mention actually exist? Better to fail now with a - # useful error than to fail when talking to Opensearch. - self.properties[name] = description - - def add_properties(self, properties_by_type): - """Turn a dictionary mapping types to field names into a - bunch of add_property() calls. - - Useful when you have a lot of fields that don't need any - customization. - """ - for type, properties in list(properties_by_type.items()): - for name in properties: - self.add_property(name, type) - - def subdocument(self, name): - """Create a new HasProperties object and register it as a - sub-document of this one. - """ - subdocument = MappingDocument(self.service) - self.subdocuments[name] = subdocument - return subdocument - - def basic_text_property_hook(self, description): - """Hook method to handle the custom 'basic_text' - property type. - - This type does not exist in Opensearch. It's our name for a - text field that is indexed three times: once using our default - English analyzer ("title"), once using an analyzer with - minimal stemming ("title.minimal") for close matches, and once - using an analyzer that leaves stopwords in place, for searches - that rely on stopwords. - """ - description["type"] = "text" - description["analyzer"] = "en_default_text_analyzer" - description["fields"] = { - "minimal": {"type": "text", "analyzer": "en_minimal_text_analyzer"}, - "with_stopwords": { - "type": "text", - "analyzer": "en_with_stopwords_text_analyzer", - }, - } - - def filterable_text_property_hook(self, description): - """Hook method to handle the custom 'filterable_text' - property type. - - This type does not exist in Opensearch. It's our name for a - text field that can be used in both queries and filters. - - This field is indexed _four_ times -- the three ways a normal - text field is indexed, plus again as an unparsed keyword that - can be used in filters. - """ - self.basic_text_property_hook(description) - description["fields"]["keyword"] = { - "type": "keyword", - "index": True, - "store": False, - "normalizer": "filterable_string", - } - - def keyword_property_hook(self, description): - """Hook method to ensure the keyword type attributes are case-insensitive""" - description["normalizer"] = "filterable_string" - - -class Mapping(MappingDocument): - """A class that defines the mapping for a particular version of the search index. - - Code that won't change between versions can go here. (Or code that - can change between versions without affecting anything.) 
- """ - - VERSION_NAME: Optional[str] = None - - @classmethod - def version_name(cls): - """Return the name of this Mapping subclass.""" - version = cls.VERSION_NAME - if not version: - raise NotImplementedError("VERSION_NAME not defined") - if not version.startswith("v"): - version = "v%s" % version - return version - - @classmethod - def script_name(cls, base_name): - """Scope a script name with "simplified" (to avoid confusion with - other applications on the Opensearch server), and the - version number (to avoid confusion with other versions *of - this application*, which may implement the same script - differently, on this Opensearch server). - """ - return f"simplified.{base_name}.{cls.version_name()}" - - def __init__(self, service): - super().__init__(service) - self.filters = {} - self.char_filters = {} - self.normalizers = {} - self.analyzers = {} - - def create(self, search_client, base_index_name): - """Ensure that an index exists in `search_client` for this Mapping. - - :return: True or False, indicating whether the index was created new. - """ - versioned_index = base_index_name + "-" + self.version_name() - if search_client.indices.exists(index=versioned_index): - return False - else: - search_client.setup_index(new_index=versioned_index) - return True - - def sort_author_keyword_property_hook(self, description): - """Give the `sort_author` property its custom analyzer.""" - description["type"] = "text" - description["analyzer"] = "en_sort_author_analyzer" - description["fielddata"] = True - - def body(self): - """Generate the body of the mapping document for this version of the - mapping. - """ - settings = dict( - analysis=dict( - filter=self.filters, - char_filter=self.char_filters, - normalizer=self.normalizers, - analyzer=self.analyzers, - ) - ) - - # Start with the normally defined properties. - properties = dict(self.properties) - - # Add subdocuments as additional properties. - for name, subdocument in list(self.subdocuments.items()): - properties[name] = dict(type="nested", properties=subdocument.properties) - - mappings = dict(properties=properties) - return dict(settings=settings, mappings=mappings) - - -class CurrentMapping(Mapping): - """The first mapping to support only Opensearch 1.x. - - The body of this mapping looks for bibliographic information in - the core document, primarily used for matching search - requests. It also has nested documents, which are used for - filtering and ranking Works when generating other types of - feeds: - - * licensepools -- the Work has these LicensePools (includes current - availability as a boolean, but not detailed availability information) - * customlists -- the Work is on these CustomLists - * contributors -- these Contributors worked on the Work - """ - - VERSION_NAME = "v5" - - # Use regular expressions to normalized values in sortable fields. - # These regexes are applied in order; that way "H. G. Wells" - # becomes "H G Wells" becomes "HG Wells". - CHAR_FILTERS = { - "remove_apostrophes": dict( - type="pattern_replace", - pattern="'", - replacement="", - ) - } - AUTHOR_CHAR_FILTER_NAMES = [] - for name, pattern, replacement in [ - # The special author name "[Unknown]" should sort after everything - # else. REPLACEMENT CHARACTER is the final valid Unicode character. - ("unknown_author", r"\[Unknown\]", "\N{REPLACEMENT CHARACTER}"), - # Works by a given primary author should be secondarily sorted - # by title, not by the other contributors. - ("primary_author_only", r"\s+;.*", ""), - # Remove parentheticals (e.g. 
the full name of someone who - # goes by initials). - ("strip_parentheticals", r"\s+\([^)]+\)", ""), - # Remove periods from consideration. - ("strip_periods", r"\.", ""), - # Collapse spaces for people whose sort names end with initials. - ("collapse_three_initials", r" ([A-Z]) ([A-Z]) ([A-Z])$", " $1$2$3"), - ("collapse_two_initials", r" ([A-Z]) ([A-Z])$", " $1$2"), - ]: - normalizer = dict( - type="pattern_replace", pattern=pattern, replacement=replacement - ) - CHAR_FILTERS[name] = normalizer - AUTHOR_CHAR_FILTER_NAMES.append(name) - - def __init__(self, service): - super().__init__(service) - - # Set up character filters. - # - self.char_filters = self.CHAR_FILTERS - - # This normalizer is used on freeform strings that - # will be used as tokens in filters. This way we can, - # e.g. ignore capitalization when considering whether - # two books belong to the same series or whether two - # author names are the same. - self.normalizers["filterable_string"] = dict( - type="custom", filter=["lowercase", "asciifolding"] - ) - - # Set up analyzers. - # - - # We use three analyzers: - # - # 1. An analyzer based on Opensearch's default English - # analyzer, with a normal stemmer -- used as the default - # view of a text field such as 'description'. - # - # 2. An analyzer that's exactly the same as #1 but with a less - # aggressive stemmer -- used as the 'minimal' view of a - # text field such as 'description.minimal'. - # - # 3. An analyzer that's exactly the same as #2 but with - # English stopwords left in place instead of filtered out -- - # used as the 'with_stopwords' view of a text field such as - # 'title.with_stopwords'. - # - # The analyzers are identical except for the end of the filter - # chain. - # - # All three analyzers are based on Opensearch's default English - # analyzer, defined here: - # https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-lang-analyzer.html#english-analyzer - - # First, recreate the filters from the default English - # analyzer. We'll be using these to build our own analyzers. - - # Filter out English stopwords. - self.filters["english_stop"] = dict(type="stop", stopwords=["_english_"]) - - # The default English stemmer, used in the en_default analyzer. - self.filters["english_stemmer"] = dict(type="stemmer", language="english") - - # A less aggressive English stemmer, used in the en_minimal analyzer. - self.filters["minimal_english_stemmer"] = dict( - type="stemmer", language="minimal_english" - ) - - # A filter that removes English posessives such as "'s" - self.filters["english_posessive_stemmer"] = dict( - type="stemmer", language="possessive_english" - ) - - # Some potentially useful filters that are currently not used: - # - # * keyword_marker -- Exempt certain keywords from stemming - # * synonym -- Introduce synonyms for words - # (but probably better to use synonym_graph during the search - # -- it's more flexible). - - # Here's the common analyzer configuration. The comment NEW - # means this is something we added on top of Opensearch's - # default configuration for the English analyzer. - common_text_analyzer = dict( - type="custom", - char_filter=["html_strip", "remove_apostrophes"], # NEW - tokenizer="standard", - ) - common_filter = [ - "lowercase", - "asciifolding", # NEW - ] - - # The default_text_analyzer uses Opensearch's standard - # English stemmer and removes stopwords. 
- self.analyzers["en_default_text_analyzer"] = dict(common_text_analyzer) - self.analyzers["en_default_text_analyzer"]["filter"] = common_filter + [ - "english_stop", - "english_stemmer", - ] - - # The minimal_text_analyzer uses a less aggressive English - # stemmer, and removes stopwords. - self.analyzers["en_minimal_text_analyzer"] = dict(common_text_analyzer) - self.analyzers["en_minimal_text_analyzer"]["filter"] = common_filter + [ - "english_stop", - "minimal_english_stemmer", - ] - - # The en_with_stopwords_text_analyzer uses the less aggressive - # stemmer and does not remove stopwords. - self.analyzers["en_with_stopwords_text_analyzer"] = dict(common_text_analyzer) - self.analyzers["en_with_stopwords_text_analyzer"]["filter"] = common_filter + [ - "minimal_english_stemmer" - ] - - # Now we need to define a special analyzer used only by the - # 'sort_author' property. - - # Here's a special filter used only by that analyzer. It - # duplicates the filter used by the icu_collation_keyword data - # type. - self.filters["en_sortable_filter"] = dict( - type="icu_collation", language="en", country="US" - ) - - # Here's the analyzer used by the 'sort_author' property. - # It's the same as icu_collation_keyword, but it has some - # extra character filters -- regexes that do things like - # convert "Tolkien, J. R. R." to "Tolkien, JRR". - # - # This is necessary because normal icu_collation_keyword - # fields can't specify char_filter. - self.analyzers["en_sort_author_analyzer"] = dict( - tokenizer="keyword", - filter=["en_sortable_filter"], - char_filter=self.AUTHOR_CHAR_FILTER_NAMES, - ) - - # Now, the main event. Set up the field properties for the - # base document. - fields_by_type = { - "basic_text": ["summary"], - "filterable_text": [ - "title", - "subtitle", - "series", - "classifications.term", - "author", - "publisher", - "imprint", - ], - "boolean": ["presentation_ready"], - "icu_collation_keyword": ["sort_title"], - "sort_author_keyword": ["sort_author"], - "integer": ["series_position", "work_id"], - "long": ["last_update_time", "published"], - "keyword": ["audience", "language"], - } - self.add_properties(fields_by_type) - - # Set up subdocuments. - contributors = self.subdocument("contributors") - contributor_fields = { - "filterable_text": ["sort_name", "display_name", "family_name"], - "keyword": ["role", "lc", "viaf"], - } - contributors.add_properties(contributor_fields) - - licensepools = self.subdocument("licensepools") - licensepool_fields = { - "integer": ["collection_id", "data_source_id"], - "long": ["availability_time"], - "boolean": ["available", "open_access", "suppressed", "licensed"], - "keyword": ["medium"], - } - licensepools.add_properties(licensepool_fields) - - identifiers = self.subdocument("identifiers") - identifier_fields = {"keyword": ["identifier", "type"]} - identifiers.add_properties(identifier_fields) - - genres = self.subdocument("genres") - genre_fields = { - "keyword": ["scheme", "name", "term"], - "float": ["weight"], - } - genres.add_properties(genre_fields) - - customlists = self.subdocument("customlists") - customlist_fields = { - "integer": ["list_id"], - "long": ["first_appearance"], - "boolean": ["featured"], - } - customlists.add_properties(customlist_fields) - - @classmethod - def stored_scripts(cls): - """This version defines a single stored script, "work_last_update", - defined below. - """ - yield "work_last_update", cls.WORK_LAST_UPDATE_SCRIPT - - # Definition of the work_last_update_script. 
- WORK_LAST_UPDATE_SCRIPT = """ -double champion = -1; -// Start off by looking at the work's last update time. -for (candidate in doc['last_update_time']) { - if (champion == -1 || candidate > champion) { champion = candidate; } -} -if (params.collection_ids != null && params.collection_ids.length > 0) { - // Iterate over all licensepools looking for a pool in a collection - // relevant to this filter. When one is found, check its - // availability time to see if it's later than the last update time. - for (licensepool in params._source.licensepools) { - if (!params.collection_ids.contains(licensepool['collection_id'])) { continue; } - double candidate = licensepool['availability_time']; - if (champion == -1 || candidate > champion) { champion = candidate; } - } -} -if (params.list_ids != null && params.list_ids.length > 0) { - - // Iterate over all customlists looking for a list relevant to - // this filter. When one is found, check the previous work's first - // appearance on that list to see if it's later than the last - // update time. - for (customlist in params._source.customlists) { - if (!params.list_ids.contains(customlist['list_id'])) { continue; } - double candidate = customlist['first_appearance']; - if (champion == -1 || candidate > champion) { champion = candidate; } - } -} - -return champion; -""" + return True class SearchBase: @@ -3093,15 +2349,14 @@ def last_update_time_script_field(self): filter=dict(terms={"customlists.list_id": list(all_list_ids)}), ) params = dict(collection_ids=collection_ids, list_ids=list(all_list_ids)) - return dict( - script=dict( - stored=CurrentMapping.script_name("work_last_update"), params=params - ) + # Messy, but this is the only way to get the "current mapping" for the index + script_name = ( + SearchRevisionDirectory.create().highest().script_name("work_last_update") ) + return dict(script=dict(stored=script_name, params=params)) @property def _last_update_time_order_by(self): - """We're sorting works by the time of their 'last update'. Add the 'last update' field to the dictionary of script fields @@ -3489,129 +2744,7 @@ def __getattr__(self, k): return getattr(self._work, k) -class MockExternalSearchIndex(ExternalSearchIndex): - def __init__(self, url=None, version=ExternalSearchIndex.SEARCH_VERSION_OS1_X): - self.url = url - self.docs = {} - self.works_index = "works" - self.works_alias = "works-current" - self.log = logging.getLogger("Mock external search index") - self.queries = [] - self.search = list(self.docs.keys()) - self.test_search_term = "a search term" - self.version = version - - def _key(self, index, id): - return (index, id) - - def index(self, index, id, body): - self.docs[self._key(index, id)] = body - self.search = list(self.docs.keys()) - - def delete(self, index, id): - key = self._key(index, id) - if key in self.docs: - del self.docs[key] - - def exists(self, index, id, doc_type=None): - return self._key(index, id) in self.docs - - def create_search_doc( - self, query_string, filter=None, pagination=None, debug=False - ): - return list(self.docs.values()) - - def query_works(self, query_string, filter, pagination, debug=False): - self.queries.append((query_string, filter, pagination, debug)) - # During a test we always sort works by the order in which the - # work was created. - - def sort_key(x): - # This needs to work with either a MockSearchResult or a - # dictionary representing a raw search result. 
- if isinstance(x, MockSearchResult): - return x.work_id - else: - return x["_id"] - - docs = sorted(list(self.docs.values()), key=sort_key) - if pagination: - start_at = 0 - if isinstance(pagination, SortKeyPagination): - # Figure out where the previous page ended by looking - # for the corresponding work ID. - if pagination.last_item_on_previous_page: - look_for = pagination.last_item_on_previous_page[-1] - for i, x in enumerate(docs): - if x["_id"] == look_for: - start_at = i + 1 - break - else: - start_at = pagination.offset - stop = start_at + pagination.size - docs = docs[start_at:stop] - - results = [] - for x in docs: - if isinstance(x, MockSearchResult): - results.append(x) - else: - results.append(MockSearchResult(x["title"], x["author"], {}, x["_id"])) - - if pagination: - pagination.page_loaded(results) - return results - - def query_works_multi(self, queries, debug=False): - # Implement query_works_multi by calling query_works several - # times. This is the opposite of what happens in the - # non-mocked ExternalSearchIndex, because it's easier to mock - # the simple case and performance isn't an issue. - for (query_string, filter, pagination) in queries: - yield self.query_works(query_string, filter, pagination, debug) - - def count_works(self, filter): - return len(self.docs) - - def bulk(self, docs, **kwargs): - for doc in docs: - self.index(doc["_index"], doc["_id"], doc) - return len(docs), [] - - -class MockMeta(dict): - """Mock the .meta object associated with an Opensearch search - result. This is necessary to get SortKeyPagination to work with - MockExternalSearchIndex. - """ - - @property - def sort(self): - return self["_sort"] - - -class MockSearchResult: - def __init__(self, sort_title, sort_author, meta, id): - self.sort_title = sort_title - self.sort_author = sort_author - meta["id"] = id - meta["_sort"] = [sort_title, sort_author, id] - self.meta = MockMeta(meta) - self.work_id = id - - def __contains__(self, k): - return False - - def to_dict(self): - return { - "title": self.sort_title, - "author": self.sort_author, - "id": self.meta["id"], - "meta": self.meta, - } - - -class SearchIndexCoverageProvider(WorkPresentationProvider): +class SearchIndexCoverageProvider(RemovesSearchCoverage, WorkPresentationProvider): """Make sure all Works have up-to-date representation in the search index. """ @@ -3627,16 +2760,55 @@ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self.search_index_client = search_index_client or ExternalSearchIndex(self._db) - def process_batch(self, works): - """ - :return: a mixed list of Works and CoverageFailure objects. - """ - successes, failures = self.search_index_client.bulk_update(works) + # + # Try to migrate to the latest schema. If the function returns None, it means + # that no migration is necessary, and we're already at the latest version. If + # we're already at the latest version, then simply upload search documents instead. + # + self.receiver = None + self.migration: Optional[ + SearchMigrationInProgress + ] = self.search_index_client.start_migration() + if self.migration is None: + self.receiver: SearchDocumentReceiver = ( + self.search_index_client.start_updating_search_documents() + ) + else: + # We do have a migration, we must clear out the index and repopulate the index + self.remove_search_coverage_records() + + def on_completely_finished(self): + # Tell the search migrator that no more documents are going to show up. 
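+        # Exactly one of self.migration / self.receiver was set in __init__, so this
+        # picks whichever is active; for a migration, finish() also moves the read and
+        # write pointers to the newly populated index.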
+ target: SearchDocumentReceiverType = self.migration or self.receiver + target.finish() + + def run_once_and_update_timestamp(self): + # We do not catch exceptions here, so that the on_completely finished should not run + # if there was a runtime error + result = super().run_once_and_update_timestamp() + self.on_completely_finished() + return result + + def process_batch(self, works) -> List[Work | CoverageFailure]: + target: SearchDocumentReceiverType = self.migration or self.receiver + failures = target.add_documents( + documents=self.search_index_client.create_search_documents_from_works(works) + ) - records = list(successes) - for (work, error) in failures: - if not isinstance(error, (bytes, str)): - error = repr(error) - records.append(CoverageFailure(work, error)) + # Maintain a dictionary of works so that we can efficiently remove failed works later. + work_map: Dict[int, Work] = {} + for work in works: + work_map[work.id] = work + + # Remove all the works that failed and create failure records for them. + results: List[Work | CoverageFailure] = [] + for failure in failures: + work = work_map[failure.id] + del work_map[failure.id] + results.append(CoverageFailure(work, repr(failure))) - return records + # Append all the remaining works that didn't fail. + for work in work_map.values(): + results.append(work) + + return results diff --git a/core/opds.py b/core/opds.py index 942bf29114..a429d37b6e 100644 --- a/core/opds.py +++ b/core/opds.py @@ -3,14 +3,14 @@ import datetime import logging from collections import defaultdict -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional from urllib.parse import quote from lxml import etree from sqlalchemy.orm import joinedload from sqlalchemy.orm.session import Session -from core.external_search import QueryParseException +from core.external_search import ExternalSearchIndex, QueryParseException from core.problem_details import INVALID_INPUT from .classifier import Classifier @@ -741,7 +741,7 @@ def page( facets=None, pagination=None, max_age=None, - search_engine=None, + search_engine: Optional[ExternalSearchIndex] = None, search_debug=False, **response_kwargs, ): diff --git a/core/scripts.py b/core/scripts.py index 44bb746dd4..328de301a3 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -21,6 +21,7 @@ from core.model.devicetokens import DeviceToken, DeviceTokenTypes from core.model.patron import Loan from core.query.customlist import CustomListQueries +from core.search.coverage_remover import RemovesSearchCoverage from core.util.notifications import PushNotifications from .config import CannotLoadConfiguration, Configuration, ConfigurationConstants @@ -48,7 +49,6 @@ Subject, Timestamp, Work, - WorkCoverageRecord, create, get_one, get_one_or_create, @@ -2463,7 +2463,9 @@ class WhereAreMyBooksScript(CollectionInputScript): is being configured. 
""" - def __init__(self, _db=None, output=None, search=None): + def __init__( + self, _db=None, output=None, search: Optional[ExternalSearchIndex] = None + ): _db = _db or self._db super().__init__(_db) self.output = output or sys.stdout @@ -2636,6 +2638,11 @@ def add_line(id, name, protocol, metadata_identifier): class UpdateLaneSizeScript(LaneSweeperScript): + def __init__(self, _db, *args, **kwargs): + super().__init__(_db, *args, **kwargs) + search = kwargs.get("search_index_client", None) + self._search: ExternalSearchIndex = search or ExternalSearchIndex(self._db) + def should_process_lane(self, lane): """We don't want to process generic WorkLists -- there's nowhere to store the data. @@ -2652,7 +2659,7 @@ def process_lane(self, lane): # This is done because calling site_configuration_has_changed repeatedly # was causing performance problems, when we have lots of lanes to update. lane._suppress_before_flush_listeners = True - lane.update_size(self._db) + lane.update_size(self._db, search_engine=self._search) self.log.info("%s: %d", lane.full_identifier, lane.size) def do_run(self, *args, **kwargs): @@ -2665,50 +2672,16 @@ def process_custom_list(self, custom_list): custom_list.update_size(self._db) -class RemovesSearchCoverage: - """Mix-in class for a script that might remove all coverage records - for the search engine. - """ - - def remove_search_coverage_records(self): - """Delete all search coverage records from the database. - - :return: The number of records deleted. - """ - wcr = WorkCoverageRecord - clause = wcr.operation == wcr.UPDATE_SEARCH_INDEX_OPERATION - count = self._db.query(wcr).filter(clause).count() - - # We want records to be updated in ascending order in order to avoid deadlocks. - # To guarantee lock order, we explicitly acquire locks by using a subquery with FOR UPDATE (with_for_update). - # Please refer for my details to this SO article: - # https://stackoverflow.com/questions/44660368/postgres-update-with-order-by-how-to-do-it - self._db.execute( - wcr.__table__.delete().where( - wcr.id.in_( - self._db.query(wcr.id) - .with_for_update() - .filter(clause) - .order_by(WorkCoverageRecord.id) - ) - ) - ) - - return count - - class RebuildSearchIndexScript(RunWorkCoverageProviderScript, RemovesSearchCoverage): """Completely delete the search index and recreate it.""" def __init__(self, *args, **kwargs): search = kwargs.get("search_index_client", None) - self.search = search or ExternalSearchIndex(self._db) + self.search: ExternalSearchIndex = search or ExternalSearchIndex(self._db) super().__init__(SearchIndexCoverageProvider, *args, **kwargs) def do_run(self): - # Calling setup_index will destroy the index and recreate it - # empty. - self.search.setup_index() + self.search.clear_search_documents() # Remove all search coverage records so the # SearchIndexCoverageProvider will start from scratch. diff --git a/core/search/__init__.py b/core/search/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/core/search/coverage_remover.py b/core/search/coverage_remover.py new file mode 100644 index 0000000000..a764b6ab6c --- /dev/null +++ b/core/search/coverage_remover.py @@ -0,0 +1,33 @@ +from core.model.coverage import WorkCoverageRecord + + +class RemovesSearchCoverage: + """Mix-in class for a script that might remove all coverage records + for the search engine. + """ + + def remove_search_coverage_records(self): + """Delete all search coverage records from the database. + + :return: The number of records deleted. 
+ """ + wcr = WorkCoverageRecord + clause = wcr.operation == wcr.UPDATE_SEARCH_INDEX_OPERATION + count = self._db.query(wcr).filter(clause).count() + + # We want records to be updated in ascending order in order to avoid deadlocks. + # To guarantee lock order, we explicitly acquire locks by using a subquery with FOR UPDATE (with_for_update). + # Please refer for my details to this SO article: + # https://stackoverflow.com/questions/44660368/postgres-update-with-order-by-how-to-do-it + self._db.execute( + wcr.__table__.delete().where( + wcr.id.in_( + self._db.query(wcr.id) + .with_for_update() + .filter(clause) + .order_by(WorkCoverageRecord.id) + ) + ) + ) + + return count diff --git a/core/search/document.py b/core/search/document.py new file mode 100644 index 0000000000..987433ed9c --- /dev/null +++ b/core/search/document.py @@ -0,0 +1,255 @@ +from abc import ABC, abstractmethod +from typing import Dict + + +class SearchMappingFieldType(ABC): + """ + The type of field types. Subclasses of this class implement the serialization + behaviour for specific types. + + https://opensearch.org/docs/latest/field-types/supported-field-types/index/ + """ + + @abstractmethod + def serialize(self) -> dict: + pass + + +class SearchMappingFieldTypeScalar(SearchMappingFieldType): + """ + A scalar field type such as "boolean", "long", "integer", etc. + + See: https://opensearch.org/docs/latest/field-types/supported-field-types/index/ + """ + + def __init__(self, name: str): + self._name = name + + def serialize(self) -> dict: + return {"type": self._name} + + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/binary/ +BINARY: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("binary") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/boolean/ +BOOLEAN: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("boolean") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/numeric/ +BYTE: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("byte") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/numeric/ +DOUBLE: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("double") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/numeric/ +FLOAT: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("float") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/numeric/ +HALF_FLOAT: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("half_float") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/numeric/ +INTEGER: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("integer") + +# See: https://opensearch.org/docs/latest/opensearch/supported-field-types/ip/ +IP: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("ip") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/numeric/ +LONG: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("long") + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/unsigned-long/ +UNSIGNED_LONG: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar( + "unsigned_long" +) + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/numeric/ +SHORT: SearchMappingFieldTypeScalar = SearchMappingFieldTypeScalar("short") + + +class SearchMappingFieldTypeParameterized(SearchMappingFieldType): + """The base class for types that have parameters (date, keyword, 
etc)""" + + _parameters: Dict[str, str] + + def __init__(self, name: str): + self._name = name + self._parameters = {} + + @property + def parameters(self) -> Dict[str, str]: + return self._parameters + + def serialize(self) -> dict: + output = dict(self._parameters) + output["type"] = self._name + return output + + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/date/ +def date() -> SearchMappingFieldTypeParameterized: + return SearchMappingFieldTypeParameterized("date") + + +# See: https://opensearch.org/docs/latest/field-types/supported-field-types/keyword/ +def keyword() -> SearchMappingFieldTypeParameterized: + mapping = SearchMappingFieldTypeParameterized("keyword") + mapping.parameters["normalizer"] = "filterable_string" + return mapping + + +# See: https://www.elastic.co/guide/en/elasticsearch/plugins/current/analysis-icu-collation-keyword-field.html +def icu_collation_keyword() -> SearchMappingFieldTypeParameterized: + return SearchMappingFieldTypeParameterized("icu_collation_keyword") + + +def sort_author_keyword() -> SearchMappingFieldTypeParameterized: + t = SearchMappingFieldTypeParameterized("text") + t.parameters["analyzer"] = "en_sort_author_analyzer" + t.parameters["fielddata"] = "true" + return t + + +class SearchMappingFieldTypeObject(SearchMappingFieldType): + """See: https://opensearch.org/docs/latest/field-types/supported-field-types/object/""" + + _properties: Dict[str, SearchMappingFieldType] + + def __init__(self, type: str): + self._type = type + self._properties = {} + + @property + def properties(self) -> Dict[str, SearchMappingFieldType]: + return self._properties + + def add_property(self, name, type: SearchMappingFieldType): + self.properties[name] = type + + def serialize(self) -> dict: + output_properties: dict = {} + for name, prop in self._properties.items(): + output_properties[name] = prop.serialize() + + return {"type": self._type, "properties": output_properties} + + +def nested() -> SearchMappingFieldTypeObject: + """See: https://opensearch.org/docs/latest/field-types/supported-field-types/object/""" + return SearchMappingFieldTypeObject("nested") + + +class SearchMappingFieldTypeCustom(SearchMappingFieldType, ABC): + """The base class for our custom Opensearch types.""" + + +class SearchMappingFieldTypeCustomBasicText(SearchMappingFieldTypeCustom): + """The custom 'basic_text' property type. + + This type does not exist in Opensearch. It's our name for a + text field that is indexed three times: once using our default + English analyzer ("title"), once using an analyzer with + minimal stemming ("title.minimal") for close matches, and once + using an analyzer that leaves stopwords in place, for searches + that rely on stopwords. + """ + + def serialize(self) -> dict: + return { + "type": "text", + "analyzer": "en_default_text_analyzer", + "fields": { + "minimal": {"type": "text", "analyzer": "en_minimal_text_analyzer"}, + "with_stopwords": { + "type": "text", + "analyzer": "en_with_stopwords_text_analyzer", + }, + }, + } + + +BASIC_TEXT: SearchMappingFieldTypeCustomBasicText = ( + SearchMappingFieldTypeCustomBasicText() +) + + +class SearchMappingFieldTypeCustomFilterable(SearchMappingFieldTypeCustom): + """The custom 'filterable_text' property type. + + This type does not exist in Opensearch. It's our name for a + text field that can be used in both queries and filters. 
+ + This field is indexed _four_ times -- the three ways a normal + text field is indexed, plus again as an unparsed keyword that + can be used in filters. + """ + + def __init__(self): + self._basic = SearchMappingFieldTypeCustomBasicText() + + def serialize(self) -> dict: + output = self._basic.serialize() + output["fields"]["keyword"] = { + "type": "keyword", + "index": True, + "store": False, + "normalizer": "filterable_string", + } + return output + + +FILTERABLE_TEXT: SearchMappingFieldTypeCustomFilterable = ( + SearchMappingFieldTypeCustomFilterable() +) + + +class SearchMappingFieldTypeCustomKeyword(SearchMappingFieldTypeCustom): + """A custom extension to the keyword type that ensures case-insensitivity.""" + + def __init__(self): + self._base = keyword() + + def serialize(self) -> dict: + output = self._base.serialize() + output["normalizer"] = "filterable_string" + return output + + +CUSTOM_KEYWORD: SearchMappingFieldTypeCustomKeyword = ( + SearchMappingFieldTypeCustomKeyword() +) + + +class SearchMappingDocument: + """ + A top-level Opensearch mapping document. + + See: https://opensearch.org/docs/latest/field-types/index/ + """ + + def __init__(self): + self._settings: Dict[str, dict] = {} + self._fields: Dict[str, SearchMappingFieldType] = {} + self._scripts: Dict[str, str] = {} + + @property + def settings(self) -> Dict[str, dict]: + return self._settings + + @property + def scripts(self) -> Dict[str, str]: + return self._scripts + + @property + def properties(self) -> Dict[str, SearchMappingFieldType]: + return self._fields + + @properties.setter + def properties(self, fields: Dict[str, SearchMappingFieldType]): + self._fields = dict(fields) + + def serialize(self) -> dict: + output_properties = self.serialize_properties() + output_mappings = {"properties": output_properties} + return {"settings": self.settings, "mappings": output_mappings} + + def serialize_properties(self): + return {name: prop.serialize() for name, prop in self._fields.items()} diff --git a/core/search/migrator.py b/core/search/migrator.py new file mode 100644 index 0000000000..e68141e259 --- /dev/null +++ b/core/search/migrator.py @@ -0,0 +1,178 @@ +import logging +from abc import ABC, abstractmethod +from typing import Iterable, List, Optional + +from core.search.revision import SearchSchemaRevision +from core.search.revision_directory import SearchRevisionDirectory +from core.search.service import SearchService, SearchServiceFailedDocument + + +class SearchMigrationException(Exception): + """The type of exceptions raised by the search migrator.""" + + def __init__(self, fatal: bool, message: str): + super().__init__(message) + self.fatal = fatal + + +class SearchDocumentReceiverType(ABC): + """A receiver of search documents.""" + + @abstractmethod + def add_documents( + self, documents: Iterable[dict] + ) -> List[SearchServiceFailedDocument]: + """Submit documents to be indexed.""" + + @abstractmethod + def finish(self) -> None: + """Make sure all changes are committed.""" + + +class SearchDocumentReceiver(SearchDocumentReceiverType): + """A receiver of search documents.""" + + def __init__(self, pointer: str, service: SearchService): + self._logger = logging.getLogger(SearchDocumentReceiver.__name__) + self._pointer = pointer + self._service = service + + @property + def pointer(self) -> str: + """The name of the index that will receive search documents.""" + return self._pointer + + def add_documents( + self, documents: Iterable[dict] + ) -> List[SearchServiceFailedDocument]: + """Submit documents 
to be indexed.""" + return self._service.index_submit_documents( + pointer=self._pointer, documents=documents + ) + + def finish(self) -> None: + """Make sure all changes are committed.""" + self._logger.info("Finishing search documents.") + self._service.refresh() + self._logger.info("Finished search documents.") + + +class SearchMigrationInProgress(SearchDocumentReceiverType): + """A migration in progress. Documents are being submitted, and the migration must be + explicitly finished or cancelled to take effect (or not!).""" + + def __init__( + self, + base_name: str, + revision: SearchSchemaRevision, + service: SearchService, + ): + self._logger = logging.getLogger(SearchMigrationInProgress.__name__) + self._base_name = base_name + self._revision = revision + self._service = service + self._receiver = SearchDocumentReceiver( + pointer=self._revision.name_for_index(base_name), service=self._service + ) + + def add_documents( + self, documents: Iterable[dict] + ) -> List[SearchServiceFailedDocument]: + """Submit documents to be indexed.""" + return self._receiver.add_documents(documents) + + def finish(self) -> None: + """Finish the migration.""" + self._logger.info(f"Completing migration to {self._revision.version}") + # Make sure all changes are committed. + self._receiver.finish() + # Create the "indexed" alias. + self._service.index_set_populated(self._revision) + # Update the write pointer to point to the now-populated index. + self._service.write_pointer_set(self._revision) + # Set the read pointer to point at the now-populated index + self._service.read_pointer_set(self._revision) + self._service.refresh() + self._logger.info(f"Completed migration to {self._revision.version}") + + def cancel(self) -> None: + """Cancel the migration, leaving the read and write pointers untouched.""" + self._logger.info(f"Cancelling migration to {self._revision.version}") + return None + + +class SearchMigrator: + """A search migrator. This moves a search service to the targeted schema version.""" + + def __init__(self, revisions: SearchRevisionDirectory, service: SearchService): + self._logger = logging.getLogger(SearchMigrator.__name__) + self._revisions = revisions + self._service = service + + def migrate( + self, base_name: str, version: int + ) -> Optional[SearchMigrationInProgress]: + """ + Migrate to the given version using the given base name (such as 'circulation-works'). The function returns + an object that expects to receive batches of search documents used to populate any new index. When all + the batches of documents have been sent to the object, callers must call 'finish' to indicate to the search + migrator that no more documents are coming. Only at this point will the migrator consider the new index to be + "populated". + + :arg base_name: The base name used for indices (such as 'circulation-works'). + :arg version: The version number to which we are migrating + + :raises SearchMigrationException: On errors, but always leaves the system in a usable state. + """ + + self._logger.info(f"starting migration to {base_name} {version}") + + try: + target = self._revisions.available.get(version) + if target is None: + raise SearchMigrationException( + fatal=True, + message=f"No support is available for schema version {version}", + ) + + # Does the empty index exist? Create it if not. + self._service.create_empty_index() + + # Does the read pointer exist? Point it at the empty index if not. 
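+            # (With the read pointer on the empty index, reads resolve against a real,
+            # if empty, index instead of failing on a missing alias.)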
+ read = self._service.read_pointer() + if read is None: + self._logger.info("Read pointer did not exist.") + self._service.read_pointer_set_empty() + + # We're probably going to have to do a migration. We might end up returning + # this instance so that users can submit documents for indexing. + in_progress = SearchMigrationInProgress( + base_name=base_name, revision=target, service=self._service + ) + + # Does the write pointer exist? + write = self._service.write_pointer() + if write is None or (not write.version == version): + self._logger.info( + f"Write pointer does not point to the desired version: {write} != {version}." + ) + # Either the write pointer didn't exist, or it's pointing at a version + # other than the one we want. Create a new index for the version we want. + self._service.index_create(target) + self._service.index_set_mapping(target) + + # The index now definitely exists, but it might not be populated. Populate it if necessary. + if not self._service.index_is_populated(target): + self._logger.info("Write index is not populated.") + return in_progress + + # If we didn't need to return the migration, finish it here. This will + # update the read and write pointers appropriately. + in_progress.finish() + return None + except SearchMigrationException: + raise + except Exception as e: + raise SearchMigrationException( + fatal=True, message=f"Service raised exception: {repr(e)}" + ) from e diff --git a/core/search/revision.py b/core/search/revision.py new file mode 100644 index 0000000000..4bfd539011 --- /dev/null +++ b/core/search/revision.py @@ -0,0 +1,41 @@ +from abc import ABC, abstractmethod + +from core.search.document import SearchMappingDocument + + +class SearchSchemaRevision(ABC): + """ + A versioned schema revision. A revision has an associated version number and can produce + a top-level Opensearch mapping document on demand. Revision version numbers are unique, + and revisions are treated as immutable once created. 
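+
+    Subclasses must set SEARCH_VERSION to an integer (for example, SearchV5 uses 5);
+    the version is embedded in index names such as "circulation-works-v5".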
+ """ + + _version: int + # The SEARCH_VERSION variable MUST be populated in the implemented child classes + SEARCH_VERSION: int + + def __init__(self): + if self.SEARCH_VERSION is None: + raise ValueError("The SEARCH_VERSION must be defined with an integer value") + self._version = self.SEARCH_VERSION + + @abstractmethod + def mapping_document(self) -> SearchMappingDocument: + """Produce a mapping document for this schema revision.""" + + @property + def version(self) -> int: + return self._version + + def name_for_index(self, base_name: str) -> str: + """Produce the name of the index as it will appear in Opensearch, + such as 'circulation-works-v5'.""" + return f"{base_name}-v{self.version}" + + def name_for_indexed_pointer(self, base_name: str) -> str: + """Produce the name of the "indexed pointer" as it will appear in Opensearch, + such as 'circulation-works-v5-indexed'.""" + return f"{base_name}-v{self.version}-indexed" + + def script_name(self, script_name): + return f"simplified.{script_name}.v{self.version}" diff --git a/core/search/revision_directory.py b/core/search/revision_directory.py new file mode 100644 index 0000000000..6adbdb01bf --- /dev/null +++ b/core/search/revision_directory.py @@ -0,0 +1,52 @@ +from typing import Mapping + +from core.config import CannotLoadConfiguration +from core.search.revision import SearchSchemaRevision +from core.search.v5 import SearchV5 + +REVISIONS = [SearchV5()] + + +class SearchRevisionDirectory: + """A directory of the supported search index schemas.""" + + @staticmethod + def _create_revisions() -> Mapping[int, SearchSchemaRevision]: + numbers = set() + revisions = {} + for r in REVISIONS: + if r.version in numbers: + raise ValueError( + f"Revision version {r.version} is defined multiple times" + ) + numbers.add(r.version) + revisions[r.version] = r + return revisions + + def __init__(self, available: Mapping[int, SearchSchemaRevision]): + self._available = available + + @staticmethod + def create() -> "SearchRevisionDirectory": + return SearchRevisionDirectory(SearchRevisionDirectory._create_revisions()) + + @staticmethod + def empty() -> "SearchRevisionDirectory": + return SearchRevisionDirectory({}) + + @property + def available(self) -> Mapping[int, SearchSchemaRevision]: + return self._available + + def find(self, version: int) -> SearchSchemaRevision: + """Find the revision with the given version number.""" + try: + return self._available[version] + except KeyError: + raise CannotLoadConfiguration( + f"No revision available with version {version}" + ) + + def highest(self) -> SearchSchemaRevision: + """Find the revision with the highest version.""" + return self.find(max(self._available.keys())) diff --git a/core/search/service.py b/core/search/service.py new file mode 100644 index 0000000000..aad21dc2e8 --- /dev/null +++ b/core/search/service.py @@ -0,0 +1,370 @@ +import logging +import re +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Iterable, List, Optional + +import opensearchpy.helpers +from opensearch_dsl import MultiSearch, Search +from opensearchpy import NotFoundError, OpenSearch, RequestError + +from core.search.revision import SearchSchemaRevision + + +@dataclass +class SearchWritePointer: + """The 'write' pointer; the pointer that will be used to populate an index with search documents.""" + + base_name: str + version: int + + @property + def name(self) -> str: + return f"{self.base_name}-search-write" + + @property + def target_name(self) -> str: + return 
f"{self.base_name}-v{self.version}" + + +class SearchServiceException(Exception): + """The type of exceptions raised by the search service.""" + + def __init__(self, message: str): + super().__init__(message) + + +@dataclass +class SearchServiceFailedDocument: + """An error indicating that a document failed to index.""" + + id: int + error_message: str + error_status: int + error_exception: str + + @classmethod + def from_bulk_error(cls, error: dict): + """Transform an error dictionary returned from opensearchpy's bulk API to a typed error""" + if error.get("index"): + error_indexed = error["index"] + error_id = int(error_indexed["_id"]) + error_status = error_indexed["status"] + error_reason = error_indexed["error"]["reason"] + return SearchServiceFailedDocument( + id=error_id, + error_message=error_reason, + error_status=error_status, + error_exception="", + ) + else: + # Not exactly ideal, but we really have no idea what the bulk API can return. + return SearchServiceFailedDocument( + id=-1, + error_message="Unrecognized error returned from Opensearch bulk API.", + error_status=-1, + error_exception=f"{error}", + ) + + +class SearchService(ABC): + """The interface we need from services like Opensearch. Essentially, it provides the operations we want with + sensible types, rather than the untyped pile of JSON the actual search client provides.""" + + @abstractmethod + def read_pointer_name(self) -> str: + """Get the name used for the read pointer.""" + + @abstractmethod + def write_pointer_name(self) -> str: + """Get the name used for the write pointer.""" + + @abstractmethod + def read_pointer(self) -> Optional[str]: + """Get the read pointer, if it exists.""" + + @abstractmethod + def write_pointer(self) -> Optional[SearchWritePointer]: + """Get the writer pointer, if it exists.""" + + @abstractmethod + def create_empty_index(self) -> None: + """Atomically create the empty index for the given base name.""" + + @abstractmethod + def read_pointer_set(self, revision: SearchSchemaRevision) -> None: + """Atomically set the read pointer to the index for the given revision and base name.""" + + @abstractmethod + def read_pointer_set_empty(self) -> None: + """Atomically set the read pointer to the empty index for the base name.""" + + @abstractmethod + def index_create(self, revision: SearchSchemaRevision) -> None: + """Atomically create an index for the given base name and revision.""" + + @abstractmethod + def indexes_created(self) -> List[str]: + """A log of all the indexes that have been created by this client service.""" + + @abstractmethod + def index_is_populated(self, revision: SearchSchemaRevision) -> bool: + """Return True if the index for the given base name and revision has been populated.""" + + @abstractmethod + def index_set_populated(self, revision: SearchSchemaRevision) -> None: + """Set an index as populated.""" + + @abstractmethod + def index_set_mapping(self, revision: SearchSchemaRevision) -> None: + """Set the schema mappings for the given index.""" + + @abstractmethod + def index_submit_documents( + self, + pointer: str, + documents: Iterable[dict], + ) -> List[SearchServiceFailedDocument]: + """Submit search documents to the given index.""" + + @abstractmethod + def write_pointer_set(self, revision: SearchSchemaRevision) -> None: + """Atomically set the write pointer to the index for the given revision and base name.""" + + @abstractmethod + def refresh(self): + """Synchronously refresh the service and wait for changes to be completed.""" + + @abstractmethod + def 
index_clear_documents(self, pointer: str): + """Clear all search documents in the given index.""" + + @abstractmethod + def search_client(self, write: bool = False) -> Search: + """Return the underlying search client.""" + + @abstractmethod + def search_multi_client(self, write: bool = False) -> MultiSearch: + """Return the underlying search client.""" + + @abstractmethod + def index_remove_document(self, pointer: str, id: int): + """Remove a specific document from the given index.""" + + @abstractmethod + def is_pointer_empty(self, pointer: str): + """Check to see if a pointer points to an empty index""" + + +class SearchServiceOpensearch1(SearchService): + """The real Opensearch 1.x service.""" + + def __init__(self, client: OpenSearch, base_revision_name: str): + self._logger = logging.getLogger(SearchServiceOpensearch1.__name__) + self._client = client + self._search = Search(using=self._client) + self.base_revision_name = base_revision_name + self._multi_search = MultiSearch(using=self._client) + self._indexes_created: List[str] = [] + + # Documents are not allowed to automatically create indexes. + self._client.cluster.put_settings( + body={"persistent": {"action": {"auto_create_index": "false"}}} + ) + + def indexes_created(self) -> List[str]: + return self._indexes_created + + def write_pointer(self) -> Optional[SearchWritePointer]: + try: + result: dict = self._client.indices.get_alias( + name=self.write_pointer_name() + ) + for name in result.keys(): + match = re.search(f"{self.base_revision_name}-v([0-9]+)", string=name) + if match: + return SearchWritePointer( + self.base_revision_name, int(match.group(1)) + ) + return None + except NotFoundError: + return None + + def create_empty_index(self) -> None: + try: + index_name = self._empty(self.base_revision_name) + self._logger.debug(f"creating empty index {index_name}") + self._client.indices.create(index=index_name) + self._indexes_created.append(index_name) + except RequestError as e: + if e.error == "resource_already_exists_exception": + return + raise e + + def read_pointer_set(self, revision: SearchSchemaRevision) -> None: + alias_name = self.read_pointer_name() + target_index = revision.name_for_index(self.base_revision_name) + action = { + "actions": [ + {"remove": {"index": "*", "alias": alias_name}}, + {"add": {"index": target_index, "alias": alias_name}}, + ] + } + self._logger.debug(f"setting read pointer {alias_name} to index {target_index}") + self._client.indices.update_aliases(body=action) + + def index_set_populated(self, revision: SearchSchemaRevision) -> None: + alias_name = revision.name_for_indexed_pointer(self.base_revision_name) + target_index = revision.name_for_index(self.base_revision_name) + action = { + "actions": [ + {"remove": {"index": "*", "alias": alias_name}}, + {"add": {"index": target_index, "alias": alias_name}}, + ] + } + self._logger.debug( + f"creating 'indexed' flag alias {alias_name} for index {target_index}" + ) + self._client.indices.update_aliases(body=action) + + def read_pointer_set_empty(self) -> None: + alias_name = self.read_pointer_name() + target_index = self._empty(self.base_revision_name) + action = { + "actions": [ + {"remove": {"index": "*", "alias": alias_name}}, + {"add": {"index": target_index, "alias": alias_name}}, + ] + } + self._logger.debug( + f"setting read pointer {alias_name} to empty index {target_index}" + ) + self._client.indices.update_aliases(body=action) + + def index_create(self, revision: SearchSchemaRevision) -> None: + try: + index_name = 
revision.name_for_index(self.base_revision_name) + self._logger.info(f"creating index {index_name}") + self._client.indices.create( + index=index_name, + body=revision.mapping_document().serialize(), + ) + self._indexes_created.append(index_name) + except RequestError as e: + if e.error == "resource_already_exists_exception": + return + raise e + + def index_is_populated(self, revision: SearchSchemaRevision) -> bool: + return self._client.indices.exists_alias( + name=revision.name_for_indexed_pointer(self.base_revision_name) + ) + + def index_set_mapping(self, revision: SearchSchemaRevision) -> None: + data = {"properties": revision.mapping_document().serialize_properties()} + index_name = revision.name_for_index(self.base_revision_name) + self._logger.debug(f"setting mappings for index {index_name}") + self._client.indices.put_mapping(index=index_name, body=data) + self._ensure_scripts(revision) + + def _ensure_scripts(self, revision: SearchSchemaRevision) -> None: + for name, body in revision.mapping_document().scripts.items(): + script = dict(script=dict(lang="painless", source=body)) + if not name.startswith("simplified"): + name = revision.script_name(name) + self._client.put_script(name, script) # type: ignore [misc] ## Seems the types aren't up to date + + def index_submit_documents( + self, pointer: str, documents: Iterable[dict] + ) -> List[SearchServiceFailedDocument]: + self._logger.info(f"submitting documents to index {pointer}") + + # Specifically override the target in all documents to the target pointer + # Add a hard requirement that the target be an alias (this prevents documents from implicitly creating + # indexes). + for document in documents: + document["_index"] = pointer + document["_require_alias"] = True + + # See: Sources for "streaming_bulk": + # https://github.com/opensearch-project/opensearch-py/blob/db972e615b9156b4e364091d6a893d64fb3ef4f3/opensearchpy/helpers/actions.py#L267 + # The documentation is incredibly vague about what the function actually returns, but these + # parameters _should_ cause it to return a tuple containing the number of successfully indexed documents + # and a list of documents that failed. Unfortunately, the type checker disagrees and the documentation + # gives no hint as to what an "int" might mean for errors. + (success_count, errors) = opensearchpy.helpers.bulk( + client=self._client, + actions=documents, + raise_on_error=False, + max_retries=3, + max_backoff=30, + yield_ok=False, + ) + + error_results: List[SearchServiceFailedDocument] = [] + if isinstance(errors, list): + for error in errors: + error_results.append(SearchServiceFailedDocument.from_bulk_error(error)) + else: + raise SearchServiceException( + f"Opensearch returned {errors} instead of a list of errors." 
+ ) + + return error_results + + def index_clear_documents(self, pointer: str): + self._client.delete_by_query( + index=pointer, body={"query": {"match_all": {}}}, wait_for_completion=True + ) + + def refresh(self): + self._logger.debug(f"waiting for indexes to become ready") + self._client.indices.refresh() + + def write_pointer_set(self, revision: SearchSchemaRevision) -> None: + alias_name = self.write_pointer_name() + target_index = revision.name_for_index(self.base_revision_name) + action = { + "actions": [ + {"remove": {"index": "*", "alias": alias_name}}, + {"add": {"index": target_index, "alias": alias_name}}, + ] + } + self._logger.debug(f"setting write pointer {alias_name} to {target_index}") + self._client.indices.update_aliases(body=action) + + def read_pointer(self) -> Optional[str]: + try: + result: dict = self._client.indices.get_alias(name=self.read_pointer_name()) + for name in result.keys(): + if name.startswith(f"{self.base_revision_name}-"): + return name + return None + except NotFoundError: + return None + + def search_client(self, write=False) -> Search: + return self._search.index( + self.read_pointer_name() if not write else self.write_pointer_name() + ) + + def search_multi_client(self, write=False) -> MultiSearch: + return self._multi_search.index( + self.read_pointer_name() if not write else self.write_pointer_name() + ) + + def read_pointer_name(self) -> str: + return f"{self.base_revision_name}-search-read" + + def write_pointer_name(self) -> str: + return f"{self.base_revision_name}-search-write" + + @staticmethod + def _empty(base_name): + return f"{base_name}-empty" + + def index_remove_document(self, pointer: str, id: int): + self._client.delete(index=pointer, id=id, doc_type="_doc") + + def is_pointer_empty(self, pointer: str) -> bool: + return pointer == self._empty(self.base_revision_name) diff --git a/core/search/v5.py b/core/search/v5.py new file mode 100644 index 0000000000..d206f5e78e --- /dev/null +++ b/core/search/v5.py @@ -0,0 +1,300 @@ +from typing import Dict + +from core.search.document import ( + BASIC_TEXT, + BOOLEAN, + FILTERABLE_TEXT, + FLOAT, + INTEGER, + LONG, + SearchMappingDocument, + SearchMappingFieldType, + icu_collation_keyword, + keyword, + nested, + sort_author_keyword, +) +from core.search.revision import SearchSchemaRevision + + +class SearchV5(SearchSchemaRevision): + SEARCH_VERSION = 5 + """ + The body of this mapping looks for bibliographic information in + the core document, primarily used for matching search + requests. It also has nested documents, which are used for + filtering and ranking Works when generating other types of + feeds: + + * licensepools -- the Work has these LicensePools (includes current + availability as a boolean, but not detailed availability information) + * customlists -- the Work is on these CustomLists + * contributors -- these Contributors worked on the Work + """ + + # Definition of the work_last_update_script. + WORK_LAST_UPDATE_SCRIPT = """ +double champion = -1; +// Start off by looking at the work's last update time. +for (candidate in doc['last_update_time']) { + if (champion == -1 || candidate > champion) { champion = candidate; } +} +if (params.collection_ids != null && params.collection_ids.length > 0) { + // Iterate over all licensepools looking for a pool in a collection + // relevant to this filter. When one is found, check its + // availability time to see if it's later than the last update time. 
+ for (licensepool in params._source.licensepools) { + if (!params.collection_ids.contains(licensepool['collection_id'])) { continue; } + double candidate = licensepool['availability_time']; + if (champion == -1 || candidate > champion) { champion = candidate; } + } +} +if (params.list_ids != null && params.list_ids.length > 0) { + + // Iterate over all customlists looking for a list relevant to + // this filter. When one is found, check the previous work's first + // appearance on that list to see if it's later than the last + // update time. + for (customlist in params._source.customlists) { + if (!params.list_ids.contains(customlist['list_id'])) { continue; } + double candidate = customlist['first_appearance']; + if (champion == -1 || candidate > champion) { champion = candidate; } + } +} + +return champion; +""" + + # Use regular expressions to normalized values in sortable fields. + # These regexes are applied in order; that way "H. G. Wells" + # becomes "H G Wells" becomes "HG Wells". + CHAR_FILTERS = { + "remove_apostrophes": dict( + type="pattern_replace", + pattern="'", + replacement="", + ) + } + + AUTHOR_CHAR_FILTER_NAMES = [] + for name, pattern, replacement in [ + # The special author name "[Unknown]" should sort after everything + # else. REPLACEMENT CHARACTER is the final valid Unicode character. + ("unknown_author", r"\[Unknown\]", "\N{REPLACEMENT CHARACTER}"), + # Works by a given primary author should be secondarily sorted + # by title, not by the other contributors. + ("primary_author_only", r"\s+;.*", ""), + # Remove parentheticals (e.g. the full name of someone who + # goes by initials). + ("strip_parentheticals", r"\s+\([^)]+\)", ""), + # Remove periods from consideration. + ("strip_periods", r"\.", ""), + # Collapse spaces for people whose sort names end with initials. + ("collapse_three_initials", r" ([A-Z]) ([A-Z]) ([A-Z])$", " $1$2$3"), + ("collapse_two_initials", r" ([A-Z]) ([A-Z])$", " $1$2"), + ]: + normalizer = dict( + type="pattern_replace", pattern=pattern, replacement=replacement + ) + CHAR_FILTERS[name] = normalizer + AUTHOR_CHAR_FILTER_NAMES.append(name) + + def __init__(self): + super().__init__() + + self._normalizers = {} + self._char_filters = {} + self._filters = {} + self._analyzers = {} + + # Set up character filters. + # + self._char_filters = self.CHAR_FILTERS + + # This normalizer is used on freeform strings that + # will be used as tokens in filters. This way we can, + # e.g. ignore capitalization when considering whether + # two books belong to the same series or whether two + # author names are the same. + self._normalizers["filterable_string"] = dict( + type="custom", filter=["lowercase", "asciifolding"] + ) + + # Set up analyzers. + # + + # We use three analyzers: + # + # 1. An analyzer based on Opensearch's default English + # analyzer, with a normal stemmer -- used as the default + # view of a text field such as 'description'. + # + # 2. An analyzer that's exactly the same as #1 but with a less + # aggressive stemmer -- used as the 'minimal' view of a + # text field such as 'description.minimal'. + # + # 3. An analyzer that's exactly the same as #2 but with + # English stopwords left in place instead of filtered out -- + # used as the 'with_stopwords' view of a text field such as + # 'title.with_stopwords'. + # + # The analyzers are identical except for the end of the filter + # chain. 
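+        # (en_default_text_analyzer: stopwords removed, full stemming;
+        # en_minimal_text_analyzer: stopwords removed, minimal stemming;
+        # en_with_stopwords_text_analyzer: minimal stemming, stopwords kept.)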
+ # + # All three analyzers are based on Opensearch's default English + # analyzer, defined here: + # https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-lang-analyzer.html#english-analyzer + + # First, recreate the filters from the default English + # analyzer. We'll be using these to build our own analyzers. + + # Filter out English stopwords. + self._filters["english_stop"] = dict(type="stop", stopwords=["_english_"]) + # The default English stemmer, used in the en_default analyzer. + self._filters["english_stemmer"] = dict(type="stemmer", language="english") + # A less aggressive English stemmer, used in the en_minimal analyzer. + self._filters["minimal_english_stemmer"] = dict( + type="stemmer", language="minimal_english" + ) + # A filter that removes English posessives such as "'s" + self._filters["english_posessive_stemmer"] = dict( + type="stemmer", language="possessive_english" + ) + + # Some potentially useful filters that are currently not used: + # + # * keyword_marker -- Exempt certain keywords from stemming + # * synonym -- Introduce synonyms for words + # (but probably better to use synonym_graph during the search + # -- it's more flexible). + + # Here's the common analyzer configuration. The comment NEW + # means this is something we added on top of Opensearch's + # default configuration for the English analyzer. + common_text_analyzer = dict( + type="custom", + char_filter=["html_strip", "remove_apostrophes"], # NEW + tokenizer="standard", + ) + common_filter = [ + "lowercase", + "asciifolding", # NEW + ] + + # The default_text_analyzer uses Opensearch's standard + # English stemmer and removes stopwords. + self._analyzers["en_default_text_analyzer"] = dict(common_text_analyzer) + self._analyzers["en_default_text_analyzer"]["filter"] = common_filter + [ + "english_stop", + "english_stemmer", + ] + + # The minimal_text_analyzer uses a less aggressive English + # stemmer, and removes stopwords. + self._analyzers["en_minimal_text_analyzer"] = dict(common_text_analyzer) + self._analyzers["en_minimal_text_analyzer"]["filter"] = common_filter + [ + "english_stop", + "minimal_english_stemmer", + ] + + # The en_with_stopwords_text_analyzer uses the less aggressive + # stemmer and does not remove stopwords. + self._analyzers["en_with_stopwords_text_analyzer"] = dict(common_text_analyzer) + self._analyzers["en_with_stopwords_text_analyzer"]["filter"] = common_filter + [ + "minimal_english_stemmer" + ] + + # Now we need to define a special analyzer used only by the + # 'sort_author' property. + + # Here's a special filter used only by that analyzer. It + # duplicates the filter used by the icu_collation_keyword data + # type. + self._filters["en_sortable_filter"] = dict( + type="icu_collation", language="en", country="US" + ) + + # Here's the analyzer used by the 'sort_author' property. + # It's the same as icu_collation_keyword, but it has some + # extra character filters -- regexes that do things like + # convert "Tolkien, J. R. R." to "Tolkien, JRR". + # + # This is necessary because normal icu_collation_keyword + # fields can't specify char_filter. 
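+        #
+        # The char filters are applied in AUTHOR_CHAR_FILTER_NAMES order, so a value
+        # like "Wells, H. G. ; Verne, Jules" collates as "Wells, HG".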
+ self._analyzers["en_sort_author_analyzer"] = dict( + tokenizer="keyword", + filter=["en_sortable_filter"], + char_filter=self.AUTHOR_CHAR_FILTER_NAMES, + ) + + self._fields: Dict[str, SearchMappingFieldType] = { + "summary": BASIC_TEXT, + "title": FILTERABLE_TEXT, + "subtitle": FILTERABLE_TEXT, + "series": FILTERABLE_TEXT, + "classifications.term": FILTERABLE_TEXT, + "author": FILTERABLE_TEXT, + "publisher": FILTERABLE_TEXT, + "imprint": FILTERABLE_TEXT, + "presentation_ready": BOOLEAN, + "sort_title": icu_collation_keyword(), + "sort_author": sort_author_keyword(), + "series_position": INTEGER, + "work_id": INTEGER, + "last_update_time": LONG, + "published": LONG, + "audience": keyword(), + "language": keyword(), + } + + contributors = nested() + contributors.add_property("display_name", FILTERABLE_TEXT) + contributors.add_property("sort_name", FILTERABLE_TEXT) + contributors.add_property("family_name", FILTERABLE_TEXT) + contributors.add_property("role", keyword()) + contributors.add_property("lc", keyword()) + contributors.add_property("viaf", keyword()) + self._fields["contributors"] = contributors + + licensepools = nested() + licensepools.add_property("collection_id", INTEGER) + licensepools.add_property("data_source_id", INTEGER) + licensepools.add_property("availability_time", LONG) + licensepools.add_property("available", BOOLEAN) + licensepools.add_property("open_access", BOOLEAN) + licensepools.add_property("suppressed", BOOLEAN) + licensepools.add_property("licensed", BOOLEAN) + licensepools.add_property("medium", keyword()) + self._fields["licensepools"] = licensepools + + identifiers = nested() + identifiers.add_property("type", keyword()) + identifiers.add_property("identifier", keyword()) + self._fields["identifiers"] = identifiers + + genres = nested() + genres.add_property("scheme", keyword()) + genres.add_property("name", keyword()) + genres.add_property("term", keyword()) + genres.add_property("weight", FLOAT) + self._fields["genres"] = genres + + customlists = nested() + customlists.add_property("list_id", INTEGER) + customlists.add_property("first_appearance", LONG) + customlists.add_property("featured", BOOLEAN) + self._fields["customlists"] = customlists + + def mapping_document(self) -> SearchMappingDocument: + document = SearchMappingDocument() + document.settings["analysis"] = dict( + filter=dict(self._filters), + char_filter=dict(self._char_filters), + normalizer=dict(self._normalizers), + analyzer=dict(self._analyzers), + ) + document.properties = self._fields + document.scripts[ + self.script_name("work_last_update") + ] = SearchV5.WORK_LAST_UPDATE_SCRIPT + return document diff --git a/scripts.py b/scripts.py index 2d3888e05f..07b2421684 100644 --- a/scripts.py +++ b/scripts.py @@ -879,6 +879,17 @@ def initialize_database(self, connection: Connection) -> None: alembic_conf = self._get_alembic_config(connection) command.stamp(alembic_conf, "head") + def initialize_search_indexes(self, _db: Session) -> bool: + try: + search = ExternalSearchIndex(_db) + except CannotLoadConfiguration as ex: + self.log.error( + "No search integration found yet, cannot initialize search indices." 
+ ) + self.log.error(f"Error: {ex}") + return False + return search.initialize_indices() + def initialize(self, connection: Connection): """Initialize the database if necessary.""" inspector = inspect(connection) @@ -898,6 +909,9 @@ def initialize(self, connection: Connection): self.initialize_database(connection) self.log.info("Initialization complete.") + with Session(connection) as session: + self.initialize_search_indexes(session) + def run(self) -> None: """ Initialize the database if necessary. This script is idempotent, so it diff --git a/tests/api/admin/controller/test_custom_lists.py b/tests/api/admin/controller/test_custom_lists.py index ecc96e704c..14ee6b90ab 100644 --- a/tests/api/admin/controller/test_custom_lists.py +++ b/tests/api/admin/controller/test_custom_lists.py @@ -36,6 +36,7 @@ from core.util.problem_detail import ProblemDetail from tests.core.util.test_flask_util import add_request_context from tests.fixtures.api_admin import AdminLibrarianFixture +from tests.mocks.search import ExternalSearchIndexFake, SearchServiceFake class TestCustomListsController: @@ -559,8 +560,6 @@ def test_custom_list_edit(self, admin_librarian_fixture: AdminLibrarianFixture): lane.customlists.append(list) lane.size = 350 - admin_librarian_fixture.ctrl.controller.search_engine.docs = {} - w1 = admin_librarian_fixture.ctrl.db.work( title="Alpha", with_license_pool=True, language="eng" ) @@ -578,19 +577,17 @@ def test_custom_list_edit(self, admin_librarian_fixture: AdminLibrarianFixture): list.add_entry(w1) list.add_entry(w2) - admin_librarian_fixture.ctrl.controller.search_engine.bulk_update([w1, w2, w3]) - - # All three works should be indexed, but only w1 and w2 should be related to the list - assert len(admin_librarian_fixture.ctrl.controller.search_engine.docs) == 3 - currently_indexed_on_list = [ - v["title"] - for ( - k, - v, - ) in admin_librarian_fixture.ctrl.controller.search_engine.docs.items() - if v["customlists"] is not None - ] - assert sorted(currently_indexed_on_list) == ["Alpha", "Bravo"] + + # All asserts in this test case depend on the external search being mocked + assert isinstance( + admin_librarian_fixture.ctrl.controller.search_engine, + ExternalSearchIndexFake, + ) + + search_service: SearchServiceFake = ( + admin_librarian_fixture.ctrl.controller.search_engine.search_service() # type: ignore [assignment] + ) + external_search = admin_librarian_fixture.ctrl.controller.search_engine new_entries = [ dict( @@ -626,6 +623,9 @@ def test_custom_list_edit(self, admin_librarian_fixture: AdminLibrarianFixture): # Test fails without expiring the ORM cache admin_librarian_fixture.ctrl.db.session.expire_all() + # Mock the right count + external_search.mock_count_works(2) + with admin_librarian_fixture.request_context_with_library_and_admin( "/", method="POST" ): @@ -648,18 +648,8 @@ def test_custom_list_edit(self, admin_librarian_fixture: AdminLibrarianFixture): ) assert isinstance(response, flask.Response) - # The works associated with the list in ES should have changed, though the total - # number of documents in the index should be the same. 
- assert len(admin_librarian_fixture.ctrl.controller.search_engine.docs) == 3 - currently_indexed_on_list = [ - v["title"] - for ( - k, - v, - ) in admin_librarian_fixture.ctrl.controller.search_engine.docs.items() - if v["customlists"] is not None - ] - assert sorted(currently_indexed_on_list) == ["Bravo", "Charlie"] + # Two works are indexed again + assert len(search_service.documents_all()) == 2 assert 200 == response.status_code assert list.id == int(response.get_data(as_text=True)) @@ -668,13 +658,7 @@ def test_custom_list_edit(self, admin_librarian_fixture: AdminLibrarianFixture): assert {w2, w3} == {entry.work for entry in list.entries} assert new_collections == list.collections - # If we were using a real search engine instance, the lane's size would be set - # to 2, since that's the number of works that would be associated with the - # custom list that the lane is based on. In this case we're using an instance of - # MockExternalSearchIndex, whose count_works() method (called in Lane.update_size()) - # returns the number of items in search_engine.docs. Testing that lane.size is now - # set to 3 shows that .update_size() was called during the call to custom_list(). - assert lane.size == 3 + assert lane.size == 2 # Edit for auto update values update_query = {"query": {"key": "title", "value": "title"}} @@ -862,17 +846,6 @@ def test_custom_list_delete_success( admin_librarian_fixture.ctrl.db.session, Lane, id=lane.id ) - # The second and third lanes were not removed, because they - # weren't based solely on this specific list. But their .size - # attributes were updated to reflect the removal of the list from - # the lane. - # - # In the context of this test, this means that - # MockExternalSearchIndex.count_works() was called, and we set - # it up to always return 2. 
- assert 2 == lane2.size - assert 2 == lane3.size - def test_custom_list_delete_errors( self, admin_librarian_fixture: AdminLibrarianFixture ): diff --git a/tests/api/admin/controller/test_dashboard.py b/tests/api/admin/controller/test_dashboard.py index d7f37e792a..8891d5d05e 100644 --- a/tests/api/admin/controller/test_dashboard.py +++ b/tests/api/admin/controller/test_dashboard.py @@ -26,7 +26,7 @@ def __init__(self, controller_fixture: ControllerFixture): self.english_1.license_pools[0].collection = self.ctrl.collection self.works = [self.english_1] - self.manager.external_search.bulk_update(self.works) + self.manager.external_search.mock_query_works(self.works) @pytest.fixture(scope="function") diff --git a/tests/api/admin/controller/test_lanes.py b/tests/api/admin/controller/test_lanes.py index 1a7b354db7..6d54d245bf 100644 --- a/tests/api/admin/controller/test_lanes.py +++ b/tests/api/admin/controller/test_lanes.py @@ -348,7 +348,6 @@ def test_lanes_edit(self, alm_fixture: AdminLibraryManagerFixture): assert [list2] == lane.customlists assert True == lane.inherit_parent_restrictions assert None == lane.media - assert 2 == lane.size def test_default_lane_edit(self, alm_fixture: AdminLibraryManagerFixture): """Default lanes only allow the display_name to be edited""" diff --git a/tests/api/admin/controller/test_search_services.py b/tests/api/admin/controller/test_search_services.py index ee92ebec1f..8b6fcebdf8 100644 --- a/tests/api/admin/controller/test_search_services.py +++ b/tests/api/admin/controller/test_search_services.py @@ -17,6 +17,13 @@ class TestSearchServices: def test_search_services_get_with_no_services(self, settings_ctrl_fixture): + # Delete the search integration + session = settings_ctrl_fixture.ctrl.db.session + integration = ExternalIntegration.lookup( + session, ExternalIntegration.OPENSEARCH, ExternalIntegration.SEARCH_GOAL + ) + session.delete(integration) + with settings_ctrl_fixture.request_context_with_admin("/"): response = ( settings_ctrl_fixture.manager.admin_search_services_controller.process_services() @@ -34,8 +41,15 @@ def test_search_services_get_with_no_services(self, settings_ctrl_fixture): ) def test_search_services_get_with_one_service(self, settings_ctrl_fixture): + # Delete the pre-existing integration + session = settings_ctrl_fixture.ctrl.db.session + integration = ExternalIntegration.lookup( + session, ExternalIntegration.OPENSEARCH, ExternalIntegration.SEARCH_GOAL + ) + session.delete(integration) + search_service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, + session, ExternalIntegration, protocol=ExternalIntegration.OPENSEARCH, goal=ExternalIntegration.SEARCH_GOAL, @@ -67,6 +81,13 @@ def test_search_services_get_with_one_service(self, settings_ctrl_fixture): def test_search_services_post_errors(self, settings_ctrl_fixture): controller = settings_ctrl_fixture.manager.admin_search_services_controller + # Delete the previous integrations + session = settings_ctrl_fixture.ctrl.db.session + integration = ExternalIntegration.lookup( + session, ExternalIntegration.OPENSEARCH, ExternalIntegration.SEARCH_GOAL + ) + session.delete(integration) + with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = MultiDict( [ @@ -93,7 +114,7 @@ def test_search_services_post_errors(self, settings_ctrl_fixture): assert response == MISSING_SERVICE service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, + session, ExternalIntegration, protocol=ExternalIntegration.OPENSEARCH, 
goal=ExternalIntegration.SEARCH_GOAL, @@ -109,9 +130,9 @@ def test_search_services_post_errors(self, settings_ctrl_fixture): response = controller.process_services() assert response.uri == MULTIPLE_SITEWIDE_SERVICES.uri - settings_ctrl_fixture.ctrl.db.session.delete(service) + session.delete(service) service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, + session, ExternalIntegration, protocol="test", goal=ExternalIntegration.LICENSE_GOAL, @@ -129,7 +150,7 @@ def test_search_services_post_errors(self, settings_ctrl_fixture): assert response == INTEGRATION_NAME_ALREADY_IN_USE service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, + session, ExternalIntegration, protocol=ExternalIntegration.OPENSEARCH, goal=ExternalIntegration.SEARCH_GOAL, @@ -158,6 +179,13 @@ def test_search_services_post_errors(self, settings_ctrl_fixture): pytest.raises(AdminNotAuthorized, controller.process_services) def test_search_services_post_create(self, settings_ctrl_fixture): + # Delete the previous integrations + session = settings_ctrl_fixture.ctrl.db.session + integration = ExternalIntegration.lookup( + session, ExternalIntegration.OPENSEARCH, ExternalIntegration.SEARCH_GOAL + ) + session.delete(integration) + with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = MultiDict( [ @@ -174,7 +202,7 @@ def test_search_services_post_create(self, settings_ctrl_fixture): assert response.status_code == 201 service = get_one( - settings_ctrl_fixture.ctrl.db.session, + session, ExternalIntegration, goal=ExternalIntegration.SEARCH_GOAL, ) diff --git a/tests/api/admin/controller/test_work_editor.py b/tests/api/admin/controller/test_work_editor.py index d5995d284d..10ea9f0372 100644 --- a/tests/api/admin/controller/test_work_editor.py +++ b/tests/api/admin/controller/test_work_editor.py @@ -58,7 +58,7 @@ def __init__(self, controller_fixture: ControllerFixture): self.english_1.license_pools[0].collection = self.ctrl.collection self.works = [self.english_1] - self.manager.external_search.bulk_update(self.works) + self.manager.external_search.mock_query_works(self.works) self.admin.add_role(AdminRole.LIBRARIAN, self.ctrl.db.default_library()) @@ -970,11 +970,6 @@ def test_custom_lists_edit_success(self, work_fixture: WorkFixture): assert list == work.custom_list_entries[0].customlist assert True == work.custom_list_entries[0].featured - # Lane.size will not be updated until the work is - # reindexed with its new list memebership and lane sizes - # are recalculated. - assert 2 == lane.size - # Now remove the work from the list. work_fixture.ctrl.controller.search_engine.docs = dict(id1="doc1") with work_fixture.request_context_with_library_and_admin("/", method="POST"): @@ -990,9 +985,6 @@ def test_custom_lists_edit_success(self, work_fixture: WorkFixture): assert 0 == len(work.custom_list_entries) assert 0 == len(list.entries) - # The lane size was recalculated once again. - assert 1 == lane.size - # Add a list that didn't exist before. 
with work_fixture.request_context_with_library_and_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( diff --git a/tests/api/feed/equivalence/test_feed_equivalence.py b/tests/api/feed/equivalence/test_feed_equivalence.py index cf07fd153d..2140b3dd5e 100644 --- a/tests/api/feed/equivalence/test_feed_equivalence.py +++ b/tests/api/feed/equivalence/test_feed_equivalence.py @@ -7,7 +7,6 @@ from api.app import app from api.opds import LibraryAnnotator as OldLibraryAnnotator from api.opds import LibraryLoanAndHoldAnnotator as OldLibraryLoanAndHoldAnnotator -from core.external_search import MockExternalSearchIndex from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.admin import AdminFeed from core.feed.annotator.admin import AdminAnnotator @@ -23,6 +22,8 @@ patch_url_for, ) from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.search import ExternalSearchFixture +from tests.mocks.search import ExternalSearchIndexFake def format_tags(tags1, tags2): @@ -68,7 +69,11 @@ def assert_equal_xmls(xml1: str | etree._Element, xml2: str | etree._Element): class TestFeedEquivalence: - def test_page_feed(self, annotator_fixture: LibraryAnnotatorFixture): + def test_page_feed( + self, + annotator_fixture: LibraryAnnotatorFixture, + external_search_fixture: ExternalSearchFixture, + ): db = annotator_fixture.db lane = annotator_fixture.lane library = db.default_library() @@ -76,8 +81,8 @@ def test_page_feed(self, annotator_fixture: LibraryAnnotatorFixture): work1 = db.work(with_license_pool=True) work2 = db.work(with_open_access_download=True) - search_index = MockExternalSearchIndex() - search_index.bulk_update([work1, work2]) + search_index = ExternalSearchIndexFake(db.session) + search_index.mock_query_works_multi([work1, work2]) with app.test_request_context("/"): new_annotator = LibraryAnnotator(None, lane, library) @@ -121,7 +126,11 @@ def test_page_feed_with_loan_annotator( assert_equal_xmls(str(old_feed), str(new_feed)) - def test_groups_feed(self, annotator_fixture: LibraryAnnotatorFixture): + def test_groups_feed( + self, + annotator_fixture: LibraryAnnotatorFixture, + external_search_fixture: ExternalSearchFixture, + ): db = annotator_fixture.db lane = annotator_fixture.lane de_lane = db.lane(parent=lane, languages=["de"]) @@ -130,8 +139,8 @@ def test_groups_feed(self, annotator_fixture: LibraryAnnotatorFixture): work1 = db.work(with_license_pool=True) work2 = db.work(with_open_access_download=True, language="de") - search_index = MockExternalSearchIndex() - search_index.bulk_update([work1, work2]) + search_index = ExternalSearchIndexFake(db.session) + search_index.mock_query_works_multi([work1, work2], [work1, work2]) patron = db.patron() work1.active_license_pool(library).loan_to(patron) @@ -163,7 +172,11 @@ def test_groups_feed(self, annotator_fixture: LibraryAnnotatorFixture): assert_equal_xmls(str(old_feed), new_feed.serialize().decode()) - def test_search_feed(self, annotator_fixture: LibraryAnnotatorFixture): + def test_search_feed( + self, + annotator_fixture: LibraryAnnotatorFixture, + external_search_fixture: ExternalSearchFixture, + ): db = annotator_fixture.db lane = annotator_fixture.lane de_lane = db.lane(parent=lane, languages=["de"]) @@ -172,8 +185,8 @@ def test_search_feed(self, annotator_fixture: LibraryAnnotatorFixture): work1 = db.work(with_license_pool=True) work2 = db.work(with_open_access_download=True, language="de") - search_index = MockExternalSearchIndex() - search_index.bulk_update([work1, work2]) + search_index 
= ExternalSearchIndexFake(db.session) + search_index.mock_query_works_multi([work1, work2]) patron = db.patron() work1.active_license_pool(library).loan_to(patron) @@ -207,7 +220,11 @@ def test_search_feed(self, annotator_fixture: LibraryAnnotatorFixture): assert_equal_xmls(str(old_feed), str(new_feed)) - def test_from_query_feed(self, annotator_fixture: LibraryAnnotatorFixture): + def test_from_query_feed( + self, + annotator_fixture: LibraryAnnotatorFixture, + external_search_fixture: ExternalSearchFixture, + ): db = annotator_fixture.db lane = annotator_fixture.lane de_lane = db.lane(parent=lane, languages=["de"]) @@ -216,8 +233,8 @@ def test_from_query_feed(self, annotator_fixture: LibraryAnnotatorFixture): work1 = db.work(with_license_pool=True) work2 = db.work(with_open_access_download=True, language="de") - search_index = MockExternalSearchIndex() - search_index.bulk_update([work1, work2]) + search_index = ExternalSearchIndexFake(db.session) + search_index.mock_query_works_multi([work1, work2]) patron = db.patron() work1.active_license_pool(library).loan_to(patron) @@ -254,7 +271,11 @@ def url_fn(page): class TestAdminAnnotator: - def test_suppressed(self, annotator_fixture: LibraryAnnotatorFixture): + def test_suppressed( + self, + annotator_fixture: LibraryAnnotatorFixture, + external_search_fixture: ExternalSearchFixture, + ): db = annotator_fixture.db library = db.default_library() diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py index 7c2cc3179d..7a304fd663 100644 --- a/tests/api/feed/test_library_annotator.py +++ b/tests/api/feed/test_library_annotator.py @@ -20,7 +20,6 @@ Urban_Fantasy, ) from core.entrypoint import AudiobooksEntryPoint, EbooksEntryPoint, EverythingEntryPoint -from core.external_search import MockExternalSearchIndex from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.circulation import LibraryAnnotator from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator @@ -800,7 +799,6 @@ def test_work_entry_includes_contributor_links( work.presentation_edition.add_contributor("Oprah", Contributor.AUTHOR_ROLE) work.calculate_presentation( PresentationCalculationPolicy(regenerate_opds_entries=True), - MockExternalSearchIndex(), ) [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries contributor_links = [ @@ -820,7 +818,6 @@ def test_work_entry_includes_contributor_links( annotator_fixture.db.session.commit() work.calculate_presentation( PresentationCalculationPolicy(regenerate_opds_entries=True), - MockExternalSearchIndex(), ) [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries assert [] == [l.link for l in entry.computed.authors if l.link] diff --git a/tests/api/feed/test_opds_acquisition_feed.py b/tests/api/feed/test_opds_acquisition_feed.py index 3f6a098713..035e799a33 100644 --- a/tests/api/feed/test_opds_acquisition_feed.py +++ b/tests/api/feed/test_opds_acquisition_feed.py @@ -15,7 +15,6 @@ EverythingEntryPoint, MediumEntryPoint, ) -from core.external_search import MockExternalSearchIndex from core.facets import FacetConstants from core.feed.acquisition import LookupAcquisitionFeed, OPDSAcquisitionFeed from core.feed.annotator.base import Annotator @@ -38,7 +37,6 @@ from core.util.opds_writer import OPDSFeed, OPDSMessage from tests.api.feed.fixtures import PatchedUrlFor, patch_url_for # noqa from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.search import ExternalSearchPatchFixture class TestOPDSFeedProtocol: @@ 
-170,7 +168,6 @@ class TestOPDSAcquisitionFeed: def test_page( self, db, - external_search_patch_fixture: ExternalSearchPatchFixture, ): session = db.session @@ -187,7 +184,7 @@ def test_page( CirculationManagerAnnotator(None), None, None, - None, + MagicMock(), ).as_response(max_age=10, private=private) # The result is an OPDSFeedResponse. The 'private' argument, @@ -1065,7 +1062,6 @@ class TestEntrypointLinkInsertion: def test_groups( self, entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture, - external_search_patch_fixture: ExternalSearchPatchFixture, ): data, db, session = ( entrypoint_link_insertion_fixture, @@ -1081,7 +1077,8 @@ def run(wl=None, facets=None): was called with. """ data.mock.called_with = None - search = MockExternalSearchIndex() + search = MagicMock() + search.query_works_multi.return_value = [[]] feed = OPDSAcquisitionFeed.groups( session, "title", @@ -1142,7 +1139,7 @@ def run(wl=None, facets=None, pagination=None): data.annotator(), facets, pagination, - MockExternalSearchIndex(), + MagicMock(), ) return data.mock.called_with diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index bf0863a2a2..f3f3b1cab7 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -4,9 +4,11 @@ from api.circulation import BaseCirculationAPI, CirculationAPI, HoldInfo, LoanInfo from api.controller import CirculationManager -from core.external_search import MockExternalSearchIndex +from core.external_search import ExternalSearchIndex from core.integration.settings import BaseSettings -from core.model import DataSource, Hold, Loan +from core.model import DataSource, Hold, Loan, get_one_or_create +from core.model.configuration import ExternalIntegration +from tests.mocks.search import ExternalSearchIndexFake class MockBaseCirculationAPI(BaseCirculationAPI, ABC): @@ -167,7 +169,20 @@ class MockCirculationManager(CirculationManager): def setup_search(self): """Set up a search client.""" - return MockExternalSearchIndex() + integration, _ = get_one_or_create( + self._db, + ExternalIntegration, + goal=ExternalIntegration.SEARCH_GOAL, + protocol=ExternalIntegration.OPENSEARCH, + ) + integration.set_setting( + ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY, "test_index" + ) + integration.set_setting( + ExternalSearchIndex.TEST_SEARCH_TERM_KEY, "a search term" + ) + integration.url = "http://does-not-exist.com/" + return ExternalSearchIndexFake(self._db) def setup_circulation(self, library, analytics): """Set up the Circulation object.""" diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index af0a0229d7..e376c9d5c1 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -5,7 +5,6 @@ from api.controller import CirculationManager from api.custom_index import CustomIndexView from api.problem_details import * -from core.external_search import MockExternalSearchIndex from core.feed.annotator.circulation import ( CirculationManagerAnnotator, LibraryAnnotator, @@ -19,6 +18,7 @@ # TODO: we can drop this when we drop support for Python 3.6 and 3.7 from tests.fixtures.api_controller import CirculationControllerFixture from tests.fixtures.database import IntegrationConfigurationFixture +from tests.mocks.search import SearchServiceFake class TestCirculationManager: @@ -110,7 +110,7 @@ def mock_for_library(incoming_library): ) # The ExternalSearch object has been reset. 
- assert isinstance(manager.external_search, MockExternalSearchIndex) + assert isinstance(manager.external_search.search_service(), SearchServiceFake) # So have the patron web domains, and their paths have been # removed. diff --git a/tests/api/test_controller_crawlfeed.py b/tests/api/test_controller_crawlfeed.py index 4dee039352..03b7994b44 100644 --- a/tests/api/test_controller_crawlfeed.py +++ b/tests/api/test_controller_crawlfeed.py @@ -5,6 +5,7 @@ import feedparser from flask import url_for +from opensearch_dsl.response.hit import Hit from api.lanes import ( CrawlableCollectionBasedLane, @@ -13,7 +14,7 @@ DynamicLane, ) from api.problem_details import NO_SUCH_COLLECTION, NO_SUCH_LIST -from core.external_search import MockSearchResult, SortKeyPagination +from core.external_search import SortKeyPagination from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.circulation import CirculationManagerAnnotator from core.problem_details import INVALID_INPUT @@ -207,7 +208,14 @@ def works(self, _db, facets, pagination, *args, **kwargs): # It's not necessary for this test to call it with a # realistic value, but we might as well. results = [ - MockSearchResult(work.sort_title, work.sort_author, {}, work.id) + Hit( + { + "_source": { + "work_id": work.id, + }, + "_sort": [work.sort_title, work.sort_author, work.id], + } + ) ] pagination.page_loaded(results) return [work] diff --git a/tests/api/test_controller_work.py b/tests/api/test_controller_work.py index acc1a61f19..092e2e3ae0 100644 --- a/tests/api/test_controller_work.py +++ b/tests/api/test_controller_work.py @@ -46,6 +46,7 @@ from core.util.problem_detail import ProblemDetail from tests.fixtures.api_controller import CirculationControllerFixture from tests.fixtures.database import DatabaseTransactionFixture +from tests.mocks.search import fake_hits class WorkFixture(CirculationControllerFixture): @@ -532,11 +533,11 @@ def test_recommendations(self, work_fixture: WorkFixture): # If the NoveList API is configured, the search index is asked # about its recommendations. # - # In this test it doesn't matter whether NoveList actually - # provides any recommendations. The Filter object will be - # created with .return_nothing set, but our mock - # ExternalSearchIndex will ignore that setting and return - # everything in its index -- as it always does. + # This test no longer makes sense, the external_search no longer blindly returns information + # The query_works is not overidden, so we mock it manually + work_fixture.manager.external_search.query_works = MagicMock( + return_value=fake_hits([work_fixture.english_1]) + ) with work_fixture.request_context_with_library("/"): response = work_fixture.manager.work_controller.recommendations( *args, **kwargs @@ -678,8 +679,7 @@ def test_related_books(self, work_fixture: WorkFixture): same_author_and_series = work_fixture.db.work( title="Same author and series", with_license_pool=True ) - work_fixture.manager.external_search.docs = {} - work_fixture.manager.external_search.bulk_update([same_author_and_series]) + work_fixture.manager.external_search.mock_query_works([same_author_and_series]) mock_api = MockNoveListAPI(work_fixture.db.session) @@ -879,8 +879,7 @@ def test_series(self, work_fixture: WorkFixture): # that is the job of a non-mocked search engine. 
work = work_fixture.db.work(with_open_access_download=True) search_engine = work_fixture.manager.external_search - search_engine.docs = {} - search_engine.bulk_update([work]) + search_engine.mock_query_works([work]) # If a series is provided, a feed for that series is returned. with work_fixture.request_context_with_library("/"): diff --git a/tests/api/test_lanes.py b/tests/api/test_lanes.py index 0004fc0668..7238f5821e 100644 --- a/tests/api/test_lanes.py +++ b/tests/api/test_lanes.py @@ -28,7 +28,7 @@ from api.novelist import MockNoveListAPI from core.classifier import Classifier from core.entrypoint import AudiobooksEntryPoint -from core.external_search import Filter, MockExternalSearchIndex +from core.external_search import Filter from core.lane import DefaultSortOrderFacets, Facets, FeaturedFacets, Lane, WorkList from core.metadata_layer import ContributorData, Metadata from core.model import ( @@ -41,6 +41,7 @@ ) from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture +from tests.fixtures.search import ExternalSearchFixtureFake class TestLaneCreation: @@ -943,7 +944,6 @@ def test_default(self, db: DatabaseTransactionFixture): class TestCrawlableCollectionBasedLane: def test_init(self, db: DatabaseTransactionFixture): - # Collection-based crawlable feeds are cached for 2 hours. assert 2 * 60 * 60 == CrawlableCollectionBasedLane.MAX_CACHE_AGE @@ -1000,24 +1000,29 @@ def test_url_arguments(self, db: DatabaseTransactionFixture): assert CrawlableCollectionBasedLane.COLLECTION_ROUTE == route assert other_collection.name == kwargs.get("collection_name") - def test_works(self, db: DatabaseTransactionFixture): + def test_works( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): w1 = db.work(collection=db.default_collection()) w2 = db.work(collection=db.default_collection()) w3 = db.work(collection=db.collection()) lane = CrawlableCollectionBasedLane() lane.initialize([db.default_collection()]) - search = MockExternalSearchIndex() + search = external_search_fake_fixture.external_search + search.query_works = MagicMock(return_value=[]) # type: ignore [method-assign] lane.works( db.session, facets=CrawlableFacets.default(None), search_engine=search ) - assert len(search.queries) == 1 - filter = search.queries[0][1] + queries = search.query_works.call_args[1] + assert search.query_works.call_count == 1 # Only target a single collection - assert filter.collection_ids == [db.default_collection().id] + assert queries["filter"].collection_ids == [db.default_collection().id] # without any search query - assert None == search.queries[0][0] + assert None == queries["query_string"] class TestCrawlableCustomListBasedLane: diff --git a/tests/api/test_opds.py b/tests/api/test_opds.py index 2da5851593..ba51138402 100644 --- a/tests/api/test_opds.py +++ b/tests/api/test_opds.py @@ -28,7 +28,7 @@ Urban_Fantasy, ) from core.entrypoint import AudiobooksEntryPoint, EverythingEntryPoint -from core.external_search import MockExternalSearchIndex, WorkSearchResult +from core.external_search import WorkSearchResult from core.lane import FacetsWithEntryPoint, WorkList from core.lcp.credential import LCPCredentialFactory, LCPHashedPassphrase from core.model import ( @@ -57,6 +57,7 @@ from core.util.opds_writer import AtomFeed, OPDSFeed from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture +from tests.fixtures.search import ExternalSearchFixtureFake 
from tests.fixtures.vendor_id import VendorIDFixture _strftime = AtomFeed._strftime @@ -1052,7 +1053,9 @@ def test_language_and_audience_key_from_work( assert ("eng", "All+Ages,Children") == result def test_work_entry_includes_contributor_links( - self, annotator_fixture: LibraryAnnotatorFixture + self, + annotator_fixture: LibraryAnnotatorFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, ): """ContributorLane links are added to works with contributors""" work = annotator_fixture.db.work(with_open_access_download=True) @@ -1071,7 +1074,7 @@ def test_work_entry_includes_contributor_links( work.presentation_edition.add_contributor("Oprah", Contributor.AUTHOR_ROLE) work.calculate_presentation( PresentationCalculationPolicy(regenerate_opds_entries=True), - MockExternalSearchIndex(), + external_search_fake_fixture.external_search, ) [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries contributor_links = [l for l in entry.links if l.rel == "contributor"] @@ -1089,7 +1092,7 @@ def test_work_entry_includes_contributor_links( annotator_fixture.db.session.commit() work.calculate_presentation( PresentationCalculationPolicy(regenerate_opds_entries=True), - MockExternalSearchIndex(), + external_search_fake_fixture.external_search, ) [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries assert [] == [l for l in entry.links if l.rel == "contributor"] diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index 50846c5697..5094b5f4d8 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -16,11 +16,7 @@ from api.marc import LibraryAnnotator as MARCLibraryAnnotator from api.novelist import NoveListAPI from core.entrypoint import AudiobooksEntryPoint, EbooksEntryPoint -from core.external_search import ( - ExternalSearchIndex, - MockExternalSearchIndex, - mock_search_index, -) +from core.external_search import ExternalSearchIndex, mock_search_index from core.lane import Facets, FeaturedFacets, Pagination, WorkList from core.marc import MARCExporter from core.model import ( @@ -51,6 +47,8 @@ ) from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.library import LibraryFixture +from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchFixtureFake +from tests.mocks.search import fake_hits if TYPE_CHECKING: from tests.fixtures.authenticator import SimpleAuthIntegrationFixture @@ -76,7 +74,6 @@ def set_value(credential): AuthdataUtility.ADOBE_ACCOUNT_ID_PATRON_IDENTIFIER, "Some other type", ): - credential = Credential.lookup( db.session, data_source, type, patron, set_value, True ) @@ -326,9 +323,13 @@ def test_pagination(self, lane_script_fixture: LaneScriptFixture): def test_do_generate( self, lane_script_fixture: LaneScriptFixture, - external_search_fixture: ExternalSearchFixture, + end_to_end_search_fixture: EndToEndSearchFixture, ): db = lane_script_fixture.db + migration = end_to_end_search_fixture.external_search_index.start_migration() + assert migration is not None + migration.finish() + # When it's time to generate a feed, AcquisitionFeed.page # is called with the right arguments. class MockAcquisitionFeed: @@ -407,6 +408,7 @@ def test_do_generate( external_search_fixture: ExternalSearchFixture, ): db = lane_script_fixture.db + external_search_fixture.init_indices() # When it's time to generate a feed, AcquisitionFeed.groups # is called with the right arguments. 
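Several of the tests in this patch stub search results with fake_hits from tests/mocks/search.py. Its implementation is not included here, but judging from the Hit built by hand in test_controller_crawlfeed.py above, it presumably wraps works in opensearch_dsl Hit objects along these lines (a sketch of the assumed helper, not the actual code):

from opensearch_dsl.response.hit import Hit

def fake_hits(works):
    # Wrap each work in the minimal structure the controllers and
    # SortKeyPagination expect: a work_id in _source plus sort values.
    return [
        Hit(
            {
                "_source": {"work_id": work.id},
                "_sort": [work.sort_title, work.sort_author, work.id],
            }
        )
        for work in works
    ]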
@@ -507,7 +509,11 @@ def test_facets( # We no longer cache the feeds @pytest.mark.skip - def test_do_run(self, lane_script_fixture: LaneScriptFixture): + def test_do_run( + self, + lane_script_fixture: LaneScriptFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): db = lane_script_fixture.db work = db.work(fiction=True, with_license_pool=True, genre="Science Fiction") @@ -519,8 +525,10 @@ def test_do_run(self, lane_script_fixture: LaneScriptFixture): fiction=True, genres=["Science Fiction"], ) - search_engine = MockExternalSearchIndex() - search_engine.bulk_update([work]) + search_engine = external_search_fake_fixture.external_search + search_engine.query_works_multi = MagicMock( # type: ignore [method-assign] + return_value=[fake_hits([work]), fake_hits([work])] + ) with mock_search_index(search_engine): script = CacheOPDSGroupFeedPerLane(db.session, cmd_args=[]) script.do_run(cmd_args=[]) @@ -720,8 +728,9 @@ def test_initialize(self, db: DatabaseTransactionFixture): # as necessary. with patch("scripts.inspect") as inspect: script = InstanceInitializationScript() - script.migrate_database = MagicMock() - script.initialize_database = MagicMock() + script.migrate_database = MagicMock() # type: ignore[method-assign] + script.initialize_database = MagicMock() # type: ignore[method-assign] + script.initialize_search_indexes = MagicMock() # type: ignore[method-assign] # If the database is uninitialized, initialize_database() is called. inspect().has_table.return_value = False @@ -746,6 +755,7 @@ def test_initialize_alembic_exception(self, caplog: LogCaptureFixture): caplog.set_level(logging.ERROR) script.migrate_database = MagicMock(side_effect=CommandError("test")) script.initialize_database = MagicMock() + script.initialize_search_indexes = MagicMock() # If the database is initialized, migrate_database() is called. 
inspect().has_table.return_value = True @@ -793,6 +803,67 @@ def test_find_alembic_ini(self, db: DatabaseTransactionFixture): assert conf.attributes["connection"] == mock_connection.engine assert conf.attributes["configure_logger"] is False + def test_initialize_search_indexes( + self, end_to_end_search_fixture: EndToEndSearchFixture + ): + db = end_to_end_search_fixture.db + search = end_to_end_search_fixture.external_search_index + base_name = search._revision_base_name + script = InstanceInitializationScript() + + _mockable_search = ExternalSearchIndex(db.session) + _mockable_search.start_migration = MagicMock() # type: ignore [method-assign] + _mockable_search.search_service = MagicMock() # type: ignore [method-assign] + _mockable_search.log = MagicMock() + + def mockable_search(*args): + return _mockable_search + + # Initially this should not exist, if InstanceInit has not been run + assert search.search_service().read_pointer() == None + + with patch("scripts.ExternalSearchIndex", new=mockable_search): + # To fake "no migration is available", mock all the values + + _mockable_search.start_migration.return_value = None + _mockable_search.search_service().is_pointer_empty.return_value = True + # Migration should fail + assert script.initialize_search_indexes(db.session) == False + # Logs were emitted + assert _mockable_search.log.warning.call_count == 1 + assert ( + "no migration was available" + in _mockable_search.log.warning.call_args[0][0] + ) + + _mockable_search.search_service.reset_mock() + _mockable_search.start_migration.reset_mock() + _mockable_search.log.reset_mock() + + # In case there is no need for a migration, read pointer exists as a non-empty pointer + _mockable_search.search_service().is_pointer_empty.return_value = False + # Initialization should pass, as a no-op + assert script.initialize_search_indexes(db.session) == True + assert _mockable_search.start_migration.call_count == 0 + + # Initialization should work now + assert script.initialize_search_indexes(db.session) == True + # Then we have the latest version index + assert ( + search.search_service().read_pointer() + == search._revision.name_for_index(base_name) + ) + + def test_initialize_search_indexes_no_integration( + self, db: DatabaseTransactionFixture + ): + script = InstanceInitializationScript() + script._log = MagicMock() + # No integration mean no migration + assert script.initialize_search_indexes(db.session) == False + assert script._log.error.call_count == 2 + assert "No search integration" in script._log.error.call_args[0][0] + class TestLanguageListScript: def test_languages(self, db: DatabaseTransactionFixture): diff --git a/tests/core/mock.py b/tests/core/mock.py index b58f08a288..050beb4708 100644 --- a/tests/core/mock.py +++ b/tests/core/mock.py @@ -8,7 +8,6 @@ IdentifierCoverageProvider, WorkCoverageProvider, ) -from core.external_search import ExternalSearchIndex from core.model import DataSource, ExternalIntegration @@ -69,17 +68,6 @@ def __getattr__(self, item): return self.__getitem__(item) -class SearchClientForTesting(ExternalSearchIndex): - """When creating an index, limit it to a single shard and disable - replicas. - - This makes search results more predictable. 
- """ - - def setup_index(self, new_index=None): - return super().setup_index(new_index, number_of_shards=1, number_of_replicas=0) - - class MockCoverageProvider: """Mixin class for mock CoverageProviders that defines common constants.""" diff --git a/tests/core/models/test_work.py b/tests/core/models/test_work.py index f15f495d86..dfee7fc156 100644 --- a/tests/core/models/test_work.py +++ b/tests/core/models/test_work.py @@ -7,7 +7,6 @@ from core.classifier import Classifier, Fantasy, Romance, Science_Fiction from core.equivalents_coverage import EquivalentIdentifiersCoverageProvider -from core.external_search import MockExternalSearchIndex from core.model import get_one_or_create, tuple_to_numericrange from core.model.classification import Genre, Subject from core.model.contributor import Contributor @@ -21,6 +20,7 @@ from core.util.datetime_helpers import datetime_utc, from_timestamp, utc_now from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.sample_covers import SampleCoversFixture +from tests.fixtures.search import ExternalSearchFixtureFake class TestWork: @@ -95,7 +95,11 @@ def test_from_identifiers(self, db: DatabaseTransactionFixture): # Because the work's license_pool isn't suppressed, it isn't returned. assert [] == result - def test_calculate_presentation(self, db: DatabaseTransactionFixture): + def test_calculate_presentation( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): # Test that: # - work coverage records are made on work creation and primary edition selection. # - work's presentation information (author, title, etc. fields) does a proper job @@ -221,7 +225,7 @@ def test_calculate_presentation(self, db: DatabaseTransactionFixture): work.last_update_time = None work.presentation_ready = True - index = MockExternalSearchIndex() + index = external_search_fake_fixture.external_search work.calculate_presentation(search_index_client=index) @@ -258,7 +262,7 @@ def test_calculate_presentation(self, db: DatabaseTransactionFixture): assert (utc_now() - work.last_update_time) < datetime.timedelta(seconds=2) # type: ignore[unreachable] # The index has not been updated. - assert [] == list(index.docs.items()) + assert [] == external_search_fake_fixture.search.documents_all() # The Work now has a complete set of WorkCoverageRecords # associated with it, reflecting all the operations that @@ -468,24 +472,19 @@ def set_summary(self, summary): assert l1.resource.representation.content.decode("utf-8") == w.summary_text def test_set_presentation_ready_based_on_content( - self, db: DatabaseTransactionFixture + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, ): work = db.work(with_license_pool=True) - search = MockExternalSearchIndex() - # This is how the work will be represented in the dummy search - # index. - index_key = ( - search.works_index, - work.id, - ) - + search = external_search_fake_fixture.external_search presentation = work.presentation_edition work.set_presentation_ready_based_on_content(search_index_client=search) assert True == work.presentation_ready # The work has not been added to the search index. - assert [] == list(search.docs.keys()) + assert [] == external_search_fake_fixture.search.documents_all() # But the work of adding it to the search engine has been # registered. 
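These assertions reflect the new indexing flow: changing a work's presentation only registers it for reindexing, and documents reach the search service later. The submission side of that flow is exercised in test_migration_states.py further down; in outline it looks like the following sketch, assuming a configured session and search integration as in that test.

from core.external_search import ExternalSearchIndex

# Documents are now pushed through a receiver rather than the old
# MockExternalSearchIndex.bulk_update() these tests relied on before.
client = ExternalSearchIndex(db.session)
receiver = client.start_updating_search_documents()
receiver.add_documents([{"work_id": 123}])
receiver.finish()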
@@ -845,6 +844,7 @@ def test_reject_covers( self, db, sample_covers_fixture: SampleCoversFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, ): edition, lp = db.edition(with_open_access_download=True) @@ -913,7 +913,7 @@ def reset_cover(): assert url in work.verbose_opds_entry # Suppressing the cover removes the cover from the work. - index = MockExternalSearchIndex() + index = external_search_fake_fixture.external_search Work.reject_covers(db.session, [work], search_index_client=index) assert has_no_cover(work) reset_cover() @@ -1578,13 +1578,17 @@ def find_record(work): record = find_record(work) assert registered == record.status - def test_reset_coverage(self, db: DatabaseTransactionFixture): + def test_reset_coverage( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): # Test the methods that reset coverage for works, indicating # that some task needs to be performed again. WCR = WorkCoverageRecord work = db.work() work.presentation_ready = True - index = MockExternalSearchIndex() + index = external_search_fake_fixture.external_search # Calling _reset_coverage when there is no coverage creates # a new WorkCoverageRecord in the REGISTERED state @@ -1616,7 +1620,7 @@ def mock_reset_coverage(operation): # The work was not added to the search index when we called # external_index_needs_updating. That happens later, when the # WorkCoverageRecord is processed. - assert [] == list(index.docs.values()) + assert [] == external_search_fake_fixture.search.documents_all() def test_for_unchecked_subjects(self, db: DatabaseTransactionFixture): w1 = db.work(with_license_pool=True) diff --git a/tests/core/search/__init__.py b/tests/core/search/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/search/test_documents.py b/tests/core/search/test_documents.py new file mode 100644 index 0000000000..72d94a29ce --- /dev/null +++ b/tests/core/search/test_documents.py @@ -0,0 +1,168 @@ +from core.search.document import ( + BASIC_TEXT, + BINARY, + BOOLEAN, + BYTE, + CUSTOM_KEYWORD, + DOUBLE, + FILTERABLE_TEXT, + FLOAT, + INTEGER, + IP, + LONG, + SHORT, + UNSIGNED_LONG, + SearchMappingDocument, + date, + icu_collation_keyword, + keyword, + nested, + sort_author_keyword, +) + + +class TestDocuments: + def test_binary(self): + assert {"type": "binary"} == BINARY.serialize() + + def test_boolean(self): + assert {"type": "boolean"} == BOOLEAN.serialize() + + def test_byte(self): + assert {"type": "byte"} == BYTE.serialize() + + def test_double(self): + assert {"type": "double"} == DOUBLE.serialize() + + def test_float(self): + assert {"type": "float"} == FLOAT.serialize() + + def test_ip(self): + assert {"type": "ip"} == IP.serialize() + + def test_integer(self): + assert {"type": "integer"} == INTEGER.serialize() + + def test_long(self): + assert {"type": "long"} == LONG.serialize() + + def test_unsigned_long(self): + assert {"type": "unsigned_long"} == UNSIGNED_LONG.serialize() + + def test_short(self): + assert {"type": "short"} == SHORT.serialize() + + def test_date(self): + t = date() + t.parameters["x"] = "a" + t.parameters["y"] = "b" + t.parameters["z"] = "c" + assert {"type": "date", "x": "a", "y": "b", "z": "c"} == t.serialize() + + def test_keyword(self): + t = keyword() + t.parameters["x"] = "a" + t.parameters["y"] = "b" + t.parameters["z"] = "c" + assert { + "type": "keyword", + "normalizer": "filterable_string", + "x": "a", + "y": "b", + "z": "c", + } == t.serialize() + + def 
test_icu_collation_keyword(self): + t = icu_collation_keyword() + t.parameters["x"] = "a" + t.parameters["y"] = "b" + t.parameters["z"] = "c" + assert { + "type": "icu_collation_keyword", + "x": "a", + "y": "b", + "z": "c", + } == t.serialize() + + def test_sort_author_keyword(self): + t = sort_author_keyword() + t.parameters["x"] = "a" + t.parameters["y"] = "b" + t.parameters["z"] = "c" + assert { + "type": "text", + "analyzer": "en_sort_author_analyzer", + "fielddata": "true", + "x": "a", + "y": "b", + "z": "c", + } == t.serialize() + + def test_nested(self): + u = nested() + u.add_property("a", INTEGER) + + t = nested() + t.add_property("x", INTEGER) + t.add_property("y", LONG) + t.add_property("z", u) + + assert { + "type": "nested", + "properties": { + "x": {"type": "integer"}, + "y": {"type": "long"}, + "z": {"type": "nested", "properties": {"a": {"type": "integer"}}}, + }, + } == t.serialize() + + def test_basic_text(self): + assert { + "type": "text", + "analyzer": "en_default_text_analyzer", + "fields": { + "minimal": {"type": "text", "analyzer": "en_minimal_text_analyzer"}, + "with_stopwords": { + "type": "text", + "analyzer": "en_with_stopwords_text_analyzer", + }, + }, + } == BASIC_TEXT.serialize() + + def test_filterable(self): + assert { + "type": "text", + "analyzer": "en_default_text_analyzer", + "fields": { + "minimal": {"type": "text", "analyzer": "en_minimal_text_analyzer"}, + "keyword": { + "type": "keyword", + "normalizer": "filterable_string", + "store": False, + "index": True, + }, + "with_stopwords": { + "type": "text", + "analyzer": "en_with_stopwords_text_analyzer", + }, + }, + } == FILTERABLE_TEXT.serialize() + + def test_custom_keyword(self): + assert { + "type": "keyword", + "normalizer": "filterable_string", + } == CUSTOM_KEYWORD.serialize() + + def test_document(self): + doc = SearchMappingDocument() + doc.properties["a"] = INTEGER + doc.properties["b"] = LONG + doc.settings["c"] = {"z": "x"} + + assert { + "settings": {"c": {"z": "x"}}, + "mappings": { + "properties": {"a": {"type": "integer"}, "b": {"type": "long"}} + }, + } == doc.serialize() diff --git a/tests/core/search/test_migration_states.py b/tests/core/search/test_migration_states.py new file mode 100644 index 0000000000..aa3ba92c01 --- /dev/null +++ b/tests/core/search/test_migration_states.py @@ -0,0 +1,119 @@ +"""Explicitly test the different states of migration, and ensure we are adhering to the principles set out. +These tests do have some overlap with the unit tests for the search migration, but these are specific to the migration use cases. 
+Initial Case +- No pointers or indices are available +- The System comes online for the first time and some prep work must be done +- The initial versioned indices and pointers should be prepped by the init_instance script +- The ExternalSearchIndex should not be hindered by this +Migration Case +- Pointers exist, indices exist +- The migration contains a new version for the index +- The search_index_refresh script, when run, should create and populate the indices, and move the red/write pointers +- The ExternalSearchIndex should not be hindered by this, and should continue to work with the pointers, regardless of where they point +""" + +import pytest + +from core.external_search import ExternalSearchIndex, SearchIndexCoverageProvider +from core.scripts import RunWorkCoverageProviderScript +from core.search.document import SearchMappingDocument +from core.search.revision import SearchSchemaRevision +from core.search.revision_directory import SearchRevisionDirectory +from scripts import InstanceInitializationScript +from tests.fixtures.search import ExternalSearchFixture + + +class TestMigrationStates: + def test_initial_migration_case( + self, external_search_fixture: ExternalSearchFixture + ): + fx = external_search_fixture + db = fx.db + + # Ensure we are in the initial state, no test indices and pointer available + prefix = fx.integration.setting( + ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY + ).value + all_indices = fx.search.indices.get("*") + for index_name in all_indices.keys(): + assert prefix not in index_name + + client = ExternalSearchIndex(db.session) + + # We cannot make any requests before we intitialize + with pytest.raises(Exception) as raised: + client.query_works("") + assert "index_not_found" in str(raised.value) + + # When a new sytem comes up the first code to run is the InstanceInitailization script + # This preps the DB and the search indices/pointers + InstanceInitializationScript().initialize(db.session.connection()) + + # Ensure we have created the index and pointers + new_index_name = client._revision.name_for_index(client._revision_base_name) + empty_index_name = client.search_service()._empty(client._revision_base_name) # type: ignore [attr-defined] + all_indices = fx.search.indices.get("*") + + assert prefix in new_index_name + assert new_index_name in all_indices.keys() + assert empty_index_name in all_indices.keys() + assert fx.search.indices.exists_alias( + client._search_read_pointer, index=new_index_name + ) + assert fx.search.indices.exists_alias( + client._search_write_pointer, index=new_index_name + ) + + # The same client should work without issue once the pointers are setup + assert client.query_works("").hits == [] + + def test_migration_case(self, external_search_fixture: ExternalSearchFixture): + fx = external_search_fixture + db = fx.db + + # The initial indices setup + InstanceInitializationScript().initialize(db.session.connection()) + + MOCK_VERSION = 1000001 + + class MockSchema(SearchSchemaRevision): + def __init__(self, v: int): + self.SEARCH_VERSION = v + super().__init__() + + def mapping_document(self) -> SearchMappingDocument: + return SearchMappingDocument() + + client = ExternalSearchIndex( + db.session, + revision_directory=SearchRevisionDirectory( + {MOCK_VERSION: MockSchema(MOCK_VERSION)} + ), + ) + # The search client works just fine + assert client.query_works("") is not None + receiver = client.start_updating_search_documents() + receiver.add_documents([{"work_id": 123}]) + receiver.finish() + + mock_index_name = 
client._revision.name_for_index(client._revision_base_name) + assert str(MOCK_VERSION) in mock_index_name + + # The mock index does not exist yet + with pytest.raises(Exception) as raised: + fx.search.indices.get(mock_index_name) + assert "index_not_found" in str(raised.value) + + # This should run the migration + RunWorkCoverageProviderScript( + SearchIndexCoverageProvider, db.session, search_index_client=client + ).run() + + # The new version is created, and the aliases point to the right index + assert fx.search.indices.get(mock_index_name) is not None + assert mock_index_name in fx.search.indices.get_alias( + name=client._search_read_pointer + ) + assert mock_index_name in fx.search.indices.get_alias( + name=client._search_write_pointer + ) diff --git a/tests/core/search/test_migrator.py b/tests/core/search/test_migrator.py new file mode 100644 index 0000000000..e061396f0b --- /dev/null +++ b/tests/core/search/test_migrator.py @@ -0,0 +1,250 @@ +from unittest.mock import MagicMock, Mock, call + +import pytest + +from core.search.document import SearchMappingDocument +from core.search.migrator import SearchMigrationException, SearchMigrator +from core.search.revision import SearchSchemaRevision +from core.search.revision_directory import SearchRevisionDirectory +from core.search.service import SearchWritePointer + + +class EmptyRevision(SearchSchemaRevision): + SEARCH_VERSION = 0 + + def __init__(self, version: int): + self.SEARCH_VERSION = version + super().__init__() + + def mapping_document(self) -> SearchMappingDocument: + return SearchMappingDocument() + + +class TestMigrator: + def test_migrate_no_revisions(self): + """If a revision isn't available, the migration fails fast.""" + service = Mock() + revisions = SearchRevisionDirectory.empty() + migrator = SearchMigrator(revisions, service) + with pytest.raises(SearchMigrationException): + migrator.migrate(base_name="any", version=23) + + def test_migrate_from_empty(self): + """With an empty search state, migrating to a supported version works.""" + service = Mock() + service.read_pointer = MagicMock(return_value=None) + service.write_pointer = MagicMock(return_value=None) + service.index_is_populated = MagicMock(return_value=False) + service.index_set_populated = MagicMock() + + revision = EmptyRevision(3) + revisions = SearchRevisionDirectory({revision.version: revision}) + migrator = SearchMigrator(revisions, service) + + migration = migrator.migrate(base_name="works", version=revision.version) + migration.finish() + + # The sequence of expected calls. + service.create_empty_index.assert_called_with() + service.read_pointer.assert_called_with() + # The read pointer didn't exist, so it's set to the empty index + service.read_pointer_set_empty.assert_called_with() + service.write_pointer.assert_called_with() + # The new index is created and populated. + service.index_create.assert_called_with(revision) + service.populate_index.assert_not_called() + # Both the read and write pointers are set. 
+ service.write_pointer_set.assert_called_with(revision) + service.read_pointer_set.assert_called_with(revision) + service.index_set_populated.assert_called_with(revision) + + def test_migrate_upgrade(self): + """Index 2 exists, and we can migrate to 3.""" + service = Mock() + service.read_pointer = MagicMock(return_value="works-v2") + service.write_pointer = MagicMock(return_value=None) + service.index_is_populated = MagicMock(return_value=False) + service.index_set_mapping = MagicMock() + service.index_submit_documents = MagicMock() + service.index_set_populated = MagicMock() + + revision = EmptyRevision(3) + revisions = SearchRevisionDirectory({revision.version: revision}) + migrator = SearchMigrator(revisions, service) + + docs = migrator.migrate(base_name="works", version=revision.version) + docs.add_documents([{"_id": "1"}, {"_id": "2"}, {"_id": "3"}]) + docs.add_documents([{"_id": "4"}, {"_id": "5"}, {"_id": "6"}]) + docs.add_documents([{"_id": "7"}, {"_id": "8"}]) + docs.finish() + + # The sequence of expected calls. + service.create_empty_index.assert_called_with() + # The read pointer existed, so it's left alone for now. + service.read_pointer.assert_called_with() + service.write_pointer.assert_called_with() + # The index for version 3 is created and populated. + service.index_create.assert_called_with(revision) + service.index_set_mapping.assert_called_with(revision) + service.index_submit_documents.assert_has_calls( + [ + call( + pointer="works-v3", + documents=[{"_id": "1"}, {"_id": "2"}, {"_id": "3"}], + ), + call( + pointer="works-v3", + documents=[{"_id": "4"}, {"_id": "5"}, {"_id": "6"}], + ), + call( + pointer="works-v3", + documents=[{"_id": "7"}, {"_id": "8"}], + ), + ] + ) + # Both the read and write pointers are set. + service.write_pointer_set.assert_called_with(revision) + service.read_pointer_set.assert_called_with(revision) + service.index_set_populated.assert_called_with(revision) + + def test_migrate_upgrade_cancel(self): + """Cancelling a migration leaves the pointers untouched.""" + service = Mock() + service.read_pointer = MagicMock(return_value="works-v2") + service.write_pointer = MagicMock(return_value=None) + service.index_is_populated = MagicMock(return_value=False) + service.index_set_mapping = MagicMock() + service.index_submit_documents = MagicMock() + service.index_set_populated = MagicMock() + + revision = EmptyRevision(3) + revisions = SearchRevisionDirectory({revision.version: revision}) + migrator = SearchMigrator(revisions, service) + + docs = migrator.migrate(base_name="works", version=revision.version) + docs.add_documents([{"_id": "1"}, {"_id": "2"}, {"_id": "3"}]) + docs.add_documents([{"_id": "4"}, {"_id": "5"}, {"_id": "6"}]) + docs.add_documents([{"_id": "7"}, {"_id": "8"}]) + docs.cancel() + + # The sequence of expected calls. + service.create_empty_index.assert_called_with() + # The read pointer existed, so it's left alone for now. + service.read_pointer.assert_called_with() + service.write_pointer.assert_called_with() + # The index for version 3 is created and populated. 
+ service.index_create.assert_called_with(revision) + service.index_set_mapping.assert_called_with(revision) + service.index_submit_documents.assert_has_calls( + [ + call( + pointer="works-v3", + documents=[{"_id": "1"}, {"_id": "2"}, {"_id": "3"}], + ), + call( + pointer="works-v3", + documents=[{"_id": "4"}, {"_id": "5"}, {"_id": "6"}], + ), + call( + pointer="works-v3", + documents=[{"_id": "7"}, {"_id": "8"}], + ), + ] + ) + # Both the read and write pointers are left untouched. + service.write_pointer_set.assert_not_called() + service.read_pointer_set.assert_not_called() + service.index_set_populated.assert_not_called() + + def test_migrate_no_op(self): + """Index 3 already exists, so migrating to 3 is a no-op.""" + service = Mock() + service.read_pointer = MagicMock(return_value="works-v3") + service.write_pointer = MagicMock(return_value=SearchWritePointer("works", 3)) + service.index_is_populated = MagicMock(return_value=True) + service.index_set_populated = MagicMock() + + revision = EmptyRevision(3) + revisions = SearchRevisionDirectory({revision.version: revision}) + migrator = SearchMigrator(revisions, service) + docs = migrator.migrate("works", revision.version) + assert docs is None + + # The sequence of expected calls. + service.create_empty_index.assert_called_with() + # The read pointer existed, so it's left alone for now. + service.read_pointer.assert_called_with() + service.write_pointer.assert_called_with() + # The index for version 3 already exists and is populated, so nothing happens. + service.index_create.assert_not_called() + service.index_set_mapping.assert_not_called() + # The write pointer is set redundantly. + service.write_pointer_set.assert_called_with(revision) + # The read pointer is set redundantly. + service.read_pointer_set.assert_called_with(revision) + # The "indexed" flag is set redundantly. + service.index_set_populated.assert_called_with(revision) + + def test_migrate_from_indexed_2_to_3_unpopulated(self): + """Index 3 exists but is not populated. Migrating involves populating it.""" + service = Mock() + service.read_pointer = MagicMock(return_value="works-v2") + service.write_pointer = MagicMock(return_value=SearchWritePointer("works", 2)) + service.index_is_populated = MagicMock(return_value=False) + service.index_set_populated = MagicMock() + + revision = EmptyRevision(3) + revisions = SearchRevisionDirectory({revision.version: revision}) + migrator = SearchMigrator(revisions, service) + migration = migrator.migrate("works", revision.version) + migration.add_documents([]) + migration.finish() + + # The sequence of expected calls. + service.create_empty_index.assert_called_with() + # The read pointer existed, so it's left alone for now. + service.read_pointer.assert_called_with() + service.write_pointer.assert_called_with() + # The index for version 3 exists but isn't populated, so it is populated. + service.index_create.assert_called_with(revision) + service.index_set_mapping.assert_called_with(revision) + service.index_submit_documents.assert_has_calls( + [ + call( + pointer="works-v3", + documents=[], + ) + ] + ) + # Both the read and write pointers are updated. 
+ service.write_pointer_set.assert_called_with(revision) + service.read_pointer_set.assert_called_with(revision) + service.index_set_populated.assert_called_with(revision) + + def test_migrate_from_indexed_2_to_3_write_unset(self): + """Index 3 exists and is populated, but the write pointer is unset.""" + service = Mock() + service.read_pointer = MagicMock(return_value="works-v2") + service.write_pointer = MagicMock(return_value=None) + service.index_is_populated = MagicMock(return_value=True) + service.index_set_populated = MagicMock() + + revision = EmptyRevision(3) + revisions = SearchRevisionDirectory({revision.version: revision}) + migrator = SearchMigrator(revisions, service) + docs = migrator.migrate("works", revision.version) + assert docs is None + + # The sequence of expected calls. + service.create_empty_index.assert_called_with() + # The read pointer existed, so it's left alone for now. + service.read_pointer.assert_called_with() + # The write pointer is completely unset. + service.write_pointer.assert_called_with() + # The index for version 3 exists and is populated. The create call is redundant but harmless. + service.index_create.assert_called_with(revision) + service.populate_index.assert_not_called() + # Both the read and write pointers are updated. + service.write_pointer_set.assert_called_with(revision) + service.read_pointer_set.assert_called_with(revision) + service.index_set_populated.assert_called_with(revision) diff --git a/tests/core/search/test_search_revision_directory.py b/tests/core/search/test_search_revision_directory.py new file mode 100644 index 0000000000..b5c826365e --- /dev/null +++ b/tests/core/search/test_search_revision_directory.py @@ -0,0 +1,50 @@ +from unittest import mock + +import pytest + +from core.search.document import SearchMappingDocument +from core.search.revision import SearchSchemaRevision +from core.search.revision_directory import SearchRevisionDirectory + + +class AnyNumberRevision(SearchSchemaRevision): + def __init__(self, number): + self.SEARCH_VERSION = number + super().__init__() + + def mapping_document(self) -> SearchMappingDocument: + return SearchMappingDocument() + + +class TestSearchRevisionDirectory: + def test_create(self): + """Also tests _create_revisions""" + with mock.patch("core.search.revision_directory.REVISIONS", new=[]): + assert SearchRevisionDirectory.create().available == {} + + with mock.patch( + "core.search.revision_directory.REVISIONS", + new=[AnyNumberRevision(1), AnyNumberRevision(2)], + ): + assert list(SearchRevisionDirectory.create().available.keys()) == [1, 2] + + with mock.patch( + "core.search.revision_directory.REVISIONS", + new=[AnyNumberRevision(1), AnyNumberRevision(1)], + ): + with pytest.raises(ValueError) as raised: + SearchRevisionDirectory.create() + assert str(raised.value) == "Revision version 1 is defined multiple times" + + def test_highest(self): + with mock.patch( + "core.search.revision_directory.REVISIONS", + new=[AnyNumberRevision(1), AnyNumberRevision(2)], + ): + assert SearchRevisionDirectory.create().highest().version == 2 + + with mock.patch( + "core.search.revision_directory.REVISIONS", + new=[AnyNumberRevision(17), AnyNumberRevision(2)], + ): + assert SearchRevisionDirectory.create().highest().version == 17 diff --git a/tests/core/search/test_service.py b/tests/core/search/test_service.py new file mode 100644 index 0000000000..53ece7748d --- /dev/null +++ b/tests/core/search/test_service.py @@ -0,0 +1,158 @@ +from typing import Iterable + +from core.search.document import 
LONG, SearchMappingDocument +from core.search.revision import SearchSchemaRevision +from core.search.service import SearchServiceOpensearch1 +from tests.fixtures.search import ExternalSearchFixture + + +class BasicMutableRevision(SearchSchemaRevision): + SEARCH_VERSION = 0 + + def __init__(self, version: int): + self.SEARCH_VERSION = version + super().__init__() + self.document = SearchMappingDocument() + + def mapping_document(self) -> SearchMappingDocument: + return self.document + + +BASE_NAME = "base" + + +class TestService: + """ + Tests to verify that the Opensearch service implementation has the semantics we expect. + """ + + def test_create_empty_idempotent( + self, external_search_fixture: ExternalSearchFixture + ): + """Creating the empty index is idempotent.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + service.create_empty_index() + + # Log the index so that the fixture cleans it up afterward. + external_search_fixture.record_index("base-empty") + + service.create_empty_index() + + indices = external_search_fixture.search.indices.client.indices + assert indices is not None + assert indices.exists("base-empty") + + def test_create_index_idempotent( + self, external_search_fixture: ExternalSearchFixture + ): + """Creating any index is idempotent.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + revision = BasicMutableRevision(23) + service.index_create(revision) + service.index_create(revision) + + # Log the index so that the fixture cleans it up afterward. + external_search_fixture.record_index("base-v23") + + indices = external_search_fixture.search.indices.client.indices + assert indices is not None + assert indices.exists(revision.name_for_index("base")) + + def test_read_pointer_none(self, external_search_fixture: ExternalSearchFixture): + """The read pointer is initially unset.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + assert None == service.read_pointer() + + def test_write_pointer_none(self, external_search_fixture: ExternalSearchFixture): + """The write pointer is initially unset.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + assert None == service.write_pointer() + + def test_read_pointer_set(self, external_search_fixture: ExternalSearchFixture): + """Setting the read pointer works.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + revision = BasicMutableRevision(23) + service.index_create(revision) + + # Log the index so that the fixture cleans it up afterward. + external_search_fixture.record_index("base-v23") + + service.read_pointer_set(revision) + assert "base-v23" == service.read_pointer() + + def test_read_pointer_set_empty( + self, external_search_fixture: ExternalSearchFixture + ): + """Setting the read pointer to the empty index works.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + service.create_empty_index() + + # Log the index so that the fixture cleans it up afterward. 
+ external_search_fixture.record_index("base-empty") + + service.read_pointer_set_empty() + assert "base-empty" == service.read_pointer() + + def test_write_pointer_set(self, external_search_fixture: ExternalSearchFixture): + """Setting the write pointer works.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + revision = BasicMutableRevision(23) + service.index_create(revision) + + # Log the index so that the fixture cleans it up afterward. + external_search_fixture.record_index("base-v23") + + service.write_pointer_set(revision) + + pointer = service.write_pointer() + assert pointer is not None + assert "base-v23" == pointer.target_name + + def test_populate_index_idempotent( + self, external_search_fixture: ExternalSearchFixture + ): + """Populating an index is idempotent.""" + service = SearchServiceOpensearch1(external_search_fixture.search, BASE_NAME) + revision = BasicMutableRevision(23) + + mappings = revision.mapping_document() + mappings.properties["x"] = LONG + mappings.properties["y"] = LONG + + # The format expected by the opensearch bulk helper is completely undocumented. + # It does, however, appear to use mostly the same format as the Elasticsearch equivalent. + # See: https://elasticsearch-py.readthedocs.io/en/v7.13.1/helpers.html#bulk-helpers + documents: Iterable[dict] = [ + { + "_index": revision.name_for_index("base"), + "_type": "_doc", + "_id": 1, + "_source": {"x": 23, "y": 24}, + }, + { + "_index": revision.name_for_index("base"), + "_type": "_doc", + "_id": 2, + "_source": {"x": 25, "y": 26}, + }, + { + "_index": revision.name_for_index("base"), + "_type": "_doc", + "_id": 3, + "_source": {"x": 27, "y": 28}, + }, + ] + + service.index_create(revision) + + # Log the index so that the fixture cleans it up afterward. 
+ external_search_fixture.record_index("base-v23") + service.index_submit_documents("base-v23", documents) + service.index_submit_documents("base-v23", documents) + + indices = external_search_fixture.search.indices.client.indices + assert indices is not None + assert indices.exists(revision.name_for_index("base")) + assert indices.get(revision.name_for_index("base"))["base-v23"]["mappings"] == { + "properties": mappings.serialize_properties() + } diff --git a/tests/core/test_external_search.py b/tests/core/test_external_search.py index 5ca325cc7e..4dcc6e7b41 100644 --- a/tests/core/test_external_search.py +++ b/tests/core/test_external_search.py @@ -4,6 +4,7 @@ import uuid from datetime import datetime from typing import Callable, Collection, List +from unittest.mock import MagicMock import pytest from opensearch_dsl import Q @@ -20,18 +21,15 @@ ) from opensearch_dsl.query import Query as opensearch_dsl_query from opensearch_dsl.query import Range, Term, Terms -from opensearchpy.exceptions import OpenSearchException from psycopg2.extras import NumericRange +from sqlalchemy.sql import Delete as sqlaDelete from core.classifier import Classifier -from core.config import CannotLoadConfiguration, Configuration +from core.config import Configuration from core.external_search import ( - CurrentMapping, ExternalSearchIndex, Filter, JSONQuery, - MockExternalSearchIndex, - MockSearchResult, Query, QueryParseException, QueryParser, @@ -39,7 +37,6 @@ SearchIndexCoverageProvider, SortKeyPagination, WorkSearchResult, - mock_search_index, ) from core.lane import Facets, FeaturedFacets, Pagination, SearchFacets, WorkList from core.metadata_layer import ContributorData, IdentifierData @@ -58,6 +55,11 @@ from core.model.classification import Subject from core.model.work import Work from core.problem_details import INVALID_INPUT +from core.scripts import RunWorkCoverageProviderScript +from core.search.document import SearchMappingDocument +from core.search.revision import SearchSchemaRevision +from core.search.revision_directory import SearchRevisionDirectory +from core.search.v5 import SearchV5 from core.util.cache import CachedData from core.util.datetime_helpers import datetime_utc, from_timestamp from tests.fixtures.database import ( @@ -66,205 +68,20 @@ PerfTimer, ) from tests.fixtures.library import LibraryFixture -from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchFixture +from tests.fixtures.search import ( + EndToEndSearchFixture, + ExternalSearchFixture, + ExternalSearchFixtureFake, +) +from tests.mocks.search import SearchServiceFailureMode RESEARCH = Term(audience=Classifier.AUDIENCE_RESEARCH.lower()) class TestExternalSearch: - def test_load(self, external_search_fixture: ExternalSearchFixture): - session = external_search_fixture.db.session - - # Normally, load() returns a brand new ExternalSearchIndex - # object. - loaded = ExternalSearchIndex.load(session, in_testing=True) - assert isinstance(loaded, ExternalSearchIndex) - - # However, inside the mock_search_index context manager, - # load() returns whatever object was mocked. - mock = object() - with mock_search_index(mock): - assert mock == ExternalSearchIndex.load(session, in_testing=True) - - def test_constructor(self, external_search_fixture: ExternalSearchFixture): - session = external_search_fixture.db.session - - # The configuration of the search ExternalIntegration becomes the - # configuration of the ExternalSearchIndex. 
- # - # This basically just verifies that the test search term is taken - # from the ExternalIntegration. - class MockIndex(ExternalSearchIndex): - def set_works_index_and_alias(self, _db): - self.set_works_index_and_alias_called_with = _db - - index = MockIndex(session) - assert session == index.set_works_index_and_alias_called_with - assert "test_search_term" == index.test_search_term - # TODO: would be good to check the put_script calls, but the # current constructor makes put_script difficult to mock. - def test_opensearch_error_in_constructor_becomes_cannotloadconfiguration( - self, external_search_fixture: ExternalSearchFixture - ): - session = external_search_fixture.db.session - - """If we're unable to establish a connection to the Opensearch - server, CannotLoadConfiguration (which the circulation manager can - understand) is raised instead of an Opensearch-specific exception. - """ - - # Unlike other tests in this module, this one runs even if no - # OpenSearch server is running, since it's testing what - # happens if there's a problem communicating with that server. - class Mock(ExternalSearchIndex): - def set_works_index_and_alias(self, _db): - raise OpenSearchException("very bad") - - with pytest.raises(CannotLoadConfiguration) as excinfo: - Mock(session) - assert "Exception communicating with Search server: " in str(excinfo.value) - assert "very bad" in str(excinfo.value) - - def test_works_index_name(self, external_search_fixture: ExternalSearchFixture): - session = external_search_fixture.db.session - - """The name of the search index is the prefix (defined in - ExternalSearchTest.setup) plus a version number associated - with this version of the core code. - """ - version = external_search_fixture.search.mapping.VERSION_NAME - assert ( - f"test_index-{version}" - == external_search_fixture.search.works_index_name(session) - ) - - def test_setup_index_creates_new_index( - self, external_search_fixture: ExternalSearchFixture - ): - current_index = external_search_fixture.search.works_index - # This calls self.search.setup_index (which is what we're testing) - # and also registers the index to be torn down at the end of the test. - external_search_fixture.setup_index("the_other_index") - - # Both indices exist. - assert True == external_search_fixture.search.indices.exists(current_index) - assert True == external_search_fixture.search.indices.exists("the_other_index") - - # The index for the app's search is still the original index. - assert current_index == external_search_fixture.search.works_index - - # The alias hasn't been passed over to the new index. - alias = "test_index-" + external_search_fixture.search.CURRENT_ALIAS_SUFFIX - assert alias == external_search_fixture.search.works_alias - assert True == external_search_fixture.search.indices.exists_alias( - alias, index=current_index - ) - assert False == external_search_fixture.search.indices.exists_alias( - alias, index="the_other_index" - ) - - def test_set_works_index_and_alias( - self, external_search_fixture: ExternalSearchFixture - ): - session = external_search_fixture.db.session - search = external_search_fixture.search - - # If the index or alias don't exist, set_works_index_and_alias - # will create them. 
- external_search_fixture.integration.set_setting( - ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY, "banana" - ) - search.set_works_index_and_alias(session) - - expected_index = "banana-" + CurrentMapping.version_name() - expected_alias = "banana-" + search.CURRENT_ALIAS_SUFFIX - assert expected_index == search.works_index - assert expected_alias == search.works_alias - - # If the index and alias already exist, set_works_index_and_alias - # does nothing. - search.set_works_index_and_alias(session) - assert expected_index == search.works_index - assert expected_alias == search.works_alias - - def test_setup_current_alias(self, external_search_fixture: ExternalSearchFixture): - session = external_search_fixture.db.session - search = external_search_fixture.search - - # The index was generated from the string in configuration. - version = CurrentMapping.version_name() - index_name = "test_index-" + version - assert index_name == search.works_index - assert True == search.indices.exists(index_name) - - # The alias is also created from the configuration. - alias = "test_index-" + search.CURRENT_ALIAS_SUFFIX - assert alias == search.works_alias - assert True == search.indices.exists_alias(alias, index_name) - - # If the -current alias is already set on a different index, it - # won't be reassigned. Instead, search will occur against the - # index itself. - ExternalSearchIndex.reset() - external_search_fixture.integration.set_setting( - ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY, "my-app" - ) - self.search = ExternalSearchIndex(session) - - assert "my-app-%s" % version == self.search.works_index - assert "my-app-" + self.search.CURRENT_ALIAS_SUFFIX == self.search.works_alias - - def test_transfer_current_alias( - self, external_search_fixture: ExternalSearchFixture - ): - session = external_search_fixture.db.session - search = external_search_fixture.search - - # An error is raised if you try to set the alias to point to - # an index that doesn't already exist. - pytest.raises( - ValueError, search.transfer_current_alias, session, "no-such-index" - ) - - original_index = search.works_index - - # If the -current alias doesn't exist, it's created - # and everything is updated accordingly. - search.indices.delete_alias( - index=original_index, name="test_index-current", ignore=[404] - ) - search.setup_index(new_index="test_index-v9999") - search.transfer_current_alias(session, "test_index-v9999") - assert "test_index-v9999" == search.works_index - assert "test_index-current" == search.works_alias - - # If the -current alias already exists on the index, - # it's used without a problem. - search.transfer_current_alias(session, "test_index-v9999") - assert "test_index-v9999" == search.works_index - assert "test_index-current" == search.works_alias - - # If the -current alias is being used on a different version of the - # index, it's deleted from that index and placed on the new one. - search.setup_index(original_index) - search.transfer_current_alias(session, original_index) - assert original_index == search.works_index - assert "test_index-current" == search.works_alias - - # It has been removed from other index. - assert False == search.indices.exists_alias( - index="test_index-v9999", name="test_index-current" - ) - - # And only exists on the new index. - alias_indices = list(search.indices.get_alias(name="test_index-current").keys()) - assert [original_index] == alias_indices - - # If the index doesn't have the same base name, an error is raised. 
- pytest.raises(ValueError, search.transfer_current_alias, session, "banana-v10") - def test_query_works(self): # Verify that query_works operates by calling query_works_multi. # The actual functionality of query_works and query_works_multi @@ -315,28 +132,34 @@ def query_works_multi(self, queries, debug=False): assert pagination.offset == default.offset assert pagination.size == default.size - def test__run_self_tests(self, external_search_fixture: ExternalSearchFixture): - transaction = external_search_fixture.db + def test__run_self_tests(self, end_to_end_search_fixture: EndToEndSearchFixture): + transaction = end_to_end_search_fixture.db session = transaction.session - index = MockExternalSearchIndex() + index = end_to_end_search_fixture.external_search_index + + # Intrusively set the search term to something useful. + index._test_search_term = "How To Search" + + # Start with an up-to-date but empty index. + index.start_migration().finish() # First, see what happens when the search returns no results. - test_results = [x for x in index._run_self_tests(session, in_testing=True)] + test_results = [x for x in index._run_self_tests(session)] - assert "Search results for 'a search term':" == test_results[0].name + assert "Search results for 'How To Search':" == test_results[0].name assert True == test_results[0].success assert [] == test_results[0].result - assert "Search document for 'a search term':" == test_results[1].name + assert "Search document for 'How To Search':" == test_results[1].name assert True == test_results[1].success - assert "[]" == test_results[1].result + assert {} != test_results[1].result - assert "Raw search results for 'a search term':" == test_results[2].name + assert "Raw search results for 'How To Search':" == test_results[2].name assert True == test_results[2].success assert [] == test_results[2].result assert ( - "Total number of search results for 'a search term':" + "Total number of search results for 'How To Search':" == test_results[3].name ) assert True == test_results[3].success @@ -351,34 +174,29 @@ def test__run_self_tests(self, external_search_fixture: ExternalSearchFixture): assert "{}" == test_results[5].result # Set up the search index so it will return a result. - collection = transaction.collection() + work = end_to_end_search_fixture.external_search.default_work( + title="How To Search" + ) + work.presentation_ready = True + work.presentation_edition.subtitle = "How To Search" + work.presentation_edition.series = "Classics" + work.summary_text = "How To Search!" 
+ work.presentation_edition.publisher = "Project Gutenberg" + work.last_update_time = datetime_utc(2019, 1, 1) + work.license_pools[0].licenses_available = 100000 - search_result = MockSearchResult("Sample Book Title", "author", {}, "id") - index.index("index", "id", search_result) - test_results = [x for x in index._run_self_tests(session, in_testing=True)] + docs = index.start_updating_search_documents() + docs.add_documents(index.create_search_documents_from_works([work])) + docs.finish() - assert "Search results for 'a search term':" == test_results[0].name - assert True == test_results[0].success - assert ["Sample Book Title (author)"] == test_results[0].result + test_results = [x for x in index._run_self_tests(session)] - assert "Search document for 'a search term':" == test_results[1].name - assert True == test_results[1].success - result = json.loads(test_results[1].result) - sample_book = { - "author": "author", - "meta": {"id": "id", "_sort": ["Sample Book Title", "author", "id"]}, - "id": "id", - "title": "Sample Book Title", - } - assert sample_book == result - - assert "Raw search results for 'a search term':" == test_results[2].name - assert True == test_results[2].success - result = json.loads(test_results[2].result[0]) - assert sample_book == result + assert "Search results for 'How To Search':" == test_results[0].name + assert True == test_results[0].success + assert [f"How To Search ({work.author})"] == test_results[0].result assert ( - "Total number of search results for 'a search term':" + "Total number of search results for 'How To Search':" == test_results[3].name ) assert True == test_results[3].success @@ -391,32 +209,17 @@ def test__run_self_tests(self, external_search_fixture: ExternalSearchFixture): assert "Total number of documents per collection:" == test_results[5].name assert True == test_results[5].success result = json.loads(test_results[5].result) - assert {collection.name: 1} == result - - def test_update_mapping(self, external_search_fixture: ExternalSearchFixture): - search = external_search_fixture.search - - search.mapping.add_properties({"long": ["new_long_property"]}) - put_mapping = search._update_index_mapping(dry_run=True) - assert "new_long_property" in put_mapping - put_mapping = search._update_index_mapping(dry_run=False) - assert "new_long_property" in put_mapping - put_mapping = search._update_index_mapping(dry_run=True) - assert "new_long_property" not in put_mapping + assert {"Default Collection": 1} == result - new_mapping = search.indices.get_mapping(search.works_index) - new_mapping = new_mapping[search.works_index]["mappings"] - assert "new_long_property" in new_mapping["properties"] - -class TestCurrentMapping: +class TestSearchV5: def test_character_filters(self): # Verify the functionality of the regular expressions we tell # Opensearch to use when normalizing fields that will be used # for searching. filters = [] - for filter_name in CurrentMapping.AUTHOR_CHAR_FILTER_NAMES: - configuration = CurrentMapping.CHAR_FILTERS[filter_name] + for filter_name in SearchV5.AUTHOR_CHAR_FILTER_NAMES: + configuration = SearchV5.CHAR_FILTERS[filter_name] find = re.compile(configuration["pattern"]) replace = configuration["replacement"] # Hack to (imperfectly) convert Java regex format to Python format. 
@@ -700,6 +503,7 @@ def test_query_works( transaction = fixture.external_search.db session = transaction.session + fixture.external_search_index.start_migration().finish() data = self._populate_works(fixture) fixture.populate_search_index() @@ -722,7 +526,7 @@ def test_query_works( # Set up convenient aliases for methods we'll be calling a # lot. - query = fixture.external_search.search.query_works + query = fixture.external_search_index.query_works expect = fixture.expect_results # First, test pagination. @@ -1174,7 +978,7 @@ def pages(worklist): while pagination: pages.append( worklist.works( - session, facets, pagination, fixture.external_search.search + session, facets, pagination, fixture.external_search_index ) ) pagination = pagination.next_page @@ -1305,8 +1109,8 @@ def test_remove_work(self, end_to_end_search_fixture: EndToEndSearchFixture): search = end_to_end_search_fixture.external_search.search data = self._populate_works(end_to_end_search_fixture) end_to_end_search_fixture.populate_search_index() - search.remove_work(data.moby_dick) - search.remove_work(data.moby_duck) + end_to_end_search_fixture.external_search_index.remove_work(data.moby_dick) + end_to_end_search_fixture.external_search_index.remove_work(data.moby_duck) # Immediately querying never works, the search index needs to refresh its cache/index/data search.indices.refresh() @@ -1360,7 +1164,7 @@ def test_facet_filtering(self, end_to_end_search_fixture: EndToEndSearchFixture) # Add all the works created in the setup to the search index. SearchIndexCoverageProvider( - session, search_index_client=fixture.external_search.search + session, search_index_client=fixture.external_search_index ).run_once_and_update_timestamp() # Sleep to give the index time to catch up. @@ -2253,7 +2057,11 @@ def test_run(self, end_to_end_search_fixture: EndToEndSearchFixture): def works(worklist, facets): return worklist.works( - session, facets, None, fixture.external_search.search, debug=True + session, + facets, + None, + search_engine=fixture.external_search_index, + debug=True, ) def assert_featured(description, worklist, facets, expect): @@ -2305,7 +2113,7 @@ def assert_featured(description, worklist, facets, expect): # available books will show up before all of the unavailable # books. only_availability_matters = worklist.works( - session, facets, None, fixture.external_search.search, debug=True + session, facets, None, fixture.external_search_index, debug=True ) assert 5 == len(only_availability_matters) last_two = only_availability_matters[-2:] @@ -4356,7 +4164,10 @@ def validate_sort_order(filter, main_field): assert {} == sort # The script is the 'simplified.work_last_update' stored script. - assert CurrentMapping.script_name("work_last_update") == script.pop("stored") + script_name = ( + SearchRevisionDirectory.create().highest().script_name("work_last_update") + ) + assert script_name == script.pop("stored") # Two parameters are passed into the script -- the IDs of the # collections and the lists relevant to the query. 
This is so @@ -4887,107 +4698,102 @@ def test_next_page(self): class TestBulkUpdate: def test_works_not_presentation_ready_kept_in_index( - self, db: DatabaseTransactionFixture + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, ): w1 = db.work() w1.set_presentation_ready() w2 = db.work() w2.set_presentation_ready() w3 = db.work() - index = MockExternalSearchIndex() - successes, failures = index.bulk_update([w1, w2, w3]) + index = external_search_fake_fixture.external_search + + docs = index.start_updating_search_documents() + failures = docs.add_documents( + index.create_search_documents_from_works([w1, w2, w3]) + ) + docs.finish() # All three works are regarded as successes, because their # state was successfully mirrored to the index. - assert {w1, w2, w3} == set(successes) assert [] == failures # All three works were inserted into the index, even the one # that's not presentation-ready. - ids = {x[-1] for x in list(index.docs.keys())} + ids = set( + map(lambda d: d["_id"], external_search_fake_fixture.search.documents_all()) + ) assert {w1.id, w2.id, w3.id} == ids # If a work stops being presentation-ready, it is kept in the # index. w2.presentation_ready = False - successes, failures = index.bulk_update([w1, w2, w3]) - assert {w1.id, w2.id, w3.id} == {x[-1] for x in list(index.docs.keys())} - assert {w1, w2, w3} == set(successes) + docs = index.start_updating_search_documents() + failures = docs.add_documents( + index.create_search_documents_from_works([w1, w2, w3]) + ) + docs.finish() + assert {w1.id, w2.id, w3.id} == set( + map(lambda d: d["_id"], external_search_fake_fixture.search.documents_all()) + ) assert [] == failures class TestSearchErrors: def test_search_connection_timeout( - self, external_search_fixture: ExternalSearchFixture + self, external_search_fake_fixture: ExternalSearchFixtureFake ): search, transaction = ( - external_search_fixture, - external_search_fixture.db, + external_search_fake_fixture, + external_search_fake_fixture.db, ) - attempts = [] - - def bulk_with_timeout(docs, raise_on_error=False, raise_on_exception=False): - attempts.append(docs) - - def error(doc): - return dict( - index=dict( - status="TIMEOUT", - exception="ConnectionTimeout", - error="Connection Timeout!", - _id=doc["_id"], - data=doc, - ) - ) - - errors = list(map(error, docs)) - return 0, errors - - search.search.bulk = bulk_with_timeout - + search.search.set_failing_mode( + mode=SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS_TIMEOUT + ) work = transaction.work() work.set_presentation_ready() - successes, failures = search.search.bulk_update([work]) - assert [] == successes + + docs = search.external_search.start_updating_search_documents() + failures = docs.add_documents( + search.external_search.create_search_documents_from_works([work]) + ) assert 1 == len(failures) - assert work == failures[0][0] - assert "Connection Timeout!" == failures[0][1] + assert work.id == failures[0].id + assert "Connection Timeout!" == failures[0].error_message - # When all the documents fail, it tries again once with the same arguments. 
- assert [work.id, work.id] == [docs[0]["_id"] for docs in attempts] + # Submissions are not retried by the base service + assert [work.id] == [ + docs["_id"] for docs in search.search.document_submission_attempts + ] def test_search_single_document_error( - self, external_search_fixture: ExternalSearchFixture + self, external_search_fake_fixture: ExternalSearchFixtureFake ): search, transaction = ( - external_search_fixture, - external_search_fixture.db, + external_search_fake_fixture, + external_search_fake_fixture.db, ) - successful_work = transaction.work() - successful_work.set_presentation_ready() - failing_work = transaction.work() - failing_work.set_presentation_ready() - - def bulk_with_error(docs, raise_on_error=False, raise_on_exception=False): - failures = [ - dict( - data=dict(_id=failing_work.id), - error="There was an error!", - exception="Exception", - ) - ] - success_count = 1 - return success_count, failures - - search.search.bulk = bulk_with_error + search.search.set_failing_mode( + mode=SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS + ) + work = transaction.work() + work.set_presentation_ready() - successes, failures = search.search.bulk_update([successful_work, failing_work]) - assert [successful_work] == successes + docs = search.external_search.start_updating_search_documents() + failures = docs.add_documents( + search.external_search.create_search_documents_from_works([work]) + ) assert 1 == len(failures) - assert failing_work == failures[0][0] - assert "There was an error!" == failures[0][1] + assert work.id == failures[0].id + assert "There was an error!" == failures[0].error_message + + # Submissions are not retried by the base service + assert [work.id] == [ + docs["_id"] for docs in search.search.document_submission_attempts + ] class TestWorkSearchResult: @@ -5011,8 +4817,12 @@ def test_constructor(self, db: DatabaseTransactionFixture): class TestSearchIndexCoverageProvider: - def test_operation(self, db: DatabaseTransactionFixture): - index = MockExternalSearchIndex() + def test_operation( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): + index = external_search_fake_fixture.external_search provider = SearchIndexCoverageProvider(db.session, search_index_client=index) assert WorkCoverageRecord.UPDATE_SEARCH_INDEX_OPERATION == provider.operation @@ -5116,10 +4926,14 @@ def test_to_search_documents_with_missing_data( assert result["title"] == None assert result["target_age"]["lower"] == None - def test_success(self, db: DatabaseTransactionFixture): + def test_success( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): work = db.work() work.set_presentation_ready() - index = MockExternalSearchIndex() + index = external_search_fake_fixture.external_search provider = SearchIndexCoverageProvider(db.session, search_index_client=index) results = provider.process_batch([work]) @@ -5127,25 +4941,21 @@ def test_success(self, db: DatabaseTransactionFixture): assert [work] == results # The work was added to the search index. 
- assert 1 == len(index.docs) - - def test_failure(self, db: DatabaseTransactionFixture): - class DoomedExternalSearchIndex(MockExternalSearchIndex): - """All documents sent to this index will fail.""" - - def bulk(self, docs, **kwargs): - return 0, [ - dict( - data=dict(_id=failing_work["_id"]), - error="There was an error!", - exception="Exception", - ) - for failing_work in docs - ] + search_service = external_search_fake_fixture.search + assert 1 == len(search_service.documents_all()) + def test_failure( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): work = db.work() work.set_presentation_ready() - index = DoomedExternalSearchIndex() + index = external_search_fake_fixture.external_search + external_search_fake_fixture.search.set_failing_mode( + SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS + ) + provider = SearchIndexCoverageProvider(db.session, search_index_client=index) results = provider.process_batch([work]) @@ -5153,7 +4963,77 @@ def bulk(self, docs, **kwargs): [record] = results assert work == record.obj assert True == record.transient - assert "There was an error!" == record.exception + assert "There was an error!" in record.exception + + def test_migration_available( + self, external_search_fake_fixture: ExternalSearchFixtureFake + ): + search = external_search_fake_fixture.external_search + directory = search._revision_directory + + # Create a new highest version + directory._available[10000] = SearchV10000() + search._revision = directory._available[10000] + search._search_service.index_is_populated = lambda x: False + + mock_db = MagicMock() + provider = SearchIndexCoverageProvider(mock_db, search_index_client=search) + + assert provider.migration is not None + assert provider.receiver is None + # Execute is called once with a Delete statement + assert mock_db.execute.call_count == 1 + assert len(mock_db.execute.call_args[0]) == 1 + assert mock_db.execute.call_args[0][0].__class__ == sqlaDelete + + def test_migration_not_available( + self, end_to_end_search_fixture: EndToEndSearchFixture + ): + search = end_to_end_search_fixture.external_search_index + db = end_to_end_search_fixture.db + + migration = search.start_migration() + assert migration is not None + migration.finish() + + provider = SearchIndexCoverageProvider(db.session, search_index_client=search) + assert provider.migration is None + assert provider.receiver is not None + + def test_complete_run_from_script( + self, end_to_end_search_fixture: EndToEndSearchFixture + ): + search = end_to_end_search_fixture.external_search_index + db = end_to_end_search_fixture.db + work = db.work(title="A Test Work", with_license_pool=True) + work.set_presentation_ready(search_index_client=search) + + class _SearchIndexCoverageProvider(SearchIndexCoverageProvider): + _did_call_on_completely_finished = False + + def on_completely_finished(self): + self._did_call_on_completely_finished = True + super().on_completely_finished() + + # Run as the search_index_refresh script would + provider = RunWorkCoverageProviderScript( + _SearchIndexCoverageProvider, _db=db.session, search_index_client=search + ) + provider.run() + + # The run ran till the end + assert provider.providers[0]._did_call_on_completely_finished == True + # The single available work was indexed + results = search.query_works(None) + assert len(results) == 1 + assert results[0]["work_id"] == work.id + + +class SearchV10000(SearchSchemaRevision): + SEARCH_VERSION = 10000 + + def mapping_document(self) -> 
SearchMappingDocument: + return {} class TestJSONQuery: @@ -5563,7 +5443,7 @@ def expect( works, ): query = dict(query=partial_query) - resp = fixture.external_search.search.query_works(query, data.filter) + resp = fixture.external_search_index.query_works(query, data.filter) assert len(resp.hits) == len(works) diff --git a/tests/core/test_lane.py b/tests/core/test_lane.py index 10224a46d4..0ac74afa62 100644 --- a/tests/core/test_lane.py +++ b/tests/core/test_lane.py @@ -16,12 +16,7 @@ EntryPoint, EverythingEntryPoint, ) -from core.external_search import ( - Filter, - MockExternalSearchIndex, - WorkSearchResult, - mock_search_index, -) +from core.external_search import Filter, WorkSearchResult, mock_search_index from core.lane import ( DatabaseBackedFacets, DatabaseBackedWorkList, @@ -57,7 +52,7 @@ from tests.core.mock import LogCaptureHandler from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture -from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchPatchFixture +from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchFixtureFake class TestFacetsWithEntryPoint: @@ -2271,7 +2266,7 @@ def modify_search_filter_hook(self, filter): def test_groups( self, db: DatabaseTransactionFixture, - external_search_patch_fixture: ExternalSearchPatchFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, ): w1 = MockWork(1) w2 = MockWork(2) @@ -2301,7 +2296,9 @@ def groups(self, *args, **kwargs): # 2-tuples; one for each work featured by one of its children # WorkLists. Note that the same work appears twice, through two # different children. - [wwl1, wwl2, wwl3] = wl.groups(db.session) + [wwl1, wwl2, wwl3] = wl.groups( + db.session, search_engine=external_search_fake_fixture.external_search + ) assert (w1, child1) == wwl1 assert (w2, child2) == wwl2 assert (w1, child2) == wwl3 @@ -4086,7 +4083,11 @@ def test_search_target(self, db: DatabaseTransactionFixture): ] == target.audiences assert [Edition.BOOK_MEDIUM] == target.media - def test_search(self, db: DatabaseTransactionFixture): + def test_search( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): # Searching a Lane calls search() on its search_target. # # TODO: This test could be trimmed down quite a bit with @@ -4095,8 +4096,11 @@ def test_search(self, db: DatabaseTransactionFixture): work = db.work(with_license_pool=True) lane = db.lane() - search_client = MockExternalSearchIndex() - search_client.bulk_update([work]) + search_client = end_to_end_search_fixture.external_search_index + docs = end_to_end_search_fixture.external_search_index.start_migration() + assert docs is not None + docs.add_documents(search_client.create_search_documents_from_works([work])) + docs.finish() pagination = Pagination(offset=0, size=1) @@ -4299,6 +4303,7 @@ def test_groups( fixture.external_search.db, fixture.external_search.db.session, ) + fixture.external_search_index.start_migration().finish() # type: ignore [union-attr] # Tell the fixture to call our populate_works method. 
# In this library, the groups feed includes at most two books @@ -4361,8 +4366,17 @@ def test_groups( discredited_nonfiction, children, ]: - t1 = [x.id for x in lane.works(session, facets)] + t1 = [ + x.id + for x in lane.works( + session, + facets, + search_engine=end_to_end_search_fixture.external_search_index, + ) + ] t2 = [x.id for x in lane.works_from_database(session, facets)] + print(f"t1: {t1}") + print(f"t2: {t2}") assert t1 == t2 def assert_contents(g, expect): @@ -4396,7 +4410,7 @@ def make_groups(lane, facets=None, **kwargs): return lane.groups( session, facets=facets, - search_engine=fixture.external_search.search, + search_engine=fixture.external_search_index, debug=True, **kwargs, ) @@ -4602,10 +4616,11 @@ def random_seed_fixture() -> RandomSeedFixture: class TestWorkListGroups: def test_groups_for_lanes_adapts_facets( self, - db: DatabaseTransactionFixture, random_seed_fixture: RandomSeedFixture, - external_search_patch_fixture: ExternalSearchPatchFixture, + end_to_end_search_fixture: EndToEndSearchFixture, ): + db = end_to_end_search_fixture.db + # Verify that _groups_for_lanes gives each of a WorkList's # non-queryable children the opportunity to adapt the incoming # FeaturedFacets objects to its own needs. diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py index 29f78d7c5b..3d7debba0f 100644 --- a/tests/core/test_marc.py +++ b/tests/core/test_marc.py @@ -6,7 +6,7 @@ from pymarc import MARCReader, Record from core.config import CannotLoadConfiguration -from core.external_search import Filter, MockExternalSearchIndex +from core.external_search import Filter from core.lane import WorkList from core.marc import Annotator, MARCExporter, MARCExporterFacets from core.model import ( @@ -27,6 +27,8 @@ from core.s3 import MockS3Uploader from core.util.datetime_helpers import datetime_utc, utc_now from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.search import ExternalSearchFixtureFake +from tests.mocks.search import ExternalSearchIndexFake class TestAnnotator: @@ -584,7 +586,12 @@ def test_create_record_roundtrip(self, db: DatabaseTransactionFixture): new_record = MARCExporter.create_record(new_work, annotator) assert record.as_marc() == new_record.as_marc() - def test_records(self, db: DatabaseTransactionFixture): + def test_records( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): + # external_search_fake_fixture is used only for the integration it creates integration = self._integration(db) now = utc_now() exporter = MARCExporter.from_config(db.default_library()) @@ -593,8 +600,8 @@ def test_records(self, db: DatabaseTransactionFixture): w1 = db.work(genre="Mystery", with_open_access_download=True) w2 = db.work(genre="Mystery", with_open_access_download=True) - search_engine = MockExternalSearchIndex() - search_engine.bulk_update([w1, w2]) + search_engine = ExternalSearchIndexFake(db.session) + search_engine.mock_query_works([w1, w2]) # If there's a storage protocol but not corresponding storage integration, # it raises an exception. @@ -654,6 +661,7 @@ def test_records(self, db: DatabaseTransactionFixture): db.session.delete(cache) + search_engine.mock_query_works([w1, w2]) # It also works with a WorkList instead of a Lane, in which case # there will be no lane in the CachedMARCFile. 
worklist = WorkList() @@ -737,7 +745,7 @@ def test_records(self, db: DatabaseTransactionFixture): # If the search engine returns no contents for the lane, # nothing will be mirrored, but a CachedMARCFile is still # created to track that we checked for updates. - empty_search_engine = MockExternalSearchIndex() + search_engine.mock_query_works([]) mirror = MockS3Uploader() exporter.records( @@ -745,7 +753,7 @@ def test_records(self, db: DatabaseTransactionFixture): annotator, mirror_integration, mirror=mirror, - search_engine=empty_search_engine, + search_engine=search_engine, ) assert [] == mirror.content[0] diff --git a/tests/core/test_opds.py b/tests/core/test_opds.py index 6a595d81d0..21c3a96faf 100644 --- a/tests/core/test_opds.py +++ b/tests/core/test_opds.py @@ -3,11 +3,13 @@ import xml.etree.ElementTree as ET from io import StringIO from typing import Any, Callable, Generator, List, Type +from unittest.mock import MagicMock, Mock import feedparser import pytest from flask_babel import lazy_gettext as _ from lxml import etree +from opensearch_dsl.response import Hit from psycopg2.extras import NumericRange from sqlalchemy.orm import Session @@ -19,7 +21,7 @@ EverythingEntryPoint, MediumEntryPoint, ) -from core.external_search import MockExternalSearchIndex +from core.external_search import ExternalSearchIndex from core.facets import FacetConstants from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList from core.model import ( @@ -54,7 +56,8 @@ from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse, Response from core.util.opds_writer import AtomFeed, OPDSFeed, OPDSMessage from tests.fixtures.database import DatabaseTransactionFixture, DBStatementCounter -from tests.fixtures.search import ExternalSearchPatchFixture +from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchFixtureFake +from tests.mocks.search import ExternalSearchIndexFake class TestBaseAnnotator: @@ -542,6 +545,9 @@ class TestOPDSFixture: history: Lane ya: Lane + def _fake_hit(self, work: Work): + return Hit({"_source": dict(work_id=work.id)}) + @pytest.fixture def opds_fixture(db: DatabaseTransactionFixture) -> TestOPDSFixture: @@ -736,7 +742,7 @@ def test_acquisition_feed_includes_permanent_work_id( def test_lane_feed_contains_facet_links( self, opds_fixture: TestOPDSFixture, - external_search_patch_fixture: ExternalSearchPatchFixture, + end_to_end_search_fixture: EndToEndSearchFixture, ): data, db, session = ( opds_fixture, @@ -747,8 +753,18 @@ def test_lane_feed_contains_facet_links( lane = db.lane() facets = Facets.default(db.default_library()) + migration = end_to_end_search_fixture.external_search_index.start_migration() + assert migration is not None + migration.finish() + cached_feed = AcquisitionFeed.page( - session, "title", "http://the-url.com/", lane, MockAnnotator, facets=facets + session, + "title", + "http://the-url.com/", + lane, + MockAnnotator, + facets=facets, + search_engine=end_to_end_search_fixture.external_search_index, ) u = str(cached_feed) @@ -1229,7 +1245,11 @@ def test_precomposed_entries(self, opds_fixture: TestOPDSFixture): ) assert "foo" in feed - def test_page_feed(self, opds_fixture: TestOPDSFixture): + def test_page_feed( + self, + opds_fixture: TestOPDSFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): data, db, session = ( opds_fixture, opds_fixture.db, @@ -1242,8 +1262,13 @@ def test_page_feed(self, opds_fixture: TestOPDSFixture): work1 = db.work(genre=Contemporary_Romance, with_open_access_download=True) 
work2 = db.work(genre=Contemporary_Romance, with_open_access_download=True) - search_engine = MockExternalSearchIndex() - search_engine.bulk_update([work1, work2]) + search_engine = end_to_end_search_fixture.external_search_index + docs = search_engine.start_migration() + assert docs is not None + docs.add_documents( + search_engine.create_search_documents_from_works([work1, work2]) + ) + docs.finish() facets = Facets.default(db.default_library()) pagination = Pagination(size=1) @@ -1304,7 +1329,11 @@ def make_page(pagination): assert lane.display_name == links[i + 1].get("title") assert MockAnnotator.lane_url(lane) == links[i + 1].get("href") - def test_page_feed_for_worklist(self, opds_fixture: TestOPDSFixture): + def test_page_feed_for_worklist( + self, + opds_fixture: TestOPDSFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): data, db, session = ( opds_fixture, opds_fixture.db, @@ -1317,8 +1346,13 @@ def test_page_feed_for_worklist(self, opds_fixture: TestOPDSFixture): work1 = db.work(genre=Contemporary_Romance, with_open_access_download=True) work2 = db.work(genre=Contemporary_Romance, with_open_access_download=True) - search_engine = MockExternalSearchIndex() - search_engine.bulk_update([work1, work2]) + search_engine = end_to_end_search_fixture.external_search_index + docs = search_engine.start_migration() + assert docs is not None + docs.add_documents( + search_engine.create_search_documents_from_works([work1, work2]) + ) + docs.finish() facets = Facets.default(db.default_library()) pagination = Pagination(size=1) @@ -1449,7 +1483,10 @@ def from_query(pagination): assert 1 == len(parsed["entries"]) assert [] == self._links(parsed, "next") - def test_groups_feed(self, opds_fixture: TestOPDSFixture): + def test_groups_feed( + self, + opds_fixture: TestOPDSFixture, + ): data, db, session = ( opds_fixture, opds_fixture.db, @@ -1468,8 +1505,13 @@ def test_groups_feed(self, opds_fixture: TestOPDSFixture): # of the work don't matter. It just needs to have a LicensePool # so it'll show up in the OPDS feed. work = db.work(title="An epic tome", with_open_access_download=True) - search_engine = MockExternalSearchIndex() - search_engine.bulk_update([work]) + search_engine = MagicMock(spec=ExternalSearchIndex) + # We expect 1 hit per lane + search_engine.query_works_multi.return_value = [ + [data._fake_hit(work)], + [data._fake_hit(work)], + [data._fake_hit(work)], + ] # The lane setup does matter a lot -- that's what controls # how many times the search functionality is invoked. @@ -1499,6 +1541,8 @@ def test_groups_feed(self, opds_fixture: TestOPDSFixture): # constructor. assert isinstance(cached_groups, OPDSFeedResponse) assert private == cached_groups.private + # One query per lane available + assert len(search_engine.query_works_multi.call_args[0][0]) == 3 parsed = feedparser.parse(cached_groups.data) @@ -1543,21 +1587,27 @@ def test_groups_feed(self, opds_fixture: TestOPDSFixture): assert lane.display_name == links[i + 1].get("title") assert annotator.lane_url(lane) == links[i + 1].get("href") - def test_empty_groups_feed(self, opds_fixture: TestOPDSFixture): + def test_empty_groups_feed( + self, + opds_fixture: TestOPDSFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): data, db, session = ( opds_fixture, opds_fixture.db, opds_fixture.db.session, ) + search_engine = end_to_end_search_fixture.external_search_index + docs = search_engine.start_migration() + assert docs is not None + docs.finish() # Test the case where a grouped feed turns up nothing. 
# A Lane, and a Work not in the Lane. test_lane = db.lane("Test Lane", genres=["Mystery"]) - # Mock search index and Annotator. - search_engine = MockExternalSearchIndex() - + # Mock Annotator. class Mock(MockAnnotator): def annotate_feed(self, feed, worklist): self.called = True @@ -1588,7 +1638,11 @@ def annotate_feed(self, feed, worklist): # but our mock Annotator got a chance to modify the feed in place. assert True == annotator.called - def test_search_feed(self, opds_fixture: TestOPDSFixture): + def test_search_feed( + self, + opds_fixture: TestOPDSFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): data, db, session = ( opds_fixture, opds_fixture.db, @@ -1602,8 +1656,8 @@ def test_search_feed(self, opds_fixture: TestOPDSFixture): work2 = db.work(genre=Epic_Fantasy, with_open_access_download=True) pagination = Pagination(size=1) - search_client = MockExternalSearchIndex() - search_client.bulk_update([work1, work2]) + search_client = ExternalSearchIndexFake(session) + search_client.mock_query_works([work1, work2]) facets = SearchFacets(order="author", min_score=10) private = object() @@ -1675,7 +1729,11 @@ def make_page(pagination): breadcrumbs = root.find("{%s}breadcrumbs" % AtomFeed.SIMPLIFIED_NS) assert None == breadcrumbs - def test_cache(self, opds_fixture: TestOPDSFixture): + def test_cache( + self, + opds_fixture: TestOPDSFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): data, db, session = ( opds_fixture, opds_fixture.db, @@ -1689,8 +1747,14 @@ def test_cache(self, opds_fixture: TestOPDSFixture): ) fantasy_lane = data.fantasy - search_engine = MockExternalSearchIndex() - search_engine.bulk_update([work1]) + search_engine = end_to_end_search_fixture.external_search_index + docs = search_engine.start_migration() + assert docs is not None + errors = docs.add_documents( + search_engine.create_search_documents_from_works([work1]) + ) + assert errors == [] + docs.finish() def make_page(): return AcquisitionFeed.page( @@ -1714,7 +1778,9 @@ def make_page(): genre=Epic_Fantasy, with_open_access_download=True, ) - search_engine.bulk_update([work2]) + recv = search_engine.start_updating_search_documents() + recv.add_documents(search_engine.create_search_documents_from_works([work2])) + recv.finish() # The new work does not show up in the feed because # we get the old cached version. @@ -1736,9 +1802,19 @@ class TestAcquisitionFeed: def test_page( self, db, - external_search_patch_fixture: ExternalSearchPatchFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, ): session = db.session + client = external_search_fake_fixture.search.search_multi_client() + + # The search client is supposed to return a set of result sets. + fake_work = MagicMock() + fake_work.work_id = 23 + client.execute = Mock(return_value=[[fake_work]]) + # The code calls "add" on the search client, which is supposed to return a new + # search client with the old search client embedded into it. We don't do that + # here as we're completely faking the search results anyway. + client.add = Mock(return_value=client) # Verify that AcquisitionFeed.page() returns an appropriate OPDSFeedResponse @@ -1753,6 +1829,7 @@ def test_page( MockAnnotator, max_age=10, private=private, + search_engine=external_search_fake_fixture.external_search, ) # The result is an OPDSFeedResponse. 
The 'private' argument, @@ -1925,7 +2002,6 @@ def g(entrypoint): assert "{http://opds-spec.org/2010/catalog}activeFacet" not in l def test_license_tags_no_loan_or_hold(self, db: DatabaseTransactionFixture): - edition, pool = db.edition(with_license_pool=True) availability, holds, copies = AcquisitionFeed.license_tags(pool, None, None) assert dict(status="available") == availability.attrib @@ -1933,7 +2009,6 @@ def test_license_tags_no_loan_or_hold(self, db: DatabaseTransactionFixture): assert dict(total="1", available="1") == copies.attrib def test_license_tags_hold_position(self, db: DatabaseTransactionFixture): - # When a book is placed on hold, it typically takes a while # for the LicensePool to be updated with the new number of # holds. This test verifies the normal and exceptional @@ -1989,7 +2064,6 @@ def test_license_tags_hold_position(self, db: DatabaseTransactionFixture): def test_license_tags_show_unlimited_access_books( self, db: DatabaseTransactionFixture ): - # Arrange edition, pool = db.edition(with_license_pool=True) pool.open_access = False @@ -2852,13 +2926,14 @@ class TestEntrypointLinkInsertion: def test_groups( self, entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture, - external_search_patch_fixture: ExternalSearchPatchFixture, + end_to_end_search_fixture: EndToEndSearchFixture, ): data, db, session = ( entrypoint_link_insertion_fixture, entrypoint_link_insertion_fixture.db, entrypoint_link_insertion_fixture.db.session, ) + end_to_end_search_fixture.external_search_index.start_migration().finish() # type: ignore [union-attr] # When AcquisitionFeed.groups() generates a grouped # feed, it will link to different entry points into the feed, @@ -2876,6 +2951,7 @@ def run(wl=None, facets=None): data.annotator, max_age=0, facets=facets, + search_engine=end_to_end_search_fixture.external_search_index, ) return data.mock.called_with diff --git a/tests/core/test_opds2.py b/tests/core/test_opds2.py index 90b1a80812..4fa66e9973 100644 --- a/tests/core/test_opds2.py +++ b/tests/core/test_opds2.py @@ -7,8 +7,9 @@ from api.app import app from api.opds2 import OPDS2PublicationsAnnotator from core.classifier import Classifier -from core.external_search import MockExternalSearchIndex, SortKeyPagination +from core.external_search import ExternalSearchIndex, SortKeyPagination from core.lane import Facets, Lane, Pagination, SearchFacets +from core.model import ExternalIntegration from core.model.classification import Subject from core.model.datasource import DataSource from core.model.edition import Edition @@ -17,21 +18,25 @@ from core.opds2 import AcquisitonFeedOPDS2, OPDS2Annotator from core.util.flask_util import OPDSFeedResponse from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.search import EndToEndSearchFixture +from tests.mocks.search import SearchServiceFake class TestOPDS2FeedFixture: transaction: DatabaseTransactionFixture - search_engine: MockExternalSearchIndex + search_engine: ExternalSearchIndex fiction: Lane + search_fixture: EndToEndSearchFixture @pytest.fixture def opds2_feed_fixture( - db: DatabaseTransactionFixture, + db: DatabaseTransactionFixture, end_to_end_search_fixture: EndToEndSearchFixture ) -> TestOPDS2FeedFixture: data = TestOPDS2FeedFixture() data.transaction = db - data.search_engine = MockExternalSearchIndex() + data.search_fixture = end_to_end_search_fixture + data.search_engine = data.search_fixture.external_search_index data.fiction = db.lane("Fiction") data.fiction.fiction = True data.fiction.audiences 
= [Classifier.AUDIENCE_ADULT] @@ -49,7 +54,14 @@ def test_publications_feed(self, opds2_feed_fixture: TestOPDS2FeedFixture): work = transaction.work( with_open_access_download=True, authors="Author Name", fiction=True ) - data.search_engine.bulk_update([work]) + + docs = data.search_engine.start_migration() + assert docs is not None + docs.add_documents( + data.search_engine.create_search_documents_from_works([work]) + ) + docs.finish() + result = AcquisitonFeedOPDS2.publications( session, data.fiction, @@ -69,7 +81,6 @@ def test_publications_feed_json(self, opds2_feed_fixture: TestOPDS2FeedFixture): opds2_feed_fixture.transaction, opds2_feed_fixture.transaction.session, ) - works = [ transaction.work( with_open_access_download=True, @@ -96,7 +107,12 @@ def test_publications_feed_json(self, opds2_feed_fixture: TestOPDS2FeedFixture): fiction=True, ), ] - data.search_engine.bulk_update(works) + + docs = data.search_engine.start_migration() + assert docs is not None + docs.add_documents(data.search_engine.create_search_documents_from_works(works)) + docs.finish() + annotator = OPDS2Annotator( "/", Facets.default(transaction.default_library()), @@ -148,9 +164,10 @@ def test_acquisition_facet_links(self, opds2_feed_fixture: TestOPDS2FeedFixture) class TestOPDS2AnnotatorFixture: transaction: DatabaseTransactionFixture - search_engine: MockExternalSearchIndex + search_engine: ExternalSearchIndex fiction: Lane annotator: OPDS2Annotator + search_integration: ExternalIntegration @pytest.fixture @@ -159,7 +176,18 @@ def opds2_annotator_fixture( ) -> TestOPDS2AnnotatorFixture: data = TestOPDS2AnnotatorFixture() data.transaction = db - data.search_engine = MockExternalSearchIndex() + data.search_integration = db.external_integration( + ExternalIntegration.OPENSEARCH, + goal=ExternalIntegration.SEARCH_GOAL, + url="http://does-not-matter.com", # It doesn't matter what URL we specify, because the search service is fake + settings={ + ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY: "test_index", + ExternalSearchIndex.TEST_SEARCH_TERM_KEY: "test_search_term", + }, + ) + data.search_engine = ExternalSearchIndex( + _db=db.session, custom_client_service=SearchServiceFake() + ) data.fiction = db.lane("Fiction") data.fiction.fiction = True data.fiction.audiences = [Classifier.AUDIENCE_ADULT] @@ -210,7 +238,12 @@ def test_image_links(self, opds2_annotator_fixture: TestOPDS2AnnotatorFixture): edition.data_source, media_type="image/png", ) - data.search_engine.bulk_update([work]) + + docs = data.search_engine.start_updating_search_documents() + docs.add_documents( + data.search_engine.create_search_documents_from_works([work]) + ) + docs.finish() result = data.annotator.metadata_for_work(work) assert isinstance(result, dict) @@ -246,7 +279,11 @@ def test_work_metadata(self, opds2_annotator_fixture: TestOPDS2AnnotatorFixture) edition.series = "A series" edition.series_position = 4 - data.search_engine.bulk_update([work]) + docs = data.search_engine.start_updating_search_documents() + docs.add_documents( + data.search_engine.create_search_documents_from_works([work]) + ) + docs.finish() result = data.annotator.metadata_for_work(work) assert isinstance(result, dict) diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index 68009f345e..e7d32e2199 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -16,7 +16,7 @@ from api.lanes import create_default_lanes from core.classifier import Classifier from core.config import CannotLoadConfiguration, Configuration, ConfigurationConstants 
-from core.external_search import Filter, MockExternalSearchIndex +from core.external_search import ExternalSearchIndex, Filter from core.lane import Lane, WorkList from core.metadata_layer import TimestampData from core.model import ( @@ -95,7 +95,7 @@ AlwaysSuccessfulWorkCoverageProvider, ) from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchPatchFixture +from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchFixtureFake class TestScript: @@ -1596,13 +1596,10 @@ class MockWhereAreMyBooks(WhereAreMyBooksScript): form, so we don't have to mess around with StringIO. """ - def __init__(self, _db=None, output=None, search=None): + def __init__(self, search: ExternalSearchIndex, _db=None, output=None): # In most cases a list will do fine for `output`. output = output or [] - # In most tests an empty mock will do for `search`. - search = search or MockExternalSearchIndex() - super().__init__(_db, output, search) self.output = [] @@ -1631,7 +1628,14 @@ def test_no_search_integration(self, db: DatabaseTransactionFixture): == output.getvalue() ) - def test_overall_structure(self, db: DatabaseTransactionFixture): + @pytest.mark.skip( + reason="This test currently freezes inside pytest and has to be killed with SIGKILL." + ) + def test_overall_structure( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): # Verify that run() calls the methods we expect. class Mock(MockWhereAreMyBooks): @@ -1694,13 +1698,19 @@ def explain_collection(self, collection): script.run(cmd_args=["--collection=%s" % collection2.name]) assert [collection2] == script.explained_collections - def test_check_library(self, db: DatabaseTransactionFixture): + def test_check_library( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): # Give the default library a collection and a lane. library = db.default_library() collection = db.default_collection() lane = db.lane(library=library) - script = MockWhereAreMyBooks(db.session) + script = MockWhereAreMyBooks( + _db=db.session, search=end_to_end_search_fixture.external_search_index + ) script.check_library(library) checking, has_collection, has_lanes = script.output @@ -1717,7 +1727,11 @@ def test_check_library(self, db: DatabaseTransactionFixture): assert " This library has no collections -- that's a problem." == no_collection assert " This library has no lanes -- that's a problem." 
== no_lanes - def test_delete_cached_feeds(self, db: DatabaseTransactionFixture): + def test_delete_cached_feeds( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): groups = CachedFeed(type=CachedFeed.GROUPS_TYPE, pagination="") db.session.add(groups) not_groups = CachedFeed(type=CachedFeed.PAGE_TYPE, pagination="") @@ -1725,7 +1739,9 @@ def test_delete_cached_feeds(self, db: DatabaseTransactionFixture): assert 2 == db.session.query(CachedFeed).count() - script = MockWhereAreMyBooks(db.session) + script = MockWhereAreMyBooks( + _db=db.session, search=end_to_end_search_fixture.external_search_index + ) script.delete_cached_feeds() how_many, theyre_gone = script.output assert ( @@ -1747,6 +1763,7 @@ def test_delete_cached_feeds(self, db: DatabaseTransactionFixture): @staticmethod def check_explanation( db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, presentation_ready=1, not_presentation_ready=0, no_delivery_mechanisms=0, @@ -1756,7 +1773,11 @@ def check_explanation( **kwargs, ): """Runs explain_collection() and verifies expected output.""" - script = MockWhereAreMyBooks(db.session, **kwargs) + script = MockWhereAreMyBooks( + _db=db.session, + search=end_to_end_search_fixture.external_search_index, + **kwargs, + ) script.explain_collection(db.default_collection()) out = script.output @@ -1797,46 +1818,90 @@ def check_explanation( [in_search_index, presentation_ready], ) == out.pop(0) - def test_no_presentation_ready_works(self, db: DatabaseTransactionFixture): + def test_no_presentation_ready_works( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): # This work is not presentation-ready. work = db.work(with_license_pool=True) + end_to_end_search_fixture.external_search_index.initialize_indices() work.presentation_ready = False - script = MockWhereAreMyBooks(db.session) + script = MockWhereAreMyBooks( + _db=db.session, search=end_to_end_search_fixture.external_search_index + ) self.check_explanation( + end_to_end_search_fixture=end_to_end_search_fixture, presentation_ready=0, not_presentation_ready=1, db=db, ) - def test_no_delivery_mechanisms(self, db: DatabaseTransactionFixture): + def test_no_delivery_mechanisms( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): # This work has a license pool, but no delivery mechanisms. work = db.work(with_license_pool=True) + end_to_end_search_fixture.external_search_index.initialize_indices() for lpdm in work.license_pools[0].delivery_mechanisms: db.session.delete(lpdm) - self.check_explanation(no_delivery_mechanisms=1, db=db) + self.check_explanation( + no_delivery_mechanisms=1, + db=db, + end_to_end_search_fixture=end_to_end_search_fixture, + ) - def test_suppressed_pool(self, db: DatabaseTransactionFixture): + def test_suppressed_pool( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): # This work has a license pool, but it's suppressed. 
work = db.work(with_license_pool=True) + end_to_end_search_fixture.external_search_index.initialize_indices() work.license_pools[0].suppressed = True - self.check_explanation(suppressed=1, db=db) + self.check_explanation( + suppressed=1, + db=db, + end_to_end_search_fixture=end_to_end_search_fixture, + ) - def test_no_licenses(self, db: DatabaseTransactionFixture): + def test_no_licenses( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): # This work has a license pool, but no licenses owned. work = db.work(with_license_pool=True) + end_to_end_search_fixture.external_search_index.initialize_indices() work.license_pools[0].licenses_owned = 0 - self.check_explanation(not_owned=1, db=db) + self.check_explanation( + not_owned=1, + db=db, + end_to_end_search_fixture=end_to_end_search_fixture, + ) - def test_search_engine(self, db: DatabaseTransactionFixture): - output = StringIO() - search = MockExternalSearchIndex() + def test_search_engine( + self, + db: DatabaseTransactionFixture, + end_to_end_search_fixture: EndToEndSearchFixture, + ): + search = end_to_end_search_fixture.external_search_index work = db.work(with_license_pool=True) work.presentation_ready = True - search.bulk_update([work]) - # This MockExternalSearchIndex will always claim there is one - # result. - self.check_explanation(search=search, in_search_index=1, db=db) + docs = search.start_migration() + docs.add_documents(search.create_search_documents_from_works([work])) + docs.finish() + + # This search index will always claim there is one result. + self.check_explanation( + in_search_index=1, + db=db, + end_to_end_search_fixture=end_to_end_search_fixture, + ) class TestExplain: @@ -2000,17 +2065,12 @@ def expected(c): class TestRebuildSearchIndexScript: - def test_do_run(self, db: DatabaseTransactionFixture): - class MockSearchIndex: - def setup_index(self): - # This is where the search index is deleted and recreated. - self.setup_index_called = True - - def bulk_update(self, works): - self.bulk_update_called_with = list(works) - return works, [] - - index = MockSearchIndex() + def test_do_run( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): + index = external_search_fake_fixture.external_search work = db.work(with_license_pool=True) work2 = db.work(with_license_pool=True) wcr = WorkCoverageRecord @@ -2031,8 +2091,9 @@ def bulk_update(self, works): [progress] = script.do_run() # The mock methods were called with the values we expect. 
- assert True == index.setup_index_called - assert {work, work2} == set(index.bulk_update_called_with) + assert {work.id, work2.id} == set( + map(lambda d: d["_id"], external_search_fake_fixture.search.documents_all()) + ) # The script returned a list containing a single # CoverageProviderProgress object containing accurate @@ -2079,20 +2140,27 @@ def test_do_run(self, db: DatabaseTransactionFixture): class TestUpdateLaneSizeScript: - def test_do_run( - self, - db, - external_search_patch_fixture: ExternalSearchPatchFixture, - ): + def test_do_run(self, db, end_to_end_search_fixture: EndToEndSearchFixture): + end_to_end_search_fixture.external_search_index.start_migration().finish() + lane = db.lane() lane.size = 100 - UpdateLaneSizeScript(db.session).do_run(cmd_args=[]) + UpdateLaneSizeScript( + db.session, + search_index_client=end_to_end_search_fixture.external_search_index, + ).do_run(cmd_args=[]) assert 0 == lane.size - def test_should_process_lane(self, db: DatabaseTransactionFixture): + def test_should_process_lane( + self, + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, + ): """Only Lane objects can have their size updated.""" lane = db.lane() - script = UpdateLaneSizeScript(db.session) + script = UpdateLaneSizeScript( + db.session, search_index_client=external_search_fake_fixture.external_search + ) assert True == script.should_process_lane(lane) worklist = WorkList() @@ -2101,14 +2169,19 @@ def test_should_process_lane(self, db: DatabaseTransactionFixture): def test_site_configuration_has_changed( self, db: DatabaseTransactionFixture, - external_search_patch_fixture: ExternalSearchPatchFixture, + end_to_end_search_fixture: EndToEndSearchFixture, ): + end_to_end_search_fixture.external_search_index.start_migration().finish() + library = db.default_library() lane1 = db.lane() lane2 = db.lane() # Run the script to create all the default config settings. 
- UpdateLaneSizeScript(db.session).do_run(cmd_args=[]) + UpdateLaneSizeScript( + db.session, + search_index_client=end_to_end_search_fixture.external_search_index, + ).do_run(cmd_args=[]) # Set the lane sizes lane1.size = 100 diff --git a/tests/fixtures/api_controller.py b/tests/fixtures/api_controller.py index d08c6456f7..43c5779fe2 100644 --- a/tests/fixtures/api_controller.py +++ b/tests/fixtures/api_controller.py @@ -331,7 +331,11 @@ def add_works(self, works: list[WorkSpec]): setattr(self, spec.variable_name, work) work.license_pools[0].collection = self.collection self.works.append(work) - self.manager.external_search.bulk_update(self.works) + + self.manager.external_search.search_service().index_submit_documents( + self.manager.external_search._search_write_pointer, [self.works] + ) + self.manager.external_search.mock_query_works_multi(self.works) def assert_bad_search_index_gives_problem_detail(self, test_function): """Helper method to test that a controller method serves a problem diff --git a/tests/fixtures/search.py b/tests/fixtures/search.py index 2bbda0463a..0936d273aa 100644 --- a/tests/fixtures/search.py +++ b/tests/fixtures/search.py @@ -1,20 +1,14 @@ import logging import os -from typing import Any, Iterable, List, Optional -from unittest import mock +from typing import Iterable, List import pytest -from _pytest.fixtures import FixtureRequest - -from core import external_search -from core.external_search import ( - ExternalSearchIndex, - MockExternalSearchIndex, - SearchIndexCoverageProvider, -) +from opensearchpy import OpenSearch + +from core.external_search import ExternalSearchIndex, SearchIndexCoverageProvider from core.model import ExternalIntegration, Work -from tests.core.mock import SearchClientForTesting from tests.fixtures.database import DatabaseTransactionFixture +from tests.mocks.search import SearchServiceFake class ExternalSearchFixture: @@ -27,27 +21,19 @@ class ExternalSearchFixture: to ensure that it works well overall, with a realistic index. 
""" - SEARCH_TEST_URLS = { - ExternalSearchIndex.SEARCH_VERSION_OS1_X: os.environ.get( - "SIMPLIFIED_TEST_OPENSEARCH", "http://localhost:9200" - ), - } - - indexes: List[Any] integration: ExternalIntegration - search: Optional[SearchClientForTesting] db: DatabaseTransactionFixture - version: str + search: OpenSearch + _indexes_created: List[str] + + def __init__(self): + self._indexes_created = [] + self._logger = logging.getLogger(ExternalSearchFixture.__name__) @classmethod - def create( - cls, db: DatabaseTransactionFixture, testing_version: str - ) -> "ExternalSearchFixture": + def create(cls, db: DatabaseTransactionFixture) -> "ExternalSearchFixture": fixture = ExternalSearchFixture() fixture.db = db - fixture.indexes = [] - fixture.version = testing_version - fixture.integration = db.external_integration( ExternalIntegration.OPENSEARCH, goal=ExternalIntegration.SEARCH_GOAL, @@ -55,38 +41,35 @@ def create( settings={ ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY: "test_index", ExternalSearchIndex.TEST_SEARCH_TERM_KEY: "test_search_term", - ExternalSearchIndex.SEARCH_VERSION: fixture.version, }, ) - - try: - fixture.search = SearchClientForTesting(db.session, version=testing_version) - except Exception as e: - fixture.search = None - logging.error( - "Unable to set up opensearch index, search tests will be skipped.", - exc_info=e, - ) + fixture.search = OpenSearch(fixture.url, use_ssl=False, timeout=20, maxsize=25) return fixture @property def url(self) -> str: - return self.SEARCH_TEST_URLS[self.version] + env = os.environ.get("SIMPLIFIED_TEST_OPENSEARCH") + if env is None: + raise OSError("SIMPLIFIED_TEST_OPENSEARCH is not defined.") + return env + + def record_index(self, name: str): + self._logger.info(f"Recording index {name} for deletion") + self._indexes_created.append(name) def close(self): - if self.search: - # Delete the works_index, which is almost always created. - if self.search.works_index: - self.search.indices.delete(self.search.works_index, ignore=[404]) - # Delete any other indexes created over the course of the test. 
- for index in self.indexes: - self.search.indices.delete(index, ignore=[404]) - ExternalSearchIndex.reset() - - def setup_index(self, new_index): - """Create an index and register it to be destroyed during teardown.""" - self.search.setup_index(new_index=new_index) - self.indexes.append(new_index) + for index in self._indexes_created: + try: + self._logger.info(f"Deleting index {index}") + self.search.indices.delete(index) + except Exception as e: + self._logger.info(f"Failed to delete index {index}: {e}") + + # Force test index deletion + self.search.indices.delete("test_index*") + self._logger.info("Waiting for operations to complete.") + self.search.indices.refresh() + return None def default_work(self, *args, **kwargs): """Convenience method to create a work with a license pool in the default collection.""" @@ -94,26 +77,23 @@ def default_work(self, *args, **kwargs): *args, with_license_pool=True, collection=self.db.default_collection(), - **kwargs + **kwargs, ) work.set_presentation_ready() return work + def init_indices(self): + client = ExternalSearchIndex(self.db.session) + client.initialize_indices() -@pytest.fixture( - scope="function", - params=[ - ExternalSearchIndex.SEARCH_VERSION_OS1_X, - ], -) + +@pytest.fixture(scope="function") def external_search_fixture( - request: FixtureRequest, db: DatabaseTransactionFixture, ) -> Iterable[ExternalSearchFixture]: """Ask for an external search system.""" """Note: You probably want EndToEndSearchFixture instead.""" - version = request.param - data = ExternalSearchFixture.create(db, version) + data = ExternalSearchFixture.create(db) yield data data.close() @@ -123,13 +103,18 @@ class EndToEndSearchFixture: """Tests are expected to call the `populate()` method to populate the fixture with test-specific data.""" external_search: ExternalSearchFixture + external_search_index: ExternalSearchIndex + db: DatabaseTransactionFixture + + def __init__(self): + self._logger = logging.getLogger(EndToEndSearchFixture.__name__) @classmethod - def create( - cls, transaction: DatabaseTransactionFixture, test_version: str - ) -> "EndToEndSearchFixture": + def create(cls, transaction: DatabaseTransactionFixture) -> "EndToEndSearchFixture": data = EndToEndSearchFixture() - data.external_search = ExternalSearchFixture.create(transaction, test_version) + data.db = transaction + data.external_search = ExternalSearchFixture.create(transaction) + data.external_search_index = ExternalSearchIndex(transaction.session) return data def populate_search_index(self): @@ -143,8 +128,8 @@ def populate_search_index(self): # Add all the works created in the setup to the search index. 
SearchIndexCoverageProvider( self.external_search.db.session, - search_index_client=self.external_search.search, - ).run_once_and_update_timestamp() + search_index_client=self.external_search_index, + ).run() self.external_search.search.indices.refresh() @staticmethod @@ -199,7 +184,7 @@ def expect_results( if isinstance(expect, Work): expect = [expect] should_be_ordered = kwargs.pop("ordered", True) - hits = self.external_search.search.query_works( + hits = self.external_search_index.query_works( query_string, filter, pagination, debug=True, **kwargs ) @@ -222,7 +207,7 @@ def expect_results_multi(self, expect, queries, **kwargs): """ should_be_ordered = kwargs.pop("ordered", True) resultset = list( - self.external_search.search.query_works_multi(queries, debug=True, **kwargs) + self.external_search_index.query_works_multi(queries, debug=True, **kwargs) ) for i, expect_one_query in enumerate(expect): hits = resultset[i] @@ -257,54 +242,55 @@ def _compare_hits(self, expect, hits, query_args, should_be_ordered=True, **kwar # filter into count_works() we'll get all the results we # got from query_works(). Take the opportunity to verify # that count_works() gives the right answer. - count = self.external_search.search.count_works(filter) + count = self.external_search_index.count_works(filter) assert count == len(expect) def close(self): + for index in self.external_search_index.search_service().indexes_created(): + self.external_search.record_index(index) + self.external_search.close() -@pytest.fixture( - scope="function", - params=[ - ExternalSearchIndex.SEARCH_VERSION_OS1_X, - ], -) +@pytest.fixture(scope="function") def end_to_end_search_fixture( - request: FixtureRequest, db: DatabaseTransactionFixture, ) -> Iterable[EndToEndSearchFixture]: """Ask for an external search system that can be populated with data for end-to-end tests.""" - data = EndToEndSearchFixture.create(db, request.param) - yield data - data.close() - + data = EndToEndSearchFixture.create(db) + try: + yield data + except Exception: + raise + finally: + data.close() -class ExternalSearchPatchFixture: - """A class that represents the fact that the external search class has been patched with a mock.""" - search_mock: Any +class ExternalSearchFixtureFake: + integration: ExternalIntegration + db: DatabaseTransactionFixture + search: SearchServiceFake + external_search: ExternalSearchIndex @pytest.fixture(scope="function") -def external_search_patch_fixture(request) -> Iterable[ExternalSearchPatchFixture]: - """Ask for the external search class to be patched with a mock.""" - fixture = ExternalSearchPatchFixture() - - # Only setup the opensearch mock if the opensearch mark isn't set - opensearch_mark = request.node.get_closest_marker("opensearch") - if opensearch_mark is not None: - raise RuntimeError( - "This fixture should not be combined with @pytest.mark.opensearch" - ) - - fixture.search_mock = mock.patch( - external_search.__name__ + ".ExternalSearchIndex", - MockExternalSearchIndex, +def external_search_fake_fixture( + db: DatabaseTransactionFixture, +) -> ExternalSearchFixtureFake: + """Ask for an external search system that can be populated with data for end-to-end tests.""" + data = ExternalSearchFixtureFake() + data.db = db + data.integration = db.external_integration( + ExternalIntegration.OPENSEARCH, + goal=ExternalIntegration.SEARCH_GOAL, + url="http://does-not-exist.com/", + settings={ + ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY: "test_index", + ExternalSearchIndex.TEST_SEARCH_TERM_KEY: "a search term", + }, ) - 
fixture.search_mock.start() - - yield fixture - - if fixture.search_mock: - fixture.search_mock.stop() + data.search = SearchServiceFake() + data.external_search = ExternalSearchIndex( + _db=db.session, custom_client_service=data.search + ) + return data diff --git a/tests/mocks/__init__.py b/tests/mocks/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/mocks/search.py b/tests/mocks/search.py new file mode 100644 index 0000000000..194ddb557c --- /dev/null +++ b/tests/mocks/search.py @@ -0,0 +1,293 @@ +from __future__ import annotations + +from enum import Enum +from typing import Dict, Iterable, List, Optional +from unittest.mock import MagicMock + +from opensearch_dsl import MultiSearch, Search +from opensearch_dsl.response.hit import Hit +from opensearchpy import OpenSearchException + +from core.external_search import ExternalSearchIndex +from core.model import Work +from core.model.work import Work +from core.search.revision import SearchSchemaRevision +from core.search.revision_directory import SearchRevisionDirectory +from core.search.service import ( + SearchService, + SearchServiceFailedDocument, + SearchWritePointer, +) + + +class SearchServiceFailureMode(Enum): + """The simulated failure modes for the search service.""" + + NOT_FAILING = 0 + FAIL_INDEXING_DOCUMENTS = 1 + FAIL_INDEXING_DOCUMENTS_TIMEOUT = 3 + FAIL_ENTIRELY = 2 + + +class SearchServiceFake(SearchService): + """A search service that doesn't speak to a real service.""" + + _documents_by_index: Dict[str, List[dict]] + _failing: SearchServiceFailureMode + _search_client: Search + _multi_search_client: MultiSearch + _indexes_created: List[str] + _document_submission_attempts: List[dict] + + def __init__(self): + self.base_name = "test_index" + self._failing = SearchServiceFailureMode.NOT_FAILING + self._documents_by_index = {} + self._read_pointer: Optional[str] = None + self._write_pointer: Optional[SearchWritePointer] = None + self._search_client = Search(using=MagicMock()) + self._multi_search_client = MultiSearch(using=MagicMock()) + self._indexes_created = [] + self._document_submission_attempts = [] + + @property + def document_submission_attempts(self) -> List[dict]: + return self._document_submission_attempts + + def indexes_created(self) -> List[str]: + return self._indexes_created + + def _fail_if_necessary(self): + if self._failing == SearchServiceFailureMode.FAIL_ENTIRELY: + raise OpenSearchException("Search index is on fire.") + + def set_failing_mode(self, mode: SearchServiceFailureMode): + self._failing = mode + + def documents_for_index(self, index_name: str) -> List[dict]: + self._fail_if_necessary() + + if not (index_name in self._documents_by_index): + return [] + return self._documents_by_index[index_name] + + def documents_all(self) -> List[dict]: + self._fail_if_necessary() + + results: List[dict] = [] + for documents in self._documents_by_index.values(): + for document in documents: + results.append(document) + + return results + + def refresh(self): + self._fail_if_necessary() + return + + def read_pointer_name(self) -> str: + self._fail_if_necessary() + return f"{self.base_name}-search-read" + + def write_pointer_name(self) -> str: + self._fail_if_necessary() + return f"{self.base_name}-search-write" + + def read_pointer(self) -> Optional[str]: + self._fail_if_necessary() + return self._read_pointer + + def write_pointer(self) -> Optional[SearchWritePointer]: + self._fail_if_necessary() + return self._write_pointer + + def create_empty_index(self) -> None: + 
self._fail_if_necessary() + self._indexes_created.append(f"{self.base_name}-empty") + return None + + def read_pointer_set(self, revision: SearchSchemaRevision) -> None: + self._fail_if_necessary() + self._read_pointer = f"{revision.name_for_indexed_pointer(self.base_name)}" + + def index_set_populated(self, revision: SearchSchemaRevision) -> None: + self._fail_if_necessary() + + def read_pointer_set_empty(self) -> None: + self._fail_if_necessary() + self._read_pointer = f"{self.base_name}-empty" + + def index_create(self, revision: SearchSchemaRevision) -> None: + self._fail_if_necessary() + self._indexes_created.append(revision.name_for_index(self.base_name)) + return None + + def index_is_populated(self, revision: SearchSchemaRevision) -> bool: + self._fail_if_necessary() + return True + + def index_set_mapping(self, revision: SearchSchemaRevision) -> None: + self._fail_if_necessary() + + def index_submit_documents( + self, pointer: str, documents: Iterable[dict] + ) -> List[SearchServiceFailedDocument]: + self._fail_if_necessary() + + _should_fail = False + _should_fail = ( + _should_fail + or self._failing == SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS + ) + _should_fail = ( + _should_fail + or self._failing == SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS_TIMEOUT + ) + + if _should_fail: + results: List[SearchServiceFailedDocument] = [] + for document in documents: + self._document_submission_attempts.append(document) + if self._failing == SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS: + _error = SearchServiceFailedDocument( + document["_id"], + error_message="There was an error!", + error_status=500, + error_exception="Exception", + ) + else: + _error = SearchServiceFailedDocument( + document["_id"], + error_message="Connection Timeout!", + error_status=0, + error_exception="ConnectionTimeout", + ) + results.append(_error) + + return results + + if not (pointer in self._documents_by_index): + self._documents_by_index[pointer] = [] + + for document in documents: + self._documents_by_index[pointer].append(document) + + return [] + + def write_pointer_set(self, revision: SearchSchemaRevision) -> None: + self._fail_if_necessary() + self._write_pointer = SearchWritePointer(self.base_name, revision.version) + + def index_clear_documents(self, pointer: str): + self._fail_if_necessary() + if pointer in self._documents_by_index: + self._documents_by_index[pointer] = [] + + def search_client(self, write=False) -> Search: + return self._search_client.index( + self.read_pointer_name() if not write else self.write_pointer_name() + ) + + def search_multi_client(self, write=False) -> MultiSearch: + return self._multi_search_client.index( + self.read_pointer_name() if not write else self.write_pointer_name() + ) + + def index_remove_document(self, pointer: str, id: int): + self._fail_if_necessary() + if pointer in self._documents_by_index: + items = self._documents_by_index[pointer] + to_remove = [] + for item in items: + if item.get("_id") == id: + to_remove.append(item) + for item in to_remove: + items.remove(item) + + def is_pointer_empty(*args): + return False + + +def fake_hits(works: List[Work]): + return [ + Hit( + { + "_source": {"work_id": work.id}, + "_sort": [work.sort_title, work.sort_author, work.id], + } + ) + for work in works + ] + + +class ExternalSearchIndexFake(ExternalSearchIndex): + """A fake search index, to be used where we do not care what the search does, just that the results match what we expect + Eg. 
Testing a Feed object doesn't need to test the search index, it just needs the search index to report correctly + """ + + def __init__( + self, + _db, + url: str | None = None, + test_search_term: str | None = None, + revision_directory: SearchRevisionDirectory | None = None, + version: int | None = None, + ): + super().__init__( + _db, url, test_search_term, revision_directory, version, SearchServiceFake() + ) + + self._mock_multi_works: List[Dict] = [] + self._mock_count_works = 0 + self._queries: List[tuple] = [] + + def mock_query_works(self, works: List[Work]): + self.mock_query_works_multi(works) + + def mock_query_works_multi(self, works: List[Work], *args: List[Work]): + self._mock_multi_works = [fake_hits(works)] + self._mock_multi_works.extend([fake_hits(arg_works) for arg_works in args]) + + def query_works_multi(self, queries, debug=False): + result = [] + for ix, (query_string, filter, pagination) in enumerate(queries): + self._queries.append((query_string, filter, pagination)) + this_result = [] + if not self._mock_multi_works: + pagination.page_loaded([]) + # Mock Pagination + elif len(self._mock_multi_works) > ix: + this_result = self._mock_multi_works[ix] + + # sortkey pagination, if it exists + # Sorting must be done by the test case + if getattr(pagination, "last_item_on_previous_page", None): + for ix, hit in enumerate(this_result): + if hit.meta["sort"] == pagination.last_item_on_previous_page: + this_result = this_result[ix + 1 : ix + 1 + pagination.size] + break + else: + this_result = [] + else: + # Else just assume offset pagination + this_result = this_result[ + pagination.offset : pagination.offset + pagination.size + ] + + pagination.page_loaded(this_result) + result.append(this_result) + else: + # Catch all + pagination.page_loaded([]) + + return result + + def mock_count_works(self, count): + self._mock_count_works = count + + def count_works(self, filter): + """So far this is not required in the tests""" + return self._mock_count_works + + def __repr__(self) -> str: + return f"Expected Results({id(self)}): {self._mock_multi_works}" From 49a5d4bbb8cfca9e9e8b20f1f113b2382d0bf9c9 Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Thu, 21 Sep 2023 14:52:29 -0400 Subject: [PATCH 051/262] Add `.csv` extension for better experience. 
(#1398) --- core/jobs/playtime_entries.py | 9 +++++---- tests/core/jobs/test_playtime_entries.py | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/core/jobs/playtime_entries.py b/core/jobs/playtime_entries.py index b68378446b..06313e5bab 100644 --- a/core/jobs/playtime_entries.py +++ b/core/jobs/playtime_entries.py @@ -130,13 +130,14 @@ def do_run(self): email_subject = ( f"Playtime Summaries {formatted_start_date} - {formatted_until_date}" ) - attachment_name = ( - f"playtime-summary-{formatted_start_date}-{formatted_until_date}" - ) + attachment_extension = "csv" + attachment_name = f"playtime-summary-{formatted_start_date}-{formatted_until_date}.{attachment_extension}" # Write to a temporary file so we don't overflow the memory with TemporaryFile( - "w+", prefix=f"playtimereport{formatted_until_date}", suffix="csv" + "w+", + prefix=f"playtimereport{formatted_until_date}", + suffix=attachment_extension, ) as temp: # Write the data as a CSV writer = csv.writer(temp) diff --git a/tests/core/jobs/test_playtime_entries.py b/tests/core/jobs/test_playtime_entries.py index 0cef68e379..fabcee2e36 100644 --- a/tests/core/jobs/test_playtime_entries.py +++ b/tests/core/jobs/test_playtime_entries.py @@ -294,7 +294,7 @@ def test_do_run(self, db: DatabaseTransactionFixture): receivers=["reporting@test.email"], text="", attachments={ - f"playtime-summary-{cutoff}-{until}": "" + f"playtime-summary-{cutoff}-{until}.csv": "" }, # Mock objects do not write data ) From 6fe712ae0be719fe7583bb80f83013e35568ecad Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Sep 2023 12:21:45 +0000 Subject: [PATCH 052/262] Bump cryptography from 41.0.3 to 41.0.4 (#1400) --- poetry.lock | 48 ++++++++++++++++++++++++------------------------ 1 file changed, 24 insertions(+), 24 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7432a84cc4..78990158dd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -808,34 +808,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.3" +version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = 
"sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = 
"sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, ] [package.dependencies] From ac78b474767692348e7ea344ead36f8e5df184dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Sep 2023 14:32:22 +0000 Subject: [PATCH 053/262] Bump nameparser from 1.1.2 to 1.1.3 (#1399) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 78990158dd..4c3ea44631 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2354,13 +2354,13 @@ files = [ [[package]] name = "nameparser" -version = "1.1.2" +version = "1.1.3" description = "A simple Python module for parsing human names into their individual components." optional = false python-versions = "*" files = [ - {file = "nameparser-1.1.2-py2.py3-none-any.whl", hash = "sha256:ea2e01d1d9d04c0648be230f161f27316a1b5be431a1cc64e8799fac548fb3bc"}, - {file = "nameparser-1.1.2.tar.gz", hash = "sha256:f4b6c7c1048d528bd6aa2b27cf42a06447d2b31e45a95b20449513078f1d86ef"}, + {file = "nameparser-1.1.3-py2.py3-none-any.whl", hash = "sha256:08ccda98681d59751c82052d52f185bc52f99d43e87d46b85c015a9096ecfa66"}, + {file = "nameparser-1.1.3.tar.gz", hash = "sha256:aa2400ad71ccf8070675b40311a257c934659f91854b154e1ba6c264761c049d"}, ] [[package]] @@ -4227,4 +4227,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "4cf46205d47e3b72a365ff6f418785517c863a8b2770616ff1cebc61a316a21b" +content-hash = "efb9ed885d265cf45f832527246ad2393306e46e0c8ffe72da9af0dab19dadfa" diff --git a/pyproject.toml b/pyproject.toml index 0f7a967861..58081cafa6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -183,7 +183,7 @@ levenshtein = "~0.21" lxml = "^4.9.3" money = "1.3.0" multipledispatch = "0.6.0" -nameparser = "1.1.2" # nameparser is for author name manipulations +nameparser = "^1.1" # nameparser is for author name manipulations nltk = "3.8.1" # nltk is a textblob dependency. 
opensearch-dsl = "~1.0" opensearch-py = "~1.1" From 5e09243b26b7d42e2f7606efc24388f803fb4d34 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 22 Sep 2023 17:39:35 +0000 Subject: [PATCH 054/262] Bump dunamai from 1.18.0 to 1.18.1 (#1401) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4c3ea44631..43681370a1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -922,13 +922,13 @@ tls = ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=17.5.0)"] [[package]] name = "dunamai" -version = "1.18.0" +version = "1.18.1" description = "Dynamic version generation" optional = false python-versions = ">=3.5,<4.0" files = [ - {file = "dunamai-1.18.0-py3-none-any.whl", hash = "sha256:f9284a9f4048f0b809d11539896e78bde94c05b091b966a04a44ab4c48df03ce"}, - {file = "dunamai-1.18.0.tar.gz", hash = "sha256:5200598561ea5ba956a6174c36e402e92206c6a6aa4a93a6c5cb8003ee1e0997"}, + {file = "dunamai-1.18.1-py3-none-any.whl", hash = "sha256:ee7b042f7a687fa04fc383258eb93bd819c7bd8aec62e0974f3c69747e5958f2"}, + {file = "dunamai-1.18.1.tar.gz", hash = "sha256:5e9a91e43d16bb56fa8fcddcf92fa31b2e1126e060c3dcc8d094d9b508061f9d"}, ] [package.dependencies] From 778ed79fb828d319ef54d26163bc6c4431edfcdf Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 22 Sep 2023 16:02:01 -0300 Subject: [PATCH 055/262] Move storage integrations to be configured as services (PP-95) (#1377) ## Description - Add a dependency injection container, that contains services used by our controllers. - Create framework for adding new services, and configuring them via environment variables, validated via pydantic. - Remove the existing mirror integration code, and replace it with a new storage service. - Modify the marc exporter and s3 analytics provider to use the new storage service. - Add the new environment variables for the storage service to README. ## Motivation and Context - JIRA: PP-95 - The motivation here is to allow services that don't need to be configured in the admin UI to be configured via environment variables, and instantiated via a dependency injection container, so we are able to easily mock the services for testing. - This uses the storage service as a test case for the new services architecture, and implements it as a s3 storage service. 
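As a rough illustration of the pattern this change describes (and not the actual contents of `core/service/storage/configuration.py` or `core/service/storage/s3.py`), the storage settings can be modeled along these lines, assuming pydantic v1-style `BaseSettings` and the `PALACE_STORAGE_*` environment variables documented in the README changes below; the names here are illustrative only:

```python
from typing import Optional

import boto3
from pydantic import BaseSettings


class StorageConfiguration(BaseSettings):
    """Settings validated from PALACE_STORAGE_* environment variables."""

    access_key: Optional[str] = None
    secret_key: Optional[str] = None
    region: Optional[str] = None
    endpoint_url: Optional[str] = None
    public_access_bucket: Optional[str] = None
    analytics_bucket: Optional[str] = None
    url_template: str = "https://{bucket}.s3.{region}.amazonaws.com/{key}"

    class Config:
        # Maps e.g. PALACE_STORAGE_ACCESS_KEY -> access_key
        env_prefix = "PALACE_STORAGE_"


def s3_client(config: StorageConfiguration):
    # When no explicit credentials are configured, boto3 falls back to its
    # normal credential resolution chain, as noted in the README.
    return boto3.client(
        "s3",
        aws_access_key_id=config.access_key,
        aws_secret_access_key=config.secret_key,
        region_name=config.region,
        endpoint_url=config.endpoint_url,
    )
```

A service container can then build the storage service from this settings object once at startup, which is what makes it straightforward to substitute a mock storage service in tests.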
--- README.md | 37 +- api/admin/config.py | 2 +- api/admin/controller/__init__.py | 2 - api/admin/controller/analytics_services.py | 16 +- api/admin/controller/catalog_services.py | 47 +- api/admin/controller/storage_services.py | 72 - api/admin/routes.py | 16 - api/app.py | 4 +- api/circulation.py | 3 +- api/controller.py | 6 +- api/discovery/registration_script.py | 2 +- api/google_analytics_provider.py | 3 +- api/s3_analytics_provider.py | 174 +-- bin/odl2_import_monitor | 3 - bin/odl2_schema_validate | 3 - bin/odl_import_monitor | 3 - bin/opds2_import_monitor | 2 - bin/opds2_schema_validate | 2 - bin/opds_import_monitor | 2 - core/analytics.py | 14 +- core/local_analytics_provider.py | 4 +- core/marc.py | 103 +- core/mirror.py | 202 --- core/mock_analytics_provider.py | 11 +- core/model/cachedfeed.py | 11 +- core/model/collection.py | 5 +- core/model/configuration.py | 10 - core/model/resource.py | 2 +- core/s3.py | 881 ----------- core/scripts.py | 9 +- core/service/configuration.py | 52 + core/service/container.py | 37 + core/service/storage/configuration.py | 38 + core/service/storage/container.py | 34 + core/service/storage/s3.py | 232 +++ docker-compose.yml | 19 +- .../startup/01_set_simplified_environment.sh | 2 +- poetry.lock | 967 +++++++----- pyproject.toml | 10 +- scripts.py | 27 +- .../admin/controller/test_catalog_services.py | 98 -- .../admin/controller/test_storage_services.py | 27 - tests/api/admin/test_routes.py | 23 - tests/api/conftest.py | 1 - tests/api/mockapi/circulation.py | 10 + tests/api/test_controller_cm.py | 2 +- tests/api/test_controller_opdsfeed.py | 2 +- tests/api/test_google_analytics_provider.py | 15 +- tests/api/test_opds2.py | 4 +- tests/api/test_scripts.py | 235 +-- tests/core/conftest.py | 1 + tests/core/models/test_collection.py | 35 +- tests/core/models/test_configuration.py | 14 +- tests/core/service/__init__.py | 0 tests/core/service/storage/__init__.py | 0 .../service/storage/test_configuration.py | 47 + tests/core/service/storage/test_s3.py | 416 +++++ tests/core/service/test_configuration.py | 123 ++ tests/core/test_local_analytics_provider.py | 26 +- tests/core/test_marc.py | 288 ++-- tests/core/test_marc2.py | 15 - tests/core/test_mirror_uploader.py | 240 --- tests/core/test_s3.py | 1356 ----------------- tests/core/test_s3_analytics_provider.py | 364 ++--- tests/fixtures/s3.py | 284 ++-- tests/fixtures/services.py | 42 + tox.ini | 9 +- 67 files changed, 2378 insertions(+), 4368 deletions(-) delete mode 100644 api/admin/controller/storage_services.py delete mode 100644 core/mirror.py delete mode 100644 core/s3.py create mode 100644 core/service/configuration.py create mode 100644 core/service/container.py create mode 100644 core/service/storage/configuration.py create mode 100644 core/service/storage/container.py create mode 100644 core/service/storage/s3.py delete mode 100644 tests/api/admin/controller/test_storage_services.py create mode 100644 tests/core/service/__init__.py create mode 100644 tests/core/service/storage/__init__.py create mode 100644 tests/core/service/storage/test_configuration.py create mode 100644 tests/core/service/storage/test_s3.py create mode 100644 tests/core/service/test_configuration.py delete mode 100644 tests/core/test_marc2.py delete mode 100644 tests/core/test_mirror_uploader.py delete mode 100644 tests/core/test_s3.py create mode 100644 tests/fixtures/services.py diff --git a/README.md b/README.md index 3b6ee6d953..c9bd94fee6 100644 --- a/README.md +++ b/README.md @@ -151,9 +151,9 @@ CREATE USER palace with 
password 'test'; grant all privileges on database circ to palace; ``` -#### Environment variables +### Environment variables -##### Database +#### Database To let the application know which database to use, set the `SIMPLIFIED_PRODUCTION_DATABASE` environment variable. @@ -161,7 +161,32 @@ To let the application know which database to use, set the `SIMPLIFIED_PRODUCTIO export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@localhost:5432/circ" ``` -##### Patron `Basic Token` authentication +#### Storage Service + +The application optionally uses a s3 compatible storage service to store files. To configure the application to use +a storage service, you can set the following environment variables: + +- `PALACE_STORAGE_PUBLIC_ACCESS_BUCKET`: Required if you want to use the storage service to serve files directly to + users. This is the name of the bucket that will be used to serve files. This bucket should be configured to allow + public access to the files. +- `PALACE_STORAGE_ANALYTICS_BUCKET`: Required if you want to use the storage service to store analytics data. +- `PALACE_STORAGE_ACCESS_KEY`: The access key (optional). + - If this key is set it will be passed to boto3 when connecting to the storage service. + - If it is not set boto3 will attempt to find credentials as outlined in their + [documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials). +- `PALACE_STORAGE_SECRET_KEY`: The secret key (optional). +- `PALACE_STORAGE_REGION`: The AWS region of the storage service (optional). +- `PALACE_STORAGE_ENDPOINT_URL`: The endpoint of the storage service (optional). This is used if you are using a + s3 compatible storage service like [minio](https://min.io/). +- `PALACE_STORAGE_URL_TEMPLATE`: The url template to use when generating urls for files stored in the storage service + (optional). + - The default value is `https://{bucket}.s3.{region}.amazonaws.com/{key}`. + - The following variables can be used in the template: + - `{bucket}`: The name of the bucket. + - `{key}`: The key of the file. + - `{region}`: The region of the storage service. + +#### Patron `Basic Token` authentication Enables/disables patron "basic token" authentication through setting the designated environment variable to any (case-insensitive) value of "true"/"yes"/"on"/"1" or "false"/"no"/"off"/"0", respectively. @@ -172,7 +197,7 @@ If the value is the empty string or the variable is not present in the environme export SIMPLIFIED_ENABLE_BASIC_TOKEN_AUTH=true ``` -##### Firebase Cloud Messaging +#### Firebase Cloud Messaging For Firebase Cloud Messaging (FCM) support (e.g., for notifications), `one` (and only one) of the following should be set: - `SIMPLIFIED_FCM_CREDENTIALS_JSON` - the JSON-format Google Cloud Platform (GCP) service account key or @@ -191,7 +216,7 @@ export SIMPLIFIED_FCM_CREDENTIALS_FILE="/opt/credentials/fcm_credentials.json" The FCM credentials can be downloaded once a Google Service account has been created. 
More details in the [FCM documentation](https://firebase.google.com/docs/admin/setup#set-up-project-and-service-account) -### Email sending +#### Email To use the features that require sending emails, for example to reset the password for logged-out users, you will need to have a working SMTP server and set some environment variables: @@ -204,7 +229,7 @@ export SIMPLIFIED_MAIL_PASSWORD=password export SIMPLIFIED_MAIL_SENDER=sender@example.com ``` -### Running the Application +## Running the Application As mentioned in the [pyenv](#pyenv) section, the `poetry` tool should be executed under a virtual environment in order to guarantee that it will use the Python version you expect. To use a particular Python version, diff --git a/api/admin/config.py b/api/admin/config.py index bf64393caf..a8ddac7736 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -18,7 +18,7 @@ class Configuration: APP_NAME = "Palace Collection Manager" PACKAGE_NAME = "@thepalaceproject/circulation-admin" - PACKAGE_VERSION = "1.8.0" + PACKAGE_VERSION = "1.9.0" STATIC_ASSETS = { "admin_js": "circulation-admin.js", diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index 32e82b095f..9f8912d6a4 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -50,7 +50,6 @@ def setup_admin_controllers(manager: CirculationManager): from api.admin.controller.sitewide_settings import ( SitewideConfigurationSettingsController, ) - from api.admin.controller.storage_services import StorageServicesController from api.admin.controller.timestamps import TimestampsController from api.admin.controller.view import ViewController from api.admin.controller.work_editor import WorkController @@ -100,7 +99,6 @@ def setup_admin_controllers(manager: CirculationManager): SearchServiceSelfTestsController(manager) ) manager.admin_search_services_controller = SearchServicesController(manager) - manager.admin_storage_services_controller = StorageServicesController(manager) manager.admin_catalog_services_controller = CatalogServicesController(manager) manager.admin_announcement_service = AnnouncementSettings(manager) manager.admin_search_controller = AdminSearchController(manager) diff --git a/api/admin/controller/analytics_services.py b/api/admin/controller/analytics_services.py index f966fcdb78..6025813506 100644 --- a/api/admin/controller/analytics_services.py +++ b/api/admin/controller/analytics_services.py @@ -6,8 +6,7 @@ from api.google_analytics_provider import GoogleAnalyticsProvider from api.s3_analytics_provider import S3AnalyticsProvider from core.local_analytics_provider import LocalAnalyticsProvider -from core.model import ExternalIntegration, ExternalIntegrationLink -from core.s3 import S3UploaderConfiguration +from core.model import ExternalIntegration from core.util import first_or_default from core.util.problem_detail import ProblemDetail @@ -33,11 +32,6 @@ def update_protocol_settings(self): ] ) - if s3_analytics_provider: - s3_analytics_provider[ - "settings" - ] = S3AnalyticsProvider.get_storage_settings(self._db) - def process_analytics_services(self): if flask.request.method == "GET": return self.process_get() @@ -101,14 +95,6 @@ def process_post(self): service.name = name - external_integration_link = self._set_storage_external_integration_link( - service, - ExternalIntegrationLink.ANALYTICS, - S3UploaderConfiguration.ANALYTICS_BUCKET_KEY, - ) - if isinstance(external_integration_link, ProblemDetail): - return external_integration_link - if is_new: return 
Response(str(service.id), 201) else: diff --git a/api/admin/controller/catalog_services.py b/api/admin/controller/catalog_services.py index 1405aa798c..294a8358c6 100644 --- a/api/admin/controller/catalog_services.py +++ b/api/admin/controller/catalog_services.py @@ -6,15 +6,12 @@ from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_INTEGRATION, MISSING_SERVICE, MULTIPLE_SERVICES_FOR_LIBRARY, UNKNOWN_PROTOCOL, ) from core.marc import MARCExporter -from core.model import ExternalIntegration, get_one, get_one_or_create -from core.model.configuration import ExternalIntegrationLink -from core.s3 import S3UploaderConfiguration +from core.model import ExternalIntegration, get_one from core.util.problem_detail import ProblemDetail @@ -25,10 +22,6 @@ def __init__(self, manager): self.protocols = self._get_integration_protocols( service_apis, protocol_name_attr="NAME" ) - self.update_protocol_settings() - - def update_protocol_settings(self): - self.protocols[0]["settings"] = [MARCExporter.get_storage_settings(self._db)] def process_catalog_services(self): self.require_system_admin() @@ -42,7 +35,6 @@ def process_get(self): services = self._get_integration_info( ExternalIntegration.CATALOG_GOAL, self.protocols ) - self.update_protocol_settings() return dict( catalog_services=services, protocols=self.protocols, @@ -91,10 +83,6 @@ def process_post(self): if isinstance(result, ProblemDetail): return result - external_integration_link = self._set_external_integration_link(service) - if isinstance(external_integration_link, ProblemDetail): - return external_integration_link - library_error = self.check_libraries(service) if library_error: self._db.rollback() @@ -105,39 +93,6 @@ def process_post(self): else: return Response(str(service.id), 200) - def _set_external_integration_link(self, service): - """Either set or delete the external integration link between the - service and the storage integration. - """ - mirror_integration_id = flask.request.form.get("mirror_integration_id") - - # If no storage integration was selected, then delete the existing - # external integration link. 
- current_integration_link, ignore = get_one_or_create( - self._db, - ExternalIntegrationLink, - library_id=None, - external_integration_id=service.id, - purpose=ExternalIntegrationLink.MARC, - ) - - if mirror_integration_id == self.NO_MIRROR_INTEGRATION: - if current_integration_link: - self._db.delete(current_integration_link) - else: - storage_integration = get_one( - self._db, ExternalIntegration, id=mirror_integration_id - ) - # Only get storage integrations that have a MARC file option set - if ( - not storage_integration - or not storage_integration.setting( - S3UploaderConfiguration.MARC_BUCKET_KEY - ).value - ): - return MISSING_INTEGRATION - current_integration_link.other_integration_id = storage_integration.id - def validate_form_fields(self, protocol): """Verify that the protocol which the user has selected is in the list of recognized protocol options.""" diff --git a/api/admin/controller/storage_services.py b/api/admin/controller/storage_services.py deleted file mode 100644 index 5ef8cf681c..0000000000 --- a/api/admin/controller/storage_services.py +++ /dev/null @@ -1,72 +0,0 @@ -import flask -from flask import Response - -from api.admin.controller.settings import SettingsController -from api.admin.problem_details import CANNOT_CHANGE_PROTOCOL, MISSING_SERVICE - -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core.mirror import MirrorUploader # noqa: autoflake -from core.model import ExternalIntegration, get_one -from core.util.problem_detail import ProblemDetail - - -class StorageServicesController(SettingsController): - def __init__(self, manager): - super().__init__(manager) - self.goal = ExternalIntegration.STORAGE_GOAL - self.protocols = self._get_integration_protocols( - list(MirrorUploader.IMPLEMENTATION_REGISTRY.values()), - protocol_name_attr="NAME", - ) - - def process_services(self): - if flask.request.method == "GET": - return self.process_get() - else: - return self.process_post() - - def process_get(self): - services = self._get_integration_info(self.goal, self.protocols) - return dict(storage_services=services, protocols=self.protocols) - - def process_post(self): - protocol = flask.request.form.get("protocol") - name = flask.request.form.get("name") - is_new = False - protocol_error = self.validate_protocol() - if protocol_error: - return protocol_error - - id = flask.request.form.get("id") - if id: - # Find an existing service to edit - storage_service = get_one( - self._db, ExternalIntegration, id=id, goal=self.goal - ) - if not storage_service: - return MISSING_SERVICE - if protocol != storage_service.protocol: - return CANNOT_CHANGE_PROTOCOL - else: - # Create a new service - storage_service, is_new = self._create_integration( - self.protocols, protocol, self.goal - ) - if isinstance(storage_service, ProblemDetail): - self._db.rollback() - return storage_service - - protocol_error = self.set_protocols(storage_service, protocol, self.protocols) - - if protocol_error: - self._db.rollback() - return protocol_error - storage_service.name = name - - if is_new: - return Response(str(storage_service.id), 201) - else: - return Response(str(storage_service.id), 200) - - def process_delete(self, service_id): - return self._delete_integration(service_id, ExternalIntegration.STORAGE_GOAL) diff --git a/api/admin/routes.py b/api/admin/routes.py index 41c218cc98..900096adbb 100644 --- a/api/admin/routes.py +++ b/api/admin/routes.py @@ -493,22 +493,6 @@ def search_service_self_tests(identifier): ) 
-@app.route("/admin/storage_services", methods=["GET", "POST"]) -@returns_json_or_response_or_problem_detail -@requires_admin -@requires_csrf_token -def storage_services(): - return app.manager.admin_storage_services_controller.process_services() - - -@app.route("/admin/storage_service/", methods=["DELETE"]) -@returns_json_or_response_or_problem_detail -@requires_admin -@requires_csrf_token -def storage_service(service_id): - return app.manager.admin_storage_services_controller.process_delete(service_id) - - @app.route("/admin/catalog_services", methods=["GET", "POST"]) @returns_json_or_response_or_problem_detail @requires_admin diff --git a/api/app.py b/api/app.py index 018dda07d0..d380beed10 100644 --- a/api/app.py +++ b/api/app.py @@ -17,6 +17,7 @@ SessionManager, pg_advisory_lock, ) +from core.service.container import container_instance from core.util import LanguageCodes from core.util.cache import CachedData from scripts import InstanceInitializationScript @@ -72,8 +73,9 @@ def initialize_circulation_manager(): pass else: if getattr(app, "manager", None) is None: + container = container_instance() try: - app.manager = CirculationManager(app._db) + app.manager = CirculationManager(app._db, container) except Exception: logging.exception("Error instantiating circulation manager!") raise diff --git a/api/circulation.py b/api/circulation.py index 9765b0801e..12fc47a4ff 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -15,7 +15,6 @@ Generic, List, Literal, - Optional, Tuple, Type, TypeVar, @@ -25,7 +24,7 @@ from flask import Response from flask_babel import lazy_gettext as _ from pydantic import PositiveInt -from sqlalchemy.orm import Query, Session +from sqlalchemy.orm import Query from core.analytics import Analytics from core.config import CannotLoadConfiguration diff --git a/api/controller.py b/api/controller.py index 6b07889caf..4e1c90dd80 100644 --- a/api/controller.py +++ b/api/controller.py @@ -87,6 +87,7 @@ from core.opds2 import AcquisitonFeedOPDS2 from core.opensearch import OpenSearchDocument from core.query.playtime_entries import PlaytimeEntries +from core.service.container import Services from core.user_profile import ProfileController as CoreProfileController from core.util.authentication_for_opds import AuthenticationForOPDSDocument from core.util.datetime_helpers import utc_now @@ -166,7 +167,6 @@ from api.admin.controller.sitewide_settings import ( SitewideConfigurationSettingsController, ) - from api.admin.controller.storage_services import StorageServicesController from api.admin.controller.timestamps import TimestampsController from api.admin.controller.view import ViewController from api.admin.controller.work_editor import WorkController as AdminWorkController @@ -220,14 +220,14 @@ class CirculationManager: admin_logging_services_controller: LoggingServicesController admin_search_service_self_tests_controller: SearchServiceSelfTestsController admin_search_services_controller: SearchServicesController - admin_storage_services_controller: StorageServicesController admin_catalog_services_controller: CatalogServicesController admin_announcement_service: AnnouncementSettings admin_search_controller: AdminSearchController admin_view_controller: ViewController - def __init__(self, _db): + def __init__(self, _db, services: Services): self._db = _db + self.services = services self.site_configuration_last_update = ( Configuration.site_configuration_last_update(self._db, timeout=0) ) diff --git a/api/discovery/registration_script.py 
b/api/discovery/registration_script.py index 4d75cd7b5f..30c1bfd7c5 100644 --- a/api/discovery/registration_script.py +++ b/api/discovery/registration_script.py @@ -67,7 +67,7 @@ def do_run( # Set up an application context so we have access to url_for. from api.app import app - app.manager = manager or CirculationManager(self._db) + app.manager = manager or CirculationManager(self._db, self.services) base_url = ConfigurationSetting.sitewide( self._db, Configuration.BASE_URL_KEY ).value diff --git a/api/google_analytics_provider.py b/api/google_analytics_provider.py index eebf0a3a5d..09c82a72c8 100644 --- a/api/google_analytics_provider.py +++ b/api/google_analytics_provider.py @@ -6,6 +6,7 @@ from flask_babel import lazy_gettext as _ from core.model import ConfigurationSetting, ExternalIntegration, Session +from core.service.container import Services from core.util.http import HTTP from .config import CannotLoadConfiguration @@ -63,7 +64,7 @@ class GoogleAnalyticsProvider: {"key": TRACKING_ID, "label": _("Tracking ID"), "required": True}, ] - def __init__(self, integration, library=None): + def __init__(self, integration, services: Services, library=None): _db = Session.object_session(integration) if not library: raise CannotLoadConfiguration( diff --git a/api/s3_analytics_provider.py b/api/s3_analytics_provider.py index f294cc3a5d..8ddff535cc 100644 --- a/api/s3_analytics_provider.py +++ b/api/s3_analytics_provider.py @@ -1,51 +1,23 @@ import datetime import json -from typing import Any, Dict, List +import random +import string +from typing import Dict, Optional from flask_babel import lazy_gettext as _ from sqlalchemy.orm import Session from core.config import CannotLoadConfiguration from core.local_analytics_provider import LocalAnalyticsProvider -from core.mirror import MirrorUploader -from core.model import ( - ExternalIntegration, - Library, - LicensePool, - MediaTypes, - Representation, - get_one, -) -from core.model.configuration import ( - ConfigurationAttributeType, - ConfigurationGrouping, - ConfigurationMetadata, - ConfigurationOption, - ExternalIntegrationLink, -) -from core.s3 import S3Uploader, S3UploaderConfiguration +from core.model import Library, LicensePool, MediaTypes +from core.model.configuration import ConfigurationGrouping +from core.service.container import Services +from core.service.storage.s3 import S3Service class S3AnalyticsProviderConfiguration(ConfigurationGrouping): """Contains configuration settings of the S3 Analytics provider.""" - NO_MIRROR_INTEGRATION = "NO_MIRROR" - - DEFAULT_MIRROR_OPTION = ConfigurationOption(NO_MIRROR_INTEGRATION, "None") - - analytics_mirror = ConfigurationMetadata( - key="mirror_integration_id", - label=_("Analytics Mirror"), - description=_( - "S3-compatible service to use for storing analytics events. " - "The service must already be configured under 'Storage Services'." 
- ), - type=ConfigurationAttributeType.SELECT, - required=True, - default=NO_MIRROR_INTEGRATION, - options=[DEFAULT_MIRROR_OPTION], - ) - class S3AnalyticsProvider(LocalAnalyticsProvider): """Analytics provider storing data in a S3 bucket.""" @@ -57,6 +29,14 @@ class S3AnalyticsProvider(LocalAnalyticsProvider): LocalAnalyticsProvider.SETTINGS + S3AnalyticsProviderConfiguration.to_settings() ) + def __init__( + self, + integration, + services: Services, + library=None, + ): + super().__init__(integration, services, library) + @staticmethod def _create_event_object( library: Library, @@ -223,95 +203,59 @@ def collect_event( default=str, ensure_ascii=True, ) - s3_uploader: S3Uploader = self._get_s3_uploader(_db) - analytics_file_url = s3_uploader.analytics_file_url( - library, license_pool, event_type, time - ) + storage = self._get_storage() + analytics_file_key = self._get_file_key(library, license_pool, event_type, time) - # Create a temporary Representation object because S3Uploader can work only with Representation objects. - # NOTE: It won't be stored in the database. - representation = Representation( - media_type=MediaTypes.APPLICATION_JSON_MEDIA_TYPE, content=content + storage.store( + analytics_file_key, + content, + MediaTypes.APPLICATION_JSON_MEDIA_TYPE, ) - s3_uploader.mirror_one(representation, analytics_file_url) - - def _get_s3_uploader(self, db: Session) -> S3Uploader: - """Get an S3Uploader object associated with the provider's selected storage service. - :param db: Database session - - :return: S3Uploader object associated with the provider's selected storage service - """ - # To find the storage integration for the exporter, first find the - # external integration link associated with the provider's external - # integration. - integration_link = get_one( - db, - ExternalIntegrationLink, - external_integration_id=self.integration_id, - purpose=ExternalIntegrationLink.ANALYTICS, - ) - - if not integration_link: - raise CannotLoadConfiguration( - "The provider doesn't have an associated storage service" - ) - - # Then use the "other" integration value to find the storage integration. - storage_integration = get_one( - db, ExternalIntegration, id=integration_link.other_integration_id + def _get_file_key( + self, + library: Library, + license_pool: Optional[LicensePool], + event_type: str, + end_time: datetime.datetime, + start_time: Optional[datetime.datetime] = None, + ): + """The path to the analytics data file for the given library, license + pool and date range.""" + root = library.short_name + if start_time: + time_part = str(start_time) + "-" + str(end_time) + else: + time_part = str(end_time) + + # ensure the uniqueness of file name (in case of overlapping events) + collection = license_pool.collection_id if license_pool else "NONE" + random_string = "".join(random.choices(string.ascii_lowercase, k=10)) + file_name = "-".join([time_part, event_type, str(collection), random_string]) + # nest file in directories that allow for easy purging by year, month or day + return "/".join( + [ + str(root), + str(end_time.year), + str(end_time.month), + str(end_time.day), + file_name + ".json", + ] ) - if not storage_integration: - raise CannotLoadConfiguration( - "The provider doesn't have an associated storage service" - ) - - analytics_bucket = storage_integration.setting( - S3UploaderConfiguration.ANALYTICS_BUCKET_KEY - ).value + def _get_storage(self) -> S3Service: + """Return the CMs configured storage service. + Raises an exception if the storage service is not configured. 
- if not analytics_bucket: + :return: StorageServiceBase object + """ + s3_storage_service = self.services.storage.analytics() + if s3_storage_service is None: raise CannotLoadConfiguration( - "The associated storage service does not have {} bucket".format( - S3UploaderConfiguration.ANALYTICS_BUCKET_KEY - ) + "No storage service is configured with an analytics bucket." ) - s3_uploader = MirrorUploader.implementation(storage_integration) - - return s3_uploader - - @classmethod - def get_storage_settings(cls, db: Session) -> List[Dict[str, Any]]: - """Return the provider's configuration settings including available storage options. - - :param db: Database session - - :return: List containing the provider's configuration settings - """ - storage_integrations = ExternalIntegration.for_goal( - db, ExternalIntegration.STORAGE_GOAL - ) - - for storage_integration in storage_integrations: - configuration_settings = [ - setting - for setting in storage_integration.settings - if setting.key == S3UploaderConfiguration.ANALYTICS_BUCKET_KEY - ] - - if configuration_settings: - if configuration_settings[0].value: - S3AnalyticsProviderConfiguration.analytics_mirror.options.append( - ConfigurationOption( - storage_integration.id, storage_integration.name - ) - ) - - cls.SETTINGS = S3AnalyticsProviderConfiguration.to_settings() - - return cls.SETTINGS + return s3_storage_service Provider = S3AnalyticsProvider diff --git a/bin/odl2_import_monitor b/bin/odl2_import_monitor index 1a0bbe8b5b..ca1871f437 100755 --- a/bin/odl2_import_monitor +++ b/bin/odl2_import_monitor @@ -11,9 +11,6 @@ sys.path.append(os.path.abspath(package_dir)) from webpub_manifest_parser.odl import ODLFeedParserFactory from api.odl2 import ODL2Importer, ODL2ImportMonitor - -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa from core.opds2_import import RWPMManifestParser from core.scripts import RunCollectionMonitorScript diff --git a/bin/odl2_schema_validate b/bin/odl2_schema_validate index bb167ba925..f7972efc14 100755 --- a/bin/odl2_schema_validate +++ b/bin/odl2_schema_validate @@ -11,9 +11,6 @@ sys.path.append(os.path.abspath(package_dir)) from webpub_manifest_parser.odl import ODLFeedParserFactory from api.odl2 import ODL2Importer - -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa: autoflake from core.opds2_import import RWPMManifestParser from core.opds_schema import ODL2SchemaValidation from core.scripts import RunCollectionMonitorScript diff --git a/bin/odl_import_monitor b/bin/odl_import_monitor index dc7c536b53..aa1b5cd332 100755 --- a/bin/odl_import_monitor +++ b/bin/odl_import_monitor @@ -9,9 +9,6 @@ package_dir = os.path.join(bin_dir, "..") sys.path.append(os.path.abspath(package_dir)) from api.odl import ODLImporter, ODLImportMonitor - -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa from core.scripts import RunCollectionMonitorScript RunCollectionMonitorScript( diff --git a/bin/opds2_import_monitor b/bin/opds2_import_monitor index 29bcf46810..3223ba6cd0 100755 --- a/bin/opds2_import_monitor +++ b/bin/opds2_import_monitor @@ -9,8 +9,6 @@ sys.path.append(os.path.abspath(package_dir)) from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa: autoflake from core.model import 
ExternalIntegration from core.opds2_import import OPDS2Importer, OPDS2ImportMonitor, RWPMManifestParser from core.scripts import OPDSImportScript diff --git a/bin/opds2_schema_validate b/bin/opds2_schema_validate index afd4a48fd0..070507d428 100755 --- a/bin/opds2_schema_validate +++ b/bin/opds2_schema_validate @@ -10,8 +10,6 @@ sys.path.append(os.path.abspath(package_dir)) from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa: autoflake from core.model.configuration import ExternalIntegration from core.opds2_import import OPDS2Importer, RWPMManifestParser from core.opds_schema import OPDS2SchemaValidation diff --git a/bin/opds_import_monitor b/bin/opds_import_monitor index 5b21aeb5c9..b18022933c 100755 --- a/bin/opds_import_monitor +++ b/bin/opds_import_monitor @@ -8,8 +8,6 @@ bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..") sys.path.append(os.path.abspath(package_dir)) -# NOTE: We need to import it explicitly to initialize MirrorUploader.IMPLEMENTATION_REGISTRY -from core import s3 # noqa: autoflake from core.scripts import OPDSImportScript OPDSImportScript().run() diff --git a/core/analytics.py b/core/analytics.py index d8b4f94391..4c1d9e27f3 100644 --- a/core/analytics.py +++ b/core/analytics.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import importlib import logging from collections import defaultdict @@ -7,6 +9,7 @@ from .config import CannotLoadConfiguration from .model import ExternalIntegration +from .service.container import container_instance from .util.datetime_helpers import utc_now from .util.log import log_elapsed_time @@ -26,7 +29,7 @@ class Analytics: GLOBAL_ENABLED: Optional[bool] = None LIBRARY_ENABLED: Set[int] = set() - def __new__(cls, _db, refresh=False) -> "Analytics": + def __new__(cls, _db: Session, refresh: bool = False) -> Analytics: instance = cls._singleton_instance if instance is None: refresh = True @@ -44,15 +47,16 @@ def _reset_singleton_instance(cls): cls._singleton_instance = None @log_elapsed_time(log_method=log.debug, message_prefix="Initializing instance") - def _initialize_instance(self, _db): + def _initialize_instance(self, _db: Session) -> None: """Initialize an instance (usually the singleton) of the class. We don't use __init__ because it would be run whether or not a new instance were instantiated. 
""" + services = container_instance() sitewide_providers = [] library_providers = defaultdict(list) - initialization_exceptions: Dict[int, Exception] = {} + initialization_exceptions: Dict[int, Exception | str] = {} global_enabled = False library_enabled = set() # Find a list of all the ExternalIntegrations set up with a @@ -68,12 +72,12 @@ def _initialize_instance(self, _db): provider_class = self._provider_class_from_module(module) if provider_class: if not libraries: - provider = provider_class(integration) + provider = provider_class(integration, services) sitewide_providers.append(provider) global_enabled = True else: for library in libraries: - provider = provider_class(integration, library) + provider = provider_class(integration, services, library) library_providers[library.id].append(provider) library_enabled.add(library.id) else: diff --git a/core/local_analytics_provider.py b/core/local_analytics_provider.py index 207972591c..4b40c497bf 100644 --- a/core/local_analytics_provider.py +++ b/core/local_analytics_provider.py @@ -2,6 +2,7 @@ from sqlalchemy.orm.session import Session from .model import CirculationEvent, ExternalIntegration, create, get_one +from .service.container import Services class LocalAnalyticsProvider: @@ -41,12 +42,13 @@ class LocalAnalyticsProvider: }, ] - def __init__(self, integration, library=None): + def __init__(self, integration, services: Services, library=None): self.integration_id = integration.id self.location_source = ( integration.setting(self.LOCATION_SOURCE).value or self.LOCATION_SOURCE_DISABLED ) + self.services = services if library: self.library_id = library.id else: diff --git a/core/marc.py b/core/marc.py index ccb1d89472..1157e5c86c 100644 --- a/core/marc.py +++ b/core/marc.py @@ -1,5 +1,6 @@ import re from io import BytesIO +from typing import Optional from flask_babel import lazy_gettext as _ from pymarc import Field, Record, Subfield @@ -9,7 +10,6 @@ from .config import CannotLoadConfiguration from .external_search import ExternalSearchIndex, SortKeyPagination from .lane import BaseFacets, Lane -from .mirror import MirrorUploader from .model import ( CachedMARCFile, DeliveryMechanism, @@ -20,11 +20,7 @@ Work, get_one_or_create, ) - -# this is necessary to ensure these implementations are registered -from .s3 import MinIOUploader, S3Uploader # noqa: autoflake - -# registered +from .service.storage.s3 import MultipartS3ContextManager, S3Service from .util import LanguageCodes from .util.datetime_helpers import utc_now @@ -572,20 +568,6 @@ class MARCExporter: }, ] - NO_MIRROR_INTEGRATION = "NO_MIRROR" - DEFAULT_MIRROR_INTEGRATION = dict( - key=NO_MIRROR_INTEGRATION, label=_("None - Do not mirror MARC files") - ) - SETTING = { - "key": "mirror_integration_id", - "label": _("MARC Mirror"), - "description": _( - "Storage protocol to use for uploading generated MARC files. The service must already be configured under 'Storage Services'." - ), - "type": "select", - "options": [DEFAULT_MIRROR_INTEGRATION], - } - @classmethod def from_config(cls, library): _db = Session.object_session(library) @@ -606,27 +588,6 @@ def __init__(self, _db, library, integration): self.library = library self.integration = integration - @classmethod - def get_storage_settings(cls, _db): - integrations = ExternalIntegration.for_goal( - _db, ExternalIntegration.STORAGE_GOAL - ) - cls.SETTING["options"] = [cls.DEFAULT_MIRROR_INTEGRATION] - for integration in integrations: - # Only add an integration to choose from if it has a - # MARC File Bucket field in its settings. 
- configuration_settings = [ - s for s in integration.settings if s.key == "marc_bucket" - ] - - if configuration_settings: - if configuration_settings[0].value: - cls.SETTING["options"].append( - dict(key=str(integration.id), label=integration.name) - ) - - return cls.SETTING - @classmethod def create_record(cls, work, annotator, force_create=False, integration=None): """Build a complete MARC record for a given work.""" @@ -674,14 +635,24 @@ def create_record(cls, work, annotator, force_create=False, integration=None): ) return record + def _file_key(self, library, lane, end_time, start_time=None): + """The path to the hosted MARC file for the given library, lane, + and date range.""" + root = library.short_name + if start_time: + time_part = str(start_time) + "-" + str(end_time) + else: + time_part = str(end_time) + parts = [root, time_part, lane.display_name] + return "/".join(parts) + ".mrc" + def records( self, lane, annotator, - mirror_integration, + storage_service: Optional[S3Service], start_time=None, force_refresh=False, - mirror=None, search_engine=None, query_batch_size=500, upload_batch_size=7500, @@ -691,10 +662,9 @@ def records( :param lane: The Lane to export books from. :param annotator: The Annotator to use when creating MARC records. - :param mirror_integration: The mirror integration to use for MARC files. + :param storage_service: The storage service integration to use for MARC files. :param start_time: Only include records that were created or modified after this time. :param force_refresh: Create new records even when cached records are available. - :param mirror: Optional mirror to use instead of loading one from configuration. :param query_batch_size: Number of works to retrieve with a single Opensearch query. :param upload_batch_size: Number of records to mirror at a time. This is different from query_batch_size because S3 enforces a minimum size of 5MB for all parts @@ -702,18 +672,10 @@ def records( works for a single query. """ - # We mirror the content, if it's not empty. If it's empty, we create a CachedMARCFile - # and Representation, but don't actually mirror it. - if not mirror: - storage_protocol = mirror_integration.protocol - mirror = MirrorUploader.implementation(mirror_integration) - if mirror.NAME != storage_protocol: - raise Exception( - "Mirror integration does not match configured storage protocol" - ) - - if not mirror: - raise Exception("No mirror integration is configured") + # We store the content, if it's not empty. If it's empty, we create a CachedMARCFile + # and Representation, but don't actually store it. 
+ if storage_service is None: + raise Exception("No storage service is configured") search_engine = search_engine or ExternalSearchIndex(self._db) @@ -725,12 +687,12 @@ def records( facets = MARCExporterFacets(start_time=start_time) pagination = SortKeyPagination(size=query_batch_size) - url = mirror.marc_file_url(self.library, lane, end_time, start_time) - representation, ignore = get_one_or_create( - self._db, Representation, url=url, media_type=Representation.MARC_MEDIA_TYPE - ) + key = self._file_key(self.library, lane, end_time, start_time) - with mirror.multipart_upload(representation, url) as upload: + with storage_service.multipart( + key, + content_type=Representation.MARC_MEDIA_TYPE, + ) as upload: this_batch = BytesIO() this_batch_size = 0 while pagination is not None: @@ -752,7 +714,7 @@ def records( this_batch_size += pagination.this_page_size if this_batch_size >= upload_batch_size: # We've reached or exceeded the upload threshold. - # Upload one part of the multi-part document. + # Upload one part of the multipart document. self._upload_batch(this_batch, upload) this_batch = BytesIO() this_batch_size = 0 @@ -760,10 +722,16 @@ def records( # Upload the final part of the multi-document, if # necessary. - self._upload_batch(this_batch, upload) + self._upload_batch(this_batch, upload) # type: ignore[unreachable] + representation, ignore = get_one_or_create( + self._db, + Representation, + url=upload.url, + media_type=Representation.MARC_MEDIA_TYPE, + ) representation.fetched_at = end_time - if not representation.mirror_exception: + if not upload.exception: cached, is_new = get_one_or_create( self._db, CachedMARCFile, @@ -775,8 +743,11 @@ def records( if not is_new: cached.representation = representation cached.end_time = end_time + representation.set_as_mirrored(upload.url) + else: + representation.mirror_exception = str(upload.exception) - def _upload_batch(self, output, upload): + def _upload_batch(self, output: BytesIO, upload: MultipartS3ContextManager): "Upload a batch of MARC records as one part of a multi-part upload." content = output.getvalue() if content: diff --git a/core/mirror.py b/core/mirror.py deleted file mode 100644 index c082b40158..0000000000 --- a/core/mirror.py +++ /dev/null @@ -1,202 +0,0 @@ -from __future__ import annotations - -from abc import ABCMeta, abstractmethod -from typing import TYPE_CHECKING -from urllib.parse import urlsplit - -from .config import CannotLoadConfiguration -from .util.datetime_helpers import utc_now - -if TYPE_CHECKING: - from .model import Collection, ExternalIntegration, Representation - - -class MirrorUploader(metaclass=ABCMeta): - """Handles the job of uploading a representation's content to - a mirror that we control. - """ - - STORAGE_GOAL = "storage" - - # Depending on the .protocol of an ExternalIntegration with - # .goal=STORAGE, a different subclass might be initialized by - # sitewide() or for_collection(). A subclass that wants to take - # advantage of this should add a mapping here from its .protocol - # to itself. - IMPLEMENTATION_REGISTRY: dict[str, type[MirrorUploader]] = {} - - @classmethod - def mirror(cls, _db, storage_name=None, integration=None): - """Create a MirrorUploader from an integration or storage name. - - :param storage_name: The name of the storage integration. - :param integration: The external integration. - - :return: A MirrorUploader. - - :raise: CannotLoadConfiguration if no integration with - goal==STORAGE_GOAL is configured. 
- """ - if not integration: - integration = cls.integration_by_name(_db, storage_name) - return cls.implementation(integration) - - @classmethod - def integration_by_name(cls, _db, storage_name=None): - """Find the ExternalIntegration for the mirror by storage name.""" - from .model import ExternalIntegration - - qu = _db.query(ExternalIntegration).filter( - ExternalIntegration.goal == cls.STORAGE_GOAL, - ExternalIntegration.name == storage_name, - ) - integrations = qu.all() - if not integrations: - raise CannotLoadConfiguration( - "No storage integration with name '%s' is configured." % storage_name - ) - - [integration] = integrations - return integration - - @classmethod - def for_collection(cls, collection, purpose): - """Create a MirrorUploader for the given Collection. - - :param collection: Use the mirror configuration for this Collection. - :param purpose: Use the purpose of the mirror configuration. - - :return: A MirrorUploader, or None if the Collection has no - mirror integration. - """ - from .model import ExternalIntegration - - try: - from .model import Session - - _db = Session.object_session(collection) - integration = ExternalIntegration.for_collection_and_purpose( - _db, collection, purpose - ) - except CannotLoadConfiguration as e: - return None - return cls.implementation(integration) - - @classmethod - def implementation(cls, integration): - """Instantiate the appropriate implementation of MirrorUploader - for the given ExternalIntegration. - """ - if not integration: - return None - implementation_class = cls.IMPLEMENTATION_REGISTRY.get( - integration.protocol, cls - ) - return implementation_class(integration) - - def __init__(self, integration: ExternalIntegration, host: str): - """Instantiate a MirrorUploader from an ExternalIntegration. - - :param integration: An ExternalIntegration configuring the credentials - used to upload things. - :param host: Base host used by the mirror - """ - if integration.goal != self.STORAGE_GOAL: - # This collection's 'mirror integration' isn't intended to - # be used to mirror anything. - raise CannotLoadConfiguration( - "Cannot create an MirrorUploader from an integration with goal=%s" - % integration.goal - ) - - self._host = host - - # Subclasses will override this to further configure the client - # based on the credentials in the ExternalIntegration. - - def do_upload(self, representation): - raise NotImplementedError() - - def mirror_one( - self, - representation: Representation, - mirror_to: str, - collection: Collection | None = None, - ): - """Mirror a single Representation. - - :param representation: Book's representation - :param mirror_to: Mirror URL - :param collection: Collection - """ - now = utc_now() - exception = self.do_upload(representation) - representation.mirror_exception = exception - if exception: - representation.mirrored_at = None - else: - representation.mirrored_at = now - - def mirror_batch(self, representations): - """Mirror a batch of Representations at once.""" - - for representation in representations: - self.mirror_one(representation, "") - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - """The URL of the hosted EPUB file for the given identifier. - - This does not upload anything to the URL, but it is expected - that calling mirror() on a certain Representation object will - make that representation end up at that URL. 
- """ - raise NotImplementedError() - - def cover_image_url(self, data_source, identifier, filename=None, scaled_size=None): - """The URL of the hosted cover image for the given identifier. - - This does not upload anything to the URL, but it is expected - that calling mirror() on a certain Representation object will - make that representation end up at that URL. - """ - raise NotImplementedError() - - def sign_url(self, url: str, expiration: int | None = None) -> str: - """Signs a URL and make it expirable - - :param url: URL - :param expiration: (Optional) Time in seconds for the presigned URL to remain valid. - Default value depends on a specific implementation - :return: Signed expirable link - """ - raise NotImplementedError() - - def is_self_url(self, url: str) -> bool: - """Determines whether the URL has the mirror's host or a custom domain - - :param url: The URL - :return: Boolean value indicating whether the URL has the mirror's host or a custom domain - """ - scheme, netloc, path, query, fragment = urlsplit(url) - - if netloc.endswith(self._host): - return True - else: - return False - - @abstractmethod - def split_url(self, url: str, unquote: bool = True) -> tuple[str, str]: - """Splits the URL into the components: container (bucket) and file path - - :param url: URL - :param unquote: Boolean value indicating whether it's required to unquote URL elements - :return: Tuple (bucket, file path) - """ - raise NotImplementedError() diff --git a/core/mock_analytics_provider.py b/core/mock_analytics_provider.py index 8742b94063..734373028f 100644 --- a/core/mock_analytics_provider.py +++ b/core/mock_analytics_provider.py @@ -1,7 +1,16 @@ class MockAnalyticsProvider: """A mock analytics provider that keeps track of how many times it's called.""" - def __init__(self, integration=None, library=None): + def __init__(self, integration=None, services=None, library=None): + """ + Since this is a mock analytics provider, it doesn't need to do anything + with the integration or services. It just needs to keep track of how + many times it's called. + + :param integration: The ExternalIntegration that configures this analytics service. + :param services: The Service object that provides services to this provider. + :param library: The library this analytics provider is associated with. + """ self.count = 0 self.event = None if integration: diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py index 3fafcfee6b..4de95da13a 100644 --- a/core/model/cachedfeed.py +++ b/core/model/cachedfeed.py @@ -1,9 +1,10 @@ # CachedFeed, WillNotGenerateExpensiveFeed +from __future__ import annotations import datetime import logging from collections import namedtuple -from typing import Optional +from typing import TYPE_CHECKING, Optional from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, Unicode from sqlalchemy.orm import Mapped, relationship @@ -14,6 +15,9 @@ from . import Base, flush, get_one, get_one_or_create from .work import Work +if TYPE_CHECKING: + from . import Representation + # This named tuple makes it easy to manage the return value of # CachedFeed._prepare_keys. CachedFeedKeys = namedtuple( @@ -93,7 +97,7 @@ def fetch( refresher_method, max_age=None, raw=False, - **response_kwargs + **response_kwargs, ): """Retrieve a cached feed from the database if possible. 
@@ -415,6 +419,9 @@ class CachedMARCFile(Base): representation_id = Column( Integer, ForeignKey("representations.id"), nullable=False ) + representation: Mapped[Representation] = relationship( + "Representation", back_populates="marc_file" + ) start_time = Column(DateTime(timezone=True), nullable=True, index=True) end_time = Column(DateTime(timezone=True), nullable=True, index=True) diff --git a/core/model/collection.py b/core/model/collection.py index df102b004d..a3a83329fd 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -948,10 +948,7 @@ def delete(self, search_index=None): # Collection, assuming it wasn't deleted already. if self.external_integration: for link in self.external_integration.links: - if ( - link.other_integration - and link.other_integration.goal == ExternalIntegration.STORAGE_GOAL - ): + if link.other_integration and link.other_integration.goal == "storage": logging.info( f"Deletion of collection {self.name} is disassociating " f"storage integration {link.other_integration.name}." diff --git a/core/model/configuration.py b/core/model/configuration.py index fcfe36d79e..696368053e 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -18,7 +18,6 @@ from core.model.hybrid import hybrid_property from ..config import CannotLoadConfiguration, Configuration -from ..mirror import MirrorUploader from ..util.string_helpers import random_string from . import Base, get_one, get_one_or_create from .constants import DataSourceConstants @@ -89,10 +88,6 @@ class ExternalIntegration(Base): # but not the books themselves. METADATA_GOAL = "metadata" - # These integrations are associated with external services such as - # S3 that provide access to book covers. - STORAGE_GOAL = MirrorUploader.STORAGE_GOAL - # These integrations are associated with external services such as # Opensearch that provide indexed search. SEARCH_GOAL = "search" @@ -164,11 +159,6 @@ class ExternalIntegration(Base): NYT = "New York Times" CONTENT_SERVER = "Content Server" - # Integrations with STORAGE_GOAL - S3 = "Amazon S3" - MINIO = "MinIO" - LCP = "LCP" - # Integrations with SEARCH_GOAL OPENSEARCH = "Opensearch" diff --git a/core/model/resource.py b/core/model/resource.py index 299839862e..8423e9fa52 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -546,7 +546,7 @@ class Representation(Base, MediaTypes): # A Representation may be a CachedMARCFile. 
marc_file: Mapped[CachedMARCFile] = relationship( "CachedMARCFile", - backref="representation", + back_populates="representation", cascade="all, delete-orphan", ) diff --git a/core/s3.py b/core/s3.py deleted file mode 100644 index 89c9797883..0000000000 --- a/core/s3.py +++ /dev/null @@ -1,881 +0,0 @@ -import functools -import logging -import random -import string -from contextlib import contextmanager -from datetime import datetime -from enum import Enum -from typing import Any, List, Optional, Tuple -from urllib.parse import quote, unquote_plus, urlsplit - -import boto3 -import botocore -from botocore.config import Config -from botocore.exceptions import BotoCoreError, ClientError -from flask_babel import lazy_gettext as _ - -from .mirror import MirrorUploader -from .model import Collection, ExternalIntegration, Library, LicensePool, Representation -from .model.configuration import ( - ConfigurationAttributeType, - ConfigurationGrouping, - ConfigurationMetadata, - ConfigurationOption, -) - - -class MultipartS3Upload: - def __init__(self, uploader, representation, mirror_to): - self.uploader = uploader - self.representation = representation - self.bucket, self.filename = uploader.split_url(mirror_to) - media_type = representation.external_media_type - self.part_number = 1 - self.parts = [] - - self.upload = uploader.client.create_multipart_upload( - Bucket=self.bucket, - Key=self.filename, - ContentType=media_type, - ) - - def upload_part(self, content): - logging.info(f"Uploading part {self.part_number} of {self.filename}") - result = self.uploader.client.upload_part( - Body=content, - Bucket=self.bucket, - Key=self.filename, - PartNumber=self.part_number, - UploadId=self.upload.get("UploadId"), - ) - self.parts.append(dict(ETag=result.get("ETag"), PartNumber=self.part_number)) - self.part_number += 1 - - def complete(self): - if not self.parts: - logging.info("Upload of %s was empty, not mirroring" % self.filename) - self.abort() - else: - self.uploader.client.complete_multipart_upload( - Bucket=self.bucket, - Key=self.filename, - UploadId=self.upload.get("UploadId"), - MultipartUpload=dict(Parts=self.parts), - ) - mirror_url = self.uploader.final_mirror_url(self.bucket, self.filename) - self.representation.set_as_mirrored(mirror_url) - logging.info("MIRRORED %s" % self.representation.mirror_url) - - def abort(self): - logging.info("Aborting upload of %s" % self.filename) - self.uploader.client.abort_multipart_upload( - Bucket=self.bucket, - Key=self.filename, - UploadId=self.upload.get("UploadId"), - ) - - -def _get_available_regions() -> List[str]: - """Returns a list of available S3 regions - - :return: List of available S3 regions - """ - session = boto3.session.Session() - - return session.get_available_regions(service_name="s3") - - -def _get_available_region_options() -> List[ConfigurationOption]: - """Returns a list of available options for S3Uploader's Region configuration setting - - :return: List of available options for S3Uploader's Region configuration setting - """ - available_regions = sorted(_get_available_regions()) - options = [ConfigurationOption(region, region) for region in available_regions] - - return options - - -class S3AddressingStyle(Enum): - """Enumeration of different addressing styles supported by boto""" - - VIRTUAL = "virtual" - PATH = "path" - AUTO = "auto" - - -class S3UploaderConfiguration(ConfigurationGrouping): - S3_REGION = "s3_region" - S3_DEFAULT_REGION = "us-east-1" - - S3_ADDRESSING_STYLE = "s3_addressing_style" - S3_DEFAULT_ADDRESSING_STYLE = 
S3AddressingStyle.VIRTUAL.value - - S3_PRESIGNED_URL_EXPIRATION = "s3_presigned_url_expiration" - S3_DEFAULT_PRESIGNED_URL_EXPIRATION = 3600 - - BOOK_COVERS_BUCKET_KEY = "book_covers_bucket" - OA_CONTENT_BUCKET_KEY = "open_access_content_bucket" - PROTECTED_CONTENT_BUCKET_KEY = "protected_content_bucket" - ANALYTICS_BUCKET_KEY = "analytics_bucket" - - MARC_BUCKET_KEY = "marc_bucket" - - URL_TEMPLATE_KEY = "bucket_name_transform" - URL_TEMPLATE_HTTP = "http" - URL_TEMPLATE_HTTPS = "https" - URL_TEMPLATE_DEFAULT = "identity" - - URL_TEMPLATES_BY_TEMPLATE = { - URL_TEMPLATE_HTTP: "http://%(bucket)s/%(key)s", - URL_TEMPLATE_HTTPS: "https://%(bucket)s/%(key)s", - URL_TEMPLATE_DEFAULT: "https://%(bucket)s.s3.%(region)s/%(key)s", - } - - access_key = ConfigurationMetadata( - key=ExternalIntegration.USERNAME, - label=_("Access Key"), - description="", - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - secret_key = ConfigurationMetadata( - key=ExternalIntegration.PASSWORD, - label=_("Secret Key"), - description=_( - "If the Access Key and Secret Key are not given here credentials " - "will be used as outlined in the " - 'Boto3 documenation. ' - "If Access Key is given, Secrent Key must also be given." - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - book_covers_bucket = ConfigurationMetadata( - key=BOOK_COVERS_BUCKET_KEY, - label=_("Book Covers Bucket"), - description=_( - "All book cover images encountered will be mirrored to this S3 bucket. " - "Large images will be scaled down, and the scaled-down copies will also be uploaded to this bucket. " - "
The bucket must already exist—it will not be created automatically.
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - open_access_content_bucket = ConfigurationMetadata( - key=OA_CONTENT_BUCKET_KEY, - label=_("Open Access Content Bucket"), - description=_( - "All open-access books encountered will be uploaded to this S3 bucket. " - "
The bucket must already exist—it will not be created automatically.
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - protected_access_content_bucket = ConfigurationMetadata( - key=PROTECTED_CONTENT_BUCKET_KEY, - label=_("Protected Access Content Bucket"), - description=_( - "Self-hosted books will be uploaded to this S3 bucket. " - "
The bucket must already exist—it will not be created automatically.
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - analytics_bucket = ConfigurationMetadata( - key=ANALYTICS_BUCKET_KEY, - label=_("Analytics Bucket"), - description=_( - "Text files containing analytics data will be uploaded to this " - "S3 bucket. " - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - marc_file_bucket = ConfigurationMetadata( - key=MARC_BUCKET_KEY, - label=_("MARC File Bucket"), - description=_( - "All generated MARC files will be uploaded to this S3 bucket. " - "
The bucket must already exist—it will not be created automatically.
" - ), - type=ConfigurationAttributeType.TEXT, - required=False, - ) - - s3_region = ConfigurationMetadata( - key=S3_REGION, - label=_("S3 region"), - description=_("S3 region which will be used for storing the content."), - type=ConfigurationAttributeType.SELECT, - required=False, - default=S3_DEFAULT_REGION, - options=_get_available_region_options(), - ) - - s3_addressing_style = ConfigurationMetadata( - key=S3_ADDRESSING_STYLE, - label=_("S3 addressing style"), - description=_( - "Buckets created after September 30, 2020, will support only virtual hosted-style requests. " - "Path-style requests will continue to be supported for buckets created on or before this date. " - "For more information, " - 'see ' - "Amazon S3 Path Deprecation Plan - The Rest of the Story." - ), - type=ConfigurationAttributeType.SELECT, - required=False, - default=S3_DEFAULT_REGION, - options=[ - ConfigurationOption(S3AddressingStyle.VIRTUAL.value, _("Virtual")), - ConfigurationOption(S3AddressingStyle.PATH.value, _("Path")), - ConfigurationOption(S3AddressingStyle.AUTO.value, _("Auto")), - ], - ) - - s3_presigned_url_expiration = ConfigurationMetadata( - key=S3_PRESIGNED_URL_EXPIRATION, - label=_("S3 presigned URL expiration"), - description=_("Time in seconds for the presigned URL to remain valid"), - type=ConfigurationAttributeType.NUMBER, - required=False, - default=S3_DEFAULT_PRESIGNED_URL_EXPIRATION, - ) - - url_template = ConfigurationMetadata( - key=URL_TEMPLATE_KEY, - label=_("URL format"), - description=_( - "A file mirrored to S3 is available at http://{bucket}.s3.{region}.amazonaws.com/{filename}. " - "If you've set up your DNS so that http://[bucket]/ or https://[bucket]/ points to the appropriate " - "S3 bucket, you can configure this S3 integration to shorten the URLs. " - "
If you haven't set up your S3 buckets, don't change this from the default -- " - "you'll get URLs that don't work.
" - ), - type=ConfigurationAttributeType.SELECT, - required=False, - default=URL_TEMPLATE_DEFAULT, - options=[ - ConfigurationOption( - URL_TEMPLATE_DEFAULT, - _("S3 Default: https://{bucket}.s3.{region}.amazonaws.com/{file}"), - ), - ConfigurationOption( - URL_TEMPLATE_HTTPS, _("HTTPS: https://{bucket}/{file}") - ), - ConfigurationOption(URL_TEMPLATE_HTTP, _("HTTP: http://{bucket}/{file}")), - ], - ) - - -class S3Uploader(MirrorUploader): - NAME = ExternalIntegration.S3 - - # AWS S3 host - S3_HOST = "amazonaws.com" - - SETTINGS = S3UploaderConfiguration.to_settings() - - SITEWIDE = True - - def __init__( - self, - integration: ExternalIntegration, - client_class: Optional[Any] = None, - host: str = S3_HOST, - ) -> None: - """Instantiate an S3Uploader from an ExternalIntegration. - - :param integration: An ExternalIntegration - :param client_class: Mock object (or class) to use (or instantiate) - instead of boto3.client. - :param host: Host used by this integration - """ - super().__init__(integration, host) - - if not client_class: - client_class = boto3.client - - self._s3_region = integration.setting( - S3UploaderConfiguration.S3_REGION - ).value_or_default(S3UploaderConfiguration.S3_DEFAULT_REGION) - - self._s3_addressing_style = integration.setting( - S3UploaderConfiguration.S3_ADDRESSING_STYLE - ).value_or_default(S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE) - - self._s3_presigned_url_expiration = integration.setting( - S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION - ).value_or_default(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION) - - if callable(client_class): - # Pass None into boto3 if we get an empty string. - access_key = integration.username if integration.username != "" else None - secret_key = integration.password if integration.password != "" else None - config = Config( - signature_version=botocore.UNSIGNED, - s3={"addressing_style": self._s3_addressing_style}, - ) - # NOTE: Unfortunately, boto ignores credentials (aws_access_key_id, aws_secret_access_key) - # when using botocore.UNSIGNED signature version and doesn't authenticate the client in this case. - # That's why we have to create two S3 boto clients: - # - the first client WITHOUT authentication which is used for generating unsigned URLs - # - the second client WITH authentication used for working with S3: uploading files, etc. - self._s3_link_client = client_class( - "s3", - region_name=self._s3_region, - aws_access_key_id=None, - aws_secret_access_key=None, - config=config, - ) - self.client = client_class( - "s3", - region_name=self._s3_region, - aws_access_key_id=access_key, - aws_secret_access_key=secret_key, - ) - else: - self.client = client_class - - self.url_transform = integration.setting( - S3UploaderConfiguration.URL_TEMPLATE_KEY - ).value_or_default(S3UploaderConfiguration.URL_TEMPLATE_DEFAULT) - - # Transfer information about bucket names from the - # ExternalIntegration to the S3Uploader object, so we don't - # have to keep the ExternalIntegration around. - self.buckets = dict() - for setting in integration.settings: - if setting.key is not None and setting.key.endswith("_bucket"): - self.buckets[setting.key] = setting.value - - def _generate_s3_url(self, bucket: str, path: Any) -> str: - """Generates an S3 URL - - :param bucket: Bucket name - :return: S3 URL - """ - key = path - - # NOTE: path can be an empty string meaning that - # we need to generate a URL pointing at the root directory of the bucket. 
- # However, boto3 doesn't allow us to pass the key as an empty string. - # As a workaround we set it to a dummy string and later remove it from the generated URL - if not path: - key = "dummy" - - url = self._s3_link_client.generate_presigned_url( - "get_object", ExpiresIn=0, Params={"Bucket": bucket, "Key": key} - ) - - # If the path was an empty string we need to strip out trailing dummy string ending up with a URL - # pointing at the root directory of the bucket - if not path: - url = url.replace("/" + key, "/") - - return url - - def sign_url(self, url: str, expiration: Optional[int] = None) -> str: - """Signs a URL and make it expirable - - :param url: URL - :param expiration: (Optional) Time in seconds for the presigned URL to remain valid. - If it's empty, S3_PRESIGNED_URL_EXPIRATION configuration setting is used - :return: Signed expirable link - """ - if expiration is None: - expiration = self._s3_presigned_url_expiration - - bucket, key = self.split_url(url) - url = self.client.generate_presigned_url( - "get_object", - ExpiresIn=int(expiration), - Params={"Bucket": bucket, "Key": key}, - ) - - return url - - def get_bucket(self, bucket_key): - """Gets the bucket for a particular use based on the given key""" - return self.buckets.get(bucket_key) - - def url(self, bucket, path): - """The URL to a resource on S3 identified by bucket and path.""" - custom_url = bucket.startswith("http://") or bucket.startswith("https://") - - if isinstance(path, list): - # This is a list of key components that need to be quoted - # and assembled. - path = self.key_join(path, encode=custom_url) - if isinstance(path, bytes): - path = path.decode("utf-8") - if path.startswith("/"): - path = path[1:] - - if custom_url: - url = bucket - - if not url.endswith("/"): - url += "/" - - return url + path - else: - url = self._generate_s3_url(bucket, path) - - return url - - def cover_image_root(self, bucket, data_source, scaled_size=None): - """The root URL to the S3 location of cover images for - the given data source. - """ - parts = [] - if scaled_size: - parts.extend(["scaled", str(scaled_size)]) - if isinstance(data_source, str): - data_source_name = data_source - else: - data_source_name = data_source.name - parts.append(data_source_name) - url = self.url(bucket, parts) - if not url.endswith("/"): - url += "/" - return url - - def content_root(self, bucket): - """The root URL to the S3 location of hosted content of - the given type. - """ - return self.url(bucket, "/") - - def marc_file_root(self, bucket, library): - url = self.url(bucket, [library.short_name]) - if not url.endswith("/"): - url += "/" - return url - - def _analytics_file_root(self, bucket, library) -> str: - url = self.url(bucket, [library.short_name]) - if not url.endswith("/"): - url += "/" - return url - - @classmethod - def key_join(self, key, encode=True): - """Quote the path portions of an S3 key while leaving the path - characters themselves alone. - - :param key: Either a key, or a list of parts to be - assembled into a key. - - :return: A string that can be used as an S3 key. 
- """ - if isinstance(key, str): - parts = key.split("/") - else: - parts = key - new_parts = [] - - for part in parts: - if isinstance(part, bytes): - part = part.decode("utf-8") - if encode: - part = quote(str(part)) - new_parts.append(part) - - return "/".join(new_parts) - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - """The path to the hosted EPUB file for the given identifier.""" - bucket = self.get_bucket( - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY - if open_access - else S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY - ) - root = self.content_root(bucket) - - if not extension.startswith("."): - extension = "." + extension - - parts = [] - if data_source: - parts.append(data_source.name) - parts.append(identifier.type) - if title: - # e.g. DataSource/ISBN/1234/Title.epub - parts.append(identifier.identifier) - filename = title - else: - # e.g. DataSource/ISBN/1234.epub - filename = identifier.identifier - parts.append(filename + extension) - return root + self.key_join(parts) - - def cover_image_url(self, data_source, identifier, filename, scaled_size=None): - """The path to the hosted cover image for the given identifier.""" - bucket = self.get_bucket(S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY) - root = self.cover_image_root(bucket, data_source, scaled_size) - parts = [identifier.type, identifier.identifier, filename] - return root + self.key_join(parts) - - def marc_file_url(self, library, lane, end_time, start_time=None): - """The path to the hosted MARC file for the given library, lane, - and date range.""" - bucket = self.get_bucket(S3UploaderConfiguration.MARC_BUCKET_KEY) - root = self.marc_file_root(bucket, library) - if start_time: - time_part = str(start_time) + "-" + str(end_time) - else: - time_part = str(end_time) - parts = [time_part, lane.display_name] - return root + self.key_join(parts) + ".mrc" - - def analytics_file_url( - self, - library: Library, - license_pool: LicensePool, - event_type: str, - end_time: datetime, - start_time: Optional[datetime] = None, - ): - """The path to the analytics data file for the given library, license - pool and date range.""" - bucket = self.get_bucket(S3UploaderConfiguration.ANALYTICS_BUCKET_KEY) - root = self._analytics_file_root(bucket, library) - if start_time: - time_part = str(start_time) + "-" + str(end_time) - else: - time_part = str(end_time) - - # ensure the uniqueness of file name (in case of overlapping events) - collection = license_pool.collection_id if license_pool else "NONE" - random_string = "".join(random.choices(string.ascii_lowercase, k=10)) - file_name = "-".join([time_part, event_type, str(collection), random_string]) - # nest file in directories that allow for easy purging by year, month or day - parts = [ - str(end_time.year), - str(end_time.month), - str(end_time.day), - file_name + ".json", - ] - return root + self.key_join(parts) - - def split_url(self, url: str, unquote: bool = True) -> Tuple[str, str]: - """Splits the URL into the components: bucket and file path - - :param url: URL - :param unquote: Boolean value indicating whether it's required to unquote URL elements - :return: Tuple (bucket, file path) - """ - scheme, netloc, path, query, fragment = urlsplit(url) - - if self.is_self_url(url): - host_parts = netloc.split(".") - host_parts_count = len(host_parts) - - # 1. Path-style requests - # 1.1. URL without a region: https://s3.amazonaws.com/{bucket}/{path} - # 1.2. 
URL with a region: https://s3.{region}.amazonaws.com/{bucket}/{path} - - # 2. Virtual hosted-style requests - # 2.1. Legacy global endpoints: https://{bucket}.s3.amazonaws.com/{path} - # 2.2. Endpoints with s3-region: https://{bucket}.s3-{region}.amazonaws.com/{path} - # 2.3. Endpoints with s3.region: https://{bucket}.s3.{region}.amazonaws.com/{path} - - if host_parts_count == 3 or ( - host_parts_count == 4 and host_parts[0] == "s3" - ): - if path.startswith("/"): - path = path[1:] - bucket, filename = path.split("/", 1) - else: - bucket = host_parts[0] - - if path.startswith("/"): - path = path[1:] - - filename = path - else: - bucket = netloc - filename = path[1:] - - if unquote: - filename = unquote_plus(filename) - - return bucket, filename - - def final_mirror_url(self, bucket, key): - """Determine the URL to pass into Representation.set_as_mirrored, - assuming that it was successfully uploaded to the given - `bucket` as `key`. - - Depending on ExternalIntegration configuration this may - be any of the following: - - https://{bucket}.s3.{region}.amazonaws.com/{key} - http://{bucket}/{key} - https://{bucket}/{key} - """ - templates = S3UploaderConfiguration.URL_TEMPLATES_BY_TEMPLATE - default = templates[S3UploaderConfiguration.URL_TEMPLATE_DEFAULT] - template = templates.get(self.url_transform, default) - - if template == default: - link = self._generate_s3_url(bucket, self.key_join(key, encode=False)) - else: - link = template % dict(bucket=bucket, key=self.key_join(key)) - - return link - - def mirror_one( - self, - representation: Representation, - mirror_to: str, - collection: Optional[Collection] = None, - ) -> Any: - """Mirror a single representation to the given URL. - - :param representation: Book's representation - :param mirror_to: Mirror URL - :param collection: Collection - """ - # Turn the original URL into an s3.amazonaws.com URL. - media_type = representation.external_media_type - bucket, remote_filename = self.split_url(mirror_to) - fh = representation.external_content() - try: - result = self.client.upload_fileobj( - Fileobj=fh, - Bucket=bucket, - Key=remote_filename, - ExtraArgs=dict(ContentType=media_type), - ) - - # Since upload_fileobj completed without a problem, we - # know the file is available at - # https://s3.amazonaws.com/{bucket}/{remote_filename}. But - # that may not be the URL we want to store. - mirror_url = self.final_mirror_url(bucket, remote_filename) - representation.set_as_mirrored(mirror_url) - - source = representation.local_content_path - if representation.url != mirror_url: - source = representation.url - if source: - logging.info("MIRRORED %s => %s", source, representation.mirror_url) - else: - logging.info("MIRRORED %s", representation.mirror_url) - except (BotoCoreError, ClientError) as e: - # BotoCoreError happens when there's a problem with - # the network transport. ClientError happens when - # there's a problem with the credentials. Either way, - # the best thing to do is treat this as a transient - # error and try again later. There's no scenario where - # giving up is the right move. 
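For illustration only (the bucket names and URLs here are hypothetical and not part of the patch), the URL forms enumerated in the split_url comments above decompose into (bucket, key) pairs roughly as follows, assuming an uploader configured for the default amazonaws.com host:

    # Minimal sketch of the legacy split_url behaviour; "uploader" is assumed to be
    # a configured S3Uploader and the URLs/buckets below are made-up examples.
    examples = {
        # 1.1 Path-style, no region
        "https://s3.amazonaws.com/my-bucket/covers/1.png": ("my-bucket", "covers/1.png"),
        # 1.2 Path-style, with region
        "https://s3.us-east-2.amazonaws.com/my-bucket/covers/1.png": ("my-bucket", "covers/1.png"),
        # 2.1 Virtual hosted-style, legacy global endpoint
        "https://my-bucket.s3.amazonaws.com/covers/1.png": ("my-bucket", "covers/1.png"),
        # 2.3 Virtual hosted-style, s3.region endpoint
        "https://my-bucket.s3.us-east-2.amazonaws.com/covers/1.png": ("my-bucket", "covers/1.png"),
    }
    for url, expected in examples.items():
        assert uploader.split_url(url) == expected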
- logging.error("Error uploading %s: %r", mirror_to, e, exc_info=e) - finally: - fh.close() - - @contextmanager - def multipart_upload( - self, representation, mirror_to, upload_class=MultipartS3Upload - ): - upload = upload_class(self, representation, mirror_to) - try: - yield upload - upload.complete() - except Exception as e: - logging.error("Multipart upload of %s failed: %r", mirror_to, e, exc_info=e) - upload.abort() - representation.mirror_exception = str(e) - - -# MirrorUploader.implementation will instantiate an S3Uploader -# for storage integrations with protocol 'Amazon S3'. -MirrorUploader.IMPLEMENTATION_REGISTRY[S3Uploader.NAME] = S3Uploader - - -class MinIOUploaderConfiguration(ConfigurationGrouping): - ENDPOINT_URL = "ENDPOINT_URL" - - endpoint_url = ConfigurationMetadata( - key=ENDPOINT_URL, - label=_("Endpoint URL"), - description=_("MinIO's endpoint URL"), - type=ConfigurationAttributeType.TEXT, - required=True, - ) - - -class MinIOUploader(S3Uploader): - NAME = ExternalIntegration.MINIO - - SETTINGS = S3Uploader.SETTINGS + [ - MinIOUploaderConfiguration.endpoint_url.to_settings() - ] - - def __init__(self, integration, client_class=None): - """Instantiate an S3Uploader from an ExternalIntegration. - - :param integration: An ExternalIntegration - - :param client_class: Mock object (or class) to use (or instantiate) - instead of boto3.client. - """ - endpoint_url = integration.setting( - MinIOUploaderConfiguration.ENDPOINT_URL - ).value - - _, host, _, _, _ = urlsplit(endpoint_url) - - if not client_class: - client_class = boto3.client - - if callable(client_class): - client_class = functools.partial(client_class, endpoint_url=endpoint_url) - else: - self.client = client_class - - super().__init__(integration, client_class, host) - - -# MirrorUploader.implementation will instantiate an MinIOUploader instance -# for storage integrations with protocol 'MinIO'. 
-MirrorUploader.IMPLEMENTATION_REGISTRY[MinIOUploader.NAME] = MinIOUploader - - -class MockS3Uploader(S3Uploader): - """A dummy uploader for use in tests.""" - - buckets = { - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "test-cover-bucket", - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "test-content-bucket", - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "test-content-bucket", - S3UploaderConfiguration.MARC_BUCKET_KEY: "test-marc-bucket", - } - - def __init__(self, fail=False, *args, **kwargs): - self.uploaded = [] - self.content = [] - self.destinations = [] - self.fail = fail - self._s3_region = S3UploaderConfiguration.S3_DEFAULT_REGION - self._s3_addressing_style = S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE - config = Config( - signature_version=botocore.UNSIGNED, - s3={"addressing_style": self._s3_addressing_style}, - ) - self._s3_link_client = boto3.client( - "s3", - region_name=self._s3_region, - aws_access_key_id=None, - aws_secret_access_key=None, - config=config, - ) - self.client = boto3.client( - "s3", - region_name=self._s3_region, - aws_access_key_id=None, - aws_secret_access_key=None, - ) - - self.client - - def mirror_one(self, representation, **kwargs): - mirror_to = kwargs["mirror_to"] - self.uploaded.append(representation) - self.destinations.append(mirror_to) - self.content.append(representation.content) - if self.fail: - representation.mirror_exception = "Exception" - representation.mirrored_at = None - else: - representation.set_as_mirrored(mirror_to) - - @contextmanager - def multipart_upload(self, representation, mirror_to): - class MockMultipartS3Upload(MultipartS3Upload): - def __init__(self): - self.parts = [] - - def upload_part(self, part): - self.parts.append(part) - - upload = MockMultipartS3Upload() - yield upload - - self.uploaded.append(representation) - self.destinations.append(mirror_to) - self.content.append(upload.parts) - if self.fail: - representation.mirror_exception = "Exception" - representation.mirrored_at = None - else: - representation.set_as_mirrored(mirror_to) - - -class MockS3Client: - """This pool lets us test the real S3Uploader class with a mocked-up - boto3 client. 
- """ - - def __init__( - self, - service, - region_name, - aws_access_key_id, - aws_secret_access_key, - config=None, - ): - assert service == "s3" - self.region_name = region_name - self.access_key = aws_access_key_id - self.secret_key = aws_secret_access_key - self.config = config - self.uploads = [] - self.parts = [] - self.fail_with = None - - def upload_fileobj(self, Fileobj, Bucket, Key, ExtraArgs=None, **kwargs): - if self.fail_with: - raise self.fail_with - self.uploads.append((Fileobj.read(), Bucket, Key, ExtraArgs, kwargs)) - return None - - def create_multipart_upload(self, **kwargs): - if self.fail_with: - raise self.fail_with - return dict(UploadId=1) - - def upload_part(self, **kwargs): - if self.fail_with: - raise self.fail_with - self.parts.append(kwargs) - return dict(ETag="etag") - - def complete_multipart_upload(self, **kwargs): - self.uploads.append(kwargs) - self.parts = [] - return None - - def abort_multipart_upload(self, **kwargs): - self.parts = [] - return None - - def generate_presigned_url( - self, ClientMethod, Params=None, ExpiresIn=3600, HttpMethod=None - ): - return None diff --git a/core/scripts.py b/core/scripts.py index 328de301a3..62290e4575 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -58,6 +58,7 @@ from .monitor import CollectionMonitor, ReaperMonitor from .opds_import import OPDSImporter, OPDSImportMonitor from .overdrive import OverdriveCoreAPI +from .service.container import Services, container_instance from .util import fast_query_count from .util.datetime_helpers import strptime_utc, utc_now from .util.personal_names import contributor_name_match_ratio, display_name_to_sort_name @@ -71,6 +72,10 @@ def _db(self) -> Session: self._session = production_session() return self._session + @property + def services(self) -> Services: + return self._services + @property def script_name(self): """Find or guess the name of the script. @@ -110,7 +115,7 @@ def parse_time(cls, time_string): continue raise ValueError("Could not parse time: %s" % time_string) - def __init__(self, _db=None, *args, **kwargs): + def __init__(self, _db=None, services: Optional[Services] = None, *args, **kwargs): """Basic constructor. :_db: A database session to be used instead of @@ -119,6 +124,8 @@ def __init__(self, _db=None, *args, **kwargs): if _db: self._session = _db + self._services = container_instance() if services is None else services + def run(self): DataSource.well_known_sources(self._db) start_time = utc_now() diff --git a/core/service/configuration.py b/core/service/configuration.py new file mode 100644 index 0000000000..14321668e5 --- /dev/null +++ b/core/service/configuration.py @@ -0,0 +1,52 @@ +from pathlib import Path +from typing import Any + +from pydantic import BaseSettings, ValidationError + +from core.config import CannotLoadConfiguration + + +class ServiceConfiguration(BaseSettings): + """ + Base class for our service configuration. Each subclass should define its own + configuration settings as pydantic fields. The settings will be loaded from + environment variables with the prefix defined in the Config class. + + The env_prefix should also be overridden in subclasses to provide a unique prefix + for each service. 
+ """ + + class Config: + # See the pydantic docs for information on these settings + # https://docs.pydantic.dev/usage/model_config/ + + # Each sub-config will have its own prefix + env_prefix = "PALACE_" + + # Strip whitespace from all strings + anystr_strip_whitespace = True + + # Forbid mutation, settings should be loaded once from environment. + allow_mutation = False + + # Allow env vars to be loaded from a .env file + # This loads the .env file from the root of the project + env_file = str(Path(__file__).parent.parent.parent.absolute() / ".env") + + # Nested settings will be loaded from environment variables with this delimiter. + env_nested_delimiter = "__" + + def __init__(self, *args: Any, **kwargs: Any): + try: + super().__init__(*args, **kwargs) + except ValidationError as error_exception: + # The services settings failed to validate, we capture the ValidationError and + # raise a more specific CannotLoadConfiguration error. + errors = error_exception.errors() + error_log_message = f"Error loading settings from environment:" + for error in errors: + delimiter = self.__config__.env_nested_delimiter or "__" + error_location = delimiter.join(str(e).upper() for e in error["loc"]) + env_var_name = f"{self.__config__.env_prefix}{error_location}" + error_log_message += f"\n {env_var_name}: {error['msg']}" + raise CannotLoadConfiguration(error_log_message) from error_exception diff --git a/core/service/container.py b/core/service/container.py new file mode 100644 index 0000000000..b204df6462 --- /dev/null +++ b/core/service/container.py @@ -0,0 +1,37 @@ +from dependency_injector import providers +from dependency_injector.containers import DeclarativeContainer + +from core.service.storage.configuration import StorageConfiguration +from core.service.storage.container import Storage + + +class Services(DeclarativeContainer): + + config = providers.Configuration() + + storage = providers.Container( + Storage, + config=config.storage, + ) + + +def create_container() -> Services: + container = Services() + container.config.from_dict({"storage": StorageConfiguration().dict()}) + return container + + +_container_instance = None + + +def container_instance() -> Services: + # Create a singleton container instance, I'd like this to be used sparingly + # and eventually have it go away, but there are places in the code that + # are currently difficult to refactor to pass the container into the + # constructor. + # If at all possible please use the container that is stored in the CirculationManager + # or Scripts classes instead of using this function. 
+ global _container_instance + if _container_instance is None: + _container_instance = create_container() + return _container_instance diff --git a/core/service/storage/configuration.py b/core/service/storage/configuration.py new file mode 100644 index 0000000000..6e9b51f052 --- /dev/null +++ b/core/service/storage/configuration.py @@ -0,0 +1,38 @@ +from typing import Optional + +import boto3 +from pydantic import AnyHttpUrl, parse_obj_as, validator + +from core.service.configuration import ServiceConfiguration + + +class StorageConfiguration(ServiceConfiguration): + region: Optional[str] = None + access_key: Optional[str] = None + secret_key: Optional[str] = None + + public_access_bucket: Optional[str] = None + analytics_bucket: Optional[str] = None + + endpoint_url: Optional[AnyHttpUrl] = None + + url_template: AnyHttpUrl = parse_obj_as( + AnyHttpUrl, "https://{bucket}.s3.{region}.amazonaws.com/{key}" + ) + + @validator("region") + def validate_region(cls, v: Optional[str]) -> Optional[str]: + # No validation if region is not provided. + if v is None: + return None + + session = boto3.session.Session() + regions = session.get_available_regions(service_name="s3") + if v not in regions: + raise ValueError( + f"Invalid region: {v}. Region must be one of: {' ,'.join(regions)}." + ) + return v + + class Config: + env_prefix = "PALACE_STORAGE_" diff --git a/core/service/storage/container.py b/core/service/storage/container.py new file mode 100644 index 0000000000..54cf2db835 --- /dev/null +++ b/core/service/storage/container.py @@ -0,0 +1,34 @@ +import boto3 +from dependency_injector import providers +from dependency_injector.containers import DeclarativeContainer + +from core.service.storage.s3 import S3Service + + +class Storage(DeclarativeContainer): + config = providers.Configuration() + + s3_client = providers.Singleton( + boto3.client, + service_name="s3", + aws_access_key_id=config.access_key, + aws_secret_access_key=config.secret_key, + region_name=config.region, + endpoint_url=config.endpoint_url, + ) + + analytics = providers.Singleton( + S3Service.factory, + client=s3_client, + region=config.region, + bucket=config.analytics_bucket, + url_template=config.url_template, + ) + + public = providers.Singleton( + S3Service.factory, + client=s3_client, + region=config.region, + bucket=config.public_access_bucket, + url_template=config.url_template, + ) diff --git a/core/service/storage/s3.py b/core/service/storage/s3.py new file mode 100644 index 0000000000..e73c428004 --- /dev/null +++ b/core/service/storage/s3.py @@ -0,0 +1,232 @@ +from __future__ import annotations + +import dataclasses +import logging +import sys +from io import BytesIO +from string import Formatter +from types import TracebackType +from typing import TYPE_CHECKING, BinaryIO, List, Optional, Type +from urllib.parse import quote + +from botocore.exceptions import BotoCoreError, ClientError + +from core.config import CannotLoadConfiguration + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self + +if TYPE_CHECKING: + from mypy_boto3_s3 import S3Client + from mypy_boto3_s3.type_defs import CreateMultipartUploadOutputTypeDef + + +@dataclasses.dataclass +class MultipartS3UploadPart: + ETag: str + PartNumber: int + + +class MultipartS3ContextManager: + def __init__( + self, + client: S3Client, + bucket: str, + key: str, + url: str, + media_type: Optional[str] = None, + ) -> None: + self.client = client + self.key = key + self.bucket = bucket + self.part_number = 1 + 
self.parts: List[MultipartS3UploadPart] = [] + self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") + self.media_type = media_type + self.upload: Optional[CreateMultipartUploadOutputTypeDef] = None + self.upload_id: Optional[str] = None + self._complete = False + self._url = url + self._exception: Optional[BaseException] = None + + def __enter__(self) -> Self: + params = { + "Bucket": self.bucket, + "Key": self.key, + } + if self.media_type is not None: + params["ContentType"] = self.media_type + self.upload = self.client.create_multipart_upload(**params) # type: ignore[arg-type] + self.upload_id = self.upload["UploadId"] + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> bool: + if exc_val is None: + self._upload_complete() + else: + self.log.debug( + f"Exception {exc_type} occurred during upload of {self.key}. Aborting.", + exc_info=exc_val, + ) + self._upload_abort() + self._exception = exc_val + if isinstance(exc_val, (ClientError, BotoCoreError)): + return True + return False + + def upload_part(self, content: bytes) -> None: + if self.complete or self.exception or self.upload_id is None: + raise RuntimeError("Upload already complete or aborted.") + + logging.info( + f"Uploading part {self.part_number} of {self.key} to {self.bucket}" + ) + result = self.client.upload_part( + Body=content, + Bucket=self.bucket, + Key=self.key, + PartNumber=self.part_number, + UploadId=self.upload_id, + ) + self.parts.append(MultipartS3UploadPart(result["ETag"], self.part_number)) + self.part_number += 1 + + def _upload_complete(self) -> None: + if not self.parts: + logging.info(f"Upload of {self.key} was empty.") + self._upload_abort() + elif self.upload_id is None: + raise RuntimeError("Upload ID not set.") + else: + self.client.complete_multipart_upload( + Bucket=self.bucket, + Key=self.key, + UploadId=self.upload_id, + MultipartUpload=dict(Parts=[dataclasses.asdict(part) for part in self.parts]), # type: ignore[misc] + ) + self._complete = True + + def _upload_abort(self) -> None: + logging.info(f"Aborting upload of {self.key}.") + if self.upload_id is not None: + self.client.abort_multipart_upload( + Bucket=self.bucket, + Key=self.key, + UploadId=self.upload_id, + ) + else: + logging.error("Upload ID not set, unable to abort.") + + @property + def url(self) -> str: + return self._url + + @property + def complete(self) -> bool: + return self._complete + + @property + def exception(self) -> Optional[BaseException]: + return self._exception + + +class S3Service: + def __init__( + self, + client: S3Client, + region: Optional[str], + bucket: str, + url_template: str, + ) -> None: + self.client = client + self.region = region + self.bucket = bucket + self.url_template = url_template + self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") + + # Validate the URL template. + formatter = Formatter() + field_tuple = formatter.parse(self.url_template) + field_names = [field[1] for field in field_tuple] + if "region" in field_names and self.region is None: + raise CannotLoadConfiguration( + "URL template requires a region, but no region was provided." + ) + if "key" not in field_names: + raise CannotLoadConfiguration( + "URL template requires a key, but no key was provided." 
+ ) + + @classmethod + def factory( + cls, + client: S3Client, + region: Optional[str], + bucket: Optional[str], + url_template: str, + ) -> Optional[Self]: + if bucket is None: + return None + return cls(client, region, bucket, url_template) + + def generate_url(self, key: str) -> str: + return self.url_template.format( + bucket=self.bucket, key=quote(key), region=self.region + ) + + def store( + self, + key: str, + content: str | bytes, + content_type: Optional[str] = None, + ) -> Optional[str]: + if isinstance(content, str): + content = content.encode("utf8") + return self.store_stream( + key=key, stream=BytesIO(content), content_type=content_type + ) + + def store_stream( + self, + key: str, + stream: BinaryIO, + content_type: Optional[str] = None, + ) -> Optional[str]: + try: + extra_args = {} if content_type is None else {"ContentType": content_type} + self.client.upload_fileobj( + Fileobj=stream, + Bucket=self.bucket, + Key=key, + ExtraArgs=extra_args, + ) + except (BotoCoreError, ClientError) as e: + # BotoCoreError happens when there's a problem with + # the network transport. ClientError happens when + # there's a problem with the credentials. Either way, + # the best thing to do is treat this as a transient + # error and try again later. There's no scenario where + # giving up is the right move. + self.log.exception(f"Error uploading {key}: {str(e)}") + return None + finally: + stream.close() + + url = self.generate_url(key) + self.log.info(f"Stored '{key}' to {url}.") + return url + + def multipart( + self, key: str, content_type: Optional[str] = None + ) -> MultipartS3ContextManager: + url = self.generate_url(key) + return MultipartS3ContextManager( + self.client, self.bucket, key, url, content_type + ) diff --git a/docker-compose.yml b/docker-compose.yml index 68f6f12565..a9996f8b7c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,4 +1,16 @@ version: "3.9" + +# Common set of CM environment variables +# see: https://github.com/compose-spec/compose-spec/blob/master/spec.md#extension +x-cm-env-variables: &cm-env-variables + SIMPLIFIED_PRODUCTION_DATABASE: "postgresql://palace:test@pg:5432/circ" + PALACE_STORAGE_ACCESS_KEY: "palace" + PALACE_STORAGE_SECRET_KEY: "test123456789" + PALACE_STORAGE_ENDPOINT_URL: "http://minio:9000" + PALACE_STORAGE_PUBLIC_ACCESS_BUCKET: "public" + PALACE_STORAGE_ANALYTICS_BUCKET: "analytics" + PALACE_STORAGE_URL_TEMPLATE: "http://localhost:9000/{bucket}/{key}" + services: # example docker compose configuration for testing and development @@ -9,16 +21,14 @@ services: target: webapp ports: - "6500:80" - environment: - SIMPLIFIED_PRODUCTION_DATABASE: "postgresql://palace:test@pg:5432/circ" + environment: *cm-env-variables scripts: build: context: . dockerfile: docker/Dockerfile target: scripts - environment: - SIMPLIFIED_PRODUCTION_DATABASE: "postgresql://palace:test@pg:5432/circ" + environment: *cm-env-variables pg: image: "postgres:12" @@ -36,6 +46,7 @@ services: MINIO_ROOT_USER: "palace" MINIO_ROOT_PASSWORD: "test123456789" MINIO_SCHEME: "http" + MINIO_DEFAULT_BUCKETS: "public:download,analytics" os: build: diff --git a/docker/startup/01_set_simplified_environment.sh b/docker/startup/01_set_simplified_environment.sh index d26a0a8db8..709656bc56 100755 --- a/docker/startup/01_set_simplified_environment.sh +++ b/docker/startup/01_set_simplified_environment.sh @@ -14,7 +14,7 @@ touch $SIMPLIFIED_ENVIRONMENT # into an environment file. 
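To make the PALACE_STORAGE_* variables above concrete, here is a hedged usage sketch of the new S3Service (keys and byte payloads are placeholders; with the compose settings above the public bucket is "public" and URLs follow PALACE_STORAGE_URL_TEMPLATE):

    # Illustrative only; the service comes from the container in core.service.container.
    from core.service.container import container_instance

    services = container_instance()
    storage = services.storage.public()  # S3Service, or None if no bucket is configured
    if storage is not None:
        url = storage.store("covers/1.png", b"...image bytes...", "image/png")
        # e.g. http://localhost:9000/public/covers/1.png with the template above;
        # None is returned if the upload fails.

        # Streaming uploads go through the multipart context manager, which aborts
        # the upload on S3 errors and records them on .exception. Real S3 requires
        # every part except the last to be at least 5 MiB; these bytes are placeholders.
        with storage.multipart("marc/library.mrc", "application/marc") as upload:
            upload.upload_part(b"first chunk of records")
            upload.upload_part(b"second chunk of records")
        if upload.complete:
            final_url = upload.url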
This will allow the environment to be loaded when # cron tasks are run, since crontab doesn't load them automatically. # The values of the variables are escaped as needed for the shell. -for var in $(printenv | grep -e SIMPLIFIED -e LIBSIMPLE | sed -e 's/^\([^=]*\)=.*$/\1/g'); do { +for var in $(printenv | grep -e SIMPLIFIED -e LIBSIMPLE -e PALACE | sed -e 's/^\([^=]*\)=.*$/\1/g'); do { printf "export ${var}=%q\n" $(printenv "${var}") } done > $SIMPLIFIED_ENVIRONMENT diff --git a/poetry.lock b/poetry.lock index 43681370a1..0cd72f5b8a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -133,412 +133,426 @@ files = [ [[package]] name = "boto3" -version = "1.18.65" +version = "1.28.52" description = "The AWS SDK for Python" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "boto3-1.18.65-py3-none-any.whl", hash = "sha256:bbbc3a71949af31c33101ee0daf4db9b11148d67a4e574b6c66cbe35d985b5af"}, - {file = "boto3-1.18.65.tar.gz", hash = "sha256:baedf0637dd0e47cff60eb5591133f9c10aeb49581e2ad5a99794996a2dfbe09"}, + {file = "boto3-1.28.52-py3-none-any.whl", hash = "sha256:1d36db102517d62c6968b3b0636303241f56859d12dd071def4882fc6e030b20"}, + {file = "boto3-1.28.52.tar.gz", hash = "sha256:a34fc153cb2f6fb2f79a764286c967392e8aae9412381d943bddc576c4f7631a"}, ] [package.dependencies] -botocore = ">=1.21.65,<1.22.0" -jmespath = ">=0.7.1,<1.0.0" -s3transfer = ">=0.5.0,<0.6.0" +botocore = ">=1.31.52,<1.32.0" +jmespath = ">=0.7.1,<2.0.0" +s3transfer = ">=0.6.0,<0.7.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.27.0" -description = "Type annotations for boto3 1.27.0 generated with mypy-boto3-builder 7.14.5" +version = "1.28.52" +description = "Type annotations for boto3 1.28.52 generated with mypy-boto3-builder 7.19.0" optional = false python-versions = ">=3.7" files = [ - {file = "boto3-stubs-1.27.0.tar.gz", hash = "sha256:55b094ebbefecb0b8015451707aafeb81c2313e216dbbd5e2f2efff70a02db63"}, - {file = "boto3_stubs-1.27.0-py3-none-any.whl", hash = "sha256:451749fc2bb0af5718bf1410473ec2e7f915bb860614cd0f6aca00c254ccf7e3"}, + {file = "boto3-stubs-1.28.52.tar.gz", hash = "sha256:12d7e5865aeec52e1f73b935b1c6a42e61325538fc2cb83a87a83e41e9485241"}, + {file = "boto3_stubs-1.28.52-py3-none-any.whl", hash = "sha256:3ea81a225e062f3bcb205467891086ea031519697ad54622e61251b52609b8d6"}, ] [package.dependencies] +boto3 = {version = "1.28.52", optional = true, markers = "extra == \"boto3\""} +botocore = {version = "1.31.52", optional = true, markers = "extra == \"boto3\""} botocore-stubs = "*" +mypy-boto3-cloudformation = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-dynamodb = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-ec2 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-lambda = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-rds = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-s3 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} +mypy-boto3-sqs = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} types-s3transfer = "*" -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [package.extras] 
-accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.27.0,<1.28.0)"] -account = ["mypy-boto3-account (>=1.27.0,<1.28.0)"] -acm = ["mypy-boto3-acm (>=1.27.0,<1.28.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.27.0,<1.28.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.27.0,<1.28.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.27.0,<1.28.0)", "mypy-boto3-account (>=1.27.0,<1.28.0)", "mypy-boto3-acm (>=1.27.0,<1.28.0)", "mypy-boto3-acm-pca (>=1.27.0,<1.28.0)", "mypy-boto3-alexaforbusiness (>=1.27.0,<1.28.0)", "mypy-boto3-amp (>=1.27.0,<1.28.0)", "mypy-boto3-amplify (>=1.27.0,<1.28.0)", "mypy-boto3-amplifybackend (>=1.27.0,<1.28.0)", "mypy-boto3-amplifyuibuilder (>=1.27.0,<1.28.0)", "mypy-boto3-apigateway (>=1.27.0,<1.28.0)", "mypy-boto3-apigatewaymanagementapi (>=1.27.0,<1.28.0)", "mypy-boto3-apigatewayv2 (>=1.27.0,<1.28.0)", "mypy-boto3-appconfig (>=1.27.0,<1.28.0)", "mypy-boto3-appconfigdata (>=1.27.0,<1.28.0)", "mypy-boto3-appfabric (>=1.27.0,<1.28.0)", "mypy-boto3-appflow (>=1.27.0,<1.28.0)", "mypy-boto3-appintegrations (>=1.27.0,<1.28.0)", "mypy-boto3-application-autoscaling (>=1.27.0,<1.28.0)", "mypy-boto3-application-insights (>=1.27.0,<1.28.0)", "mypy-boto3-applicationcostprofiler (>=1.27.0,<1.28.0)", "mypy-boto3-appmesh (>=1.27.0,<1.28.0)", "mypy-boto3-apprunner (>=1.27.0,<1.28.0)", "mypy-boto3-appstream (>=1.27.0,<1.28.0)", "mypy-boto3-appsync (>=1.27.0,<1.28.0)", "mypy-boto3-arc-zonal-shift (>=1.27.0,<1.28.0)", "mypy-boto3-athena (>=1.27.0,<1.28.0)", "mypy-boto3-auditmanager (>=1.27.0,<1.28.0)", "mypy-boto3-autoscaling (>=1.27.0,<1.28.0)", "mypy-boto3-autoscaling-plans (>=1.27.0,<1.28.0)", "mypy-boto3-backup (>=1.27.0,<1.28.0)", "mypy-boto3-backup-gateway (>=1.27.0,<1.28.0)", "mypy-boto3-backupstorage (>=1.27.0,<1.28.0)", "mypy-boto3-batch (>=1.27.0,<1.28.0)", "mypy-boto3-billingconductor (>=1.27.0,<1.28.0)", "mypy-boto3-braket (>=1.27.0,<1.28.0)", "mypy-boto3-budgets (>=1.27.0,<1.28.0)", "mypy-boto3-ce (>=1.27.0,<1.28.0)", "mypy-boto3-chime (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-identity (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-meetings (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-messaging (>=1.27.0,<1.28.0)", "mypy-boto3-chime-sdk-voice (>=1.27.0,<1.28.0)", "mypy-boto3-cleanrooms (>=1.27.0,<1.28.0)", "mypy-boto3-cloud9 (>=1.27.0,<1.28.0)", "mypy-boto3-cloudcontrol (>=1.27.0,<1.28.0)", "mypy-boto3-clouddirectory (>=1.27.0,<1.28.0)", "mypy-boto3-cloudformation (>=1.27.0,<1.28.0)", "mypy-boto3-cloudfront (>=1.27.0,<1.28.0)", "mypy-boto3-cloudhsm (>=1.27.0,<1.28.0)", "mypy-boto3-cloudhsmv2 (>=1.27.0,<1.28.0)", "mypy-boto3-cloudsearch (>=1.27.0,<1.28.0)", "mypy-boto3-cloudsearchdomain (>=1.27.0,<1.28.0)", "mypy-boto3-cloudtrail (>=1.27.0,<1.28.0)", "mypy-boto3-cloudtrail-data (>=1.27.0,<1.28.0)", "mypy-boto3-cloudwatch (>=1.27.0,<1.28.0)", "mypy-boto3-codeartifact (>=1.27.0,<1.28.0)", "mypy-boto3-codebuild (>=1.27.0,<1.28.0)", "mypy-boto3-codecatalyst (>=1.27.0,<1.28.0)", "mypy-boto3-codecommit (>=1.27.0,<1.28.0)", "mypy-boto3-codedeploy (>=1.27.0,<1.28.0)", "mypy-boto3-codeguru-reviewer (>=1.27.0,<1.28.0)", "mypy-boto3-codeguru-security (>=1.27.0,<1.28.0)", "mypy-boto3-codeguruprofiler (>=1.27.0,<1.28.0)", "mypy-boto3-codepipeline (>=1.27.0,<1.28.0)", "mypy-boto3-codestar (>=1.27.0,<1.28.0)", "mypy-boto3-codestar-connections (>=1.27.0,<1.28.0)", "mypy-boto3-codestar-notifications (>=1.27.0,<1.28.0)", "mypy-boto3-cognito-identity (>=1.27.0,<1.28.0)", "mypy-boto3-cognito-idp (>=1.27.0,<1.28.0)", "mypy-boto3-cognito-sync 
(>=1.27.0,<1.28.0)", "mypy-boto3-comprehend (>=1.27.0,<1.28.0)", "mypy-boto3-comprehendmedical (>=1.27.0,<1.28.0)", "mypy-boto3-compute-optimizer (>=1.27.0,<1.28.0)", "mypy-boto3-config (>=1.27.0,<1.28.0)", "mypy-boto3-connect (>=1.27.0,<1.28.0)", "mypy-boto3-connect-contact-lens (>=1.27.0,<1.28.0)", "mypy-boto3-connectcampaigns (>=1.27.0,<1.28.0)", "mypy-boto3-connectcases (>=1.27.0,<1.28.0)", "mypy-boto3-connectparticipant (>=1.27.0,<1.28.0)", "mypy-boto3-controltower (>=1.27.0,<1.28.0)", "mypy-boto3-cur (>=1.27.0,<1.28.0)", "mypy-boto3-customer-profiles (>=1.27.0,<1.28.0)", "mypy-boto3-databrew (>=1.27.0,<1.28.0)", "mypy-boto3-dataexchange (>=1.27.0,<1.28.0)", "mypy-boto3-datapipeline (>=1.27.0,<1.28.0)", "mypy-boto3-datasync (>=1.27.0,<1.28.0)", "mypy-boto3-dax (>=1.27.0,<1.28.0)", "mypy-boto3-detective (>=1.27.0,<1.28.0)", "mypy-boto3-devicefarm (>=1.27.0,<1.28.0)", "mypy-boto3-devops-guru (>=1.27.0,<1.28.0)", "mypy-boto3-directconnect (>=1.27.0,<1.28.0)", "mypy-boto3-discovery (>=1.27.0,<1.28.0)", "mypy-boto3-dlm (>=1.27.0,<1.28.0)", "mypy-boto3-dms (>=1.27.0,<1.28.0)", "mypy-boto3-docdb (>=1.27.0,<1.28.0)", "mypy-boto3-docdb-elastic (>=1.27.0,<1.28.0)", "mypy-boto3-drs (>=1.27.0,<1.28.0)", "mypy-boto3-ds (>=1.27.0,<1.28.0)", "mypy-boto3-dynamodb (>=1.27.0,<1.28.0)", "mypy-boto3-dynamodbstreams (>=1.27.0,<1.28.0)", "mypy-boto3-ebs (>=1.27.0,<1.28.0)", "mypy-boto3-ec2 (>=1.27.0,<1.28.0)", "mypy-boto3-ec2-instance-connect (>=1.27.0,<1.28.0)", "mypy-boto3-ecr (>=1.27.0,<1.28.0)", "mypy-boto3-ecr-public (>=1.27.0,<1.28.0)", "mypy-boto3-ecs (>=1.27.0,<1.28.0)", "mypy-boto3-efs (>=1.27.0,<1.28.0)", "mypy-boto3-eks (>=1.27.0,<1.28.0)", "mypy-boto3-elastic-inference (>=1.27.0,<1.28.0)", "mypy-boto3-elasticache (>=1.27.0,<1.28.0)", "mypy-boto3-elasticbeanstalk (>=1.27.0,<1.28.0)", "mypy-boto3-elastictranscoder (>=1.27.0,<1.28.0)", "mypy-boto3-elb (>=1.27.0,<1.28.0)", "mypy-boto3-elbv2 (>=1.27.0,<1.28.0)", "mypy-boto3-emr (>=1.27.0,<1.28.0)", "mypy-boto3-emr-containers (>=1.27.0,<1.28.0)", "mypy-boto3-emr-serverless (>=1.27.0,<1.28.0)", "mypy-boto3-es (>=1.27.0,<1.28.0)", "mypy-boto3-events (>=1.27.0,<1.28.0)", "mypy-boto3-evidently (>=1.27.0,<1.28.0)", "mypy-boto3-finspace (>=1.27.0,<1.28.0)", "mypy-boto3-finspace-data (>=1.27.0,<1.28.0)", "mypy-boto3-firehose (>=1.27.0,<1.28.0)", "mypy-boto3-fis (>=1.27.0,<1.28.0)", "mypy-boto3-fms (>=1.27.0,<1.28.0)", "mypy-boto3-forecast (>=1.27.0,<1.28.0)", "mypy-boto3-forecastquery (>=1.27.0,<1.28.0)", "mypy-boto3-frauddetector (>=1.27.0,<1.28.0)", "mypy-boto3-fsx (>=1.27.0,<1.28.0)", "mypy-boto3-gamelift (>=1.27.0,<1.28.0)", "mypy-boto3-gamesparks (>=1.27.0,<1.28.0)", "mypy-boto3-glacier (>=1.27.0,<1.28.0)", "mypy-boto3-globalaccelerator (>=1.27.0,<1.28.0)", "mypy-boto3-glue (>=1.27.0,<1.28.0)", "mypy-boto3-grafana (>=1.27.0,<1.28.0)", "mypy-boto3-greengrass (>=1.27.0,<1.28.0)", "mypy-boto3-greengrassv2 (>=1.27.0,<1.28.0)", "mypy-boto3-groundstation (>=1.27.0,<1.28.0)", "mypy-boto3-guardduty (>=1.27.0,<1.28.0)", "mypy-boto3-health (>=1.27.0,<1.28.0)", "mypy-boto3-healthlake (>=1.27.0,<1.28.0)", "mypy-boto3-honeycode (>=1.27.0,<1.28.0)", "mypy-boto3-iam (>=1.27.0,<1.28.0)", "mypy-boto3-identitystore (>=1.27.0,<1.28.0)", "mypy-boto3-imagebuilder (>=1.27.0,<1.28.0)", "mypy-boto3-importexport (>=1.27.0,<1.28.0)", "mypy-boto3-inspector (>=1.27.0,<1.28.0)", "mypy-boto3-inspector2 (>=1.27.0,<1.28.0)", "mypy-boto3-internetmonitor (>=1.27.0,<1.28.0)", "mypy-boto3-iot (>=1.27.0,<1.28.0)", "mypy-boto3-iot-data (>=1.27.0,<1.28.0)", "mypy-boto3-iot-jobs-data 
(>=1.27.0,<1.28.0)", "mypy-boto3-iot-roborunner (>=1.27.0,<1.28.0)", "mypy-boto3-iot1click-devices (>=1.27.0,<1.28.0)", "mypy-boto3-iot1click-projects (>=1.27.0,<1.28.0)", "mypy-boto3-iotanalytics (>=1.27.0,<1.28.0)", "mypy-boto3-iotdeviceadvisor (>=1.27.0,<1.28.0)", "mypy-boto3-iotevents (>=1.27.0,<1.28.0)", "mypy-boto3-iotevents-data (>=1.27.0,<1.28.0)", "mypy-boto3-iotfleethub (>=1.27.0,<1.28.0)", "mypy-boto3-iotfleetwise (>=1.27.0,<1.28.0)", "mypy-boto3-iotsecuretunneling (>=1.27.0,<1.28.0)", "mypy-boto3-iotsitewise (>=1.27.0,<1.28.0)", "mypy-boto3-iotthingsgraph (>=1.27.0,<1.28.0)", "mypy-boto3-iottwinmaker (>=1.27.0,<1.28.0)", "mypy-boto3-iotwireless (>=1.27.0,<1.28.0)", "mypy-boto3-ivs (>=1.27.0,<1.28.0)", "mypy-boto3-ivs-realtime (>=1.27.0,<1.28.0)", "mypy-boto3-ivschat (>=1.27.0,<1.28.0)", "mypy-boto3-kafka (>=1.27.0,<1.28.0)", "mypy-boto3-kafkaconnect (>=1.27.0,<1.28.0)", "mypy-boto3-kendra (>=1.27.0,<1.28.0)", "mypy-boto3-kendra-ranking (>=1.27.0,<1.28.0)", "mypy-boto3-keyspaces (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-archived-media (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-media (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-signaling (>=1.27.0,<1.28.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.27.0,<1.28.0)", "mypy-boto3-kinesisanalytics (>=1.27.0,<1.28.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.27.0,<1.28.0)", "mypy-boto3-kinesisvideo (>=1.27.0,<1.28.0)", "mypy-boto3-kms (>=1.27.0,<1.28.0)", "mypy-boto3-lakeformation (>=1.27.0,<1.28.0)", "mypy-boto3-lambda (>=1.27.0,<1.28.0)", "mypy-boto3-lex-models (>=1.27.0,<1.28.0)", "mypy-boto3-lex-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-lexv2-models (>=1.27.0,<1.28.0)", "mypy-boto3-lexv2-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-license-manager (>=1.27.0,<1.28.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.27.0,<1.28.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.27.0,<1.28.0)", "mypy-boto3-lightsail (>=1.27.0,<1.28.0)", "mypy-boto3-location (>=1.27.0,<1.28.0)", "mypy-boto3-logs (>=1.27.0,<1.28.0)", "mypy-boto3-lookoutequipment (>=1.27.0,<1.28.0)", "mypy-boto3-lookoutmetrics (>=1.27.0,<1.28.0)", "mypy-boto3-lookoutvision (>=1.27.0,<1.28.0)", "mypy-boto3-m2 (>=1.27.0,<1.28.0)", "mypy-boto3-machinelearning (>=1.27.0,<1.28.0)", "mypy-boto3-macie (>=1.27.0,<1.28.0)", "mypy-boto3-macie2 (>=1.27.0,<1.28.0)", "mypy-boto3-managedblockchain (>=1.27.0,<1.28.0)", "mypy-boto3-marketplace-catalog (>=1.27.0,<1.28.0)", "mypy-boto3-marketplace-entitlement (>=1.27.0,<1.28.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.27.0,<1.28.0)", "mypy-boto3-mediaconnect (>=1.27.0,<1.28.0)", "mypy-boto3-mediaconvert (>=1.27.0,<1.28.0)", "mypy-boto3-medialive (>=1.27.0,<1.28.0)", "mypy-boto3-mediapackage (>=1.27.0,<1.28.0)", "mypy-boto3-mediapackage-vod (>=1.27.0,<1.28.0)", "mypy-boto3-mediapackagev2 (>=1.27.0,<1.28.0)", "mypy-boto3-mediastore (>=1.27.0,<1.28.0)", "mypy-boto3-mediastore-data (>=1.27.0,<1.28.0)", "mypy-boto3-mediatailor (>=1.27.0,<1.28.0)", "mypy-boto3-memorydb (>=1.27.0,<1.28.0)", "mypy-boto3-meteringmarketplace (>=1.27.0,<1.28.0)", "mypy-boto3-mgh (>=1.27.0,<1.28.0)", "mypy-boto3-mgn (>=1.27.0,<1.28.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.27.0,<1.28.0)", "mypy-boto3-migrationhub-config (>=1.27.0,<1.28.0)", "mypy-boto3-migrationhuborchestrator (>=1.27.0,<1.28.0)", "mypy-boto3-migrationhubstrategy (>=1.27.0,<1.28.0)", "mypy-boto3-mobile (>=1.27.0,<1.28.0)", "mypy-boto3-mq (>=1.27.0,<1.28.0)", "mypy-boto3-mturk (>=1.27.0,<1.28.0)", "mypy-boto3-mwaa 
(>=1.27.0,<1.28.0)", "mypy-boto3-neptune (>=1.27.0,<1.28.0)", "mypy-boto3-network-firewall (>=1.27.0,<1.28.0)", "mypy-boto3-networkmanager (>=1.27.0,<1.28.0)", "mypy-boto3-nimble (>=1.27.0,<1.28.0)", "mypy-boto3-oam (>=1.27.0,<1.28.0)", "mypy-boto3-omics (>=1.27.0,<1.28.0)", "mypy-boto3-opensearch (>=1.27.0,<1.28.0)", "mypy-boto3-opensearchserverless (>=1.27.0,<1.28.0)", "mypy-boto3-opsworks (>=1.27.0,<1.28.0)", "mypy-boto3-opsworkscm (>=1.27.0,<1.28.0)", "mypy-boto3-organizations (>=1.27.0,<1.28.0)", "mypy-boto3-osis (>=1.27.0,<1.28.0)", "mypy-boto3-outposts (>=1.27.0,<1.28.0)", "mypy-boto3-panorama (>=1.27.0,<1.28.0)", "mypy-boto3-payment-cryptography (>=1.27.0,<1.28.0)", "mypy-boto3-payment-cryptography-data (>=1.27.0,<1.28.0)", "mypy-boto3-personalize (>=1.27.0,<1.28.0)", "mypy-boto3-personalize-events (>=1.27.0,<1.28.0)", "mypy-boto3-personalize-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-pi (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint-email (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint-sms-voice (>=1.27.0,<1.28.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.27.0,<1.28.0)", "mypy-boto3-pipes (>=1.27.0,<1.28.0)", "mypy-boto3-polly (>=1.27.0,<1.28.0)", "mypy-boto3-pricing (>=1.27.0,<1.28.0)", "mypy-boto3-privatenetworks (>=1.27.0,<1.28.0)", "mypy-boto3-proton (>=1.27.0,<1.28.0)", "mypy-boto3-qldb (>=1.27.0,<1.28.0)", "mypy-boto3-qldb-session (>=1.27.0,<1.28.0)", "mypy-boto3-quicksight (>=1.27.0,<1.28.0)", "mypy-boto3-ram (>=1.27.0,<1.28.0)", "mypy-boto3-rbin (>=1.27.0,<1.28.0)", "mypy-boto3-rds (>=1.27.0,<1.28.0)", "mypy-boto3-rds-data (>=1.27.0,<1.28.0)", "mypy-boto3-redshift (>=1.27.0,<1.28.0)", "mypy-boto3-redshift-data (>=1.27.0,<1.28.0)", "mypy-boto3-redshift-serverless (>=1.27.0,<1.28.0)", "mypy-boto3-rekognition (>=1.27.0,<1.28.0)", "mypy-boto3-resiliencehub (>=1.27.0,<1.28.0)", "mypy-boto3-resource-explorer-2 (>=1.27.0,<1.28.0)", "mypy-boto3-resource-groups (>=1.27.0,<1.28.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.27.0,<1.28.0)", "mypy-boto3-robomaker (>=1.27.0,<1.28.0)", "mypy-boto3-rolesanywhere (>=1.27.0,<1.28.0)", "mypy-boto3-route53 (>=1.27.0,<1.28.0)", "mypy-boto3-route53-recovery-cluster (>=1.27.0,<1.28.0)", "mypy-boto3-route53-recovery-control-config (>=1.27.0,<1.28.0)", "mypy-boto3-route53-recovery-readiness (>=1.27.0,<1.28.0)", "mypy-boto3-route53domains (>=1.27.0,<1.28.0)", "mypy-boto3-route53resolver (>=1.27.0,<1.28.0)", "mypy-boto3-rum (>=1.27.0,<1.28.0)", "mypy-boto3-s3 (>=1.27.0,<1.28.0)", "mypy-boto3-s3control (>=1.27.0,<1.28.0)", "mypy-boto3-s3outposts (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-edge (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-geospatial (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-metrics (>=1.27.0,<1.28.0)", "mypy-boto3-sagemaker-runtime (>=1.27.0,<1.28.0)", "mypy-boto3-savingsplans (>=1.27.0,<1.28.0)", "mypy-boto3-scheduler (>=1.27.0,<1.28.0)", "mypy-boto3-schemas (>=1.27.0,<1.28.0)", "mypy-boto3-sdb (>=1.27.0,<1.28.0)", "mypy-boto3-secretsmanager (>=1.27.0,<1.28.0)", "mypy-boto3-securityhub (>=1.27.0,<1.28.0)", "mypy-boto3-securitylake (>=1.27.0,<1.28.0)", "mypy-boto3-serverlessrepo (>=1.27.0,<1.28.0)", "mypy-boto3-service-quotas (>=1.27.0,<1.28.0)", "mypy-boto3-servicecatalog (>=1.27.0,<1.28.0)", "mypy-boto3-servicecatalog-appregistry (>=1.27.0,<1.28.0)", "mypy-boto3-servicediscovery (>=1.27.0,<1.28.0)", "mypy-boto3-ses (>=1.27.0,<1.28.0)", 
"mypy-boto3-sesv2 (>=1.27.0,<1.28.0)", "mypy-boto3-shield (>=1.27.0,<1.28.0)", "mypy-boto3-signer (>=1.27.0,<1.28.0)", "mypy-boto3-simspaceweaver (>=1.27.0,<1.28.0)", "mypy-boto3-sms (>=1.27.0,<1.28.0)", "mypy-boto3-sms-voice (>=1.27.0,<1.28.0)", "mypy-boto3-snow-device-management (>=1.27.0,<1.28.0)", "mypy-boto3-snowball (>=1.27.0,<1.28.0)", "mypy-boto3-sns (>=1.27.0,<1.28.0)", "mypy-boto3-sqs (>=1.27.0,<1.28.0)", "mypy-boto3-ssm (>=1.27.0,<1.28.0)", "mypy-boto3-ssm-contacts (>=1.27.0,<1.28.0)", "mypy-boto3-ssm-incidents (>=1.27.0,<1.28.0)", "mypy-boto3-ssm-sap (>=1.27.0,<1.28.0)", "mypy-boto3-sso (>=1.27.0,<1.28.0)", "mypy-boto3-sso-admin (>=1.27.0,<1.28.0)", "mypy-boto3-sso-oidc (>=1.27.0,<1.28.0)", "mypy-boto3-stepfunctions (>=1.27.0,<1.28.0)", "mypy-boto3-storagegateway (>=1.27.0,<1.28.0)", "mypy-boto3-sts (>=1.27.0,<1.28.0)", "mypy-boto3-support (>=1.27.0,<1.28.0)", "mypy-boto3-support-app (>=1.27.0,<1.28.0)", "mypy-boto3-swf (>=1.27.0,<1.28.0)", "mypy-boto3-synthetics (>=1.27.0,<1.28.0)", "mypy-boto3-textract (>=1.27.0,<1.28.0)", "mypy-boto3-timestream-query (>=1.27.0,<1.28.0)", "mypy-boto3-timestream-write (>=1.27.0,<1.28.0)", "mypy-boto3-tnb (>=1.27.0,<1.28.0)", "mypy-boto3-transcribe (>=1.27.0,<1.28.0)", "mypy-boto3-transfer (>=1.27.0,<1.28.0)", "mypy-boto3-translate (>=1.27.0,<1.28.0)", "mypy-boto3-verifiedpermissions (>=1.27.0,<1.28.0)", "mypy-boto3-voice-id (>=1.27.0,<1.28.0)", "mypy-boto3-vpc-lattice (>=1.27.0,<1.28.0)", "mypy-boto3-waf (>=1.27.0,<1.28.0)", "mypy-boto3-waf-regional (>=1.27.0,<1.28.0)", "mypy-boto3-wafv2 (>=1.27.0,<1.28.0)", "mypy-boto3-wellarchitected (>=1.27.0,<1.28.0)", "mypy-boto3-wisdom (>=1.27.0,<1.28.0)", "mypy-boto3-workdocs (>=1.27.0,<1.28.0)", "mypy-boto3-worklink (>=1.27.0,<1.28.0)", "mypy-boto3-workmail (>=1.27.0,<1.28.0)", "mypy-boto3-workmailmessageflow (>=1.27.0,<1.28.0)", "mypy-boto3-workspaces (>=1.27.0,<1.28.0)", "mypy-boto3-workspaces-web (>=1.27.0,<1.28.0)", "mypy-boto3-xray (>=1.27.0,<1.28.0)"] -amp = ["mypy-boto3-amp (>=1.27.0,<1.28.0)"] -amplify = ["mypy-boto3-amplify (>=1.27.0,<1.28.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.27.0,<1.28.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.27.0,<1.28.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.27.0,<1.28.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.27.0,<1.28.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.27.0,<1.28.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.27.0,<1.28.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.27.0,<1.28.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.27.0,<1.28.0)"] -appflow = ["mypy-boto3-appflow (>=1.27.0,<1.28.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.27.0,<1.28.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.27.0,<1.28.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.27.0,<1.28.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.27.0,<1.28.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.27.0,<1.28.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.27.0,<1.28.0)"] -appstream = ["mypy-boto3-appstream (>=1.27.0,<1.28.0)"] -appsync = ["mypy-boto3-appsync (>=1.27.0,<1.28.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.27.0,<1.28.0)"] -athena = ["mypy-boto3-athena (>=1.27.0,<1.28.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.27.0,<1.28.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.27.0,<1.28.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.27.0,<1.28.0)"] -backup = ["mypy-boto3-backup 
(>=1.27.0,<1.28.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.27.0,<1.28.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.27.0,<1.28.0)"] -batch = ["mypy-boto3-batch (>=1.27.0,<1.28.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.27.0,<1.28.0)"] -boto3 = ["boto3 (==1.27.0)", "botocore (==1.30.0)"] -braket = ["mypy-boto3-braket (>=1.27.0,<1.28.0)"] -budgets = ["mypy-boto3-budgets (>=1.27.0,<1.28.0)"] -ce = ["mypy-boto3-ce (>=1.27.0,<1.28.0)"] -chime = ["mypy-boto3-chime (>=1.27.0,<1.28.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.27.0,<1.28.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.27.0,<1.28.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.27.0,<1.28.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.27.0,<1.28.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.27.0,<1.28.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.27.0,<1.28.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.27.0,<1.28.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.27.0,<1.28.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.27.0,<1.28.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.27.0,<1.28.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.27.0,<1.28.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.27.0,<1.28.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.27.0,<1.28.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.27.0,<1.28.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.27.0,<1.28.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.27.0,<1.28.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.27.0,<1.28.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.27.0,<1.28.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.27.0,<1.28.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.27.0,<1.28.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.27.0,<1.28.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.27.0,<1.28.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.27.0,<1.28.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.27.0,<1.28.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.27.0,<1.28.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.27.0,<1.28.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.27.0,<1.28.0)"] -codestar = ["mypy-boto3-codestar (>=1.27.0,<1.28.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.27.0,<1.28.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.27.0,<1.28.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.27.0,<1.28.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.27.0,<1.28.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.27.0,<1.28.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.27.0,<1.28.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.27.0,<1.28.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.27.0,<1.28.0)"] -config = ["mypy-boto3-config (>=1.27.0,<1.28.0)"] -connect = ["mypy-boto3-connect (>=1.27.0,<1.28.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.27.0,<1.28.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.27.0,<1.28.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.27.0,<1.28.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.27.0,<1.28.0)"] -controltower = ["mypy-boto3-controltower (>=1.27.0,<1.28.0)"] -cur = ["mypy-boto3-cur (>=1.27.0,<1.28.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.27.0,<1.28.0)"] -databrew = ["mypy-boto3-databrew (>=1.27.0,<1.28.0)"] -dataexchange = 
["mypy-boto3-dataexchange (>=1.27.0,<1.28.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.27.0,<1.28.0)"] -datasync = ["mypy-boto3-datasync (>=1.27.0,<1.28.0)"] -dax = ["mypy-boto3-dax (>=1.27.0,<1.28.0)"] -detective = ["mypy-boto3-detective (>=1.27.0,<1.28.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.27.0,<1.28.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.27.0,<1.28.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.27.0,<1.28.0)"] -discovery = ["mypy-boto3-discovery (>=1.27.0,<1.28.0)"] -dlm = ["mypy-boto3-dlm (>=1.27.0,<1.28.0)"] -dms = ["mypy-boto3-dms (>=1.27.0,<1.28.0)"] -docdb = ["mypy-boto3-docdb (>=1.27.0,<1.28.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.27.0,<1.28.0)"] -drs = ["mypy-boto3-drs (>=1.27.0,<1.28.0)"] -ds = ["mypy-boto3-ds (>=1.27.0,<1.28.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.27.0,<1.28.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.27.0,<1.28.0)"] -ebs = ["mypy-boto3-ebs (>=1.27.0,<1.28.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.27.0,<1.28.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.27.0,<1.28.0)"] -ecr = ["mypy-boto3-ecr (>=1.27.0,<1.28.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.27.0,<1.28.0)"] -ecs = ["mypy-boto3-ecs (>=1.27.0,<1.28.0)"] -efs = ["mypy-boto3-efs (>=1.27.0,<1.28.0)"] -eks = ["mypy-boto3-eks (>=1.27.0,<1.28.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.27.0,<1.28.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.27.0,<1.28.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.27.0,<1.28.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.27.0,<1.28.0)"] -elb = ["mypy-boto3-elb (>=1.27.0,<1.28.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.27.0,<1.28.0)"] -emr = ["mypy-boto3-emr (>=1.27.0,<1.28.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.27.0,<1.28.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.27.0,<1.28.0)"] -es = ["mypy-boto3-es (>=1.27.0,<1.28.0)"] -essential = ["mypy-boto3-cloudformation (>=1.27.0,<1.28.0)", "mypy-boto3-dynamodb (>=1.27.0,<1.28.0)", "mypy-boto3-ec2 (>=1.27.0,<1.28.0)", "mypy-boto3-lambda (>=1.27.0,<1.28.0)", "mypy-boto3-rds (>=1.27.0,<1.28.0)", "mypy-boto3-s3 (>=1.27.0,<1.28.0)", "mypy-boto3-sqs (>=1.27.0,<1.28.0)"] -events = ["mypy-boto3-events (>=1.27.0,<1.28.0)"] -evidently = ["mypy-boto3-evidently (>=1.27.0,<1.28.0)"] -finspace = ["mypy-boto3-finspace (>=1.27.0,<1.28.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.27.0,<1.28.0)"] -firehose = ["mypy-boto3-firehose (>=1.27.0,<1.28.0)"] -fis = ["mypy-boto3-fis (>=1.27.0,<1.28.0)"] -fms = ["mypy-boto3-fms (>=1.27.0,<1.28.0)"] -forecast = ["mypy-boto3-forecast (>=1.27.0,<1.28.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.27.0,<1.28.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.27.0,<1.28.0)"] -fsx = ["mypy-boto3-fsx (>=1.27.0,<1.28.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.27.0,<1.28.0)"] -gamesparks = ["mypy-boto3-gamesparks (>=1.27.0,<1.28.0)"] -glacier = ["mypy-boto3-glacier (>=1.27.0,<1.28.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.27.0,<1.28.0)"] -glue = ["mypy-boto3-glue (>=1.27.0,<1.28.0)"] -grafana = ["mypy-boto3-grafana (>=1.27.0,<1.28.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.27.0,<1.28.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.27.0,<1.28.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.27.0,<1.28.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.27.0,<1.28.0)"] -health = ["mypy-boto3-health (>=1.27.0,<1.28.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.27.0,<1.28.0)"] -honeycode = ["mypy-boto3-honeycode 
(>=1.27.0,<1.28.0)"] -iam = ["mypy-boto3-iam (>=1.27.0,<1.28.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.27.0,<1.28.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.27.0,<1.28.0)"] -importexport = ["mypy-boto3-importexport (>=1.27.0,<1.28.0)"] -inspector = ["mypy-boto3-inspector (>=1.27.0,<1.28.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.27.0,<1.28.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.27.0,<1.28.0)"] -iot = ["mypy-boto3-iot (>=1.27.0,<1.28.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.27.0,<1.28.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.27.0,<1.28.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.27.0,<1.28.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.27.0,<1.28.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.27.0,<1.28.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.27.0,<1.28.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.27.0,<1.28.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.27.0,<1.28.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.27.0,<1.28.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.27.0,<1.28.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.27.0,<1.28.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.27.0,<1.28.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.27.0,<1.28.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.27.0,<1.28.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.27.0,<1.28.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.27.0,<1.28.0)"] -ivs = ["mypy-boto3-ivs (>=1.27.0,<1.28.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.27.0,<1.28.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.27.0,<1.28.0)"] -kafka = ["mypy-boto3-kafka (>=1.27.0,<1.28.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.27.0,<1.28.0)"] -kendra = ["mypy-boto3-kendra (>=1.27.0,<1.28.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.27.0,<1.28.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.27.0,<1.28.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.27.0,<1.28.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.27.0,<1.28.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.27.0,<1.28.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.27.0,<1.28.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.27.0,<1.28.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.27.0,<1.28.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.27.0,<1.28.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.27.0,<1.28.0)"] -kms = ["mypy-boto3-kms (>=1.27.0,<1.28.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.27.0,<1.28.0)"] -lambda = ["mypy-boto3-lambda (>=1.27.0,<1.28.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.27.0,<1.28.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.27.0,<1.28.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.27.0,<1.28.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.27.0,<1.28.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.27.0,<1.28.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.27.0,<1.28.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.27.0,<1.28.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.27.0,<1.28.0)"] -location = ["mypy-boto3-location (>=1.27.0,<1.28.0)"] -logs = ["mypy-boto3-logs (>=1.27.0,<1.28.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.27.0,<1.28.0)"] -lookoutmetrics = 
["mypy-boto3-lookoutmetrics (>=1.27.0,<1.28.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.27.0,<1.28.0)"] -m2 = ["mypy-boto3-m2 (>=1.27.0,<1.28.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.27.0,<1.28.0)"] -macie = ["mypy-boto3-macie (>=1.27.0,<1.28.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.27.0,<1.28.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.27.0,<1.28.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.27.0,<1.28.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.27.0,<1.28.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.27.0,<1.28.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.27.0,<1.28.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.27.0,<1.28.0)"] -medialive = ["mypy-boto3-medialive (>=1.27.0,<1.28.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.27.0,<1.28.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.27.0,<1.28.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.27.0,<1.28.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.27.0,<1.28.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.27.0,<1.28.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.27.0,<1.28.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.27.0,<1.28.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.27.0,<1.28.0)"] -mgh = ["mypy-boto3-mgh (>=1.27.0,<1.28.0)"] -mgn = ["mypy-boto3-mgn (>=1.27.0,<1.28.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.27.0,<1.28.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.27.0,<1.28.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.27.0,<1.28.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.27.0,<1.28.0)"] -mobile = ["mypy-boto3-mobile (>=1.27.0,<1.28.0)"] -mq = ["mypy-boto3-mq (>=1.27.0,<1.28.0)"] -mturk = ["mypy-boto3-mturk (>=1.27.0,<1.28.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.27.0,<1.28.0)"] -neptune = ["mypy-boto3-neptune (>=1.27.0,<1.28.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.27.0,<1.28.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.27.0,<1.28.0)"] -nimble = ["mypy-boto3-nimble (>=1.27.0,<1.28.0)"] -oam = ["mypy-boto3-oam (>=1.27.0,<1.28.0)"] -omics = ["mypy-boto3-omics (>=1.27.0,<1.28.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.27.0,<1.28.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.27.0,<1.28.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.27.0,<1.28.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.27.0,<1.28.0)"] -organizations = ["mypy-boto3-organizations (>=1.27.0,<1.28.0)"] -osis = ["mypy-boto3-osis (>=1.27.0,<1.28.0)"] -outposts = ["mypy-boto3-outposts (>=1.27.0,<1.28.0)"] -panorama = ["mypy-boto3-panorama (>=1.27.0,<1.28.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.27.0,<1.28.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.27.0,<1.28.0)"] -personalize = ["mypy-boto3-personalize (>=1.27.0,<1.28.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.27.0,<1.28.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.27.0,<1.28.0)"] -pi = ["mypy-boto3-pi (>=1.27.0,<1.28.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.27.0,<1.28.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.27.0,<1.28.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.27.0,<1.28.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.27.0,<1.28.0)"] -pipes = ["mypy-boto3-pipes (>=1.27.0,<1.28.0)"] -polly = 
["mypy-boto3-polly (>=1.27.0,<1.28.0)"] -pricing = ["mypy-boto3-pricing (>=1.27.0,<1.28.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.27.0,<1.28.0)"] -proton = ["mypy-boto3-proton (>=1.27.0,<1.28.0)"] -qldb = ["mypy-boto3-qldb (>=1.27.0,<1.28.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.27.0,<1.28.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.27.0,<1.28.0)"] -ram = ["mypy-boto3-ram (>=1.27.0,<1.28.0)"] -rbin = ["mypy-boto3-rbin (>=1.27.0,<1.28.0)"] -rds = ["mypy-boto3-rds (>=1.27.0,<1.28.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.27.0,<1.28.0)"] -redshift = ["mypy-boto3-redshift (>=1.27.0,<1.28.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.27.0,<1.28.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.27.0,<1.28.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.27.0,<1.28.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.27.0,<1.28.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.27.0,<1.28.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.27.0,<1.28.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.27.0,<1.28.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.27.0,<1.28.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.27.0,<1.28.0)"] -route53 = ["mypy-boto3-route53 (>=1.27.0,<1.28.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.27.0,<1.28.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.27.0,<1.28.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.27.0,<1.28.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.27.0,<1.28.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.27.0,<1.28.0)"] -rum = ["mypy-boto3-rum (>=1.27.0,<1.28.0)"] -s3 = ["mypy-boto3-s3 (>=1.27.0,<1.28.0)"] -s3control = ["mypy-boto3-s3control (>=1.27.0,<1.28.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.27.0,<1.28.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.27.0,<1.28.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.27.0,<1.28.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.27.0,<1.28.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.27.0,<1.28.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.27.0,<1.28.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.27.0,<1.28.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.27.0,<1.28.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.27.0,<1.28.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.27.0,<1.28.0)"] -schemas = ["mypy-boto3-schemas (>=1.27.0,<1.28.0)"] -sdb = ["mypy-boto3-sdb (>=1.27.0,<1.28.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.27.0,<1.28.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.27.0,<1.28.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.27.0,<1.28.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.27.0,<1.28.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.27.0,<1.28.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.27.0,<1.28.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.27.0,<1.28.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.27.0,<1.28.0)"] -ses = ["mypy-boto3-ses (>=1.27.0,<1.28.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.27.0,<1.28.0)"] -shield = ["mypy-boto3-shield (>=1.27.0,<1.28.0)"] -signer = ["mypy-boto3-signer (>=1.27.0,<1.28.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.27.0,<1.28.0)"] -sms = ["mypy-boto3-sms (>=1.27.0,<1.28.0)"] -sms-voice = 
["mypy-boto3-sms-voice (>=1.27.0,<1.28.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.27.0,<1.28.0)"] -snowball = ["mypy-boto3-snowball (>=1.27.0,<1.28.0)"] -sns = ["mypy-boto3-sns (>=1.27.0,<1.28.0)"] -sqs = ["mypy-boto3-sqs (>=1.27.0,<1.28.0)"] -ssm = ["mypy-boto3-ssm (>=1.27.0,<1.28.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.27.0,<1.28.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.27.0,<1.28.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.27.0,<1.28.0)"] -sso = ["mypy-boto3-sso (>=1.27.0,<1.28.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.27.0,<1.28.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.27.0,<1.28.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.27.0,<1.28.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.27.0,<1.28.0)"] -sts = ["mypy-boto3-sts (>=1.27.0,<1.28.0)"] -support = ["mypy-boto3-support (>=1.27.0,<1.28.0)"] -support-app = ["mypy-boto3-support-app (>=1.27.0,<1.28.0)"] -swf = ["mypy-boto3-swf (>=1.27.0,<1.28.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.27.0,<1.28.0)"] -textract = ["mypy-boto3-textract (>=1.27.0,<1.28.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.27.0,<1.28.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.27.0,<1.28.0)"] -tnb = ["mypy-boto3-tnb (>=1.27.0,<1.28.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.27.0,<1.28.0)"] -transfer = ["mypy-boto3-transfer (>=1.27.0,<1.28.0)"] -translate = ["mypy-boto3-translate (>=1.27.0,<1.28.0)"] -verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.27.0,<1.28.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.27.0,<1.28.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.27.0,<1.28.0)"] -waf = ["mypy-boto3-waf (>=1.27.0,<1.28.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.27.0,<1.28.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.27.0,<1.28.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.27.0,<1.28.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.27.0,<1.28.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.27.0,<1.28.0)"] -worklink = ["mypy-boto3-worklink (>=1.27.0,<1.28.0)"] -workmail = ["mypy-boto3-workmail (>=1.27.0,<1.28.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.27.0,<1.28.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.27.0,<1.28.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.27.0,<1.28.0)"] -xray = ["mypy-boto3-xray (>=1.27.0,<1.28.0)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)"] +account = ["mypy-boto3-account (>=1.28.0,<1.29.0)"] +acm = ["mypy-boto3-acm (>=1.28.0,<1.29.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.28.0,<1.29.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)", "mypy-boto3-account (>=1.28.0,<1.29.0)", "mypy-boto3-acm (>=1.28.0,<1.29.0)", "mypy-boto3-acm-pca (>=1.28.0,<1.29.0)", "mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)", "mypy-boto3-amp (>=1.28.0,<1.29.0)", "mypy-boto3-amplify (>=1.28.0,<1.29.0)", "mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)", "mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)", "mypy-boto3-apigateway (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)", "mypy-boto3-appconfig (>=1.28.0,<1.29.0)", "mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)", "mypy-boto3-appfabric (>=1.28.0,<1.29.0)", "mypy-boto3-appflow (>=1.28.0,<1.29.0)", "mypy-boto3-appintegrations (>=1.28.0,<1.29.0)", "mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-application-insights (>=1.28.0,<1.29.0)", "mypy-boto3-applicationcostprofiler 
(>=1.28.0,<1.29.0)", "mypy-boto3-appmesh (>=1.28.0,<1.29.0)", "mypy-boto3-apprunner (>=1.28.0,<1.29.0)", "mypy-boto3-appstream (>=1.28.0,<1.29.0)", "mypy-boto3-appsync (>=1.28.0,<1.29.0)", "mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)", "mypy-boto3-athena (>=1.28.0,<1.29.0)", "mypy-boto3-auditmanager (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)", "mypy-boto3-backup (>=1.28.0,<1.29.0)", "mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)", "mypy-boto3-backupstorage (>=1.28.0,<1.29.0)", "mypy-boto3-batch (>=1.28.0,<1.29.0)", "mypy-boto3-billingconductor (>=1.28.0,<1.29.0)", "mypy-boto3-braket (>=1.28.0,<1.29.0)", "mypy-boto3-budgets (>=1.28.0,<1.29.0)", "mypy-boto3-ce (>=1.28.0,<1.29.0)", "mypy-boto3-chime (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)", "mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)", "mypy-boto3-cloud9 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)", "mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)", "mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-cloudfront (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)", "mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)", "mypy-boto3-codeartifact (>=1.28.0,<1.29.0)", "mypy-boto3-codebuild (>=1.28.0,<1.29.0)", "mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)", "mypy-boto3-codecommit (>=1.28.0,<1.29.0)", "mypy-boto3-codedeploy (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)", "mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)", "mypy-boto3-codepipeline (>=1.28.0,<1.29.0)", "mypy-boto3-codestar (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)", "mypy-boto3-comprehend (>=1.28.0,<1.29.0)", "mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)", "mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)", "mypy-boto3-config (>=1.28.0,<1.29.0)", "mypy-boto3-connect (>=1.28.0,<1.29.0)", "mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)", "mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)", "mypy-boto3-connectcases (>=1.28.0,<1.29.0)", "mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)", "mypy-boto3-controltower (>=1.28.0,<1.29.0)", "mypy-boto3-cur (>=1.28.0,<1.29.0)", "mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)", "mypy-boto3-databrew (>=1.28.0,<1.29.0)", "mypy-boto3-dataexchange (>=1.28.0,<1.29.0)", "mypy-boto3-datapipeline (>=1.28.0,<1.29.0)", "mypy-boto3-datasync (>=1.28.0,<1.29.0)", "mypy-boto3-dax (>=1.28.0,<1.29.0)", "mypy-boto3-detective (>=1.28.0,<1.29.0)", "mypy-boto3-devicefarm (>=1.28.0,<1.29.0)", "mypy-boto3-devops-guru (>=1.28.0,<1.29.0)", "mypy-boto3-directconnect (>=1.28.0,<1.29.0)", "mypy-boto3-discovery (>=1.28.0,<1.29.0)", "mypy-boto3-dlm (>=1.28.0,<1.29.0)", "mypy-boto3-dms (>=1.28.0,<1.29.0)", "mypy-boto3-docdb (>=1.28.0,<1.29.0)", "mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)", "mypy-boto3-drs 
(>=1.28.0,<1.29.0)", "mypy-boto3-ds (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)", "mypy-boto3-ebs (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)", "mypy-boto3-ecr (>=1.28.0,<1.29.0)", "mypy-boto3-ecr-public (>=1.28.0,<1.29.0)", "mypy-boto3-ecs (>=1.28.0,<1.29.0)", "mypy-boto3-efs (>=1.28.0,<1.29.0)", "mypy-boto3-eks (>=1.28.0,<1.29.0)", "mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)", "mypy-boto3-elasticache (>=1.28.0,<1.29.0)", "mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)", "mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)", "mypy-boto3-elb (>=1.28.0,<1.29.0)", "mypy-boto3-elbv2 (>=1.28.0,<1.29.0)", "mypy-boto3-emr (>=1.28.0,<1.29.0)", "mypy-boto3-emr-containers (>=1.28.0,<1.29.0)", "mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-entityresolution (>=1.28.0,<1.29.0)", "mypy-boto3-es (>=1.28.0,<1.29.0)", "mypy-boto3-events (>=1.28.0,<1.29.0)", "mypy-boto3-evidently (>=1.28.0,<1.29.0)", "mypy-boto3-finspace (>=1.28.0,<1.29.0)", "mypy-boto3-finspace-data (>=1.28.0,<1.29.0)", "mypy-boto3-firehose (>=1.28.0,<1.29.0)", "mypy-boto3-fis (>=1.28.0,<1.29.0)", "mypy-boto3-fms (>=1.28.0,<1.29.0)", "mypy-boto3-forecast (>=1.28.0,<1.29.0)", "mypy-boto3-forecastquery (>=1.28.0,<1.29.0)", "mypy-boto3-frauddetector (>=1.28.0,<1.29.0)", "mypy-boto3-fsx (>=1.28.0,<1.29.0)", "mypy-boto3-gamelift (>=1.28.0,<1.29.0)", "mypy-boto3-gamesparks (>=1.28.0,<1.29.0)", "mypy-boto3-glacier (>=1.28.0,<1.29.0)", "mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)", "mypy-boto3-glue (>=1.28.0,<1.29.0)", "mypy-boto3-grafana (>=1.28.0,<1.29.0)", "mypy-boto3-greengrass (>=1.28.0,<1.29.0)", "mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)", "mypy-boto3-groundstation (>=1.28.0,<1.29.0)", "mypy-boto3-guardduty (>=1.28.0,<1.29.0)", "mypy-boto3-health (>=1.28.0,<1.29.0)", "mypy-boto3-healthlake (>=1.28.0,<1.29.0)", "mypy-boto3-honeycode (>=1.28.0,<1.29.0)", "mypy-boto3-iam (>=1.28.0,<1.29.0)", "mypy-boto3-identitystore (>=1.28.0,<1.29.0)", "mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)", "mypy-boto3-importexport (>=1.28.0,<1.29.0)", "mypy-boto3-inspector (>=1.28.0,<1.29.0)", "mypy-boto3-inspector2 (>=1.28.0,<1.29.0)", "mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)", "mypy-boto3-iot (>=1.28.0,<1.29.0)", "mypy-boto3-iot-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)", "mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)", "mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)", "mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)", "mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)", "mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)", "mypy-boto3-iotwireless (>=1.28.0,<1.29.0)", "mypy-boto3-ivs (>=1.28.0,<1.29.0)", "mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)", "mypy-boto3-ivschat (>=1.28.0,<1.29.0)", "mypy-boto3-kafka (>=1.28.0,<1.29.0)", "mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-kendra (>=1.28.0,<1.29.0)", "mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)", "mypy-boto3-keyspaces (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)", 
"mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)", "mypy-boto3-kms (>=1.28.0,<1.29.0)", "mypy-boto3-lakeformation (>=1.28.0,<1.29.0)", "mypy-boto3-lambda (>=1.28.0,<1.29.0)", "mypy-boto3-lex-models (>=1.28.0,<1.29.0)", "mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-lightsail (>=1.28.0,<1.29.0)", "mypy-boto3-location (>=1.28.0,<1.29.0)", "mypy-boto3-logs (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)", "mypy-boto3-m2 (>=1.28.0,<1.29.0)", "mypy-boto3-machinelearning (>=1.28.0,<1.29.0)", "mypy-boto3-macie (>=1.28.0,<1.29.0)", "mypy-boto3-macie2 (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconvert (>=1.28.0,<1.29.0)", "mypy-boto3-medialive (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)", "mypy-boto3-mediatailor (>=1.28.0,<1.29.0)", "mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)", "mypy-boto3-memorydb (>=1.28.0,<1.29.0)", "mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)", "mypy-boto3-mgh (>=1.28.0,<1.29.0)", "mypy-boto3-mgn (>=1.28.0,<1.29.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhub-config (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)", "mypy-boto3-mobile (>=1.28.0,<1.29.0)", "mypy-boto3-mq (>=1.28.0,<1.29.0)", "mypy-boto3-mturk (>=1.28.0,<1.29.0)", "mypy-boto3-mwaa (>=1.28.0,<1.29.0)", "mypy-boto3-neptune (>=1.28.0,<1.29.0)", "mypy-boto3-neptunedata (>=1.28.0,<1.29.0)", "mypy-boto3-network-firewall (>=1.28.0,<1.29.0)", "mypy-boto3-networkmanager (>=1.28.0,<1.29.0)", "mypy-boto3-nimble (>=1.28.0,<1.29.0)", "mypy-boto3-oam (>=1.28.0,<1.29.0)", "mypy-boto3-omics (>=1.28.0,<1.29.0)", "mypy-boto3-opensearch (>=1.28.0,<1.29.0)", "mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)", "mypy-boto3-opsworks (>=1.28.0,<1.29.0)", "mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)", "mypy-boto3-organizations (>=1.28.0,<1.29.0)", "mypy-boto3-osis (>=1.28.0,<1.29.0)", "mypy-boto3-outposts (>=1.28.0,<1.29.0)", "mypy-boto3-panorama (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)", "mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)", "mypy-boto3-personalize (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-events (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-pi (>=1.28.0,<1.29.0)", 
"mypy-boto3-pinpoint (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)", "mypy-boto3-pipes (>=1.28.0,<1.29.0)", "mypy-boto3-polly (>=1.28.0,<1.29.0)", "mypy-boto3-pricing (>=1.28.0,<1.29.0)", "mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)", "mypy-boto3-proton (>=1.28.0,<1.29.0)", "mypy-boto3-qldb (>=1.28.0,<1.29.0)", "mypy-boto3-qldb-session (>=1.28.0,<1.29.0)", "mypy-boto3-quicksight (>=1.28.0,<1.29.0)", "mypy-boto3-ram (>=1.28.0,<1.29.0)", "mypy-boto3-rbin (>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-rds-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-rekognition (>=1.28.0,<1.29.0)", "mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)", "mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)", "mypy-boto3-resource-groups (>=1.28.0,<1.29.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)", "mypy-boto3-robomaker (>=1.28.0,<1.29.0)", "mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)", "mypy-boto3-route53 (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-control-config (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)", "mypy-boto3-route53domains (>=1.28.0,<1.29.0)", "mypy-boto3-route53resolver (>=1.28.0,<1.29.0)", "mypy-boto3-rum (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-s3control (>=1.28.0,<1.29.0)", "mypy-boto3-s3outposts (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-savingsplans (>=1.28.0,<1.29.0)", "mypy-boto3-scheduler (>=1.28.0,<1.29.0)", "mypy-boto3-schemas (>=1.28.0,<1.29.0)", "mypy-boto3-sdb (>=1.28.0,<1.29.0)", "mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)", "mypy-boto3-securityhub (>=1.28.0,<1.29.0)", "mypy-boto3-securitylake (>=1.28.0,<1.29.0)", "mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)", "mypy-boto3-service-quotas (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)", "mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)", "mypy-boto3-ses (>=1.28.0,<1.29.0)", "mypy-boto3-sesv2 (>=1.28.0,<1.29.0)", "mypy-boto3-shield (>=1.28.0,<1.29.0)", "mypy-boto3-signer (>=1.28.0,<1.29.0)", "mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)", "mypy-boto3-sms (>=1.28.0,<1.29.0)", "mypy-boto3-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)", "mypy-boto3-snowball (>=1.28.0,<1.29.0)", "mypy-boto3-sns (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)", "mypy-boto3-ssm (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)", "mypy-boto3-sso (>=1.28.0,<1.29.0)", "mypy-boto3-sso-admin (>=1.28.0,<1.29.0)", "mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)", "mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)", "mypy-boto3-storagegateway (>=1.28.0,<1.29.0)", "mypy-boto3-sts (>=1.28.0,<1.29.0)", "mypy-boto3-support (>=1.28.0,<1.29.0)", "mypy-boto3-support-app (>=1.28.0,<1.29.0)", 
"mypy-boto3-swf (>=1.28.0,<1.29.0)", "mypy-boto3-synthetics (>=1.28.0,<1.29.0)", "mypy-boto3-textract (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-query (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-write (>=1.28.0,<1.29.0)", "mypy-boto3-tnb (>=1.28.0,<1.29.0)", "mypy-boto3-transcribe (>=1.28.0,<1.29.0)", "mypy-boto3-transfer (>=1.28.0,<1.29.0)", "mypy-boto3-translate (>=1.28.0,<1.29.0)", "mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)", "mypy-boto3-voice-id (>=1.28.0,<1.29.0)", "mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)", "mypy-boto3-waf (>=1.28.0,<1.29.0)", "mypy-boto3-waf-regional (>=1.28.0,<1.29.0)", "mypy-boto3-wafv2 (>=1.28.0,<1.29.0)", "mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)", "mypy-boto3-wisdom (>=1.28.0,<1.29.0)", "mypy-boto3-workdocs (>=1.28.0,<1.29.0)", "mypy-boto3-worklink (>=1.28.0,<1.29.0)", "mypy-boto3-workmail (>=1.28.0,<1.29.0)", "mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)", "mypy-boto3-workspaces (>=1.28.0,<1.29.0)", "mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)", "mypy-boto3-xray (>=1.28.0,<1.29.0)"] +amp = ["mypy-boto3-amp (>=1.28.0,<1.29.0)"] +amplify = ["mypy-boto3-amplify (>=1.28.0,<1.29.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.28.0,<1.29.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.28.0,<1.29.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.28.0,<1.29.0)"] +appflow = ["mypy-boto3-appflow (>=1.28.0,<1.29.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.28.0,<1.29.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.28.0,<1.29.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.28.0,<1.29.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.28.0,<1.29.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.28.0,<1.29.0)"] +appstream = ["mypy-boto3-appstream (>=1.28.0,<1.29.0)"] +appsync = ["mypy-boto3-appsync (>=1.28.0,<1.29.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)"] +athena = ["mypy-boto3-athena (>=1.28.0,<1.29.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.28.0,<1.29.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.28.0,<1.29.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)"] +backup = ["mypy-boto3-backup (>=1.28.0,<1.29.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.28.0,<1.29.0)"] +batch = ["mypy-boto3-batch (>=1.28.0,<1.29.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.28.0,<1.29.0)"] +boto3 = ["boto3 (==1.28.52)", "botocore (==1.31.52)"] +braket = ["mypy-boto3-braket (>=1.28.0,<1.29.0)"] +budgets = ["mypy-boto3-budgets (>=1.28.0,<1.29.0)"] +ce = ["mypy-boto3-ce (>=1.28.0,<1.29.0)"] +chime = ["mypy-boto3-chime (>=1.28.0,<1.29.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)"] +cleanrooms = 
["mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.28.0,<1.29.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.28.0,<1.29.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.28.0,<1.29.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.28.0,<1.29.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.28.0,<1.29.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.28.0,<1.29.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.28.0,<1.29.0)"] +codestar = ["mypy-boto3-codestar (>=1.28.0,<1.29.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.28.0,<1.29.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)"] +config = ["mypy-boto3-config (>=1.28.0,<1.29.0)"] +connect = ["mypy-boto3-connect (>=1.28.0,<1.29.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.28.0,<1.29.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)"] +controltower = ["mypy-boto3-controltower (>=1.28.0,<1.29.0)"] +cur = ["mypy-boto3-cur (>=1.28.0,<1.29.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)"] +databrew = ["mypy-boto3-databrew (>=1.28.0,<1.29.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.28.0,<1.29.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.28.0,<1.29.0)"] +datasync = ["mypy-boto3-datasync (>=1.28.0,<1.29.0)"] +dax = ["mypy-boto3-dax (>=1.28.0,<1.29.0)"] +detective = ["mypy-boto3-detective (>=1.28.0,<1.29.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.28.0,<1.29.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.28.0,<1.29.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.28.0,<1.29.0)"] +discovery = ["mypy-boto3-discovery (>=1.28.0,<1.29.0)"] +dlm = ["mypy-boto3-dlm (>=1.28.0,<1.29.0)"] +dms = ["mypy-boto3-dms (>=1.28.0,<1.29.0)"] +docdb = ["mypy-boto3-docdb (>=1.28.0,<1.29.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)"] +drs = ["mypy-boto3-drs (>=1.28.0,<1.29.0)"] +ds = ["mypy-boto3-ds (>=1.28.0,<1.29.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.28.0,<1.29.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)"] 
+ebs = ["mypy-boto3-ebs (>=1.28.0,<1.29.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.28.0,<1.29.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)"] +ecr = ["mypy-boto3-ecr (>=1.28.0,<1.29.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.28.0,<1.29.0)"] +ecs = ["mypy-boto3-ecs (>=1.28.0,<1.29.0)"] +efs = ["mypy-boto3-efs (>=1.28.0,<1.29.0)"] +eks = ["mypy-boto3-eks (>=1.28.0,<1.29.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.28.0,<1.29.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)"] +elb = ["mypy-boto3-elb (>=1.28.0,<1.29.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.28.0,<1.29.0)"] +emr = ["mypy-boto3-emr (>=1.28.0,<1.29.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.28.0,<1.29.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.28.0,<1.29.0)"] +es = ["mypy-boto3-es (>=1.28.0,<1.29.0)"] +essential = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-lambda (>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)"] +events = ["mypy-boto3-events (>=1.28.0,<1.29.0)"] +evidently = ["mypy-boto3-evidently (>=1.28.0,<1.29.0)"] +finspace = ["mypy-boto3-finspace (>=1.28.0,<1.29.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.28.0,<1.29.0)"] +firehose = ["mypy-boto3-firehose (>=1.28.0,<1.29.0)"] +fis = ["mypy-boto3-fis (>=1.28.0,<1.29.0)"] +fms = ["mypy-boto3-fms (>=1.28.0,<1.29.0)"] +forecast = ["mypy-boto3-forecast (>=1.28.0,<1.29.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.28.0,<1.29.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.28.0,<1.29.0)"] +fsx = ["mypy-boto3-fsx (>=1.28.0,<1.29.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.28.0,<1.29.0)"] +gamesparks = ["mypy-boto3-gamesparks (>=1.28.0,<1.29.0)"] +glacier = ["mypy-boto3-glacier (>=1.28.0,<1.29.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)"] +glue = ["mypy-boto3-glue (>=1.28.0,<1.29.0)"] +grafana = ["mypy-boto3-grafana (>=1.28.0,<1.29.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.28.0,<1.29.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.28.0,<1.29.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.28.0,<1.29.0)"] +health = ["mypy-boto3-health (>=1.28.0,<1.29.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.28.0,<1.29.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.28.0,<1.29.0)"] +iam = ["mypy-boto3-iam (>=1.28.0,<1.29.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.28.0,<1.29.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)"] +importexport = ["mypy-boto3-importexport (>=1.28.0,<1.29.0)"] +inspector = ["mypy-boto3-inspector (>=1.28.0,<1.29.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.28.0,<1.29.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)"] +iot = ["mypy-boto3-iot (>=1.28.0,<1.29.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.28.0,<1.29.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)"] +iotanalytics = 
["mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.28.0,<1.29.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.28.0,<1.29.0)"] +ivs = ["mypy-boto3-ivs (>=1.28.0,<1.29.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.28.0,<1.29.0)"] +kafka = ["mypy-boto3-kafka (>=1.28.0,<1.29.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)"] +kendra = ["mypy-boto3-kendra (>=1.28.0,<1.29.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.28.0,<1.29.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.28.0,<1.29.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)"] +kms = ["mypy-boto3-kms (>=1.28.0,<1.29.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.28.0,<1.29.0)"] +lambda = ["mypy-boto3-lambda (>=1.28.0,<1.29.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.28.0,<1.29.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.28.0,<1.29.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.28.0,<1.29.0)"] +location = ["mypy-boto3-location (>=1.28.0,<1.29.0)"] +logs = ["mypy-boto3-logs (>=1.28.0,<1.29.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)"] +m2 = ["mypy-boto3-m2 (>=1.28.0,<1.29.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.28.0,<1.29.0)"] +macie = ["mypy-boto3-macie (>=1.28.0,<1.29.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.28.0,<1.29.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)"] +mediaconvert = ["mypy-boto3-mediaconvert 
(>=1.28.0,<1.29.0)"] +medialive = ["mypy-boto3-medialive (>=1.28.0,<1.29.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.28.0,<1.29.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.28.0,<1.29.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.28.0,<1.29.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.28.0,<1.29.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)"] +mgh = ["mypy-boto3-mgh (>=1.28.0,<1.29.0)"] +mgn = ["mypy-boto3-mgn (>=1.28.0,<1.29.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.28.0,<1.29.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)"] +mobile = ["mypy-boto3-mobile (>=1.28.0,<1.29.0)"] +mq = ["mypy-boto3-mq (>=1.28.0,<1.29.0)"] +mturk = ["mypy-boto3-mturk (>=1.28.0,<1.29.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.28.0,<1.29.0)"] +neptune = ["mypy-boto3-neptune (>=1.28.0,<1.29.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.28.0,<1.29.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.28.0,<1.29.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.28.0,<1.29.0)"] +nimble = ["mypy-boto3-nimble (>=1.28.0,<1.29.0)"] +oam = ["mypy-boto3-oam (>=1.28.0,<1.29.0)"] +omics = ["mypy-boto3-omics (>=1.28.0,<1.29.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.28.0,<1.29.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.28.0,<1.29.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)"] +organizations = ["mypy-boto3-organizations (>=1.28.0,<1.29.0)"] +osis = ["mypy-boto3-osis (>=1.28.0,<1.29.0)"] +outposts = ["mypy-boto3-outposts (>=1.28.0,<1.29.0)"] +panorama = ["mypy-boto3-panorama (>=1.28.0,<1.29.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)"] +personalize = ["mypy-boto3-personalize (>=1.28.0,<1.29.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.28.0,<1.29.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)"] +pi = ["mypy-boto3-pi (>=1.28.0,<1.29.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.28.0,<1.29.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)"] +pipes = ["mypy-boto3-pipes (>=1.28.0,<1.29.0)"] +polly = ["mypy-boto3-polly (>=1.28.0,<1.29.0)"] +pricing = ["mypy-boto3-pricing (>=1.28.0,<1.29.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)"] +proton = ["mypy-boto3-proton (>=1.28.0,<1.29.0)"] +qldb = ["mypy-boto3-qldb (>=1.28.0,<1.29.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.28.0,<1.29.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.28.0,<1.29.0)"] +ram = ["mypy-boto3-ram (>=1.28.0,<1.29.0)"] +rbin = ["mypy-boto3-rbin (>=1.28.0,<1.29.0)"] +rds = ["mypy-boto3-rds (>=1.28.0,<1.29.0)"] +rds-data = ["mypy-boto3-rds-data 
(>=1.28.0,<1.29.0)"] +redshift = ["mypy-boto3-redshift (>=1.28.0,<1.29.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.28.0,<1.29.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.28.0,<1.29.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.28.0,<1.29.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.28.0,<1.29.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)"] +route53 = ["mypy-boto3-route53 (>=1.28.0,<1.29.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.28.0,<1.29.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.28.0,<1.29.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.28.0,<1.29.0)"] +rum = ["mypy-boto3-rum (>=1.28.0,<1.29.0)"] +s3 = ["mypy-boto3-s3 (>=1.28.0,<1.29.0)"] +s3control = ["mypy-boto3-s3control (>=1.28.0,<1.29.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.28.0,<1.29.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.28.0,<1.29.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.28.0,<1.29.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.28.0,<1.29.0)"] +schemas = ["mypy-boto3-schemas (>=1.28.0,<1.29.0)"] +sdb = ["mypy-boto3-sdb (>=1.28.0,<1.29.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.28.0,<1.29.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.28.0,<1.29.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.28.0,<1.29.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)"] +ses = ["mypy-boto3-ses (>=1.28.0,<1.29.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.28.0,<1.29.0)"] +shield = ["mypy-boto3-shield (>=1.28.0,<1.29.0)"] +signer = ["mypy-boto3-signer (>=1.28.0,<1.29.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)"] +sms = ["mypy-boto3-sms (>=1.28.0,<1.29.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.28.0,<1.29.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)"] +snowball = ["mypy-boto3-snowball (>=1.28.0,<1.29.0)"] +sns = ["mypy-boto3-sns (>=1.28.0,<1.29.0)"] +sqs = ["mypy-boto3-sqs (>=1.28.0,<1.29.0)"] +ssm = ["mypy-boto3-ssm (>=1.28.0,<1.29.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)"] +sso = ["mypy-boto3-sso (>=1.28.0,<1.29.0)"] +sso-admin = 
["mypy-boto3-sso-admin (>=1.28.0,<1.29.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.28.0,<1.29.0)"] +sts = ["mypy-boto3-sts (>=1.28.0,<1.29.0)"] +support = ["mypy-boto3-support (>=1.28.0,<1.29.0)"] +support-app = ["mypy-boto3-support-app (>=1.28.0,<1.29.0)"] +swf = ["mypy-boto3-swf (>=1.28.0,<1.29.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.28.0,<1.29.0)"] +textract = ["mypy-boto3-textract (>=1.28.0,<1.29.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.28.0,<1.29.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.28.0,<1.29.0)"] +tnb = ["mypy-boto3-tnb (>=1.28.0,<1.29.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.28.0,<1.29.0)"] +transfer = ["mypy-boto3-transfer (>=1.28.0,<1.29.0)"] +translate = ["mypy-boto3-translate (>=1.28.0,<1.29.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.28.0,<1.29.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)"] +waf = ["mypy-boto3-waf (>=1.28.0,<1.29.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.28.0,<1.29.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.28.0,<1.29.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.28.0,<1.29.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.28.0,<1.29.0)"] +worklink = ["mypy-boto3-worklink (>=1.28.0,<1.29.0)"] +workmail = ["mypy-boto3-workmail (>=1.28.0,<1.29.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.28.0,<1.29.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)"] +xray = ["mypy-boto3-xray (>=1.28.0,<1.29.0)"] [[package]] name = "botocore" -version = "1.21.65" +version = "1.31.52" description = "Low-level, data-driven core of boto 3." 
optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "botocore-1.21.65-py3-none-any.whl", hash = "sha256:3bd0e3d6daee6afcc747d596b52158519abe1ce36f906d556b9f8b54faa081e8"}, - {file = "botocore-1.21.65.tar.gz", hash = "sha256:6437d6a3999a189e7d45b3fcd8f794a46670fb255ae670c946d3f224caa8b46a"}, + {file = "botocore-1.31.52-py3-none-any.whl", hash = "sha256:46b0a75a38521aa6a75fddccb1542e002930e609d4e13516f40fef170d32e515"}, + {file = "botocore-1.31.52.tar.gz", hash = "sha256:6d09881c5a8be34b497872ca3936f8757d886a6f42f2a8703411928189cfedc0"}, ] [package.dependencies] -jmespath = ">=0.7.1,<1.0.0" +jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = ">=1.25.4,<1.27" [package.extras] -crt = ["awscrt (==0.12.5)"] +crt = ["awscrt (==0.16.26)"] [[package]] name = "botocore-stubs" @@ -851,6 +865,94 @@ ssh = ["bcrypt (>=3.1.5)"] test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] test-randomorder = ["pytest-randomly"] +[[package]] +name = "dependency-injector" +version = "4.41.0" +description = "Dependency injection framework for Python" +optional = false +python-versions = "*" +files = [ + {file = "dependency-injector-4.41.0.tar.gz", hash = "sha256:939dfc657104bc3e66b67afd3fb2ebb0850c9a1e73d0d26066f2bbdd8735ff9c"}, + {file = "dependency_injector-4.41.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a2381a251b04244125148298212550750e6e1403e9b2850cc62e0e829d050ad3"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75280dfa23f7c88e1bf56c3920d58a43516816de6f6ab2a6650bb8a0f27d5c2c"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63bfba21f8bff654a80e9b9d06dd6c43a442990b73bf89cd471314c11c541ec2"}, + {file = "dependency_injector-4.41.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3535d06416251715b45f8412482b58ec1c6196a4a3baa207f947f0b03a7c4b44"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d09c08c944a25dabfb454238c1a889acd85102b93ae497de523bf9ab7947b28a"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:586a0821720b15932addbefb00f7370fbcd5831d6ebbd6494d774b44ff96d23a"}, + {file = "dependency_injector-4.41.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7fa4970f12a3fc95d8796938b11c41276ad1ff4c447b0e589212eab3fc527a90"}, + {file = "dependency_injector-4.41.0-cp310-cp310-win32.whl", hash = "sha256:d557e40673de984f78dab13ebd68d27fbb2f16d7c4e3b663ea2fa2f9fae6765b"}, + {file = "dependency_injector-4.41.0-cp310-cp310-win_amd64.whl", hash = "sha256:3744c327d18408e74781bd6d8b7738745ee80ef89f2c8daecf9ebd098cb84972"}, + {file = "dependency_injector-4.41.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:89c67edffe7007cf33cee79ecbca38f48efcc2add5c280717af434db6c789377"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:786f7aac592e191c9caafc47732161d807bad65c62f260cd84cd73c7e2d67d6d"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8b61a15bc46a3aa7b29bd8a7384b650aa3a7ef943491e93c49a0540a0b3dda4"}, + {file = "dependency_injector-4.41.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:a4f113e5d4c3070973ad76e5bda7317e500abae6083d78689f0b6e37cf403abf"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:5fa3ed8f0700e47a0e7363f949b4525ffa8277aa1c5b10ca5b41fce4dea61bb9"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:05e15ea0f2b14c1127e8b0d1597fef13f98845679f63bf670ba12dbfc12a16ef"}, + {file = "dependency_injector-4.41.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3055b3fc47a0d6e5f27defb4166c0d37543a4967c279549b154afaf506ce6efc"}, + {file = "dependency_injector-4.41.0-cp311-cp311-win32.whl", hash = "sha256:37d5954026e3831663518d78bdf4be9c2dbfea691edcb73c813aa3093aa4363a"}, + {file = "dependency_injector-4.41.0-cp311-cp311-win_amd64.whl", hash = "sha256:f89a507e389b7e4d4892dd9a6f5f4da25849e24f73275478634ac594d621ab3f"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ac79f3c05747f9724bd56c06985e78331fc6c85eb50f3e3f1a35e0c60f9977e9"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75e7a733b372db3144a34020c4233f6b94db2c6342d6d16bc5245b1b941ee2bd"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40936d9384363331910abd59dd244158ec3572abf9d37322f15095315ac99893"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a31d9d60be4b585585081109480cfb2ef564d3b851cb32a139bf8408411a93a"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:953bfac819d32dc72b963767589e0ed372e5e9e78b03fb6b89419d0500d34bbe"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:8f0090ff14038f17a026ca408a3a0b0e7affb6aa7498b2b59d670f40ac970fbe"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:6b29abac56ce347d2eb58a560723e1663ee2125cf5cc38866ed92b84319927ec"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-win32.whl", hash = "sha256:059fbb48333148143e8667a5323d162628dfe27c386bd0ed3deeecfc390338bf"}, + {file = "dependency_injector-4.41.0-cp36-cp36m-win_amd64.whl", hash = "sha256:16de2797dcfcc2263b8672bf0751166f7c7b369ca2ff9246ceb67b65f8e1d802"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c71d30b6708438050675f338edb9a25bea6c258478dbe5ec8405286756a2d347"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d283aee588a72072439e6721cb64aa6cba5bc18c576ef0ab28285a6ec7a9d655"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc852da612c7e347f2fcf921df2eca2718697a49f648a28a63db3ab504fd9510"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:02620454ee8101f77a317f3229935ce687480883d72a40858ff4b0c87c935cce"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7a92680bea1c260e5c0d2d6cd60b0c913cba76a456a147db5ac047ecfcfcc758"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:168334cba3f1cbf55299ef38f0f2e31879115cc767b780c859f7814a52d80abb"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:48b6886a87b4ceb9b9f78550f77b2a5c7d2ce33bc83efd886556ad468cc9c85a"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-win32.whl", hash = "sha256:87be84084a1b922c4ba15e2e5aa900ee24b78a5467997cb7aec0a1d6cdb4a00b"}, + {file = "dependency_injector-4.41.0-cp37-cp37m-win_amd64.whl", hash = "sha256:8b8cf1c6c56f5c18bdbd9f5e93b52ca29cb4d99606d4056e91f0c761eef496dc"}, + {file = "dependency_injector-4.41.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a8686fa330c83251c75c8238697686f7a0e0f6d40658538089165dc72df9bcff"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d670a844268dcd758195e58e9a5b39fc74bb8648aba99a13135a4a10ec9cfac"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e3b9d41e0eff4c8e16fea1e33de66ff0030fe51137ca530f3c52ce110447914"}, + {file = "dependency_injector-4.41.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a724e0a737baadb4378f5dc1b079867cc3a88552fcca719b3dba84716828b2"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3588bd887b051d16b8bcabaae1127eb14059a0719a8fe34c8a75ba59321b352c"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:409441122f40e1b4b8582845fdd76deb9dc5c9d6eb74a057b85736ef9e9c671f"}, + {file = "dependency_injector-4.41.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7dcba8665cafec825b7095d5dd80afb5cf14404450eca3fe8b66e1edbf4dbc10"}, + {file = "dependency_injector-4.41.0-cp38-cp38-win32.whl", hash = "sha256:8b51efeaebacaf79ef68edfc65e9687699ccffb3538c4a3ab30d0d77e2db7189"}, + {file = "dependency_injector-4.41.0-cp38-cp38-win_amd64.whl", hash = "sha256:1662e2ef60ac6e681b9e11b5d8b7c17a0f733688916cf695f9540f8f50a61b1e"}, + {file = "dependency_injector-4.41.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:51217cb384b468d7cc355544cec20774859f00812f9a1a71ed7fa701c957b2a7"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b3890a12423ae3a9eade035093beba487f8d092ee6c6cb8706f4e7080a56e819"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99ed73b1521bf249e2823a08a730c9f9413a58f4b4290da022e0ad4fb333ba3d"}, + {file = "dependency_injector-4.41.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:300838e9d4f3fbf539892a5a4072851728e23b37a1f467afcf393edd994d88f0"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:56d37b9d2f50a18f059d9abdbea7669a7518bd42b81603c21a27910a2b3f1657"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4a44ca3ce5867513a70b31855b218be3d251f5068ce1c480cc3a4ad24ffd3280"}, + {file = "dependency_injector-4.41.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:67b369592c57549ccdcad0d5fef1ddb9d39af7fed8083d76e789ab0111fc6389"}, + {file = "dependency_injector-4.41.0-cp39-cp39-win32.whl", hash = "sha256:740a8e8106a04d3f44b52b25b80570fdac96a8a3934423de7c9202c5623e7936"}, + {file = "dependency_injector-4.41.0-cp39-cp39-win_amd64.whl", hash = "sha256:22b11dbf696e184f0b3d5ac4e5418aeac3c379ba4ea758c04a83869b7e5d1cbf"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b365a8548e9a49049fa6acb24d3cd939f619eeb8e300ca3e156e44402dcc07ec"}, + {file = 
"dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5168dc59808317dc4cdd235aa5d7d556d33e5600156acaf224cead236b48a3e8"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3229d83e99e255451605d5276604386e06ad948e3d60f31ddd796781c77f76f"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1baee908f21190bdc46a65ce4c417a5175e9397ca62354928694fce218f84487"}, + {file = "dependency_injector-4.41.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:b37f36ecb0c1227f697e1d4a029644e3eda8dd0f0716aa63ad04d96dbb15bbbb"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b0c9c966ff66c77364a2d43d08de9968aff7e3903938fe912ba49796b2133344"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12e91ac0333e7e589421943ff6c6bf9cf0d9ac9703301cec37ccff3723406332"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2440b32474d4e747209528ca3ae48f42563b2fbe3d74dbfe949c11dfbfef7c4"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54032d62610cf2f4421c9d92cef52957215aaa0bca403cda580c58eb3f726eda"}, + {file = "dependency_injector-4.41.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:76b94c8310929e54136f3cb3de3adc86d1a657b3984299f40bf1cd2ba0bae548"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6ee9810841c6e0599356cb884d16453bfca6ab739d0e4f0248724ed8f9ee0d79"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b98945edae88e777091bf0848f869fb94bd76dfa4066d7c870a5caa933391d0"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a2dee5d4abdd21f1a30a51d46645c095be9dcc404c7c6e9f81d0a01415a49e64"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d03f5fa0fa98a18bd0dfce846db80e2798607f0b861f1f99c97f441f7669d7a2"}, + {file = "dependency_injector-4.41.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:f2842e15bae664a9f69932e922b02afa055c91efec959cb1896f6c499bf68180"}, +] + +[package.dependencies] +six = ">=1.7.0,<=1.16.0" + +[package.extras] +aiohttp = ["aiohttp"] +flask = ["flask"] +pydantic = ["pydantic"] +yaml = ["pyyaml"] + [[package]] name = "deprecated" version = "1.2.14" @@ -2341,6 +2443,104 @@ dmypy = ["psutil (>=4.0)"] install-types = ["pip"] reports = ["lxml"] +[[package]] +name = "mypy-boto3-cloudformation" +version = "1.28.48" +description = "Type annotations for boto3.CloudFormation 1.28.48 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-cloudformation-1.28.48.tar.gz", hash = "sha256:efbe4073397800824287c8d52a65383112862f903b16fd587b5113449652371b"}, + {file = "mypy_boto3_cloudformation-1.28.48-py3-none-any.whl", hash = "sha256:653e14414abc9fab8d29d693f138639a1325322fb6ba1b06ca90a6ccb11dfd94"}, +] + +[package.dependencies] 
+typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-dynamodb" +version = "1.28.36" +description = "Type annotations for boto3.DynamoDB 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-dynamodb-1.28.36.tar.gz", hash = "sha256:5fe1d336fdc8c58f345c9c1b4e4c1a2d164660531cf3a074d4598975fb2687de"}, + {file = "mypy_boto3_dynamodb-1.28.36-py3-none-any.whl", hash = "sha256:9a3b49385d17e421661ab8639fc09cc64a706198be20287f82d83511289294a3"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-ec2" +version = "1.28.51" +description = "Type annotations for boto3.EC2 1.28.51 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-ec2-1.28.51.tar.gz", hash = "sha256:e62204a712e89a14e0663d3d18813ac8072706684b0e89a1b04a7d01c10f9ebe"}, + {file = "mypy_boto3_ec2-1.28.51-py3-none-any.whl", hash = "sha256:f9162f00b144cf2fab5b8b56d7883674b1d4096c79a7226b592918eae17e6235"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-lambda" +version = "1.28.36" +description = "Type annotations for boto3.Lambda 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-lambda-1.28.36.tar.gz", hash = "sha256:70498e6ff6bfd60b758553d27fadf691ba169572faca01c2bd457da0b48b9cff"}, + {file = "mypy_boto3_lambda-1.28.36-py3-none-any.whl", hash = "sha256:edb1f49279f7713929a70eaab00cf3d4ba65a10016db636805d022b2eaf14c84"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-rds" +version = "1.28.41" +description = "Type annotations for boto3.RDS 1.28.41 service generated with mypy-boto3-builder 7.18.2" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-rds-1.28.41.tar.gz", hash = "sha256:19b7d6a1e3e53ff7d03157052885ceead29861fba2d9cbddf701f6238159cb72"}, + {file = "mypy_boto3_rds-1.28.41-py3-none-any.whl", hash = "sha256:2935be3736147b726964e3cebcb918d0fc394e4012b9b7d3d0d35c12aa740ddf"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-s3" +version = "1.28.52" +description = "Type annotations for boto3.S3 1.28.52 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-s3-1.28.52.tar.gz", hash = "sha256:179cb7542cc5ef656f1323ad51eb237afcba77d1e5ed07d21a013fe36effb8b2"}, + {file = "mypy_boto3_s3-1.28.52-py3-none-any.whl", hash = "sha256:a75cd5ff28f1cb5109dd50db94259436701208fa97c61b5a2cc0689e169b7cba"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + +[[package]] +name = "mypy-boto3-sqs" +version = "1.28.36" +description = "Type annotations for boto3.SQS 1.28.36 service generated with mypy-boto3-builder 7.18.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-sqs-1.28.36.tar.gz", hash = "sha256:d9c159e020f0ef225a6d5850a3673e8b236327243ba5ffe0d13762ae4fdc0e21"}, + {file = "mypy_boto3_sqs-1.28.36-py3-none-any.whl", hash = 
"sha256:8457aa9f2a6da44e8543e547597773f67a04e517f6a398989117cf1fa3f70d6e"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + [[package]] name = "mypy-extensions" version = "1.0.0" @@ -2838,6 +3038,7 @@ files = [ [package.dependencies] email-validator = {version = ">=1.0.3", optional = true, markers = "extra == \"email\""} +python-dotenv = {version = ">=0.10.4", optional = true, markers = "extra == \"dotenv\""} typing-extensions = ">=4.2.0" [package.extras] @@ -3173,6 +3374,20 @@ files = [ [package.dependencies] six = ">=1.5" +[[package]] +name = "python-dotenv" +version = "1.0.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.8" +files = [ + {file = "python-dotenv-1.0.0.tar.gz", hash = "sha256:a8df96034aae6d2d50a4ebe8216326c61c3eb64836776504fcca410e5937a3ba"}, + {file = "python_dotenv-1.0.0-py3-none-any.whl", hash = "sha256:f5971a9226b701070a4bf2c38c89e5a3f0d64de8debda981d1db98583009122a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + [[package]] name = "python3-saml" version = "1.15.0" @@ -3568,13 +3783,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.5.2" +version = "0.6.2" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.6" +python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, - {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, + {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, + {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, ] [package.dependencies] @@ -4227,4 +4442,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "efb9ed885d265cf45f832527246ad2393306e46e0c8ffe72da9af0dab19dadfa" +content-hash = "4a0a1f9123bc6217c1c2427e0a217e605497e140f5cb98c916c17b4209b82467" diff --git a/pyproject.toml b/pyproject.toml index 58081cafa6..131250f319 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -85,6 +85,7 @@ module = [ "core.model.integration", "core.model.library", "core.selftest", + "core.service.*", "core.settings.*", "core.util.authentication_for_opds", "core.util.cache", @@ -162,11 +163,11 @@ version = "0" # Version number is managed with tags in git alembic = {extras = ["tz"], version = "^1.8.1"} aws-xray-sdk = "~2.12" bcrypt = "^4.0.1" -boto3 = "~1.18" -botocore = "~1.21" +boto3 = "^1.28" certifi = "*" click = "^8.1.3" contextlib2 = "21.6.0" +dependency-injector = "^4.41" expiringdict = "1.2.2" feedparser = "6.0.10" firebase-admin = "^6.0.1" @@ -190,7 +191,7 @@ opensearch-py = "~1.1" palace-webpub-manifest-parser = "~3.0.1" pillow = "^10.0" pycryptodome = "^3.18" -pydantic = {version = "^1.10.9", extras = ["email"]} +pydantic = {version = "^1.10.9", extras = ["dotenv", "email"]} pyinstrument = "<4.6" PyJWT = "^2.8" PyLD = "2.0.3" @@ -229,8 +230,7 @@ tox-docker = "^4.1" tox-gh-actions = "^3.0" [tool.poetry.group.dev.dependencies] -boto3-stubs = "^1.26.81" -botocore-stubs = "^1.29.81" +boto3-stubs = {version = "^1.28", extras = ["boto3", "essential", "s3"]} freezegun = "~1.2.2" Jinja2 = "^3.1.2" mypy = "^1.4.1" diff --git a/scripts.py 
b/scripts.py index 07b2421684..2ea03abea3 100644 --- a/scripts.py +++ b/scripts.py @@ -55,7 +55,6 @@ get_one, pg_advisory_lock, ) -from core.model.configuration import ExternalIntegrationLink from core.scripts import ( IdentifierInputScript, LaneSweeperScript, @@ -194,7 +193,7 @@ def __init__(self, _db=None, cmd_args=None, manager=None, *args, **kwargs): super().__init__(_db, *args, **kwargs) self.parse_args(cmd_args) if not manager: - manager = CirculationManager(self._db) + manager = CirculationManager(self._db, self.services) from api.app import app app.manager = manager @@ -652,26 +651,14 @@ def process_lane(self, lane, exporter=None): ) return - # To find the storage integration for the exporter, first find the - # external integration link associated with the exporter's external - # integration. - integration_link = get_one( - self._db, - ExternalIntegrationLink, - external_integration_id=exporter.integration.id, - purpose=ExternalIntegrationLink.MARC, - ) - # Then use the "other" integration value to find the storage integration. - storage_integration = get_one( - self._db, ExternalIntegration, id=integration_link.other_integration_id - ) - - if not storage_integration: - self.log.info("No storage External Integration was found.") + # Find the storage service + storage_service = self.services.storage.public() + if not storage_service: + self.log.info("No storage service was found.") return # First update the file with ALL the records. - records = exporter.records(lane, annotator, storage_integration) + records = exporter.records(lane, annotator, storage_service) # Then create a new file with changes since the last update. start_time = None @@ -680,7 +667,7 @@ def process_lane(self, lane, exporter=None): start_time = last_update - timedelta(days=1) records = exporter.records( - lane, annotator, storage_integration, start_time=start_time + lane, annotator, storage_service, start_time=start_time ) diff --git a/tests/api/admin/controller/test_catalog_services.py b/tests/api/admin/controller/test_catalog_services.py index fda8a836a3..6a6038ad91 100644 --- a/tests/api/admin/controller/test_catalog_services.py +++ b/tests/api/admin/controller/test_catalog_services.py @@ -8,7 +8,6 @@ from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_INTEGRATION, MISSING_SERVICE, MULTIPLE_SERVICES_FOR_LIBRARY, UNKNOWN_PROTOCOL, @@ -21,8 +20,6 @@ create, get_one, ) -from core.model.configuration import ExternalIntegrationLink -from core.s3 import S3UploaderConfiguration from tests.fixtures.api_admin import SettingsControllerFixture @@ -159,63 +156,8 @@ def test_catalog_services_post_errors( goal=ExternalIntegration.CATALOG_GOAL, ) - # Attempt to set an S3 mirror external integration but it does not exist! - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - ME = MARCExporter - flask.request.form = ImmutableMultiDict( - [ - ("name", "exporter name"), - ("id", str(service.id)), - ("protocol", ME.NAME), - ("mirror_integration_id", "1234"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response.uri == MISSING_INTEGRATION.uri - - s3, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - - # Now an S3 integration exists, but it has no MARC bucket configured. 
- with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - ME = MARCExporter - flask.request.form = ImmutableMultiDict( - [ - ("name", "exporter name"), - ("id", str(service.id)), - ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response.uri == MISSING_INTEGRATION.uri - - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - settings_ctrl_fixture.ctrl.db.session.flush() - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "new name"), - ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), - ] - ) - pytest.raises( - AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services, - ) - # This should be the last test to check since rolling back database # changes in the test can cause it to crash. - s3.setting(S3UploaderConfiguration.MARC_BUCKET_KEY).value = "marc-files" service.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) @@ -225,7 +167,6 @@ def test_catalog_services_post_errors( [ ("name", "new name"), ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), ( "libraries", json.dumps( @@ -250,20 +191,11 @@ def test_catalog_services_post_create( ): ME = MARCExporter - s3, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - s3.setting(S3UploaderConfiguration.MARC_BUCKET_KEY).value = "marc-files" - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "exporter name"), ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), ( "libraries", json.dumps( @@ -289,24 +221,11 @@ def test_catalog_services_post_create( goal=ExternalIntegration.CATALOG_GOAL, ) assert isinstance(service, ExternalIntegration) - # There was one S3 integration and it was selected. The service has an - # External Integration Link to the storage integration that is created - # in a POST with purpose of ExternalIntegrationLink.MARC. 
- integration_link = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegrationLink, - external_integration_id=service.id, - purpose=ExternalIntegrationLink.MARC, - ) - assert isinstance(integration_link, ExternalIntegrationLink) assert service.id == int(response.get_data()) assert ME.NAME == service.protocol assert "exporter name" == service.name assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries - # We expect the Catalog external integration to have a link to the - # S3 storage external integration - assert s3.id == integration_link.other_integration_id assert ( "false" == ConfigurationSetting.for_library_and_externalintegration( @@ -331,14 +250,6 @@ def test_catalog_services_post_edit( ): ME = MARCExporter - s3, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - s3.setting(S3UploaderConfiguration.MARC_BUCKET_KEY).value = "marc-files" - service, ignore = create( settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, @@ -353,7 +264,6 @@ def test_catalog_services_post_edit( ("name", "exporter name"), ("id", str(service.id)), ("protocol", ME.NAME), - ("mirror_integration_id", str(s3.id)), ( "libraries", json.dumps( @@ -373,17 +283,9 @@ def test_catalog_services_post_edit( ) assert response.status_code == 200 - integration_link = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegrationLink, - external_integration_id=service.id, - purpose=ExternalIntegrationLink.MARC, - ) - assert isinstance(integration_link, ExternalIntegrationLink) assert service.id == int(response.get_data()) assert ME.NAME == service.protocol assert "exporter name" == service.name - assert s3.id == integration_link.other_integration_id assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries assert ( "false" diff --git a/tests/api/admin/controller/test_storage_services.py b/tests/api/admin/controller/test_storage_services.py deleted file mode 100644 index 643c266b51..0000000000 --- a/tests/api/admin/controller/test_storage_services.py +++ /dev/null @@ -1,27 +0,0 @@ -from api.admin.controller.storage_services import StorageServicesController -from core.model import ExternalIntegration -from core.s3 import S3Uploader - - -class TestStorageServices: - def test_storage_service_management(self, settings_ctrl_fixture): - class MockStorage(StorageServicesController): - def _get_integration_protocols(self, apis, protocol_name_attr): - self.manage_called_with = (apis, protocol_name_attr) - - def _delete_integration(self, *args): - self.delete_called_with = args - - controller = MockStorage(settings_ctrl_fixture.manager) - EI = ExternalIntegration - with settings_ctrl_fixture.request_context_with_admin("/"): - controller.process_services() - (apis, procotol_name) = controller.manage_called_with - - assert S3Uploader in apis - assert procotol_name == "NAME" - - with settings_ctrl_fixture.request_context_with_admin("/"): - id = object() - controller.process_delete(id) - assert (id, EI.STORAGE_GOAL) == controller.delete_called_with diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index 465aec0e7f..6dae466615 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -680,29 +680,6 @@ def test_process_search_service_self_tests(self, fixture: AdminRouteFixture): fixture.assert_supported_methods(url, "GET", "POST") -class TestAdminStorageServices: - CONTROLLER_NAME = 
"admin_storage_services_controller" - - @pytest.fixture(scope="function") - def fixture(self, admin_route_fixture: AdminRouteFixture) -> AdminRouteFixture: - admin_route_fixture.set_controller_name(self.CONTROLLER_NAME) - return admin_route_fixture - - def test_process_services(self, fixture: AdminRouteFixture): - url = "/admin/storage_services" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_services # type: ignore - ) - fixture.assert_supported_methods(url, "GET", "POST") - - def test_process_delete(self, fixture: AdminRouteFixture): - url = "/admin/storage_service/" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_delete, "", http_method="DELETE" # type: ignore - ) - fixture.assert_supported_methods(url, "DELETE") - - class TestAdminCatalogServices: CONTROLLER_NAME = "admin_catalog_services_controller" diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 20d39e22d0..603e67b2c6 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -35,7 +35,6 @@ "tests.fixtures.opds2_files", "tests.fixtures.opds_files", "tests.fixtures.overdrive", - "tests.fixtures.s3", "tests.fixtures.sample_covers", "tests.fixtures.search", "tests.fixtures.time", diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index f3f3b1cab7..ff1392c83a 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -1,6 +1,10 @@ import logging from abc import ABC from collections import defaultdict +from typing import Optional +from unittest.mock import MagicMock + +from sqlalchemy.orm import Session from api.circulation import BaseCirculationAPI, CirculationAPI, HoldInfo, LoanInfo from api.controller import CirculationManager @@ -8,6 +12,7 @@ from core.integration.settings import BaseSettings from core.model import DataSource, Hold, Loan, get_one_or_create from core.model.configuration import ExternalIntegration +from core.service.container import Services from tests.mocks.search import ExternalSearchIndexFake @@ -167,6 +172,11 @@ def api_for_license_pool(self, licensepool): class MockCirculationManager(CirculationManager): d_circulation: MockCirculationAPI + def __init__(self, db: Session, services: Optional[Services] = None): + if services is None: + services = MagicMock(spec=Services) + super().__init__(db, services) + def setup_search(self): """Set up a search client.""" integration, _ = get_one_or_create( diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index e376c9d5c1..06ff4d81aa 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -154,7 +154,7 @@ class BadSearch(CirculationManager): def setup_search(self): raise Exception("doomed!") - circulation = BadSearch(circulation_fixture.db.session) + circulation = BadSearch(circulation_fixture.db.session, MagicMock()) # We didn't get a search object. assert None == circulation.external_search diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/test_controller_opdsfeed.py index dd81e7b5c5..22a21471f8 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/test_controller_opdsfeed.py @@ -674,7 +674,7 @@ class BadSearch(CirculationManager): def setup_search(self): raise Exception("doomed!") - circulation = BadSearch(circulation_fixture.db.session) + circulation = BadSearch(circulation_fixture.db.session, MagicMock()) # An attempt to call FeedController.search() will return a # problem detail. 
diff --git a/tests/api/test_google_analytics_provider.py b/tests/api/test_google_analytics_provider.py index ceee85fd6c..26682ceb9e 100644 --- a/tests/api/test_google_analytics_provider.py +++ b/tests/api/test_google_analytics_provider.py @@ -1,5 +1,6 @@ import unicodedata import urllib.parse +from unittest.mock import MagicMock import pytest from psycopg2.extras import NumericRange @@ -37,13 +38,13 @@ def test_init(self, db: DatabaseTransactionFixture): ) with pytest.raises(CannotLoadConfiguration) as excinfo: - GoogleAnalyticsProvider(integration) + GoogleAnalyticsProvider(integration, MagicMock()) assert "Google Analytics can't be configured without a library." in str( excinfo.value ) with pytest.raises(CannotLoadConfiguration) as excinfo: - GoogleAnalyticsProvider(integration, db.default_library()) + GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) assert ( "Missing tracking id for library %s" % db.default_library().short_name in str(excinfo.value) @@ -55,12 +56,12 @@ def test_init(self, db: DatabaseTransactionFixture): db.default_library(), integration, ).value = "faketrackingid" - ga = GoogleAnalyticsProvider(integration, db.default_library()) + ga = GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) assert GoogleAnalyticsProvider.DEFAULT_URL == ga.url assert "faketrackingid" == ga.tracking_id integration.url = db.fresh_str() - ga = GoogleAnalyticsProvider(integration, db.default_library()) + ga = GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) assert integration.url == ga.url assert "faketrackingid" == ga.tracking_id @@ -78,7 +79,7 @@ def test_collect_event_with_work(self, db: DatabaseTransactionFixture): db.default_library(), integration, ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, db.default_library()) + ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) work = db.work( title="pi\u00F1ata", @@ -146,7 +147,7 @@ def test_collect_event_without_work(self, db: DatabaseTransactionFixture): db.default_library(), integration, ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, db.default_library()) + ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) identifier = db.identifier() source = DataSource.lookup(db.session, DataSource.GUTENBERG) @@ -201,7 +202,7 @@ def test_collect_event_without_license_pool(self, db: DatabaseTransactionFixture db.default_library(), integration, ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, db.default_library()) + ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) now = utc_now() ga.collect_event(db.default_library(), None, CirculationEvent.NEW_PATRON, now) diff --git a/tests/api/test_opds2.py b/tests/api/test_opds2.py index 969b290261..5a57845b4f 100644 --- a/tests/api/test_opds2.py +++ b/tests/api/test_opds2.py @@ -1,6 +1,6 @@ import io import json -from unittest.mock import patch +from unittest.mock import MagicMock, patch from urllib.parse import parse_qs, quote, urlparse import pytest @@ -315,7 +315,7 @@ def test_opds2_with_authentication_tokens( work = works[0] identifier = work.presentation_edition.primary_identifier - manager = CirculationManager(controller_fixture.db.session) + manager = CirculationManager(controller_fixture.db.session, MagicMock()) patron = controller_fixture.db.patron() # Borrow the book from the library diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index 5094b5f4d8..a8952f09f4 
100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -4,7 +4,7 @@ import logging from io import StringIO from pathlib import Path -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Optional from unittest.mock import MagicMock, patch import pytest @@ -29,7 +29,6 @@ SessionManager, create, ) -from core.model.configuration import ExternalIntegrationLink from core.opds import AcquisitionFeed from core.util.datetime_helpers import datetime_utc, utc_now from core.util.flask_util import OPDSFeedResponse, Response @@ -543,6 +542,48 @@ def test_do_run( assert work.title in feed.content +class TestCacheMARCFilesFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + self.lane = db.lane(genres=["Science Fiction"]) + self.integration = db.external_integration( + ExternalIntegration.MARC_EXPORT, ExternalIntegration.CATALOG_GOAL + ) + + self.exporter = MARCExporter(None, None, self.integration) + self.mock_records = MagicMock() + self.mock_services = MagicMock() + self.exporter.records = self.mock_records + + def script(self, cmd_args: Optional[list[str]] = None) -> CacheMARCFiles: + cmd_args = cmd_args or [] + return CacheMARCFiles( + self.db.session, services=self.mock_services, cmd_args=cmd_args + ) + + def assert_call(self, call: Any) -> None: + assert call.args[0] == self.lane + assert isinstance(call.args[1], MARCLibraryAnnotator) + assert call.args[2] == self.mock_services.storage.public.return_value + + def create_cached_file(self, end_time: datetime.datetime) -> CachedMARCFile: + representation, _ = self.db.representation() + cached, _ = create( + self.db.session, + CachedMARCFile, + library=self.db.default_library(), + lane=self.lane, + representation=representation, + end_time=end_time, + ) + return cached + + +@pytest.fixture +def cache_marc_files(db: DatabaseTransactionFixture) -> TestCacheMARCFilesFixture: + return TestCacheMARCFilesFixture(db) + + class TestCacheMARCFiles: def test_should_process_library(self, lane_script_fixture: LaneScriptFixture): db = lane_script_fixture.db @@ -582,125 +623,133 @@ def test_should_process_lane(self, lane_script_fixture: LaneScriptFixture): assert True == script.should_process_lane(wl) assert False == script.should_process_lane(empty) - def test_process_lane(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - lane = db.lane(genres=["Science Fiction"]) - integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, ExternalIntegration.CATALOG_GOAL - ) - - class MockMARCExporter(MARCExporter): - called_with = [] - - def records(self, lane, annotator, mirror_integration, start_time=None): - self.called_with += [(lane, annotator, mirror_integration, start_time)] - - exporter = MockMARCExporter(None, None, integration) - - # This just needs to be an ExternalIntegration, but a storage integration - # makes the most sense in this context. 
- the_linked_integration, ignore = create( - db.session, - ExternalIntegration, - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - ) - - integration_link = db.external_integration_link( - integration=integration, - other_integration=the_linked_integration, - purpose=ExternalIntegrationLink.MARC, - ) - - script = CacheMARCFiles(db.session, cmd_args=[]) - script.process_lane(lane, exporter) + def test_process_lane_never_run(self, cache_marc_files: TestCacheMARCFilesFixture): + script = cache_marc_files.script() + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) # If the script has never been run before, it runs the exporter once # to create a file with all records. - assert 1 == len(exporter.called_with) - - assert lane == exporter.called_with[0][0] - assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[0][2] - assert None == exporter.called_with[0][3] + assert cache_marc_files.mock_records.call_count == 1 + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args) + def test_process_lane_cached_update( + self, cache_marc_files: TestCacheMARCFilesFixture + ): # If we have a cached file already, and it's old enough, the script will # run the exporter twice, first to update that file and second to create # a file with changes since that first file was originally created. - exporter.called_with = [] + db = cache_marc_files.db now = utc_now() - yesterday = now - datetime.timedelta(days=1) last_week = now - datetime.timedelta(days=7) + cache_marc_files.create_cached_file(last_week) ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.UPDATE_FREQUENCY, db.default_library(), integration - ).value = 3 - representation, ignore = db.representation() - cached, ignore = create( db.session, - CachedMARCFile, - library=db.default_library(), - lane=lane, - representation=representation, - end_time=last_week, - ) - - script.process_lane(lane, exporter) + MARCExporter.UPDATE_FREQUENCY, + db.default_library(), + cache_marc_files.integration, + ).value = 3 - assert 2 == len(exporter.called_with) + script = cache_marc_files.script() + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) + assert cache_marc_files.mock_records.call_count == 2 - assert lane == exporter.called_with[0][0] - assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[0][2] - assert None == exporter.called_with[0][3] + # First call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) - assert lane == exporter.called_with[1][0] - assert isinstance(exporter.called_with[1][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[1][2] - assert exporter.called_with[1][3] < last_week + # Second call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + < last_week + ) + def test_process_lane_cached_recent( + self, cache_marc_files: TestCacheMARCFilesFixture + ): # If we already have a recent cached file, the script won't do anything. 
- cached.end_time = yesterday - exporter.called_with = [] - script.process_lane(lane, exporter) - assert [] == exporter.called_with + db = cache_marc_files.db + now = utc_now() + yesterday = now - datetime.timedelta(days=1) + cache_marc_files.create_cached_file(yesterday) + ConfigurationSetting.for_library_and_externalintegration( + db.session, + MARCExporter.UPDATE_FREQUENCY, + db.default_library(), + cache_marc_files.integration, + ).value = 3 + script = cache_marc_files.script() + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) + assert cache_marc_files.mock_records.call_count == 0 + + def test_process_lane_cached_recent_force( + self, cache_marc_files: TestCacheMARCFilesFixture + ): # But we can force it to run anyway. - script = CacheMARCFiles(db.session, cmd_args=["--force"]) - script.process_lane(lane, exporter) + db = cache_marc_files.db + now = utc_now() + yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) + cache_marc_files.create_cached_file(yesterday) + ConfigurationSetting.for_library_and_externalintegration( + db.session, + MARCExporter.UPDATE_FREQUENCY, + db.default_library(), + cache_marc_files.integration, + ).value = 3 - assert 2 == len(exporter.called_with) + script = cache_marc_files.script(cmd_args=["--force"]) + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) + assert cache_marc_files.mock_records.call_count == 2 - assert lane == exporter.called_with[0][0] - assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[0][2] - assert None == exporter.called_with[0][3] + # First call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) - assert lane == exporter.called_with[1][0] - assert isinstance(exporter.called_with[1][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[1][2] - assert exporter.called_with[1][3] < yesterday - assert exporter.called_with[1][3] > last_week + # Second call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + < yesterday + ) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + > last_week + ) + def test_process_lane_cached_frequency_zero( + self, cache_marc_files: TestCacheMARCFilesFixture + ): # The update frequency can also be 0, in which case it will always run. + # If we already have a recent cached file, the script won't do anything. 
+ db = cache_marc_files.db + now = utc_now() + yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) + cache_marc_files.create_cached_file(yesterday) ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.UPDATE_FREQUENCY, db.default_library(), integration + db.session, + MARCExporter.UPDATE_FREQUENCY, + db.default_library(), + cache_marc_files.integration, ).value = 0 - exporter.called_with = [] - script = CacheMARCFiles(db.session, cmd_args=[]) - script.process_lane(lane, exporter) + script = cache_marc_files.script() + script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert 2 == len(exporter.called_with) + assert cache_marc_files.mock_records.call_count == 2 - assert lane == exporter.called_with[0][0] - assert isinstance(exporter.called_with[0][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[0][2] - assert None == exporter.called_with[0][3] + # First call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) - assert lane == exporter.called_with[1][0] - assert isinstance(exporter.called_with[1][1], MARCLibraryAnnotator) - assert the_linked_integration == exporter.called_with[1][2] - assert exporter.called_with[1][3] < yesterday - assert exporter.called_with[1][3] > last_week + # Second call + cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + < yesterday + ) + assert ( + cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] + > last_week + ) class TestInstanceInitializationScript: diff --git a/tests/core/conftest.py b/tests/core/conftest.py index 4f494afddc..9ea0933198 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -9,6 +9,7 @@ "tests.fixtures.s3", "tests.fixtures.sample_covers", "tests.fixtures.search", + "tests.fixtures.services", "tests.fixtures.time", "tests.fixtures.tls_server", ] diff --git a/tests/core/models/test_collection.py b/tests/core/models/test_collection.py index b85247f201..a42b7150ed 100644 --- a/tests/core/models/test_collection.py +++ b/tests/core/models/test_collection.py @@ -7,11 +7,7 @@ from core.model import create, get_one_or_create from core.model.circulationevent import CirculationEvent from core.model.collection import Collection -from core.model.configuration import ( - ConfigurationSetting, - ExternalIntegration, - ExternalIntegrationLink, -) +from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.coverage import CoverageRecord, WorkCoverageRecord from core.model.customlist import CustomList from core.model.datasource import DataSource @@ -934,28 +930,6 @@ def test_delete(self, example_collection_fixture: ExampleCollectionFixture): ) setting2.value = "value2" - # Also it has links to another independent ExternalIntegration (S3 storage in this case). - s3_storage = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - libraries=[db.default_library()], - ) - link1 = db.external_integration_link( - integration, - db.default_library(), - s3_storage, - ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS, - ) - link2 = db.external_integration_link( - integration, - db.default_library(), - s3_storage, - ExternalIntegrationLink.COVERS, - ) - - integration.links.append(link1) - integration.links.append(link2) - # It's got a Work that has a LicensePool, which has a License, # which has a loan. 
work = db.work(with_license_pool=True) @@ -1047,20 +1021,15 @@ def remove_work(self, work): # has any LicensePools), but not the second. assert [work] == index.removed - # The collection ExternalIntegration, its settings, and links to other integrations have been deleted. + # The collection ExternalIntegration and its settings have been deleted. # The storage ExternalIntegration remains. external_integrations = db.session.query(ExternalIntegration).all() assert integration not in external_integrations - assert s3_storage in external_integrations settings = db.session.query(ConfigurationSetting).all() for setting in (setting1, setting2): assert setting not in settings - links = db.session.query(ExternalIntegrationLink).all() - for link in (link1, link2): - assert link not in links - # If no search_index is passed into delete() (the default behavior), # we try to instantiate the normal ExternalSearchIndex object. Since # no search index is configured, this will raise an exception -- but diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index f21c2b65b5..e7f7fe8ee6 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -421,12 +421,12 @@ def test_relationships(self, db: DatabaseTransactionFixture): storage1 = db.external_integration( name="integration1", - protocol=ExternalIntegration.S3, + protocol="protocol", ) storage2 = db.external_integration( name="integration2", - protocol=ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, + protocol="protocol", + goal="storage", username="username", password="password", ) @@ -721,17 +721,17 @@ def test_delete( db = example_externalintegration_fixture.database_fixture integration1 = db.external_integration( - ExternalIntegration.LCP, + "protocol", ExternalIntegration.LICENSE_GOAL, libraries=[db.default_library()], ) integration2 = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, + "storage", + "storage goal", libraries=[db.default_library()], ) - # Set up a a link associating integration2 with integration1. + # Set up a link associating integration2 with integration1. link1 = db.external_integration_link( integration1, db.default_library(), diff --git a/tests/core/service/__init__.py b/tests/core/service/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/service/storage/__init__.py b/tests/core/service/storage/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/service/storage/test_configuration.py b/tests/core/service/storage/test_configuration.py new file mode 100644 index 0000000000..2621bff70e --- /dev/null +++ b/tests/core/service/storage/test_configuration.py @@ -0,0 +1,47 @@ +import pytest + +from core.config import CannotLoadConfiguration +from core.service.storage.configuration import StorageConfiguration + + +def test_region_validation_fail(): + with pytest.raises(CannotLoadConfiguration) as exc_info: + StorageConfiguration(region="foo bar baz") + + assert "PALACE_STORAGE_REGION: Invalid region: foo bar baz." 
in str(exc_info.value) + + +def test_region_validation_success(): + configuration = StorageConfiguration(region="us-west-2") + assert configuration.region == "us-west-2" + + configuration = StorageConfiguration(region=None) + assert configuration.region is None + + +@pytest.mark.parametrize( + "url", + [ + "http://localhost:9000", + "https://real.endpoint.com", + "http://192.168.0.1", + ], +) +def test_endpoint_url_validation_success(url: str): + configuration = StorageConfiguration(endpoint_url=url) + assert configuration.endpoint_url == url + + +@pytest.mark.parametrize( + "url, error", + [ + ("ftp://localhost:9000", "URL scheme not permitted"), + ("foo bar baz", "invalid or missing URL scheme"), + ], +) +def test_endpoint_url_validation_fail(url: str, error: str): + with pytest.raises(CannotLoadConfiguration) as exc_info: + StorageConfiguration(endpoint_url=url) + + assert "PALACE_STORAGE_ENDPOINT_URL" in str(exc_info.value) + assert error in str(exc_info.value) diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py new file mode 100644 index 0000000000..7587de7a9e --- /dev/null +++ b/tests/core/service/storage/test_s3.py @@ -0,0 +1,416 @@ +from __future__ import annotations + +import functools +from io import BytesIO +from typing import TYPE_CHECKING, Generator, Optional +from unittest.mock import MagicMock + +import pytest +from botocore.exceptions import BotoCoreError, ClientError +from mypy_boto3_s3 import S3Client +from pydantic import AnyHttpUrl + +from core.config import CannotLoadConfiguration +from core.service.configuration import ServiceConfiguration +from core.service.storage.container import Storage +from core.service.storage.s3 import S3Service + +if TYPE_CHECKING: + from tests.fixtures.s3 import S3ServiceFixture + + +class TestS3Service: + def test_factory(self, s3_service_fixture: S3ServiceFixture): + """The S3Service.factory method returns an S3Service, if given + a bucket, or None otherwise. + """ + # No bucket, no service. + factory = functools.partial( + S3Service.factory, + client=s3_service_fixture.mock_s3_client, + region=s3_service_fixture.region, + url_template=s3_service_fixture.url_template, + ) + assert factory(bucket=None) is None + + # Bucket, service. + service = factory(bucket="bucket") + assert isinstance(service, S3Service) + assert service.client == s3_service_fixture.mock_s3_client + assert service.region == s3_service_fixture.region + assert service.bucket == "bucket" + assert service.url_template == s3_service_fixture.url_template + + @pytest.mark.parametrize( + "url_template", + [ + # No region passed into the constructor, but the URL template + # contains a region. + "https://{bucket}.s3.{region}.amazonaws.com/{key}", + # No key in the URL template. 
+ "https://no-key-in-template.com/", + ], + ) + def test_constructor_exception( + self, url_template: str, s3_service_fixture: S3ServiceFixture + ): + """The S3Service constructor raises an exception if the URL template is invalid.""" + with pytest.raises(CannotLoadConfiguration): + s3_service_fixture.service(url_template=url_template, region=None) + + @pytest.mark.parametrize( + "template,key,expected", + [ + ( + "https://{bucket}.s3.{region}.amazonaws.com/{key}", + "key", + "https://bucket.s3.region.amazonaws.com/key", + ), + ( + "https://test.com/{bucket}/{key}", + "key with spaces", + "https://test.com/bucket/key%20with%20spaces", + ), + ( + "https://test.com/{bucket}/{key}", + "s p a c e s/🔥/slashes%", + "https://test.com/bucket/s%20p%20a%20c%20e%20s/%F0%9F%94%A5/slashes%25", + ), + ( + "https://cdn.com/{key}", + "filename.ext", + "https://cdn.com/filename.ext", + ), + ], + ) + def test_generate_url( + self, + template: str, + key: str, + expected: str, + s3_service_fixture: S3ServiceFixture, + ): + """ + Generate URL creates a URL based on the URL template, it uses format to template in + the region, bucket, and key, then makes sure the URL is urlencoded. + """ + service = s3_service_fixture.service(url_template=template) + url = service.generate_url(key) + assert url == expected + + @pytest.mark.parametrize( + "content", + ["foo bar baz", b"byte string"], + ) + def test_store(self, content: bytes | str, s3_service_fixture: S3ServiceFixture): + service = s3_service_fixture.service() + service.store_stream = MagicMock() + + if isinstance(content, str): + expected_content = content.encode("utf8") + else: + expected_content = content + + service.store("key", content, "text/plain") + service.store_stream.assert_called_once() + assert service.store_stream.call_args.kwargs["key"] == "key" + stream = service.store_stream.call_args.kwargs["stream"] + assert isinstance(stream, BytesIO) + assert stream.getvalue() == expected_content + assert service.store_stream.call_args.kwargs["content_type"] == "text/plain" + + @pytest.mark.parametrize( + "content_type", + ["text/plain", "application/binary", None], + ) + def test_store_stream( + self, content_type: str, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + stream = MagicMock(spec=BytesIO) + + if content_type: + url = service.store_stream("key", stream, content_type) + else: + url = service.store_stream("key", stream) + + mock_s3_client = s3_service_fixture.mock_s3_client + mock_s3_client.upload_fileobj.assert_called_once() + assert mock_s3_client.upload_fileobj.call_args.kwargs["Fileobj"] == stream + assert ( + mock_s3_client.upload_fileobj.call_args.kwargs["Bucket"] + == s3_service_fixture.bucket + ) + assert mock_s3_client.upload_fileobj.call_args.kwargs["Key"] == "key" + assert url == "https://region.test.com/bucket/key" + stream.close.assert_called_once() + + if content_type: + assert mock_s3_client.upload_fileobj.call_args.kwargs["ExtraArgs"] == { + "ContentType": content_type + } + else: + assert mock_s3_client.upload_fileobj.call_args.kwargs["ExtraArgs"] == {} + + @pytest.mark.parametrize( + "exception", + [BotoCoreError(), ClientError({}, "")], + ) + def test_store_stream_exception( + self, exception: Exception, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + stream = MagicMock(spec=BytesIO) + + mock_s3_client = s3_service_fixture.mock_s3_client + mock_s3_client.upload_fileobj.side_effect = exception + assert service.store_stream("key", stream) is None + 
mock_s3_client.upload_fileobj.assert_called_once() + stream.close.assert_called_once() + + def test_multipart_upload(self, s3_service_fixture: S3ServiceFixture): + service = s3_service_fixture.service() + + # Successful upload + with service.multipart(key="key") as upload: + assert upload.client == s3_service_fixture.mock_s3_client + assert upload.bucket == s3_service_fixture.bucket + assert upload.key == "key" + assert upload.parts == [] + + s3_service_fixture.mock_s3_client.create_multipart_upload.assert_called_once() + assert upload.complete is False + assert upload.url == "https://region.test.com/bucket/key" + assert upload.exception is None + + upload.upload_part(b"Part 1") + assert s3_service_fixture.mock_s3_client.upload_part.call_count == 1 + upload.upload_part(b"Part 2") + assert s3_service_fixture.mock_s3_client.upload_part.call_count == 2 + + assert len(upload.parts) == 2 + [part1, part2] = upload.parts + assert part1.PartNumber == 1 + assert part2.PartNumber == 2 + + s3_service_fixture.mock_s3_client.complete_multipart_upload.assert_not_called() + + assert upload.complete is True + assert upload.exception is None + s3_service_fixture.mock_s3_client.complete_multipart_upload.assert_called_once() + + def test_multipart_upload_boto_exception( + self, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + exception = BotoCoreError() + s3_service_fixture.mock_s3_client.upload_part.side_effect = exception + + # A boto exception is raised during upload, but it is captured + # and the upload is aborted. + with service.multipart(key="key") as upload: + assert upload.complete is False + assert upload.url == "https://region.test.com/bucket/key" + assert upload.exception is None + upload.upload_part(b"test") + + assert upload.complete is False + assert upload.exception is exception + s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once() + + def test_multipart_upload_other_exception( + self, s3_service_fixture: S3ServiceFixture + ): + service = s3_service_fixture.service() + exception = ValueError("foo") + s3_service_fixture.mock_s3_client.upload_part.side_effect = exception + + # A non-boto exception is raised during upload, the upload is aborted + # and the exception is raised. + with pytest.raises(ValueError) as excinfo: + with service.multipart(key="key") as upload: + assert upload.complete is False + assert upload.url == "https://region.test.com/bucket/key" + assert upload.exception is None + upload.upload_part(b"test") + + assert upload.complete is False + assert upload.exception is exception + s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once() + assert excinfo.value is exception + + # Calling upload_part after the upload is complete raises an error. 
+ with pytest.raises(RuntimeError): + upload.upload_part(b"foo") + + +class S3UploaderIntegrationConfiguration(ServiceConfiguration): + endpoint_url: AnyHttpUrl + user: str + password: str + + class Config(ServiceConfiguration.Config): + env_prefix = "PALACE_TEST_MINIO_" + + +class S3ServiceIntegrationFixture: + def __init__(self): + self.container = Storage() + self.configuration = S3UploaderIntegrationConfiguration() + self.container.config.from_dict( + { + "access_key": self.configuration.user, + "secret_key": self.configuration.password, + "endpoint_url": self.configuration.endpoint_url, + "region": "us-east-1", + "analytics_bucket": "analytics", + "public_access_bucket": "public", + "url_template": self.configuration.endpoint_url + "/{bucket}/{key}", + } + ) + self.buckets = [] + self.create_bucket("analytics") + self.create_bucket("public") + + @property + def s3_client(self) -> S3Client: + return self.container.s3_client() + + @property + def public(self) -> S3Service: + return self.container.public() + + @property + def analytics(self) -> S3Service: + return self.container.analytics() + + def create_bucket(self, bucket_name: str) -> None: + client = self.s3_client + client.create_bucket(Bucket=bucket_name) + self.buckets.append(bucket_name) + + def close(self): + for bucket in self.buckets: + response = self.s3_client.list_objects(Bucket=bucket) + + for object in response.get("Contents", []): + object_key = object["Key"] + self.s3_client.delete_object(Bucket=bucket, Key=object_key) + + self.s3_client.delete_bucket(Bucket=bucket) + + +@pytest.fixture +def s3_service_integration_fixture() -> Generator[ + S3ServiceIntegrationFixture, None, None +]: + fixture = S3ServiceIntegrationFixture() + yield fixture + fixture.close() + + +@pytest.mark.minio +class TestS3ServiceIntegration: + @pytest.mark.parametrize( + "key, bucket, content, content_type", + [ + ("key", "public", "foo bar baz", "text/plain"), + ("key/w i t h/slash/.!%:", "public", b"byte string", None), + ("key/with/🥏", "public", "🔥", None), + ("ûberkey", "analytics", "foo bar", "application/pdf"), + ("õ/🤖/analytics.foo", "analytics", b"another byte string", None), + ("normal/key", "analytics", "🚀", None), + ], + ) + def test_store( + self, + key: str, + bucket: str, + content: bytes | str, + content_type: Optional[str], + s3_service_integration_fixture: S3ServiceIntegrationFixture, + ): + """The S3Service.store method stores content in the bucket.""" + service = getattr(s3_service_integration_fixture, bucket) + service.store(key, content, content_type) + response = s3_service_integration_fixture.s3_client.get_object( + Bucket=bucket, Key=key + ) + + if isinstance(content, str): + # The response we get back from S3 is always utf-8 encoded bytes. 
+ expected_content = content.encode("utf8") + else: + expected_content = content + + assert response["Body"].read() == expected_content + + if content_type is None: + expected_content_type = "binary/octet-stream" + else: + expected_content_type = content_type + assert response["ContentType"] == expected_content_type + + @pytest.mark.parametrize( + "key, bucket, content, content_type", + [ + ("key", "public", b"foo bar baz", "text/plain"), + ("key/with/slash", "public", b"byte string", None), + ("key/with/🥏", "public", "🔥".encode(), None), + ("ûberkey", "analytics", b"foo bar", "application/pdf"), + ("õ/🤖/analytics.foo", "analytics", b"another byte string", None), + ("normal/key", "analytics", "🚀".encode(), None), + ], + ) + def test_multipart( + self, + key: str, + bucket: str, + content: bytes, + content_type: Optional[str], + s3_service_integration_fixture: S3ServiceIntegrationFixture, + ): + service = getattr(s3_service_integration_fixture, bucket) + part_1_data = ( + b"a" * 5 * 1024**2 + ) # Minimum part size is 5MB, so we generate some junk data to send. + part_2_data = b"b" * 5 * 1024**2 + with service.multipart(key=key, content_type=content_type) as upload: + upload.upload_part(part_1_data) + upload.upload_part(part_2_data) + upload.upload_part(content) + assert not upload.complete + assert upload.exception is None + + assert upload.complete + assert upload.exception is None + + response = s3_service_integration_fixture.s3_client.get_object( + Bucket=bucket, Key=key + ) + assert response["Body"].read() == part_1_data + part_2_data + content + + if content_type is None: + expected_content_type = "binary/octet-stream" + else: + expected_content_type = content_type + assert response["ContentType"] == expected_content_type + + def test_multipart_one_small_part( + self, + s3_service_integration_fixture: S3ServiceIntegrationFixture, + ): + # If we only have one part, we are allowed to upload less than 5MB. 
+ service = s3_service_integration_fixture.public + with service.multipart(key="key") as upload: + upload.upload_part(b"small data") + assert not upload.complete + assert upload.exception is None + + assert upload.complete + assert upload.exception is None + + response = s3_service_integration_fixture.s3_client.get_object( + Bucket="public", Key="key" + ) + assert response["Body"].read() == b"small data" diff --git a/tests/core/service/test_configuration.py b/tests/core/service/test_configuration.py new file mode 100644 index 0000000000..48dd008891 --- /dev/null +++ b/tests/core/service/test_configuration.py @@ -0,0 +1,123 @@ +from __future__ import annotations + +from pathlib import Path +from typing import TYPE_CHECKING + +import pytest +from pyfakefs.fake_filesystem import FakeFilesystem + +from core.config import CannotLoadConfiguration +from core.service.configuration import ServiceConfiguration + +if TYPE_CHECKING: + from _pytest.monkeypatch import MonkeyPatch + + +class MockServiceConfiguration(ServiceConfiguration): + string_with_default: str = "default" + string_without_default: str + int_type: int = 12 + + class Config: + env_prefix = "MOCK_" + + +class ServiceConfigurationFixture: + def __init__(self, type: str, monkeypatch: MonkeyPatch, fs: FakeFilesystem): + self.type = type + self.monkeypatch = monkeypatch + self.fs = fs + + # Make sure the environment is empty + self.monkeypatch.delenv("MOCK_STRING_WITHOUT_DEFAULT", raising=False) + self.monkeypatch.delenv("MOCK_INT_TYPE", raising=False) + self.monkeypatch.delenv("MOCK_STRING_WITH_DEFAULT", raising=False) + + # Make sure the .env file is empty + project_root = Path(__file__).parent.parent.parent.parent.absolute() + self.env_file = fs.create_file(project_root / ".env", contents="") + + def set(self, key: str, value: str): + if self.type == "env": + self.set_env(key, value) + elif self.type == "dot_env": + self.set_dot_env(key, value) + else: + raise ValueError(f"Unknown type: {self.type}") + + def set_env(self, key: str, value: str): + self.monkeypatch.setenv(key, value) + + def set_dot_env(self, key: str, value: str): + existing = self.env_file.contents or "" + self.env_file.set_contents("\n".join([existing, f"{key}={value}"])) + + +@pytest.fixture(params=["env", "dot_env"]) +def service_configuration_fixture( + request: pytest.FixtureRequest, monkeypatch: MonkeyPatch, fs: FakeFilesystem +): + if request.param not in ["env", "dot_env"]: + raise ValueError(f"Unknown param: {request.param}") + + return ServiceConfigurationFixture(request.param, monkeypatch, fs) + + +class TestServiceConfiguration: + def test_set(self, service_configuration_fixture: ServiceConfigurationFixture): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "string") + service_configuration_fixture.set("MOCK_INT_TYPE", "42") + + config = MockServiceConfiguration() + + assert config.string_with_default == "default" + assert config.string_without_default == "string" + assert config.int_type == 42 + + def test_override_default( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "string") + service_configuration_fixture.set("MOCK_INT_TYPE", "42") + # Note the spaces around the value, these should be stripped + service_configuration_fixture.set("MOCK_STRING_WITH_DEFAULT", " not default ") + + config = MockServiceConfiguration() + + assert config.string_with_default == "not default" + assert config.string_without_default == "string" + assert config.int_type == 
42 + + def test_encoding(self, service_configuration_fixture: ServiceConfigurationFixture): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "🎉") + config = MockServiceConfiguration() + assert config.string_without_default == "🎉" + + def test_exception_missing( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + with pytest.raises(CannotLoadConfiguration) as exc_info: + MockServiceConfiguration() + + assert "MOCK_STRING_WITHOUT_DEFAULT: field required" in str(exc_info.value) + + def test_exception_validation( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + service_configuration_fixture.set("MOCK_INT_TYPE", "this is not an int") + + with pytest.raises(CannotLoadConfiguration) as exc_info: + MockServiceConfiguration() + + assert "MOCK_INT_TYPE: value is not a valid integer" in str(exc_info.value) + + def test_exception_mutation( + self, service_configuration_fixture: ServiceConfigurationFixture + ): + service_configuration_fixture.set("MOCK_STRING_WITHOUT_DEFAULT", "string") + config = MockServiceConfiguration() + + with pytest.raises(TypeError): + # Ignore the type error, since it tells us this is immutable, + # and we are testing that behavior at runtime. + config.string_with_default = "new value" # type: ignore[misc] diff --git a/tests/core/test_local_analytics_provider.py b/tests/core/test_local_analytics_provider.py index 031b9a90d0..478e4d6d68 100644 --- a/tests/core/test_local_analytics_provider.py +++ b/tests/core/test_local_analytics_provider.py @@ -1,9 +1,16 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + import pytest from core.local_analytics_provider import LocalAnalyticsProvider from core.model import CirculationEvent, ExternalIntegration, create, get_one from core.util.datetime_helpers import utc_now -from tests.fixtures.database import DatabaseTransactionFixture + +if TYPE_CHECKING: + from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.services import MockServicesFixture class TestInitializeLocalAnalyticsProvider: @@ -49,7 +56,11 @@ class LocalAnalyticsProviderFixture: integration: ExternalIntegration la: LocalAnalyticsProvider - def __init__(self, transaction: DatabaseTransactionFixture): + def __init__( + self, + transaction: DatabaseTransactionFixture, + mock_services_fixture: MockServicesFixture, + ): self.transaction = transaction self.integration, ignore = create( transaction.session, @@ -57,16 +68,17 @@ def __init__(self, transaction: DatabaseTransactionFixture): goal=ExternalIntegration.ANALYTICS_GOAL, protocol="core.local_analytics_provider", ) + self.services = mock_services_fixture.services self.la = LocalAnalyticsProvider( - self.integration, transaction.default_library() + self.integration, self.services, transaction.default_library() ) @pytest.fixture() def local_analytics_provider_fixture( - db, + db: DatabaseTransactionFixture, mock_services_fixture: MockServicesFixture ) -> LocalAnalyticsProviderFixture: - return LocalAnalyticsProviderFixture(db) + return LocalAnalyticsProviderFixture(db, mock_services_fixture) class TestLocalAnalyticsProvider: @@ -123,7 +135,7 @@ def test_collect_event( # It's possible to instantiate the LocalAnalyticsProvider # without a library. - la = LocalAnalyticsProvider(data.integration) + la = LocalAnalyticsProvider(data.integration, data.services) # In that case, it will process events for any library. 
for library in [database.default_library(), library2]: @@ -184,7 +196,7 @@ def test_neighborhood_is_location( data.integration.setting( p.LOCATION_SOURCE ).value = p.LOCATION_SOURCE_NEIGHBORHOOD - la = p(data.integration, database.default_library()) + la = p(data.integration, data.services, database.default_library()) event, is_new = la.collect_event( database.default_library(), diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py index 3d7debba0f..27355241e1 100644 --- a/tests/core/test_marc.py +++ b/tests/core/test_marc.py @@ -1,4 +1,7 @@ +from __future__ import annotations + import datetime +from typing import TYPE_CHECKING from urllib.parse import quote import pytest @@ -24,12 +27,14 @@ Work, get_one, ) -from core.s3 import MockS3Uploader from core.util.datetime_helpers import datetime_utc, utc_now -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.search import ExternalSearchFixtureFake from tests.mocks.search import ExternalSearchIndexFake +if TYPE_CHECKING: + from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.s3 import S3ServiceFixture + from tests.fixtures.search import ExternalSearchFixtureFake + class TestAnnotator: def test_annotate_work_record(self, db: DatabaseTransactionFixture): @@ -467,7 +472,20 @@ def test_add_ebooks_subject(self): self._check_field(record, "655", {"a": "Electronic books."}, [" ", "0"]) -class TestMARCExporter: +class MarcExporterFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + + self.integration = self._integration(db) + self.now = utc_now() + self.exporter = MARCExporter.from_config(db.default_library()) + self.annotator = Annotator() + self.w1 = db.work(genre="Mystery", with_open_access_download=True) + self.w2 = db.work(genre="Mystery", with_open_access_download=True) + + self.search_engine = ExternalSearchIndexFake(db.session) + self.search_engine.mock_query_works([self.w1, self.w2]) + @staticmethod def _integration(db: DatabaseTransactionFixture): return db.external_integration( @@ -476,12 +494,23 @@ def _integration(db: DatabaseTransactionFixture): libraries=[db.default_library()], ) + +@pytest.fixture +def marc_exporter_fixture( + db: DatabaseTransactionFixture, + external_search_fake_fixture: ExternalSearchFixtureFake, +) -> MarcExporterFixture: + # external_search_fake_fixture is used only for the integration it creates + return MarcExporterFixture(db) + + +class TestMARCExporter: def test_from_config(self, db: DatabaseTransactionFixture): pytest.raises( CannotLoadConfiguration, MARCExporter.from_config, db.default_library() ) - integration = self._integration(db) + integration = MarcExporterFixture._integration(db) exporter = MARCExporter.from_config(db.default_library()) assert integration == exporter.integration assert db.default_library() == exporter.library @@ -544,7 +573,7 @@ def test_create_record(self, db: DatabaseTransactionFixture): assert "author, new" in cached # If we pass in an integration, it's passed along to the annotator. 
- integration = self._integration(db) + integration = MarcExporterFixture._integration(db) class MockAnnotator(Annotator): integration = None @@ -586,218 +615,141 @@ def test_create_record_roundtrip(self, db: DatabaseTransactionFixture): new_record = MARCExporter.create_record(new_work, annotator) assert record.as_marc() == new_record.as_marc() - def test_records( + @pytest.mark.parametrize("object_type", ["lane", "worklist"]) + def test_records_lane( self, + object_type: str, db: DatabaseTransactionFixture, - external_search_fake_fixture: ExternalSearchFixtureFake, + s3_service_fixture: S3ServiceFixture, + marc_exporter_fixture: MarcExporterFixture, ): - # external_search_fake_fixture is used only for the integration it creates - integration = self._integration(db) - now = utc_now() - exporter = MARCExporter.from_config(db.default_library()) - annotator = Annotator() - lane = db.lane("Test Lane", genres=["Mystery"]) - w1 = db.work(genre="Mystery", with_open_access_download=True) - w2 = db.work(genre="Mystery", with_open_access_download=True) - - search_engine = ExternalSearchIndexFake(db.session) - search_engine.mock_query_works([w1, w2]) + if object_type == "lane": + lane_or_wl = db.lane("Test Lane", genres=["Mystery"]) + elif object_type == "worklist": + lane_or_wl = WorkList() + lane_or_wl.initialize(db.default_library(), display_name="All Books") + else: + raise RuntimeError() + exporter = marc_exporter_fixture.exporter + annotator = marc_exporter_fixture.annotator + search_engine = marc_exporter_fixture.search_engine # If there's a storage protocol but not corresponding storage integration, # it raises an exception. - pytest.raises(Exception, exporter.records, lane, annotator) - - # If there is a storage integration, the output file is mirrored. - mirror_integration = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="username", - password="password", - ) - - mirror = MockS3Uploader() + pytest.raises(Exception, exporter.records, lane_or_wl, annotator) + storage_service = s3_service_fixture.mock_service() exporter.records( - lane, + lane_or_wl, annotator, - mirror_integration, - mirror=mirror, + storage_service, query_batch_size=1, upload_batch_size=1, search_engine=search_engine, ) # The file was mirrored and a CachedMARCFile was created to track the mirrored file. - assert 1 == len(mirror.uploaded) + assert len(storage_service.uploads) == 1 [cache] = db.session.query(CachedMARCFile).all() - assert db.default_library() == cache.library - assert lane == cache.lane - assert mirror.uploaded[0] == cache.representation - assert None == cache.representation.content - assert ( - "https://test-marc-bucket.s3.amazonaws.com/%s/%s/%s.mrc" - % ( - db.default_library().short_name, - quote(str(cache.representation.fetched_at)), - quote(lane.display_name), - ) - == mirror.uploaded[0].mirror_url - ) - assert None == cache.start_time - assert cache.end_time > now - - # The content was uploaded in two parts. 
- assert 2 == len(mirror.content[0]) - complete_file = b"".join(mirror.content[0]) - records = list(MARCReader(complete_file)) - assert 2 == len(records) + assert cache.library == db.default_library() + if object_type == "lane": + assert cache.lane == lane_or_wl + else: + assert cache.lane is None + assert cache.representation.content is None + assert storage_service.uploads[0].key == "{}/{}/{}.mrc".format( + db.default_library().short_name, + str(cache.representation.fetched_at), + lane_or_wl.display_name, + ) + assert quote(storage_service.uploads[0].key) in cache.representation.mirror_url + assert cache.start_time is None + assert marc_exporter_fixture.now < cache.end_time + + records = list(MARCReader(storage_service.uploads[0].content)) + assert len(records) == 2 title_fields = [record.get_fields("245") for record in records] titles = [fields[0].get_subfields("a")[0] for fields in title_fields] - assert {w1.title, w2.title} == set(titles) - - assert w1.title in w1.marc_record - assert w2.title in w2.marc_record - - db.session.delete(cache) - - search_engine.mock_query_works([w1, w2]) - # It also works with a WorkList instead of a Lane, in which case - # there will be no lane in the CachedMARCFile. - worklist = WorkList() - worklist.initialize(db.default_library(), display_name="All Books") - - mirror = MockS3Uploader() - exporter.records( - worklist, - annotator, - mirror_integration, - mirror=mirror, - query_batch_size=1, - upload_batch_size=1, - search_engine=search_engine, - ) - - assert 1 == len(mirror.uploaded) - [cache] = db.session.query(CachedMARCFile).all() - assert db.default_library() == cache.library - assert None == cache.lane - assert mirror.uploaded[0] == cache.representation - assert None == cache.representation.content - assert ( - "https://test-marc-bucket.s3.amazonaws.com/%s/%s/%s.mrc" - % ( - db.default_library().short_name, - quote(str(cache.representation.fetched_at)), - quote(worklist.display_name), - ) - == mirror.uploaded[0].mirror_url - ) - assert None == cache.start_time - assert cache.end_time > now - - assert 2 == len(mirror.content[0]) - complete_file = b"".join(mirror.content[0]) - records = list(MARCReader(complete_file)) - assert 2 == len(records) + assert set(titles) == { + marc_exporter_fixture.w1.title, + marc_exporter_fixture.w2.title, + } - db.session.delete(cache) + assert marc_exporter_fixture.w1.title in marc_exporter_fixture.w1.marc_record + assert marc_exporter_fixture.w2.title in marc_exporter_fixture.w2.marc_record + def test_records_start_time( + self, + db: DatabaseTransactionFixture, + s3_service_fixture: S3ServiceFixture, + marc_exporter_fixture: MarcExporterFixture, + ): # If a start time is set, it's used in the mirror url. # # (Our mock search engine returns everthing in its 'index', # so this doesn't test that the start time is actually used to # find works -- that's in the search index tests and the # tests of MARCExporterFacets.) 
- start_time = now - datetime.timedelta(days=3) + start_time = marc_exporter_fixture.now - datetime.timedelta(days=3) + exporter = marc_exporter_fixture.exporter + annotator = marc_exporter_fixture.annotator + search_engine = marc_exporter_fixture.search_engine + lane = db.lane("Test Lane", genres=["Mystery"]) + storage_service = s3_service_fixture.mock_service() - mirror = MockS3Uploader() exporter.records( lane, annotator, - mirror_integration, + storage_service, start_time=start_time, - mirror=mirror, query_batch_size=2, upload_batch_size=2, search_engine=search_engine, ) [cache] = db.session.query(CachedMARCFile).all() - assert db.default_library() == cache.library - assert lane == cache.lane - assert mirror.uploaded[0] == cache.representation - assert None == cache.representation.content - assert ( - "https://test-marc-bucket.s3.amazonaws.com/%s/%s-%s/%s.mrc" - % ( - db.default_library().short_name, - quote(str(start_time)), - quote(str(cache.representation.fetched_at)), - quote(lane.display_name), - ) - == mirror.uploaded[0].mirror_url + assert cache.library == db.default_library() + assert cache.lane == lane + assert cache.representation.content is None + assert storage_service.uploads[0].key == "{}/{}-{}/{}.mrc".format( + db.default_library().short_name, + str(start_time), + str(cache.representation.fetched_at), + lane.display_name, ) - assert start_time == cache.start_time - assert cache.end_time > now - db.session.delete(cache) + assert cache.start_time == start_time + assert marc_exporter_fixture.now < cache.end_time + def test_records_empty_search( + self, + db: DatabaseTransactionFixture, + s3_service_fixture: S3ServiceFixture, + marc_exporter_fixture: MarcExporterFixture, + ): # If the search engine returns no contents for the lane, # nothing will be mirrored, but a CachedMARCFile is still # created to track that we checked for updates. - search_engine.mock_query_works([]) + exporter = marc_exporter_fixture.exporter + annotator = marc_exporter_fixture.annotator + empty_search_engine = ExternalSearchIndexFake(db.session) + lane = db.lane("Test Lane", genres=["Mystery"]) + storage_service = s3_service_fixture.mock_service() - mirror = MockS3Uploader() exporter.records( lane, annotator, - mirror_integration, - mirror=mirror, - search_engine=search_engine, + storage_service, + search_engine=empty_search_engine, ) - assert [] == mirror.content[0] + assert [] == storage_service.uploads [cache] = db.session.query(CachedMARCFile).all() - assert cache.representation == mirror.uploaded[0] - assert db.default_library() == cache.library - assert lane == cache.lane - assert None == cache.representation.content - assert None == cache.start_time - assert cache.end_time > now - - db.session.delete(cache) - - def test_get_storage_settings(self, db: DatabaseTransactionFixture): - # Two ExternalIntegration, one has a marc_bucket setting, and the - # other doesn't. - has_marc_bucket = db.external_integration( - name="has_marc_bucket", - protocol=db.fresh_str(), - goal=ExternalIntegration.STORAGE_GOAL, - settings={"marc_bucket": "test-marc-bucket"}, - ) - db.external_integration( - name="no_marc_bucket", - protocol=db.fresh_str(), - goal=ExternalIntegration.STORAGE_GOAL, - ) - - # Before we call get_storage_settings, the only option is the default. 
- assert MARCExporter.SETTING["options"] == [ - MARCExporter.DEFAULT_MIRROR_INTEGRATION - ] - - MARCExporter.get_storage_settings(db.session) - - # After we call get_storage_settings, the options are the default and - # the ExternalIntegration with a marc_bucket setting. - assert len(MARCExporter.SETTING["options"]) == 2 - [default, from_config] = MARCExporter.SETTING["options"] - assert default == MARCExporter.DEFAULT_MIRROR_INTEGRATION - assert from_config == { - "key": str(has_marc_bucket.id), - "label": has_marc_bucket.name, - } + assert cache.library == db.default_library() + assert cache.lane == lane + assert cache.representation.content is None + assert cache.start_time is None + assert marc_exporter_fixture.now < cache.end_time class TestMARCExporterFacets: diff --git a/tests/core/test_marc2.py b/tests/core/test_marc2.py deleted file mode 100644 index 07b96a8502..0000000000 --- a/tests/core/test_marc2.py +++ /dev/null @@ -1,15 +0,0 @@ -from core.mirror import MirrorUploader - - -class TestMiscellaneous: - def test_mirror_uploader_implementations_are_being_loaded(self): - """ - This test verifies that the two S3 mirror implementations are being - loaded when the MARCExporter is imported. It was not added to - tests/core/test_marc.py because that test causes the implementations - to be loaded since it references the core.s3 package directly. - """ - from core.marc import MARCExporter # noqa: autoflake - - assert MirrorUploader.IMPLEMENTATION_REGISTRY.get("Amazon S3") - assert MirrorUploader.IMPLEMENTATION_REGISTRY.get("MinIO") diff --git a/tests/core/test_mirror_uploader.py b/tests/core/test_mirror_uploader.py deleted file mode 100644 index 2487397079..0000000000 --- a/tests/core/test_mirror_uploader.py +++ /dev/null @@ -1,240 +0,0 @@ -import pytest - -from core.config import CannotLoadConfiguration -from core.mirror import MirrorUploader -from core.model import ExternalIntegration -from core.model.configuration import ExternalIntegrationLink -from core.s3 import ( - MinIOUploader, - MinIOUploaderConfiguration, - S3Uploader, - S3UploaderConfiguration, -) -from core.util.datetime_helpers import utc_now -from tests.fixtures.database import DatabaseTransactionFixture - - -class DummySuccessUploader(MirrorUploader): - def __init__(self, integration=None): - pass - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - pass - - def cover_image_url(self, data_source, identifier, filename=None, scaled_size=None): - pass - - def sign_url(self, url, expiration=None): - pass - - def split_url(self, url, unquote=True): - pass - - def do_upload(self, representation): - return None - - -class DummyFailureUploader(MirrorUploader): - def __init__(self, integration=None): - pass - - def book_url( - self, - identifier, - extension=".epub", - open_access=True, - data_source=None, - title=None, - ): - pass - - def cover_image_url(self, data_source, identifier, filename=None, scaled_size=None): - pass - - def sign_url(self, url, expiration=None): - pass - - def split_url(self, url, unquote=True): - pass - - def do_upload(self, representation): - return "I always fail." - - -class TestInitialization: - """Test the ability to get a MirrorUploader for various aspects of site - configuration. 
- """ - - @staticmethod - def _integration(data: DatabaseTransactionFixture) -> ExternalIntegration: - """Helper method to make a storage ExternalIntegration.""" - storage_name = "some storage" - integration = data.external_integration("my protocol") - integration.goal = ExternalIntegration.STORAGE_GOAL - integration.name = storage_name - return integration - - @pytest.mark.parametrize( - "name,protocol,uploader_class,settings", - [ - ("s3_uploader", ExternalIntegration.S3, S3Uploader, None), - ( - "minio_uploader", - ExternalIntegration.MINIO, - MinIOUploader, - {MinIOUploaderConfiguration.ENDPOINT_URL: "http://localhost"}, - ), - ], - ) - def test_mirror( - self, - db, - name, - protocol, - uploader_class, - settings, - ): - storage_name = "some storage" - # If there's no integration with goal=STORAGE or name=storage_name, - # MirrorUploader.mirror raises an exception. - with pytest.raises(CannotLoadConfiguration) as excinfo: - MirrorUploader.mirror(db.session, storage_name) - assert "No storage integration with name 'some storage' is configured" in str( - excinfo.value - ) - - # If there's only one, mirror() uses it to initialize a - # MirrorUploader. - integration = self._integration(db) - integration.protocol = protocol - - if settings: - for key, value in settings.items(): - integration.setting(key).value = value - - uploader = MirrorUploader.mirror(db.session, integration=integration) - - assert isinstance(uploader, uploader_class) - - def test_integration_by_name(self, db: DatabaseTransactionFixture): - integration = self._integration(db) - - # No name was passed so nothing is found - with pytest.raises(CannotLoadConfiguration) as excinfo: - MirrorUploader.integration_by_name(db.session) - assert "No storage integration with name 'None' is configured" in str( - excinfo.value - ) - - # Correct name was passed - integration = MirrorUploader.integration_by_name(db.session, integration.name) - assert isinstance(integration, ExternalIntegration) - - def test_for_collection(self, db: DatabaseTransactionFixture): - - # This collection has no mirror_integration, so - # there is no MirrorUploader for it. - collection = db.collection() - assert None == MirrorUploader.for_collection( - collection, ExternalIntegrationLink.COVERS - ) - - # This collection has a properly configured mirror_integration, - # so it can have an MirrorUploader. - integration = db.external_integration( - ExternalIntegration.S3, - ExternalIntegration.STORAGE_GOAL, - username="username", - password="password", - settings={S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "some-covers"}, - ) - integration_link = db.external_integration_link( - integration=collection._external_integration, - other_integration=integration, - purpose=ExternalIntegrationLink.COVERS, - ) - - uploader = MirrorUploader.for_collection( - collection, ExternalIntegrationLink.COVERS - ) - assert isinstance(uploader, MirrorUploader) - - @pytest.mark.parametrize( - "name,protocol,uploader_class,settings", - [ - ("s3_uploader", ExternalIntegration.S3, S3Uploader, None), - ( - "minio_uploader", - ExternalIntegration.MINIO, - MinIOUploader, - {MinIOUploaderConfiguration.ENDPOINT_URL: "http://localhost"}, - ), - ], - ) - def test_constructor( - self, - db, - name, - protocol, - uploader_class, - settings, - ): - # You can't create a MirrorUploader with an integration - # that's not designed for storage. 
- integration = self._integration(db) - integration.goal = ExternalIntegration.LICENSE_GOAL - integration.protocol = protocol - - if settings: - for key, value in settings.items(): - integration.setting(key).value = value - with pytest.raises(CannotLoadConfiguration) as excinfo: - uploader_class(integration) - assert "from an integration with goal=licenses" in str(excinfo.value) - - def test_implementation_registry(self, db: DatabaseTransactionFixture): - session = db.session - - # The implementation class used for a given ExternalIntegration - # is controlled by the integration's protocol and the contents - # of the MirrorUploader's implementation registry. - MirrorUploader.IMPLEMENTATION_REGISTRY["my protocol"] = DummyFailureUploader - - integration = self._integration(db) - uploader = MirrorUploader.mirror(session, integration=integration) - assert isinstance(uploader, DummyFailureUploader) - del MirrorUploader.IMPLEMENTATION_REGISTRY["my protocol"] - - -class TestMirrorUploader: - """Test the basic workflow of MirrorUploader.""" - - def test_mirror_batch(self, db: DatabaseTransactionFixture): - r1, ignore = db.representation() - r2, ignore = db.representation() - uploader = DummySuccessUploader() - uploader.mirror_batch([r1, r2]) - assert r1.mirrored_at != None - assert r2.mirrored_at != None - - def test_success_and_then_failure(self, db: DatabaseTransactionFixture): - r, ignore = db.representation() - now = utc_now() - DummySuccessUploader().mirror_one(r, "") - assert r.mirrored_at > now - assert None == r.mirror_exception - - # Even if the original upload succeeds, a subsequent upload - # may fail in a way that leaves the image in an inconsistent - # state. - DummyFailureUploader().mirror_one(r, "") - assert None == r.mirrored_at - assert "I always fail." == r.mirror_exception diff --git a/tests/core/test_s3.py b/tests/core/test_s3.py deleted file mode 100644 index a2f216279a..0000000000 --- a/tests/core/test_s3.py +++ /dev/null @@ -1,1356 +0,0 @@ -import functools -from unittest.mock import MagicMock - -import botocore -import pytest -from botocore.exceptions import BotoCoreError, ClientError - -from core.mirror import MirrorUploader -from core.model import ( - DataSource, - ExternalIntegration, - Hyperlink, - Identifier, - Representation, - create, -) -from core.s3 import ( - MinIOUploader, - MinIOUploaderConfiguration, - MockS3Client, - MultipartS3Upload, - S3AddressingStyle, - S3Uploader, - S3UploaderConfiguration, -) -from core.util.datetime_helpers import datetime_utc, utc_now - -# TODO: we can drop this when we drop support for Python 3.6 and 3.7 -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.s3 import S3UploaderFixture, S3UploaderIntegrationFixture -from tests.fixtures.sample_covers import SampleCoversFixture - - -class TestS3Uploader: - def test_names(self): - # The NAME associated with this class must be the same as its - # key in the MirrorUploader implementation registry, and it's - # better if it's the same as the name of the external - # integration. 
- assert S3Uploader.NAME == ExternalIntegration.S3 - assert ( - S3Uploader == MirrorUploader.IMPLEMENTATION_REGISTRY[ExternalIntegration.S3] - ) - - def test_instantiation(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - integration = transaction.external_integration( - ExternalIntegration.S3, goal=ExternalIntegration.STORAGE_GOAL - ) - integration.username = "your-access-key" - integration.password = "your-secret-key" - integration.setting( - S3UploaderConfiguration.URL_TEMPLATE_KEY - ).value = "a transform" - uploader = MirrorUploader.implementation(integration) - assert True == isinstance(uploader, S3Uploader) - - # The URL_TEMPLATE_KEY setting becomes the .url_transform - # attribute on the S3Uploader object. - assert "a transform" == uploader.url_transform - - @pytest.mark.parametrize( - "name,username,password", - [ - ("empty_credentials", None, None), - ("empty_string_credentials", "", ""), - ("non_empty_string_credentials", "username", "password"), - ], - ) - def test_initialization( - self, s3_uploader_fixture: S3UploaderFixture, name, username, password - ): - # Arrange - transaction = s3_uploader_fixture.transaction - settings = {"username": username, "password": password} - integration = transaction.external_integration( - ExternalIntegration.S3, - goal=ExternalIntegration.STORAGE_GOAL, - settings=settings, - ) - client_class = MagicMock() - - # Act - S3Uploader(integration, client_class=client_class) - - # Assert - assert client_class.call_count == 2 - - service_name = client_class.call_args_list[0].args[0] - region_name = client_class.call_args_list[0].kwargs["region_name"] - aws_access_key_id = client_class.call_args_list[0].kwargs["aws_access_key_id"] - aws_secret_access_key = client_class.call_args_list[0].kwargs[ - "aws_secret_access_key" - ] - config = client_class.call_args_list[0].kwargs["config"] - assert service_name == "s3" - assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION - assert aws_access_key_id == None - assert aws_secret_access_key == None - assert config.signature_version == botocore.UNSIGNED - assert ( - config.s3["addressing_style"] - == S3UploaderConfiguration.S3_DEFAULT_ADDRESSING_STYLE - ) - - service_name = client_class.call_args_list[1].args[0] - region_name = client_class.call_args_list[1].kwargs["region_name"] - aws_access_key_id = client_class.call_args_list[1].kwargs["aws_access_key_id"] - aws_secret_access_key = client_class.call_args_list[1].kwargs[ - "aws_secret_access_key" - ] - assert service_name == "s3" - assert region_name == S3UploaderConfiguration.S3_DEFAULT_REGION - assert aws_access_key_id == (username if username != "" else None) - assert aws_secret_access_key == (password if password != "" else None) - assert "config" not in client_class.call_args_list[1].kwargs - - def test_custom_client_class(self, s3_uploader_fixture: S3UploaderFixture): - """You can specify a client class to use instead of boto3.client.""" - integration = s3_uploader_fixture.integration() - uploader = S3Uploader(integration, MockS3Client) - assert isinstance(uploader.client, MockS3Client) - - def test_get_bucket(self, s3_uploader_fixture: S3UploaderFixture): - buckets = { - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "banana", - S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "bucket", - } - buckets_plus_irrelevant_setting = dict(buckets) - buckets_plus_irrelevant_setting["not-a-bucket-at-all"] = "value" - uploader = s3_uploader_fixture.create_s3_uploader( - **buckets_plus_irrelevant_setting - ) 
- - # This S3Uploader knows about the configured buckets. It - # wasn't informed of the irrelevant 'not-a-bucket-at-all' - # setting. - assert buckets == uploader.buckets - - # get_bucket just does a lookup in .buckets - uploader.buckets["foo"] = object() - result = uploader.get_bucket("foo") - assert uploader.buckets["foo"] == result - - @pytest.mark.parametrize( - "name,bucket,path,expected_result,region,addressing_style", - [ - ( - "s3_url_with_path_without_slash", - "a-bucket", - "a-path", - "https://a-bucket.s3.amazonaws.com/a-path", - None, - None, - ), - ( - "s3_dummy_url_with_path_without_slash", - "dummy", - "dummy", - "https://dummy.s3.amazonaws.com/dummy", - None, - None, - ), - ( - "s3_path_style_url_with_path_without_slash", - "a-bucket", - "a-path", - "https://s3.amazonaws.com/a-bucket/a-path", - None, - S3AddressingStyle.PATH.value, - ), - ( - "s3_path_style_dummy_url_with_path_without_slash", - "dummy", - "dummy", - "https://s3.amazonaws.com/dummy/dummy", - None, - S3AddressingStyle.PATH.value, - ), - ( - "s3_url_with_path_with_slash", - "a-bucket", - "/a-path", - "https://a-bucket.s3.amazonaws.com/a-path", - None, - None, - ), - ( - "s3_path_style_url_with_path_with_slash", - "a-bucket", - "/a-path", - "https://s3.amazonaws.com/a-bucket/a-path", - None, - S3AddressingStyle.PATH.value, - ), - ( - "s3_url_with_custom_region_and_path_without_slash", - "a-bucket", - "a-path", - "https://a-bucket.s3.us-east-2.amazonaws.com/a-path", - "us-east-2", - None, - ), - ( - "s3_path_style_url_with_custom_region_and_path_without_slash", - "a-bucket", - "a-path", - "https://s3.us-east-2.amazonaws.com/a-bucket/a-path", - "us-east-2", - S3AddressingStyle.PATH.value, - ), - ( - "s3_url_with_custom_region_and_path_with_slash", - "a-bucket", - "/a-path", - "https://a-bucket.s3.us-east-3.amazonaws.com/a-path", - "us-east-3", - None, - ), - ( - "s3_path_style_url_with_custom_region_and_path_with_slash", - "a-bucket", - "/a-path", - "https://s3.us-east-3.amazonaws.com/a-bucket/a-path", - "us-east-3", - S3AddressingStyle.PATH.value, - ), - ( - "custom_http_url_and_path_without_slash", - "http://a-bucket.com/", - "a-path", - "http://a-bucket.com/a-path", - None, - None, - ), - ( - "custom_http_url_and_path_with_slash", - "http://a-bucket.com/", - "/a-path", - "http://a-bucket.com/a-path", - None, - None, - ), - ( - "custom_http_url_and_path_without_slash", - "https://a-bucket.com/", - "a-path", - "https://a-bucket.com/a-path", - None, - None, - ), - ( - "custom_http_url_and_path_with_slash", - "https://a-bucket.com/", - "/a-path", - "https://a-bucket.com/a-path", - None, - None, - ), - ], - ) - def test_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - path, - expected_result, - region, - addressing_style, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader( - region=region, addressing_style=addressing_style - ) - - # Act - result = uploader.url(bucket, path) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,key,expected_result,url_transform,region", - [ - ( - "implicit_s3_url_template", - "bucket", - "the key", - "https://bucket.s3.amazonaws.com/the%20key", - None, - None, - ), - ( - "implicit_s3_url_template_with_custom_region", - "bucket", - "the key", - "https://bucket.s3.us-east-2.amazonaws.com/the%20key", - None, - "us-east-2", - ), - ( - "explicit_s3_url_template", - "bucket", - "the key", - "https://bucket.s3.amazonaws.com/the%20key", - S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, - None, - ), - ( - 
"explicit_s3_url_template_with_custom_region", - "bucket", - "the key", - "https://bucket.s3.us-east-2.amazonaws.com/the%20key", - S3UploaderConfiguration.URL_TEMPLATE_DEFAULT, - "us-east-2", - ), - ( - "http_url_template", - "bucket", - "the këy", - "http://bucket/the%20k%C3%ABy", - S3UploaderConfiguration.URL_TEMPLATE_HTTP, - None, - ), - ( - "https_url_template", - "bucket", - "the këy", - "https://bucket/the%20k%C3%ABy", - S3UploaderConfiguration.URL_TEMPLATE_HTTPS, - None, - ), - ], - ) - def test_final_mirror_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - key, - expected_result, - url_transform, - region, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - - if url_transform: - uploader.url_transform = url_transform - - # Act - result = uploader.final_mirror_url(bucket, key) - - # Assert - if not url_transform: - assert ( - S3UploaderConfiguration.URL_TEMPLATE_DEFAULT == uploader.url_transform - ) - - assert result == expected_result - - def test_key_join(self): - """Test the code used to build S3 keys from parts.""" - parts = ["Gutenberg", b"Gutenberg ID", 1234, "Die Flügelmaus+.epub"] - assert ( - "Gutenberg/Gutenberg%20ID/1234/Die%20Fl%C3%BCgelmaus%2B.epub" - == S3Uploader.key_join(parts) - ) - - @pytest.mark.parametrize( - "name,bucket,data_source_name,expected_result,scaled_size,region,", - [ - ( - "with_gutenberg_cover_generator_data_source", - "test-book-covers-s3-bucket", - DataSource.GUTENBERG_COVER_GENERATOR, - "https://test-book-covers-s3-bucket.s3.amazonaws.com/Gutenberg%20Illustrated/", - None, - None, - ), - ( - "with_overdrive_data_source", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.amazonaws.com/Overdrive/", - None, - None, - ), - ( - "with_overdrive_data_source_and_scaled_size", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.amazonaws.com/scaled/300/Overdrive/", - 300, - None, - ), - ( - "with_gutenberg_cover_generator_data_source_and_custom_region", - "test-book-covers-s3-bucket", - DataSource.GUTENBERG_COVER_GENERATOR, - "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Gutenberg%20Illustrated/", - None, - "us-east-3", - ), - ( - "with_overdrive_data_source_and_custom_region", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/Overdrive/", - None, - "us-east-3", - ), - ( - "with_overdrive_data_source_and_scaled_size_and_custom_region", - "test-book-covers-s3-bucket", - DataSource.OVERDRIVE, - "https://test-book-covers-s3-bucket.s3.us-east-3.amazonaws.com/scaled/300/Overdrive/", - 300, - "us-east-3", - ), - ], - ) - def test_cover_image_root( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - data_source_name, - expected_result, - scaled_size, - region, - ): - # Arrange - session = s3_uploader_fixture.transaction.session - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - data_source = DataSource.lookup(session, data_source_name) - - # Act - result = uploader.cover_image_root(bucket, data_source, scaled_size=scaled_size) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,expected_result,region", - [ - ( - "with_default_region", - "test-open-access-s3-bucket", - "https://test-open-access-s3-bucket.s3.amazonaws.com/", - None, - ), - ( - "with_custom_region", - "test-open-access-s3-bucket", - 
"https://test-open-access-s3-bucket.s3.us-east-3.amazonaws.com/", - "us-east-3", - ), - ], - ) - def test_content_root( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - expected_result, - region, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - - # Act - result = uploader.content_root(bucket) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,library_name,expected_result,region", - [ - ( - "s3_url", - "test-marc-s3-bucket", - "SHORT", - "https://test-marc-s3-bucket.s3.amazonaws.com/SHORT/", - None, - ), - ( - "s3_url_with_custom_region", - "test-marc-s3-bucket", - "SHORT", - "https://test-marc-s3-bucket.s3.us-east-2.amazonaws.com/SHORT/", - "us-east-2", - ), - ( - "custom_http_url", - "http://my-feed/", - "SHORT", - "http://my-feed/SHORT/", - None, - ), - ( - "custom_https_url", - "https://my-feed/", - "SHORT", - "https://my-feed/SHORT/", - None, - ), - ], - ) - def test_marc_file_root( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - library_name, - expected_result, - region, - ): - # Arrange - uploader = s3_uploader_fixture.create_s3_uploader(region=region) - library = s3_uploader_fixture.transaction.library(short_name=library_name) - - # Act - result = uploader.marc_file_root(bucket, library) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,buckets,identifier,expected_result,extension,data_source_name,title,region,open_access", - [ - ( - "with_identifier", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.epub", - None, - None, - None, - None, - True, - ), - ( - "with_custom_extension", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", - "pdf", - None, - None, - None, - True, - ), - ( - "with_custom_dotted_extension", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK.pdf", - ".pdf", - None, - None, - None, - True, - ), - ( - "with_custom_data_source", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK.epub", - None, - DataSource.UNGLUE_IT, - None, - None, - True, - ), - ( - "with_custom_title", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/Gutenberg%20ID/ABOOK/On%20Books.epub", - None, - None, - "On Books", - None, - True, - ), - ( - "with_custom_extension_and_title_and_data_source", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", - ".pdf", - DataSource.UNGLUE_IT, - "On Books", - None, - True, - ), - ( - "with_custom_extension_and_title_and_data_source_and_region", - {S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", - ".pdf", - DataSource.UNGLUE_IT, - "On Books", - "us-east-3", - True, - ), - ( - "with_protected_access_and_custom_extension_and_title_and_data_source_and_region", - {S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY: "thebooks"}, - "ABOOK", - "https://thebooks.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/On%20Books.pdf", - ".pdf", - DataSource.UNGLUE_IT, - "On Books", - 
"us-east-3", - False, - ), - ], - ) - def test_book_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - buckets, - identifier, - expected_result, - extension, - data_source_name, - title, - region, - open_access, - ): - # Arrange - transaction = s3_uploader_fixture.transaction - identifier = transaction.identifier(foreign_id=identifier) - uploader = s3_uploader_fixture.create_s3_uploader(region=region, **buckets) - - parameters = {"identifier": identifier, "open_access": open_access} - - if extension: - parameters["extension"] = extension - if title: - parameters["title"] = title - - if data_source_name: - data_source = DataSource.lookup(transaction.session, DataSource.UNGLUE_IT) - parameters["data_source"] = data_source - - # Act - result = uploader.book_url(**parameters) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,buckets,data_source_name,identifier,filename,expected_result,scaled_size,region", - [ - ( - "without_scaled_size", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename", - None, - None, - ), - ( - "without_scaled_size_and_with_custom_region", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.us-east-3.amazonaws.com/unglue.it/Gutenberg%20ID/ABOOK/filename", - None, - "us-east-3", - ), - ( - "with_scaled_size", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", - 601, - None, - ), - ( - "with_scaled_size_and_custom_region", - {S3UploaderConfiguration.BOOK_COVERS_BUCKET_KEY: "thecovers"}, - DataSource.UNGLUE_IT, - "ABOOK", - "filename", - "https://thecovers.s3.us-east-3.amazonaws.com/scaled/601/unglue.it/Gutenberg%20ID/ABOOK/filename", - 601, - "us-east-3", - ), - ], - ) - def test_cover_image_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - buckets, - data_source_name, - identifier, - filename, - expected_result, - scaled_size, - region, - ): - transaction = s3_uploader_fixture.transaction - - # identifier = self._identifier(foreign_id="ABOOK") - # buckets = {S3Uploader.BOOK_COVERS_BUCKET_KEY : 'thecovers'} - # uploader = self._uploader(**buckets) - # m = uploader.cover_image_url - # - # unglueit = DataSource.lookup(self._db, DataSource.UNGLUE_IT) - # identifier = self._identifier(foreign_id="ABOOK") - # eq_('https://s3.amazonaws.com/thecovers/scaled/601/unglue.it/Gutenberg+ID/ABOOK/filename', - # m(unglueit, identifier, "filename", scaled_size=601)) - - # Arrange - data_source = DataSource.lookup(transaction.session, data_source_name) - identifier = transaction.identifier(foreign_id=identifier) - uploader = s3_uploader_fixture.create_s3_uploader(region=region, **buckets) - - # Act - result = uploader.cover_image_url( - data_source, identifier, filename, scaled_size=scaled_size - ) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,bucket,library_name,lane_name,end_time,expected_result,start_time,region", - [ - ( - "with_s3_bucket_and_end_time", - "marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 1, 0, 0, 0), - "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00/Lane.mrc", - None, - None, - ), - ( - "with_s3_bucket_and_end_time_and_start_time", - "marc", - "SHORT", - "Lane", - 
datetime_utc(2020, 1, 2, 0, 0, 0), - "https://marc.s3.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - None, - ), - ( - "with_s3_bucket_and_end_time_and_start_time_and_custom_region", - "marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 2, 0, 0, 0), - "https://marc.s3.us-east-2.amazonaws.com/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - "us-east-2", - ), - ( - "with_http_bucket_and_end_time_and_start_time", - "http://marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 2, 0, 0, 0), - "http://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - None, - ), - ( - "with_https_bucket_and_end_time_and_start_time", - "https://marc", - "SHORT", - "Lane", - datetime_utc(2020, 1, 2, 0, 0, 0), - "https://marc/SHORT/2020-01-01%2000%3A00%3A00%2B00%3A00-2020-01-02%2000%3A00%3A00%2B00%3A00/Lane.mrc", - datetime_utc(2020, 1, 1, 0, 0, 0), - None, - ), - ], - ) - def test_marc_file_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - bucket, - library_name, - lane_name, - end_time, - expected_result, - start_time, - region, - ): - # Arrange - transaction = s3_uploader_fixture.transaction - library = transaction.library(short_name=library_name) - lane = transaction.lane(display_name=lane_name) - buckets = {S3UploaderConfiguration.MARC_BUCKET_KEY: bucket} - uploader = s3_uploader_fixture.create_s3_uploader(region=region, **buckets) - - # Act - result = uploader.marc_file_url(library, lane, end_time, start_time) - - # Assert - assert result == expected_result - - @pytest.mark.parametrize( - "name,url,expected_result,unquote", - [ - ( - "s3_path_style_request_without_region", - "https://s3.amazonaws.com/bucket/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_path_style_request_with_region", - "https://s3.us-east-2.amazonaws.com/bucket/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_virtual_hosted_style_request_with_global_endpoint", - "https://bucket.s3.amazonaws.com/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_virtual_hosted_style_request_with_dashed_region", - "https://bucket.s3-us-east-2.amazonaws.com/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "s3_virtual_hosted_style_request_with_dotted_region", - "https://bucket.s3.us-east-2.amazonaws.com/directory/filename.jpg", - ("bucket", "directory/filename.jpg"), - True, - ), - ( - "http_url", - "http://book-covers.nypl.org/directory/filename.jpg", - ("book-covers.nypl.org", "directory/filename.jpg"), - True, - ), - ( - "https_url", - "https://book-covers.nypl.org/directory/filename.jpg", - ("book-covers.nypl.org", "directory/filename.jpg"), - True, - ), - ( - "http_url_with_escaped_symbols", - "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", - ("book-covers.nypl.org", "directory/filename with spaces!.jpg"), - True, - ), - ( - "http_url_with_escaped_symbols_but_unquote_set_to_false", - "http://book-covers.nypl.org/directory/filename+with+spaces%21.jpg", - ("book-covers.nypl.org", "directory/filename+with+spaces%21.jpg"), - False, - ), - ], - ) - def test_split_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - url, - expected_result, - unquote, - ): - # Arrange - s3_uploader = 
s3_uploader_fixture.create_s3_uploader() - - # Act - result = s3_uploader.split_url(url, unquote) - - # Assert - assert result == expected_result - - def test_mirror_one( - self, - s3_uploader_fixture: S3UploaderFixture, - sample_covers_fixture: SampleCoversFixture, - ): - transaction = s3_uploader_fixture.transaction - - edition, pool = transaction.edition(with_license_pool=True) - original_cover_location = "http://example.com/a-cover.png" - content = open( - sample_covers_fixture.sample_cover_path("test-book-cover.png"), "rb" - ).read() - cover, ignore = pool.add_link( - Hyperlink.IMAGE, - original_cover_location, - edition.data_source, - Representation.PNG_MEDIA_TYPE, - content=content, - ) - cover_rep = cover.resource.representation - assert None == cover_rep.mirrored_at - - original_epub_location = "https://books.com/a-book.epub" - epub, ignore = pool.add_link( - Hyperlink.OPEN_ACCESS_DOWNLOAD, - original_epub_location, - edition.data_source, - Representation.EPUB_MEDIA_TYPE, - content="i'm an epub", - ) - epub_rep = epub.resource.representation - assert None == epub_rep.mirrored_at - - s3 = s3_uploader_fixture.create_s3_uploader(client_class=MockS3Client) - - # Mock final_mirror_url so we can verify that it's called with - # the right arguments - def mock_final_mirror_url(bucket, key): - return "final_mirror_url was called with bucket {}, key {}".format( - bucket, key - ) - - s3.final_mirror_url = mock_final_mirror_url - - book_url = "http://books-go/here.epub" - cover_url = "http://s3.amazonaws.com/covers-go/here.png" - s3.mirror_one(cover.resource.representation, cover_url) - s3.mirror_one(epub.resource.representation, book_url) - [ - [data1, bucket1, key1, args1, ignore1], - [data2, bucket2, key2, args2, ignore2], - ] = s3.client.uploads - - # Both representations have had .mirror_url set and been - # mirrored to those URLs. - assert data1.startswith(b"\x89") - assert "covers-go" == bucket1 - assert "here.png" == key1 - assert Representation.PNG_MEDIA_TYPE == args1["ContentType"] - assert (utc_now() - cover_rep.mirrored_at).seconds < 10 - - assert b"i'm an epub" == data2 - assert "books-go" == bucket2 - assert "here.epub" == key2 - assert Representation.EPUB_MEDIA_TYPE == args2["ContentType"] - - # In both cases, mirror_url was set to the result of final_mirror_url. - assert ( - "final_mirror_url was called with bucket books-go, key here.epub" - == epub_rep.mirror_url - ) - assert ( - "final_mirror_url was called with bucket covers-go, key here.png" - == cover_rep.mirror_url - ) - - # mirrored-at was set when the representation was 'mirrored' - for rep in epub_rep, cover_rep: - assert (utc_now() - rep.mirrored_at).seconds < 10 - - def test_mirror_failure(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - edition, pool = transaction.edition(with_license_pool=True) - original_epub_location = "https://books.com/a-book.epub" - epub, ignore = pool.add_link( - Hyperlink.OPEN_ACCESS_DOWNLOAD, - original_epub_location, - edition.data_source, - Representation.EPUB_MEDIA_TYPE, - content="i'm an epub", - ) - epub_rep = epub.resource.representation - - uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client) - - # A network failure is treated as a transient error. - uploader.client.fail_with = BotoCoreError() - uploader.mirror_one(epub_rep, transaction.fresh_url()) - assert None == epub_rep.mirrored_at - assert None == epub_rep.mirror_exception - - # An S3 credential failure is treated as a transient error. 
- response = dict( - Error=dict( - Code=401, - Message="Bad credentials", - ) - ) - uploader.client.fail_with = ClientError(response, "SomeOperation") # type: ignore[arg-type] - uploader.mirror_one(epub_rep, transaction.fresh_url()) - assert None == epub_rep.mirrored_at - assert None == epub_rep.mirror_exception - - # Because the file was not successfully uploaded, - # final_mirror_url was never called and mirror_url is - # was not set. - assert None == epub_rep.mirror_url - - # A bug in the code is not treated as a transient error -- - # the exception propagates through. - uploader.client.fail_with = Exception("crash!") - pytest.raises(Exception, uploader.mirror_one, epub_rep, transaction.fresh_url()) - - def test_svg_mirroring(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - edition, pool = transaction.edition(with_license_pool=True) - original = transaction.fresh_url() - - # Create an SVG cover for the book. - svg = """ - - - -""" - hyperlink, ignore = pool.add_link( - Hyperlink.IMAGE, - original, - edition.data_source, - Representation.SVG_MEDIA_TYPE, - content=svg, - ) - - # 'Upload' it to S3. - s3 = s3_uploader_fixture.create_s3_uploader(MockS3Client) - s3.mirror_one(hyperlink.resource.representation, transaction.fresh_url()) - [[data, bucket, key, args, ignore]] = s3.client.uploads - - assert Representation.SVG_MEDIA_TYPE == args["ContentType"] - assert b"svg" in data - assert b"PNG" not in data - - def test_multipart_upload(self, s3_uploader_fixture: S3UploaderFixture): - transaction = s3_uploader_fixture.transaction - - class MockMultipartS3Upload(MultipartS3Upload): - completed = None - aborted = None - - def __init__(self, uploader, representation, mirror_to): - self.parts = [] - MockMultipartS3Upload.completed = False - MockMultipartS3Upload.aborted = False - - def upload_part(self, content): - self.parts.append(content) - - def complete(self): - MockMultipartS3Upload.completed = True - - def abort(self): - MockMultipartS3Upload.aborted = True - - rep, ignore = create( - transaction.session, - Representation, - url="http://books.mrc", - media_type=Representation.MARC_MEDIA_TYPE, - ) - - s3 = s3_uploader_fixture.create_s3_uploader(MockS3Client) - - # Successful upload - with s3.multipart_upload( - rep, rep.url, upload_class=MockMultipartS3Upload - ) as upload: - assert [] == upload.parts - assert False == upload.completed - assert False == upload.aborted - - upload.upload_part("Part 1") - upload.upload_part("Part 2") - - assert ["Part 1", "Part 2"] == upload.parts - - assert True == MockMultipartS3Upload.completed - assert False == MockMultipartS3Upload.aborted - assert None == rep.mirror_exception - - class FailingMultipartS3Upload(MockMultipartS3Upload): - def upload_part(self, content): - raise Exception("Error!") - - # Failed during upload - with s3.multipart_upload( - rep, rep.url, upload_class=FailingMultipartS3Upload - ) as upload: - upload.upload_part("Part 1") - - assert False == MockMultipartS3Upload.completed - assert True == MockMultipartS3Upload.aborted - assert "Error!" == rep.mirror_exception - - class AnotherFailingMultipartS3Upload(MockMultipartS3Upload): - def complete(self): - raise Exception("Error!") - - rep.mirror_exception = None - # Failed during completion - with s3.multipart_upload( - rep, rep.url, upload_class=AnotherFailingMultipartS3Upload - ) as upload: - upload.upload_part("Part 1") - - assert False == MockMultipartS3Upload.completed - assert True == MockMultipartS3Upload.aborted - assert "Error!" 
== rep.mirror_exception - - @pytest.mark.parametrize( - "name,expiration_settings,expected_expiration", - [ - ( - "default_expiration_parameter", - None, - int(S3UploaderConfiguration.S3_DEFAULT_PRESIGNED_URL_EXPIRATION), - ), - ( - "empty_expiration_parameter", - {S3UploaderConfiguration.S3_PRESIGNED_URL_EXPIRATION: 100}, - 100, - ), - ], - ) - def test_sign_url( - self, - s3_uploader_fixture: S3UploaderFixture, - name, - expiration_settings, - expected_expiration, - ): - # Arrange - region = "us-east-1" - bucket = "bucket" - filename = "filename" - url = f"https://{bucket}.s3.{region}.amazonaws.com/{filename}" - expected_url = url + "?AWSAccessKeyId=KEY&Expires=1&Signature=S" - settings = expiration_settings if expiration_settings else {} - s3_uploader = s3_uploader_fixture.create_s3_uploader(region=region, **settings) - s3_uploader.split_url = MagicMock(return_value=(bucket, filename)) - s3_uploader.client.generate_presigned_url = MagicMock(return_value=expected_url) - - # Act - result = s3_uploader.sign_url(url) - - # Assert - assert result == expected_url - s3_uploader.split_url.assert_called_once_with(url) - s3_uploader.client.generate_presigned_url.assert_called_once_with( - "get_object", - ExpiresIn=expected_expiration, - Params={"Bucket": bucket, "Key": filename}, - ) - - -class TestMultiPartS3Upload: - @staticmethod - def _representation(transaction: DatabaseTransactionFixture): - rep, ignore = create( - transaction.session, - Representation, - url="http://bucket/books.mrc", - media_type=Representation.MARC_MEDIA_TYPE, - ) - return rep - - def test_init(self, s3_uploader_fixture: S3UploaderFixture): - uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client) - rep = self._representation(s3_uploader_fixture.transaction) - upload = MultipartS3Upload(uploader, rep, rep.url) - assert uploader == upload.uploader - assert rep == upload.representation - assert "bucket" == upload.bucket - assert "books.mrc" == upload.filename - assert 1 == upload.part_number - assert [] == upload.parts - assert 1 == upload.upload.get("UploadId") - - uploader.client.fail_with = Exception("Error!") - pytest.raises(Exception, MultipartS3Upload, uploader, rep, rep.url) - - def test_upload_part(self, s3_uploader_fixture: S3UploaderFixture): - uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client) - rep = self._representation(s3_uploader_fixture.transaction) - upload = MultipartS3Upload(uploader, rep, rep.url) - upload.upload_part("Part 1") - upload.upload_part("Part 2") - assert [ - { - "Body": "Part 1", - "UploadId": 1, - "PartNumber": 1, - "Bucket": "bucket", - "Key": "books.mrc", - }, - { - "Body": "Part 2", - "UploadId": 1, - "PartNumber": 2, - "Bucket": "bucket", - "Key": "books.mrc", - }, - ] == uploader.client.parts - assert 3 == upload.part_number - assert [ - {"ETag": "etag", "PartNumber": 1}, - {"ETag": "etag", "PartNumber": 2}, - ] == upload.parts - - uploader.client.fail_with = Exception("Error!") - pytest.raises(Exception, upload.upload_part, "Part 3") - - def test_complete(self, s3_uploader_fixture: S3UploaderFixture): - uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client) - rep = self._representation(s3_uploader_fixture.transaction) - upload = MultipartS3Upload(uploader, rep, rep.url) - upload.upload_part("Part 1") - upload.upload_part("Part 2") - upload.complete() - assert [ - { - "Bucket": "bucket", - "Key": "books.mrc", - "UploadId": 1, - "MultipartUpload": { - "Parts": [ - {"ETag": "etag", "PartNumber": 1}, - {"ETag": "etag", "PartNumber": 2}, - ], - }, - } - ] == 
uploader.client.uploads - - def test_abort(self, s3_uploader_fixture: S3UploaderFixture): - uploader = s3_uploader_fixture.create_s3_uploader(MockS3Client) - rep = self._representation(s3_uploader_fixture.transaction) - upload = MultipartS3Upload(uploader, rep, rep.url) - upload.upload_part("Part 1") - upload.upload_part("Part 2") - upload.abort() - assert [] == uploader.client.parts - - -@pytest.mark.minio -class TestS3UploaderIntegration: - @pytest.mark.parametrize( - "name,uploader_class,bucket_type,bucket_name,open_access,settings", - [ - ( - "using_s3_uploader_and_open_access_bucket", - functools.partial( - S3Uploader, - host=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_HOST, - ), - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, - "test-bucket", - True, - None, - ), - ( - "using_s3_uploader_and_protected_access_bucket", - functools.partial( - S3Uploader, - host=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_HOST, - ), - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, - "test-bucket", - False, - None, - ), - ( - "using_minio_uploader_and_open_access_bucket", - MinIOUploader, - S3UploaderConfiguration.OA_CONTENT_BUCKET_KEY, - "test-bucket", - True, - { - MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL - }, - ), - ( - "using_minio_uploader_and_protected_access_bucket", - MinIOUploader, - S3UploaderConfiguration.PROTECTED_CONTENT_BUCKET_KEY, - "test-bucket", - False, - { - MinIOUploaderConfiguration.ENDPOINT_URL: S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL - }, - ), - ], - ) - def test_mirror( - self, - s3_uploader_integration_fixture: S3UploaderIntegrationFixture, - name, - uploader_class, - bucket_type, - bucket_name, - open_access, - settings, - ): - fixture = s3_uploader_integration_fixture - - # Arrange - book_title = "1234567890" - book_content = b"1234567890" - identifier = Identifier(type=Identifier.ISBN, identifier=book_title) - representation = Representation( - content=book_content, media_type=Representation.EPUB_MEDIA_TYPE - ) - buckets = { - bucket_type: bucket_name, - } - - if settings: - settings.update(buckets) - else: - settings = buckets - - s3_uploader = fixture.create_s3_uploader( - uploader_class=uploader_class, **settings - ) - - fixture.minio_s3_client.create_bucket(Bucket=bucket_name) - - # Act - book_url = s3_uploader.book_url(identifier, open_access=open_access) - s3_uploader.mirror_one(representation, book_url) - - # Assert - response = fixture.minio_s3_client.list_objects(Bucket=bucket_name) - assert "Contents" in response - assert len(response["Contents"]) == 1 - - [object] = response["Contents"] - - assert object["Key"] == f"ISBN/{book_title}.epub" diff --git a/tests/core/test_s3_analytics_provider.py b/tests/core/test_s3_analytics_provider.py index e07e0b45a7..9ed40e782a 100644 --- a/tests/core/test_s3_analytics_provider.py +++ b/tests/core/test_s3_analytics_provider.py @@ -1,6 +1,9 @@ +from __future__ import annotations + import datetime import json -from unittest.mock import create_autospec, patch +from typing import TYPE_CHECKING +from unittest.mock import MagicMock import pytest @@ -11,31 +14,38 @@ CirculationEvent, DataSource, ExternalIntegration, - ExternalIntegrationLink, MediaTypes, create, ) -from core.s3 import S3Uploader, S3UploaderConfiguration -from tests.fixtures.database import DatabaseTransactionFixture + +if TYPE_CHECKING: + from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.services import MockServicesFixture class 
S3AnalyticsFixture: - def __init__(self, db: DatabaseTransactionFixture) -> None: + def __init__( + self, db: DatabaseTransactionFixture, services_fixture: MockServicesFixture + ) -> None: self.db = db - self._analytics_integration, _ = create( + self.analytics_integration, _ = create( db.session, ExternalIntegration, goal=ExternalIntegration.ANALYTICS_GOAL, protocol=S3AnalyticsProvider.__module__, ) - self._analytics_provider = S3AnalyticsProvider( - self._analytics_integration, db.default_library() + self.services = services_fixture.services + self.analytics_storage = services_fixture.storage.analytics + self.analytics_provider = S3AnalyticsProvider( + self.analytics_integration, self.services, db.default_library() ) @pytest.fixture(scope="function") -def s3_analytics_fixture(db: DatabaseTransactionFixture): - return S3AnalyticsFixture(db) +def s3_analytics_fixture( + db: DatabaseTransactionFixture, mock_services_fixture: MockServicesFixture +): + return S3AnalyticsFixture(db, mock_services_fixture) class TestS3AnalyticsProvider: @@ -51,63 +61,22 @@ def timestamp_to_string(timestamp): """ return str(timestamp) - def test_exception_is_raised_when_there_is_no_external_integration_link( - self, s3_analytics_fixture: S3AnalyticsFixture - ): - # Act, Assert - with pytest.raises(CannotLoadConfiguration): - s3_analytics_fixture._analytics_provider.collect_event( - s3_analytics_fixture.db.default_library(), - None, - CirculationEvent.NEW_PATRON, - datetime.datetime.utcnow(), - ) - - def test_exception_is_raised_when_there_is_no_storage_integration( - self, s3_analytics_fixture: S3AnalyticsFixture - ): - # Arrange - # Create an external integration link but don't create a storage integration - create( - s3_analytics_fixture.db.session, - ExternalIntegrationLink, - external_integration_id=s3_analytics_fixture._analytics_integration.id, - purpose=ExternalIntegrationLink.ANALYTICS, - ) - - # Act, Assert - with pytest.raises(CannotLoadConfiguration): - s3_analytics_fixture._analytics_provider.collect_event( - s3_analytics_fixture.db.default_library(), - None, - CirculationEvent.NEW_PATRON, - datetime.datetime.utcnow(), - ) - - def test_exception_is_raised_when_there_is_no_analytics_bucket( + def test_exception_is_raised_when_no_analytics_bucket_configured( self, s3_analytics_fixture: S3AnalyticsFixture ): - # Arrange - # Create a storage service - storage_integration, _ = create( - s3_analytics_fixture.db.session, - ExternalIntegration, - goal=ExternalIntegration.STORAGE_GOAL, - protocol=ExternalIntegration.S3, - ) + # The services container returns None when there is no analytics storage service configured, + # so we override the analytics storage service with None to simulate this situation. 
+ s3_analytics_fixture.services.storage.analytics.override(None) - # Create an external integration link to the storage service - create( - s3_analytics_fixture.db.session, - ExternalIntegrationLink, - external_integration_id=s3_analytics_fixture._analytics_integration.id, - other_integration_id=storage_integration.id, - purpose=ExternalIntegrationLink.ANALYTICS, + provider = S3AnalyticsProvider( + s3_analytics_fixture.analytics_integration, + s3_analytics_fixture.services, + s3_analytics_fixture.db.default_library(), ) # Act, Assert with pytest.raises(CannotLoadConfiguration): - s3_analytics_fixture._analytics_provider.collect_event( + provider.collect_event( s3_analytics_fixture.db.default_library(), None, CirculationEvent.NEW_PATRON, @@ -117,189 +86,116 @@ def test_exception_is_raised_when_there_is_no_analytics_bucket( def test_analytics_data_without_associated_license_pool_is_correctly_stored_in_s3( self, s3_analytics_fixture: S3AnalyticsFixture ): - # Arrange - # Create an S3 Analytics integration - analytics_integration, _ = create( - s3_analytics_fixture.db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=S3AnalyticsProvider.__module__, - ) - # Create an S3 Analytics provider - provider = S3AnalyticsProvider( - analytics_integration, s3_analytics_fixture.db.default_library() - ) + # Set up event's metadata + event_time = datetime.datetime.utcnow() + event_time_formatted = self.timestamp_to_string(event_time) + event_type = CirculationEvent.NEW_PATRON - # Create an S3 storage service - storage_integration, _ = create( - s3_analytics_fixture.db.session, - ExternalIntegration, - goal=ExternalIntegration.STORAGE_GOAL, - protocol=ExternalIntegration.S3, - ) - # Set up a bucket name used for storing analytics data - storage_integration.setting( - S3UploaderConfiguration.ANALYTICS_BUCKET_KEY - ).value = "analytics" + s3_analytics_fixture.analytics_provider._get_file_key = MagicMock() - # Create a link to the S3 storage service - create( - s3_analytics_fixture.db.session, - ExternalIntegrationLink, - external_integration_id=analytics_integration.id, - other_integration_id=storage_integration.id, - purpose=ExternalIntegrationLink.ANALYTICS, + # Act + s3_analytics_fixture.analytics_provider.collect_event( + s3_analytics_fixture.db.default_library(), None, event_type, event_time ) - # Set up a mock instead of real S3Uploader class acting as the S3 storage service - s3_uploader = create_autospec(spec=S3Uploader) - - with patch("core.mirror.MirrorUploader.implementation") as mock_implementation: - mock_implementation.return_value = s3_uploader - # Set up event's metadata - event_time = datetime.datetime.utcnow() - event_time_formatted = self.timestamp_to_string(event_time) - event_type = CirculationEvent.NEW_PATRON - - # Act - provider.collect_event( - s3_analytics_fixture.db.default_library(), None, event_type, event_time - ) - - # Assert - s3_uploader.analytics_file_url.assert_called_once_with( - s3_analytics_fixture.db.default_library(), None, event_type, event_time - ) - s3_uploader.mirror_one.assert_called_once() - representation, _ = s3_uploader.mirror_one.call_args[0] - - assert MediaTypes.APPLICATION_JSON_MEDIA_TYPE == representation.media_type - - content = representation.content - event = json.loads(content) - - assert event_type == event["type"] - assert event_time_formatted == event["start"] - assert event_time_formatted == event["end"] - assert s3_analytics_fixture.db.default_library().id == event["library_id"] + # Assert + 
s3_analytics_fixture.analytics_provider._get_file_key.assert_called_once_with( + s3_analytics_fixture.db.default_library(), None, event_type, event_time + ) + s3_analytics_fixture.analytics_storage.store.assert_called_once() + ( + key, + content, + content_type, + ) = s3_analytics_fixture.analytics_storage.store.call_args.args + + assert content_type == MediaTypes.APPLICATION_JSON_MEDIA_TYPE + assert key == s3_analytics_fixture.analytics_provider._get_file_key.return_value + event = json.loads(content) + + assert event["type"] == event_type + assert event["start"] == event_time_formatted + assert event["end"] == event_time_formatted + assert event["library_id"] == s3_analytics_fixture.db.default_library().id def test_analytics_data_with_associated_license_pool_is_correctly_stored_in_s3( self, s3_analytics_fixture: S3AnalyticsFixture ): - # Arrange - # Create an S3 Analytics integration - analytics_integration, _ = create( - s3_analytics_fixture.db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=S3AnalyticsProvider.__module__, + # Create a test book + work = s3_analytics_fixture.db.work( + data_source_name=DataSource.GUTENBERG, + title="Test Book", + authors=("Test Author 1", "Test Author 2"), + genre="Test Genre", + language="eng", + audience=Classifier.AUDIENCE_ADULT, + with_license_pool=True, ) - # Create an S3 Analytics provider - provider = S3AnalyticsProvider( - analytics_integration, s3_analytics_fixture.db.default_library() + license_pool = work.license_pools[0] + edition = work.presentation_edition + + # Set up event's metadata + event_time = datetime.datetime.utcnow() + event_time_formatted = self.timestamp_to_string(event_time) + event_type = CirculationEvent.CM_CHECKOUT + + s3_analytics_fixture.analytics_provider._get_file_key = MagicMock() + + # Act + s3_analytics_fixture.analytics_provider.collect_event( + s3_analytics_fixture.db.default_library(), + license_pool, + event_type, + event_time, ) - # Create an S3 storage service - storage_integration, _ = create( - s3_analytics_fixture.db.session, - ExternalIntegration, - goal=ExternalIntegration.STORAGE_GOAL, - protocol=ExternalIntegration.S3, + # Assert + s3_analytics_fixture.analytics_storage.store.assert_called_once() + ( + key, + content, + content_type, + ) = s3_analytics_fixture.analytics_storage.store.call_args.args + + assert content_type == MediaTypes.APPLICATION_JSON_MEDIA_TYPE + assert key == s3_analytics_fixture.analytics_provider._get_file_key.return_value + + event = json.loads(content) + data_source = license_pool.data_source if license_pool else None + identifier = license_pool.identifier if license_pool else None + collection = license_pool.collection if license_pool else None + work = license_pool.work if license_pool else None + + assert event["type"] == event_type + assert event["start"] == event_time_formatted + assert event["end"] == event_time_formatted + assert event["library_id"] == s3_analytics_fixture.db.default_library().id + assert event["license_pool_id"] == license_pool.id + assert event["publisher"] == edition.publisher + assert event["imprint"] == edition.imprint + assert event["issued"] == edition.issued + assert event["published"] == edition.published + assert event["medium"] == edition.medium + assert event["collection"] == collection.name + assert event["identifier_type"] == identifier.type + assert event["identifier"] == identifier.identifier + assert event["data_source"] == data_source.name + assert event["audience"] == work.audience + assert 
event["fiction"] == work.fiction + assert event["summary_text"] == work.summary_text + assert event["quality"] == work.quality + assert event["rating"] == work.rating + assert event["popularity"] == work.popularity + assert event["genre"] == work.genres[0].name + assert event["availability_time"] == self.timestamp_to_string( + license_pool.availability_time ) - # Set up a bucket name used for storing analytics data - storage_integration.setting( - S3UploaderConfiguration.ANALYTICS_BUCKET_KEY - ).value = "analytics" - - # Create a link to the S3 storage service - create( - s3_analytics_fixture.db.session, - ExternalIntegrationLink, - external_integration_id=analytics_integration.id, - other_integration_id=storage_integration.id, - purpose=ExternalIntegrationLink.ANALYTICS, - ) - - # Set up a mock instead of real S3Uploader class acting as the S3 storage service - s3_uploader = create_autospec(spec=S3Uploader) - with patch("core.mirror.MirrorUploader.implementation") as mock_implementation: - mock_implementation.return_value = s3_uploader - # Create a test book - work = s3_analytics_fixture.db.work( - data_source_name=DataSource.GUTENBERG, - title="Test Book", - authors=("Test Author 1", "Test Author 2"), - genre="Test Genre", - language="eng", - audience=Classifier.AUDIENCE_ADULT, - with_license_pool=True, - ) - license_pool = work.license_pools[0] - edition = work.presentation_edition - - # Set up event's metadata - event_time = datetime.datetime.utcnow() - event_time_formatted = self.timestamp_to_string(event_time) - event_type = CirculationEvent.CM_CHECKOUT - - # Act - provider.collect_event( - s3_analytics_fixture.db.default_library(), - license_pool, - event_type, - event_time, - ) - - # Assert - s3_uploader.analytics_file_url.assert_called_once_with( - s3_analytics_fixture.db.default_library(), - license_pool, - event_type, - event_time, - ) - s3_uploader.mirror_one.assert_called_once() - representation, _ = s3_uploader.mirror_one.call_args[0] - - assert MediaTypes.APPLICATION_JSON_MEDIA_TYPE == representation.media_type - - content = representation.content - event = json.loads(content) - data_source = license_pool.data_source if license_pool else None - identifier = license_pool.identifier if license_pool else None - collection = license_pool.collection if license_pool else None - work = license_pool.work if license_pool else None - - assert event_type == event["type"] - assert event_time_formatted == event["start"] - assert event_time_formatted == event["end"] - assert s3_analytics_fixture.db.default_library().id == event["library_id"] - assert license_pool.id == event["license_pool_id"] - assert edition.publisher == event["publisher"] - assert edition.imprint == event["imprint"] - assert edition.issued == event["issued"] - assert edition.published == event["published"] - assert edition.medium == event["medium"] - assert collection.name == event["collection"] - assert identifier.type == event["identifier_type"] - assert identifier.identifier == event["identifier"] - assert data_source.name == event["data_source"] - assert work.audience == event["audience"] - assert work.fiction == event["fiction"] - assert work.summary_text == event["summary_text"] - assert work.quality == event["quality"] - assert work.rating == event["rating"] - assert work.popularity == event["popularity"] - assert work.genres[0].name == event["genre"] - assert ( - self.timestamp_to_string(license_pool.availability_time) - == event["availability_time"] - ) - assert license_pool.licenses_owned == 
event["licenses_owned"] - assert license_pool.licenses_available == event["licenses_available"] - assert license_pool.licenses_reserved == event["licenses_reserved"] - assert license_pool.patrons_in_hold_queue == event["patrons_in_hold_queue"] - assert False == event["self_hosted"] - assert work.title == event["title"] - assert work.series == event["series"] - assert work.series_position == event["series_position"] - assert work.language == event["language"] + assert event["licenses_owned"] == license_pool.licenses_owned + assert event["licenses_available"] == license_pool.licenses_available + assert event["licenses_reserved"] == license_pool.licenses_reserved + assert event["patrons_in_hold_queue"] == license_pool.patrons_in_hold_queue + assert event["self_hosted"] is False + assert event["title"] == work.title + assert event["series"] == work.series + assert event["series_position"] == work.series_position + assert event["language"] == work.language diff --git a/tests/fixtures/s3.py b/tests/fixtures/s3.py index fb3dc23229..7f802b0382 100644 --- a/tests/fixtures/s3.py +++ b/tests/fixtures/s3.py @@ -1,197 +1,131 @@ +from __future__ import annotations + import functools -import os -from typing import Any, Iterable -from urllib.parse import urlsplit +import sys +from types import TracebackType +from typing import BinaryIO, List, Literal, NamedTuple, Optional, Protocol, Type +from unittest.mock import MagicMock -import boto3 import pytest +from mypy_boto3_s3 import S3Client -from core.model import ExternalIntegration -from core.s3 import S3Uploader, S3UploaderConfiguration -from tests.fixtures.database import DatabaseTransactionFixture - - -class S3UploaderFixture: - transaction: DatabaseTransactionFixture - - def __init__(self, transaction: DatabaseTransactionFixture): - self.transaction = transaction - - def integration(self, **settings): - """Create and configure a simple S3 integration.""" - integration = self.transaction.external_integration( - ExternalIntegration.S3, ExternalIntegration.STORAGE_GOAL, settings=settings - ) - integration.username = settings.get("username", "username") - integration.password = settings.get("password", "password") - return integration - - @staticmethod - def add_settings_value(settings, key, value): - """Adds a value to settings dictionary - - :param settings: Settings dictionary - :type settings: Dict - - :param key: Key - :type key: string +from core.service.storage.s3 import MultipartS3ContextManager, S3Service - :param value: Value - :type value: Any +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self - :return: Updated settings dictionary - :rtype: Dict - """ - if value: - if settings: - settings[key] = value - else: - settings = {key: value} +class MockS3ServiceUpload(NamedTuple): + key: str + content: bytes + media_type: Optional[str] - return settings - def create_s3_uploader( +class MockMultipartS3ContextManager(MultipartS3ContextManager): + def __init__( self, - client_class=None, - uploader_class=None, - region=None, - addressing_style=None, - **settings, - ): - """Creates a new instance of S3 uploader - - :param client_class: (Optional) Custom class to be used instead of boto3's client class - :type client_class: Optional[Type] - - :param: uploader_class: (Optional) Custom class which will be used insted of S3Uploader - :type uploader_class: Optional[Type] - - :param region: (Optional) S3 region - :type region: Optional[string] - - :param addressing_style: (Optional) S3 addressing style - :type 
addressing_style: Optional[string] - - :param settings: Kwargs used for initializing an external integration - :type: Optional[Dict] - - :return: New intance of S3 uploader - :rtype: S3Uploader - """ - settings = self.add_settings_value( - settings, S3UploaderConfiguration.S3_REGION, region - ) - settings = self.add_settings_value( - settings, S3UploaderConfiguration.S3_ADDRESSING_STYLE, addressing_style - ) - integration = self.integration(**settings) - uploader_class = uploader_class or S3Uploader + parent: MockS3Service, + bucket: str, + key: str, + url: str, + media_type: Optional[str] = None, + ) -> None: + self.parent = parent + self.key = key + self.bucket = bucket + self.media_type = media_type + self.content = b"" + self._complete = False + self._url = url + self._exception = None + + def __enter__(self) -> Self: + return self + + def __exit__( + self, + exc_type: Optional[Type[BaseException]], + exc_val: Optional[BaseException], + exc_tb: Optional[TracebackType], + ) -> Literal[False]: + if self.content: + self._complete = True + self.parent.uploads.append( + MockS3ServiceUpload(self.key, self.content, self.media_type) + ) + return False - return uploader_class(integration, client_class=client_class) + def upload_part(self, content: bytes) -> None: + self.content += content -@pytest.fixture -def s3_uploader_fixture( - db, -) -> S3UploaderFixture: - return S3UploaderFixture(db) - - -class S3UploaderIntegrationFixture(S3UploaderFixture): - SIMPLIFIED_TEST_MINIO_ENDPOINT_URL = os.environ.get( - "SIMPLIFIED_TEST_MINIO_ENDPOINT_URL", "http://localhost:9000" - ) - SIMPLIFIED_TEST_MINIO_USER = os.environ.get( - "SIMPLIFIED_TEST_MINIO_USER", "minioadmin" - ) - SIMPLIFIED_TEST_MINIO_PASSWORD = os.environ.get( - "SIMPLIFIED_TEST_MINIO_PASSWORD", "minioadmin" - ) - _, SIMPLIFIED_TEST_MINIO_HOST, _, _, _ = urlsplit( - SIMPLIFIED_TEST_MINIO_ENDPOINT_URL - ) - - minio_s3_client: Any - """boto3 client connected to locally running MinIO instance""" - - s3_client_class = None - """Factory function used for creating a boto3 client inside S3Uploader""" - - def __init__(self, transaction: DatabaseTransactionFixture): - super().__init__(transaction) - self.minio_s3_client = boto3.client( - "s3", - aws_access_key_id=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_USER, - aws_secret_access_key=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_PASSWORD, - endpoint_url=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL, - ) - self.s3_client_class = functools.partial( - boto3.client, - endpoint_url=S3UploaderIntegrationFixture.SIMPLIFIED_TEST_MINIO_ENDPOINT_URL, +class MockS3Service(S3Service): + def __init__( + self, + client: S3Client, + region: str, + bucket: str, + url_template: str, + ) -> None: + super().__init__(client, region, bucket, url_template) + self.uploads: List[MockS3ServiceUpload] = [] + + def store_stream( + self, + key: str, + stream: BinaryIO, + content_type: Optional[str] = None, + ) -> Optional[str]: + self.uploads.append(MockS3ServiceUpload(key, stream.read(), content_type)) + return self.generate_url(key) + + def multipart( + self, key: str, content_type: Optional[str] = None + ) -> MultipartS3ContextManager: + return MockMultipartS3ContextManager( + self, self.bucket, key, self.generate_url(key), content_type ) - def close(self): - response = self.minio_s3_client.list_buckets() - - for bucket in response["Buckets"]: - bucket_name = bucket["Name"] - response = self.minio_s3_client.list_objects(Bucket=bucket_name) - for object in response.get("Contents", []): - 
object_key = object["Key"] - self.minio_s3_client.delete_object(Bucket=bucket_name, Key=object_key) - - self.minio_s3_client.delete_bucket(Bucket=bucket_name) - - def create_s3_uploader( +class S3ServiceProtocol(Protocol): + def __call__( self, - client_class=None, - uploader_class=None, - region=None, - addressing_style=None, - **settings, - ): - """Creates a new instance of S3 uploader - - :param client_class: (Optional) Custom class to be used instead of boto3's client class - :type client_class: Optional[Type] - - :param: uploader_class: (Optional) Custom class which will be used insted of S3Uploader - :type uploader_class: Optional[Type] - - :param region: (Optional) S3 region - :type region: Optional[string] - - :param addressing_style: (Optional) S3 addressing style - :type addressing_style: Optional[string] - - :param settings: Kwargs used for initializing an external integration - :type: Optional[Dict] - - :return: New intance of S3 uploader - :rtype: S3Uploader - """ - if settings and "username" not in settings: - self.add_settings_value( - settings, "username", self.SIMPLIFIED_TEST_MINIO_USER - ) - if settings and "password" not in settings: - self.add_settings_value( - settings, "password", self.SIMPLIFIED_TEST_MINIO_PASSWORD - ) - if not client_class: - client_class = self.s3_client_class + client: Optional[S3Client] = None, + region: Optional[str] = None, + bucket: Optional[str] = None, + url_template: Optional[str] = None, + ) -> S3Service: + ... + + +class S3ServiceFixture: + def __init__(self): + self.mock_s3_client = MagicMock() + self.region = "region" + self.url_template = "https://{region}.test.com/{bucket}/{key}" + self.bucket = "bucket" + + @property + def service(self) -> S3ServiceProtocol: + return functools.partial( + S3Service, + client=self.mock_s3_client, + region=self.region, + bucket=self.bucket, + url_template=self.url_template, + ) - return super().create_s3_uploader( - client_class, uploader_class, region, addressing_style, **settings + def mock_service(self) -> MockS3Service: + return MockS3Service( + client=self.mock_s3_client, + region=self.region, + bucket=self.bucket, + url_template=self.url_template, ) @pytest.fixture -def s3_uploader_integration_fixture( - db, -) -> Iterable[S3UploaderIntegrationFixture]: - fixture = S3UploaderIntegrationFixture(db) - yield fixture - fixture.close() +def s3_service_fixture() -> S3ServiceFixture: + return S3ServiceFixture() diff --git a/tests/fixtures/services.py b/tests/fixtures/services.py new file mode 100644 index 0000000000..edbeccb524 --- /dev/null +++ b/tests/fixtures/services.py @@ -0,0 +1,42 @@ +from unittest.mock import MagicMock + +import pytest + +from core.service.container import Services +from core.service.storage.container import Storage +from core.service.storage.s3 import S3Service + + +class MockStorageFixture: + def __init__(self): + self.storage = Storage() + self.analytics = MagicMock(spec=S3Service) + self.storage.analytics.override(self.analytics) + self.public = MagicMock(spec=S3Service) + self.storage.public.override(self.public) + self.s3_client = MagicMock() + self.storage.s3_client.override(self.s3_client) + + +@pytest.fixture +def mock_storage_fixture() -> MockStorageFixture: + return MockStorageFixture() + + +class MockServicesFixture: + """ + Provide a services container with all the services mocked out + by MagicMock objects. 
+ """ + + def __init__(self, storage: MockStorageFixture): + self.services = Services() + self.services.storage.override(storage.storage) + self.storage = storage + + +@pytest.fixture +def mock_services_fixture( + mock_storage_fixture: MockStorageFixture, +) -> MockServicesFixture: + return MockServicesFixture(mock_storage_fixture) diff --git a/tox.ini b/tox.ini index 1721430ba6..34ffa9983f 100644 --- a/tox.ini +++ b/tox.ini @@ -11,14 +11,15 @@ commands = core: pytest {posargs:tests/core} passenv = SIMPLIFIED_* + PALACE_* CI setenv = {api,core}: COVERAGE_FILE = .coverage.{envname} docker: SIMPLIFIED_TEST_DATABASE=postgresql://simplified_test:test@localhost:9005/simplified_circulation_test docker: SIMPLIFIED_TEST_OPENSEARCH=http://localhost:9007 - core-docker: SIMPLIFIED_TEST_MINIO_ENDPOINT_URL=http://localhost:9004 - core-docker: SIMPLIFIED_TEST_MINIO_USER=simplified - core-docker: SIMPLIFIED_TEST_MINIO_PASSWORD=12345678901234567890 + core-docker: PALACE_TEST_MINIO_ENDPOINT_URL=http://localhost:9004 + core-docker: PALACE_TEST_MINIO_USER=palace + core-docker: PALACE_TEST_MINIO_PASSWORD=12345678901234567890 docker = docker: os-circ docker: db-circ @@ -68,7 +69,7 @@ ports = [docker:minio-circ] image = bitnami/minio:2022.3.3 environment = - MINIO_ROOT_USER=simplified + MINIO_ROOT_USER=palace MINIO_ROOT_PASSWORD=12345678901234567890 ports = 9004:9000/tcp From 35e32de15c0c01e049a2ee41901191d31d85febe Mon Sep 17 00:00:00 2001 From: dbernstein Date: Fri, 22 Sep 2023 12:48:24 -0700 Subject: [PATCH 056/262] Prevent empty pointer problem. (#1402) Improve logging to show detailed failure info in logs during OPDS feed imports. Resolves: https://ebce-lyrasis.atlassian.net/browse/PP-192 --- api/opds_for_distributors.py | 5 ++++- core/opds_import.py | 6 ++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 421f4ec883..88d5f753f8 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -357,7 +357,10 @@ def update_work_for_edition(self, *args, **kwargs): pool, work = super().update_work_for_edition( *args, is_open_access=False, **kwargs ) - pool.unlimited_access = True + + if pool: + pool.unlimited_access = True + return pool, work @classmethod diff --git a/core/opds_import.py b/core/opds_import.py index 7fc4dc618b..6b52d77406 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -524,6 +524,12 @@ def import_from_feed(self, feed, feed_url=None): if work: works[key] = work except Exception as e: + logging.warning( + f"Non-fatal exception: Failed to import item - import will continue: " + f"identifier={key}; collection={self.collection.name}; " + f"data_source={self.data_source}; exception={e}", + stack_info=True, + ) identifier, ignore = Identifier.parse_urn(self._db, key) data_source = self.data_source failure = CoverageFailure( From 0bec652966d3e0d836884430f6593a556b1a2f7a Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 25 Sep 2023 10:55:57 -0300 Subject: [PATCH 057/262] Remove unused integration test code. 
(#1404) --- integration_tests/benchmark_feed_queries.py | 204 -- integration_tests/test_borrow.py | 51 - integration_tests/test_circulation.py | 106 - integration_tests/test_feed.py | 45 - integration_tests/test_hold.py | 49 - integration_tests/test_search.py | 2819 ------------------- 6 files changed, 3274 deletions(-) delete mode 100644 integration_tests/benchmark_feed_queries.py delete mode 100644 integration_tests/test_borrow.py delete mode 100644 integration_tests/test_circulation.py delete mode 100644 integration_tests/test_feed.py delete mode 100644 integration_tests/test_hold.py delete mode 100644 integration_tests/test_search.py diff --git a/integration_tests/benchmark_feed_queries.py b/integration_tests/benchmark_feed_queries.py deleted file mode 100644 index 98c099ee5e..0000000000 --- a/integration_tests/benchmark_feed_queries.py +++ /dev/null @@ -1,204 +0,0 @@ -import random -import time -from threading import Thread -from urllib.parse import quote, urlencode - -import numpy -import requests - - -class QueryTimingThread(Thread): - def __init__(self, urls): - Thread.__init__(self) - self.urls = urls - - def run(self): - self.elapsed = [] - self.exceptions = [] - for url in self.urls: - a = time.time() - exception = self.do_query(url) - self.elapsed.append(time.time() - a) - if exception: - self.exceptions.append((url, exception)) - - def do_query(self, url): - print(url) - try: - response = requests.get(url) - return None - except Exception as e: - return e - - def report(self): - print("") - print("Timing results for %s" % self.urls[0]) - print("------------------") - # print "Total time elapsed: %s" % numpy.sum(self.elapsed) - print("Mean time elapsed: %.2f" % numpy.mean(self.elapsed)) - print("Median time elapsed: %.2f" % numpy.median(self.elapsed)) - m = numpy.argmax(self.elapsed) - print("Max time elapsed: %.2f" % self.elapsed[m]) - print("Max url: %s" % self.urls[m]) - print("Raw data:") - for i, url in enumerate(self.urls): - print(f"({self.elapsed[i]:.2f}) {url}") - for (url, e) in self.exceptions: - print(f"Exception: {url}: {e}") - print("") - - -size = 50 -pages = 10 -thread_count = 10 -base_url = "http://qa.circulation.librarysimplified.org" - -queries = [ - { - "language": "eng", - "category": "Adult Fiction", - "params": {"order": "author", "available": "now", "collection": "full"}, - }, - { - "language": "eng", - "category": "Adult Fiction", - "params": {"order": "title", "available": "all", "collection": "main"}, - }, - { - "language": "eng", - "category": "Adult Nonfiction", - "params": {"order": "author", "available": "now", "collection": "main"}, - }, - { - "language": "eng", - "category": "Adult Nonfiction", - "params": {"order": "title", "available": "all", "collection": "featured"}, - }, - { - "language": "eng", - "category": "English Best Sellers", - "params": {"order": "author", "available": "all", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Young Adult Fiction", - "params": {"order": "added", "available": "all", "collection": "main"}, - }, - { - "language": "eng", - "category": "Children and Middle Grade", - "params": {"order": "author", "available": "now", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Adventure", - "params": {"order": "author", "available": "main", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Classics", - "params": {"order": "title", "available": "now", "collection": "full"}, - }, - { - "language": "eng", - "category": "Police Procedural", - "params": 
{"order": "title", "available": "now", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Biography & Memoir", - "params": {"order": "author", "available": "always", "collection": "main"}, - }, - { - "language": "eng", - "category": "Business", - "params": {"order": "added", "available": "now", "collection": "full"}, - }, - { - "language": "eng", - "category": "Parenting & Family", - "params": {"order": "author", "available": "all", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Cooking", - "params": {"order": "title", "available": "all", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Latin American History", - "params": {"order": "author", "available": "all", "collection": "main"}, - }, - { - "language": "eng", - "category": "Pets", - "params": {"order": "title", "available": "now", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Photography", - "params": {"order": "author", "available": "now", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Music", - "params": {"order": "added", "available": "now", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Life Strategies", - "params": {"order": "title", "available": "all", "collection": "main"}, - }, - { - "language": "eng", - "category": "Buddhism", - "params": {"order": "author", "available": "all", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Computers", - "params": {"order": "added", "available": "now", "collection": "featured"}, - }, - { - "language": "eng", - "category": "Self Help", - "params": {"order": "author", "available": "all", "collection": "full"}, - }, - { - "language": "eng", - "category": "True Crime", - "params": {"order": "title", "available": "all", "collection": "full"}, - }, -] - - -def urls_from_query(query, pages, size): - urls = [] - for i in range(pages): - if i > 0: - query["params"]["after"] = i * size - url = quote( - "%s/feed/%s/%s?%s" - % ( - base_url, - query["language"], - query["category"], - urlencode(query["params"]), - ), - safe=":/?=&", - ) - urls.append(url) - return urls - - -threads = [ - QueryTimingThread(urls=urls_from_query(random.choice(queries), pages, size)) - for i in range(thread_count) -] - -for t in threads: - t.start() -for t in threads: - t.join() -for t in threads: - t.report() diff --git a/integration_tests/test_borrow.py b/integration_tests/test_borrow.py deleted file mode 100644 index fd3bfc1e17..0000000000 --- a/integration_tests/test_borrow.py +++ /dev/null @@ -1,51 +0,0 @@ -import os - -import feedparser -import requests -from requests.auth import HTTPBasicAuth - -from . 
import CirculationIntegrationTest - - -class TestBorrow(CirculationIntegrationTest): - def test_borrow(self): - if "TEST_IDENTIFIER" in os.environ: - overdrive_id = os.environ["TEST_IDENTIFIER"] - else: - # Fifty Shades of Grey has a large number of copies available - overdrive_id = "82cdd641-857a-45ca-8775-34eede35b238" - borrow_url = f"{self.url}works/Overdrive/{overdrive_id}/borrow" - borrow_response = requests.get( - borrow_url, auth=HTTPBasicAuth(self.test_username, self.test_password) - ) - - # it's possible we already have the book borrowed, if a previous test didn't revoke it - assert borrow_response.status_code in [200, 201] - feed = feedparser.parse(borrow_response.text) - entries = feed["entries"] - eq_(1, len(entries)) - entry = entries[0] - - links = entry["links"] - fulfill_links = [ - link for link in links if link.rel == "http://opds-spec.org/acquisition" - ] - assert len(fulfill_links) > 0 - fulfill_url = fulfill_links[0].href - fulfill_response = requests.get( - fulfill_url, auth=HTTPBasicAuth(self.test_username, self.test_password) - ) - eq_(200, fulfill_response.status_code) - - revoke_links = [ - link - for link in links - if link.rel == "http://librarysimplified.org/terms/rel/revoke" - ] - eq_(1, len(revoke_links)) - revoke_url = revoke_links[0].href - - revoke_response = requests.get( - revoke_url, auth=HTTPBasicAuth(self.test_username, self.test_password) - ) - eq_(200, revoke_response.status_code) diff --git a/integration_tests/test_circulation.py b/integration_tests/test_circulation.py deleted file mode 100644 index 0f440f7fc5..0000000000 --- a/integration_tests/test_circulation.py +++ /dev/null @@ -1,106 +0,0 @@ -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) - -from axis import Axis360API -from circulation_exceptions import * -from overdrive import OverdriveAPI -from threem import ThreeMAPI - -from circulation import CirculationAPI -from core.model import Identifier, Patron, get_one_or_create, production_session - -barcode, pin, borrow_urn, hold_urn = sys.argv[1:5] -email = os.environ.get( - "DEFAULT_NOTIFICATION_EMAIL_ADDRESS", "test@librarysimplified.org" -) - -_db = production_session() -patron, ignore = get_one_or_create(_db, Patron, authorization_identifier=barcode) - -borrow_identifier = Identifier.parse_urn(_db, borrow_urn, True)[0] -hold_identifier = Identifier.parse_urn(_db, hold_urn, True)[0] -borrow_pool = borrow_identifier.licensed_through -hold_pool = hold_identifier.licensed_through - -if any(x.type == Identifier.THREEM_ID for x in [borrow_identifier, hold_identifier]): - threem = ThreeMAPI(_db) -else: - threem = None - -if any(x.type == Identifier.OVERDRIVE_ID for x in [borrow_identifier, hold_identifier]): - overdrive = OverdriveAPI(_db) -else: - overdrive = None - -if any(x.type == Identifier.AXIS_360_ID for x in [borrow_identifier, hold_identifier]): - axis = Axis360API(_db) -else: - axis = None - -circulation = CirculationAPI(_db, overdrive=overdrive, threem=threem, axis=axis) - -activity = circulation.patron_activity(patron, pin) -print("-" * 80) -for i in activity: - print(i) -print("-" * 80) - -licensepool = borrow_pool -mechanism = licensepool.delivery_mechanisms[0] -try: - circulation.fulfill(patron, pin, licensepool, mechanism) -except NoActiveLoan as e: - print(" No active loan...") -circulation.borrow(patron, pin, licensepool, mechanism, email) -print("Attempting to borrow", licensepool.work) -print("Initial revoke loan") 
-print(circulation.revoke_loan(patron, pin, licensepool)) -print("Fulfill with no loan") -try: - circulation.fulfill(patron, pin, licensepool, mechanism) -except NoActiveLoan as e: - print(" Exception as expected.") -print("Borrow") -print(circulation.borrow(patron, pin, licensepool, mechanism, email)) -print("Borrow again!") -print(circulation.borrow(patron, pin, licensepool, mechanism, email)) -print("Fulfill with loan") -print(circulation.fulfill(patron, pin, licensepool, mechanism)) - - -licensepool = hold_pool -print("Attempting to place hold on", licensepool.work) -print("Initial release hold") -print("", circulation.release_hold(patron, pin, licensepool)) -print("Creating hold.") -print("", circulation.borrow(patron, pin, licensepool, mechanism, email)) -print("Creating hold again!") -try: - print(circulation.borrow(patron, pin, licensepool, mechanism, email)) -except CannotLoan as e: - print(" Exception as expected.") -print("Attempt to fulfill hold.") -try: - print(circulation.fulfill(patron, pin, licensepool, mechanism)) -except NoActiveLoan as e: - print(" Exception as expected") - -activity = circulation.patron_activity(patron, pin) -print("-" * 80) -for i in activity: - print(i) -print("-" * 80) - -print("Revoke loan") -print(circulation.revoke_loan(patron, pin, licensepool)) -print("Revoke already revoked loan") -print(circulation.revoke_loan(patron, pin, licensepool)) - -print("Release hold.") -print(circulation.release_hold(patron, pin, licensepool)) -print("Release nonexistent hold.") -print(circulation.release_hold(patron, pin, licensepool)) diff --git a/integration_tests/test_feed.py b/integration_tests/test_feed.py deleted file mode 100644 index 8b7c1d2353..0000000000 --- a/integration_tests/test_feed.py +++ /dev/null @@ -1,45 +0,0 @@ -import os -from urllib.request import urlopen - -import feedparser - -from . import CirculationIntegrationTest - - -class TestFeed(CirculationIntegrationTest): - def test_grouped_feed(self): - feed_url = self.url - feed = urlopen(feed_url).read() - feed = feedparser.parse(str(feed)) - entries = feed["entries"] - assert len(entries) > 20 - # spot-check an entry - entry = entries[5] - assert len(entry.get("title")) > 0 - assert len(entry.get("author")) > 0 - links = entry.get("links") - assert len(links) > 0 - # books on the first page should be available to borrow - borrow_links = [ - link - for link in links - if link.rel == "http://opds-spec.org/acquisition/borrow" - ] - eq_(1, len(borrow_links)) - - def test_genre_feed(self): - if "TEST_FEED_PATH" in os.environ: - path = os.environ["TEST_FEED_PATH"] - else: - path = "eng/Romance" - feed_url = f"{self.url}feed/{path}" - feed = urlopen(feed_url).read() - feed = feedparser.parse(str(feed)) - entries = feed["entries"] - assert len(entries) > 20 - # spot-check an entry - entry = entries[5] - assert len(entry.get("title")) > 0 - assert len(entry.get("author")) > 0 - links = entry.get("links") - assert len(links) > 0 diff --git a/integration_tests/test_hold.py b/integration_tests/test_hold.py deleted file mode 100644 index cb5e8e856f..0000000000 --- a/integration_tests/test_hold.py +++ /dev/null @@ -1,49 +0,0 @@ -import os - -import feedparser -import requests -from requests.auth import HTTPBasicAuth - -from . 
import CirculationIntegrationTest - - -class TestHold(CirculationIntegrationTest): - def test_hold(self): - if "TEST_IDENTIFIER" in os.environ: - overdrive_id = os.environ["TEST_IDENTIFIER"] - else: - # Yes Please has a large hold queue - overdrive_id = "0abe1ed3-f117-4b7c-a6b0-857a2e7d227b" - - borrow_url = f"{self.url}works/Overdrive/{overdrive_id}/borrow" - borrow_response = requests.get( - borrow_url, auth=HTTPBasicAuth(self.test_username, self.test_password) - ) - # it's possible we already have the book on hold, if a previous test didn't revoke it - assert borrow_response.status_code in [200, 201] - feed = feedparser.parse(borrow_response.text) - entries = feed["entries"] - eq_(1, len(entries)) - entry = entries[0] - - availability = entry["opds_availability"] - eq_("reserved", availability["status"]) - - links = entry["links"] - fulfill_links = [ - link for link in links if link.rel == "http://opds-spec.org/acquisition" - ] - eq_(0, len(fulfill_links)) - - revoke_links = [ - link - for link in links - if link.rel == "http://librarysimplified.org/terms/rel/revoke" - ] - eq_(1, len(revoke_links)) - revoke_url = revoke_links[0].href - - revoke_response = requests.get( - revoke_url, auth=HTTPBasicAuth(self.test_username, self.test_password) - ) - eq_(200, revoke_response.status_code) diff --git a/integration_tests/test_search.py b/integration_tests/test_search.py deleted file mode 100644 index 0b3e5f79bc..0000000000 --- a/integration_tests/test_search.py +++ /dev/null @@ -1,2819 +0,0 @@ -# -# These integration tests were written based primarily on real -# searches made in October 2018 against NYPL's circulation -# manager. Theoretically, most of the tests that pass on NYPL's index -# should pass when run against the search index of any public library -# with a similarly sized collection. -# -# These guidelines were used when writing the tests: -# -# * A search for a specific book should return that book as the first result. -# This is true whether or not the search query names the book. -# * Results for a series search should be dominated by books from that series. -# * Results for a person search should be dominated by books by or (in some -# cases) about that person. -# * A search for a topic or genre should return books on that topic or -# in that genre. -# -# It's possible for a test to fail not because of a problem with the -# search engine but because a library's collection is incomplete. The -# tests are written to minimize the chances that this will happen -# unnecessarily. (e.g. the search for "dirtbike" checks for books -# filed under certain subjects, not specific titles). -# -# Run the tests with this command: -# -# $ nosetests integration_tests/test_search.py - -import logging -import re -from functools import wraps - -from core.external_search import ExternalSearchIndex, Filter -from core.lane import Pagination -from core.model import production_session -from core.util.personal_names import ( - display_name_to_sort_name, - sort_name_to_display_name, -) - -# A problem from the unit tests that we couldn't turn into a -# real integration test. -# -# # In Opensearch 1.x, the exact author match that doesn't -# # mention 'biography' is boosted above a book that -# # mentions all three words in its title. 
-# order = [ -# self.biography_of_peter_graves, # title + genre 'biography' -# self.book_by_peter_graves, # author (no 'biography') -# self.behind_the_scenes, # all words match in title -# self.book_by_someone_else, # match across fields (no 'biography') -# ] - - -def known_to_fail(f): - @wraps(f) - def decorated(*args, **kwargs): - try: - ignore = f(*args, **kwargs) - except Exception as e: - SearchTest.expected_failures.append(f) - logging.info("Expected this test to fail, and it did: %r" % e) - return - SearchTest.unexpected_successes.append(f) - raise Exception("Expected this test to fail, and it didn't! Congratulations?") - - return decorated - - -class Searcher: - """A class that knows how to perform searches.""" - - def __init__(self, library, index): - self.library = library - self.filter = Filter(collections=self.library) - self.index = index - - def query(self, query, pagination): - return self.index.query_works( - query, filter=self.filter, pagination=pagination, debug=True - ) - - -class Evaluator: - """A class that knows how to evaluate search results.""" - - log = logging.getLogger("Search evaluator") - - def __init__(self, **kwargs): - self.kwargs = dict() - self.original_kwargs = dict() - for k, v in list(kwargs.items()): - self.original_kwargs[k] = v - if isinstance(v, (bytes, str)): - v = v.lower() - self.kwargs[k] = v - for k, v in list(self.kwargs.items()): - setattr(self, k, v) - - def evaluate(self, hits): - """Raise an AssertionError if the search results are so bad that the - test should fail. - """ - self.evaluate_search(hits) - first = hits[0] - self.evaluate_first(first) - self.evaluate_hits(hits) - - def evaluate_search(self, hits): - """Evaluate the search itself.""" - # By default, a search passes the check if it returns anything - # as opposed to returning nothing. - assert hits - - def evaluate_first(self, hits): - """Evaluate the first (and most important) search result.""" - return - - def evaluate_hits(self, hits): - """Evaluate the search results as a whole.""" - return - - def format(self, result): - source = dict( - title=result.title, - author=result.author, - subtitle=result.subtitle, - series=result.series, - summary=result.summary, - genres=result.genres, - imprint=result.imprint, - publisher=result.publisher, - ) - return dict( - _score=result.meta.score, - _type=result.meta.doc_type, - _id=result.meta.id, - _index=result.meta.index, - source=source, - ) - - def _field(self, field, result=None): - """Extract a field from a search result.""" - result = result or self.first - value = getattr(result, field, None) - if isinstance(value, (bytes, str)): - value = value.lower() - return value - - def assert_ratio(self, matches, hits, threshold): - """Assert that the size of `matches` is proportional to the size of - `hits`. - """ - if not hits: - actual = 0 - else: - actual = float(len(matches)) / len(hits) - if actual < threshold: - # This test is going to fail. Log some useful information. 
- logging.info( - "Need %d%% matches, got %d%%" % (threshold * 100, actual * 100) - ) - for hit in hits: - logging.info(repr(hit)) - assert actual >= threshold - - def _match_scalar(self, value, expect, inclusive=False, case_sensitive=False): - if hasattr(expect, "search"): - if expect and value is not None: - success = expect.search(value) - else: - success = False - expect_str = expect.pattern - else: - if value and not case_sensitive: - value = value.lower() - if inclusive: - success = expect in value - else: - success = value == expect - expect_str = expect - return success, expect_str - - def _match_subject(self, subject, result): - """Is the given result classified under the given subject?""" - values = [] - expect_str = subject - for classification in result.classifications or []: - value = classification["term"].lower() - values.append(value) - success, expect_str = self._match_scalar(value, subject) - if success: - return True, values, expect_str - return False, values, expect_str - - def _match_genre(self, subject, result): - """Is the given result classified under the given genre?""" - values = [] - expect_str = subject - for genre in result.genres or []: - value = genre["name"].lower() - values.append(value) - success, expect_str = self._match_scalar(value, subject) - if success: - return True, values, expect_str - return False, values, expect_str - - def _match_target_age(self, how_old_is_the_kid, result): - - if result.target_age.lower == 18: - return False, how_old_is_the_kid, "18+" - - expect_upper, expect_lower = max(how_old_is_the_kid), min(how_old_is_the_kid) - expect_set = set(range(expect_lower, expect_upper + 1)) - - result_upper = result.target_age.upper - result_lower = result.target_age.lower - result_set = set(range(result_lower, result_upper + 1)) - - if result_set and expect_set.intersection(result_set): - return True, how_old_is_the_kid, (result_lower, result_upper) - return False, how_old_is_the_kid, (result_lower, result_upper) - - def _match_author(self, author, result): - for contributor in result.contributors: - if not contributor.role in Filter.AUTHOR_MATCH_ROLES: - continue - names = [ - contributor[field].lower() - for field in ["display_name", "sort_name"] - if contributor[field] - ] - if hasattr(author, "match"): - match = any(author.search(name) for name in names) - else: - match = any(author == name for name in names) - if match: - return True, author, contributor - else: - return False, author, None - - def match_result(self, result): - """Does the given result match these criteria?""" - - for field, expect in list(self.kwargs.items()): - fields = None - if field == "subject": - success, value, expect_str = self._match_subject(expect, result) - elif field == "genre": - success, value, expect_str = self._match_genre(expect, result) - elif field == "target_age": - success, value, expect_str = self._match_target_age(expect, result) - elif field == "author": - success, value, expect_str = self._match_author(expect, result) - elif field == "title_or_subtitle": - fields = ["title", "subtitle"] - else: - fields = [field] - if fields: - for field in fields: - value = self._field(field, result) - success, expect_str = self._match_scalar(value, expect) - if success: - break - if not success: - return False, value, expect_str - return True, value, expect_str - - def multi_evaluate(self, hits): - # Evalate a number of hits and sort them into successes and failures. 
- successes = [] - failures = [] - for h in hits: - success, actual, expected = self.match_result(h) - if success: - successes.append((success, actual, expected)) - else: - failures.append((success, actual, expected)) - return successes, failures - - -class ReturnsNothing(Evaluator): - """This search should return no results at all.""" - - def evaluate(self, hits): - assert not hits - - -class Common(Evaluator): - """It must be common for the results to match certain criteria.""" - - def __init__( - self, threshold=0.5, minimum=None, first_must_match=True, negate=False, **kwargs - ): - """Constructor - - :param threshold: A proportion of the search results must - match these criteria. - - :param minimum: At least this many search results must match - these criteria. - - :param first_must_match: In addition to any collective - restrictions, the first search result must match the criteria. - """ - super().__init__(**kwargs) - self.threshold = threshold - self.minimum = minimum - self.first_must_match = first_must_match - self.negate = negate - - def evaluate_first(self, hit): - if self.first_must_match: - success, actual, expected = self.match_result(hit) - if hasattr(actual, "match"): - actual = actual.pattern - if (not success) or (self.negate and success): - if self.negate: - if actual == expected: - logging.info( - "First result matched and shouldn't have. %s == %s", - expected, - actual, - ) - assert actual != expected - else: - if actual != expected: - logging.info( - "First result did not match. {} != {}".format( - expected, actual - ) - ) - eq_(actual, expected) - - def evaluate_hits(self, hits): - successes, failures = self.multi_evaluate(hits) - if self.negate: - failures, successes = successes, failures - if self.threshold is not None: - self.assert_ratio( - [x[1:] for x in successes], - [x[1:] for x in successes + failures], - self.threshold, - ) - if self.minimum is not None: - overall_success = len(successes) >= self.minimum - if not overall_success: - logging.info("Need %d matches, got %d" % (self.minimum, len(successes))) - for i in successes + failures: - if i in successes: - template = "Y (%s == %s)" - else: - template = "N (%s != %s)" - vars = [] - for display in i[1:]: - if hasattr(display, "match"): - display = display.pattern - vars.append(display) - logging.info(template % tuple(vars)) - assert overall_success - - -class Uncommon(Common): - """The given match must seldom or never happen.""" - - def __init__(self, threshold=1, **kwargs): - kwargs["negate"] = True - super().__init__(threshold=threshold, **kwargs) - - -class FirstMatch(Common): - """The first result must match certain criteria.""" - - def __init__(self, **kwargs): - threshold = kwargs.pop("threshold", None) - super().__init__(threshold=threshold, first_must_match=True, **kwargs) - - -class AtLeastOne(Common): - def __init__(self, **kwargs): - super().__init__(threshold=None, minimum=1, first_must_match=False, **kwargs) - - -class SpecificGenre(Common): - pass - - -class SpecificAuthor(FirstMatch): - """The first result must be by a specific author. - - Most of the results must also be by that author. 
- """ - - def __init__(self, author, accept_title=None, threshold=0): - super().__init__(author=author, threshold=threshold) - if accept_title: - self.accept_title = accept_title.lower() - else: - self.accept_title = None - - def author_role(self, expect_author, result): - if hasattr(expect_author, "match"): - - def match(author): - return expect_author.search( - author.display_name - ) or expect_author.search(author.sort_name) - - else: - expect_author_sort = display_name_to_sort_name(expect_author) - expect_author_display = sort_name_to_display_name(expect_author) - - def match(author): - return ( - contributor.display_name == expect_author - or contributor.sort_name == expect_author - or contributor.sort_name == expect_author_sort - or contributor.display_name == expect_author_display - ) - - for contributor in result.contributors or []: - if match(contributor): - return contributor.role - else: - return None - - def evaluate_first(self, first): - expect = self.original_kwargs["author"] - if self.author_role(expect, first) is not None: - return True - - title = self._field("title", first) - subtitle = self._field("subtitle", first) - if self.accept_title and ( - self.accept_title in title or self.accept_title in subtitle - ): - return True - - # We have failed. - if hasattr(expect, "match"): - expect = expect.pattern - eq_(expect, first.contributors) - - def evaluate_hits(self, hits): - last_role = None - last_title = None - author = self.original_kwargs["author"] - authors = [hit.contributors for hit in hits] - author_matches = [] - for hit in hits: - role = self.author_role(author, hit) - author_matches.append(role is not None) - last_role = role - last_title = hit.title - self.assert_ratio(author_matches, authors, self.threshold) - - -class SpecificSeries(Common): - """Verify that results come from a certain series of books.""" - - def evaluate(self, results): - successes = [] - diagnostics = [] - for result in results: - success, should_have_matched = self.evaluate_one(result) - if success: - successes.append(result) - diagnostics.append(should_have_matched) - self.assert_ratio(successes, diagnostics, self.threshold) - - def evaluate_one(self, result): - expect_author = self.kwargs.get("author") - expect_series = self.kwargs.get("series") - - # Ideally a series match happens in the .series, but sometimes - # it happens in the .title. - - actual_series = result.series or "" - series_match, details = self._match_scalar( - actual_series, expect_series, inclusive=True - ) - actual_title = result.title - title_match, details = self._match_scalar( - actual_title, expect_series, inclusive=True - ) - - # Either way, if an author is specified, it means a book with - # a matching title by a different author is not part of the - # series. - if expect_author: - author_match, match, details = self._match_author(expect_author, result) - else: - author_match = True - actual = ( - actual_series, - actual_title, - result.author, - result.sort_author, - series_match, - title_match, - author_match, - ) - return (series_match or title_match) and author_match, actual - - -class SearchTest: - """A test suite that runs searches and compares the actual results - to some expected state. 
- """ - - expected_failures = [] - unexpected_successes = [] - - def search(self, query, evaluators=None, limit=10): - query = query.lower() - logging.info("Query: %r", query) - pagination = Pagination(size=limit) - qu = self.searcher.query(query, pagination=pagination) - hits = [x for x in qu][:] - if not evaluators: - raise Exception("No evaluators specified!") - if not isinstance(evaluators, list): - evaluators = [evaluators] - for e in evaluators: - e.evaluate(hits) - - -class VariantSearchTest(SearchTest): - """A test suite that runs different searches but evaluates the - results against the same evaluator every time. - """ - - EVALUATOR = None - - def search(self, query): - return super().search(query, self.EVALUATOR) - - -class TestGibberish(SearchTest): - # If you type junk into the search box you should get no results. - - def test_junk(self): - # Test one long string - self.search( - "rguhriregiuh43pn5rtsadpfnsadfausdfhaspdiufnhwe42uhdsaipfh", - ReturnsNothing(), - ) - - def test_multi_word_junk(self): - # Test several short strings - self.search( - "rguhriregiuh 43pn5rts adpfnsadfaus dfhaspdiufnhwe4 2uhdsaipfh", - ReturnsNothing(), - ) - - def test_wordlike_junk(self): - # To a human eye this is obviously gibberish, but it's close - # enough to English words that it might pick up a few results - # on a fuzzy match. - self.search("asdfza oiagher ofnalqk", ReturnsNothing()) - - -class TestTitleMatch(SearchTest): - # A search for one specific book. We want that book to be the - # first result. The rest of the results are usually irrelevant. - - def test_simple_title_match_carrie(self): - # There is one obvious right answer. - self.search("carrie", FirstMatch(title="Carrie")) - - def test_simple_title_match_bookshop(self): - self.search("the bookshop", FirstMatch(title="The Bookshop")) - - def test_simple_title_match_house(self): - self.search( - "A house for Mr. biswas", FirstMatch(title="A House for Mr. Biswas") - ) - - def test_simple_title_match_clique(self): - self.search("clique", FirstMatch(title="The Clique")) - - def test_simple_title_match_assassin(self): - self.search( - "blind assassin", - FirstMatch( - title=re.compile("^(the )?blind assassin$"), author="Margaret Atwood" - ), - ) - - def test_simple_title_match_dry(self): - self.search("the dry", FirstMatch(title="The Dry")) - - def test_simple_title_match_origin(self): - self.search("origin", FirstMatch(title="Origin")) - - def test_simple_title_match_goldfinch(self): - # This book is available as both "The Goldfinch" and "Goldfinch" - self.search( - "goldfinch", - FirstMatch(title=re.compile("^(the )?goldfinch$"), author="Donna Tartt"), - ) - - def test_simple_title_match_beach(self): - self.search("Manhattan beach", FirstMatch(title="Manhattan Beach")) - - def test_simple_title_match_testing(self): - self.search( - "The testing", FirstMatch(title="The testing", author="Joelle Charbonneau") - ) - - def test_simple_title_twentysomething(self): - self.search("Twentysomething", FirstMatch(title="Twentysomething")) - - def test_simple_title_match_bell_jar(self): - # NOTE: this works on ES6. On ES1, the top result is the Sparknotes for - # "The Bell Jar," rather than the novel itself. 
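FirstMatch, defined earlier in this file, is Common with first_must_match=True and (by default) no threshold, so only the first hit is constrained. A minimal, self-contained sketch of that contract, using SimpleNamespace objects as illustrative stand-ins for real search hits:

import re
from types import SimpleNamespace

def first_hit_matches(hits, title_pattern):
    # Roughly what FirstMatch(title=re.compile(...)) asserts about hits[0];
    # the real evaluator also accepts plain strings and other fields.
    return bool(hits) and title_pattern.search(hits[0].title) is not None

hits = [
    SimpleNamespace(title="The Blind Assassin"),
    SimpleNamespace(title="Assassin's Apprentice"),
]
assert first_hit_matches(hits, re.compile("^(the )?blind assassin$", re.I))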
- - self.search("bell jar", FirstMatch(author="Sylvia Plath")) - - def test_simple_title_match_androids(self): - self.search( - "Do androids dream of electric sheep", - FirstMatch(title="Do Androids Dream of Electric Sheep?"), - ) - - def test_genius_foods(self): - # In addition to an exact title match, we also check that - # food-related books show up in the search results. - self.search( - "genius foods", - [ - FirstMatch(title="Genius Foods"), - Common(genre=re.compile("(cook|diet)"), threshold=0.2), - ], - ) - - def test_it(self): - # The book "It" is correctly prioritized over books whose titles contain - # the word "it." - self.search("It", FirstMatch(title="It")) - - def test_girl_on_the_train(self): - # There's a different book called "The Girl in the Train". - self.search("girl on the train", FirstMatch(title="The Girl On The Train")) - - -class TestPosessives(SearchTest): - """Test searches for book titles that contain posessives.""" - - def test_washington_partial(self): - self.search("washington war", AtLeastOne(title="George Washington's War")) - - def test_washington_full_no_apostrophe(self): - self.search( - "george washingtons war", FirstMatch(title="George Washington's War") - ) - - @known_to_fail - def test_washington_partial_apostrophe(self): - # The apostrophe is stripped and the 's' is stemmed. This is - # normally a good thing, but here the query is parsed as - # "washington war", and the first result is "The Washington - # War". Parsing this as "washington ' s war" would give better - # results here. - # - # Since most people don't type the apostrophe, the tradeoff is - # worth it. - self.search("washington's war", FirstMatch(title="George Washington's War")) - - def test_washington_full_apostrophe(self): - self.search( - "george washington's war", FirstMatch(title="George Washington's War") - ) - - def test_bankers(self): - self.search("bankers wife", FirstMatch(title="The Banker's Wife")) - - def test_brother(self): - # The entire posessive is omitted. - self.search( - "my brother shadow", - FirstMatch(title="My Brother's Shadow"), - ) - - def test_police_women_apostrophe(self): - self.search( - "policewomen's bureau", - FirstMatch(title="The Policewomen's Bureau"), - ) - - def test_police_women_no_apostrophe(self): - self.search( - "policewomens bureau", - FirstMatch(title="The Policewomen's Bureau"), - ) - - def test_police_women_no_posessive(self): - self.search( - "policewomen bureau", - FirstMatch(title="The Policewomen's Bureau"), - ) - - @known_to_fail - def test_police_women_extra_space(self): - # The extra space means this parses to 'police' and 'women', - # two very common words, and not the relatively uncommon - # 'policewomen'. - self.search( - "police womens bureau", - FirstMatch(title="The Policewomen's Bureau"), - ) - - -class TestSynonyms(SearchTest): - # Test synonyms that could be (but currently aren't) defined in - # the search index. - - @known_to_fail - def test_and_is_ampersand(self): - # There are books called "Black & White" and books called - # "Black And White". When '&' and 'and' are synonyms, all - # these books should get the same score. - self.search( - "black and white", - AtLeastOne(title="Black & White"), - ) - - @known_to_fail - def test_ampersand_is_and(self): - # The result we're looking for is second, behind "The - # Cheesemaker's Apprentice". 
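AtLeastOne, used in the ampersand tests above, is Common with minimum=1, no threshold, and first_must_match=False: a single matching hit anywhere on the page satisfies it. A small sketch of that check, assuming hits expose the inspected field as a plain attribute:

import re
from types import SimpleNamespace

def at_least_one(hits, field, pattern):
    # Simplified version of what AtLeastOne(<field>=re.compile(...)) requires.
    return any(pattern.search(getattr(h, field, "") or "") for h in hits)

hits = [
    SimpleNamespace(title="Black and White Thinking"),
    SimpleNamespace(title="Black & White"),
]
assert at_least_one(hits, "title", re.compile(r"black & white", re.I))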
- self.search( - "master and apprentice", - FirstMatch(title="Master & Apprentice (Star Wars)"), - ) - - -class TestUnownedTitle(SearchTest): - # These are title searches for books not owned by NYPL. - # Because of this we check that _similar_ books are returned. - # - # If your library owns these titles, then your results may be - # different for these tests. - - def test_boy_saved_baseball(self): - # The target title ("The Boy who Saved Baseball") isn't in the - # collection, but, ideally, most of the top results should - # still be about baseball. - self.search("boy saved baseball", Common(subject=re.compile("baseball"))) - - def test_save_cat(self): - # This specific book isn't in the collection, but there's a - # book with a very similar title, which is the first result. - self.search( - "Save the Cat", - [ - Common(title=re.compile("save the cat"), threshold=0.1), - Common(title=re.compile("(save|cat)"), threshold=0.6), - ], - ) - - def test_minecraft_zombie(self): - # We don't have this specific title, but there's no shortage of - # Minecraft books. - self.search( - "Diary of a minecraft zombie", Common(summary=re.compile("minecraft", re.I)) - ) - - def test_pie(self): - # NOTE: "Pie Town Woman" isn't in the collection, but there's - # a book called "Pie Town," which seems like the clear best - # option for the first result. - self.search("Pie town woman", FirstMatch(title="Pie Town")) - - def test_divorce(self): - # Despite the 'children', we are not looking for children's - # books. We're looking for books for grown-ups about divorce. - self.search( - "The truth about children and divorce", - [ - Common(audience="adult"), - AtLeastOne(subject=re.compile("divorce")), - ], - ) - - def test_patterns_of_fashion(self): - # This specific title isn't in the collection, but the results - # should still be reasonably relevant. - self.search( - "Patterns of fashion", - [ - Common(subject=re.compile("crafts"), first_must_match=False), - ], - ) - - def test_unowned_partial_title_rosetta_stone(self): - # NOTE: the collection doesn't have any books with titles containing "rosetta" - # stone," but it does have a few with titles containing "rosetta"; ideally, - # one of those would be the first result. A title containing "stone" would be - # less relevant to the user, but still reasonable. Instead, the first result - # is a memoir by an author whose first name is "Rosetta." - - self.search("Rosetta stone", FirstMatch(title=re.compile("(rosetta|stone)"))) - - @known_to_fail - def test_unowned_misspelled_partial_title_cosmetics(self): - # NOTE: The patron was presumably looking for "Don't Go to the - # Cosmetics Counter Without Me," which isn't in the - # collection. Ideally, one of the results should have - # something to do with cosmetics; instead, they're about - # comets. - self.search( - "Cometics counter", - [ - AtLeastOne(title=re.compile("cosmetics")), - ], - ) - - def test_title_match_with_genre_name(self): - # This book is unowned and its title includes a genre name. - # We're going to get a lot of books with "life" or "spy" in - # the title. - # - # The first result is a book that fills the intent of the - # search query but doesn't say "spy" anywhere. - self.search( - "My life as a spy", - Common(title_or_subtitle=re.compile("life|spy"), threshold=0.5), - ) - - @known_to_fail - def test_nonexistent_title_tower(self): - # NOTE: there is no book with this title. The most likely - # scenario is that the user meant "The Dark Tower." 
The only - # way to get this to work in Opensearch might be to - # institute a big synonym filter. - self.search("The night tower", FirstMatch(title="The Dark Tower")) - - -class TestMisspelledTitleSearch(SearchTest): - # Test title searches where the title is misspelled. - - @known_to_fail - def test_allegiant(self): - # A very bad misspelling. - self.search("alliagent", FirstMatch(title="Allegiant")) - - def test_marriage_lie(self): - self.search("Marriage liez", FirstMatch(title="The Marriage Lie")) - - def test_invisible_emmie(self): - # One word in the title is slightly misspelled. - self.search("Ivisible emmie", FirstMatch(title="Invisible Emmie")) - - def test_karamazov(self): - # Extremely uncommon proper noun, slightly misspelled - self.search("Brothers karamzov", FirstMatch(title="The Brothers Karamazov")) - - def test_restless_wave(self): - # One common word in the title is slightly misspelled. - self.search("He restless wave", FirstMatch(title="The Restless Wave")) - - def test_kingdom_of_the_blind(self): - # The first word, which is a fairly common word, is slightly misspelled. - self.search("Kngdom of the blind", FirstMatch(title="Kingdom of the Blind")) - - def test_seven_husbands(self): - # Two words--1) a common word which is spelled as a different word - # ("if" instead of "of"), and 2) a proper noun--are misspelled. - self.search( - "The seven husbands if evyln hugo", - FirstMatch(title="The Seven Husbands of Evelyn Hugo"), - ) - - def test_nightingale(self): - # Unusual word misspelled. - # - # This might fail because a book by Florence Nightingale is - # seen as a better match. - self.search("The nightenale", FirstMatch(title="The Nightingale")) - - @known_to_fail - def test_memoirs_geisha(self): - # The desired work shows up on the first page, but it should - # be first. - self.search("Memoire of a ghesia", FirstMatch(title="Memoirs of a Geisha")) - - def test_healthyish(self): - # Misspelling of the title, which is a neologism. - self.search("healtylish", FirstMatch(title="Healthyish")) - - def test_zodiac(self): - # Uncommon word, slightly misspelled. - self.search("Zodiaf", FirstMatch(title="Zodiac")) - - def test_for_whom_the_bell_tolls(self): - # A relatively common word is spelled as a different, more common word. - self.search( - "For whom the bell tools", FirstMatch(title="For Whom the Bell Tolls") - ) - - @known_to_fail - def test_came_to_baghdad(self): - # An extremely common word is spelled as a different word. - self.search("They cane to baghdad", FirstMatch(title="They Came To Baghdad")) - - def test_genghis_khan(self): - self.search("Ghangiz Khan", AtLeastOne(title=re.compile("Genghis Khan", re.I))) - - def test_guernsey(self): - # One word, which is a place name, is misspelled. - self.search( - "The gurnsey literary and potato peel society", - FirstMatch(title="The Guernsey Literary & Potato Peel Society"), - ) - - def test_british_spelling_color_of_our_sky(self): - # Note to pedants: the title of the book as published is - # "The Color of Our Sky". - - self.search("The colour of our sky", FirstMatch(title="The Color of Our Sky")) - - -class TestPartialTitleSearch(SearchTest): - # Test title searches where only part of the title is provided. - - def test_i_funnyest(self): - # An important word from the middle of the title is omitted. - self.search( - "i funnyest", - AtLeastOne(title="I Totally Funniest"), - ) - - def test_future_home(self): - # The search query only contains half of the title. 
- self.search("Future home of", FirstMatch(title="Future Home Of the Living God")) - - def test_fundamentals_of_supervision(self): - # A word from the middle of the title is missing. - self.search( - "fundamentals of supervision", - FirstMatch(title="Fundamentals of Library Supervision"), - ) - - def test_hurin(self): - # A single word is so unusual that it can identify the book - # we're looking for. - for query in ("Hurin", "Húrin"): - self.search( - query, - FirstMatch(title="The Children of Húrin", author=re.compile("tolkien")), - ) - - @known_to_fail - def test_open_wide(self): - # Search query cuts off midway through the second word of the - # subtitle. NOTE: The book we're looking for is on the first - # page, beneath other titles called "Open Wide!" and "Wide - # Open", which ought to be worse matches because there's no - # subtitle match. - self.search( - "Open wide a radical", - FirstMatch( - title="Open Wide", - subtitle="a radically real guide to deep love, rocking relationships, and soulful sex", - ), - ) - - def test_how_to_win_friends(self): - # The search query only contains half of the title. - self.search( - "How to win friends", - FirstMatch(title="How to Win Friends and Influence People"), - ) - - def test_wash_your_face_1(self): - # The search query is missing the last word of the title. - self.search("Girl wash your", FirstMatch(title="Girl, Wash Your Face")) - - def test_wash_your_face_2(self): - # The search query is missing the first word of the title. - self.search("Wash your face", FirstMatch(title="Girl, Wash Your Face")) - - def test_theresa(self): - # The search results correctly prioritize books with titles containing - # "Theresa" over books by authors with the first name "Theresa." - self.search("Theresa", FirstMatch(title=re.compile("Theresa", re.I))) - - def test_prime_of_miss_jean_brodie(self): - # The search query only has the first and last words from the title, and - # the last word is misspelled. - self.search("Prime brody", FirstMatch(title="The Prime of Miss Jean Brodie")) - - -class TestTitleGenreConflict(SearchTest): - # These tests address a longstanding problem of books whose titles - # contain the names of genres. - - @known_to_fail - def test_drama(self): - # The title of the book is the name of a genre, and another - # genre has been added to the search term to clarify it. - self.search("drama comic", FirstMatch(title="Drama", author="Raina Telgemeier")) - - def test_title_match_with_genre_name_romance(self): - # The title contains the name of a genre. Despite this, - # an exact title match should show up first. - self.search("modern romance", FirstMatch(title="Modern Romance")) - - def test_modern_romance_with_author(self): - self.search( - "modern romance aziz ansari", - FirstMatch(title="Modern Romance", author="Aziz Ansari"), - ) - - def test_partial_title_match_with_genre_name_education(self): - self.search( - "education henry adams", - FirstMatch(title="The Education of Henry Adams"), - ) - - def test_title_match_with_genre_name_law(self): - self.search( - "law of the mountain man", FirstMatch(title="Law of the Mountain Man") - ) - - @known_to_fail - def test_law_of_the_mountain_man_with_author(self): - # "Law of the Mountain Man" is the second result, but it - # really should be first. 
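When search() is handed a list of evaluators, as in the next test, every evaluator is run against the same page of hits, so one call can pin the first result and constrain the overall mix at once. A rough sketch of that loop, with an illustrative in-memory searcher standing in for the real Opensearch client:

class StubSearcher:
    # Illustrative stand-in; the real suite pages through an Opensearch index.
    def __init__(self, hits):
        self.hits = hits

    def query(self, query, pagination=None):
        return self.hits

def run_evaluators(searcher, query, evaluators, limit=10):
    hits = list(searcher.query(query.lower()))[:limit]
    for evaluator in evaluators:
        evaluator.evaluate(hits)  # each evaluator sees the identical hit list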
- self.search( - "law of the mountain man william johnstone", - [ - FirstMatch(title="Law of the Mountain Man"), - Common(author="William Johnstone"), - ], - ) - - def test_spy(self): - self.search("spying on whales", FirstMatch(title="Spying on Whales")) - - def test_dance(self): - # This works because of the stopword index. - # - # Otherwise "Dance of the Dragons" looks like an equally good - # result. - self.search("dance with dragons", FirstMatch(title="A Dance With Dragons")) - - -class TestTitleAuthorConflict(SearchTest): - # Test title searches for works whose titles contain words - # that often show up in peoples' names. - - def test_lord_jim(self): - # The book "Lord Jim" is correctly prioritized over books whose authors' names - # contain "Lord" or "Jim." - self.search("Lord Jim", FirstMatch(title="Lord Jim")) - - def test_wilder(self): - # The book "Wilder" is correctly prioritized over books by authors with the - # last name "Wilder." - self.search("Wilder", FirstMatch(title="Wilder")) - - def test_alice(self): - # The book "Alice" is correctly prioritized over books by authors with the - # first name "Alice." - self.search("Alice", FirstMatch(title="Alice")) - - def test_alex_and_eliza(self): - # The book "Alex and Eliza" is correctly prioritized over books by authors with the - # first names "Alex" or "Eliza." - self.search("Alex and Eliza", FirstMatch(title="Alex and Eliza")) - - def test_disney(self): - # The majority of the search results will be about Walt Disney and/or the - # Disney Company, but there should also be some published by the Disney Book Group - - # NOTE: The first result is a book whose .series is the literal - # string "Disney". This triggers a keyword series match which - # bumps it to the top. That's why first_must_match=False. - # It's an unusual situation so I think this is all right. - self.search( - "disney", - [ - Common(title=re.compile("disney"), first_must_match=False), - AtLeastOne(title=re.compile("walt disney")), - AtLeastOne(author="Disney Book Group"), - ], - ) - - def test_bridge(self): - # The search results correctly prioritize the book with this - # title over books by authors whose names contain "Luis" or - # "Rey." - self.search( - "the bridge of san luis rey", FirstMatch(title="The Bridge of San Luis Rey") - ) - - -class TestTitleAudienceConflict(SearchTest): - # Test titles searches for books whose titles contain the - # name of an audience or target age. - - def test_title_match_with_audience_name_children(self): - self.search("Children blood", FirstMatch(title="Children of Blood and Bone")) - - def test_title_match_with_audience_name_kids(self): - self.search("just kids", FirstMatch(title="Just Kids")) - - def test_tales_of_a_fourth_grade_nothing(self): - self.search( - "fourth grade nothing", FirstMatch(title="Tales of a Fourth Grade Nothing") - ) - - -class TestMixedTitleAuthorMatch(SearchTest): - @known_to_fail - def test_centos_caen(self): - # 'centos' shows up in the subtitle. 'caen' is the name - # of one of the authors. 
- # - # NOTE: The work we're looking for shows up on the first page - # but it can't beat out title matches like "CentOS Bible" - self.search("centos caen", FirstMatch(title="fedora linux toolbox")) - - def test_fallen_baldacci(self): - self.search( - "fallen baldacci", FirstMatch(author="David Baldacci", title="The Fallen") - ) - - def test_dragons(self): - # Full title, full but misspelled author - self.search( - "Michael conolley Nine Dragons", - FirstMatch(title="Nine Dragons", author="Michael Connelly"), - ) - - def test_dostoyevsky(self): - # Full title, partial author - self.search( - "Crime and punishment Dostoyevsky", FirstMatch(title="Crime and Punishment") - ) - - def test_dostoyevsky_partial_title(self): - # Partial title, partial author - self.search("punishment Dostoyevsky", FirstMatch(title="Crime and Punishment")) - - @known_to_fail - def test_sparks(self): - # Full title, full but misspelled author, "by" - # NOTE: Work shows up very high on first page but ought to be - # first. It's behind other books called "Every - # Breath" - self.search( - "Every breath by nicholis sparks", - FirstMatch(title="Every Breath", author="Nicholas Sparks"), - ) - - def test_grisham(self): - # Full title, author name misspelled - self.search( - "The reckoning john grisham", - FirstMatch(title="The Reckoning", author="John Grisham"), - ) - - def test_singh(self): - self.search( - "Nalini singh archangel", - [ - Common(author="Nalini Singh", threshold=0.9), - Common(title=re.compile("archangel")), - ], - ) - - def test_sebald_1(self): - # This title isn't in the collection, but the author's other - # books should still come up. - self.search( - "Sebald after", SpecificAuthor("W. G. Sebald", accept_title="Sebald") - ) - - def test_sebald_2(self): - # Specifying the full title gets rid of the book about - # this author, probably because "Nature" is the name of a genre. - self.search("Sebald after nature", SpecificAuthor("W. G. Sebald")) - - -# Classes that test many different variant searches for a specific -# title. -# -class TestTheHateUGive(VariantSearchTest): - """Test various ways of searching for "The Hate U Give".""" - - # We check the start of the title because for some reason we have - # a copy of the book that includes the author's name in the title. - EVALUATOR = FirstMatch(title=re.compile("^The Hate U Give", re.I)) - - def test_correct_spelling(self): - self.search("the hate u give") - - def test_with_all(self): - self.search("all the hate u give") - - def test_with_all_and_you(self): - self.search("all the hate you give") - - def test_with_you(self): - self.search("hate you give") - - def test_with_you_misspelled(self): - self.search("hate you gove") - - -class TestCharlottesWeb(VariantSearchTest): - """Test various ways of searching for "Charlotte's Web".""" - - EVALUATOR = FirstMatch(title="Charlotte's Web") - - def test_with_apostrophe(self): - self.search("charlotte's web") - - def test_without_possessive(self): - self.search("charlotte web") - - def test_without_apostrophe(self): - self.search("charlottes web") - - def test_misspelled_no_apostrophe(self): - self.search("charlettes web") - - def test_no_apostrophe_with_author(self): - self.search("charlottes web eb white") - - def test_no_apostrophe_with_author_space(self): - self.search("charlottes web e b white") - - -class TestChristopherMouse(VariantSearchTest): - # Test various partial title spellings for "Christopher Mouse: The Tale - # of a Small Traveler". 
- # - # This title is not in NYPL's collection, so we don't expect any of - # these tests to pass. - EVALUATOR = FirstMatch(title=re.compile("Christopher Mouse")) - - @known_to_fail - def test_correct_spelling(self): - self.search("christopher mouse") - - @known_to_fail - def test_misspelled_1(self): - self.search("chistopher mouse") - - @known_to_fail - def test_misspelled_2(self): - self.search("christopher moise") - - @known_to_fail - def test_misspelled_3(self): - self.search("chistoper muse") - - -class TestSubtitleMatch(SearchTest): - # Test searches for words that show up based on a remembered - # subtitle. - - def test_shame_stereotypes(self): - # "Sister Citizen" has both search terms in its - # subtitle. - self.search("shame stereotypes", FirstMatch(title="Sister Citizen")) - - def test_garden_wiser(self): - self.search("garden wiser", FirstMatch(title="Gardening for a Lifetime")) - - -class TestAuthorMatch(SearchTest): - def test_kelly_link(self): - # There is one obvious right answer. - self.search("kelly link", SpecificAuthor("Kelly Link")) - - def test_stephen_king(self): - # This author is so well-known that there are books _about_ - # him (e.g. "Stephen King and Philosophy"). Such a book might - # reasonably show up as the first search result. However, the - # majority of search results should be books _by_ this author. - self.search( - "stephen king", - [ - SpecificAuthor("Stephen King", accept_title="Stephen King"), - Common(author="Stephen King", threshold=0.7), - ], - ) - - def test_fleming(self): - # It's reasonable for there to be a biography of this author in the search - # results, but the overwhelming majority of the results should be books by him. - self.search( - "ian fleming", - [ - SpecificAuthor("Ian Fleming", accept_title="Ian Fleming"), - Common(author="Ian Fleming", threshold=0.9), - ], - ) - - def test_plato(self): - # The majority of the search results will be _about_ this author, - # but there should also be some _by_ him. - self.search( - "plato", - [SpecificAuthor("Plato", accept_title="Plato"), AtLeastOne(author="Plato")], - ) - - def test_byron(self): - # The user probably wants either a biography of Byron or a book of - # his poetry. - # - # TODO: Books about Byron are consistently prioritized above books by him. - self.search( - "Byron", - [ - AtLeastOne(title=re.compile("byron"), genre=re.compile("biography")), - AtLeastOne(author=re.compile("byron")), - ], - ) - - def test_hemingway(self): - # TODO: Books about Hemingway are consistently prioritized above books by him. - - # The majority of the search results should be _by_ this author, - # but there should also be at least one _about_ him. - self.search( - "Hemingway", - [ - AtLeastOne( - title=re.compile("hemingway"), genre=re.compile("biography") - ), - AtLeastOne(author="Ernest Hemingway"), - ], - ) - - def test_lagercrantz(self): - # The search query contains only the author's last name. - # There are several people with this name, and there's no - # information that would let us prefer one over the other. - self.search("Lagercrantz", SpecificAuthor(re.compile("Lagercrantz"))) - - def test_burger(self): - # The author is correctly prioritized above books whose titles contain - # the word "burger." - self.search("wolfgang burger", SpecificAuthor("Wolfgang Burger")) - - def test_chase(self): - # The author is correctly prioritized above the book "Emma." 
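SpecificAuthor, defined near the top of this file, accepts a hit if any contributor matches the expected author in either display order or sort order; the real comparison goes through display_name_to_sort_name() and sort_name_to_display_name(). A simplified sketch of the idea, with a naive "Last, First" flip standing in for those helpers:

def naive_sort_name(display_name):
    # Crude stand-in for display_name_to_sort_name(): "Emma Chase" -> "Chase, Emma".
    parts = display_name.split()
    if len(parts) < 2:
        return display_name
    return "{}, {}".format(parts[-1], " ".join(parts[:-1]))

def author_matches(expected_display_name, contributor_names):
    candidates = {expected_display_name, naive_sort_name(expected_display_name)}
    return any(name in candidates for name in contributor_names)

assert author_matches("Emma Chase", ["Chase, Emma"])
assert not author_matches("Emma Chase", ["Chase Utley"])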
- self.search("Emma chase", SpecificAuthor("Emma Chase")) - - @known_to_fail - def test_deirdre_martin(self): - # The author's first name is misspelled in the search query. - # - # The search results are books about characters named Diedre. - self.search("deidre martin", SpecificAuthor("Deirdre Martin")) - - def test_wharton(self): - self.search( - "edith wharton", - SpecificAuthor("Edith Wharton", accept_title="Edith Wharton"), - ) - - def test_wharton_misspelled(self): - # The author's last name is misspelled in the search query. - self.search("edith warton", Common(author="Edith Wharton")) - - def test_danielle_steel(self): - # The author's last name is slightly misspelled in the search query. - self.search("danielle steele", SpecificAuthor("Danielle Steel", threshold=1)) - - def test_primary_author_with_coauthors(self): - # This person is sometimes credited as primary author with - # other authors, and sometimes as just a regular co-author. - self.search("steven peterman", SpecificAuthor("Steven Peterman")) - - def test_primary_author_with_coauthors_2(self): - self.search("jack cohen", SpecificAuthor("Jack Cohen")) - - def test_only_as_coauthor(self): - # This person is inevitably credited co-equal with another - # author. - self.search("stan berenstain", SpecificAuthor("Stan Berenstain")) - - def test_narrator(self): - # This person is narrator for a lot of Stephen King - # audiobooks. Searching for their name may bring up people - # with similar names and authorship roles, but they'll show up - # pretty frequently. - self.search("will patton", Common(author="Will Patton")) - - def test_unknown_display_name(self): - # In NYPL's dataset, we know the sort name for this author but - # not the display name. - self.search("emma craigie", SpecificAuthor("Craigie, Emma")) - - def test_nabokov_misspelled(self): - # Only the last name is provided in the search query, - # and it's misspelled. - self.search( - "Nabokof", SpecificAuthor("Vladimir Nabokov", accept_title="Nabokov") - ) - - def test_ba_paris(self): - # Author's last name could also be a subject keyword. - # - # NOTE: These results are always very good, but sometimes the - # first result is a title match with stopword removed: - # "Escalier B, Paris 12". - self.search("b a paris", SpecificAuthor("B. A. Paris")) - - def test_griffiths(self): - # The search query gives the author's sort name. - self.search("Griffiths elly", SpecificAuthor("Elly Griffiths")) - - def test_christian_kracht(self): - # The author's name contains a genre name. - self.search("christian kracht", FirstMatch(author="Christian Kracht")) - - def test_dan_gutman(self): - self.search("gutman, dan", Common(author="Dan Gutman")) - - def test_dan_gutman_with_series(self): - self.search( - "gutman, dan the weird school", - SpecificSeries(series="My Weird School", author="Dan Gutman"), - ) - - def test_steve_berry(self): - # This search looks like nothing special but it has been - # difficult in the past, possibly because "berry" is an - # English word. - self.search("steve berry", Common(author="Steve Berry")) - - @known_to_fail - def test_thomas_python(self): - # All the terms are correctly spelled words, but the patron - # clearly means something else. - self.search("thomas python", Common(author="Thomas Pynchon")) - - def test_betty_neels_audiobooks(self): - # Even though there are no audiobooks, all of the search - # results should still be books by this author. 
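A single Common evaluator can carry several field expectations at once, as in the next search: a hit only counts as a success if every named field matches, and threshold=1 then demands that of every hit on the page. A small sketch of that per-hit, all-fields check, assuming hits expose the fields as attributes (the real match_result() and _match_scalar() helpers earlier in this file handle more cases):

import re
from types import SimpleNamespace

def hit_matches_all(hit, **expectations):
    # Each keyword names a field; the value is a substring (case-insensitive)
    # or a compiled regular expression.
    for field, expected in expectations.items():
        actual = getattr(hit, field, "") or ""
        if hasattr(expected, "search"):
            if not expected.search(actual):
                return False
        elif expected.lower() not in actual.lower():
            return False
    return True

hit = SimpleNamespace(author="Betty Neels", genre="Romance")
assert hit_matches_all(hit, author="Betty Neels", genre=re.compile("romance", re.I))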
- self.search( - "Betty neels audiobooks", - Common(author="Betty Neels", genre="romance", threshold=1), - ) - - -# Classes that test many different variant searches for a specific -# author. -# - - -class TestTimothyZahn(VariantSearchTest): - # Test ways of searching for author Timothy Zahn. - EVALUATOR = SpecificAuthor("Timothy Zahn") - - def test_correct_spelling(self): - self.search("timothy zahn") - - def test_incorrect_1(self): - self.search("timithy zahn") - - def test_incorrect_2(self): - self.search("timithy zhan") - - -class TestRainaTelgemeier(VariantSearchTest): - # Test ways of searching for author Raina Telgemeier. - EVALUATOR = SpecificAuthor("Raina Telgemeier") - - def test_correct_spelling(self): - self.search("raina telgemeier") - - def test_minor_misspelling(self): - self.search("raina telegmeier") - - @known_to_fail - def test_misspelling_1(self): - self.search("raina telemger") - - def test_misspelling_2(self): - self.search("raina telgemerier") - - -class TestHenningMankell(VariantSearchTest): - # A few tests of searches for author Henning Mankell - # - # Among other things, these tests verify that we can resist the - # temptation to stem "Henning" to "Hen". - # - # In NYPL's collection, the top result for a misspelled version of - # this author's name is a book by a different author, with the - # subtitle "A gripping thriller for fans of Jo Nesbo and Henning - # Mankell". That's not perfect, but it's acceptable. - - EVALUATOR = SpecificAuthor("Henning Mankell", accept_title="Henning Mankell") - - def test_display_name(self): - self.search("henning mankell") - - def test_sort_name(self): - self.search("mankell henning") - - def test_display_name_misspelled(self): - self.search("henning mankel") - - def test_sort_name_misspelled(self): - self.search("mankel henning") - - -class TestMJRose(VariantSearchTest): - # Test ways of searching for author M. J. Rose. - # This highlights a lot of problems with the way we handle - # punctuation and spaces. - EVALUATOR = Common(author="M. J. Rose") - - # TODO: This is pretty bad given the work we do to normalize - # author names during indexing. Maybe we need to normalize the - # data going in to the search. - - def test_with_periods_and_spaces(self): - # This proves that we do have the books and can find them. - self.search("m. j. rose") - - def test_with_spaces(self): - # This is how the author's name is indexed internally. - self.search("m j rose") - - @known_to_fail - def test_with_periods(self): - # This only gets three books by this author. - # Maybe 'm.j.' is parsed as a single token or something. - self.search("m.j. rose") - - @known_to_fail - def test_with_one_period(self): - # This only gets three books by this author. - self.search("m.j rose") - - @known_to_fail - def test_with_no_periods_or_spaces(self): - # The author name is indexed as "m j", and without a space - # between the "m" and the "j" Opensearch won't match the - # tokens. - self.search("mj rose") - - -class TestPublisherMatch(SearchTest): - # Test the ability to find books by a specific publisher or - # imprint. - - def test_harlequin_romance(self): - self.search("harlequin romance", Common(publisher="harlequin", genre="Romance")) - - def test_harlequin_historical(self): - self.search( - "harlequin historical", - # We may get some "harlequin historical classic", which is fine. 
- Common(imprint=re.compile("harlequin historical"), genre="Romance"), - ) - - def test_princeton_review(self): - self.search("princeton review", Common(imprint="princeton review")) - - @known_to_fail - def test_wizards(self): - self.search("wizards coast", Common(publisher="wizards of the coast")) - - # We don't want to boost publisher/imprint matches _too_ highly - # because publishers and imprints are often single words that - # would be better matched against other fields. - - def test_penguin(self): - # Searching for a word like 'penguin' should prioritize partial - # matches in other fields over exact imprint matches. - self.search( - "penguin", - [Common(title=re.compile("penguin", re.I)), Uncommon(imprint="Penguin")], - ) - - def test_vintage(self): - self.search( - "vintage", - [ - Common(title=re.compile("vintage", re.I)), - Uncommon(imprint="Vintage", threshold=0.5), - ], - ) - - def test_plympton(self): - # This should prioritize books by George Plimpton (even though - # it's not an exact string match) over books from the Plympton - # publisher. - self.search( - "plympton", - [ - Common(author=re.compile("plimpton", re.I)), - Uncommon(publisher="Plympton"), - ], - ) - - @known_to_fail - def test_scholastic(self): - # This gets under 50% matches -- there are test prep books and - # the like in the mix. - # - # TODO: It would be nice to boost this publisher more, but - # it's tough to know that "scholastic" is probably a publisher - # search, where "penguin" is probably a topic search and - # "plympton" is probably a misspelled author search. - self.search("scholastic", Common(publisher="scholastic inc.")) - - -class TestGenreMatch(SearchTest): - # A genre search is a search for books in a certain 'section' - # of the library. - - any_sf = re.compile("(Science Fiction|SF)", re.I) - - def test_science_fiction(self): - # NOTE: "Science Fiction" title matches (some of which are - # also science fiction) are promoted highly. Genre matches - # only show up in the front page if they also have "Science - # Fiction" in the title. - self.search( - "science fiction", Common(genre=self.any_sf, first_must_match=False) - ) - - def test_sf(self): - # Shorthand for "Science Fiction" - # NOTE: The first result is a book of essays with "SF" in the subtitle - # -- a reasonable match. - self.search("sf", Common(genre=self.any_sf, first_must_match=False)) - - def test_scifi(self): - # Shorthand for "Science Fiction" - self.search("sci-fi", Common(genre=self.any_sf)) - - def test_iain_banks_sf(self): - self.search( - # Genre and author - "iain banks science fiction", - Common(genre=self.any_sf, author="Iain M. Banks"), - ) - - @known_to_fail - def test_christian(self): - # NOTE: This fails because of a large number of title matches - # classified under other genres. - self.search( - "christian", - Common(genre=re.compile("(christian|religion)"), first_must_match=False), - ) - - def test_christian_authors(self): - self.search( - "christian authors", Common(genre=re.compile("(christian|religion)")) - ) - - @known_to_fail - def test_christian_lust(self): - # It's not clear what this person is looking for, but - # treating it as a genre search seems appropriate. - # - # The first couple results are excellent, so this isn't - # so bad. - self.search( - "lust christian", - Common(genre=re.compile("(christian|religion|religious fiction)")), - ) - - @known_to_fail - def test_christian_fiction(self): - # NOTE: This fails for a spurious reason. 
These results are - # pretty good, but they're not obvious genre matches. - self.search( - "christian fiction", - [ - Common(fiction="fiction"), - Common(genre=re.compile("(christian|religion|religious fiction)")), - ], - ) - - @known_to_fail - def test_graphic_novel(self): - # NOTE: This fails for a spurious reason. Many of the results - # have "Graphic Novel" in the title but are not classified as - # such. - self.search("Graphic novel", Common(genre="Comics & Graphic Novels")) - - def test_horror(self): - self.search("Best horror story", Common(genre=re.compile("horror"))) - - @known_to_fail - def test_scary_stories(self): - # NOTE: This seems spurious. The first results have "Scary - # Stories" in the title, so they should do fine, but are not - # necessarily classified as horror. - self.search("scary stories", Common(genre="Horror")) - - @known_to_fail - def test_percy_jackson_graphic_novel(self): - # NOTE: This doesn't work very well. The first few results are - # by Rick Riordan and then works with "Graphic Novel" in the - # title take over. - - self.search( - "Percy jackson graphic novel", - [ - Common(author="Rick Riordan"), - AtLeastOne(genre="Comics & Graphic Novels", author="Rick Riordan"), - ], - ) - - def test_gossip_girl_manga(self): - # A "Gossip Girl" manga series does exist, but it's not in - # NYPL's collection. Instead, the results should focus on - # the "Gossip Girl" series. - self.search( - "Gossip girl Manga", - [ - SpecificSeries( - series="Gossip Girl", - author=re.compile("cecily von ziegesar"), - ), - ], - ) - - @known_to_fail - def test_clique(self): - # NOTE: The target book does show up in the results, but the - # top results are dominated by books with 'graphic novel' in - # the title. - - # Genre and title - self.search( - "The clique graphic novel", - Common(genre="Comics & Graphic Novels", title="The Clique"), - ) - - def test_spy(self): - # Results are dominated by title matches, which is probably - # fine, since people don't really think of "Spy" as a genre, - # and people who do type in "spy" looking for spy books will - # find them. - self.search("Spy", Common(title=re.compile("(spy|spies)", re.I))) - - def test_espionage(self): - self.search( - "Espionage", - Common( - genre=re.compile("(espionage|history|crime|thriller)"), - ), - ) - - def test_food(self): - self.search("food", Common(genre=re.compile("(cook|diet)"))) - - def test_mystery(self): - self.search("mystery", Common(genre="Mystery")) - - def test_agatha_christie_mystery(self): - # Genre and author -- we should get nothing but mysteries by - # Agatha Christie. - self.search( - "agatha christie mystery", - [ - SpecificGenre(genre="Mystery", author="Agatha Christie"), - Common(author="Agatha Christie", threshold=1), - ], - ) - - def test_british_mystery(self): - # Genre and keyword - self.search( - "British mysteries", - Common( - genre="Mystery", summary=re.compile("british|london|england|scotland") - ), - ) - - def test_finance(self): - # Keyword - self.search( - "Finance", - Common(genre=re.compile("(business|finance)"), first_must_match=False), - ) - - def test_constitution(self): - # Keyword - self.search( - "Constitution", - Common(genre=re.compile("(politic|history)"), first_must_match=False), - ) - - def test_deep_poems(self): - # This appears to be a search for poems which are deep. - self.search("deep poems", Common(genre="Poetry")) - - -class TestSubjectMatch(SearchTest): - # Search for a specific subject, more specific than a genre. 
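For subject and genre queries like the ones below, Common asserts that a minimum share of the hits carry the expected classification: threshold is that share, minimum is an absolute floor, and first_must_match=False waives the separate requirement on the top hit. A standalone sketch of the ratio part of the check (the real assert_ratio() helper is defined earlier in this file):

import re
from types import SimpleNamespace

def subject_match_ratio(hits, pattern):
    # Fraction of hits whose subject matches the pattern.
    matched = [h for h in hits if pattern.search(getattr(h, "subject", "") or "")]
    return len(matched) / len(hits) if hits else 0.0

hits = [
    SimpleNamespace(subject="Alien abduction"),
    SimpleNamespace(subject="Science fiction"),
    SimpleNamespace(subject="Gardening"),
]
pattern = re.compile("(alien|extraterrestrial|science fiction)", re.I)
assert subject_match_ratio(hits, pattern) >= 0.5  # two of the three hits match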
- - def test_alien_misspelled(self): - self.search( - "allien", - Common( - subject=re.compile("(alien|extraterrestrial|science fiction)"), - first_must_match=False, - ), - ) - - def test_alien_misspelled_2(self): - self.search( - "aluens", - Common( - subject=re.compile("(alien|extraterrestrial|science fiction)"), - first_must_match=False, - ), - ) - - @known_to_fail - def test_anime_genre(self): - # 'anime' and 'manga' are not subject classifications we get - # from our existing vendors. We have a lot of these books but - # they're not classified under those terms. - # - # So we get a few title matches for "Anime" and then go into - # books about animals. - self.search("anime", Common(subject=re.compile("(manga|anime)"))) - - def test_astrophysics(self): - # Keyword - self.search( - "Astrophysics", - Common( - genre="Science", - subject=re.compile("(astrophysics|astronomy|physics|space|science)"), - ), - ) - - def test_anxiety(self): - self.search( - "anxiety", - Common(genre=re.compile("(psychology|self-help)"), first_must_match=False), - ) - - def test_beauty_hacks(self): - # NOTE: fails on both versions. The user was obviously looking for a specific - # type of book; ideally, the search results would return at least one relevant - # one. Instead, all of the top results are either books about computer hacking - # or romance novels. - self.search( - "beauty hacks", - AtLeastOne(subject=re.compile("(self-help|style|grooming|personal)")), - ) - - def test_character_classification(self): - # Although we check a book's description, it's very difficult - # to find a good query that singles this out. - - # However, by searching for a hyperspecific subject matter - # classification, we can find a series of books that only has - # one word of overlap with the subject matter classification. - self.search( - "Gastner, Sheriff (Fictitious character)", - SpecificSeries(series="Bill Gastner Mystery"), - ) - - def test_college_essay(self): - self.search( - "College essay", - Common(genre=re.compile("study aids"), subject=re.compile("college")), - ) - - @known_to_fail - def test_da_vinci(self): - # Someone who searches for "da vinci" is almost certainly - # looking entirely for books _about_ Da Vinci. - # - # TODO: The first few results are good but then we go into - # "Da Vinci Code" territory. Maybe that's fine, though. - self.search( - "Da Vinci", - Common(genre=re.compile("(biography|art)"), first_must_match=False), - ) - - @known_to_fail - def test_da_vinci_missing_space(self): - # NOTE: Books in the "Davina Graham" series are taking up most - # of the top search results. - self.search( - "Davinci", - Common( - genre=re.compile("(biography|art)"), - first_must_match=False, - threshold=0.3, - ), - ) - - @known_to_fail - def test_dirtbike(self): - # NOTE: This gets no results at all. Searching "dirt bike" - # (two words) renders more relevant results, but still not - # enough for the test to pass. - self.search( - "dirtbike", Common(subject=re.compile("(bik|bicycle|sports|nature|travel)")) - ) - - def test_greek_romance(self): - # This person might be searching for romance novels or for - # something like "Essays on the Greek Romances." - self.search( - "Greek romance", - [ - Common(genre="Romance", first_must_match=False), - AtLeastOne(title=re.compile("greek romance")), - ], - ) - - def test_ice_cream(self): - # There are a lot of books about making ice cream. The search results - # correctly present those before looking for non-cooking "artisan" books. 
- self.search( - "Artisan ice cream", Common(genre=re.compile("cook"), threshold=0.9) - ) - - def test_information_technology(self): - # The first result is a title match. - self.search( - "information technology", - Common( - subject=re.compile("(information technology|computer)"), - first_must_match=False, - ), - ) - - def test_louis_xiii(self): - # There aren't very many books in the collection about Louis - # XIII, but he is the basis for the king in "The Three - # Musketeers", so that's not a bad answer. - self.search("Louis xiii", AtLeastOne(title="The Three Musketeers")) - - def test_managerial_skills(self): - self.search( - "managerial skills", Common(subject=re.compile("(business|management)")) - ) - - def test_manga(self): - # This has the same problem as the 'anime' test above -- - # we have tons of manga but it's not classified as "manga". - self.search( - "manga", - [ - Common(title=re.compile("manga")), - Common(subject=re.compile("(manga|art|comic)")), - ], - ) - - def test_meditation(self): - self.search("Meditation", Common(genre=re.compile("(self-help|mind|spirit)"))) - - def test_music_theory(self): - # Keywords - self.search( - "music theory", - Common(genre="Music", subject=re.compile("(music theory|musical theory)")), - ) - - def test_native_american(self): - # Keyword - self.search( - "Native american", - [ - Common( - genre=re.compile("history"), - subject=re.compile("(america|u.s.)"), - first_must_match=False, - ) - ], - ) - - def test_native_american_misspelled(self): - # Keyword, misspelled - self.search( - "Native amerixan", - [ - Common( - genre=re.compile("history"), - subject=re.compile("(america|u.s.)"), - first_must_match=False, - threshold=0.4, - ) - ], - ) - - def test_ninjas(self): - self.search("ninjas", Common(title=re.compile("ninja"))) - - def test_ninjas_misspelled(self): - # NOTE: The first result is "Ningyo", which does look a - # lot like "Ningas"... - self.search("ningas", Common(title=re.compile("ninja"), first_must_match=False)) - - def test_pattern_making(self): - self.search("Pattern making", AtLeastOne(subject=re.compile("crafts"))) - - def test_plant_based(self): - self.search( - "Plant based", Common(subject=re.compile("(cooking|food|nutrition|health)")) - ) - - def test_prank(self): - self.search("prank", Common(title=re.compile("prank"))) - - def test_prank_plural(self): - self.search("pranks", Common(title=re.compile("prank"))) - - def test_presentations(self): - self.search( - "presentations", - Common( - subject=re.compile( - "(language arts|business presentations|business|management)" - ) - ), - ) - - def test_python_programming(self): - # This is tricky because 'python' means a lot of different - # things. - self.search( - "python programming", - [ - # Most works will show up because of a title match -- verify that we're talking about - # Python as a programming language. - Common( - title=re.compile("python", re.I), - subject=re.compile("(computer technology|programming)", re.I), - threshold=0.8, - first_must_match=False, - ) - ], - ) - - def test_sewing(self): - self.search( - "Sewing", - [ - FirstMatch(title=re.compile("sewing")), - Common(title=re.compile("sewing")), - ], - ) - - def test_supervising(self): - # Keyword - self.search("supervising", Common(genre="Business", first_must_match=False)) - - def test_tennis(self): - # We will get sports books with "Tennis" in the title. 
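Uncommon, defined near the top of this file, is Common with negate=True (and a default threshold of 1), so successes and failures trade places: the pattern is expected to stay rare in the results. The fuzzy-confounder tests further down pair a Common with an Uncommon evaluator to keep near-miss terms out; a toy illustration of that inversion:

import re
from types import SimpleNamespace

def title_match_ratio(hits, pattern):
    return sum(1 for h in hits if pattern.search(h.title)) / len(hits)

hits = [
    SimpleNamespace(title="Hamlet"),
    SimpleNamespace(title="Hamlet's Ghost"),
    SimpleNamespace(title="The Amulet of Samarkand"),
]
# Common(title=re.compile("hamlet")) wants this share to be high ...
assert title_match_ratio(hits, re.compile("hamlet", re.I)) >= 0.5
# ... while Uncommon(title=re.compile("amulet|harlem|tablet")) wants it low.
assert title_match_ratio(hits, re.compile("amulet|harlem|tablet", re.I)) < 0.5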
- self.search( - "tennis", - Common( - title=re.compile("Tennis", re.I), - genre=re.compile("(Sports|Games)", re.I), - ), - ) - - @known_to_fail - def test_texas_fair(self): - # There exist a few books about the Texas state fair, but none of them - # are in the collection, so the best outcome is that the results will - # include a lot of books about Texas. - # - # TODO: "books about" really skews the results here -- lots of - # title matches. - self.search( - "books about texas like the fair", Common(title=re.compile("texas")) - ) - - def test_witches(self): - self.search("witches", Common(subject=re.compile("witch"))) - - -class TestFuzzyConfounders(SearchTest): - """Test searches on very distinct terms that are near each other in - Levenstein distance. - """ - - # amulet / hamlet / harlem / tablet - def test_amulet(self): - self.search( - "amulet", - [ - Common(title_or_subtitle=re.compile("amulet")), - Uncommon(title_or_subtitle=re.compile("hamlet|harlem|tablet")), - ], - ) - - def test_hamlet(self): - self.search( - "Hamlet", - [ - Common(title_or_subtitle="Hamlet"), - Uncommon(title_or_subtitle=re.compile("amulet|harlem|tablet")), - ], - ) - - def test_harlem(self): - self.search( - "harlem", - [ - Common(title_or_subtitle=re.compile("harlem")), - Uncommon(title_or_subtitle=re.compile("amulet|hamlet|tablet")), - ], - ) - - def test_tablet(self): - self.search( - "tablet", - [ - Common(title_or_subtitle=re.compile("tablet")), - Uncommon(title_or_subtitle=re.compile("amulet|hamlet|harlem")), - ], - ) - - # baseball / basketball - def test_baseball(self): - self.search( - "baseball", - [ - Common(title=re.compile("baseball")), - Uncommon(title=re.compile("basketball")), - ], - ) - - def test_basketball(self): - self.search( - "basketball", - [ - Common(title=re.compile("basketball")), - Uncommon(title=re.compile("baseball")), - ], - ) - - # car / war - def test_car(self): - self.search( - "car", - # There is a book called "Car Wars", so we can't - # completely prohibit 'war' from showing up. - [ - Common(title=re.compile("car")), - Uncommon(title=re.compile("war"), threshold=0.1), - ], - ) - - def test_war(self): - self.search( - "war", [Common(title=re.compile("war")), Uncommon(title=re.compile("car"))] - ) - - -class TestTravel(VariantSearchTest): - # Searches for places that are likely to be searches for travel guides - # (rather than history books, names of novels, etc). - - EVALUATOR = Common( - subject=re.compile("(travel|guide|fodors)"), first_must_match=False - ) - - @known_to_fail - def test_california(self): - # NOTE: This fails due to a large number of title matches. - self.search("California") - - def test_new_england(self): - self.search("new england") - - def test_toronto(self): - self.search("toronto") - - -class TestSeriesMatch(SearchTest): - @known_to_fail - def test_dinosaur_cove(self): - # NYPL's collection doesn't have any books in this series . - self.search("dinosaur cove", SpecificSeries(series="Dinosaur Cove")) - - def test_poldi(self): - # NYPL's collection only has one book from this series. - self.search("Auntie poldi", FirstMatch(series="Auntie Poldi")) - - def test_39_clues(self): - # We have many books in this series. - self.search("39 clues", SpecificSeries(series="the 39 clues")) - - def test_maggie_hope(self): - # We have many books in this series. - self.search("Maggie hope", SpecificSeries(series="Maggie Hope", threshold=0.9)) - - def test_game_of_thrones(self): - # People often search for the name of the TV show, but the - # series name is different. 
There are so many books about the - # TV show that results are dominated by title matches, but - # there is also a novel called "A Game of Thrones", and we - # find that. - self.search( - "game of thrones", - [ - Common(title=re.compile("Game of Thrones", re.I)), - AtLeastOne(series="a song of ice and fire"), - ], - ) - - def test_harry_potter(self): - # This puts foreign-language titles above English titles, but - # that's fine because our search document doesn't include a - # language filter. - # - # The very first result is an exact title match -- a guide to - # the film series. - self.search( - "Harry potter", - SpecificSeries( - series="Harry Potter", threshold=0.9, first_must_match=False - ), - ) - - def test_maisie_dobbs(self): - # Misspelled proper noun - self.search("maise dobbs", SpecificSeries(series="Maisie Dobbs", threshold=0.5)) - - def test_gossip_girl(self): - self.search( - "Gossip girl", - SpecificSeries( - series="Gossip Girl", - author=re.compile("cecily von ziegesar"), - ), - ) - - def test_gossip_girl_misspelled(self): - # Typo in the first character of a word. - self.search( - "Gossip hirl", - SpecificSeries( - series="Gossip Girl", - author=re.compile("cecily von ziegesar"), - ), - ) - - def test_magic(self): - # This book isn't in the collection, but the results include other books from - # the same series. - self.search("Frogs and french kisses", AtLeastOne(series="Magic in Manhattan")) - - def test_goosebumps(self): - self.search( - "goosebumps", - SpecificSeries( - series="Goosebumps", - author="R. L. Stine", - ), - ) - - def test_goosebump_singular(self): - self.search( - "goosebump", - SpecificSeries( - series="Goosebumps", - author="R. L. Stine", - ), - ) - - def test_goosebumps_misspelled(self): - self.search( - "goosebump", - SpecificSeries( - series="Goosebumps", - author="R. L. Stine", - ), - ) - - def test_severance(self): - # We only have one of these titles. - # - # Searching for 'severance' alone is going to get title - # matches, which is as it should be. - self.search("severance trilogy", AtLeastOne(series="The Severance Trilogy")) - - def test_severance_misspelled(self): - # Slightly misspelled - self.search("severence trilogy", AtLeastOne(series="The Severance Trilogy")) - - def test_hunger_games(self): - self.search("the hunger games", SpecificSeries(series="The Hunger Games")) - - def test_hunger_games_misspelled(self): - self.search("The hinger games", SpecificSeries(series="The Hunger Games")) - - def test_mockingjay(self): - self.search( - "The hunger games mockingjay", - [FirstMatch(title="Mockingjay"), SpecificSeries(series="The Hunger Games")], - ) - - def test_i_funny(self): - self.search( - "i funny", - SpecificSeries(series="I, Funny", author="Chris Grabenstein"), - ) - - def test_foundation(self): - # Series and full author. This should only get Foundation - # books *by Isaac Asimov*, not books in the same series by - # other authors. - self.search( - "Isaac asimov foundation", - [ - FirstMatch(title="Foundation"), - SpecificSeries(series="Foundation", author="Isaac Asimov"), - ], - ) - - def test_dark_tower(self): - # There exist two completely unrelated books called "The Dark - # Tower"--it's fine for one of those to be the first result. - self.search( - "The dark tower", - [ - SpecificSeries( - series="The Dark Tower", - author="Stephen King", - first_must_match=False, - ) - ], - ) - - def test_science_comics(self): - # We don't have a .series match for "science comics" but - # we do have one title match, which shows up first. 
- - # TODO: Since this is two different genre names we should - # test the hypothesis that the requestor wants the intersection - # of two genres. - self.search( - "Science comics", - [ - FirstMatch(title=re.compile("^science comics")), - ], - ) - - def test_who_is(self): - # These children's biographies don't have .series set but - # are clearly part of a series. - # - # Because those books don't have .series set, the matches - # happen solely through title, so unrelated books like "Who Is - # Rich?" appear to be part of the series. - self.search("who is", SpecificSeries(series="Who Is")) - - def test_who_was(self): - # From the same series of biographies as test_who_is(). - self.search("who was", SpecificSeries(series="Who Was")) - - def test_wimpy_kid_misspelled(self): - # Series name contains the wrong stopword ('the' vs 'a') - self.search( - "dairy of the wimpy kid", SpecificSeries(series="Diary of a Wimpy Kid") - ) - - -class TestSeriesTitleMatch(SearchTest): - """Test a search that tries to match a specific book in a series.""" - - def test_39_clues_specific_title(self): - self.search( - "39 clues maze of bones", - [ - FirstMatch(title="The Maze of Bones"), - SpecificSeries(series="the 39 clues"), - ], - ) - - def test_harry_potter_specific_title(self): - # The first result is the requested title. - # - # NOTE: It would be good if other results came be from the - # same series, but this doesn't happen much compared to other, - # similar tests. We get more partial title matches. - self.search( - "chamber of secrets", - [ - FirstMatch(title="Harry Potter and the Chamber of Secrets"), - SpecificSeries(series="Harry Potter", threshold=0.2), - ], - ) - - @known_to_fail - def test_wimpy_kid_specific_title(self): - # The first result is the requested title. Other results - # are from the same series. - # - # NOTE: The title match is too powerful -- "Wimpy Kid" - # overrides "Dog Days" - self.search( - "dairy of the wimpy kid dog days", - [ - FirstMatch(title="Dog Days", author="Jeff Kinney"), - SpecificSeries(series="Diary of a Wimpy Kid", author="Jeff Kinney"), - ], - ) - - @known_to_fail - def test_foundation_specific_title_by_number(self): - # NOTE: we don't have series position information for this series, - # and we don't search it, so there's no way to make this work. - self.search( - "Isaac Asimov foundation book 1", - FirstMatch(series="Foundation", title="Foundation"), - ) - - @known_to_fail - def test_survivors_specific_title(self): - # NOTE: This gives a lot of title matches for "Survivor" - # or "Survivors". Theoretically we could use "book 1" - # as a signal that we only want a series match. - self.search( - "survivors book 1", - [ - Common(series="Survivors"), - FirstMatch(title="The Empty City"), - ], - ) - - -# Classes that test many different kinds of searches for a particular -# series. -# -class TestISurvived(VariantSearchTest): - # Test different ways of spelling "I Survived" - # .series is not set for these books so we check the title. - EVALUATOR = Common(title=re.compile("^i survived ")) - - def test_correct_spelling(self): - self.search("i survived") - - def test_incorrect_1(self): - self.search("i survied") - - @known_to_fail - def test_incorrect_2(self): - # NOTE: This gives good results overall but the first - # match is "I Had to Survive", which is understandable - # but not the best match. 
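TestISurvived and the classes that follow are VariantSearchTests: every test method pushes a different spelling through search() against the single class-level EVALUATOR, so adding a spelling is one line. A hypothetical extra variant, assuming the class machinery above (VariantSearchTest, Common, and the module's re import); the query string is illustrative, not one of the recorded patron searches:

class TestISurvivedExtraVariant(VariantSearchTest):
    # Same expectation as TestISurvived: results should be "I Survived ..." titles.
    EVALUATOR = Common(title=re.compile("^i survived "))

    def test_missing_vowels(self):
        self.search("i survvd")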
- self.search("i survive") - - def test_incorrect_3(self): - self.search("i survided") - - -class TestDorkDiaries(VariantSearchTest): - # Test different ways of spelling "Dork Diaries" - EVALUATOR = SpecificAuthor(re.compile("Rachel .* Russell", re.I)) - - def test_correct_spelling(self): - self.search("dork diaries") - - def test_misspelling_and_number(self): - self.search("dork diarys #11") - - @known_to_fail - def test_misspelling_with_punctuation(self): - self.search("doke diaries.") - - def test_singular(self): - self.search("dork diary") - - def test_misspelling_1(self): - self.search("dork diarys") - - @known_to_fail - def test_misspelling_2(self): - self.search("doke dirares") - - @known_to_fail - def test_misspelling_3(self): - self.search("doke dares") - - @known_to_fail - def test_misspelling_4(self): - self.search("doke dires") - - def test_misspelling_5(self): - self.search("dork diareis") - - -class TestMyLittlePony(VariantSearchTest): - # Test different ways of spelling "My Little Pony" - - # .series is not set for these books so we check the title. - EVALUATOR = Common(title=re.compile("my little pony")) - - def test_correct_spelling(self): - self.search("my little pony") - - @known_to_fail - def test_misspelling_1(self): - # NOTE: This gets a title match on "Cousin Pons" - self.search("my little pon") - - def test_misspelling_2(self): - self.search("my little ponie") - - -class TestLanguageRestriction(SearchTest): - # Verify that adding the name of a language restricts the results - # to books in that language. - # - # NOTE: We don't parse language out of queries, so if any of these - # work it's because the name of the language is present in some - # other field. - - def test_language_espanol(self): - # "Espanol" is itself a Spanish word, so it would mainly show - # up in metadata for Spanish titles. - self.search("espanol", Common(language="spa")) - - @known_to_fail - def test_language_spanish(self): - self.search("spanish", Common(language="spa")) - - @known_to_fail - def test_author_with_language(self): - self.search( - "Pablo escobar spanish", FirstMatch(author="Pablo Escobar", language="spa") - ) - - def test_gatos(self): - # Searching for a Spanish word should mostly bring up books in Spanish, - # since that's where the word would be used. - # - # However, 'gatos' also shows up in English, e.g. in place names. - self.search("gatos", Common(language="spa", threshold=0.7)) - - -class TestAwardSearch(SearchTest): - # Attempts to find books that won particular awards. - - @known_to_fail - def test_hugo(self): - # This has big problems because the name of the award is also - # a very common personal name. - self.search( - "hugo award", - [ - Common(summary=re.compile("hugo award")), - Uncommon(author="Victor Hugo"), - Uncommon(series=re.compile("hugo")), - ], - ) - - def test_nebula(self): - self.search("nebula award", Common(summary=re.compile("nebula award"))) - - def test_nebula_no_award(self): - # This one does great -- the award is the most common - # use of the word "nebula". - self.search("nebula", Common(summary=re.compile("nebula award"))) - - def test_world_fantasy(self): - # This award contains the name of a genre. - self.search( - "world fantasy award", - Common(summary=re.compile("world fantasy award"), first_must_match=False), - ) - - @known_to_fail - def test_tiptree_award(self): - # This award is named after an author. We don't want their - # books -- we want the award winners. 
- self.search( - "tiptree award", - [ - Common(summary=re.compile("tiptree award")), - Uncommon(author=re.compile("james tiptree")), - ], - ) - - @known_to_fail - def test_newberry(self): - # Tends to get author matches. - self.search("newbery", Common(summary=re.compile("newbery medal"))) - - @known_to_fail - def test_man_booker(self): - # This gets author and title matches. - self.search( - "man booker prize", - Common(summary=re.compile("man booker prize"), first_must_match=False), - ) - - def test_award_winning(self): - # NOTE: It's unclear how to validate these results, but it's - # more likely an award-winning book will mention "award" in - # its summary than in its title. - self.search( - "award-winning", - [ - Common(summary=re.compile("award"), threshold=0.5), - Uncommon(title=re.compile("award"), threshold=0.5), - ], - ) - - @known_to_fail - def test_staff_picks(self): - # We're looking for books that are this library's staff picks, - # not books attributed to some company's "staff". - # - # We don't know which books are staff picks, but we can check - # that the obvious wrong answers don't show up. - self.search( - "staff picks", - [ - Uncommon(author=re.compile("(staff|picks)")), - Uncommon(title=re.compile("(staff|picks)")), - ], - ) - - -class TestCharacterMatch(SearchTest): - # These searches are best understood as an attempt to find books - # featuring certain fictional characters. - def test_3_little_pigs(self): - self.search( - "3 little pigs", - [ - AtLeastOne(title=re.compile("three little pigs")), - Common(title=re.compile("pig")), - ], - ) - - @known_to_fail - def test_3_little_pigs_more_precise(self): - # NOTE: This would require that '3' and 'three' be analyzed - # the same way. - self.search( - "3 little pigs", - FirstMatch(title="Three Little Pigs"), - ) - - def test_batman(self): - self.search("batman book", Common(title=re.compile("batman"))) - - @known_to_fail - def test_batman_two_words(self): - # Patron is searching for 'batman' but treats it as two words. - self.search("bat man book", Common(title=re.compile("batman"))) - - def test_christian_grey(self): - # This search uses a character name to stand in for a series. - self.search( - "christian grey", FirstMatch(author=re.compile(r"E.\s*L.\s*James", re.I)) - ) - - def test_spiderman_hyphenated(self): - self.search("spider-man", Common(title=re.compile("spider-man"))) - - @known_to_fail - def test_spiderman_one_word(self): - # NOTE: There are some Spider-Man titles but not as many as - # with the hyphen. - self.search("spiderman", Common(title=re.compile("spider-man"))) - - @known_to_fail - def test_spiderman_run_on(self): - # NOTE: This gets no results at all. - self.search("spidermanbook", Common(title=re.compile("spider-man"))) - - def test_teen_titans(self): - self.search("teen titans", Common(title=re.compile("^teen titans")), limit=5) - - @known_to_fail - def test_teen_titans_girls(self): - # We don't gender books so we can't deliver results tailored - # to 'teen titans girls', but we should at least give - # _similar_ results to 'teen titans' and not go off - # on tangents because of the 'girls' part. - self.search( - "teen titans girls", Common(title=re.compile("^teen titans")), limit=5 - ) - - def test_thrawn(self): - # "Thrawn" is a specific title but the patron may be referring - # to a series of books featuring this character (though it's - # not the official name of the series), so we check beyond the - # first result. 
- self.search( - "thrawn", - [ - FirstMatch(title="Thrawn"), - Common( - author="Timothy Zahn", - series=re.compile("star wars", re.I), - threshold=0.9, - ), - ], - ) - - -class TestAgeRangeRestriction(SearchTest): - # Verify that adding an age range restricts the results returned - # to contain exclusively children's books. - - def all_children(self, q): - # Verify that this search finds nothing but books for children. - self.search(q, Common(audience="Children", threshold=1)) - - def mostly_adult(self, q): - # Verify that this search finds mostly books for grown-ups. - self.search(q, Common(audience="Adult", first_must_match=False)) - - def test_black(self): - self.all_children("black age 3-5") - self.mostly_adult("black") - - def test_island(self): - self.all_children("island age 3-5") - self.mostly_adult("island") - - def test_panda(self): - self.all_children("panda age 3-5") - # We don't call mostly_adult() because 'panda' on its own - # finds mostly children's books. - - def test_chapter_books(self): - # Chapter books are a book format aimed at a specific - # age range. - self.search("chapter books", Common(target_age=(6, 10))) - - def test_chapter_books_misspelled_1(self): - # NOTE: We don't do fuzzy matching on things that would become - # filter terms. When this works, it's because of fuzzy title - # matches and description matches. - self.search("chapter bookd", Common(target_age=(6, 10))) - - @known_to_fail - def test_chapter_books_misspelled_2(self): - # This fails for a similar reason as misspelled_1, though it - # actually does a little better -- only the first result is - # bad. - self.search("chaptr books", Common(target_age=(6, 10))) - - @known_to_fail - def test_grade_and_subject(self): - # NOTE: this doesn't work because we don't parse grade numbers - # when they're spelled out, only when they're provided as - # digits. - self.search( - "Seventh grade science", - [Common(target_age=(12, 13)), Common(genre="Science")], - ) - - -class TestSearchOnStopwords(SearchTest): - # These tests verify our ability to search, when necessary, using - # words that are normally stripped out as stopwords. - def test_black_and_the_blue(self): - # This is a real book title that is almost entirely stopwords. - # Putting in a few words of the title will find that specific - # title even if most of the words are stopwords. - self.search("the black and", FirstMatch(title="The Black and the Blue")) - - @known_to_fail - def test_the_real(self): - # This is vague, but we get "The Real" results - # over just "Real" results. - # - # NOTE: These results are very good, but the first result is - # "Tiger: The Real Story", which is a subtitle match. A title match - # should be better. - self.search("the real", Common(title=re.compile("The Real", re.I))) - - def test_nothing_but_stopwords(self): - # If we always stripped stopwords, this would match nothing, - # but we get the best results we can manage -- e.g. 
- # "History of Florence and of the Affairs of Italy" - self.search( - "and of the", Common(title_or_subtitle=re.compile("and of the", re.I)) - ) - - -_db = production_session() -library = None - -index = ExternalSearchIndex(_db) -SearchTest.searcher = Searcher(library, index) - - -def teardown_module(): - failures = SearchTest.expected_failures - if failures: - logging.info("%d tests were expected to fail, and did.", len(failures)) - successes = SearchTest.unexpected_successes - if successes: - logging.info("%d tests passed unexepectedly:", len(successes)) - for success in successes: - logging.info( - "Line #%d: %s", - success.__code__.co_firstlineno, - success.__name__, - ) From 3be267ea67944c408593d2edba4b97cbb2eaf058 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 16:45:10 -0300 Subject: [PATCH 058/262] Bump types-psycopg2 from 2.9.21.13 to 2.9.21.14 (#1406) Bumps [types-psycopg2](https://github.com/python/typeshed) from 2.9.21.13 to 2.9.21.14. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-psycopg2 dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0cd72f5b8a..b3832ad325 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4098,13 +4098,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.13" +version = "2.9.21.14" description = "Typing stubs for psycopg2" optional = false python-versions = "*" files = [ - {file = "types-psycopg2-2.9.21.13.tar.gz", hash = "sha256:662e6d7b03d89e3bac6aaf2892a97f2cca287f861e693dcefc96ca2e996642c5"}, - {file = "types_psycopg2-2.9.21.13-py3-none-any.whl", hash = "sha256:3ee7c32918d18c133bd0b4d92db7e1bd4c5f78a2ff217d74d4e3b6a09c95d506"}, + {file = "types-psycopg2-2.9.21.14.tar.gz", hash = "sha256:bf73a0ac4da4e278c89bf1b01fc596d5a5ac7a356cfe6ac0249f47b9e259f868"}, + {file = "types_psycopg2-2.9.21.14-py3-none-any.whl", hash = "sha256:cd9c5350631f3bc6184ec8d48f2ed31d4ea660f89d0fffe78239450782f383c5"}, ] [[package]] From 2915e765831209dd60c4e214a0646ab4dcc4174a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 16:57:28 -0300 Subject: [PATCH 059/262] Bump types-pyyaml from 6.0.12.11 to 6.0.12.12 (#1409) Bumps [types-pyyaml](https://github.com/python/typeshed) from 6.0.12.11 to 6.0.12.12. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pyyaml dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index b3832ad325..e675129019 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4145,13 +4145,13 @@ files = [ [[package]] name = "types-pyyaml" -version = "6.0.12.11" +version = "6.0.12.12" description = "Typing stubs for PyYAML" optional = false python-versions = "*" files = [ - {file = "types-PyYAML-6.0.12.11.tar.gz", hash = "sha256:7d340b19ca28cddfdba438ee638cd4084bde213e501a3978738543e27094775b"}, - {file = "types_PyYAML-6.0.12.11-py3-none-any.whl", hash = "sha256:a461508f3096d1d5810ec5ab95d7eeecb651f3a15b71959999988942063bf01d"}, + {file = "types-PyYAML-6.0.12.12.tar.gz", hash = "sha256:334373d392fde0fdf95af5c3f1661885fa10c52167b14593eb856289e1855062"}, + {file = "types_PyYAML-6.0.12.12-py3-none-any.whl", hash = "sha256:c05bc6c158facb0676674b7f11fe3960db4f389718e19e62bd2b84d6205cfd24"}, ] [[package]] From 047239bfe649cc58d53f7ddbe12dcf26ee0a898e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 16:57:51 -0300 Subject: [PATCH 060/262] Bump types-requests from 2.31.0.3 to 2.31.0.5 (#1410) Bumps [types-requests](https://github.com/python/typeshed) from 2.31.0.3 to 2.31.0.5. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e675129019..fa7236dfd5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4156,13 +4156,13 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.3" +version = "2.31.0.5" description = "Typing stubs for requests" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.3.tar.gz", hash = "sha256:d5d7a08965fca12bedf716eaf5430c6e3d0da9f3164a1dba2a7f3885f9ebe3c0"}, - {file = "types_requests-2.31.0.3-py3-none-any.whl", hash = "sha256:938f51653c757716aeca5d72c405c5e2befad8b0d330e3b385ce7f148e1b10dc"}, + {file = "types-requests-2.31.0.5.tar.gz", hash = "sha256:e4153c2a4e48dcc661600fa5f199b483cdcbd21965de0b5e2df26e93343c0f57"}, + {file = "types_requests-2.31.0.5-py3-none-any.whl", hash = "sha256:e2523825754b2832e04cdc1e731423390e731457890113a201ebca8ad9b40427"}, ] [package.dependencies] From 382a3c1dafea07891a200899be1ef06cc96362dd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 25 Sep 2023 16:58:15 -0300 Subject: [PATCH 061/262] Bump types-jsonschema from 4.17.0.10 to 4.19.0.0 (#1407) Bumps [types-jsonschema](https://github.com/python/typeshed) from 4.17.0.10 to 4.19.0.0. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-jsonschema dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 195 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 192 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index fa7236dfd5..e1f6ce7a47 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2382,6 +2382,89 @@ files = [ {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, ] +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + [[package]] name = "multipledispatch" version = "0.6.0" @@ -3633,6 +3716,22 @@ docs = ["pydata-sphinx-theme", "sphinx (>=1.7.5)", "sphinx-book-theme", "sphinx- style = ["css-inline"] test = ["Pillow", "css-inline", "jinja2", "matplotlib", "openpyxl", "pandas", "pytest"] +[[package]] +name = "referencing" +version = "0.8.11" +description = "JSON Referencing + Python" +optional = false +python-versions = ">=3.8" +files = [ + {file = "referencing-0.8.11-py3-none-any.whl", hash = "sha256:f7edae1893624cd85f3370920ee1f863440ec9c349daba5da197a2fc713e98ec"}, + {file = "referencing-0.8.11.tar.gz", hash = "sha256:7cca400800f8bde596ad88159c25407fed907da2b1254402ff2c7a8e61eb229e"}, +] + +[package.dependencies] +attrs = "*" +pyrsistent = "*" +yarl = "*" + [[package]] name = "regex" version = "2022.4.24" @@ -4076,15 +4175,18 @@ Flask = ">=2.0.0" [[package]] name = "types-jsonschema" -version = "4.17.0.10" +version = "4.19.0.0" description = "Typing stubs for jsonschema" optional = false python-versions = "*" files = [ - {file = "types-jsonschema-4.17.0.10.tar.gz", hash = "sha256:8e979db34d69bc9f9b3d6e8b89bdbc60b3a41cfce4e1fb87bf191d205c7f5098"}, - {file = "types_jsonschema-4.17.0.10-py3-none-any.whl", hash = "sha256:3aa2a89afbd9eaa6ce0c15618b36f02692a621433889ce73014656f7d8caf971"}, + {file = "types-jsonschema-4.19.0.0.tar.gz", hash = "sha256:55ba17305fa35c674389a5c47f98b4c314441bdeadaf8541144acdf14dc5c4b0"}, + {file = "types_jsonschema-4.19.0.0-py3-none-any.whl", hash = "sha256:c7d9553807c31a572bdbdc4e18ab1b2afa648e588aeca52fa931605b0fc1150e"}, ] +[package.dependencies] +referencing = "*" + [[package]] name = "types-pillow" version = "10.0.0.3" @@ -4424,6 +4526,93 @@ files = [ [package.dependencies] lxml = ">=3.8" +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = 
"yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = 
"yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + [[package]] name = "zipp" version = "3.11.0" From b9fe886c94563184c8234fd7fbf0b147612e686e Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 26 Sep 2023 13:55:52 +0530 Subject: [PATCH 062/262] PP-490 Fixed the lcp and adobe drm tag names in the annotators (#1405) * Fixed the lcp and adobe drm tag names in the annotators Had to make feed metadata properties strict via a dataclass since there were differences in how the drm information is consumed in the metadata vs within a link entry --- core/feed/acquisition.py | 6 ++-- core/feed/annotator/circulation.py | 8 +++-- core/feed/annotator/loan_and_hold.py | 4 +-- core/feed/navigation.py | 6 ++-- core/feed/serializer/opds.py | 27 +++++++++++++-- core/feed/serializer/opds2.py | 8 ++--- core/feed/types.py | 21 +++++++----- tests/api/feed/test_library_annotator.py | 35 ++++++++++---------- tests/api/feed/test_opds2_serializer.py | 7 ++-- tests/api/feed/test_opds_acquisition_feed.py | 8 ++--- tests/api/feed/test_opds_serializer.py | 35 +++++++++++++++----- 11 files changed, 105 insertions(+), 60 deletions(-) diff --git a/core/feed/acquisition.py b/core/feed/acquisition.py index 12713e1daf..56ab4ed8f3 100644 --- a/core/feed/acquisition.py +++ b/core/feed/acquisition.py @@ -74,9 +74,9 @@ def __init__( def generate_feed(self, annotate: bool = True) -> None: """Generate the feed metadata and links. We assume the entries have already been annotated.""" - self._feed.add_metadata("id", text=self.url) - self._feed.add_metadata("title", text=self.title) - self._feed.add_metadata("updated", text=strftime(utc_now())) + self._feed.metadata.id = self.url + self._feed.metadata.title = self.title + self._feed.metadata.updated = strftime(utc_now()) self._feed.add_link(href=self.url, rel="self") if annotate: self.annotator.annotate_feed(self._feed) diff --git a/core/feed/annotator/circulation.py b/core/feed/annotator/circulation.py index 1538f253b4..995d597914 100644 --- a/core/feed/annotator/circulation.py +++ b/core/feed/annotator/circulation.py @@ -1503,7 +1503,7 @@ def adobe_id_tags( drm_licensor = FeedEntryType.create( vendor=vendor_id, clientToken=FeedEntryType(text=token) ) - cached = {"licensor": drm_licensor} + cached = {"drm_licensor": drm_licensor} self._adobe_id_cache[cache_key] = cached else: @@ -1525,7 +1525,9 @@ def lcp_key_retrieval_tags(self, active_loan: Loan) -> Dict[str, FeedEntryType]: hashed_passphrase: LCPHashedPassphrase = ( lcp_credential_factory.get_hashed_passphrase(db, active_loan.patron) ) - response["hashed_passphrase"] = FeedEntryType(text=hashed_passphrase.hashed) + response["lcp_hashed_passphrase"] = FeedEntryType( + text=hashed_passphrase.hashed + ) except LCPError: # The patron's passphrase wasn't generated yet and not present in the database. 
pass @@ -1544,7 +1546,7 @@ def add_patron(self, feed: FeedData) -> None: ] = self.patron.authorization_identifier patron_tag = FeedEntryType.create(**patron_details) - feed.add_metadata("patron", patron_tag) + feed.metadata.patron = patron_tag def add_authentication_document_link(self, feed_obj: FeedData) -> None: """Create a tag that points to the circulation diff --git a/core/feed/annotator/loan_and_hold.py b/core/feed/annotator/loan_and_hold.py index efdf42977a..8880326e2c 100644 --- a/core/feed/annotator/loan_and_hold.py +++ b/core/feed/annotator/loan_and_hold.py @@ -87,8 +87,8 @@ def annotate_feed(self, feed: FeedData) -> None: link = self.user_profile_management_protocol_link if link.href is not None: feed.add_link(link.href, rel=link.rel) - for name, value in tags.items(): - feed.add_metadata(name, feed_entry=value) + if "drm_licensor" in tags: + feed.metadata.drm_licensor = tags["drm_licensor"] def annotate_work_entry( self, entry: WorkEntry, updated: Optional[datetime] = None diff --git a/core/feed/navigation.py b/core/feed/navigation.py index 8c4031cc01..70abc7e218 100644 --- a/core/feed/navigation.py +++ b/core/feed/navigation.py @@ -51,9 +51,9 @@ def navigation( return feed def generate_feed(self) -> None: - self._feed.add_metadata("title", text=self.title) - self._feed.add_metadata("id", text=self.url) - self._feed.add_metadata("updated", text=strftime(utc_now())) + self._feed.metadata.title = self.title + self._feed.metadata.id = self.url + self._feed.metadata.updated = strftime(utc_now()) self._feed.add_link(href=self.url, rel="self") if not self.lane.children: # We can't generate links to children, since this Worklist diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index db4c67c7dd..8a1f25146b 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -13,6 +13,7 @@ DataEntry, FeedData, FeedEntryType, + FeedMetadata, IndirectAcquisition, WorkEntryData, ) @@ -78,9 +79,7 @@ def serialize_feed( if feed.entrypoint: serialized.set(f"{{{OPDSFeed.SIMPLIFIED_NS}}}entrypoint", feed.entrypoint) - for name, metadata in feed.metadata.items(): - element = self._serialize_feed_entry(name, metadata) - serialized.append(element) + serialized.extend(self._serialize_feed_metadata(feed.metadata)) for entry in feed.entries: if entry.computed: @@ -113,6 +112,28 @@ def serialize_feed( etree.indent(serialized) return self.to_string(serialized) + def _serialize_feed_metadata(self, metadata: FeedMetadata) -> List[etree._Element]: + tags = [] + # Compulsory title + tags.append(self._tag("title", metadata.title or "")) + + if metadata.id: + tags.append(self._tag("id", metadata.id)) + if metadata.updated: + tags.append(self._tag("updated", metadata.updated)) + if metadata.patron: + tags.append(self._serialize_feed_entry("patron", metadata.patron)) + if metadata.drm_licensor: + tags.append(self._serialize_feed_entry("licensor", metadata.drm_licensor)) + if metadata.lcp_hashed_passphrase: + tags.append( + self._serialize_feed_entry( + "hashed_passphrase", metadata.lcp_hashed_passphrase + ) + ) + + return tags + def serialize_work_entry(self, feed_entry: WorkEntryData) -> etree._Element: entry: etree._Element = OPDSFeed.entry() diff --git a/core/feed/serializer/opds2.py b/core/feed/serializer/opds2.py index 26f59a3275..74597da7e1 100644 --- a/core/feed/serializer/opds2.py +++ b/core/feed/serializer/opds2.py @@ -54,10 +54,10 @@ def serialize_feed( def _serialize_metadata(self, feed: FeedData) -> Dict[str, Any]: fmeta = feed.metadata metadata: Dict[str, Any] = 
{} - if title := fmeta.get("title"): - metadata["title"] = title.text - if item_count := fmeta.get("items_per_page"): - metadata["itemsPerPage"] = int(item_count.text or 0) + if fmeta.title: + metadata["title"] = fmeta.title + if fmeta.items_per_page is not None: + metadata["itemsPerPage"] = fmeta.items_per_page return metadata def serialize_opds_message(self, entry: OPDSMessage) -> Dict[str, Any]: diff --git a/core/feed/types.py b/core/feed/types.py index 1b6d1d4000..f81b70fb36 100644 --- a/core/feed/types.py +++ b/core/feed/types.py @@ -201,6 +201,17 @@ def __init__( self.license_pool = license_pool +@dataclass +class FeedMetadata(BaseModel): + title: Optional[str] = None + id: Optional[str] = None + updated: Optional[str] = None + items_per_page: Optional[int] = None + patron: Optional[FeedEntryType] = None + drm_licensor: Optional[FeedEntryType] = None + lcp_hashed_passphrase: Optional[FeedEntryType] = None + + class DataEntryTypes: NAVIGATION = "navigation" @@ -222,7 +233,7 @@ class FeedData(BaseModel): facet_links: List[Link] = field(default_factory=list) entries: List[WorkEntry] = field(default_factory=list) data_entries: List[DataEntry] = field(default_factory=list) - metadata: Dict[str, FeedEntryType] = field(default_factory=dict) + metadata: FeedMetadata = field(default_factory=lambda: FeedMetadata()) entrypoint: Optional[str] = None class Config: @@ -230,11 +241,3 @@ class Config: def add_link(self, href: str, **kwargs: Any) -> None: self.links.append(Link(href=href, **kwargs)) - - def add_metadata( - self, name: str, feed_entry: Optional[FeedEntryType] = None, **kwargs: Any - ) -> None: - if not feed_entry: - self.metadata[name] = FeedEntryType(**kwargs) - else: - self.metadata[name] = feed_entry diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py index 7a304fd663..2670225ac2 100644 --- a/tests/api/feed/test_library_annotator.py +++ b/tests/api/feed/test_library_annotator.py @@ -262,8 +262,7 @@ def test_fulfill_link_includes_device_registration_tags( pool, loan, other_delivery_mechanism ) assert link is not None - for name, child in link: - assert name != "licensor" + assert link.drm_licensor is None # No new Credential has been associated with the patron. assert old_credentials == patron.credentials @@ -273,8 +272,8 @@ def test_fulfill_link_includes_device_registration_tags( link = annotator_fixture.annotator.fulfill_link( pool, loan, adobe_delivery_mechanism ) - licensor = getattr(link, "licensor", None) - assert None != licensor + assert link is not None + assert link.drm_licensor is not None # An Adobe ID-specific identifier has been created for the patron. 
[adobe_id_identifier] = [ @@ -293,7 +292,8 @@ def test_fulfill_link_includes_device_registration_tags( expect = annotator_fixture.annotator.adobe_id_tags( adobe_id_identifier.credential ) - assert expect.get("licensor") == licensor + assert link is not None + assert expect.get("drm_licensor") == link.drm_licensor def test_no_adobe_id_tags_when_vendor_id_not_configured( self, annotator_fixture: LibraryAnnotatorFixture @@ -318,12 +318,12 @@ def test_adobe_id_tags_when_vendor_id_configured( patron_identifier = "patron identifier" element = annotator_fixture.annotator.adobe_id_tags(patron_identifier) - assert "licensor" in element + assert "drm_licensor" in element assert vendor_id_fixture.TEST_VENDOR_ID == getattr( - element["licensor"], "vendor", None + element["drm_licensor"], "vendor", None ) - token = getattr(element["licensor"], "clientToken", None) + token = getattr(element["drm_licensor"], "clientToken", None) assert token is not None # token.text is a token which we can decode, since we know # the secret. @@ -338,7 +338,7 @@ def test_adobe_id_tags_when_vendor_id_configured( # object that renders to the same data. same_tag = annotator_fixture.annotator.adobe_id_tags(patron_identifier) assert same_tag is not element - assert same_tag["licensor"].dict() == element["licensor"].dict() + assert same_tag["drm_licensor"].dict() == element["drm_licensor"].dict() # If the Adobe Vendor ID configuration is present but # incomplete, adobe_id_tags does nothing. @@ -375,15 +375,16 @@ def test_lcp_acquisition_link_contains_hashed_passphrase( link = annotator_fixture.annotator.fulfill_link( pool, loan, other_delivery_mechanism ) - assert not hasattr(link, "hashed_passphrase") + assert link is not None + assert link.lcp_hashed_passphrase is None # The fulfill link for lcp DRM includes hashed_passphrase link = annotator_fixture.annotator.fulfill_link( pool, loan, lcp_delivery_mechanism ) - hashed_passphrase = getattr(link, "hashed_passphrase", None) - assert hashed_passphrase is not None - assert hashed_passphrase.text == hashed_password.hashed + assert link is not None + assert link.lcp_hashed_passphrase is not None + assert link.lcp_hashed_passphrase.text == hashed_password.hashed def test_default_lane_url(self, annotator_fixture: LibraryAnnotatorFixture): default_lane_url = annotator_fixture.annotator.default_lane_url() @@ -1411,14 +1412,14 @@ def test_drm_device_registration_feed_tags( # The feed-level tag has the drm:scheme attribute set. assert ( "http://librarysimplified.org/terms/drm/scheme/ACS" - == feed_tag["licensor"].scheme + == feed_tag["drm_licensor"].scheme ) # If we remove that attribute, the feed-level tag is the same as the # generic tag. 
- assert feed_tag["licensor"].dict() != generic_tag["licensor"].dict() - delattr(feed_tag["licensor"], "scheme") - assert feed_tag["licensor"].dict() == generic_tag["licensor"].dict() + assert feed_tag["drm_licensor"].dict() != generic_tag["drm_licensor"].dict() + delattr(feed_tag["drm_licensor"], "scheme") + assert feed_tag["drm_licensor"].dict() == generic_tag["drm_licensor"].dict() def test_borrow_link_raises_unfulfillable_work( self, annotator_fixture: LibraryAnnotatorFixture diff --git a/tests/api/feed/test_opds2_serializer.py b/tests/api/feed/test_opds2_serializer.py index 2b2bfcdf68..f18dbcd958 100644 --- a/tests/api/feed/test_opds2_serializer.py +++ b/tests/api/feed/test_opds2_serializer.py @@ -6,6 +6,7 @@ Author, FeedData, FeedEntryType, + FeedMetadata, IndirectAcquisition, Link, WorkEntry, @@ -20,9 +21,9 @@ class TestOPDS2Serializer: def test_serialize_feed(self): feed = FeedData( - metadata=dict( - items_per_page=FeedEntryType(text="20"), - title=FeedEntryType(text="Title"), + metadata=FeedMetadata( + title="Title", + items_per_page=20, ) ) w = WorkEntry( diff --git a/tests/api/feed/test_opds_acquisition_feed.py b/tests/api/feed/test_opds_acquisition_feed.py index 035e799a33..a4b794679e 100644 --- a/tests/api/feed/test_opds_acquisition_feed.py +++ b/tests/api/feed/test_opds_acquisition_feed.py @@ -1402,11 +1402,11 @@ def test_navigation_with_sublanes( feed = response._feed - assert "Navigation" == feed.metadata["title"].text + assert "Navigation" == feed.metadata.title [self_link] = feed.links assert "http://navigation" == self_link.href assert "self" == self_link.rel - assert "http://navigation" == feed.metadata["id"].text + assert "http://navigation" == feed.metadata.id [fantasy, romance] = sorted(feed.data_entries, key=lambda x: x.title or "") assert data.fantasy.display_name == fantasy.title @@ -1436,11 +1436,11 @@ def test_navigation_without_sublanes( session, "Navigation", "http://navigation", data.fantasy, MockAnnotator() ) parsed = feed._feed - assert "Navigation" == parsed.metadata["title"].text + assert "Navigation" == parsed.metadata.title [self_link] = parsed.links assert "http://navigation" == self_link.href assert "self" == self_link.rel - assert "http://navigation" == parsed.metadata["id"].text + assert "http://navigation" == parsed.metadata.id [fantasy] = parsed.data_entries assert "All " + data.fantasy.display_name == fantasy.title diff --git a/tests/api/feed/test_opds_serializer.py b/tests/api/feed/test_opds_serializer.py index afe28b71c5..da0254a6f0 100644 --- a/tests/api/feed/test_opds_serializer.py +++ b/tests/api/feed/test_opds_serializer.py @@ -96,20 +96,37 @@ def test__serialize_acquistion_link(self): copies_total="1", availability_status="available", indirect_acquisitions=[IndirectAcquisition(type="indirect")], + lcp_hashed_passphrase=FeedEntryType(text="passphrase"), + drm_licensor=FeedEntryType.create( + vendor="vendor", clientToken=FeedEntryType(text="token") + ), ) element = OPDS1Serializer()._serialize_acquistion_link(link) assert element.tag == "link" assert dict(element.attrib) == dict(href=link.href) - for child in element: - if child.tag == f"{{{OPDSFeed.OPDS_NS}}}indirectAcquisition": - assert child.get("type") == "indirect" - elif child.tag == f"{{{OPDSFeed.OPDS_NS}}}holds": - assert child.get("total") == "0" - elif child.tag == f"{{{OPDSFeed.OPDS_NS}}}copies": - assert child.get("total") == "1" - elif child.tag == f"{{{OPDSFeed.OPDS_NS}}}availability": - assert child.get("status") == "available" + tests = [ + ( + 
f"{{{OPDSFeed.OPDS_NS}}}indirectAcquisition", + lambda child: child.get("type") == "indirect", + ), + (f"{{{OPDSFeed.OPDS_NS}}}holds", lambda child: child.get("total") == "0"), + (f"{{{OPDSFeed.OPDS_NS}}}copies", lambda child: child.get("total") == "1"), + ( + f"{{{OPDSFeed.OPDS_NS}}}availability", + lambda child: child.get("status") == "available", + ), + ("hashed_passphrase", lambda child: child.text == "passphrase"), + ( + f"{{{OPDSFeed.DRM_NS}}}licensor", + lambda child: child.get(f"{{{OPDSFeed.DRM_NS}}}vendor") == "vendor" + and child[0].text == "token", + ), + ] + for tag, test_fn in tests: + children = element.findall(tag) + assert len(children) == 1 + assert test_fn(children[0]) def test_serialize_work_entry(self): data = WorkEntryData( From ed4dc5e5dd277007986c70adeb030d6622369a92 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 26 Sep 2023 15:37:55 +0530 Subject: [PATCH 063/262] Switched the _cluster API body to the AWS supported flat format (#1411) --- core/search/service.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/core/search/service.py b/core/search/service.py index aad21dc2e8..8efb159a51 100644 --- a/core/search/service.py +++ b/core/search/service.py @@ -168,8 +168,9 @@ def __init__(self, client: OpenSearch, base_revision_name: str): self._indexes_created: List[str] = [] # Documents are not allowed to automatically create indexes. + # AWS OpenSearch only accepts the "flat" format self._client.cluster.put_settings( - body={"persistent": {"action": {"auto_create_index": "false"}}} + body={"persistent": {"action.auto_create_index": "false"}} ) def indexes_created(self) -> List[str]: From 3ec27e4abe553b957ab64b00dff58ea0c318a750 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Sep 2023 21:28:44 -0400 Subject: [PATCH 064/262] Bump pydantic from 1.10.12 to 1.10.13 (#1416) Bumps [pydantic](https://github.com/pydantic/pydantic) from 1.10.12 to 1.10.13. - [Release notes](https://github.com/pydantic/pydantic/releases) - [Changelog](https://github.com/pydantic/pydantic/blob/main/HISTORY.md) - [Commits](https://github.com/pydantic/pydantic/compare/v1.10.12...v1.10.13) --- updated-dependencies: - dependency-name: pydantic dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 74 ++++++++++++++++++++++++++--------------------------- 1 file changed, 37 insertions(+), 37 deletions(-) diff --git a/poetry.lock b/poetry.lock index e1f6ce7a47..771738f58e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3076,47 +3076,47 @@ files = [ [[package]] name = "pydantic" -version = "1.10.12" +version = "1.10.13" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.12-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a1fcb59f2f355ec350073af41d927bf83a63b50e640f4dbaa01053a28b7a7718"}, - {file = "pydantic-1.10.12-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b7ccf02d7eb340b216ec33e53a3a629856afe1c6e0ef91d84a4e6f2fb2ca70fe"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8fb2aa3ab3728d950bcc885a2e9eff6c8fc40bc0b7bb434e555c215491bcf48b"}, - {file = "pydantic-1.10.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:771735dc43cf8383959dc9b90aa281f0b6092321ca98677c5fb6125a6f56d58d"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ca48477862372ac3770969b9d75f1bf66131d386dba79506c46d75e6b48c1e09"}, - {file = "pydantic-1.10.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a5e7add47a5b5a40c49b3036d464e3c7802f8ae0d1e66035ea16aa5b7a3923ed"}, - {file = "pydantic-1.10.12-cp310-cp310-win_amd64.whl", hash = "sha256:e4129b528c6baa99a429f97ce733fff478ec955513630e61b49804b6cf9b224a"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0d191db0f92dfcb1dec210ca244fdae5cbe918c6050b342d619c09d31eea0cc"}, - {file = "pydantic-1.10.12-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:795e34e6cc065f8f498c89b894a3c6da294a936ee71e644e4bd44de048af1405"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69328e15cfda2c392da4e713443c7dbffa1505bc9d566e71e55abe14c97ddc62"}, - {file = "pydantic-1.10.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2031de0967c279df0d8a1c72b4ffc411ecd06bac607a212892757db7462fc494"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ba5b2e6fe6ca2b7e013398bc7d7b170e21cce322d266ffcd57cca313e54fb246"}, - {file = "pydantic-1.10.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2a7bac939fa326db1ab741c9d7f44c565a1d1e80908b3797f7f81a4f86bc8d33"}, - {file = "pydantic-1.10.12-cp311-cp311-win_amd64.whl", hash = "sha256:87afda5539d5140cb8ba9e8b8c8865cb5b1463924d38490d73d3ccfd80896b3f"}, - {file = "pydantic-1.10.12-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:549a8e3d81df0a85226963611950b12d2d334f214436a19537b2efed61b7639a"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598da88dfa127b666852bef6d0d796573a8cf5009ffd62104094a4fe39599565"}, - {file = "pydantic-1.10.12-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ba5c4a8552bff16c61882db58544116d021d0b31ee7c66958d14cf386a5b5350"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c79e6a11a07da7374f46970410b41d5e266f7f38f6a17a9c4823db80dadf4303"}, - {file = "pydantic-1.10.12-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:ab26038b8375581dc832a63c948f261ae0aa21f1d34c1293469f135fa92972a5"}, - {file = "pydantic-1.10.12-cp37-cp37m-win_amd64.whl", hash = "sha256:e0a16d274b588767602b7646fa05af2782576a6cf1022f4ba74cbb4db66f6ca8"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6a9dfa722316f4acf4460afdf5d41d5246a80e249c7ff475c43a3a1e9d75cf62"}, - {file = "pydantic-1.10.12-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a73f489aebd0c2121ed974054cb2759af8a9f747de120acd2c3394cf84176ccb"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b30bcb8cbfccfcf02acb8f1a261143fab622831d9c0989707e0e659f77a18e0"}, - {file = "pydantic-1.10.12-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2fcfb5296d7877af406ba1547dfde9943b1256d8928732267e2653c26938cd9c"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2f9a6fab5f82ada41d56b0602606a5506aab165ca54e52bc4545028382ef1c5d"}, - {file = "pydantic-1.10.12-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:dea7adcc33d5d105896401a1f37d56b47d443a2b2605ff8a969a0ed5543f7e33"}, - {file = "pydantic-1.10.12-cp38-cp38-win_amd64.whl", hash = "sha256:1eb2085c13bce1612da8537b2d90f549c8cbb05c67e8f22854e201bde5d98a47"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef6c96b2baa2100ec91a4b428f80d8f28a3c9e53568219b6c298c1125572ebc6"}, - {file = "pydantic-1.10.12-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c076be61cd0177a8433c0adcb03475baf4ee91edf5a4e550161ad57fc90f523"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d5a58feb9a39f481eda4d5ca220aa8b9d4f21a41274760b9bc66bfd72595b86"}, - {file = "pydantic-1.10.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e5f805d2d5d0a41633651a73fa4ecdd0b3d7a49de4ec3fadf062fe16501ddbf1"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1289c180abd4bd4555bb927c42ee42abc3aee02b0fb2d1223fb7c6e5bef87dbe"}, - {file = "pydantic-1.10.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5d1197e462e0364906cbc19681605cb7c036f2475c899b6f296104ad42b9f5fb"}, - {file = "pydantic-1.10.12-cp39-cp39-win_amd64.whl", hash = "sha256:fdbdd1d630195689f325c9ef1a12900524dceb503b00a987663ff4f58669b93d"}, - {file = "pydantic-1.10.12-py3-none-any.whl", hash = "sha256:b749a43aa51e32839c9d71dc67eb1e4221bb04af1033a32e3923d46f9effa942"}, - {file = "pydantic-1.10.12.tar.gz", hash = "sha256:0fe8a415cea8f340e7a9af9c54fc71a649b43e8ca3cc732986116b3cb135d303"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:efff03cc7a4f29d9009d1c96ceb1e7a70a65cfe86e89d34e4a5f2ab1e5693737"}, + {file = "pydantic-1.10.13-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ecea2b9d80e5333303eeb77e180b90e95eea8f765d08c3d278cd56b00345d01"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1740068fd8e2ef6eb27a20e5651df000978edce6da6803c2bef0bc74540f9548"}, + {file = "pydantic-1.10.13-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84bafe2e60b5e78bc64a2941b4c071a4b7404c5c907f5f5a99b0139781e69ed8"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:bc0898c12f8e9c97f6cd44c0ed70d55749eaf783716896960b4ecce2edfd2d69"}, + {file = "pydantic-1.10.13-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:654db58ae399fe6434e55325a2c3e959836bd17a6f6a0b6ca8107ea0571d2e17"}, + {file = "pydantic-1.10.13-cp310-cp310-win_amd64.whl", hash = "sha256:75ac15385a3534d887a99c713aa3da88a30fbd6204a5cd0dc4dab3d770b9bd2f"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c553f6a156deb868ba38a23cf0df886c63492e9257f60a79c0fd8e7173537653"}, + {file = "pydantic-1.10.13-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5e08865bc6464df8c7d61439ef4439829e3ab62ab1669cddea8dd00cd74b9ffe"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31647d85a2013d926ce60b84f9dd5300d44535a9941fe825dc349ae1f760df9"}, + {file = "pydantic-1.10.13-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:210ce042e8f6f7c01168b2d84d4c9eb2b009fe7bf572c2266e235edf14bacd80"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ae5dd6b721459bfa30805f4c25880e0dd78fc5b5879f9f7a692196ddcb5a580"}, + {file = "pydantic-1.10.13-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f8e81fc5fb17dae698f52bdd1c4f18b6ca674d7068242b2aff075f588301bbb0"}, + {file = "pydantic-1.10.13-cp311-cp311-win_amd64.whl", hash = "sha256:61d9dce220447fb74f45e73d7ff3b530e25db30192ad8d425166d43c5deb6df0"}, + {file = "pydantic-1.10.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4b03e42ec20286f052490423682016fd80fda830d8e4119f8ab13ec7464c0132"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f59ef915cac80275245824e9d771ee939133be38215555e9dc90c6cb148aaeb5"}, + {file = "pydantic-1.10.13-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a1f9f747851338933942db7af7b6ee8268568ef2ed86c4185c6ef4402e80ba8"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:97cce3ae7341f7620a0ba5ef6cf043975cd9d2b81f3aa5f4ea37928269bc1b87"}, + {file = "pydantic-1.10.13-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854223752ba81e3abf663d685f105c64150873cc6f5d0c01d3e3220bcff7d36f"}, + {file = "pydantic-1.10.13-cp37-cp37m-win_amd64.whl", hash = "sha256:b97c1fac8c49be29486df85968682b0afa77e1b809aff74b83081cc115e52f33"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c958d053453a1c4b1c2062b05cd42d9d5c8eb67537b8d5a7e3c3032943ecd261"}, + {file = "pydantic-1.10.13-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4c5370a7edaac06daee3af1c8b1192e305bc102abcbf2a92374b5bc793818599"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d6f6e7305244bddb4414ba7094ce910560c907bdfa3501e9db1a7fd7eaea127"}, + {file = "pydantic-1.10.13-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3a3c792a58e1622667a2837512099eac62490cdfd63bd407993aaf200a4cf1f"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c636925f38b8db208e09d344c7aa4f29a86bb9947495dd6b6d376ad10334fb78"}, + {file = "pydantic-1.10.13-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:678bcf5591b63cc917100dc50ab6caebe597ac67e8c9ccb75e698f66038ea953"}, + {file = "pydantic-1.10.13-cp38-cp38-win_amd64.whl", hash = "sha256:6cf25c1a65c27923a17b3da28a0bdb99f62ee04230c931d83e888012851f4e7f"}, + {file = "pydantic-1.10.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8ef467901d7a41fa0ca6db9ae3ec0021e3f657ce2c208e98cd511f3161c762c6"}, + {file = 
"pydantic-1.10.13-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:968ac42970f57b8344ee08837b62f6ee6f53c33f603547a55571c954a4225691"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9849f031cf8a2f0a928fe885e5a04b08006d6d41876b8bbd2fc68a18f9f2e3fd"}, + {file = "pydantic-1.10.13-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56e3ff861c3b9c6857579de282ce8baabf443f42ffba355bf070770ed63e11e1"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f00790179497767aae6bcdc36355792c79e7bbb20b145ff449700eb076c5f96"}, + {file = "pydantic-1.10.13-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:75b297827b59bc229cac1a23a2f7a4ac0031068e5be0ce385be1462e7e17a35d"}, + {file = "pydantic-1.10.13-cp39-cp39-win_amd64.whl", hash = "sha256:e70ca129d2053fb8b728ee7d1af8e553a928d7e301a311094b8a0501adc8763d"}, + {file = "pydantic-1.10.13-py3-none-any.whl", hash = "sha256:b87326822e71bd5f313e7d3bfdc77ac3247035ac10b0c0618bd99dcf95b1e687"}, + {file = "pydantic-1.10.13.tar.gz", hash = "sha256:32c8b48dcd3b2ac4e78b0ba4af3a2c2eb6048cb75202f0ea7b34feb740efc340"}, ] [package.dependencies] From 2693b1ff9d82da857d4a33beb1620644442d8933 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Sep 2023 01:34:30 +0000 Subject: [PATCH 065/262] Bump types-jsonschema from 4.19.0.0 to 4.19.0.3 (#1412) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 771738f58e..2953ffb627 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4175,13 +4175,13 @@ Flask = ">=2.0.0" [[package]] name = "types-jsonschema" -version = "4.19.0.0" +version = "4.19.0.3" description = "Typing stubs for jsonschema" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "types-jsonschema-4.19.0.0.tar.gz", hash = "sha256:55ba17305fa35c674389a5c47f98b4c314441bdeadaf8541144acdf14dc5c4b0"}, - {file = "types_jsonschema-4.19.0.0-py3-none-any.whl", hash = "sha256:c7d9553807c31a572bdbdc4e18ab1b2afa648e588aeca52fa931605b0fc1150e"}, + {file = "types-jsonschema-4.19.0.3.tar.gz", hash = "sha256:e0fc0f5d51fd0988bf193be42174a5376b0096820ff79505d9c1b66de23f0581"}, + {file = "types_jsonschema-4.19.0.3-py3-none-any.whl", hash = "sha256:5cedbb661e5ca88d95b94b79902423e3f97a389c245e5fe0ab384122f27d56b9"}, ] [package.dependencies] From ccca3d11fc459f2fac8b63357e50e147fd9c5f6b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Sep 2023 21:35:22 -0400 Subject: [PATCH 066/262] Bump types-requests from 2.31.0.5 to 2.31.0.6 (#1414) Bumps [types-requests](https://github.com/python/typeshed) from 2.31.0.5 to 2.31.0.6. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2953ffb627..d2800371cf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4258,13 +4258,13 @@ files = [ [[package]] name = "types-requests" -version = "2.31.0.5" +version = "2.31.0.6" description = "Typing stubs for requests" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-requests-2.31.0.5.tar.gz", hash = "sha256:e4153c2a4e48dcc661600fa5f199b483cdcbd21965de0b5e2df26e93343c0f57"}, - {file = "types_requests-2.31.0.5-py3-none-any.whl", hash = "sha256:e2523825754b2832e04cdc1e731423390e731457890113a201ebca8ad9b40427"}, + {file = "types-requests-2.31.0.6.tar.gz", hash = "sha256:cd74ce3b53c461f1228a9b783929ac73a666658f223e28ed29753771477b3bd0"}, + {file = "types_requests-2.31.0.6-py3-none-any.whl", hash = "sha256:a2db9cb228a81da8348b49ad6db3f5519452dd20a9c1e1a868c83c5fe88fd1a9"}, ] [package.dependencies] From 330eaf29a7267d619925faa20ab9f5fada95973a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Sep 2023 01:48:13 +0000 Subject: [PATCH 067/262] Bump levenshtein from 0.21.1 to 0.22.0 (#1413) --- poetry.lock | 216 +++++++++++++++++++++++++------------------------ pyproject.toml | 2 +- 2 files changed, 112 insertions(+), 106 deletions(-) diff --git a/poetry.lock b/poetry.lock index d2800371cf..1ca85b5391 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2003,113 +2003,119 @@ deprecated = "*" [[package]] name = "levenshtein" -version = "0.21.1" +version = "0.22.0" description = "Python extension for computing string edit distances and similarities." 
optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Levenshtein-0.21.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:59e5054c9dea821840af4623a4059c8f0ae56548a5eae8b9c7aaa0b3f1e33340"}, - {file = "Levenshtein-0.21.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:11694c6f7119d68cc199ff3b1407560c0efb0cc49f288169f28b2e032ee03cda"}, - {file = "Levenshtein-0.21.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f5f7ce639bea0f5e95a1f71963624b85521a39928a2a1bb0e66f6180facf5969"}, - {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39e8a1866325b6d54de4e7d1bffffaf4b4c8cbf0988f47f0f2e929edfbeb870d"}, - {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed73d619e203aad54e2e6119a2b58b7568a36bd50a547817d13618ea0acf4412"}, - {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50fbe01be99554f644657c32a9e3085369d23e8ccc540d855c683947d3b48b67"}, - {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675ba3afaa9e8ec393eb1eeee651697036e8391be54e6c28eae4bfdff4d5e64e"}, - {file = "Levenshtein-0.21.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c89a5ac319a80c131ca8d499ae0f7a91d4dd1dc3b2e9d8b095e991597b79c8f9"}, - {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f9e3a5f4386c8f1811153f309a0ba3dc47d17e81a6dd29aa22d3e10212a2fd73"}, - {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ea042ba262ea2a95d93c4d2d5879df956cf6c85ce22c037e3f0d4491182f10c5"}, - {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:622bc670b906c4bf219755625e9fa704ff07c561a90f1aa35f3f2d8ecd3ec088"}, - {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:f0e51ff6d5665884b0e39b4ae0ef4e2d2d0174147147db7a870ddc4123882212"}, - {file = "Levenshtein-0.21.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cc8eb12c48598b20b4b99128bc2bd62974dfb65204ceb37807480788b1e66e64"}, - {file = "Levenshtein-0.21.1-cp310-cp310-win32.whl", hash = "sha256:04d338c9153ddf70a32f324cf9f902fe94a6da82122b8037ccde969d4cc0a94b"}, - {file = "Levenshtein-0.21.1-cp310-cp310-win_amd64.whl", hash = "sha256:5a10fc3be2bfb05b03b868d462941e4099b680b7f358a90b8c6d7d5946e9e97c"}, - {file = "Levenshtein-0.21.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:938581ba87b306675bc41e21c2b2822a9eb83fb1a0e4a4903b7398d7845b22e3"}, - {file = "Levenshtein-0.21.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06da6c47aa459c725ee90dab467cd2f66956c5f9a43ddb51a0fe2496960f1d3e"}, - {file = "Levenshtein-0.21.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eea308d98c64dbea48ac351011c4adf66acd936c4de2bf9955826ba8435197e2"}, - {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a51974fcb8a94284325cb88b474b76227532a25b035938a46167bebd1646718e"}, - {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87edb05fc6e4eb14008433f02e89815a756fe4ecc32d7180bb757f26e4161e06"}, - {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:aee4f570652ad77961e5ab871d11fd42752e7d2117b08324a0c8801a7ee0a7c5"}, - {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:43a06b0b492e0d936deff751ad4757786ba7cb5eee510d53b6dfe92c924ff733"}, - {file = "Levenshtein-0.21.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:952e72f173a65f271dfee102b5571004b6594d4f199864ddead77115a2c147fd"}, - {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d3f855669e1399597f7a2670310cf20fc04a35c6c446dd70320398e9aa481b3d"}, - {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ca992783feaf1d6e25403340157fb584cf71371b094a575134393bba10b974fa"}, - {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:20361f42f6e7efa5853f69a41a272e9ecb90da284bec4312e42b58fa42b9a752"}, - {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:9bcb3abbe97975cc6a97baf24a3b6e0491472ecedbc0247a41eb2c8d73ecde5d"}, - {file = "Levenshtein-0.21.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:72b0b84adc52f4cf970a1bb276e76e115b30d693d6dbcd25fca0bcee85ca7cc7"}, - {file = "Levenshtein-0.21.1-cp311-cp311-win32.whl", hash = "sha256:4217ae380f42f825862eb8e2f9beca627fe9ab613f36e206842c003bb1affafc"}, - {file = "Levenshtein-0.21.1-cp311-cp311-win_amd64.whl", hash = "sha256:12bb3540e021c73c5d8796ecf8148afd441c4471731924a112bc31bc25abeabf"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a0fa251b3b4c561d2f650d9a61fb8980815492bb088a0a521236995a1872e171"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d4bf11b89d8d7a7707ae5cac1ef86ac4ff78491482df037289470db8f0378043"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91dca7085aa358da71fa50682fc8ff7e21365c99ef17dc1962a7bbf488003528"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f187f0929a35b6ddabc1324161e8c73ddbd4a7747249f10ec9ceaa793e904f"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d997da10fdf1a82e208fd1b05aba40705ca3f053919c84d2e952141d33e3ab3"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ed8f99e4e4ba8a43bb4fe0255606724f22069405fa1e3be679a2d90f74770e5"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:5acb7e84ccd619dcff6e04928fa8d8cc24f55bb2c9cdfe96620ed85b0a82a7c7"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:62dca15301bdba4ec7fcf53c39dd8d9c198194990cf035def3f47b7cb9c3213e"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:832951ad7b5ee0df8152f239a9fc602322da055264459dcf4d50d3ed68e68045"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:e8ab4d5acdd3ac17161539d9f2ea764497dc269dcd8dc722ae4a394c7b64ae7f"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:3c13450450d537ec7ede3781be72d72db37cb131943148c8ada58b34e143fc6f"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-win32.whl", hash = "sha256:267ad98befffeed90e73b8c644a297027adb81f61044843aeade7b4a44ccc7d7"}, - {file = "Levenshtein-0.21.1-cp36-cp36m-win_amd64.whl", hash = "sha256:d66d8f3ebde14840a310a557c8f69eed3e153f2477747365355d058208eea515"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:78d0fb5faef0413864c1b593e5261a840eaa47842b0fa4af7be4c09d90b24a14"}, - {file = 
"Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dda976c1dae2a0b41a109facc48d1d242c7acb30ab4c04d8421496da6e153aa"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1dc54aeb02f38a36f16bca6b0f9d07462686d92716424d9a4a3fdd11f3624528"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:463fd7558f25c477c7e4a59af35c661e133473f62bb02ed2c07c9c95e1c2dc66"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f00495a80c5850466f0a57ea874761f78079702e28b63a1b6573ad254f828e44"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:31aa08e8ddac402edd530aaf708ab085fea7299c499404989eabfde143377911"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9e96217a7c6a7d43071c830b1353a3ee669757ae477673f0fd3e3a97def6d410"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d83b8c0ce41e410af143bd3abef94e480d143fdb83e60a01bab9069bf565dada"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:309f134f3d42fa7df7efbbd7975f2331de8c36da3ebdb3fad59abae84268abba"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:267bc6725506571fd3c03afcc871fa5cbf3d2cb6e4bd11043790fa60cbb0f8a4"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4a6cd85ac5f7800e8127b3194fa02c59be735b6bdfe55b8516d094652235e038"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-win32.whl", hash = "sha256:13e87517ce788d71deaa73e37332a67c4085c13e58ea3a0218092d555d1872ce"}, - {file = "Levenshtein-0.21.1-cp37-cp37m-win_amd64.whl", hash = "sha256:918f2e0f590cacb30edb88e7eccbf71b340d5f080c9e69009f1f00dc24810a67"}, - {file = "Levenshtein-0.21.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d17c2ee8aa380c012b3ba015b87502934662c51b7609ef17366c76863e9551d6"}, - {file = "Levenshtein-0.21.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee847d3e49870e914074fd31c069a1aaba6f71bee650d41de48e7e4b11671bf0"}, - {file = "Levenshtein-0.21.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8d01425bd54c482ccbbc6d953633450a2bdbb7d12450d9eeba6073a6d0f06a3c"}, - {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bff4f236d1b6c556a77975812a4d51071181721f3a29c08b42e5c4aa11730957"}, - {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:35a603d952e9f286fe8053332862c8cff426f5d8a85ee962c3a0f597f4c463c4"}, - {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9546ded45fb3cf8773ade9c91de164c6cb2cb4927516289abd422a262e81906c"}, - {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79259b10f105f78853210d8769cf77ca55dac8c368dca33b4c10ffa8965e2543"}, - {file = "Levenshtein-0.21.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41e0e539638a27b5e90a5d46679375f93a1cb65cf06efe7c413cf76f71d3d467"}, - {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ccd0b89300a25decdb34d7c4efe2a971438015f552eeb416b8da12918cb3edc0"}, - {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_i686.whl", hash = 
"sha256:ef365ec78938597623d4fb96c8b0db423ab484fcfc00fae44c34b738b1eb1924"}, - {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e701b9dfb121faf71b0c5757485fc49e1b511b7b8a80034aa1f580488f8f872e"}, - {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e4c2fe1f49f1d8476fe44e243569d775c5454dca70a13be568430d2d2d760ea2"}, - {file = "Levenshtein-0.21.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:40a5e38d0c3e488d1dca5dc9c2691c000764813d4006c243f2ebd39e0b331e95"}, - {file = "Levenshtein-0.21.1-cp38-cp38-win32.whl", hash = "sha256:6c08879d0cf761cd750e976fda67bcc23cf1e485eaa030942e6628b876f4c6d8"}, - {file = "Levenshtein-0.21.1-cp38-cp38-win_amd64.whl", hash = "sha256:248348e94dee05c787b44f16533a366ec5bf8ba949c604ad0db69d0c872f3539"}, - {file = "Levenshtein-0.21.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:3824e9f75ec9f373fc8b4df23eae668918953487f5ff06db282ddcb3f9c802d2"}, - {file = "Levenshtein-0.21.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2e2ed817fa682243ef2e8a2728fcd0f9352d4e5edd104db44862d0bb55c75a7e"}, - {file = "Levenshtein-0.21.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:94a6ffd7257d12c64de34bc9f801a211e2daa624ec276305f8c67963a9896efa"}, - {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6833f8cefb96b8ccac457ad421866a74f4de973e7001699fcbbbe9ccb59a5c66"}, - {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8126d2b51621483823c6e31d16bc1f5a964ae976aab4f241bbe74ed19d93770"}, - {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58eaab403b77e62e096cbcbaf61728c8736f9f7a3e36a58fb663461e5d70144f"}, - {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47e6d66fe0110fd8e6efb1939d686099170c27b3ca838eab0c215f0781f05f06"}, - {file = "Levenshtein-0.21.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f5a1f28b34a15dd2d67bcc324f6661df8cfe66d6ec7ee7a64e921af8ae4c39b7"}, - {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c37609f4e460e570810ec5176c5cdf91c494a9979638f7fef5fd345597245d17"}, - {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:656c70814280c4002af89112f1457b6ad24c42dfba58dcb2047a249ae8ccdd04"}, - {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:863d507cba67de2fa66d5501ed1bc5029363d2b393662ac7d740dd0330c66aba"}, - {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:9437c2342937decf3cf5ac79d0b9497734897c0a09dc813378c97f2916b7aa76"}, - {file = "Levenshtein-0.21.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a1cd48db3d03adb88bf71b45de77b9720f96d3b9d5ab7a32304352baec482689"}, - {file = "Levenshtein-0.21.1-cp39-cp39-win32.whl", hash = "sha256:023dffdde576639e48cab3cc835bfaf9c441df7a8e2829bf20104868db6e4f72"}, - {file = "Levenshtein-0.21.1-cp39-cp39-win_amd64.whl", hash = "sha256:dcc712696d4332962ecab6e4df40d5126d7379c6612e6058ee2e9d3f924387e3"}, - {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9a8d60084e1c9e87ae247c601e331708de09ed23219b5e39af7c8e9115ab8152"}, - {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa6762f8ef1e7dfba101babe43de6edc541cbe64d33d816314ac67cd76c3979"}, - {file = 
"Levenshtein-0.21.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eec8a1eaaeadc217c15bc77d01bb29e146acdae73a0b2e9df1ad162263c9752e"}, - {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5da0e2dbddb98da890fb779823df991ad50f184b3d986b8c68784eecbb087f01"}, - {file = "Levenshtein-0.21.1-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:edac6490f84e8a0456cb40f6729d4199311ce50ca0ea4958572e1b7ea99f546c"}, - {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b33e2cbaca6f7d01092a28711605568dbc08a3bb7b796d8986bf5d0d651a0b09"}, - {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69a430ab564d286f309c19f7abed34fce9c144f39f984c609ee690dd175cc421"}, - {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f30474b2906301594c8fb64cb7492c6978290c466a717c4b5316887a18b77af5"}, - {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9817dca597abde9fc9571d56a7eca8bd667e9dfc0867b190f1e8b43ce4fde761"}, - {file = "Levenshtein-0.21.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:7d7e00e8cb45981386df9d3f99073ba7de59bdb739069766b32906421bb1026b"}, - {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c9a072cb0f6e90092c4323cd7731eb539a79ac360045dbe3cc49a123ba381fc5"}, - {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d880a87aca186342bc2fe16b064c3ed434d2a0c170c419f23b4e00261a5340a"}, - {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f282711a220d1bdf245da508e1fefdf7680d1f7482a094e37465674a7e6985ae"}, - {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdba9f8a7a98b0c4c0bc004b811fb31a96521cd264aeb5375898478e7703de4d"}, - {file = "Levenshtein-0.21.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:b2410469cc8fd0f42aa00e63063c42f8aff501996cd5424a5c904739bdaaf4fe"}, - {file = "Levenshtein-0.21.1.tar.gz", hash = "sha256:2e4fc4522f9bf73c6ab4cedec834783999b247312ec9e3d1435a5424ad5bc908"}, + {file = "Levenshtein-0.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7b5e165e4b36eea0df530a29a8b05c88d6bca01c652b0128f603be1f117e6ea1"}, + {file = "Levenshtein-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4f7ecd6669c94c28fdfb6be1561d2615a699823494140c382d9c58fece3d75b"}, + {file = "Levenshtein-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5138c2a8a62f5219c7d29ae077d2272c4e58626480b3748f48772e87a3e7fe9b"}, + {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fbc6377cc56d9f9b40785ed73b706b09f45c2117fb91a24230ad090d2bd5d8f"}, + {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a3a2b64965f79cd5db75b3207ad637175727fb188acee96a2c25989cb79eddc"}, + {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cef3132c6bd74e37706330206a87f7c165a2a5a67048bad986877fd83e13a44"}, + {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61af529827edb59610aaccf053508228e7205a07abbf9108fe25957c66c879b3"}, + {file = 
"Levenshtein-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acc4c9587d94053cbd314eb3d3372aa7c42282fced037c7ae597be8400b22e74"}, + {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:161815d2496221a361539122413d61b054e8881646a06129cc7328f65bffad8b"}, + {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b52823b4908cc7f4b3202242d6d632a3b021c01301906e08069071e939136be"}, + {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:76e216fcad971a5770a18a7cd97a4b0838974bdd54f073ebd9c3425a2efb7410"}, + {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0a11365daa4db76b538c0f48a63b1ae1cbc37e178bc81e3af818bf848bd345f7"}, + {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0d2c8208bc55e81f6192872c4bdb556fcbbd911a1107417c11ac9283648a356f"}, + {file = "Levenshtein-0.22.0-cp310-cp310-win32.whl", hash = "sha256:e49a4d8b9bbeceaf2618409ce0ba6cd83535b2ce8cf9144d5cb913728f17fffc"}, + {file = "Levenshtein-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:0a78fc02072e04b12711a1f01ed703cbffa852e1ff92edf9bf05d43e6044028b"}, + {file = "Levenshtein-0.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:8c9ea26ab65d4c35220801c73d59e181081db63b854de78b5645295c19880786"}, + {file = "Levenshtein-0.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:658b4085252d044390bf3e26eb52d0f8c4cc1bff7250711458d83ed3043b2a97"}, + {file = "Levenshtein-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:500aee88248bbe8bb6e33f60fff7d8fa2e0fa40c36589fe5382f5678770c0f90"}, + {file = "Levenshtein-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f61afd1b9c741d4c19d37473c045a581fc155f3c8f357f98c7c8caf306f3ad21"}, + {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5951b855d5111d27d6b330b5c31c882df030b86769899ba1c6a9bb819d15acd"}, + {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14702c06fbe59f78025b3a0c825b91ede14d55b96a049d34796f9b3771456e83"}, + {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:541e9feeb33fdcb8414c9b0f8bc2a6d11af4b746abf14899f8f0cad80b85ca03"}, + {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d40b14d9c95c77407c2ce9063e28f420f502609efbcf48f2ae240137c1b0297a"}, + {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18749dfc6778821d8aeecc0b993906a49749a256bc762fa6067493f22a7ddf8e"}, + {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:10216260b155e8ebd19c82c3864a2e5bead2020eb46936bfb69a26efc73053ac"}, + {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1167e7f58588b991a1c058631ad12e7e3882644e3842ebc2ec55fff9615caf8b"}, + {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f5d95b4a8b91e267b3e061e6838bc7beee4394da161e9d8cf5ead5412a3841"}, + {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:842544ce1cfb7a0edcb0b21cf78f2b271a9e1ba911e9b6e2e4fa753eaf67150e"}, + {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:610909d494f23f8d24775499796f25ad650315c4abb59260c2ebb82ff9e3323d"}, + {file = "Levenshtein-0.22.0-cp311-cp311-win32.whl", hash = 
"sha256:203cf2034ad636eaf2b4b2bd44dfe5822abe556b732ccb98394d5d0a26d2b045"}, + {file = "Levenshtein-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:282c97890466a02174bd7713395fa69764d415c7816d8624386e74c3a1c358d6"}, + {file = "Levenshtein-0.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:caf45bd4aadca4c08127c903fd02f5564438966c6ce1e6f30595332ff844e360"}, + {file = "Levenshtein-0.22.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abb60261fa00029681d89b2e72342209f9253d8cab5c040dc4eb8db4eb9c998d"}, + {file = "Levenshtein-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1ab939d05d506f9fc80603fa64a5c19a398eb927b1f188f97f60d9382e2a0efe"}, + {file = "Levenshtein-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:91f1945090b6a22527272f86354f1e352d94535e50855b29982c4a710d39ae2a"}, + {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d662ae4325af7865443674ddd0fe95df15cc962f75b27c93b5fe7286174d537"}, + {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16143f27641fc212e9745490948a0fbd42b8593058771737f0b020be1bacda33"}, + {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52d459efb8d9ad8a13317df38eb198f1b9ad4b669c8e00f0c3acf46e3468e8aa"}, + {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f29d764a31dba249cb8f44f9dfa3e4c3180f7fc73c457f5ec5a092f00a555b7"}, + {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:264b79ab26b03a1b099e40e4050451d4ae17cca514e0f06d8f2fc55a4cdd1ab5"}, + {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d29b09ea4cc45716b47d1ac0c34c0e952bf1de14e0d0578ea8bcde12e00d14eb"}, + {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:91cdfdebcf67210c89811c538bcf08dde237fde4aa35787ecf65621ae948a317"}, + {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:f839a1c2d0ba092ad1f571c817a1554a3a5fffad9ae8bd238307c7df4f95a042"}, + {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:e20dff9e11be7691b34dc49c5b58ed73b435d6720cfd1b37ee906f9696fc5a00"}, + {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:38b1e26f04c8fb73882e7409026b249e7c4ffccc97992a013a3102295eabf193"}, + {file = "Levenshtein-0.22.0-cp312-cp312-win32.whl", hash = "sha256:8604794aba363638e00f10b11f3c704524cd5d32624f83601ba05d362f51da2a"}, + {file = "Levenshtein-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:008f665f7c885fa9f6537dd35b82dc25cfbd03f3cda48a045d3fa189ce972e26"}, + {file = "Levenshtein-0.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:8958b21fae93c9210e56b9c53520ca3aad2d60f0ff44dc33605c40e562d23411"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc53b1f9e4f43fa28faad39c2622f285e6b250455cfed2b38daf2337c69be63a"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:271c9c4485beda8d23561cc7b046e1049a5beee09bb16f8dec6b756f572c59ed"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef72b265c445bf4dd0187b4a49565fcd184183419918330ed4ea7b66031c041d"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:549f9eb3073c03d8626c5a94e2768fb3de92af7ef93a97c5aa639427158660b4"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bbc77c2d59a394075c1023fa1555082602aadd923a8c90b5121f8a543c8870"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f56203d5e3805ea947cad41b70d3f88d07cd21a634653ef87e9c74e813b579ae"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4b27cfd9396e6c720cb13b3bdd35545afca92816d13649c1e8b99586f5d81754"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98bcfa889c7a97155fb2675506fb242710596cfcd6bcadf37e58cfe421968f47"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c1c52c58e8ff50c8d58c6fe2753a6f6d6bf56394dd86549879fd6cac032d8f7d"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:eaa3d2174132ff6bfa488c320f9e309018f003b819f942203bd508fe16d2b1a6"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:14fc8ed8ad16f619d0975df70e63bd5a5c7b88aabc4037b8395346ff7bdb250c"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-win32.whl", hash = "sha256:8f4fbc8b7ae61fe596f3a2256ea4122f61d7a58fe77d45a7e85a594e4e03c8c2"}, + {file = "Levenshtein-0.22.0-cp37-cp37m-win_amd64.whl", hash = "sha256:606d5f868e2579d067f153a0c57563f144e45173aa1e21d5c5fbf2ce5a65615e"}, + {file = "Levenshtein-0.22.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e52c339fcca07749469c1d48157eeeda6671594577ad080f0ccdd8583e9d9a8c"}, + {file = "Levenshtein-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c65695e1650fea3e42db16ae75e8b3dd04109e0b59eed9f1e206f7d5d15d2dc9"}, + {file = "Levenshtein-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:157d4138dbbdbc3756f6cc166860c447f2268ef07ee80f520583cb940fd37545"}, + {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13832c213c72ed6fc59e2bcb69f6e540a3f812e169ef51c07509d299c4b233a2"}, + {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:370be3bafde8f69051a12305af862e18280fbf72f7b819b3fabbec13e7d0cac9"}, + {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7821a1776bd4aee6f1075b6f48612eae21abc04dc2e91f5858bce85ebc44bd83"}, + {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e80b870a85f487cb151d38532d52d6a4303f05f257915c1a20c06f24809e1c"}, + {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e0da18c2ca3b76dfc26ea6cf1c15d2526f3c0e9609d0a790210940ae58a79be"}, + {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0fcf720cfbcc46dbdc1322a3f95bd2680844b127edb8eba3ab168b5cb41d4443"}, + {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:003635abca2da5c96788588e24aa97a8b7c45e9accdf8e82ae9975daac0b798b"}, + {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:61d861a17f7e69b191a29a3029d9048cf2d6c73ba0d47bd57e1f36f5eb8e79cd"}, + {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:2eb4d6b953586ceb5ffa13873302037cbd678f3cb3eb28e7dfa6bd7d9237366e"}, + {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:bb08576209cc180a48ec47ba8444bbab16d1a98a21e557b2b5420916c0366b9f"}, + {file = "Levenshtein-0.22.0-cp38-cp38-win32.whl", hash = "sha256:bc86bd217386768d323662f8c864e0dee90a573f222675b6834e972330f4ec39"}, + {file = "Levenshtein-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:ac3cf596cd7fc16f79c4013c4a4b7fb1ec73caf0169c332d99322039bc3811bf"}, + {file = "Levenshtein-0.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bad0649573449a03f549a56783b37f87f514c478134d6416b1d5160bf1fcfd93"}, + {file = "Levenshtein-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8db9c2682a0fb0a83c0eb54baef63d78808b62885d3288abfa16cb9f29e062b6"}, + {file = "Levenshtein-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3bc743a8ce662355513aad10d81ca2f865b600a4bc63adc4ca9575885042f63"}, + {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3faf5497ac7699b0b34b06dff5277334e410cdab43d4c86e8ca34fc1df92a781"}, + {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd79d847422bdaffe5dc46d018c56b22f5bcdf59bcbc2855ded1517669a241df"}, + {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98a27eb7a655a956b219374d2232e16b908fde5a750f839da36e2b9ecbfa35e0"}, + {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbbbd989c9511461e0742e53d52aaa3e8edff8858f825d29364785d88df080a5"}, + {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a50b25a72be543dfe99ce5383c5d6a44235bda1dcef2b1eb69dfbde8f75149c"}, + {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:83869023f190d21c7246f02700b8ff72b443e5ad0baa9decf1bc146451b5d9b0"}, + {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1d7e491c9bfb86aa6dfedeee88edd2e1a760c6c085490a6fa28c99328a95d0ff"}, + {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b977a803fd69e37ea4a3253e450aac84aadd092e245036bf935e9ce69eb9bd09"}, + {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7b5e2bdae788d237f0e11ba7c06762f1fdb181a8d200961bcc2cd631c63c158a"}, + {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:306b9ea4acce215415ee8d11b9a1fd1f5020ea7a8fbf995e4f9789fc8ec947d8"}, + {file = "Levenshtein-0.22.0-cp39-cp39-win32.whl", hash = "sha256:4c1568e82a4d14ea0a9db11cd58618a55164e721f4f8c445b5ab70d2b304f1f5"}, + {file = "Levenshtein-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:981b27f8ad1dab9871807fc4a50e4db52c9b3c39706aadc053415152e70c15ff"}, + {file = "Levenshtein-0.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:f71840265862ad507873593749219c2e130830784ccbb71253e093e9d29ac63d"}, + {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be6cc97ad71185e714d52997cf85bc8432fabc60b46ed8e6b30717ca5f9dacc8"}, + {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48c056cdfb269ffc3f4383471a1a35217120fb15995785bf277bf16561626f59"}, + {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:389f1e4dee92f2d91297dfa4595a409bd688a3009bcc93523ab66d78cc7548b2"}, + {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:26230f8ff50e72e82f3100d2f1153b3890fda9670bf8969755df7713484093ac"}, + {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:01b36cae9c5ddae8f178814e603a388968bc23059343b1b61fc396d72a51321f"}, + {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7bd018087add386d766b6926635168b1f83f440b8ce1bba8c497fac3a1995328"}, + {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5368c332632c2534060b8b63c9076a15370e4c35fbc2f22f45162713277aa239"}, + {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54670a6b626c5c2b96c5e9faaa8599c6e9a933a701441cfd82c01d1785b4dca5"}, + {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb2ac06a597e29a37d2ee9a2a91467b4790ff47cf67d724883fe2342d74e3100"}, + {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:49cea609250ec61e2b320afe9288c8a9ee91aa3978e249362af53ed9066f944e"}, + {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:692f28b632c3726ea55878f736b996457a1d2887b42a33474ee4c219b505798b"}, + {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7934381e902258b4a5f8e5cb56d45bd5da051763b7c8fb3acdaba1fdb91a197a"}, + {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2db7bab8d9865c51be9bf5006bc712cd30b31f2fcf09009470099ef07f21485"}, + {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a9015d600e4e0ad2339bc44c905019957f45228acfc8c441922d9550b106969"}, + {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:99c69647d56c90a3ea0d2c4bb252eb77d4724e0774f5102f098e7794125fc0cf"}, + {file = "Levenshtein-0.22.0.tar.gz", hash = "sha256:86d285d770551cb648d4fcfe5243449a479e694e56b65272dc6cbda879012051"}, ] [package.dependencies] @@ -4631,4 +4637,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "4a0a1f9123bc6217c1c2427e0a217e605497e140f5cb98c916c17b4209b82467" +content-hash = "d0ec3817759aa3c0fb1f4189505f7e054c0663b00e8d8f66921880fec831811d" diff --git a/pyproject.toml b/pyproject.toml index 131250f319..780e25538c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -180,7 +180,7 @@ html-sanitizer = "^2.1.0" isbnlib = "^3.10.14" itsdangerous = "^2.1.2" jwcrypto = "^1.4.2" -levenshtein = "~0.21" +levenshtein = "^0.22" lxml = "^4.9.3" money = "1.3.0" multipledispatch = "0.6.0" From 20bc8f00ac4b762db85e7fe8d4b278bb93b9268d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Sep 2023 02:09:15 +0000 Subject: [PATCH 068/262] Bump flask-pydantic-spec from 0.4.5 to 0.5.0 (#1415) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1ca85b5391..077b8524e5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1189,13 +1189,13 @@ Flask = ">=0.9" [[package]] name = "flask-pydantic-spec" -version = "0.4.5" +version = "0.5.0" description = "generate OpenAPI document and validate request & response with Python annotations." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "flask_pydantic_spec-0.4.5-py3-none-any.whl", hash = "sha256:c1479a9dd3c1263592c3d9b40eb170d53d8c8ffb0b6d19652cd53da7de7a8877"}, - {file = "flask_pydantic_spec-0.4.5.tar.gz", hash = "sha256:0c80540e5dfaf07913fc9fb490d583577c84428dbb526e08249602ca31876a3c"}, + {file = "flask_pydantic_spec-0.5.0-py3-none-any.whl", hash = "sha256:3add259e5736d8e1b2b6a9db307ee55b2b3a0154d81633d4c3bcc933b20acb9a"}, + {file = "flask_pydantic_spec-0.5.0.tar.gz", hash = "sha256:8d1f3d173c2a288e61e01bd64752374c6007338a5cd0ab78a5a8ba860c27295e"}, ] [package.dependencies] @@ -4637,4 +4637,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "d0ec3817759aa3c0fb1f4189505f7e054c0663b00e8d8f66921880fec831811d" +content-hash = "543db762875b9735cacabcd6e288c82155e9bc0f0d707547ef803d4ccac9786e" diff --git a/pyproject.toml b/pyproject.toml index 780e25538c..9fc86b06f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -174,7 +174,7 @@ firebase-admin = "^6.0.1" Flask = "^2.2.3" Flask-Babel = "2.0.0" Flask-Cors = "4.0.0" -flask-pydantic-spec = "^0.4.2" +flask-pydantic-spec = "^0.5.0" fuzzywuzzy = "0.18.0" # fuzzywuzzy is for author name manipulations html-sanitizer = "^2.1.0" isbnlib = "^3.10.14" From 928f511e23980606dcbe30f141534063ba25613d Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 2 Oct 2023 05:06:34 -0400 Subject: [PATCH 069/262] Remove configuration storage classes. (#1382) --- api/s3_analytics_provider.py | 9 +- core/model/configuration.py | 551 +----------------------- tests/core/models/test_configuration.py | 447 ------------------- 3 files changed, 2 insertions(+), 1005 deletions(-) diff --git a/api/s3_analytics_provider.py b/api/s3_analytics_provider.py index 8ddff535cc..4a954cc736 100644 --- a/api/s3_analytics_provider.py +++ b/api/s3_analytics_provider.py @@ -10,24 +10,17 @@ from core.config import CannotLoadConfiguration from core.local_analytics_provider import LocalAnalyticsProvider from core.model import Library, LicensePool, MediaTypes -from core.model.configuration import ConfigurationGrouping from core.service.container import Services from core.service.storage.s3 import S3Service -class S3AnalyticsProviderConfiguration(ConfigurationGrouping): - """Contains configuration settings of the S3 Analytics provider.""" - - class S3AnalyticsProvider(LocalAnalyticsProvider): """Analytics provider storing data in a S3 bucket.""" NAME = _("S3 Analytics") DESCRIPTION = _("Store analytics events in a S3 bucket.") - SETTINGS = ( - LocalAnalyticsProvider.SETTINGS + S3AnalyticsProviderConfiguration.to_settings() - ) + SETTINGS = LocalAnalyticsProvider.SETTINGS def __init__( self, diff --git a/core/model/configuration.py b/core/model/configuration.py index 696368053e..c98e822845 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -1,20 +1,17 @@ from __future__ import annotations # ExternalIntegration, ExternalIntegrationLink, ConfigurationSetting -import inspect import json import logging from abc import ABCMeta, abstractmethod -from contextlib import contextmanager from enum import Enum -from typing import TYPE_CHECKING, Any, Iterable, Iterator, List, Optional, TypeVar +from typing import TYPE_CHECKING, List, Optional from sqlalchemy import Column, ForeignKey, Index, Integer, Unicode from sqlalchemy.orm import Mapped, relationship from sqlalchemy.orm.session import Session from 
sqlalchemy.sql.expression import and_ -from core.configuration.ignored_identifier import IgnoredIdentifierSettings from core.model.hybrid import hybrid_property from ..config import CannotLoadConfiguration, Configuration @@ -787,554 +784,8 @@ def external_integration(self, db: Session) -> Optional[ExternalIntegration]: raise NotImplementedError() -class BaseConfigurationStorage(metaclass=ABCMeta): - """Serializes and deserializes values as configuration settings""" - - @abstractmethod - def save(self, db: Session, setting_name: str, value: Any): - """Save the value as as a new configuration setting - - :param db: Database session - :param setting_name: Name of the configuration setting - :param value: Value to be saved - """ - raise NotImplementedError() - - @abstractmethod - def load(self, db: Session, setting_name: str) -> Any: - """Loads and returns the library's configuration setting - - :param db: Database session - :param setting_name: Name of the configuration setting - """ - raise NotImplementedError() - - -class ConfigurationStorage(BaseConfigurationStorage): - """Serializes and deserializes values as configuration settings""" - - def __init__(self, integration_association: HasExternalIntegration): - """Initializes a new instance of ConfigurationStorage class - - :param integration_association: Association with an external integration - """ - self._integration_association = integration_association - - def save(self, db: Session, setting_name: str, value: Any): - """Save the value as as a new configuration setting - - :param db: Database session - :param setting_name: Name of the configuration setting - :param value: Value to be saved - """ - integration = self._integration_association.external_integration(db) - ConfigurationSetting.for_externalintegration( - setting_name, integration - ).value = value - - def load(self, db: Session, setting_name: str) -> Any: - """Loads and returns the library's configuration setting - - :param db: Database session - :param setting_name: Name of the library's configuration setting - """ - integration = self._integration_association.external_integration(db) - value = ConfigurationSetting.for_externalintegration( - setting_name, integration - ).value - - return value - - -class ConfigurationAttributeType(Enum): - """Enumeration of configuration setting types""" - - TEXT = "text" - TEXTAREA = "textarea" - SELECT = "select" - NUMBER = "number" - LIST = "list" - MENU = "menu" - - def to_control_type(self) -> str | None: - """Converts the value to a attribute type understandable by circulation-admin - - :return: String representation of attribute's type - """ - # NOTE: For some reason, circulation-admin converts "text" into so we have to turn it into None - # In this case circulation-admin will use - # TODO: To be fixed in https://jira.nypl.org/browse/SIMPLY-3008 - if self.value == ConfigurationAttributeType.TEXT.value: - return None - else: - return self.value - - -class ConfigurationAttribute(Enum): - """Enumeration of configuration setting attributes""" - - KEY = "key" - LABEL = "label" - DESCRIPTION = "description" - TYPE = "type" - REQUIRED = "required" - DEFAULT = "default" - OPTIONS = "options" - CATEGORY = "category" - FORMAT = "format" - - class ConfigurationAttributeValue(Enum): """Enumeration of common configuration attribute values""" YESVALUE = "yes" NOVALUE = "no" - - -class ConfigurationOption: - """Key-value pair containing information about configuration attribute option""" - - def __init__(self, key: str, label: str) -> None: - 
"""Initializes a new instance of ConfigurationOption class - - :param key: Key - :param label: Label - """ - self._key = key - self._label = label - - def __eq__(self, other: object) -> bool: - """Compares two ConfigurationOption objects - - :param other: ConfigurationOption object - - :return: Boolean value indicating whether two items are equal - """ - if not isinstance(other, ConfigurationOption): - return False - - return self.key == other.key and self.label == other.label - - @property - def key(self) -> str: - """Returns option's key - - :return: Option's key - """ - return self._key - - @property - def label(self) -> str: - """Returns option's label - - :return: Option's label - """ - return self._label - - def to_settings(self) -> dict[str, str]: - """Returns a dictionary containing option metadata in the SETTINGS format - - :return: Dictionary containing option metadata in the SETTINGS format - """ - return {"key": self.key, "label": self.label} - - @staticmethod - def from_enum(cls: type[Enum]) -> list[ConfigurationOption]: - """Convers Enum to a list of options in the SETTINGS format - - :param cls: Enum type - - :return: List of options in the SETTINGS format - """ - if not issubclass(cls, Enum): - raise ValueError("Class should be descendant of Enum") - - return [ConfigurationOption(element.value, element.name) for element in cls] - - -class HasConfigurationSettings(metaclass=ABCMeta): - """Interface representing class containing ConfigurationMetadata properties""" - - @abstractmethod - def get_setting_value(self, setting_name: str) -> Any: - """Returns a settings'value - - :param setting_name: Name of the setting - - :return: Setting's value - """ - raise NotImplementedError() - - @abstractmethod - def set_setting_value(self, setting_name: str, setting_value: Any): - """Sets setting's value - - :param setting_name: Name of the setting - - :param setting_value: New value of the setting - """ - raise NotImplementedError() - - -class ConfigurationMetadata: - """Contains configuration metadata""" - - _counter = 0 - - def __init__( - self, - key: str, - label: str, - description: str, - type: ConfigurationAttributeType, - required: bool = False, - default: Any | None = None, - options: list[ConfigurationOption] | None = None, - category: str | None = None, - format=None, - index=None, - ): - """Initializes a new instance of ConfigurationMetadata class - - :param key: Setting's key - :param label: Setting's label - :param description: Setting's description - :param type: Setting's type - :param required: Boolean value indicating whether the setting is required or not - :param default: Setting's default value - :param options: Setting's options (used in the case of select) - :param category: Setting's category - """ - self._key = key - self._label = label - self._description = description - self._type = type - self._required = required - self._default = default - self._options = options - self._category = category - self._format = format - - if index is not None: - self._index = index - else: - ConfigurationMetadata._counter += 1 - self._index = ConfigurationMetadata._counter - - def __get__( - self, - owner_instance: HasConfigurationSettings | IgnoredIdentifierSettings | None, - owner_type: type | None, - ) -> Any: - """Returns a value of the setting - - :param owner_instance: Instance of the owner, class having instance of ConfigurationMetadata as an attribute - :param owner_type: Owner's class - - :return: ConfigurationMetadata instance (when called via a static method) or - the 
setting's value (when called via an instance method) - """ - # If owner_instance is empty, it means that this method was called - # via a static method of ConfigurationMetadataOwner (for example, ConfigurationBucket.to_settings). - # In this case we need to return the metadata instance itself - if owner_instance is None: - return self - - if not isinstance(owner_instance, HasConfigurationSettings): - raise Exception( - "owner must be an instance of HasConfigurationSettings type" - ) - - setting_value = owner_instance.get_setting_value(self._key) - - if setting_value is None: - setting_value = self.default - elif self.type == ConfigurationAttributeType.NUMBER: - try: - setting_value = float(setting_value) - except ValueError: - if setting_value != "": - # A non-empty value is a "bad" value, and should raise an exception - raise CannotLoadConfiguration( - f"Could not covert {self.label}'s value '{setting_value}'." - ) - setting_value = self.default - else: - # LIST and MENU configuration settings are stored as JSON-serialized lists in the database. - # We need to deserialize them to get actual values. - if self.type in ( - ConfigurationAttributeType.LIST, - ConfigurationAttributeType.MENU, - ): - if isinstance(setting_value, str): - setting_value = json.loads(setting_value) - else: - # We assume that LIST and MENU values can be either JSON or empty. - if setting_value is not None: - raise ValueError( - f"{self._type} configuration setting '{self._key}' has an incorrect format. " - f"Expected JSON-serialized list but got {setting_value}." - ) - - setting_value = [] - - return setting_value - - def __set__( - self, - owner_instance: HasConfigurationSettings | IgnoredIdentifierSettings | None, - value: Any, - ) -> Any: - """Updates the setting's value - - :param owner_instance: Instance of the owner, class having instance of ConfigurationMetadata as an attribute - - :param value: New setting's value - """ - if not isinstance(owner_instance, HasConfigurationSettings): - raise Exception( - "owner must be an instance of HasConfigurationSettings type" - ) - - return owner_instance.set_setting_value(self._key, value) - - @property - def key(self) -> str: - """Returns the setting's key - - :return: Setting's key - """ - return self._key - - @property - def label(self) -> str: - """Returns the setting's label - - :return: Setting's label - """ - return self._label - - @property - def description(self) -> str: - """Returns the setting's description - - :return: Setting's description - """ - return self._description - - @property - def type(self) -> ConfigurationAttributeType: - """Returns the setting's type - - :return: Setting's type - """ - return self._type - - @property - def required(self) -> bool: - """Returns the boolean value indicating whether the setting is required or not - - :return: Boolean value indicating whether the setting is required or not - """ - return self._required - - @property - def default(self) -> Any | None: - """Returns the setting's default value - - :return: Setting's default value - """ - return self._default - - @property - def options(self) -> list[ConfigurationOption] | None: - """Returns the setting's options (used in the case of select) - - :return: Setting's options (used in the case of select) - """ - return self._options - - @property - def category(self) -> str | None: - """Returns the setting's category - - :return: Setting's category - """ - return self._category - - @property - def format(self) -> str: - """Returns the setting's format - - :return: Setting's 
format - """ - return self._format - - @property - def index(self): - return self._index - - @staticmethod - def get_configuration_metadata(cls) -> list[tuple[str, ConfigurationMetadata]]: - """Returns a list of 2-tuples containing information ConfigurationMetadata properties in the specified class - - :param cls: Class - :return: List of 2-tuples containing information ConfigurationMetadata properties in the specified class - """ - members = inspect.getmembers(cls) - configuration_metadata = [] - - for name, member in members: - if isinstance(member, ConfigurationMetadata): - configuration_metadata.append((name, member)) - - configuration_metadata.sort(key=lambda pair: pair[1].index) - - return configuration_metadata - - def to_settings(self): - return { - ConfigurationAttribute.KEY.value: self.key, - ConfigurationAttribute.LABEL.value: self.label, - ConfigurationAttribute.DESCRIPTION.value: self.description, - ConfigurationAttribute.TYPE.value: self.type.to_control_type(), - ConfigurationAttribute.REQUIRED.value: self.required, - ConfigurationAttribute.DEFAULT.value: self.default, - ConfigurationAttribute.OPTIONS.value: [ - option.to_settings() for option in self.options - ] - if self.options - else None, - ConfigurationAttribute.CATEGORY.value: self.category, - ConfigurationAttribute.FORMAT.value: self.format, - } - - @staticmethod - def to_bool(metadata: ConfigurationMetadata) -> bool: - """Return a boolean scalar indicating whether the configuration setting - contains a value that can be treated as True (see ConfigurationSetting.MEANS_YES). - - :param metadata: ConfigurationMetadata object - :return: Boolean scalar indicating - whether this configuration setting contains a value that can be treated as True - """ - return str(metadata).lower() in ConfigurationSetting.MEANS_YES - - -class ConfigurationGrouping(HasConfigurationSettings): - """Base class for all classes containing configuration settings - - NOTE: Be aware that it's valid only while a database session is valid and must not be stored between requests - """ - - def __init__( - self, configuration_storage: BaseConfigurationStorage, db: Session - ) -> None: - """Initializes a new instance of ConfigurationGrouping - - :param configuration_storage: ConfigurationStorage object - :param db: Database session - """ - self._logger = logging.getLogger() - self._configuration_storage = configuration_storage - self._db = db - - def __enter__(self): - return self - - def __exit__(self, exc_type, exc_val, exc_tb): - self._db = None - - def get_setting_value(self, setting_name: str) -> Any: - """Returns a settings'value - - :param setting_name: Name of the setting - :return: Setting's value - """ - return self._configuration_storage.load(self._db, setting_name) - - def set_setting_value(self, setting_name: str, setting_value: Any) -> Any: - """Sets setting's value - - :param setting_name: Name of the setting - :param setting_value: New value of the setting - """ - self._configuration_storage.save(self._db, setting_name, setting_value) - - @classmethod - def to_settings_generator(cls) -> Iterable[dict]: - """Return a generator object returning settings in a format understandable by circulation-admin. - - :return: list of settings in a format understandable by circulation-admin. 
- """ - for name, member in ConfigurationMetadata.get_configuration_metadata(cls): - key_attribute = getattr(member, ConfigurationAttribute.KEY.value, None) - label_attribute = getattr(member, ConfigurationAttribute.LABEL.value, None) - description_attribute = getattr( - member, ConfigurationAttribute.DESCRIPTION.value, None - ) - type_attribute = getattr(member, ConfigurationAttribute.TYPE.value, None) - control_type = ( - type_attribute.to_control_type() if type_attribute is not None else None - ) - required_attribute = getattr( - member, ConfigurationAttribute.REQUIRED.value, None - ) - default_attribute = getattr( - member, ConfigurationAttribute.DEFAULT.value, None - ) - options_attribute = getattr( - member, ConfigurationAttribute.OPTIONS.value, None - ) - category_attribute = getattr( - member, ConfigurationAttribute.CATEGORY.value, None - ) - - yield { - ConfigurationAttribute.KEY.value: key_attribute, - ConfigurationAttribute.LABEL.value: label_attribute, - ConfigurationAttribute.DESCRIPTION.value: description_attribute, - ConfigurationAttribute.TYPE.value: control_type, - ConfigurationAttribute.REQUIRED.value: required_attribute, - ConfigurationAttribute.DEFAULT.value: default_attribute, - ConfigurationAttribute.OPTIONS.value: [ - option.to_settings() for option in options_attribute - ] - if options_attribute - else None, - ConfigurationAttribute.CATEGORY.value: category_attribute, - } - - @classmethod - def to_settings(cls) -> list[dict[str, Any]]: - """Return a list of settings in a format understandable by circulation-admin. - - :return: list of settings in a format understandable by circulation-admin. - """ - return list(cls.to_settings_generator()) - - -C = TypeVar("C", bound="ConfigurationGrouping") - - -class ConfigurationFactory: - """Factory creating new instances of ConfigurationGrouping class descendants.""" - - @contextmanager - def create( - self, - configuration_storage: ConfigurationStorage, - db: Session, - configuration_grouping_class: type[C], - ) -> Iterator[C]: - """Create a new instance of ConfigurationGrouping. 
- - :param configuration_storage: ConfigurationStorage object - :param db: Database session - :param configuration_grouping_class: Configuration bucket's class - :return: ConfigurationGrouping instance - """ - with configuration_grouping_class( - configuration_storage, db - ) as configuration_bucket: - yield configuration_bucket diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index e7f7fe8ee6..4d53c70b0d 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -1,25 +1,13 @@ -import json -from enum import Enum -from unittest.mock import MagicMock, create_autospec - import pytest -import sqlalchemy from sqlalchemy.exc import IntegrityError from core.config import CannotLoadConfiguration, Configuration from core.model import create, get_one from core.model.collection import Collection from core.model.configuration import ( - ConfigurationAttribute, - ConfigurationAttributeType, - ConfigurationGrouping, - ConfigurationMetadata, - ConfigurationOption, ConfigurationSetting, - ConfigurationStorage, ExternalIntegration, ExternalIntegrationLink, - HasExternalIntegration, ) from core.model.datasource import DataSource from tests.fixtures.database import DatabaseTransactionFixture @@ -757,438 +745,3 @@ def test_delete( external_integrations = session.query(ExternalIntegration).all() assert integration1 not in external_integrations assert integration2 in external_integrations - - -SETTING1_KEY = "setting1" -SETTING1_LABEL = "Setting 1's label" -SETTING1_DESCRIPTION = "Setting 1's description" -SETTING1_TYPE = ConfigurationAttributeType.TEXT -SETTING1_REQUIRED = False -SETTING1_DEFAULT = "12345" -SETTING1_CATEGORY = "Settings" - -SETTING2_KEY = "setting2" -SETTING2_LABEL = "Setting 2's label" -SETTING2_DESCRIPTION = "Setting 2's description" -SETTING2_TYPE = ConfigurationAttributeType.SELECT -SETTING2_REQUIRED = False -SETTING2_DEFAULT = "value1" -SETTING2_OPTIONS = [ - ConfigurationOption("key1", "value1"), - ConfigurationOption("key2", "value2"), - ConfigurationOption("key3", "value3"), -] -SETTING2_CATEGORY = "Settings" - -SETTING3_KEY = "setting3" -SETTING3_LABEL = "Setting 3's label" -SETTING3_DESCRIPTION = "Setting 3's description" -SETTING3_TYPE = ConfigurationAttributeType.MENU -SETTING3_REQUIRED = False -SETTING3_OPTIONS = [ - ConfigurationOption("key1", "value1"), - ConfigurationOption("key2", "value2"), - ConfigurationOption("key3", "value3"), -] -SETTING3_DEFAULT = [SETTING3_OPTIONS[0].key, SETTING3_OPTIONS[1].key] -SETTING3_CATEGORY = "Settings" - - -SETTING4_KEY = "setting4" -SETTING4_LABEL = "Setting 4's label" -SETTING4_DESCRIPTION = "Setting 4's description" -SETTING4_TYPE = ConfigurationAttributeType.LIST -SETTING4_REQUIRED = False -SETTING4_OPTIONS = None -SETTING4_DEFAULT = None -SETTING4_CATEGORY = "Settings" - -SETTING5_KEY = "setting5" -SETTING5_LABEL = "Setting 5's label" -SETTING5_DESCRIPTION = "Setting 5's description" -SETTING5_TYPE = ConfigurationAttributeType.NUMBER -SETTING5_REQUIRED = False -SETTING5_DEFAULT = 12345 -SETTING5_CATEGORY = "Settings" - - -class MockConfiguration(ConfigurationGrouping): - setting1 = ConfigurationMetadata( - key=SETTING1_KEY, - label=SETTING1_LABEL, - description=SETTING1_DESCRIPTION, - type=SETTING1_TYPE, - required=SETTING1_REQUIRED, - default=SETTING1_DEFAULT, - category=SETTING1_CATEGORY, - ) - - setting2 = ConfigurationMetadata( - key=SETTING2_KEY, - label=SETTING2_LABEL, - description=SETTING2_DESCRIPTION, - type=SETTING2_TYPE, - 
required=SETTING2_REQUIRED, - default=SETTING2_DEFAULT, - options=SETTING2_OPTIONS, - category=SETTING2_CATEGORY, - ) - - setting3 = ConfigurationMetadata( - key=SETTING3_KEY, - label=SETTING3_LABEL, - description=SETTING3_DESCRIPTION, - type=SETTING3_TYPE, - required=SETTING3_REQUIRED, - default=SETTING3_DEFAULT, - options=SETTING3_OPTIONS, - category=SETTING3_CATEGORY, - ) - - setting4 = ConfigurationMetadata( - key=SETTING4_KEY, - label=SETTING4_LABEL, - description=SETTING4_DESCRIPTION, - type=SETTING4_TYPE, - required=SETTING4_REQUIRED, - default=SETTING4_DEFAULT, - options=SETTING4_OPTIONS, - category=SETTING4_CATEGORY, - ) - - setting5 = ConfigurationMetadata( - key=SETTING5_KEY, - label=SETTING5_LABEL, - description=SETTING5_DESCRIPTION, - type=SETTING5_TYPE, - required=SETTING5_REQUIRED, - default=SETTING5_DEFAULT, - category=SETTING5_CATEGORY, - ) - - -class ConfigurationWithBooleanProperty(ConfigurationGrouping): - boolean_setting = ConfigurationMetadata( - key="boolean_setting", - label="Boolean Setting", - description="Boolean Setting", - type=ConfigurationAttributeType.SELECT, - required=True, - default="true", - options=[ - ConfigurationOption("true", "True"), - ConfigurationOption("false", "False"), - ], - ) - - -class MockConfiguration2(ConfigurationGrouping): - setting1 = ConfigurationMetadata( - key="setting1", - label=SETTING1_LABEL, - description=SETTING1_DESCRIPTION, - type=SETTING1_TYPE, - required=SETTING1_REQUIRED, - default=SETTING1_DEFAULT, - category=SETTING1_CATEGORY, - index=1, - ) - - setting2 = ConfigurationMetadata( - key="setting2", - label=SETTING2_LABEL, - description=SETTING2_DESCRIPTION, - type=SETTING2_TYPE, - required=SETTING2_REQUIRED, - default=SETTING2_DEFAULT, - options=SETTING2_OPTIONS, - category=SETTING2_CATEGORY, - index=0, - ) - - -class TestConfigurationOption: - def test_to_settings(self): - # Arrange - option = ConfigurationOption("key1", "value1") - expected_result = {"key": "key1", "label": "value1"} - - # Act - result = option.to_settings() - - # Assert - assert result == expected_result - - def test_from_enum(self): - # Arrange - class TestEnum(Enum): - LABEL1 = "KEY1" - LABEL2 = "KEY2" - - expected_result = [ - ConfigurationOption("KEY1", "LABEL1"), - ConfigurationOption("KEY2", "LABEL2"), - ] - - # Act - result = ConfigurationOption.from_enum(TestEnum) - - # Assert - assert result == expected_result - - -class TestConfigurationGrouping: - @pytest.mark.parametrize( - "_,setting_name,expected_value", - [("setting1", "setting1", 12345), ("setting2", "setting2", "12345")], - ) - def test_getters(self, _, setting_name, expected_value): - # Arrange - configuration_storage = create_autospec(spec=ConfigurationStorage) - configuration_storage.load = MagicMock(return_value=expected_value) - db = create_autospec(spec=sqlalchemy.orm.session.Session) - configuration = MockConfiguration(configuration_storage, db) - - # Act - setting_value = getattr(configuration, setting_name) - - # Assert - assert setting_value == expected_value - configuration_storage.load.assert_called_once_with(db, setting_name) - - @pytest.mark.parametrize( - "_,setting_name,db_value,expected_value", - [ - ( - "default_menu_value", - MockConfiguration.setting3.key, - None, - MockConfiguration.setting3.default, - ), - ( - "menu_value", - MockConfiguration.setting3.key, - json.dumps( - [ - MockConfiguration.setting3.options[0].key, - MockConfiguration.setting3.options[1].key, - ] - ), - [ - MockConfiguration.setting3.options[0].key, - MockConfiguration.setting3.options[1].key, 
- ], - ), - ( - "default_list_value", - MockConfiguration.setting4.key, - None, - MockConfiguration.setting4.default, - ), - ( - "menu_value", - MockConfiguration.setting4.key, - json.dumps(["value1", "value2"]), - ["value1", "value2"], - ), - ], - ) - def test_menu_and_list_getters(self, _, setting_name, db_value, expected_value): - # Arrange - configuration_storage = create_autospec(spec=ConfigurationStorage) - configuration_storage.load = MagicMock(return_value=db_value) - db = create_autospec(spec=sqlalchemy.orm.session.Session) - configuration = MockConfiguration(configuration_storage, db) - - # Act - setting_value = getattr(configuration, setting_name) - - # Assert - assert setting_value == expected_value - configuration_storage.load.assert_called_once_with(db, setting_name) - - def test_getter_return_default_value(self): - # Arrange - configuration_storage = create_autospec(spec=ConfigurationStorage) - configuration_storage.load = MagicMock(return_value=None) - db = create_autospec(spec=sqlalchemy.orm.session.Session) - configuration = MockConfiguration(configuration_storage, db) - - # Act - setting1_value = configuration.setting1 - setting5_value = configuration.setting5 - - # Assert - assert SETTING1_DEFAULT == setting1_value - assert SETTING5_DEFAULT == setting5_value - - @pytest.mark.parametrize( - "_,setting_name,expected_value", - [("setting1", "setting1", 12345), ("setting2", "setting2", "12345")], - ) - def test_setters(self, _, setting_name, expected_value): - # Arrange - configuration_storage = create_autospec(spec=ConfigurationStorage) - configuration_storage.save = MagicMock(return_value=expected_value) - db = create_autospec(spec=sqlalchemy.orm.session.Session) - configuration = MockConfiguration(configuration_storage, db) - - # Act - setattr(configuration, setting_name, expected_value) - - # Assert - configuration_storage.save.assert_called_once_with( - db, setting_name, expected_value - ) - - def test_to_settings_considers_default_indices(self): - # Act - settings = MockConfiguration.to_settings() - - # Assert - assert len(settings) == 5 - - assert settings[0][ConfigurationAttribute.KEY.value] == SETTING1_KEY - assert settings[0][ConfigurationAttribute.LABEL.value] == SETTING1_LABEL - assert ( - settings[0][ConfigurationAttribute.DESCRIPTION.value] - == SETTING1_DESCRIPTION - ) - assert settings[0][ConfigurationAttribute.TYPE.value] == None - assert settings[0][ConfigurationAttribute.REQUIRED.value] == SETTING1_REQUIRED - assert settings[0][ConfigurationAttribute.DEFAULT.value] == SETTING1_DEFAULT - assert settings[0][ConfigurationAttribute.CATEGORY.value] == SETTING1_CATEGORY - - assert settings[1][ConfigurationAttribute.KEY.value] == SETTING2_KEY - assert settings[1][ConfigurationAttribute.LABEL.value] == SETTING2_LABEL - assert ( - settings[1][ConfigurationAttribute.DESCRIPTION.value] - == SETTING2_DESCRIPTION - ) - assert settings[1][ConfigurationAttribute.TYPE.value] == SETTING2_TYPE.value - assert settings[1][ConfigurationAttribute.REQUIRED.value] == SETTING2_REQUIRED - assert settings[1][ConfigurationAttribute.DEFAULT.value] == SETTING2_DEFAULT - assert settings[1][ConfigurationAttribute.OPTIONS.value] == [ - option.to_settings() for option in SETTING2_OPTIONS - ] - assert settings[1][ConfigurationAttribute.CATEGORY.value] == SETTING2_CATEGORY - - assert settings[2][ConfigurationAttribute.KEY.value] == SETTING3_KEY - assert settings[2][ConfigurationAttribute.LABEL.value] == SETTING3_LABEL - assert ( - settings[2][ConfigurationAttribute.DESCRIPTION.value] - == 
SETTING3_DESCRIPTION - ) - assert settings[2][ConfigurationAttribute.TYPE.value] == SETTING3_TYPE.value - assert settings[2][ConfigurationAttribute.REQUIRED.value] == SETTING3_REQUIRED - assert settings[2][ConfigurationAttribute.DEFAULT.value] == SETTING3_DEFAULT - assert settings[2][ConfigurationAttribute.OPTIONS.value] == [ - option.to_settings() for option in SETTING3_OPTIONS - ] - assert settings[2][ConfigurationAttribute.CATEGORY.value] == SETTING3_CATEGORY - - assert settings[3][ConfigurationAttribute.KEY.value] == SETTING4_KEY - assert settings[3][ConfigurationAttribute.LABEL.value] == SETTING4_LABEL - assert ( - settings[3][ConfigurationAttribute.DESCRIPTION.value] - == SETTING4_DESCRIPTION - ) - assert settings[3][ConfigurationAttribute.TYPE.value] == SETTING4_TYPE.value - assert settings[3][ConfigurationAttribute.REQUIRED.value] == SETTING4_REQUIRED - assert settings[3][ConfigurationAttribute.DEFAULT.value] == SETTING4_DEFAULT - assert settings[3][ConfigurationAttribute.CATEGORY.value] == SETTING4_CATEGORY - - def test_to_settings_considers_explicit_indices(self): - # Act - settings = MockConfiguration2.to_settings() - - # Assert - assert len(settings) == 2 - - assert settings[0][ConfigurationAttribute.KEY.value] == SETTING2_KEY - assert settings[0][ConfigurationAttribute.LABEL.value] == SETTING2_LABEL - assert ( - settings[0][ConfigurationAttribute.DESCRIPTION.value] - == SETTING2_DESCRIPTION - ) - assert settings[0][ConfigurationAttribute.TYPE.value] == SETTING2_TYPE.value - assert settings[0][ConfigurationAttribute.REQUIRED.value] == SETTING2_REQUIRED - assert settings[0][ConfigurationAttribute.DEFAULT.value] == SETTING2_DEFAULT - assert settings[0][ConfigurationAttribute.OPTIONS.value] == [ - option.to_settings() for option in SETTING2_OPTIONS - ] - assert settings[0][ConfigurationAttribute.CATEGORY.value] == SETTING2_CATEGORY - - assert settings[1][ConfigurationAttribute.KEY.value] == SETTING1_KEY - assert settings[1][ConfigurationAttribute.LABEL.value] == SETTING1_LABEL - assert ( - settings[1][ConfigurationAttribute.DESCRIPTION.value] - == SETTING1_DESCRIPTION - ) - assert settings[1][ConfigurationAttribute.TYPE.value] == None - assert settings[1][ConfigurationAttribute.REQUIRED.value] == SETTING1_REQUIRED - assert settings[1][ConfigurationAttribute.DEFAULT.value] == SETTING1_DEFAULT - assert settings[1][ConfigurationAttribute.CATEGORY.value] == SETTING1_CATEGORY - - -class TestNumberConfigurationMetadata: - def test_number_type_getter(self, db: DatabaseTransactionFixture): - # Arrange - external_integration = db.external_integration("test") - external_integration_association = create_autospec(spec=HasExternalIntegration) - external_integration_association.external_integration = MagicMock( - return_value=external_integration - ) - configuration_storage = ConfigurationStorage(external_integration_association) - configuration = MockConfiguration(configuration_storage, db.session) - - configuration.setting5 = "abc" - with pytest.raises(CannotLoadConfiguration): - configuration.setting5 - - configuration.setting5 = "123" - assert configuration.setting5 == 123.0 - - configuration.setting5 = "" - assert configuration.setting5 == SETTING5_DEFAULT - - -class TestBooleanConfigurationMetadata: - @pytest.mark.parametrize( - "provided,expected", - [ - ("true", True), - ("t", True), - ("yes", True), - ("y", True), - (1, False), - ("false", False), - ], - ) - def test_configuration_metadata_correctly_cast_bool_values( - self, db: DatabaseTransactionFixture, provided, expected - ): - 
"""Ensure that ConfigurationMetadata.to_bool correctly translates different values into boolean (True/False).""" - # Arrange - external_integration = db.external_integration("test") - - external_integration_association = create_autospec(spec=HasExternalIntegration) - external_integration_association.external_integration = MagicMock( - return_value=external_integration - ) - - configuration_storage = ConfigurationStorage(external_integration_association) - - configuration = ConfigurationWithBooleanProperty( - configuration_storage, db.session - ) - - # We set a new value using ConfigurationMetadata.__set__ - configuration.boolean_setting = provided - - # Act - # We read the existing value using ConfigurationMetadata.__get__ - result = ConfigurationMetadata.to_bool(configuration.boolean_setting) - - # Assert - assert expected == result From e65d97994a05173852d0471365f1c54276fdbe5f Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 2 Oct 2023 06:22:47 -0400 Subject: [PATCH 070/262] Remove unused coverage provider (#1393) --- api/coverage.py | 147 ----------------------- core/opds_import.py | 55 --------- tests/api/mockapi/opds.py | 23 ---- tests/api/test_coverage.py | 238 ------------------------------------- 4 files changed, 463 deletions(-) delete mode 100644 api/coverage.py delete mode 100644 tests/api/mockapi/opds.py delete mode 100644 tests/api/test_coverage.py diff --git a/api/coverage.py b/api/coverage.py deleted file mode 100644 index 05e561fd2b..0000000000 --- a/api/coverage.py +++ /dev/null @@ -1,147 +0,0 @@ -"""Base classes for CoverageProviders. - -The CoverageProviders themselves are in the file corresponding to the -service that needs coverage -- overdrive.py, and so on. -""" - - -from core.coverage import CollectionCoverageProvider, CoverageFailure -from core.model import DataSource -from core.opds_import import OPDSImporter - - -class OPDSImportCoverageProvider(CollectionCoverageProvider): - """Provide coverage for identifiers by looking them up, in batches, - using the Simplified lookup protocol. - """ - - DEFAULT_BATCH_SIZE = 25 - OPDS_IMPORTER_CLASS = OPDSImporter - - def __init__(self, collection, lookup_client, **kwargs): - """Constructor. - - :param lookup_client: A SimplifiedOPDSLookup object. - """ - super().__init__(collection, **kwargs) - self.lookup_client = lookup_client - - def process_batch(self, batch): - """Perform a Simplified lookup and import the resulting OPDS feed.""" - ( - imported_editions, - pools, - works, - error_messages_by_id, - ) = self.lookup_and_import_batch(batch) - - results = [] - imported_identifiers = set() - # We grant coverage if an Edition was created from the operation. - for edition in imported_editions: - identifier = edition.primary_identifier - results.append(identifier) - imported_identifiers.add(identifier) - - # The operation may also have updated information from a - # number of LicensePools. - for pool in pools: - identifier = pool.identifier - if identifier in imported_identifiers: - self.finalize_license_pool(pool) - else: - msg = "OPDS import operation imported LicensePool, but no Edition." - results.append(self.failure(identifier, msg, transient=True)) - - # Anything left over is either a CoverageFailure, or an - # Identifier that used to be a CoverageFailure, indicating - # that a simplified:message that a normal OPDSImporter would - # consider a 'failure' should actually be considered a - # success. 
- for failure_or_identifier in sorted(error_messages_by_id.values()): - if isinstance(failure_or_identifier, CoverageFailure): - failure_or_identifier.collection = self.collection_or_not - results.append(failure_or_identifier) - return results - - def process_item(self, identifier): - """Handle an individual item (e.g. through ensure_coverage) as a very - small batch. Not efficient, but it works. - """ - [result] = self.process_batch([identifier]) - return result - - def finalize_license_pool(self, pool): - """An OPDS entry was matched with a LicensePool. Do something special - to mark the occasion. - - By default, nothing happens. - """ - - @property - def api_method(self): - """The method to call to fetch an OPDS feed from the remote server.""" - return self.lookup_client.lookup - - def lookup_and_import_batch(self, batch): - """Look up a batch of identifiers and parse the resulting OPDS feed. - - This method is overridden by MockOPDSImportCoverageProvider. - """ - # id_mapping maps our local identifiers to identifiers the - # foreign data source will reocgnize. - id_mapping = self.create_identifier_mapping(batch) - if id_mapping: - foreign_identifiers = list(id_mapping.keys()) - else: - foreign_identifiers = batch - - response = self.api_method(foreign_identifiers) - - # import_feed_response takes id_mapping so it can map the - # foreign identifiers back to their local counterparts. - return self.import_feed_response(response, id_mapping) - - def create_identifier_mapping(self, batch): - """Map the internal identifiers used for books to the corresponding - identifiers used by the lookup client. - - By default, no identifier mapping is needed. - """ - return None - - def import_feed_response(self, response, id_mapping): - """Confirms OPDS feed response and imports feed through - the appropriate OPDSImporter subclass. 
- """ - self.lookup_client.check_content_type(response) - importer = self.OPDS_IMPORTER_CLASS( - self._db, - self.collection, - identifier_mapping=id_mapping, - data_source_name=self.data_source.name, - ) - return importer.import_from_feed(response.text) - - -class MockOPDSImportCoverageProvider(OPDSImportCoverageProvider): - - SERVICE_NAME = "Mock Provider" - DATA_SOURCE_NAME = DataSource.OA_CONTENT_SERVER - - def __init__(self, collection, *args, **kwargs): - super().__init__(collection, None, *args, **kwargs) - self.batches = [] - self.finalized = [] - self.import_results = [] - - def queue_import_results(self, editions, pools, works, messages_by_id): - self.import_results.insert(0, (editions, pools, works, messages_by_id)) - - def finalize_license_pool(self, license_pool): - self.finalized.append(license_pool) - super().finalize_license_pool(license_pool) - - def lookup_and_import_batch(self, batch): - self.batches.append(batch) - return self.import_results.pop() diff --git a/core/opds_import.py b/core/opds_import.py index 6b52d77406..faca6de823 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -16,14 +16,12 @@ from api.circulation import CirculationConfigurationMixin from api.selftest import HasCollectionSelfTests -from core.integration.goals import Goals from core.integration.settings import ( BaseSettings, ConfigurationFormItem, ConfigurationFormItemType, FormField, ) -from core.model.integration import IntegrationConfiguration from .classifier import Classifier from .config import IntegrationException @@ -95,59 +93,6 @@ def parse_identifier(db, identifier): return parsed_identifier -class AccessNotAuthenticated(Exception): - """No authentication is configured for this service""" - - -class SimplifiedOPDSLookup: - """Tiny integration class for the Simplified 'lookup' protocol.""" - - LOOKUP_ENDPOINT = "lookup" - - @classmethod - def check_content_type(cls, response): - content_type = response.headers.get("content-type") - if content_type != OPDSFeed.ACQUISITION_FEED_TYPE: - raise BadResponseException.from_response( - response.url, "Wrong media type: %s" % content_type, response - ) - - @classmethod - def from_protocol(cls, _db, protocol, goal=Goals.LICENSE_GOAL, library=None): - config = get_one(_db, IntegrationConfiguration, protocol=protocol, goal=goal) - if config is not None and library is not None: - config = config.for_library(library.id) - if config is None: - return None - return cls(config.settings_dict["url"]) - - def __init__(self, base_url): - if not base_url.endswith("/"): - base_url += "/" - self.base_url = base_url - - @property - def lookup_endpoint(self): - return self.LOOKUP_ENDPOINT - - def _get(self, url, **kwargs): - """Make an HTTP request. This method is overridden in the mock class.""" - kwargs["timeout"] = kwargs.get("timeout", 300) - kwargs["allowed_response_codes"] = kwargs.get("allowed_response_codes", []) - kwargs["allowed_response_codes"] += ["2xx", "3xx"] - return HTTP.get_with_timeout(url, **kwargs) - - def urn_args(self, identifiers): - return "&".join({"urn=%s" % i.urn for i in identifiers}) - - def lookup(self, identifiers): - """Retrieve an OPDS feed with metadata for the given identifiers.""" - args = self.urn_args(identifiers) - url = self.base_url + self.lookup_endpoint + "?" 
+ args - logging.info("Lookup URL: %s", url) - return self._get(url) - - class OPDSXMLParser(XMLParser): NAMESPACES = { "simplified": "http://librarysimplified.org/terms/", diff --git a/tests/api/mockapi/opds.py b/tests/api/mockapi/opds.py deleted file mode 100644 index c64dbcd3b0..0000000000 --- a/tests/api/mockapi/opds.py +++ /dev/null @@ -1,23 +0,0 @@ -from core.opds_import import SimplifiedOPDSLookup -from core.util.http import HTTP -from tests.core.mock import MockRequestsResponse - - -class MockSimplifiedOPDSLookup(SimplifiedOPDSLookup): - def __init__(self, *args, **kwargs): - self.requests = [] - self.responses = [] - super().__init__(*args, **kwargs) - - def queue_response(self, status_code, headers={}, content=None): - self.responses.insert(0, MockRequestsResponse(status_code, headers, content)) - - def _get(self, url, *args, **kwargs): - self.requests.append((url, args, kwargs)) - response = self.responses.pop() - return HTTP._process_response( - url, - response, - kwargs.get("allowed_response_codes"), - kwargs.get("disallowed_response_codes"), - ) diff --git a/tests/api/test_coverage.py b/tests/api/test_coverage.py deleted file mode 100644 index a9cff5e28a..0000000000 --- a/tests/api/test_coverage.py +++ /dev/null @@ -1,238 +0,0 @@ -import pytest - -from api.coverage import MockOPDSImportCoverageProvider, OPDSImportCoverageProvider -from core.coverage import CoverageFailure -from core.model import Collection, DataSource, LicensePool -from core.opds_import import OPDSImporter -from core.util.http import BadResponseException -from core.util.opds_writer import OPDSFeed -from tests.api.mockapi.opds import MockSimplifiedOPDSLookup -from tests.core.mock import MockRequestsResponse -from tests.fixtures.database import DatabaseTransactionFixture - - -class TestOPDSImportCoverageProvider: - def _provider(self, db: DatabaseTransactionFixture): - """Create a generic MockOPDSImportCoverageProvider for testing purposes.""" - return MockOPDSImportCoverageProvider(db.default_collection()) - - def test_badresponseexception_on_non_opds_feed( - self, db: DatabaseTransactionFixture - ): - """If the lookup protocol sends something that's not an OPDS - feed, refuse to go any further. - """ - provider = self._provider(db) - provider.lookup_client = MockSimplifiedOPDSLookup(db.fresh_url()) - - response = MockRequestsResponse( - 200, {"content-type": "text/plain"}, "Some data" - ) - provider.lookup_client.queue_response(response) - with pytest.raises(BadResponseException) as excinfo: - provider.import_feed_response(response, None) - assert "Wrong media type: text/plain" in str(excinfo.value) - - def test_process_batch_with_identifier_mapping( - self, db: DatabaseTransactionFixture - ): - """Test that internal identifiers are mapped to and from the form used - by the external service. - """ - - # Unlike other tests in this class, we are using a real - # implementation of OPDSImportCoverageProvider.process_batch. - class TestProvider(OPDSImportCoverageProvider): - SERVICE_NAME = "Test provider" - DATA_SOURCE_NAME = DataSource.OA_CONTENT_SERVER - - mapping: dict - - # Mock the identifier mapping - def create_identifier_mapping(self, batch): - return self.mapping - - # This means we need to mock the lookup client instead. 
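One detail worth keeping in mind while reading this test: the dict returned by create_identifier_mapping() is keyed by the identifiers the remote service recognizes, with the corresponding local identifiers as values; the removed lookup_and_import_batch() sends list(mapping.keys()) to the lookup service and hands the same dict to the importer so results are mapped back. A tiny sketch with placeholder values (illustrative only; real code uses Identifier objects, not strings):

    # remote-facing identifier -> local identifier, as in provider.mapping = {id2: id1}
    mapping = {"urn:isbn:9780000000000": "local-urn-1"}
    sent_to_lookup = list(mapping.keys())
    assert mapping[sent_to_lookup[0]] == "local-urn-1"
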
- lookup = MockSimplifiedOPDSLookup(db.fresh_url()) - - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{Collection.DATA_SOURCE_NAME_SETTING: DataSource.OA_CONTENT_SERVER} - ) - provider = TestProvider(db.default_collection(), lookup) - - # Create a hard-coded mapping. We use id1 internally, but the - # foreign data source knows the book as id2. - id1 = db.identifier() - id2 = db.identifier() - provider.mapping = {id2: id1} - - feed = ( - "%sHere's your title!" - % id2.urn - ) - headers = {"content-type": OPDSFeed.ACQUISITION_FEED_TYPE} - lookup.queue_response(200, headers=headers, content=feed) - [identifier] = provider.process_batch([id1]) - - # We wanted to process id1. We sent id2 to the server, the - # server responded with an for id2, and it was used to - # modify the Edition associated with id1. - assert id1 == identifier - - [edition] = id1.primarily_identifies - assert "Here's your title!" == edition.title - - def test_process_batch(self, db: DatabaseTransactionFixture): - provider = self._provider(db) - - # Here are an Edition and a LicensePool for the same identifier but - # from different data sources. We would expect this to happen - # when talking to the open-access content server. - edition = db.edition(data_source_name=DataSource.OA_CONTENT_SERVER) - identifier = edition.primary_identifier - - license_source = DataSource.lookup(db.session, DataSource.GUTENBERG) - pool, is_new = LicensePool.for_foreign_id( - db.session, - license_source, - identifier.type, - identifier.identifier, - collection=db.default_collection(), - ) - assert pool is not None - assert None == pool.work - - # Here's a second Edition/LicensePool that's going to cause a - # problem: the LicensePool will show up in the results, but - # the corresponding Edition will not. - edition2, pool2 = db.edition(with_license_pool=True) - - # Here's an identifier that can't be looked up at all, - # and an identifier that shows up in messages_by_id because - # its simplified:message was determined to indicate success - # rather than failure. - error_identifier = db.identifier() - not_an_error_identifier = db.identifier() - messages_by_id = { - error_identifier.urn: CoverageFailure( - error_identifier, "500: internal error" - ), - not_an_error_identifier.urn: not_an_error_identifier, - } - - # When we call CoverageProvider.process_batch(), it's going to - # return the information we just set up: a matched - # Edition/LicensePool pair, a mismatched LicensePool, and an - # error message. - provider.queue_import_results([edition], [pool, pool2], [], messages_by_id) - - # Make the CoverageProvider do its thing. - fake_batch = [object()] - ( - success_import, - failure_mismatched, - failure_message, - success_message, - ) = provider.process_batch(fake_batch) - - # The fake batch was provided to lookup_and_import_batch. - assert [fake_batch] == provider.batches - - # The matched Edition/LicensePool pair was returned. - assert success_import == edition.primary_identifier - - # The LicensePool of that pair was passed into finalize_license_pool. - # The mismatched LicensePool was not. - assert [pool] == provider.finalized - - # The mismatched LicensePool turned into a CoverageFailure - # object. - assert isinstance(failure_mismatched, CoverageFailure) - assert ( - "OPDS import operation imported LicensePool, but no Edition." 
- == failure_mismatched.exception - ) - assert pool2.identifier == failure_mismatched.obj - assert True == failure_mismatched.transient - - # The OPDSMessage with status code 500 was returned as a - # CoverageFailure object. - assert isinstance(failure_message, CoverageFailure) - assert "500: internal error" == failure_message.exception - assert error_identifier == failure_message.obj - assert True == failure_message.transient - - # The identifier that had a treat-as-success OPDSMessage was returned - # as-is. - assert not_an_error_identifier == success_message - - def test_process_batch_success_even_if_no_licensepool_exists( - self, db: DatabaseTransactionFixture - ): - """This shouldn't happen since CollectionCoverageProvider - only operates on Identifiers that are licensed through a Collection. - But if a lookup should return an Edition but no LicensePool, - that counts as a success. - """ - provider = self._provider(db) - edition, pool = db.edition(with_license_pool=True) - provider.queue_import_results([edition], [], [], {}) - fake_batch = [object()] - [success] = provider.process_batch(fake_batch) - - # The Edition's primary identifier was returned to indicate - # success. - assert edition.primary_identifier == success - - # However, since there is no LicensePool, nothing was finalized. - assert [] == provider.finalized - - def test_process_item(self, db: DatabaseTransactionFixture): - """To process a single item we process a batch containing - only that item. - """ - provider = self._provider(db) - edition = db.edition() - provider.queue_import_results([edition], [], [], {}) - item = object() - result = provider.process_item(item) - assert edition.primary_identifier == result - assert [[item]] == provider.batches - - def test_import_feed_response(self, db: DatabaseTransactionFixture): - """Verify that import_feed_response instantiates the - OPDS_IMPORTER_CLASS subclass and calls import_from_feed - on it. - """ - - class MockOPDSImporter(OPDSImporter): - def import_from_feed(self, text): - """Return information that's useful for verifying - that the OPDSImporter was instantiated with the - right values. 
- """ - return ( - text, - self.collection, - self.identifier_mapping, - self.data_source_name, - ) - - class MockProvider(MockOPDSImportCoverageProvider): - OPDS_IMPORTER_CLASS = MockOPDSImporter - - provider = MockProvider(db.default_collection()) - provider.lookup_client = MockSimplifiedOPDSLookup(db.fresh_url()) - - response = MockRequestsResponse( - 200, {"content-type": OPDSFeed.ACQUISITION_FEED_TYPE}, "some data" - ) - id_mapping = object() - (text, collection, mapping, data_source_name) = provider.import_feed_response( - response, id_mapping - ) - assert "some data" == text - assert provider.collection == collection - assert id_mapping == mapping - assert provider.data_source.name == data_source_name From 13e097fbe89deaf2fa560f7bc0f4ef162155d55f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Oct 2023 12:23:29 +0000 Subject: [PATCH 071/262] Bump psycopg2 from 2.9.7 to 2.9.8 (#1417) --- poetry.lock | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/poetry.lock b/poetry.lock index 077b8524e5..613ecee53f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2916,22 +2916,22 @@ files = [ [[package]] name = "psycopg2" -version = "2.9.7" +version = "2.9.8" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.6" files = [ - {file = "psycopg2-2.9.7-cp310-cp310-win32.whl", hash = "sha256:1a6a2d609bce44f78af4556bea0c62a5e7f05c23e5ea9c599e07678995609084"}, - {file = "psycopg2-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:b22ed9c66da2589a664e0f1ca2465c29b75aaab36fa209d4fb916025fb9119e5"}, - {file = "psycopg2-2.9.7-cp311-cp311-win32.whl", hash = "sha256:44d93a0109dfdf22fe399b419bcd7fa589d86895d3931b01fb321d74dadc68f1"}, - {file = "psycopg2-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:91e81a8333a0037babfc9fe6d11e997a9d4dac0f38c43074886b0d9dead94fe9"}, - {file = "psycopg2-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:d1210fcf99aae6f728812d1d2240afc1dc44b9e6cba526a06fb8134f969957c2"}, - {file = "psycopg2-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:e9b04cbef584310a1ac0f0d55bb623ca3244c87c51187645432e342de9ae81a8"}, - {file = "psycopg2-2.9.7-cp38-cp38-win32.whl", hash = "sha256:d5c5297e2fbc8068d4255f1e606bfc9291f06f91ec31b2a0d4c536210ac5c0a2"}, - {file = "psycopg2-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:8275abf628c6dc7ec834ea63f6f3846bf33518907a2b9b693d41fd063767a866"}, - {file = "psycopg2-2.9.7-cp39-cp39-win32.whl", hash = "sha256:c7949770cafbd2f12cecc97dea410c514368908a103acf519f2a346134caa4d5"}, - {file = "psycopg2-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:b6bd7d9d3a7a63faae6edf365f0ed0e9b0a1aaf1da3ca146e6b043fb3eb5d723"}, - {file = "psycopg2-2.9.7.tar.gz", hash = "sha256:f00cc35bd7119f1fed17b85bd1007855194dde2cbd8de01ab8ebb17487440ad8"}, + {file = "psycopg2-2.9.8-cp310-cp310-win32.whl", hash = "sha256:2f8594f92bbb5d8b59ffec04e2686c416401e2d4297de1193f8e75235937e71d"}, + {file = "psycopg2-2.9.8-cp310-cp310-win_amd64.whl", hash = "sha256:f9ecbf504c4eaff90139d5c9b95d47275f2b2651e14eba56392b4041fbf4c2b3"}, + {file = "psycopg2-2.9.8-cp311-cp311-win32.whl", hash = "sha256:65f81e72136d8b9ac8abf5206938d60f50da424149a43b6073f1546063c0565e"}, + {file = "psycopg2-2.9.8-cp311-cp311-win_amd64.whl", hash = "sha256:f7e62095d749359b7854143843f27edd7dccfcd3e1d833b880562aa5702d92b0"}, + {file = "psycopg2-2.9.8-cp37-cp37m-win32.whl", hash = "sha256:81b21424023a290a40884c7f8b0093ba6465b59bd785c18f757e76945f65594c"}, + {file = 
"psycopg2-2.9.8-cp37-cp37m-win_amd64.whl", hash = "sha256:67c2f32f3aba79afb15799575e77ee2db6b46b8acf943c21d34d02d4e1041d50"}, + {file = "psycopg2-2.9.8-cp38-cp38-win32.whl", hash = "sha256:287a64ef168ef7fb9f382964705ff664b342bfff47e7242bf0a04ef203269dd5"}, + {file = "psycopg2-2.9.8-cp38-cp38-win_amd64.whl", hash = "sha256:dcde3cad4920e29e74bf4e76c072649764914facb2069e6b7fa1ddbebcd49e9f"}, + {file = "psycopg2-2.9.8-cp39-cp39-win32.whl", hash = "sha256:d4ad050ea50a16731d219c3a85e8f2debf49415a070f0b8331ccc96c81700d9b"}, + {file = "psycopg2-2.9.8-cp39-cp39-win_amd64.whl", hash = "sha256:d39bb3959788b2c9d7bf5ff762e29f436172b241cd7b47529baac77746fd7918"}, + {file = "psycopg2-2.9.8.tar.gz", hash = "sha256:3da6488042a53b50933244085f3f91803f1b7271f970f3e5536efa69314f6a49"}, ] [[package]] From 58b43c4ada9b177324f1b9dc66c3c7e5ccbcb75f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 2 Oct 2023 12:23:43 +0000 Subject: [PATCH 072/262] Bump psycopg2-binary from 2.9.7 to 2.9.8 (#1418) --- poetry.lock | 122 ++++++++++++++++++++++++++-------------------------- 1 file changed, 61 insertions(+), 61 deletions(-) diff --git a/poetry.lock b/poetry.lock index 613ecee53f..697ddff890 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2936,71 +2936,71 @@ files = [ [[package]] name = "psycopg2-binary" -version = "2.9.7" +version = "2.9.8" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false python-versions = ">=3.6" files = [ - {file = "psycopg2-binary-2.9.7.tar.gz", hash = "sha256:1b918f64a51ffe19cd2e230b3240ba481330ce1d4b7875ae67305bd1d37b041c"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ea5f8ee87f1eddc818fc04649d952c526db4426d26bab16efbe5a0c52b27d6ab"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2993ccb2b7e80844d534e55e0f12534c2871952f78e0da33c35e648bf002bbff"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbbc3c5d15ed76b0d9db7753c0db40899136ecfe97d50cbde918f630c5eb857a"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:692df8763b71d42eb8343f54091368f6f6c9cfc56dc391858cdb3c3ef1e3e584"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9dcfd5d37e027ec393a303cc0a216be564b96c80ba532f3d1e0d2b5e5e4b1e6e"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17cc17a70dfb295a240db7f65b6d8153c3d81efb145d76da1e4a096e9c5c0e63"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e5666632ba2b0d9757b38fc17337d84bdf932d38563c5234f5f8c54fd01349c9"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7db7b9b701974c96a88997d458b38ccb110eba8f805d4b4f74944aac48639b42"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c82986635a16fb1fa15cd5436035c88bc65c3d5ced1cfaac7f357ee9e9deddd4"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4fe13712357d802080cfccbf8c6266a3121dc0e27e2144819029095ccf708372"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-win32.whl", hash = "sha256:122641b7fab18ef76b18860dd0c772290566b6fb30cc08e923ad73d17461dc63"}, - {file = "psycopg2_binary-2.9.7-cp310-cp310-win_amd64.whl", hash = "sha256:f8651cf1f144f9ee0fa7d1a1df61a9184ab72962531ca99f077bbdcba3947c58"}, - 
{file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4ecc15666f16f97709106d87284c136cdc82647e1c3f8392a672616aed3c7151"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fbb1184c7e9d28d67671992970718c05af5f77fc88e26fd7136613c4ece1f89"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a7968fd20bd550431837656872c19575b687f3f6f98120046228e451e4064df"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:094af2e77a1976efd4956a031028774b827029729725e136514aae3cdf49b87b"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:26484e913d472ecb6b45937ea55ce29c57c662066d222fb0fbdc1fab457f18c5"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f309b77a7c716e6ed9891b9b42953c3ff7d533dc548c1e33fddc73d2f5e21f9"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d92e139ca388ccfe8c04aacc163756e55ba4c623c6ba13d5d1595ed97523e4b"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:2df562bb2e4e00ee064779902d721223cfa9f8f58e7e52318c97d139cf7f012d"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4eec5d36dbcfc076caab61a2114c12094c0b7027d57e9e4387b634e8ab36fd44"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1011eeb0c51e5b9ea1016f0f45fa23aca63966a4c0afcf0340ccabe85a9f65bd"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-win32.whl", hash = "sha256:ded8e15f7550db9e75c60b3d9fcbc7737fea258a0f10032cdb7edc26c2a671fd"}, - {file = "psycopg2_binary-2.9.7-cp311-cp311-win_amd64.whl", hash = "sha256:8a136c8aaf6615653450817a7abe0fc01e4ea720ae41dfb2823eccae4b9062a3"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2dec5a75a3a5d42b120e88e6ed3e3b37b46459202bb8e36cd67591b6e5feebc1"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc10da7e7df3380426521e8c1ed975d22df678639da2ed0ec3244c3dc2ab54c8"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee919b676da28f78f91b464fb3e12238bd7474483352a59c8a16c39dfc59f0c5"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb1c0e682138f9067a58fc3c9a9bf1c83d8e08cfbee380d858e63196466d5c86"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00d8db270afb76f48a499f7bb8fa70297e66da67288471ca873db88382850bf4"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:9b0c2b466b2f4d89ccc33784c4ebb1627989bd84a39b79092e560e937a11d4ac"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:51d1b42d44f4ffb93188f9b39e6d1c82aa758fdb8d9de65e1ddfe7a7d250d7ad"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:11abdbfc6f7f7dea4a524b5f4117369b0d757725798f1593796be6ece20266cb"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f02f4a72cc3ab2565c6d9720f0343cb840fb2dc01a2e9ecb8bc58ccf95dc5c06"}, - {file = "psycopg2_binary-2.9.7-cp37-cp37m-win32.whl", hash = "sha256:81d5dd2dd9ab78d31a451e357315f201d976c131ca7d43870a0e8063b6b7a1ec"}, - {file = 
"psycopg2_binary-2.9.7-cp37-cp37m-win_amd64.whl", hash = "sha256:62cb6de84d7767164a87ca97e22e5e0a134856ebcb08f21b621c6125baf61f16"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:59f7e9109a59dfa31efa022e94a244736ae401526682de504e87bd11ce870c22"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:95a7a747bdc3b010bb6a980f053233e7610276d55f3ca506afff4ad7749ab58a"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c721ee464e45ecf609ff8c0a555018764974114f671815a0a7152aedb9f3343"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4f37bbc6588d402980ffbd1f3338c871368fb4b1cfa091debe13c68bb3852b3"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac83ab05e25354dad798401babaa6daa9577462136ba215694865394840e31f8"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:024eaeb2a08c9a65cd5f94b31ace1ee3bb3f978cd4d079406aef85169ba01f08"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:1c31c2606ac500dbd26381145684d87730a2fac9a62ebcfbaa2b119f8d6c19f4"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:42a62ef0e5abb55bf6ffb050eb2b0fcd767261fa3faf943a4267539168807522"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:7952807f95c8eba6a8ccb14e00bf170bb700cafcec3924d565235dffc7dc4ae8"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e02bc4f2966475a7393bd0f098e1165d470d3fa816264054359ed4f10f6914ea"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-win32.whl", hash = "sha256:fdca0511458d26cf39b827a663d7d87db6f32b93efc22442a742035728603d5f"}, - {file = "psycopg2_binary-2.9.7-cp38-cp38-win_amd64.whl", hash = "sha256:d0b16e5bb0ab78583f0ed7ab16378a0f8a89a27256bb5560402749dbe8a164d7"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6822c9c63308d650db201ba22fe6648bd6786ca6d14fdaf273b17e15608d0852"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8f94cb12150d57ea433e3e02aabd072205648e86f1d5a0a692d60242f7809b15"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5ee89587696d808c9a00876065d725d4ae606f5f7853b961cdbc348b0f7c9a1"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad5ec10b53cbb57e9a2e77b67e4e4368df56b54d6b00cc86398578f1c635f329"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:642df77484b2dcaf87d4237792246d8068653f9e0f5c025e2c692fc56b0dda70"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6a8b575ac45af1eaccbbcdcf710ab984fd50af048fe130672377f78aaff6fc1"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f955aa50d7d5220fcb6e38f69ea126eafecd812d96aeed5d5f3597f33fad43bb"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ad26d4eeaa0d722b25814cce97335ecf1b707630258f14ac4d2ed3d1d8415265"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ced63c054bdaf0298f62681d5dcae3afe60cbae332390bfb1acf0e23dcd25fc8"}, - {file = 
"psycopg2_binary-2.9.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2b04da24cbde33292ad34a40db9832a80ad12de26486ffeda883413c9e1b1d5e"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-win32.whl", hash = "sha256:18f12632ab516c47c1ac4841a78fddea6508a8284c7cf0f292cb1a523f2e2379"}, - {file = "psycopg2_binary-2.9.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb3b8d55924a6058a26db69fb1d3e7e32695ff8b491835ba9f479537e14dcf9f"}, + {file = "psycopg2-binary-2.9.8.tar.gz", hash = "sha256:80451e6b6b7c486828d5c7ed50769532bbb04ec3a411f1e833539d5c10eb691c"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e271ad6692d50d70ca75db3bd461bfc26316de78de8fe1f504ef16dcea8f2312"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ae22a0fa5c516b84ddb189157fabfa3f12eded5d630e1ce260a18e1771f8707"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a971086db0069aef2fd22ccffb670baac427f4ee2174c4f5c7206254f1e6794"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6928a502af71ca2ac9aad535e78c8309892ed3bfa7933182d4c760580c8af4"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f955fe6301b84b6fd13970a05f3640fbb62ca3a0d19342356585006c830e038"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3723c3f009e2b2771f2491b330edb7091846f1aad0c08fbbd9a1383d6a0c0841"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e3142c7e51b92855cff300580de949e36a94ab3bfa8f353b27fe26535e9b3542"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:de85105c568dc5f0f0efe793209ba83e4675d53d00faffc7a7c7a8bea9e0e19a"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c7ff2b6a79a92b1b169b03bb91b41806843f0cdf6055256554495bffed1d496d"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59f45cca0765aabb52a5822c72d5ff2ec46a28b1c1702de90dc0d306ec5c2001"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-win32.whl", hash = "sha256:1dbad789ebd1e61201256a19dc2e90fed4706bc966ccad4f374648e5336b1ab4"}, + {file = "psycopg2_binary-2.9.8-cp310-cp310-win_amd64.whl", hash = "sha256:15458c81b0d199ab55825007115f697722831656e6477a427783fe75c201c82b"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:395c217156723fe21809dfe8f7a433c5bf8e9bce229944668e4ec709c37c5442"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14f85ff2d5d826a7ce9e6c31e803281ed5a096789f47f52cb728c88f488de01b"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e11373d8e4f1f46cf3065bf613f0df9854803dc95aa4a35354ffac19f8c52127"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01f9731761f711e42459f87bd2ad5d744b9773b5dd05446f3b579a0f077e78e3"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bf5c27bd5867a5fa5341fad29f0d5838e2fed617ef5346884baf8b8b16dd82"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bfabbd7e70785af726cc0209e8e64b926abf91741eca80678b221aad9e72135"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_aarch64.whl", 
hash = "sha256:6369f4bd4d27944498094dccced1ae7ca43376a59dbfe4c8b6a16e9e3dc3ccce"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4879ee1d07a6b2c232ae6a74570f4788cd7a29b3cd38bc39bf60225b1d075c78"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4336afc0e81726350bd5863e3c3116d8c12aa7f457d3d0b3b3dc36137fec6feb"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:63ce1dccfd08d9c5341ac82d62aa04345bc4bf41b5e5b7b2c6c172a28e0eda27"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-win32.whl", hash = "sha256:59421806c1a0803ea7de9ed061d656c041a84db0da7e73266b98db4c7ba263da"}, + {file = "psycopg2_binary-2.9.8-cp311-cp311-win_amd64.whl", hash = "sha256:ccaa2ae03990cedde1f618ff11ec89fefa84622da73091a67b44553ca8be6711"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5aa0c99c12075c593dcdccbb8a7aaa714b716560cc99ef9206f9e75b77520801"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91719f53ed2a95ebecefac48d855d811cba9d9fe300acc162993bdfde9bc1c3b"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c68a2e1afb4f2a5bb4b7bb8f90298d21196ac1c66418523e549430b8c4b7cb1e"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278ebd63ced5a5f3af5394cb75a9a067243eee21f42f0126c6f1cf85eaeb90f9"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c84ff9682bc4520504c474e189b3de7c4a4029e529c8b775e39c95c33073767"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6f5e70e40dae47a4dc7f8eb390753bb599b0f4ede314580e6faa3b7383695d19"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:673eafbdaa4ed9f5164c90e191c3895cc5f866b9b379fdb59f3a2294e914d9bd"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5a0a6e4004697ec98035ff3b8dfc4dba8daa477b23ee891d831cd3cd65ace6be"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d29efab3c5d6d978115855a0f2643e0ee8c6450dc536d5b4afec6f52ab99e99e"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-win32.whl", hash = "sha256:d4a19a3332f2ac6d093e60a6f1c589f97eb9f9de7e27ea80d67f188384e31572"}, + {file = "psycopg2_binary-2.9.8-cp37-cp37m-win_amd64.whl", hash = "sha256:5262713988d97a9d4cd54b682dec4a413b87b76790e5b16f480450550d11a8f7"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e46b0f4683539965ce849f2c13fc53e323bb08d84d4ba2e4b3d976f364c84210"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3fd44b52bc9c74c1512662e8da113a1c55127adeeacebaf460babe766517b049"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b6c607ecb6a9c245ebe162d63ccd9222d38efa3c858bbe38d32810b08b8f87e"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6ef615d48fa60361e57f998327046bd89679c25d06eee9e78156be5a7a76e03"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65403113ac3a4813a1409fb6a1e43c658b459cc8ed8afcc5f4baf02ec8be4334"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:5debcb23a052f3fb4c165789ea513b562b2fac0f0f4f53eaf3cf4dc648907ff8"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dc145a241e1f6381efb924bcf3e3462d6020b8a147363f9111eb0a9c89331ad7"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1d669887df169a9b0c09e0f5b46891511850a9ddfcde3593408af9d9774c5c3a"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:19d40993701e39c49b50e75cd690a6af796d7e7210941ee0fe49cf12b25840e5"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b8b2cdf3bce4dd91dc035fbff4eb812f5607dda91364dc216b0920b97b521c7"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-win32.whl", hash = "sha256:4960c881471ca710b81a67ef148c33ee121c1f8e47a639cf7e06537fe9fee337"}, + {file = "psycopg2_binary-2.9.8-cp38-cp38-win_amd64.whl", hash = "sha256:aeb09db95f38e75ae04e947d283e07be34d03c4c2ace4f0b73dbb9143d506e67"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5aef3296d44d05805e634dbbd2972aa8eb7497926dd86047f5e39a79c3ecc086"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d6b592ecc8667e608b9e7344259fbfb428cc053df0062ec3ac75d8270cd5a9f"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:693a4e7641556f0b421a7d6c6a74058aead407d860ac1cb9d0bf25be0ca73de8"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf60c599c40c266a01c458e9c71db7132b11760f98f08233f19b3e0a2153cbf1"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cbe1e19f59950afd66764e3c905ecee9f2aee9f8df2ef35af6f7948ad93f620"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc37de7e3a87f5966965fc874d33c9b68d638e6c3718fdf32a5083de563428b0"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e1bb4eb0d9925d65dabaaabcbb279fab444ba66d73f86d4c07dfd11f0139c06"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e7bdc94217ae20ad03b375a991e107a31814053bee900ad8c967bf82ef3ff02e"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:565edaf9f691b17a7fdbabd368b5b3e67d0fdc8f7f6b52177c1d3289f4e763fd"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e3071c947bda6afc6fe2e7b64ebd64fb2cad1bc0e705a3594cb499291f2dfec"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-win32.whl", hash = "sha256:205cecdd81ff4f1ddd687ce7d06879b9b80cccc428d8d6ebf36fcba08bb6d361"}, + {file = "psycopg2_binary-2.9.8-cp39-cp39-win_amd64.whl", hash = "sha256:1f279ba74f0d6b374526e5976c626d2ac3b8333b6a7b08755c513f4d380d3add"}, ] [[package]] From 9017c23dc5fec4d828622e2e2cf552c9d8d9cd4a Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 2 Oct 2023 11:10:32 -0400 Subject: [PATCH 073/262] Fix TypeError in update_lane_size script (#1420) Add default of None to _db arg for the update_lane_size script. 
Fixes this traceback: File "bin/update_lane_size", line 11, in UpdateLaneSizeScript().run() TypeError: __init__() missing 1 required positional argument: '_db' --- core/scripts.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/scripts.py b/core/scripts.py index 62290e4575..a405ec1422 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -2645,7 +2645,7 @@ def add_line(id, name, protocol, metadata_identifier): class UpdateLaneSizeScript(LaneSweeperScript): - def __init__(self, _db, *args, **kwargs): + def __init__(self, _db=None, *args, **kwargs): super().__init__(_db, *args, **kwargs) search = kwargs.get("search_index_client", None) self._search: ExternalSearchIndex = search or ExternalSearchIndex(self._db) From 6cee785135b07218153e342b556acab20334251d Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 2 Oct 2023 11:44:17 -0400 Subject: [PATCH 074/262] Fully type hint the OPDS Importer and OPDS2 Importer classes (PP-313) (#1397) * Add type hinting to OPDS importer class * Type hint ODL importer classes * Add type hints for opds for distributors --- api/axis.py | 2 +- api/circulation.py | 6 +- api/lcp/hash.py | 13 +- api/odl.py | 219 ++++++++----- api/odl2.py | 69 +++-- api/opds_for_distributors.py | 155 +++++++--- api/selftest.py | 4 +- core/feed/annotator/admin.py | 2 +- core/metadata_layer.py | 2 +- core/model/datasource.py | 2 +- core/model/identifier.py | 30 +- core/model/licensing.py | 23 +- core/model/measurement.py | 10 +- core/model/patron.py | 3 +- core/model/resource.py | 10 +- core/opds2_import.py | 63 ++-- core/opds_import.py | 378 +++++++++++++++-------- core/util/datetime_helpers.py | 12 +- core/util/xmlparser.py | 38 ++- pyproject.toml | 6 + tests/api/feed/test_library_annotator.py | 8 +- tests/api/test_odl.py | 6 +- tests/api/test_opds.py | 8 +- tests/api/test_opds_for_distributors.py | 17 +- tests/api/test_selftest.py | 3 +- tests/core/test_opds.py | 8 +- tests/core/test_opds_import.py | 39 +-- 27 files changed, 740 insertions(+), 396 deletions(-) diff --git a/api/axis.py b/api/axis.py index 546aa40076..1b0ad69a78 100644 --- a/api/axis.py +++ b/api/axis.py @@ -272,7 +272,7 @@ def _count_activity(): ) # Run the tests defined by HasCollectionSelfTests - for result in super()._run_self_tests(): + for result in super()._run_self_tests(_db): yield result def refresh_bearer_token(self): diff --git a/api/circulation.py b/api/circulation.py index 12fc47a4ff..290405d9d1 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -435,7 +435,7 @@ def __init__( identifier_type: Optional[str], identifier: Optional[str], start_date: Optional[datetime.datetime], - end_date: datetime.datetime, + end_date: Optional[datetime.datetime], fulfillment_info: Optional[FulfillmentInfo] = None, external_identifier: Optional[str] = None, locked_to: Optional[DeliveryMechanismInfo] = None, @@ -752,9 +752,7 @@ def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> No ... @abstractmethod - def update_availability( - self, licensepool: LicensePool - ) -> Tuple[LicensePool, bool, bool]: + def update_availability(self, licensepool: LicensePool) -> None: """Update availability information for a book.""" ... 
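A minimal, hypothetical sketch of why the _db=None default from [PATCH 073/262] above lets bin/update_lane_size construct the script again; these are simplified stand-ins for the real classes in core/scripts.py, which also wire up a SQLAlchemy session and an ExternalSearchIndex, and the lazy-session fallback shown here is an assumption about the base Script class rather than its actual code.

    # Hypothetical, simplified stand-ins for the classes in core/scripts.py; the
    # sketch assumes the real base class lazily opens a database session when
    # _db is None.
    class LaneSweeperScript:
        def __init__(self, _db=None, *args, **kwargs):
            self._db = _db if _db is not None else "lazily-created session"


    class UpdateLaneSizeScript(LaneSweeperScript):
        def __init__(self, _db=None, *args, **kwargs):  # default added by the patch
            super().__init__(_db, *args, **kwargs)


    # Before the patch, _db had no default here, so the bare constructor call made
    # by bin/update_lane_size raised:
    #   TypeError: __init__() missing 1 required positional argument: '_db'
    # With the default in place, the script can be constructed with no arguments.
    script = UpdateLaneSizeScript()
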
diff --git a/api/lcp/hash.py b/api/lcp/hash.py index 4569dfa58d..5fd98a75c7 100644 --- a/api/lcp/hash.py +++ b/api/lcp/hash.py @@ -1,5 +1,5 @@ import hashlib -from abc import ABCMeta, abstractmethod +from abc import ABC, abstractmethod from enum import Enum from core.exceptions import BaseError @@ -14,20 +14,19 @@ class HashingError(BaseError): """Raised in the case of errors occurred during hashing""" -class Hasher(metaclass=ABCMeta): +class Hasher(ABC): """Base class for all implementations of different hashing algorithms""" - def __init__(self, hashing_algorithm): + def __init__(self, hashing_algorithm: HashingAlgorithm) -> None: """Initializes a new instance of Hasher class :param hashing_algorithm: Hashing algorithm - :type hashing_algorithm: HashingAlgorithm """ self._hashing_algorithm = hashing_algorithm @abstractmethod - def hash(self, value): - raise NotImplementedError() + def hash(self, value: str) -> str: + ... class UniversalHasher(Hasher): @@ -49,5 +48,5 @@ def hash(self, value: str) -> str: class HasherFactory: - def create(self, hashing_algorithm): + def create(self, hashing_algorithm: HashingAlgorithm) -> Hasher: return UniversalHasher(hashing_algorithm) diff --git a/api/odl.py b/api/odl.py index 02e00010a1..4ee86279b2 100644 --- a/api/odl.py +++ b/api/odl.py @@ -1,15 +1,18 @@ +from __future__ import annotations + import binascii import datetime import json import logging import uuid -from typing import Callable, Dict, List, Optional, Tuple, Union +from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, Union import dateutil -import sqlalchemy from flask import url_for from flask_babel import lazy_gettext as _ +from lxml.etree import Element from pydantic import HttpUrl, PositiveInt +from requests import Response from sqlalchemy.sql.expression import or_ from uritemplate import URITemplate @@ -218,20 +221,22 @@ class ODLAPI( ] @classmethod - def settings_class(cls): + def settings_class(cls) -> Type[ODLSettings]: return ODLSettings @classmethod - def library_settings_class(cls): + def library_settings_class(cls) -> Type[ODLLibrarySettings]: return ODLLibrarySettings - def label(self): - return self.NAME + @classmethod + def label(cls) -> str: + return cls.NAME - def description(self): - return self.DESCRIPTION + @classmethod + def description(cls) -> str: + return cls.DESCRIPTION # type: ignore[no-any-return] - def __init__(self, _db, collection): + def __init__(self, _db: Session, collection: Collection) -> None: super().__init__(_db, collection) if collection.protocol != self.NAME: raise ValueError( @@ -252,9 +257,7 @@ def __init__(self, _db, collection): self._credential_factory = LCPCredentialFactory() self._hasher_instance: Optional[Hasher] = None - def external_integration( - self, db: sqlalchemy.orm.session.Session - ) -> ExternalIntegration: + def external_integration(self, db: Session) -> ExternalIntegration: """Return an external integration associated with this object. :param db: Database session @@ -262,7 +265,9 @@ def external_integration( """ return self.collection.external_integration - def internal_format(self, delivery_mechanism): + def internal_format( # type: ignore[override] + self, delivery_mechanism: Optional[LicensePoolDeliveryMechanism] + ) -> Optional[LicensePoolDeliveryMechanism]: """Each consolidated copy is only available in one format, so we don't need a mapping to internal formats. 
""" @@ -280,23 +285,22 @@ def collection(self) -> Collection: raise ValueError(f"Collection not found: {self.collection_id}") return collection - def _get_hasher(self): + def _get_hasher(self) -> Hasher: """Returns a Hasher instance :return: Hasher instance - :rtype: hash.Hasher """ config = self.configuration() if self._hasher_instance is None: self._hasher_instance = self._hasher_factory.create( - config.encryption_algorithm + config.encryption_algorithm # type: ignore[arg-type] if config.encryption_algorithm else ODLAPIConstants.DEFAULT_ENCRYPTION_ALGORITHM ) return self._hasher_instance - def _get(self, url, headers=None): + def _get(self, url: str, headers: Optional[Dict[str, str]] = None) -> Response: """Make a normal HTTP request, but include an authentication header with the credentials for the collection. """ @@ -309,11 +313,11 @@ def _get(self, url, headers=None): return HTTP.get_with_timeout(url, headers=headers) - def _url_for(self, *args, **kwargs): + def _url_for(self, *args: Any, **kwargs: Any) -> str: """Wrapper around flask's url_for to be overridden for tests.""" return url_for(*args, **kwargs) - def get_license_status_document(self, loan): + def get_license_status_document(self, loan: Loan) -> Dict[str, Any]: """Get the License Status Document for a loan. For a new loan, create a local loan with no external identifier and @@ -360,9 +364,10 @@ def get_license_status_document(self, loan): ) config = self.configuration() - url_template = URITemplate(loan.license.checkout_url) + checkout_url = str(loan.license.checkout_url) + url_template = URITemplate(checkout_url) url = url_template.expand( - id=id, + id=str(id), checkout_id=checkout_id, patron_id=patron_id, expires=expires.isoformat(), @@ -384,9 +389,9 @@ def get_license_status_document(self, loan): raise BadResponseException( url, "License Status Document had an unknown status value." ) - return status_doc + return status_doc # type: ignore[no-any-return] - def checkin(self, patron, pin, licensepool): + def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> bool: # type: ignore[override] """Return a loan early.""" _db = Session.object_session(patron) @@ -397,10 +402,10 @@ def checkin(self, patron, pin, licensepool): ) if loan.count() < 1: raise NotCheckedOut() - loan = loan.one() - return self._checkin(loan) + loan_result = loan.one() + return self._checkin(loan_result) - def _checkin(self, loan): + def _checkin(self, loan: Loan) -> bool: _db = Session.object_session(loan) doc = self.get_license_status_document(loan) status = doc.get("status") @@ -427,7 +432,7 @@ def _checkin(self, loan): # must be returned through the DRM system. If that's true, the # app will already be doing that on its own, so we'll silently # do nothing. - return + return False # Hit the distributor's return link. self._get(return_url) @@ -439,12 +444,18 @@ def _checkin(self, loan): # However, it might be because the loan has already been fulfilled # and must be returned through the DRM system, which the app will # do on its own, so we can ignore the problem. 
- loan = get_one(_db, Loan, id=loan.id) - if loan: - return + new_loan = get_one(_db, Loan, id=loan.id) + if new_loan: + return False return True - def checkout(self, patron, pin, licensepool, internal_format): + def checkout( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + internal_format: Optional[str], + ) -> LoanInfo: """Create a new loan.""" _db = Session.object_session(patron) @@ -457,18 +468,20 @@ def checkout(self, patron, pin, licensepool, internal_format): raise AlreadyCheckedOut() hold = get_one(_db, Hold, patron=patron, license_pool_id=licensepool.id) - loan = self._checkout(patron, licensepool, hold) + loan_obj = self._checkout(patron, licensepool, hold) return LoanInfo( licensepool.collection, licensepool.data_source.name, licensepool.identifier.type, licensepool.identifier.identifier, - loan.start, - loan.end, - external_identifier=loan.external_identifier, + loan_obj.start, + loan_obj.end, + external_identifier=loan_obj.external_identifier, ) - def _checkout(self, patron: Patron, licensepool, hold=None): + def _checkout( + self, patron: Patron, licensepool: LicensePool, hold: Optional[Hold] = None + ) -> Loan: _db = Session.object_session(patron) if not any(l for l in licensepool.licenses if not l.is_inactive): @@ -483,7 +496,9 @@ def _checkout(self, patron: Patron, licensepool, hold=None): # If there's a holds queue, the patron must have a non-expired hold # with position 0 to check out the book. if ( - not hold or hold.position > 0 or (hold.end and hold.end < utc_now()) + not hold + or (hold.position and hold.position > 0) + or (hold.end and hold.end < utc_now()) ) and licensepool.licenses_available < 1: raise NoAvailableCopies() @@ -534,27 +549,28 @@ def _checkout(self, patron: Patron, licensepool, hold=None): self.update_licensepool(licensepool) return loan - def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): - """Get the actual resource file to the patron. - - :param kwargs: A container for arguments to fulfill() - which are not relevant to this vendor. - - :return: a FulfillmentInfo object. - """ + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + internal_format: Optional[str] = None, + part: Optional[str] = None, + fulfill_part_url: Optional[Callable[[Optional[str]], str]] = None, + ) -> FulfillmentInfo: + """Get the actual resource file to the patron.""" _db = Session.object_session(patron) loan = ( _db.query(Loan) .filter(Loan.patron == patron) .filter(Loan.license_pool_id == licensepool.id) - ) - loan = loan.one() + ).one() return self._fulfill(loan, internal_format) @staticmethod def _find_content_link_and_type( - links: List[Dict], + links: List[Dict[str, str]], drm_scheme: Optional[str], ) -> Tuple[Optional[str], Optional[str]]: """Find a content link with the type information corresponding to the selected delivery mechanism. 
@@ -647,7 +663,7 @@ def _count_holds_before(self, holdinfo: HoldInfo, pool: LicensePool) -> int: .count() ) - def _update_hold_data(self, hold: Hold): + def _update_hold_data(self, hold: Hold) -> None: pool: LicensePool = hold.license_pool holdinfo = HoldInfo( pool.collection, @@ -665,7 +681,7 @@ def _update_hold_data(self, hold: Hold): def _update_hold_end_date( self, holdinfo: HoldInfo, pool: LicensePool, library: Library - ): + ) -> None: _db = Session.object_session(pool) # First make sure the hold position is up-to-date, since we'll @@ -751,7 +767,7 @@ def _update_hold_end_date( days=default_reservation_period ) - def _update_hold_position(self, holdinfo: HoldInfo, pool: LicensePool): + def _update_hold_position(self, holdinfo: HoldInfo, pool: LicensePool) -> None: _db = Session.object_session(pool) loans_count = ( _db.query(Loan) @@ -774,7 +790,7 @@ def _update_hold_position(self, holdinfo: HoldInfo, pool: LicensePool): # Add 1 since position 0 indicates the hold is ready. holdinfo.hold_position = holds_count + 1 - def update_licensepool(self, licensepool: LicensePool): + def update_licensepool(self, licensepool: LicensePool) -> None: # Update the pool and the next holds in the queue when a license is reserved. licensepool.update_availability_from_licenses( analytics=self.analytics, @@ -786,11 +802,17 @@ def update_licensepool(self, licensepool: LicensePool): # This hold just got a reserved license. self._update_hold_data(hold) - def place_hold(self, patron, pin, licensepool, notification_email_address): + def place_hold( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + notification_email_address: Optional[str], + ) -> HoldInfo: """Create a new hold.""" return self._place_hold(patron, licensepool) - def _place_hold(self, patron, licensepool): + def _place_hold(self, patron: Patron, licensepool: LicensePool) -> HoldInfo: _db = Session.object_session(patron) # Make sure pool info is updated. @@ -810,14 +832,19 @@ def _place_hold(self, patron, licensepool): if hold is not None: raise AlreadyOnHold() - licensepool.patrons_in_hold_queue += 1 + patrons_in_hold_queue = ( + licensepool.patrons_in_hold_queue + if licensepool.patrons_in_hold_queue + else 0 + ) + licensepool.patrons_in_hold_queue = patrons_in_hold_queue + 1 holdinfo = HoldInfo( licensepool.collection, licensepool.data_source.name, licensepool.identifier.type, licensepool.identifier.identifier, utc_now(), - 0, + None, 0, ) library = patron.library @@ -825,7 +852,7 @@ def _place_hold(self, patron, licensepool): return holdinfo - def release_hold(self, patron, pin, licensepool): + def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: """Cancel a hold.""" _db = Session.object_session(patron) @@ -837,9 +864,9 @@ def release_hold(self, patron, pin, licensepool): ) if not hold: raise NotOnHold() - return self._release_hold(hold) + self._release_hold(hold) - def _release_hold(self, hold): + def _release_hold(self, hold: Hold) -> Literal[True]: # If the book was ready and the patron revoked the hold instead # of checking it out, but no one else had the book on hold, the # book is now available for anyone to check out. 
If someone else @@ -852,7 +879,7 @@ def _release_hold(self, hold): self.update_licensepool(licensepool) return True - def patron_activity(self, patron, pin): + def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: """Look up non-expired loans for this collection in the database.""" _db = Session.object_session(patron) loans = ( @@ -904,7 +931,9 @@ def patron_activity(self, patron, pin): for hold in remaining_holds ] - def update_loan(self, loan, status_doc=None): + def update_loan( + self, loan: Loan, status_doc: Optional[Dict[str, Any]] = None + ) -> None: """Check a loan's status, and if it is no longer active, delete the loan and update its pool's availability. """ @@ -918,7 +947,8 @@ def update_loan(self, loan, status_doc=None): # but if the document came from a notification it hasn't been checked yet. if status not in self.STATUS_VALUES: raise BadResponseException( - "The License Status Document had an unknown status value." + str(loan.license.checkout_url), + "The License Status Document had an unknown status value.", ) if status in [ @@ -937,7 +967,7 @@ def update_loan(self, loan, status_doc=None): _db.delete(loan) self.update_licensepool(loan.license_pool) - def update_availability(self, licensepool): + def update_availability(self, licensepool: LicensePool) -> None: pass @@ -975,11 +1005,13 @@ class ODLImporter(OPDSImporter): } @classmethod - def fetch_license_info(cls, document_link: str, do_get: Callable) -> Optional[dict]: + def fetch_license_info( + cls, document_link: str, do_get: Callable[..., Tuple[int, Any, bytes]] + ) -> Optional[Dict[str, Any]]: status_code, _, response = do_get(document_link, headers={}) if status_code in (200, 201): license_info_document = json.loads(response) - return license_info_document + return license_info_document # type: ignore[no-any-return] else: logging.warning( f"License Info Document is not available. " @@ -990,9 +1022,9 @@ def fetch_license_info(cls, document_link: str, do_get: Callable) -> Optional[di @classmethod def parse_license_info( cls, - license_info_document: dict, + license_info_document: Dict[str, Any], license_info_link: str, - checkout_link: str, + checkout_link: Optional[str], ) -> Optional[LicenseData]: """Check the license's attributes passed as parameters: - if they're correct, turn them into a LicenseData object @@ -1078,11 +1110,11 @@ def parse_license_info( def get_license_data( cls, license_info_link: str, - checkout_link: str, - feed_license_identifier: str, - feed_license_expires: str, - feed_concurrency: int, - do_get: Callable, + checkout_link: Optional[str], + feed_license_identifier: Optional[str], + feed_license_expires: Optional[datetime.datetime], + feed_concurrency: Optional[int], + do_get: Callable[..., Tuple[int, Any, bytes]], ) -> Optional[LicenseData]: license_info_document = cls.fetch_license_info(license_info_link, do_get) @@ -1130,8 +1162,12 @@ def get_license_data( @classmethod def _detail_for_elementtree_entry( - cls, parser, entry_tag, feed_url=None, do_get=None - ): + cls, + parser: OPDSXMLParser, + entry_tag: Element, + feed_url: Optional[str] = None, + do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + ) -> Dict[str, Any]: do_get = do_get or Representation.cautious_http_get # TODO: Review for consistency when updated ODL spec is ready. @@ -1152,7 +1188,7 @@ def _detail_for_elementtree_entry( # By default, dcterms:format includes the media type of a # DRM-free resource. 
content_type = full_content_type - drm_schemes = [] + drm_schemes: List[str | None] = [] # But it may instead describe an audiobook protected with # the Feedbooks access-control scheme. @@ -1206,11 +1242,12 @@ def _detail_for_elementtree_entry( concurrent_checkouts = subtag(terms[0], "odl:concurrent_checkouts") expires = subtag(terms[0], "odl:expires") - if concurrent_checkouts is not None: - concurrent_checkouts = int(concurrent_checkouts) - - if expires is not None: - expires = to_utc(dateutil.parser.parse(expires)) + concurrent_checkouts_int = ( + int(concurrent_checkouts) if concurrent_checkouts is not None else None + ) + expires_datetime = ( + to_utc(dateutil.parser.parse(expires)) if expires is not None else None + ) if not odl_status_link: parsed_license = None @@ -1219,8 +1256,8 @@ def _detail_for_elementtree_entry( odl_status_link, checkout_link, identifier, - expires, - concurrent_checkouts, + expires_datetime, + concurrent_checkouts_int, do_get, ) @@ -1248,7 +1285,13 @@ class ODLImportMonitor(OPDSImportMonitor): PROTOCOL = ODLImporter.NAME SERVICE_NAME = "ODL Import Monitor" - def __init__(self, _db, collection, import_class, **import_class_kwargs): + def __init__( + self, + _db: Session, + collection: Collection, + import_class: Type[OPDSImporter], + **import_class_kwargs: Any, + ): # Always force reimport ODL collections to get up to date license information super().__init__( _db, collection, import_class, force_reimport=True, **import_class_kwargs @@ -1262,11 +1305,17 @@ class ODLHoldReaper(CollectionMonitor): SERVICE_NAME = "ODL Hold Reaper" PROTOCOL = ODLAPI.NAME - def __init__(self, _db, collection=None, api=None, **kwargs): + def __init__( + self, + _db: Session, + collection: Collection, + api: Optional[ODLAPI] = None, + **kwargs: Any, + ): super().__init__(_db, collection, **kwargs) self.api = api or ODLAPI(_db, collection) - def run_once(self, progress): + def run_once(self, progress: TimestampData) -> TimestampData: # Find holds that have expired. 
expired_holds = ( self._db.query(Hold) diff --git a/api/odl2.py b/api/odl2.py index b4b8bc0dbd..946d71eb63 100644 --- a/api/odl2.py +++ b/api/odl2.py @@ -1,10 +1,11 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type from flask_babel import lazy_gettext as _ from pydantic import PositiveInt +from sqlalchemy.orm import Session from webpub_manifest_parser.odl import ODLFeedParserFactory from webpub_manifest_parser.opds2.registry import OPDS2LinkRelationsRegistry @@ -23,7 +24,12 @@ from core.util.datetime_helpers import to_utc if TYPE_CHECKING: - from core.model.patron import Patron + from webpub_manifest_parser.core.ast import Metadata + from webpub_manifest_parser.opds2.ast import OPDS2Feed, OPDS2Publication + + from api.circulation import HoldInfo + from core.model import Collection, Identifier, LicensePool + from core.model.patron import Hold, Loan, Patron class ODL2Settings(ODLSettings): @@ -71,16 +77,18 @@ class ODL2API(ODLAPI): NAME = ExternalIntegration.ODL2 @classmethod - def settings_class(cls): + def settings_class(cls) -> Type[ODL2Settings]: return ODL2Settings - def __init__(self, _db, collection): + def __init__(self, _db: Session, collection: Collection) -> None: super().__init__(_db, collection) config = self.configuration() - self.loan_limit = config.loan_limit - self.hold_limit = config.hold_limit + self.loan_limit = config.loan_limit # type: ignore[attr-defined] + self.hold_limit = config.hold_limit # type: ignore[attr-defined] - def _checkout(self, patron: Patron, licensepool, hold=None): + def _checkout( + self, patron: Patron, licensepool: LicensePool, hold: Optional[Hold] = None + ) -> Loan: # If the loan limit is not None or 0 if self.loan_limit: loans = list( @@ -93,7 +101,7 @@ def _checkout(self, patron: Patron, licensepool, hold=None): raise PatronLoanLimitReached(limit=self.loan_limit) return super()._checkout(patron, licensepool, hold) - def _place_hold(self, patron: Patron, licensepool): + def _place_hold(self, patron: Patron, licensepool: LicensePool) -> HoldInfo: # If the hold limit is not None or 0 if self.hold_limit: holds = list( @@ -117,19 +125,19 @@ class ODL2Importer(OPDS2Importer, HasExternalIntegration): NAME = ODL2API.NAME @classmethod - def settings_class(cls): + def settings_class(cls) -> Type[ODL2Settings]: # type: ignore[override] return ODL2Settings def __init__( self, - db, - collection, - parser=None, - data_source_name=None, - identifier_mapping=None, - http_get=None, - content_modifier=None, - map_from_collection=None, + db: Session, + collection: Collection, + parser: Optional[RWPMManifestParser] = None, + data_source_name: str | None = None, + identifier_mapping: Dict[Identifier, Identifier] | None = None, + http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + content_modifier: Optional[Callable[..., None]] = None, + map_from_collection: Optional[bool] = None, ): """Initialize a new instance of ODL2Importer class. @@ -173,20 +181,19 @@ def __init__( ) self._logger = logging.getLogger(__name__) - def _extract_publication_metadata(self, feed, publication, data_source_name): + def _extract_publication_metadata( + self, + feed: OPDS2Feed, + publication: OPDS2Publication, + data_source_name: Optional[str], + ) -> Metadata: """Extract a Metadata object from webpub-manifest-parser's publication. 
:param publication: Feed object - :type publication: opds2_ast.OPDS2Feed - :param publication: Publication object - :type publication: opds2_ast.OPDS2Publication - :param data_source_name: Data source's name - :type data_source_name: str :return: Publication's metadata - :rtype: Metadata """ metadata = super()._extract_publication_metadata( feed, publication, data_source_name @@ -195,7 +202,7 @@ def _extract_publication_metadata(self, feed, publication, data_source_name): licenses = [] medium = None - skipped_license_formats = self.configuration().skipped_license_formats + skipped_license_formats = self.configuration().skipped_license_formats # type: ignore[attr-defined] if skipped_license_formats: skipped_license_formats = set(skipped_license_formats) @@ -251,6 +258,7 @@ def _extract_publication_metadata(self, feed, publication, data_source_name): if not medium: medium = Edition.medium_from_media_type(license_format) + drm_schemes: List[str | None] if license_format in ODLImporter.LICENSE_FORMATS: # Special case to handle DeMarque audiobooks which include the protection # in the content type. When we see a license format of @@ -291,9 +299,6 @@ def _extract_publication_metadata(self, feed, publication, data_source_name): return metadata - def external_integration(self, db): - return self.collection.external_integration - class ODL2ImportMonitor(OPDS2ImportMonitor): """Import information from an ODL feed.""" @@ -301,7 +306,13 @@ class ODL2ImportMonitor(OPDS2ImportMonitor): PROTOCOL = ODL2Importer.NAME SERVICE_NAME = "ODL 2.x Import Monitor" - def __init__(self, _db, collection, import_class, **import_class_kwargs): + def __init__( + self, + _db: Session, + collection: Collection, + import_class: Type[ODL2Importer], + **import_class_kwargs: Any, + ) -> None: # Always force reimport ODL collections to get up to date license information super().__init__( _db, collection, import_class, force_reimport=True, **import_class_kwargs diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 88d5f753f8..be535dada3 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -1,10 +1,28 @@ +from __future__ import annotations + import datetime import json -from typing import Type +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Generator, + List, + Optional, + Set, + Tuple, + Type, +) import feedparser from flask_babel import lazy_gettext as _ +from api.circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo +from api.circulation_exceptions import ( + CannotFulfill, + LibraryAuthorizationFailedException, +) from api.selftest import HasCollectionSelfTests from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField @@ -27,8 +45,14 @@ from core.util.http import HTTP from core.util.string_helpers import base64 -from .circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo -from .circulation_exceptions import * +if TYPE_CHECKING: + from requests import Response + + from api.circulation import HoldInfo + from core.coverage import CoverageFailure + from core.metadata_layer import CirculationData + from core.model import Edition, LicensePoolDeliveryMechanism, Patron, Work + from core.selftest import SelfTestResult class OPDSForDistributorsSettings(BaseOPDSImporterSettings): @@ -81,20 +105,22 @@ class OPDSForDistributorsAPI( } @classmethod - def settings_class(cls) -> Type[BaseSettings]: + def settings_class(cls) -> 
Type[OPDSForDistributorsSettings]: return OPDSForDistributorsSettings @classmethod - def library_settings_class(cls): + def library_settings_class(cls) -> Type[OPDSForDistributorsLibrarySettings]: return OPDSForDistributorsLibrarySettings - def description(self): - return self.DESCRIPTION + @classmethod + def description(cls) -> str: + return cls.DESCRIPTION # type: ignore[no-any-return] - def label(self): - return self.NAME + @classmethod + def label(cls) -> str: + return cls.NAME - def __init__(self, _db, collection): + def __init__(self, _db: Session, collection: Collection): super().__init__(_db, collection) self.external_integration_id = collection.external_integration.id @@ -103,24 +129,27 @@ def __init__(self, _db, collection): self.username = config.username self.password = config.password self.feed_url = collection.external_account_id - self.auth_url = None - - @property - def collection(self): - return Collection.by_id(self._db, id=self.collection_id) + self.auth_url: Optional[str] = None - def external_integration(self, _db): + def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: return get_one(_db, ExternalIntegration, id=self.external_integration_id) - def _run_self_tests(self, _db): + def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: """Try to get a token.""" yield self.run_test("Negotiate a fulfillment token", self._get_token, _db) - def _request_with_timeout(self, method, url, *args, **kwargs): + def _request_with_timeout( + self, method: str, url: Optional[str], *args: Any, **kwargs: Any + ) -> Response: """Wrapper around HTTP.request_with_timeout to be overridden for tests.""" + if url is None: + name = self.collection.name if self.collection else "unknown" + raise LibraryAuthorizationFailedException( + f"No URL provided to request_with_timeout for collection: {name}/{self.collection_id}." + ) return HTTP.request_with_timeout(method, url, *args, **kwargs) - def _get_token(self, _db) -> Credential: + def _get_token(self, _db: Session) -> Credential: # If this is the first time we're getting a token, we # need to find the authenticate url in the OPDS # authentication document. @@ -212,7 +241,12 @@ def refresh(credential: Credential) -> None: refresher_method=refresh, ) - def can_fulfill_without_loan(self, patron, licensepool, lpdm): + def can_fulfill_without_loan( + self, + patron: Optional[Patron], + pool: LicensePool, + lpdm: LicensePoolDeliveryMechanism, + ) -> bool: """Since OPDS For Distributors delivers books to the library rather than creating loans, any book can be fulfilled without identifying the patron, assuming the library's policies @@ -229,7 +263,7 @@ def can_fulfill_without_loan(self, patron, licensepool, lpdm): return True return False - def checkin(self, patron, pin, licensepool): + def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: # Delete the patron's loan for this licensepool. _db = Session.object_session(patron) try: @@ -244,7 +278,13 @@ def checkin(self, patron, pin, licensepool): # The patron didn't have this book checked out. 
pass - def checkout(self, patron, pin, licensepool, internal_format): + def checkout( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + internal_format: Optional[str], + ) -> LoanInfo: now = utc_now() return LoanInfo( licensepool.collection, @@ -255,7 +295,15 @@ def checkout(self, patron, pin, licensepool, internal_format): end_date=None, ) - def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + internal_format: Optional[str] = None, + part: Optional[str] = None, + fulfill_part_url: Optional[Callable[[Optional[str]], str]] = None, + ) -> FulfillmentInfo: """Retrieve a bearer token that can be used to download the book. :param kwargs: A container for arguments to fulfill() @@ -282,7 +330,7 @@ def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): # Build a application/vnd.librarysimplified.bearer-token # document using information from the credential. now = utc_now() - expiration = int((credential.expires - now).total_seconds()) + expiration = int((credential.expires - now).total_seconds()) # type: ignore[operator] token_document = dict( token_type="Bearer", access_token=credential.credential, @@ -304,7 +352,7 @@ def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): # We couldn't find an acquisition link for this book. raise CannotFulfill() - def patron_activity(self, patron, pin): + def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: # Look up loans for this collection in the database. _db = Session.object_session(patron) loans = ( @@ -325,17 +373,23 @@ def patron_activity(self, patron, pin): for loan in loans ] - def release_hold(self, patron, pin, licensepool): + def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: # All the books for this integration are available as simultaneous # use, so there's no need to release a hold. raise NotImplementedError() - def place_hold(self, patron, pin, licensepool, notification_email_address): + def place_hold( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + notification_email_address: Optional[str], + ) -> HoldInfo: # All the books for this integration are available as simultaneous # use, so there's no need to place a hold. raise NotImplementedError() - def update_availability(self, licensepool): + def update_availability(self, licensepool: LicensePool) -> None: pass @@ -343,10 +397,14 @@ class OPDSForDistributorsImporter(OPDSImporter): NAME = OPDSForDistributorsAPI.NAME @classmethod - def settings_class(cls): + def settings_class(cls) -> Type[OPDSForDistributorsSettings]: # type: ignore[override] return OPDSForDistributorsSettings - def update_work_for_edition(self, *args, **kwargs): + def update_work_for_edition( + self, + edition: Edition, + is_open_access: bool = False, + ) -> tuple[LicensePool | None, Work | None]: """After importing a LicensePool, set its availability appropriately. Books imported through OPDS For Distributors can be designated as @@ -354,17 +412,14 @@ def update_work_for_edition(self, *args, **kwargs): licensed content, a library that can perform this import is deemed to have a license for the title and can distribute unlimited copies. 
""" - pool, work = super().update_work_for_edition( - *args, is_open_access=False, **kwargs - ) - + pool, work = super().update_work_for_edition(edition, is_open_access=False) if pool: pool.unlimited_access = True return pool, work @classmethod - def _add_format_data(cls, circulation): + def _add_format_data(cls, circulation: CirculationData) -> None: for link in circulation.links: if ( link.rel == Hyperlink.GENERIC_OPDS_ACQUISITION @@ -388,12 +443,20 @@ class OPDSForDistributorsImportMonitor(OPDSImportMonitor): PROTOCOL = OPDSForDistributorsImporter.NAME SERVICE_NAME = "OPDS for Distributors Import Monitor" - def __init__(self, _db, collection, import_class, **kwargs): + def __init__( + self, + _db: Session, + collection: Collection, + import_class: Type[OPDSImporter], + **kwargs: Any, + ) -> None: super().__init__(_db, collection, import_class, **kwargs) self.api = OPDSForDistributorsAPI(_db, collection) - def _get(self, url, headers): + def _get( + self, url: str, headers: Dict[str, str] + ) -> Tuple[int, Dict[str, str], bytes]: """Make a normal HTTP request for an OPDS feed, but add in an auth header with the credentials for the collection. """ @@ -412,23 +475,31 @@ class OPDSForDistributorsReaperMonitor(OPDSForDistributorsImportMonitor): has been removed from the collection. """ - def __init__(self, _db, collection, import_class, **kwargs): + def __init__( + self, + _db: Session, + collection: Collection, + import_class: Type[OPDSImporter], + **kwargs: Any, + ) -> None: super().__init__(_db, collection, import_class, **kwargs) - self.seen_identifiers = set() + self.seen_identifiers: Set[str] = set() - def feed_contains_new_data(self, feed): + def feed_contains_new_data(self, feed: bytes | str) -> bool: # Always return True so that the importer will crawl the # entire feed. return True - def import_one_feed(self, feed): + def import_one_feed( + self, feed: bytes | str + ) -> Tuple[List[Edition], Dict[str, CoverageFailure | List[CoverageFailure]]]: # Collect all the identifiers in the feed. parsed_feed = feedparser.parse(feed) identifiers = [entry.get("id") for entry in parsed_feed.get("entries", [])] self.seen_identifiers.update(identifiers) return [], {} - def run_once(self, progress): + def run_once(self, progress: TimestampData) -> TimestampData: """Check to see if any identifiers we know about are no longer present on the remote. If there are any, remove them. diff --git a/api/selftest.py b/api/selftest.py index 3f26f58ff9..d86de07fe3 100644 --- a/api/selftest.py +++ b/api/selftest.py @@ -2,7 +2,7 @@ import logging from abc import ABC -from typing import Iterable, Optional, Tuple, Union +from typing import Generator, Iterable, Optional, Tuple, Union from sqlalchemy.orm.session import Session @@ -157,7 +157,7 @@ def _no_delivery_mechanisms_test(self): else: return "All titles in this collection have delivery mechanisms." - def _run_self_tests(self): + def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: yield self.run_test( "Checking for titles that have no delivery mechanisms.", self._no_delivery_mechanisms_test, diff --git a/core/feed/annotator/admin.py b/core/feed/annotator/admin.py index 8b5e903e29..27da250676 100644 --- a/core/feed/annotator/admin.py +++ b/core/feed/annotator/admin.py @@ -28,7 +28,7 @@ def annotate_work_entry( # Find staff rating and add a tag for it. 
for measurement in identifier.measurements: if ( - measurement.data_source.name == DataSource.LIBRARY_STAFF # type: ignore[attr-defined] + measurement.data_source.name == DataSource.LIBRARY_STAFF and measurement.is_most_recent and measurement.value is not None ): diff --git a/core/metadata_layer.py b/core/metadata_layer.py index e73ae1dc8b..4fe5e06edf 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -523,7 +523,7 @@ class LicenseData(LicenseFunctions): def __init__( self, identifier: str, - checkout_url: str, + checkout_url: Optional[str], status_url: str, status: LicenseStatus, checkouts_available: int, diff --git a/core/model/datasource.py b/core/model/datasource.py index d4bf6ecc5b..cc75bd275d 100644 --- a/core/model/datasource.py +++ b/core/model/datasource.py @@ -72,7 +72,7 @@ class DataSource(Base, HasSessionCache, DataSourceConstants): # One DataSource can generate many Measurements. measurements: Mapped[List[Measurement]] = relationship( - "Measurement", backref="data_source" + "Measurement", back_populates="data_source" ) # One DataSource can provide many Classifications. diff --git a/core/model/identifier.py b/core/model/identifier.py index 509d2d5a1a..295053181b 100644 --- a/core/model/identifier.py +++ b/core/model/identifier.py @@ -7,7 +7,7 @@ from abc import ABCMeta, abstractmethod from collections import defaultdict from functools import total_ordering -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING, List, overload from urllib.parse import quote, unquote import isbnlib @@ -391,16 +391,16 @@ def valid_as_foreign_identifier(cls, type, id): return True @property - def urn(self): - identifier_text = quote(self.identifier) + def urn(self) -> str: + identifier_text = quote(self.identifier or "") if self.type == Identifier.ISBN: return self.ISBN_URN_SCHEME_PREFIX + identifier_text elif self.type == Identifier.URI: - return self.identifier + return self.identifier or "" elif self.type == Identifier.GUTENBERG_ID: return self.GUTENBERG_URN_SCHEME_PREFIX + identifier_text else: - identifier_type = quote(self.type) + identifier_type = quote(self.type or "") return self.URN_SCHEME_PREFIX + "{}/{}".format( identifier_type, identifier_text ) @@ -561,6 +561,26 @@ def _parse_urn( return cls.for_foreign_id(_db, identifier_type, identifier_string) + @classmethod + @overload + def parse_urn( + cls, + _db: Session, + identifier_string: str, + must_support_license_pools: bool = False, + ) -> tuple[Identifier, bool]: + ... + + @classmethod + @overload + def parse_urn( + cls, + _db: Session, + identifier_string: str | None, + must_support_license_pools: bool = False, + ) -> tuple[Identifier | None, bool | None]: + ... + @classmethod def parse_urn( cls, diff --git a/core/model/licensing.py b/core/model/licensing.py index 93ab351e59..3511d3b8bd 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -5,7 +5,7 @@ import datetime import logging from enum import Enum as PythonEnum -from typing import TYPE_CHECKING, List, Literal, Tuple, overload +from typing import TYPE_CHECKING, List, Literal, Optional, Tuple, overload from sqlalchemy import Boolean, Column, DateTime from sqlalchemy import Enum as AlchemyEnum @@ -139,12 +139,12 @@ class License(Base, LicenseFunctions): # One License can have many Loans. 
loans: Mapped[List[Loan]] = relationship( - "Loan", backref="license", cascade="all, delete-orphan" + "Loan", back_populates="license", cascade="all, delete-orphan" ) __table_args__ = (UniqueConstraint("identifier", "license_pool_id"),) - def loan_to(self, patron: Patron, **kwargs): + def loan_to(self, patron: Patron, **kwargs) -> Tuple[Loan, bool]: loan, is_new = self.license_pool.loan_to(patron, **kwargs) loan.license = self return loan, is_new @@ -1021,7 +1021,7 @@ def loan_to( end=None, fulfillment=None, external_identifier=None, - ): + ) -> Tuple[Loan, bool]: _db = Session.object_session(patron) kwargs = dict(start=start or utc_now(), end=end) loan, is_new = get_one_or_create( @@ -1067,7 +1067,7 @@ def on_hold_to( hold.external_identifier = external_identifier return hold, new - def best_available_license(self): + def best_available_license(self) -> License | None: """Determine the next license that should be lent out for this pool. Time-limited licenses and perpetual licenses are the best. It doesn't matter which @@ -1084,7 +1084,7 @@ def best_available_license(self): The worst option would be pay-per-use, but we don't yet support any distributors that offer that model. """ - best = None + best: Optional[License] = None now = utc_now() for license in self.licenses: @@ -1094,7 +1094,10 @@ def best_available_license(self): active_loan_count = len( [l for l in license.loans if not l.end or l.end > now] ) - if active_loan_count >= license.checkouts_available: + checkouts_available = ( + license.checkouts_available if license.checkouts_available else 0 + ) + if active_loan_count >= checkouts_available: continue if ( @@ -1103,13 +1106,13 @@ def best_available_license(self): or ( license.is_time_limited and best.is_time_limited - and license.expires < best.expires + and license.expires < best.expires # type: ignore[operator] ) or (license.is_perpetual and not best.is_time_limited) or ( license.is_loan_limited and best.is_loan_limited - and license.checkouts_left > best.checkouts_left + and license.checkouts_left > best.checkouts_left # type: ignore[operator] ) ): best = license @@ -2024,7 +2027,7 @@ def lookup(cls, _db, uri): return status @classmethod - def rights_uri_from_string(cls, rights): + def rights_uri_from_string(cls, rights: str) -> str: rights = rights.lower() if rights == "public domain in the usa.": return RightsStatus.PUBLIC_DOMAIN_USA diff --git a/core/model/measurement.py b/core/model/measurement.py index 0f0e74aead..751fe1a52b 100644 --- a/core/model/measurement.py +++ b/core/model/measurement.py @@ -1,14 +1,19 @@ # Measurement - +from __future__ import annotations import bisect import logging +from typing import TYPE_CHECKING from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, Unicode +from sqlalchemy.orm import Mapped, relationship from . import Base from .constants import DataSourceConstants +if TYPE_CHECKING: + from .datasource import DataSource + class Measurement(Base): """A measurement of some numeric quantity associated with a @@ -711,6 +716,9 @@ class Measurement(Base): # A Measurement always comes from some DataSource. data_source_id = Column(Integer, ForeignKey("datasources.id"), index=True) + data_source: Mapped[DataSource] = relationship( + "DataSource", back_populates="measurements" + ) # The quantity being measured. 
quantity_measured = Column(Unicode, index=True) diff --git a/core/model/patron.py b/core/model/patron.py index 1d4e27bcd0..f5e8ccf8dd 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -32,7 +32,7 @@ if TYPE_CHECKING: from core.model.library import Library - from core.model.licensing import LicensePool, LicensePoolDeliveryMechanism + from core.model.licensing import License, LicensePool, LicensePoolDeliveryMechanism from .devicetokens import DeviceToken @@ -544,6 +544,7 @@ class Loan(Base, LoanAndHoldMixin): # It may also be associated with an individual License if the source # provides information about individual licenses. license_id = Column(Integer, ForeignKey("licenses.id"), index=True, nullable=True) + license: Mapped[License] = relationship("License", back_populates="loans") fulfillment_id = Column(Integer, ForeignKey("licensepooldeliveries.id")) fulfillment: Mapped[Optional[LicensePoolDeliveryMechanism]] = relationship( diff --git a/core/model/resource.py b/core/model/resource.py index 8423e9fa52..475bb2eabd 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -10,7 +10,7 @@ import traceback from hashlib import md5 from io import BytesIO -from typing import TYPE_CHECKING, Any, Dict, List, Tuple +from typing import TYPE_CHECKING, Dict, List, Tuple from urllib.parse import quote, urlparse, urlsplit import requests @@ -43,7 +43,7 @@ from .licensing import LicensePoolDeliveryMechanism if TYPE_CHECKING: - from core.model import CachedMARCFile, Work # noqa: autoflake + from core.model import CachedMARCFile class Resource(Base): @@ -1019,12 +1019,14 @@ def headers_to_string(cls, d): return json.dumps(dict(d)) @classmethod - def simple_http_get(cls, url, headers, **kwargs) -> Tuple[int, Any, Any]: + def simple_http_get( + cls, url, headers, **kwargs + ) -> Tuple[int, Dict[str, str], bytes]: """The most simple HTTP-based GET.""" if not "allow_redirects" in kwargs: kwargs["allow_redirects"] = True response = HTTP.get_with_timeout(url, headers=headers, **kwargs) - return response.status_code, response.headers, response.content + return response.status_code, response.headers, response.content # type: ignore[return-value] @classmethod def simple_http_post(cls, url, headers, **kwargs): diff --git a/core/opds2_import.py b/core/opds2_import.py index 39edf52071..f2128e9841 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -3,15 +3,16 @@ import logging from datetime import datetime from io import BytesIO, StringIO -from typing import TYPE_CHECKING, Any, Callable, Iterable, Optional +from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Optional, Tuple, Type from urllib.parse import urljoin, urlparse import sqlalchemy import webpub_manifest_parser.opds2.ast as opds2_ast from flask_babel import lazy_gettext as _ +from sqlalchemy.orm import Session from webpub_manifest_parser.core import ManifestParserFactory, ManifestParserResult from webpub_manifest_parser.core.analyzer import NodeFinder -from webpub_manifest_parser.core.ast import Manifestlike +from webpub_manifest_parser.core.ast import Link, Manifestlike from webpub_manifest_parser.errors import BaseError from webpub_manifest_parser.opds2.registry import ( OPDS2LinkRelationsRegistry, @@ -79,7 +80,7 @@ def __init__(self, manifest_parser_factory: ManifestParserFactory): self._manifest_parser_factory = manifest_parser_factory def parse_manifest( - self, manifest: str | dict | Manifestlike + self, manifest: str | dict[str, Any] | Manifestlike ) -> ManifestParserResult: """Parse the feed into an 
RPWM-like AST object. @@ -145,25 +146,27 @@ class OPDS2Importer( NEXT_LINK_RELATION: str = "next" @classmethod - def settings_class(self): + def settings_class(cls) -> Type[OPDS2ImporterSettings]: return OPDS2ImporterSettings - def label(self): - return self.NAME + @classmethod + def label(cls) -> str: + return cls.NAME - def description(self): - return self.DESCRIPTION + @classmethod + def description(cls) -> str: + return cls.DESCRIPTION def __init__( self, - db: sqlalchemy.orm.session.Session, + db: Session, collection: Collection, parser: RWPMManifestParser, data_source_name: str | None = None, - identifier_mapping: dict | None = None, - http_get: Callable | None = None, - content_modifier: Callable | None = None, - map_from_collection: dict | None = None, + identifier_mapping: Dict[Identifier, Identifier] | None = None, + http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + content_modifier: Optional[Callable[..., None]] = None, + map_from_collection: Optional[bool] = None, ): """Initialize a new instance of OPDS2Importer class. @@ -307,20 +310,16 @@ def _extract_contributors( return contributor_metadata_list - def _extract_link(self, link, feed_self_url, default_link_rel=None): + def _extract_link( + self, link: Link, feed_self_url: str, default_link_rel: Optional[str] = None + ) -> LinkData: """Extract a LinkData object from webpub-manifest-parser's link. :param link: webpub-manifest-parser's link - :type link: ast_core.Link - :param feed_self_url: Feed's self URL - :type feed_self_url: str - :param default_link_rel: Default link's relation - :type default_link_rel: Optional[str] :return: Link metadata - :rtype: LinkData """ self._logger.debug(f"Started extracting link metadata from {encode(link)}") @@ -599,7 +598,7 @@ def _extract_publication_metadata( self, feed: opds2_ast.OPDS2Feed, publication: opds2_ast.OPDS2Publication, - data_source_name: str, + data_source_name: Optional[str], ) -> Metadata: """Extract a Metadata object from webpub-manifest-parser's publication. @@ -783,6 +782,8 @@ def external_integration( :param db: Database session :return: External integration associated with this object """ + if self.collection is None: + raise ValueError("Collection is not set") return self.collection.external_integration def integration_configuration(self) -> IntegrationConfiguration: @@ -854,7 +855,7 @@ def _is_open_access_link_( def _record_coverage_failure( self, - failures: dict[str, list[CoverageFailure]], + failures: dict[str, list[CoverageFailure] | CoverageFailure], identifier: Identifier, error_message: str, transient: bool = True, @@ -880,7 +881,7 @@ def _record_coverage_failure( transient=transient, collection=self.collection, ) - failures[identifier.identifier].append(failure) + failures[identifier.identifier].append(failure) # type: ignore[union-attr] return failure @@ -917,11 +918,11 @@ def extract_next_links(self, feed: str | opds2_ast.OPDS2Feed) -> list[str]: next_links = parsed_feed.links.get_by_rel(self.NEXT_LINK_RELATION) next_links = [next_link.href for next_link in next_links] - return next_links + return next_links # type: ignore[no-any-return] def extract_last_update_dates( self, feed: str | opds2_ast.OPDS2Feed - ) -> list[tuple[str, datetime]]: + ) -> list[tuple[Optional[str], Optional[datetime]]]: """Extract last update date of the feed. 
:param feed: OPDS 2.0 feed @@ -947,13 +948,13 @@ def _parse_feed_links(self, links: list[core_ast.Link]) -> None: if first_or_default(link.rels) == Hyperlink.TOKEN_AUTH: # Save the collection-wide token authentication endpoint auth_setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, self.collection.external_integration + ExternalIntegration.TOKEN_AUTH, self.external_integration(self._db) ) auth_setting.value = link.href def extract_feed_data( self, feed: str | opds2_ast.OPDS2Feed, feed_url: str | None = None - ) -> tuple[dict, dict]: + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure] | CoverageFailure]]: """Turn an OPDS 2.0 feed into lists of Metadata and CirculationData objects. :param feed: OPDS 2.0 feed :param feed_url: Feed URL used to resolve relative links @@ -961,7 +962,7 @@ def extract_feed_data( parser_result = self._parser.parse_manifest(feed) feed = parser_result.root publication_metadata_dictionary = {} - failures: dict[str, list[CoverageFailure]] = {} + failures: dict[str, list[CoverageFailure] | CoverageFailure] = {} if feed.links: self._parse_feed_links(feed.links) @@ -1011,7 +1012,9 @@ class OPDS2ImportMonitor(OPDSImportMonitor): PROTOCOL = ExternalIntegration.OPDS2_IMPORT MEDIA_TYPE = OPDS2MediaTypesRegistry.OPDS_FEED.key, "application/json" - def _verify_media_type(self, url, status_code, headers, feed): + def _verify_media_type( + self, url: str, status_code: int, headers: Dict[str, str], feed: bytes + ) -> None: # Make sure we got an OPDS feed, and not an error page that was # sent with a 200 status code. media_type = headers.get("content-type") @@ -1024,7 +1027,7 @@ def _verify_media_type(self, url, status_code, headers, feed): url, message=message, debug_message=feed, status_code=status_code ) - def _get_accept_header(self): + def _get_accept_header(self) -> str: return "{}, {};q=0.9, */*;q=0.1".format( OPDS2MediaTypesRegistry.OPDS_FEED.key, "application/json" ) diff --git a/core/opds_import.py b/core/opds_import.py index faca6de823..3eeebcfd4a 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -2,12 +2,29 @@ import logging import traceback +from datetime import datetime from io import BytesIO -from typing import TYPE_CHECKING, Optional -from urllib.parse import ParseResult, urljoin, urlparse +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Generator, + Iterable, + List, + Literal, + Optional, + Sequence, + Tuple, + Type, + overload, +) +from urllib.parse import urljoin, urlparse +from xml.etree.ElementTree import Element import dateutil import feedparser +from feedparser import FeedParserDict from flask_babel import lazy_gettext as _ from lxml import etree from pydantic import HttpUrl @@ -56,6 +73,7 @@ ) from .model.configuration import HasExternalIntegration from .monitor import CollectionMonitor +from .selftest import SelfTestResult from .util.datetime_helpers import datetime_utc, to_utc, utc_now from .util.http import HTTP, BadResponseException from .util.opds_writer import OPDSFeed, OPDSMessage @@ -66,7 +84,17 @@ from .model import Work -def parse_identifier(db, identifier): +@overload +def parse_identifier(db: Session, identifier: str) -> Identifier: + ... + + +@overload +def parse_identifier(db: Session, identifier: Optional[str]) -> Optional[Identifier]: + ... + + +def parse_identifier(db: Session, identifier: Optional[str]) -> Optional[Identifier]: """Parse the identifier and return an Identifier object representing it. 
:param db: Database session @@ -199,7 +227,9 @@ class OPDSImporterLibrarySettings(BaseSettings): pass -class OPDSImporter(CirculationConfigurationMixin): +class OPDSImporter( + CirculationConfigurationMixin[OPDSImporterSettings, OPDSImporterLibrarySettings] +): """Imports editions and license pools from an OPDS feed. Creates Edition, LicensePool and Work rows in the database, if those don't already exist. @@ -229,28 +259,30 @@ class OPDSImporter(CirculationConfigurationMixin): SUCCESS_STATUS_CODES: list[int] | None = None @classmethod - def settings_class(cls): + def settings_class(cls) -> Type[OPDSImporterSettings]: return OPDSImporterSettings @classmethod - def library_settings_class(cls): + def library_settings_class(cls) -> Type[OPDSImporterLibrarySettings]: return OPDSImporterLibrarySettings - def label(self): + @classmethod + def label(cls) -> str: return "OPDS Importer" - def description(self): - return self.DESCRIPTION + @classmethod + def description(cls) -> str: + return cls.DESCRIPTION # type: ignore[no-any-return] def __init__( self, - _db, - collection, - data_source_name=None, - identifier_mapping=None, - http_get=None, - content_modifier=None, - map_from_collection=None, + _db: Session, + collection: Optional[Collection], + data_source_name: Optional[str] = None, + identifier_mapping: Optional[Dict[Identifier, Identifier]] = None, + http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + content_modifier: Optional[Callable[..., None]] = None, + map_from_collection: Optional[bool] = None, ): """:param collection: LicensePools created by this OPDS import will be associated with the given Collection. If this is None, @@ -308,7 +340,7 @@ def __init__( self.map_from_collection = map_from_collection @property - def collection(self): + def collection(self) -> Optional[Collection]: """Returns an associated Collection object :return: Associated Collection object @@ -320,19 +352,21 @@ def collection(self): return None @property - def data_source(self): + def data_source(self) -> DataSource: """Look up or create a DataSource object representing the source of this OPDS feed. """ offers_licenses = self.collection is not None - return DataSource.lookup( + return DataSource.lookup( # type: ignore[no-any-return] self._db, self.data_source_name, autocreate=True, offers_licenses=offers_licenses, ) - def assert_importable_content(self, feed, feed_url, max_get_attempts=5): + def assert_importable_content( + self, feed: str, feed_url: str, max_get_attempts: int = 5 + ) -> Literal[True]: """Raise an exception if the given feed contains nothing that can, even theoretically, be turned into a LicensePool. @@ -352,7 +386,7 @@ def assert_importable_content(self, feed, feed_url, max_get_attempts=5): url = link.href success = self._is_open_access_link(url, link.media_type) if success: - return success + return True get_attempts += 1 if get_attempts >= max_get_attempts: error = ( @@ -368,7 +402,9 @@ def assert_importable_content(self, feed, feed_url, max_get_attempts=5): ) @classmethod - def _open_access_links(cls, metadatas): + def _open_access_links( + cls, metadatas: List[Metadata] + ) -> Generator[LinkData, None, None]: """Find all open-access links in a list of Metadata objects. :param metadatas: A list of Metadata objects. 
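The @overload declarations added above for parse_identifier follow a standard typing pattern: there is still a single runtime function, but the type checker can see that a non-None argument yields a non-None result. A minimal, self-contained sketch of the same pattern, using hypothetical names rather than anything from this codebase:

    from typing import Optional, overload

    @overload
    def normalize(value: str) -> str:
        ...

    @overload
    def normalize(value: Optional[str]) -> Optional[str]:
        ...

    def normalize(value: Optional[str]) -> Optional[str]:
        # One runtime implementation; the overloads above exist only for the
        # type checker, which picks whichever signature matches the call site.
        return value.strip() if value is not None else None

A caller that passes a plain str gets str back (no Optional check needed), while a caller that may pass None keeps the Optional return type.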
@@ -381,7 +417,9 @@ def _open_access_links(cls, metadatas): if link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: yield link - def _is_open_access_link(self, url, type): + def _is_open_access_link( + self, url: str, type: Optional[str] + ) -> str | Literal[False]: """Is `url` really an open-access link? That is, can we make a normal GET request and get something @@ -403,7 +441,7 @@ def _is_open_access_link(self, url, type): ) return False - def _parse_identifier(self, identifier): + def _parse_identifier(self, identifier: str) -> Identifier: """Parse the identifier and return an Identifier object representing it. :param identifier: String containing the identifier @@ -414,14 +452,19 @@ def _parse_identifier(self, identifier): """ return parse_identifier(self._db, identifier) - def import_from_feed(self, feed, feed_url=None): + def import_from_feed( + self, feed: str | bytes, feed_url: Optional[str] = None + ) -> Tuple[ + List[Edition], + List[LicensePool], + List[Work], + Dict[str, CoverageFailure | List[CoverageFailure]], + ]: # Keep track of editions that were imported. Pools and works # for those editions may be looked up or created. imported_editions = {} pools = {} works = {} - # CoverageFailures that note business logic errors and non-success download statuses - failures = {} # If parsing the overall feed throws an exception, we should address that before # moving on. Let the exception propagate. @@ -469,9 +512,10 @@ def import_from_feed(self, feed, feed_url=None): if work: works[key] = work except Exception as e: + collection_name = self.collection.name if self.collection else "None" logging.warning( f"Non-fatal exception: Failed to import item - import will continue: " - f"identifier={key}; collection={self.collection.name}; " + f"identifier={key}; collection={collection_name}/{self._collection_id}; " f"data_source={self.data_source}; exception={e}", stack_info=True, ) @@ -493,7 +537,7 @@ def import_from_feed(self, feed, feed_url=None): failures, ) - def import_edition_from_metadata(self, metadata): + def import_edition_from_metadata(self, metadata: Metadata) -> Edition: """For the passed-in Metadata object, see if can find or create an Edition in the database. Also create a LicensePool if the Metadata has CirculationData in it. @@ -517,12 +561,12 @@ def import_edition_from_metadata(self, metadata): replace=policy, ) - return edition + return edition # type: ignore[no-any-return] def update_work_for_edition( self, edition: Edition, - is_open_access=True, + is_open_access: bool = True, ) -> tuple[LicensePool | None, Work | None]: """If possible, ensure that there is a presentation-ready Work for the given edition's primary identifier. @@ -573,7 +617,7 @@ def update_work_for_edition( # background, and that's good enough. 
return pool, work - def extract_next_links(self, feed): + def extract_next_links(self, feed: str | bytes | FeedParserDict) -> List[str]: if isinstance(feed, (bytes, str)): parsed = feedparser.parse(feed) else: @@ -586,7 +630,9 @@ def extract_next_links(self, feed): ] return next_links - def extract_last_update_dates(self, feed): + def extract_last_update_dates( + self, feed: str | bytes | FeedParserDict + ) -> List[Tuple[Optional[str], Optional[datetime]]]: if isinstance(feed, (bytes, str)): parsed_feed = feedparser.parse(feed) else: @@ -597,7 +643,7 @@ def extract_last_update_dates(self, feed): ] return [x for x in dates if x and x[1]] - def build_identifier_mapping(self, external_urns): + def build_identifier_mapping(self, external_urns: List[str]) -> None: """Uses the given Collection and a list of URNs to reverse engineer an identifier mapping. @@ -632,7 +678,9 @@ def build_identifier_mapping(self, external_urns): self.identifier_mapping = mapping - def extract_feed_data(self, feed, feed_url=None): + def extract_feed_data( + self, feed: str | bytes, feed_url: Optional[str] = None + ) -> Tuple[Dict[str, Metadata], Dict[str, CoverageFailure | List[CoverageFailure]]]: """Turn an OPDS feed into lists of Metadata and CirculationData objects, with associated messages and next_links. """ @@ -652,16 +700,16 @@ def extract_feed_data(self, feed, feed_url=None): ) # translate the id in failures to identifier.urn - identified_failures = {} + identified_failures: Dict[str, CoverageFailure | List[CoverageFailure]] = {} for urn, failure in list(fp_failures.items()) + list(xml_failures.items()): identifier, failure = self.handle_failure(urn, failure) identified_failures[identifier.urn] = failure # Use one loop for both, since the id will be the same for both dictionaries. metadata = {} - circulationdata = {} - for id, m_data_dict in list(fp_metadata.items()): - xml_data_dict = xml_data_meta.get(id, {}) + _id: str + for _id, m_data_dict in list(fp_metadata.items()): + xml_data_dict = xml_data_meta.get(_id, {}) external_identifier = None if self.primary_identifier_source == ExternalIntegration.DCTERMS_IDENTIFIER: @@ -677,7 +725,7 @@ def extract_feed_data(self, feed, feed_url=None): # the external identifier will be add later, so it must be removed at this point new_identifiers = dcterms_ids[1:] # Id must be in the identifiers with lower weight. - id_type, id_identifier = Identifier.type_and_identifier_for_urn(id) + id_type, id_identifier = Identifier.type_and_identifier_for_urn(_id) id_weight = 1 new_identifiers.append( IdentifierData(id_type, id_identifier, id_weight) @@ -685,9 +733,10 @@ def extract_feed_data(self, feed, feed_url=None): xml_data_dict["identifiers"] = new_identifiers if external_identifier is None: - external_identifier, ignore = Identifier.parse_urn(self._db, id) + external_identifier, ignore = Identifier.parse_urn(self._db, _id) - if self.identifier_mapping: + internal_identifier: Optional[Identifier] + if self.identifier_mapping and external_identifier is not None: internal_identifier = self.identifier_mapping.get( external_identifier, external_identifier ) @@ -753,7 +802,21 @@ def extract_feed_data(self, feed, feed_url=None): pass return metadata, identified_failures - def handle_failure(self, urn, failure): + @overload + def handle_failure( + self, urn: str, failure: Identifier + ) -> Tuple[Identifier, Identifier]: + ... + + @overload + def handle_failure( + self, urn: str, failure: CoverageFailure + ) -> Tuple[Identifier, CoverageFailure]: + ... 
+ + def handle_failure( + self, urn: str, failure: Identifier | CoverageFailure + ) -> Tuple[Identifier, CoverageFailure | Identifier]: """Convert a URN and a failure message that came in through an OPDS feed into an Identifier and a CoverageFailure object. @@ -785,7 +848,7 @@ def handle_failure(self, urn, failure): return internal_identifier, failure @classmethod - def _add_format_data(cls, circulation): + def _add_format_data(cls, circulation: CirculationData) -> None: """Subclasses that specialize OPDS Import can implement this method to add formats to a CirculationData object with information that allows a patron to actually get a book @@ -793,14 +856,16 @@ def _add_format_data(cls, circulation): """ @classmethod - def combine(self, d1, d2): + def combine( + self, d1: Optional[Dict[str, Any]], d2: Optional[Dict[str, Any]] + ) -> Dict[str, Any]: """Combine two dictionaries that can be used as keyword arguments to the Metadata constructor. """ if not d1 and not d2: return dict() if not d1: - return dict(d2) + return dict(d2) # type: ignore[arg-type] if not d2: return dict(d1) new_dict = dict(d1) @@ -828,7 +893,9 @@ def combine(self, d1, d2): pass return new_dict - def extract_data_from_feedparser(self, feed, data_source): + def extract_data_from_feedparser( + self, feed: str | bytes, data_source: DataSource + ) -> Tuple[Dict[str, Any], Dict[str, CoverageFailure]]: feedparser_parsed = feedparser.parse(feed) values = {} failures = {} @@ -849,15 +916,18 @@ def extract_data_from_feedparser(self, feed, data_source): # That's bad. Can't make an item-specific error message, but write to # log that something very wrong happened. logging.error( - "Tried to parse an element without a valid identifier. feed=%s" - % feed + f"Tried to parse an element without a valid identifier. feed={feed!r}" ) return values, failures @classmethod def extract_metadata_from_elementtree( - cls, feed, data_source, feed_url=None, do_get=None - ): + cls, + feed: bytes | str, + data_source: DataSource, + feed_url: Optional[str] = None, + do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + ) -> Tuple[Dict[str, Any], Dict[str, CoverageFailure]]: """Parse the OPDS as XML and extract all author and subject information, as well as ratings and medium. @@ -903,30 +973,34 @@ def extract_metadata_from_elementtree( # Then turn Atom tags into Metadata objects. 
for entry in parser._xpath(root, "/atom:feed/atom:entry"): - identifier, detail, failure = cls.detail_for_elementtree_entry( + identifier, detail, failure_entry = cls.detail_for_elementtree_entry( parser, entry, data_source, feed_url, do_get=do_get ) if identifier: - if failure: - failures[identifier] = failure + if failure_entry: + failures[identifier] = failure_entry if detail: values[identifier] = detail return values, failures @classmethod - def _datetime(cls, entry, key): + def _datetime(cls, entry: Dict[str, str], key: str) -> Optional[datetime]: value = entry.get(key, None) if not value: - return value + return None return datetime_utc(*value[:6]) - def last_update_date_for_feedparser_entry(self, entry): + def last_update_date_for_feedparser_entry( + self, entry: Dict[str, Any] + ) -> Tuple[Optional[str], Optional[datetime]]: identifier = entry.get("id") updated = self._datetime(entry, "updated_parsed") - return (identifier, updated) + return identifier, updated @classmethod - def data_detail_for_feedparser_entry(cls, entry, data_source): + def data_detail_for_feedparser_entry( + cls, entry: Dict[str, str], data_source: DataSource + ) -> Tuple[Optional[str], Optional[Dict[str, Any]], Optional[CoverageFailure]]: """Turn an entry dictionary created by feedparser into dictionaries of data that can be used as keyword arguments to the Metadata and CirculationData constructors. @@ -950,7 +1024,9 @@ def data_detail_for_feedparser_entry(cls, entry, data_source): return identifier, None, failure @classmethod - def _data_detail_for_feedparser_entry(cls, entry, metadata_data_source): + def _data_detail_for_feedparser_entry( + cls, entry: Dict[str, Any], metadata_data_source: DataSource + ) -> Dict[str, Any]: """Helper method that extracts metadata and circulation data from a feedparser entry. This method can be overridden in tests to check that callers handle things properly when it throws an exception. @@ -1010,7 +1086,7 @@ def _data_detail_for_feedparser_entry(cls, entry, metadata_data_source): links = [] - def summary_to_linkdata(detail): + def summary_to_linkdata(detail: Optional[Dict[str, str]]) -> Optional[LinkData]: if not detail: return None if not "value" in detail or not detail["value"]: @@ -1056,14 +1132,14 @@ def summary_to_linkdata(detail): return kwargs_meta @classmethod - def rights_uri(cls, rights_string): + def rights_uri(cls, rights_string: str) -> str: """Determine the URI that best encapsulates the rights status of the downloads associated with this book. """ return RightsStatus.rights_uri_from_string(rights_string) @classmethod - def rights_uri_from_feedparser_entry(cls, entry): + def rights_uri_from_feedparser_entry(cls, entry: Dict[str, str]) -> str: """Extract a rights URI from a parsed feedparser entry. :return: A rights URI. @@ -1072,17 +1148,20 @@ def rights_uri_from_feedparser_entry(cls, entry): return cls.rights_uri(rights) @classmethod - def rights_uri_from_entry_tag(cls, entry): + def rights_uri_from_entry_tag(cls, entry: Element) -> Optional[str]: """Extract a rights string from an lxml tag. :return: A rights URI. """ rights = cls.PARSER_CLASS._xpath1(entry, "rights") - if rights: - return cls.rights_uri(rights) + if rights is None: + return None + return cls.rights_uri(rights) @classmethod - def extract_messages(cls, parser, feed_tag): + def extract_messages( + cls, parser: OPDSXMLParser, feed_tag: str + ) -> Generator[OPDSMessage, None, None]: """Extract tags from an OPDS feed and convert them into OPDSMessage objects. 
""" @@ -1116,7 +1195,9 @@ def extract_messages(cls, parser, feed_tag): yield OPDSMessage(urn, status_code, description) @classmethod - def coveragefailures_from_messages(cls, data_source, parser, feed_tag): + def coveragefailures_from_messages( + cls, data_source: DataSource, parser: OPDSXMLParser, feed_tag: str + ) -> Generator[CoverageFailure, None, None]: """Extract CoverageFailure objects from a parsed OPDS document. This allows us to determine the fate of books which could not become tags. @@ -1127,7 +1208,9 @@ def coveragefailures_from_messages(cls, data_source, parser, feed_tag): yield failure @classmethod - def coveragefailure_from_message(cls, data_source, message): + def coveragefailure_from_message( + cls, data_source: DataSource, message: OPDSMessage + ) -> Optional[CoverageFailure]: """Turn a tag into a CoverageFailure.""" _db = Session.object_session(data_source) @@ -1149,7 +1232,7 @@ def coveragefailure_from_message(cls, data_source, message): if cls.SUCCESS_STATUS_CODES and message.status_code in cls.SUCCESS_STATUS_CODES: # This message is telling us that nothing went wrong. It # should be treated as a success. - return identifier + return identifier # type: ignore[no-any-return] if message.status_code == 200: # By default, we treat a message with a 200 status code @@ -1173,8 +1256,13 @@ def coveragefailure_from_message(cls, data_source, message): @classmethod def detail_for_elementtree_entry( - cls, parser, entry_tag, data_source, feed_url=None, do_get=None - ): + cls, + parser: OPDSXMLParser, + entry_tag: Element, + data_source: DataSource, + feed_url: Optional[str] = None, + do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + ) -> Tuple[Optional[str], Optional[Dict[str, Any]], Optional[CoverageFailure]]: """Turn an tag into a dictionary of metadata that can be used as keyword arguments to the Metadata contructor. @@ -1203,15 +1291,19 @@ def detail_for_elementtree_entry( @classmethod def _detail_for_elementtree_entry( - cls, parser, entry_tag, feed_url=None, do_get=None - ): + cls, + parser: OPDSXMLParser, + entry_tag: Element, + feed_url: Optional[str] = None, + do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + ) -> Dict[str, Any]: """Helper method that extracts metadata and circulation data from an elementtree entry. This method can be overridden in tests to check that callers handle things properly when it throws an exception. """ # We will fill this dictionary with all the information # we can find. 
- data = dict() + data: Dict[str, Any] = dict() alternate_identifiers = [] for id_tag in parser._xpath(entry_tag, "dcterms:identifier"): @@ -1236,9 +1328,9 @@ def _detail_for_elementtree_entry( ratings = [] for rating_tag in parser._xpath(entry_tag, "schema:Rating"): - v = cls.extract_measurement(rating_tag) - if v: - ratings.append(v) + measurement = cls.extract_measurement(rating_tag) + if measurement: + ratings.append(measurement) data["measurements"] = ratings rights_uri = cls.rights_uri_from_entry_tag(entry_tag) @@ -1271,7 +1363,7 @@ def _detail_for_elementtree_entry( return data @classmethod - def get_medium_from_links(cls, links): + def get_medium_from_links(cls, links: List[LinkData]) -> Optional[str]: """Get medium if derivable from information in an acquisition link.""" derived = None for link in links: @@ -1287,9 +1379,11 @@ def get_medium_from_links(cls, links): return derived @classmethod - def extract_identifier(cls, identifier_tag): + def extract_identifier(cls, identifier_tag: Element) -> Optional[IdentifierData]: """Turn a tag into an IdentifierData object.""" try: + if identifier_tag.text is None: + return None type, identifier = Identifier.type_and_identifier_for_urn( identifier_tag.text.lower() ) @@ -1298,7 +1392,9 @@ def extract_identifier(cls, identifier_tag): return None @classmethod - def extract_medium(cls, entry_tag, default=Edition.BOOK_MEDIUM): + def extract_medium( + cls, entry_tag: Optional[Element], default: Optional[str] = Edition.BOOK_MEDIUM + ) -> Optional[str]: """Derive a value for Edition.medium from schema:additionalType or from a subtag. @@ -1320,7 +1416,9 @@ def extract_medium(cls, entry_tag, default=Edition.BOOK_MEDIUM): return medium or default @classmethod - def extract_contributor(cls, parser, author_tag): + def extract_contributor( + cls, parser: OPDSXMLParser, author_tag: Element + ) -> Optional[ContributorData]: """Turn an tag into a ContributorData object.""" subtag = parser.text_of_optional_subtag sort_name = subtag(author_tag, "simplified:sort_name") @@ -1350,14 +1448,16 @@ def extract_contributor(cls, parser, author_tag): return None @classmethod - def extract_subject(cls, parser, category_tag): + def extract_subject( + cls, parser: OPDSXMLParser, category_tag: Element + ) -> SubjectData: """Turn an tag into a SubjectData object.""" attr = category_tag.attrib # Retrieve the type of this subject - FAST, Dewey Decimal, # etc. scheme = attr.get("scheme") - subject_type = Subject.by_uri.get(scheme) + subject_type = Subject.by_uri.get(scheme) # type: ignore[arg-type] if not subject_type: # We can't represent this subject because we don't # know its scheme. Just treat it as a tag. @@ -1378,7 +1478,12 @@ def extract_subject(cls, parser, category_tag): return SubjectData(type=subject_type, identifier=term, name=name, weight=weight) @classmethod - def extract_link(cls, link_tag, feed_url=None, entry_rights_uri=None): + def extract_link( + cls, + link_tag: Element, + feed_url: Optional[str] = None, + entry_rights_uri: Optional[str] = None, + ) -> Optional[LinkData]: """Convert a tag into a LinkData object. :param feed_url: The URL to the enclosing feed, for use in resolving @@ -1398,12 +1503,12 @@ def extract_link(cls, link_tag, feed_url=None, entry_rights_uri=None): # relationship to the entry. return None rights = attr.get("{%s}rights" % OPDSXMLParser.NAMESPACES["dcterms"]) + rights_uri = entry_rights_uri if rights: # Rights associated with the link override rights # associated with the entry. 
rights_uri = cls.rights_uri(rights) - else: - rights_uri = entry_rights_uri + if feed_url and not urlparse(href).netloc: # This link is relative, so we need to get the absolute url href = urljoin(feed_url, href) @@ -1411,8 +1516,13 @@ def extract_link(cls, link_tag, feed_url=None, entry_rights_uri=None): @classmethod def make_link_data( - cls, rel, href=None, media_type=None, rights_uri=None, content=None - ): + cls, + rel: str, + href: Optional[str] = None, + media_type: Optional[str] = None, + rights_uri: Optional[str] = None, + content: Optional[str] = None, + ) -> LinkData: """Hook method for creating a LinkData object. Intended to be overridden in subclasses. @@ -1426,13 +1536,13 @@ def make_link_data( ) @classmethod - def consolidate_links(cls, links): + def consolidate_links(cls, links: Sequence[LinkData | None]) -> List[LinkData]: """Try to match up links with their thumbnails. If link n is an image and link n+1 is a thumbnail, then the thumbnail is assumed to be the thumbnail of the image. - Similarly if link n is a thumbnail and link n+1 is an image. + Similarly, if link n is a thumbnail and link n+1 is an image. """ # Strip out any links that didn't get turned into LinkData objects # due to missing `href` or whatever. @@ -1441,10 +1551,10 @@ def consolidate_links(cls, links): # Make a new list of links from that list, to iterate over -- # we'll be modifying new_links in place so we can't iterate # over it. - links = list(new_links) + _links = list(new_links) next_link_already_handled = False - for i, link in enumerate(links): + for i, link in enumerate(_links): if link.rel not in (Hyperlink.THUMBNAIL_IMAGE, Hyperlink.IMAGE): # This is not any kind of image. Ignore it. continue @@ -1455,13 +1565,13 @@ def consolidate_links(cls, links): next_link_already_handled = False continue - if i == len(links) - 1: + if i == len(_links) - 1: # This is the last link. Since there is no next link # there's nothing to do here. continue # Peek at the next link. - next_link = links[i + 1] + next_link = _links[i + 1] if ( link.rel == Hyperlink.THUMBNAIL_IMAGE @@ -1489,24 +1599,28 @@ def consolidate_links(cls, links): return new_links @classmethod - def extract_measurement(cls, rating_tag): + def extract_measurement(cls, rating_tag: Element) -> Optional[MeasurementData]: type = rating_tag.get("{http://schema.org/}additionalType") value = rating_tag.get("{http://schema.org/}ratingValue") if not value: value = rating_tag.attrib.get("{http://schema.org}ratingValue") if not type: type = Measurement.RATING + + if value is None: + return None + try: - value = float(value) + float_value = float(value) return MeasurementData( quantity_measured=type, - value=value, + value=float_value, ) except ValueError: return None @classmethod - def extract_series(cls, series_tag): + def extract_series(cls, series_tag: Element) -> Tuple[Optional[str], Optional[str]]: attr = series_tag.attrib series_name = attr.get("{http://schema.org/}name", None) series_position = attr.get("{http://schema.org/}position", None) @@ -1532,12 +1646,12 @@ class OPDSImportMonitor( def __init__( self, - _db, + _db: Session, collection: Collection, - import_class, - force_reimport=False, - **import_class_kwargs, - ): + import_class: Type[OPDSImporter], + force_reimport: bool = False, + **import_class_kwargs: Any, + ) -> None: if not collection: raise ValueError( "OPDSImportMonitor can only be run in the context of a Collection." 
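To make the thumbnail-pairing behavior of consolidate_links concrete, here is a hedged usage sketch. The import paths and LinkData keyword arguments are assumptions based on how these names are used elsewhere in this patch, and the expected result is inferred from the docstring rather than asserted from the implementation:

    from core.metadata_layer import LinkData   # assumed import path
    from core.model import Hyperlink           # assumed import path
    from core.opds_import import OPDSImporter

    links = [
        LinkData(rel=Hyperlink.IMAGE, href="http://example.org/cover.jpg"),
        LinkData(rel=Hyperlink.THUMBNAIL_IMAGE, href="http://example.org/cover-thumb.jpg"),
    ]
    consolidated = OPDSImporter.consolidate_links(links)
    # Because the thumbnail immediately follows the image, it is expected to be
    # folded into the image link rather than kept as a separate top-level link.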
@@ -1556,7 +1670,9 @@ def __init__( ) self.external_integration_id = collection.external_integration.id - self.feed_url = self.opds_url(collection) + feed_url = self.opds_url(collection) + self.feed_url = "" if feed_url is None else feed_url + self.force_reimport = force_reimport self.importer = import_class(_db, collection=collection, **import_class_kwargs) @@ -1576,14 +1692,14 @@ def __init__( except AttributeError: self._max_retry_count = 0 - parsed_url: ParseResult = urlparse(self.feed_url) + parsed_url = urlparse(self.feed_url) self._feed_base_url = f"{parsed_url.scheme}://{parsed_url.hostname}{(':' + str(parsed_url.port)) if parsed_url.port else ''}/" super().__init__(_db, collection) - def external_integration(self, _db): + def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: return get_one(_db, ExternalIntegration, id=self.external_integration_id) - def _run_self_tests(self, _db): + def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: """Retrieve the first page of the OPDS feed""" first_page = self.run_test( "Retrieve the first page of the OPDS feed (%s)" % self.feed_url, @@ -1606,7 +1722,9 @@ def _run_self_tests(self, _db): self.feed_url, ) - def _get(self, url, headers): + def _get( + self, url: str, headers: Dict[str, str] + ) -> Tuple[int, Dict[str, str], bytes]: """Make the sort of HTTP request that's normal for an OPDS feed. Long timeout, raise error on anything but 2xx or 3xx. @@ -1621,9 +1739,9 @@ def _get(self, url, headers): if not url.startswith("http"): url = urljoin(self._feed_base_url, url) response = HTTP.get_with_timeout(url, headers=headers, **kwargs) - return response.status_code, response.headers, response.content + return response.status_code, response.headers, response.content # type: ignore[return-value] - def _get_accept_header(self): + def _get_accept_header(self) -> str: return ",".join( [ OPDSFeed.ACQUISITION_FEED_TYPE, @@ -1633,7 +1751,7 @@ def _get_accept_header(self): ] ) - def _update_headers(self, headers): + def _update_headers(self, headers: Optional[Dict[str, str]]) -> Dict[str, str]: headers = dict(headers) if headers else {} if self.username and self.password and not "Authorization" in headers: headers["Authorization"] = "Basic %s" % base64.b64encode( @@ -1647,7 +1765,7 @@ def _update_headers(self, headers): return headers - def _parse_identifier(self, identifier): + def _parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: """Extract the publication's identifier from its metadata. :param identifier: String containing the identifier @@ -1658,7 +1776,7 @@ def _parse_identifier(self, identifier): """ return parse_identifier(self._db, identifier) - def opds_url(self, collection): + def opds_url(self, collection: Collection) -> Optional[str]: """Returns the OPDS import URL for the given collection. By default, this URL is stored as the external account ID, but @@ -1666,15 +1784,15 @@ def opds_url(self, collection): """ return collection.external_account_id - def data_source(self, collection): + def data_source(self, collection: Collection) -> Optional[DataSource]: """Returns the data source name for the given collection. By default, this URL is stored as a setting on the collection, but subclasses may hard-code it. 
""" - return collection.data_source + return collection.data_source # type: ignore[no-any-return] - def feed_contains_new_data(self, feed): + def feed_contains_new_data(self, feed: bytes | str) -> bool: """Does the given feed contain any entries that haven't been imported yet? """ @@ -1704,7 +1822,9 @@ def feed_contains_new_data(self, feed): break return new_data - def identifier_needs_import(self, identifier, last_updated_remote): + def identifier_needs_import( + self, identifier: Optional[Identifier], last_updated_remote: Optional[datetime] + ) -> bool: """Does the remote side have new information about this Identifier? :param identifier: An Identifier. @@ -1766,8 +1886,11 @@ def identifier_needs_import(self, identifier, last_updated_remote): last_updated_remote, ) return True + return False - def _verify_media_type(self, url, status_code, headers, feed): + def _verify_media_type( + self, url: str, status_code: int, headers: Dict[str, str], feed: bytes + ) -> None: # Make sure we got an OPDS feed, and not an error page that was # sent with a 200 status code. media_type = headers.get("content-type") @@ -1779,7 +1902,9 @@ def _verify_media_type(self, url, status_code, headers, feed): url, message=message, debug_message=feed, status_code=status_code ) - def follow_one_link(self, url, do_get=None): + def follow_one_link( + self, url: str, do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None + ) -> Tuple[List[str], Optional[bytes]]: """Download a representation of a URL and extract the useful information. @@ -1806,7 +1931,9 @@ def follow_one_link(self, url, do_get=None): self.log.info("No new data.") return [], None - def import_one_feed(self, feed): + def import_one_feed( + self, feed: bytes | str + ) -> Tuple[List[Edition], Dict[str, CoverageFailure | List[CoverageFailure]]]: """Import every book mentioned in an OPDS feed.""" # Because we are importing into a Collection, we will immediately @@ -1827,6 +1954,7 @@ def import_one_feed(self, feed): # Create CoverageRecords for the failures. for urn, failure in list(failures.items()): + failure_items: List[CoverageFailure] if isinstance(failure, list): failure_items = failure else: @@ -1839,7 +1967,7 @@ def import_one_feed(self, feed): return imported_editions, failures - def _get_feeds(self): + def _get_feeds(self) -> Iterable[Tuple[str, bytes]]: feeds = [] queue = [self.feed_url] seen_links = set() @@ -1863,11 +1991,9 @@ def _get_feeds(self): # Start importing at the end. If something fails, it will be easier to # pick up where we left off. - feeds = reversed(feeds) - - return feeds + return reversed(feeds) - def run_once(self, progress_ignore): + def run_once(self, progress: TimestampData) -> TimestampData: feeds = self._get_feeds() total_imported = 0 total_failures = 0 diff --git a/core/util/datetime_helpers.py b/core/util/datetime_helpers.py index 4d236984cc..6acfdc0a51 100644 --- a/core/util/datetime_helpers.py +++ b/core/util/datetime_helpers.py @@ -1,5 +1,5 @@ import datetime -from typing import Optional, Tuple +from typing import Optional, Tuple, overload import pytz from dateutil.relativedelta import relativedelta @@ -35,6 +35,16 @@ def utc_now() -> datetime.datetime: return datetime.datetime.now(tz=pytz.UTC) +@overload +def to_utc(dt: datetime.datetime) -> datetime.datetime: + ... + + +@overload +def to_utc(dt: Optional[datetime.datetime]) -> Optional[datetime.datetime]: + ... 
+ + def to_utc(dt: Optional[datetime.datetime]) -> Optional[datetime.datetime]: """This converts a naive datetime object that represents UTC into an aware datetime object. diff --git a/core/util/xmlparser.py b/core/util/xmlparser.py index 1c3e11262f..2f3f998649 100644 --- a/core/util/xmlparser.py +++ b/core/util/xmlparser.py @@ -1,8 +1,16 @@ +from __future__ import annotations + from io import BytesIO -from typing import Dict +from typing import TYPE_CHECKING, Dict, List, Optional, TypeVar from lxml import etree +if TYPE_CHECKING: + from lxml.etree import Element + + +T = TypeVar("T") + class XMLParser: @@ -11,44 +19,56 @@ class XMLParser: NAMESPACES: Dict[str, str] = {} @classmethod - def _xpath(cls, tag, expression, namespaces=None): + def _xpath( + cls, tag: Element, expression: str, namespaces: Optional[Dict[str, str]] = None + ) -> List[Element]: if not namespaces: namespaces = cls.NAMESPACES """Wrapper to do a namespaced XPath expression.""" return tag.xpath(expression, namespaces=namespaces) @classmethod - def _xpath1(cls, tag, expression, namespaces=None): + def _xpath1( + cls, tag: Element, expression: str, namespaces: Optional[Dict[str, str]] = None + ) -> Optional[Element]: """Wrapper to do a namespaced XPath expression.""" values = cls._xpath(tag, expression, namespaces=namespaces) if not values: return None return values[0] - def _cls(self, tag_name, class_name): + def _cls(self, tag_name: str, class_name: str) -> str: """Return an XPath expression that will find a tag with the given CSS class.""" return ( 'descendant-or-self::node()/%s[contains(concat(" ", normalize-space(@class), " "), " %s ")]' % (tag_name, class_name) ) - def text_of_optional_subtag(self, tag, name, namespaces=None): + def text_of_optional_subtag( + self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + ) -> Optional[str]: tag = self._xpath1(tag, name, namespaces=namespaces) if tag is None or tag.text is None: return None else: return str(tag.text) - def text_of_subtag(self, tag, name, namespaces=None): + def text_of_subtag( + self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + ) -> str: return str(tag.xpath(name, namespaces=namespaces)[0].text) - def int_of_subtag(self, tag, name, namespaces=None): + def int_of_subtag( + self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + ) -> int: return int(self.text_of_subtag(tag, name, namespaces=namespaces)) - def int_of_optional_subtag(self, tag, name, namespaces=None): + def int_of_optional_subtag( + self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + ) -> Optional[int]: v = self.text_of_optional_subtag(tag, name, namespaces=namespaces) if not v: - return v + return None return int(v) def process_all(self, xml, xpath, namespaces=None, handler=None, parser=None): diff --git a/pyproject.toml b/pyproject.toml index 9fc86b06f8..7afd0b0d7e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,12 +78,18 @@ module = [ "api.circulation", "api.discovery.*", "api.integration.*", + "api.lcp.hash", + "api.odl", + "api.odl2", + "api.opds_for_distributors", "core.feed.*", "core.integration.*", "core.model.announcements", "core.model.hassessioncache", "core.model.integration", "core.model.library", + "core.opds2_import", + "core.opds_import", "core.selftest", "core.service.*", "core.settings.*", diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py index 2670225ac2..694fabd407 100644 --- a/tests/api/feed/test_library_annotator.py +++ 
b/tests/api/feed/test_library_annotator.py @@ -946,7 +946,7 @@ def test_active_loan_feed( tree = etree.fromstring(response.get_data(as_text=True)) parser = OPDSXMLParser() licensor = parser._xpath1(tree, "//atom:feed/drm:licensor") - + assert licensor is not None adobe_patron_identifier = AuthdataUtility._adobe_patron_identifier(patron) # The DRM licensing information includes the Adobe vendor ID @@ -1021,7 +1021,11 @@ def test_active_loan_feed( ) assert 2 == len(acquisitions) - availabilities = [parser._xpath1(x, "opds:availability") for x in acquisitions] + availabilities = [] + for x in acquisitions: + availability = parser._xpath1(x, "opds:availability") + assert availability is not None + availabilities.append(availability) # One of these availability tags has 'since' but not 'until'. # The other one has both. diff --git a/tests/api/test_odl.py b/tests/api/test_odl.py index b483f71901..518e6cf4de 100644 --- a/tests/api/test_odl.py +++ b/tests/api/test_odl.py @@ -1242,7 +1242,7 @@ def test_release_hold_success( odl_api_test_fixture.checkout(patron=loan_patron) odl_api_test_fixture.pool.on_hold_to(odl_api_test_fixture.patron, position=1) - assert True == odl_api_test_fixture.api.release_hold( + odl_api_test_fixture.api.release_hold( odl_api_test_fixture.patron, "pin", odl_api_test_fixture.pool ) assert 0 == odl_api_test_fixture.pool.licenses_available @@ -1253,7 +1253,7 @@ def test_release_hold_success( odl_api_test_fixture.pool.on_hold_to(odl_api_test_fixture.patron, position=0) odl_api_test_fixture.checkin(patron=loan_patron) - assert True == odl_api_test_fixture.api.release_hold( + odl_api_test_fixture.api.release_hold( odl_api_test_fixture.patron, "pin", odl_api_test_fixture.pool ) assert 1 == odl_api_test_fixture.pool.licenses_available @@ -1266,7 +1266,7 @@ def test_release_hold_success( db.patron(), position=2 ) - assert True == odl_api_test_fixture.api.release_hold( + odl_api_test_fixture.api.release_hold( odl_api_test_fixture.patron, "pin", odl_api_test_fixture.pool ) assert 0 == odl_api_test_fixture.pool.licenses_available diff --git a/tests/api/test_opds.py b/tests/api/test_opds.py index ba51138402..3decbf4040 100644 --- a/tests/api/test_opds.py +++ b/tests/api/test_opds.py @@ -1226,6 +1226,7 @@ def test_active_loan_feed( tree = etree.fromstring(response.get_data(as_text=True)) parser = OPDSXMLParser() licensor = parser._xpath1(tree, "//atom:feed/drm:licensor") + assert licensor is not None adobe_patron_identifier = AuthdataUtility._adobe_patron_identifier(patron) @@ -1294,7 +1295,11 @@ def test_active_loan_feed( ) assert 2 == len(acquisitions) - availabilities = [parser._xpath1(x, "opds:availability") for x in acquisitions] + availabilities = [] + for acquisition in acquisitions: + availability = parser._xpath1(acquisition, "opds:availability") + assert availability is not None + availabilities.append(availability) # One of these availability tags has 'since' but not 'until'. # The other one has both. 
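The assertions added in these tests exist to narrow the Optional[Element] return type of _xpath1 now that the module is type-checked; a generic illustration of the pattern, with hypothetical names:

    from typing import Optional

    def find_tag(name: str) -> Optional[str]:
        # Stand-in for a lookup such as OPDSXMLParser._xpath1, which may
        # return None when nothing matches.
        return name or None

    tag = find_tag("licensor")
    assert tag is not None  # narrows Optional[str] to str for the type checker
    print(tag.upper())      # safe: tag is known to be non-None here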
@@ -1862,6 +1867,7 @@ def test_acquisition_links( opds_parser = OPDSXMLParser() availability = opds_parser._xpath1(fulfill, "opds:availability") + assert availability is not None assert _strftime(loan1.start) == availability.attrib.get("since") assert loan1.end == availability.attrib.get("until") assert None == loan1.end diff --git a/tests/api/test_opds_for_distributors.py b/tests/api/test_opds_for_distributors.py index a7ae2a868a..513fea0795 100644 --- a/tests/api/test_opds_for_distributors.py +++ b/tests/api/test_opds_for_distributors.py @@ -1,11 +1,10 @@ import datetime import json from typing import Callable, Union -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest -import core.opds_import from api.circulation_exceptions import * from api.opds_for_distributors import ( OPDSForDistributorsAPI, @@ -29,6 +28,7 @@ RightsStatus, Timestamp, create, + get_one, ) from core.util.datetime_helpers import utc_now from core.util.opds_writer import OPDSFeed @@ -147,7 +147,7 @@ def test_can_fulfill_without_loan( fulfilled with no underlying loan, if its delivery mechanism uses bearer token fulfillment. """ - patron = object() + patron = MagicMock() pool = opds_dist_api_fixture.db.licensepool( edition=None, collection=opds_dist_api_fixture.collection ) @@ -156,11 +156,11 @@ def test_can_fulfill_without_loan( m = opds_dist_api_fixture.api.can_fulfill_without_loan # No LicensePoolDeliveryMechanism -> False - assert False == m(patron, pool, None) + assert False == m(patron, pool, MagicMock()) # No LicensePool -> False (there can be multiple LicensePools for # a single LicensePoolDeliveryMechanism). - assert False == m(patron, None, lpdm) + assert False == m(patron, MagicMock(), lpdm) # No DeliveryMechanism -> False old_dm = lpdm.delivery_mechanism @@ -410,6 +410,7 @@ def test_checkout(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): # The loan's start date has been set to the current time. now = utc_now() + assert loan_info.start_date is not None assert (now - loan_info.start_date).seconds < 2 # The loan is of indefinite duration. @@ -471,6 +472,7 @@ def test_fulfill(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): assert None == fulfillment_info.content_link assert DeliveryMechanism.BEARER_TOKEN == fulfillment_info.content_type + assert fulfillment_info.content is not None bearer_token_document = json.loads(fulfillment_info.content) expires_in = bearer_token_document["expires_in"] assert expires_in < 60 @@ -483,6 +485,7 @@ def test_fulfill(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): # bearer token expires to the time at which the title was # originally fulfilled. 
expect_expiration = fulfillment_time + datetime.timedelta(seconds=expires_in) + assert fulfillment_info.content_expires is not None assert ( abs((fulfillment_info.content_expires - expect_expiration).total_seconds()) < 5 @@ -708,9 +711,7 @@ def setup_collection(*, name: str, datasource: DataSource) -> Collection: collection=collection2, ) - with patch( - "core.opds_import.get_one", wraps=core.opds_import.get_one - ) as get_one_mock: + with patch("core.opds_import.get_one", wraps=get_one) as get_one_mock: importer1_lp, _ = importer1.update_work_for_edition(edition) importer2_lp, _ = importer2.update_work_for_edition(edition) diff --git a/tests/api/test_selftest.py b/tests/api/test_selftest.py index 1865478fff..b7eea047ee 100644 --- a/tests/api/test_selftest.py +++ b/tests/api/test_selftest.py @@ -5,6 +5,7 @@ from io import StringIO from typing import TYPE_CHECKING from unittest import mock +from unittest.mock import MagicMock import pytest @@ -302,7 +303,7 @@ def _no_delivery_mechanisms_test(self): return "1" mock = Mock() - results = [x for x in mock._run_self_tests()] + results = [x for x in mock._run_self_tests(MagicMock())] assert ["1"] == [x.result for x in results] assert True == mock._no_delivery_mechanisms_called diff --git a/tests/core/test_opds.py b/tests/core/test_opds.py index 21c3a96faf..ce3d463672 100644 --- a/tests/core/test_opds.py +++ b/tests/core/test_opds.py @@ -855,12 +855,14 @@ def test_acquisition_feed_includes_available_and_issued_tag( entries = OPDSXMLParser._xpath(with_times, "/atom:feed/atom:entry") parsed = [] for entry in entries: - title = OPDSXMLParser._xpath1(entry, "atom:title").text + title_element = OPDSXMLParser._xpath1(entry, "atom:title") + assert title_element is not None + title = title_element.text issued = OPDSXMLParser._xpath1(entry, "dcterms:issued") - if issued != None: + if issued is not None: issued = issued.text published = OPDSXMLParser._xpath1(entry, "atom:published") - if published != None: + if published is not None: published = published.text parsed.append( dict( diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 4a4a8369f1..2a4088b890 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -1,7 +1,7 @@ import random from io import StringIO from typing import Optional -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest import requests_mock @@ -118,7 +118,7 @@ def test_constructor(self, opds_importer_fixture: OPDSImporterFixture): assert Representation.cautious_http_get == importer.http_get # But you can pass in anything you want. - do_get = object() + do_get = MagicMock() importer = OPDSImporter(session, collection=None, http_get=do_get) assert do_get == importer.http_get @@ -229,6 +229,7 @@ def test_extract_metadata(self, opds_importer_fixture: OPDSImporterFixture): assert data_source_name == c2._data_source [failure] = list(failures.values()) + assert isinstance(failure, CoverageFailure) assert ( "202: I'm working to locate a source for this identifier." 
== failure.exception @@ -260,10 +261,10 @@ def test_use_dcterm_identifier_as_id_with_id_and_dcterms_identifier( # First book doesn't have , so must be used as identifier book_1 = metadata.get("https://root.uri/1") - assert book_1 != None + assert book_1 is not None # Second book have and , so must be used as id book_2 = metadata.get("urn:isbn:9781468316438") - assert book_2 != None + assert book_2 is not None # Verify if id was add in the end of identifier book_2_identifiers = book_2.identifiers found = False @@ -271,10 +272,10 @@ def test_use_dcterm_identifier_as_id_with_id_and_dcterms_identifier( if entry.identifier == "https://root.uri/2": found = True break - assert found == True + assert found is True # Third book has more than one dcterms:identifers, all of then must be present as metadata identifier book_3 = metadata.get("urn:isbn:9781683351993") - assert book_2 != None + assert book_3 is not None # Verify if id was add in the end of identifier book_3_identifiers = book_3.identifiers expected_identifier = [ @@ -857,7 +858,7 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): session, collection=None ).import_from_feed(feed) - [crow, mouse] = sorted(imported_editions, key=lambda x: x.title) + [crow, mouse] = sorted(imported_editions, key=lambda x: str(x.title)) # By default, this feed is treated as though it came from the # metadata wrangler. No Work has been created. @@ -873,7 +874,7 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): # Three links have been added to the identifier of the 'mouse' # edition. image, thumbnail, description = sorted( - mouse.primary_identifier.links, key=lambda x: x.rel + mouse.primary_identifier.links, key=lambda x: str(x.rel) ) # A Representation was imported for the summary with known @@ -896,22 +897,24 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): # Two links were added to the identifier of the 'crow' edition. [broken_image, working_image] = sorted( - crow.primary_identifier.links, key=lambda x: x.resource.url + crow.primary_identifier.links, key=lambda x: str(x.resource.url) ) # Because these images did not have a specified media type or a # distinctive extension, and we have not actually retrieved # the URLs yet, we were not able to determine their media type, # so they have no associated Representation. + assert broken_image.resource.url is not None assert broken_image.resource.url.endswith("/broken-cover-image") + assert working_image.resource.url is not None assert working_image.resource.url.endswith("/working-cover-image") - assert None == broken_image.resource.representation - assert None == working_image.resource.representation + assert broken_image.resource.representation is None + assert working_image.resource.representation is None # Three measurements have been added to the 'mouse' edition. 
popularity, quality, rating = sorted( (x for x in mouse.primary_identifier.measurements if x.is_most_recent), - key=lambda x: x.quantity_measured, + key=lambda x: str(x.quantity_measured), ) assert DataSource.METADATA_WRANGLER == popularity.data_source.name @@ -927,7 +930,7 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): assert 0.6 == rating.value seven, children, courtship, fantasy, pz, magic, new_york = sorted( - mouse.primary_identifier.classifications, key=lambda x: x.subject.name + mouse.primary_identifier.classifications, key=lambda x: str(x.subject.name) ) pz_s = pz.subject @@ -1556,8 +1559,8 @@ class NoLinks(Mock): "Simulate an OPDS feed that contains no open-access links." open_access_links = [] - # We don't be making any HTTP requests, even simulated ones. - do_get = object() + # We won't be making any HTTP requests, even simulated ones. + do_get = MagicMock() # Here, there are no links at all. importer = NoLinks(session, None, do_get) @@ -1628,7 +1631,7 @@ def _is_open_access_link(self, url, type): result = good_link_importer.assert_importable_content( "feed", "url", max_get_attempts=5 ) - assert "this is a book" == result + assert True == result # The first link didn't work, but the second one did, # so we didn't try the third one. @@ -2055,7 +2058,7 @@ def follow_one_link(self, url): assert ( "some content", feed_url, - ) == monitor.importer.assert_importable_content_called_with + ) == monitor.importer.assert_importable_content_called_with # type: ignore[attr-defined] assert "looks good" == found_content.result def test_hook_methods(self, opds_importer_fixture: OPDSImporterFixture): @@ -2355,7 +2358,7 @@ def import_one_feed(self, feed): monitor.queue_response([["second next link"], "second page"]) monitor.queue_response([["next link"], "first page"]) - progress = monitor.run_once(object()) + progress = monitor.run_once(MagicMock()) # Feeds are imported in reverse order assert ["last page", "second page", "first page"] == monitor.imports From a799294a9787617a8087cb30527703e1dde72e4d Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 2 Oct 2023 17:20:13 -0400 Subject: [PATCH 075/262] Configure logging integrations via environment variables (PP-496) (#1421) Update logging to be setup as part of the service container via environment variables, instead of being configured using configuration settings from the database. This should fix the issue we have been seeing where CI hangs and eventually times out. Since the code that is causing the hang is configuring logging in the database. And is part of the work we need to do anyway to convert our settings fully away from integrationsettings stored in the database. 
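The approach described above can be summarized with a small sketch: logging is configured once, at service-container startup, from environment variables instead of database settings. This is an illustration only, not the code added in core/service/logging/; the variable names come from the README changes below, and the logger names and CloudWatch wiring are placeholders:

    import logging
    import os

    def setup_logging() -> None:
        # Defaults mirror the documented PALACE_LOG_* defaults.
        level = os.environ.get("PALACE_LOG_LEVEL", "INFO").upper()
        verbose_level = os.environ.get("PALACE_LOG_VERBOSE_LEVEL", "WARNING").upper()

        logging.basicConfig(level=level)

        # Particularly chatty loggers are kept at a higher threshold by default.
        # The logger names here are illustrative only.
        for noisy in ("sqlalchemy.engine", "botocore"):
            logging.getLogger(noisy).setLevel(verbose_level)

        if os.environ.get("PALACE_LOG_CLOUDWATCH_ENABLED", "false").lower() == "true":
            # A CloudWatch handler (for example watchtower.CloudWatchLogHandler)
            # would be attached here, driven by the PALACE_LOG_CLOUDWATCH_*
            # variables; omitted in this sketch.
            pass
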
--- README.md | 23 + api/admin/config.py | 2 +- api/admin/controller/__init__.py | 2 - api/admin/controller/sitewide_services.py | 17 - api/admin/routes.py | 16 - api/app.py | 33 +- api/controller.py | 4 - api/routes.py | 17 +- core/app_server.py | 82 ++- core/coverage.py | 1 - core/log.py | 470 ------------------ core/model/__init__.py | 11 - core/model/configuration.py | 4 - core/monitor.py | 1 - core/scripts.py | 5 +- core/service/container.py | 17 +- core/service/logging/configuration.py | 51 ++ core/service/logging/container.py | 57 +++ core/service/logging/log.py | 135 +++++ core/service/storage/container.py | 14 +- poetry.lock | 17 +- pyproject.toml | 2 +- scripts.py | 7 +- tests/api/admin/test_routes.py | 23 - tests/api/test_routes.py | 46 -- tests/api/test_scripts.py | 2 +- tests/core/service/logging/__init__.py | 0 .../service/logging/test_configuration.py | 32 ++ tests/core/service/logging/test_log.py | 149 ++++++ tests/core/service/storage/test_s3.py | 3 +- tests/core/test_app_server.py | 50 +- tests/core/test_log.py | 240 --------- tests/fixtures/database.py | 4 - tests/fixtures/s3.py | 16 +- 34 files changed, 586 insertions(+), 967 deletions(-) delete mode 100644 core/log.py create mode 100644 core/service/logging/configuration.py create mode 100644 core/service/logging/container.py create mode 100644 core/service/logging/log.py create mode 100644 tests/core/service/logging/__init__.py create mode 100644 tests/core/service/logging/test_configuration.py create mode 100644 tests/core/service/logging/test_log.py delete mode 100644 tests/core/test_log.py diff --git a/README.md b/README.md index c9bd94fee6..ef92d5cdcb 100644 --- a/README.md +++ b/README.md @@ -186,6 +186,29 @@ a storage service, you can set the following environment variables: - `{key}`: The key of the file. - `{region}`: The region of the storage service. +#### Logging + +The application uses the [Python logging](https://docs.python.org/3/library/logging.html) module for logging. Optionally +logs can be configured to be sent to AWS CloudWatch logs. The following environment variables can be used to configure +the logging: + +- `PALACE_LOG_LEVEL`: The log level to use for the application. The default is `INFO`. +- `PALACE_LOG_VERBOSE_LEVEL`: The log level to use for particularly verbose loggers. Keeping these loggers at a + higher log level by default makes it easier to troubleshoot issues. The default is `WARNING`. +- `PALACE_LOG_CLOUDWATCH_ENABLED`: Enable / disable sending logs to CloudWatch. The default is `false`. +- `PALACE_LOG_CLOUDWATCH_REGION`: The AWS region of the CloudWatch logs. This must be set if using CloudWatch logs. +- `PALACE_LOG_CLOUDWATCH_GROUP`: The name of the CloudWatch log group to send logs to. Default is `palace`. +- `PALACE_LOG_CLOUDWATCH_STREAM`: The name of the CloudWatch log stream to send logs to. Default is + `{machine_name}/{program_name}/{logger_name}/{process_id}`. See + [watchtower docs](https://github.com/kislyuk/watchtower#log-stream-naming) for details. +- `PALACE_LOG_CLOUDWATCH_INTERVAL`: The interval in seconds to send logs to CloudWatch. Default is `60`. +- `PALACE_LOG_CLOUDWATCH_CREATE_GROUP`: Whether to create the log group if it does not exist. Default is `true`. +- `PALACE_LOG_CLOUDWATCH_ACCESS_KEY`: The access key to use when sending logs to CloudWatch. This is optional. + - If this key is set it will be passed to boto3 when connecting to CloudWatch. 
+ - If it is not set boto3 will attempt to find credentials as outlined in their + [documentation](https://boto3.amazonaws.com/v1/documentation/api/latest/guide/credentials.html#configuring-credentials). +- `PALACE_LOG_CLOUDWATCH_SECRET_KEY`: The secret key to use when sending logs to CloudWatch. This is optional. + #### Patron `Basic Token` authentication Enables/disables patron "basic token" authentication through setting the designated environment variable to any diff --git a/api/admin/config.py b/api/admin/config.py index a8ddac7736..4f109c0dee 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -18,7 +18,7 @@ class Configuration: APP_NAME = "Palace Collection Manager" PACKAGE_NAME = "@thepalaceproject/circulation-admin" - PACKAGE_VERSION = "1.9.0" + PACKAGE_VERSION = "1.10.0" STATIC_ASSETS = { "admin_js": "circulation-admin.js", diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index 9f8912d6a4..fb03af81bd 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -43,7 +43,6 @@ def setup_admin_controllers(manager: CirculationManager): from api.admin.controller.settings import SettingsController from api.admin.controller.sign_in import SignInController from api.admin.controller.sitewide_services import ( - LoggingServicesController, SearchServicesController, SitewideServicesController, ) @@ -94,7 +93,6 @@ def setup_admin_controllers(manager: CirculationManager): IndividualAdminSettingsController(manager) ) manager.admin_sitewide_services_controller = SitewideServicesController(manager) - manager.admin_logging_services_controller = LoggingServicesController(manager) manager.admin_search_service_self_tests_controller = ( SearchServiceSelfTestsController(manager) ) diff --git a/api/admin/controller/sitewide_services.py b/api/admin/controller/sitewide_services.py index f684752ce4..15f54cdec3 100644 --- a/api/admin/controller/sitewide_services.py +++ b/api/admin/controller/sitewide_services.py @@ -10,7 +10,6 @@ UNKNOWN_PROTOCOL, ) from core.external_search import ExternalSearchIndex -from core.log import CloudwatchLogs, SysLogger from core.model import ExternalIntegration, get_one_or_create from core.util.problem_detail import ProblemDetail @@ -108,22 +107,6 @@ def validate_form_fields(self, protocols, **fields): return UNKNOWN_PROTOCOL -class LoggingServicesController(SitewideServicesController): - def process_services(self): - detail = _( - "You tried to create a new logging service, but a logging service is already configured." 
- ) - return self._manage_sitewide_service( - ExternalIntegration.LOGGING_GOAL, - [SysLogger, CloudwatchLogs], - "logging_services", - detail, - ) - - def process_delete(self, service_id): - return self._delete_integration(service_id, ExternalIntegration.LOGGING_GOAL) - - class SearchServicesController(SitewideServicesController): def __init__(self, manager): super().__init__(manager) diff --git a/api/admin/routes.py b/api/admin/routes.py index 900096adbb..3a35da6e3c 100644 --- a/api/admin/routes.py +++ b/api/admin/routes.py @@ -553,22 +553,6 @@ def announcements_for_all(): return app.manager.admin_announcement_service.process_many() -@app.route("/admin/logging_services", methods=["GET", "POST"]) -@returns_json_or_response_or_problem_detail -@requires_admin -@requires_csrf_token -def logging_services(): - return app.manager.admin_logging_services_controller.process_services() - - -@app.route("/admin/logging_service/", methods=["DELETE"]) -@returns_json_or_response_or_problem_detail -@requires_admin -@requires_csrf_token -def logging_service(key): - return app.manager.admin_logging_services_controller.process_delete(key) - - @app.route("/admin/discovery_service_library_registrations", methods=["GET", "POST"]) @returns_json_or_response_or_problem_detail @requires_admin diff --git a/api/app.py b/api/app.py index d380beed10..e774655fa0 100644 --- a/api/app.py +++ b/api/app.py @@ -5,19 +5,18 @@ import flask_babel from flask_babel import Babel from flask_pydantic_spec import FlaskPydanticSpec -from sqlalchemy.orm import Session from api.config import Configuration +from core.app_server import ErrorHandler from core.flask_sqlalchemy_session import flask_scoped_session from core.local_analytics_provider import LocalAnalyticsProvider -from core.log import LogConfiguration from core.model import ( LOCK_ID_APP_INIT, ConfigurationSetting, SessionManager, pg_advisory_lock, ) -from core.service.container import container_instance +from core.service.container import Services, container_instance from core.util import LanguageCodes from core.util.cache import CachedData from scripts import InstanceInitializationScript @@ -66,14 +65,13 @@ def initialize_admin(_db=None): LocalAnalyticsProvider.initialize(_db) -def initialize_circulation_manager(): +def initialize_circulation_manager(container: Services): if os.environ.get("AUTOINITIALIZE") == "False": # It's the responsibility of the importing code to set app.manager # appropriately. pass else: if getattr(app, "manager", None) is None: - container = container_instance() try: app.manager = CirculationManager(app._db, container) except Exception: @@ -93,16 +91,6 @@ def initialize_database(): app._db = _db -def initialize_logging(db: Session, app: PalaceFlask): - testing = "TESTING" in os.environ - log_level = LogConfiguration.initialize(db, testing=testing) - debug = log_level == "DEBUG" - app.config["DEBUG"] = debug - app.debug = debug - db.commit() - logging.getLogger().info("Application debug mode==%r" % app.debug) - - from . import routes # noqa from .admin import routes as admin_routes # noqa @@ -110,6 +98,18 @@ def initialize_logging(db: Session, app: PalaceFlask): def initialize_application() -> PalaceFlask: with app.app_context(), flask_babel.force_locale("en"): initialize_database() + + # Load the application service container + container = container_instance() + + # Initialize the application services container, this will make sure + # that the logging system is initialized. 
+ container.init_resources() + + # Initialize the applications error handler. + error_handler = ErrorHandler(app, container.config.logging.level()) + app.register_error_handler(Exception, error_handler.handle) + # TODO: Remove this lock once our settings are moved to integration settings. # We need this lock, so that only one instance of the application is # initialized at a time. This prevents database conflicts when multiple @@ -117,8 +117,7 @@ def initialize_application() -> PalaceFlask: # time during initialization. This should be able to go away once we # move our settings off the configurationsettings system. with pg_advisory_lock(app._db, LOCK_ID_APP_INIT): - initialize_logging(app._db, app) - initialize_circulation_manager() + initialize_circulation_manager(container) initialize_admin() return app diff --git a/api/controller.py b/api/controller.py index 4e1c90dd80..5143e4adf8 100644 --- a/api/controller.py +++ b/api/controller.py @@ -53,7 +53,6 @@ SearchFacets, WorkList, ) -from core.log import LogConfiguration from core.marc import MARCExporter from core.metadata_layer import ContributorData from core.model import ( @@ -160,7 +159,6 @@ from api.admin.controller.settings import SettingsController from api.admin.controller.sign_in import SignInController from api.admin.controller.sitewide_services import ( - LoggingServicesController, SearchServicesController, SitewideServicesController, ) @@ -217,7 +215,6 @@ class CirculationManager: admin_library_settings_controller: LibrarySettingsController admin_individual_admin_settings_controller: IndividualAdminSettingsController admin_sitewide_services_controller: SitewideServicesController - admin_logging_services_controller: LoggingServicesController admin_search_service_self_tests_controller: SearchServiceSelfTestsController admin_search_services_controller: SearchServicesController admin_catalog_services_controller: CatalogServicesController @@ -300,7 +297,6 @@ def load_settings(self): configuration after changes are made in the administrative interface. """ - LogConfiguration.initialize(self._db) self.analytics = Analytics(self._db, refresh=True) with elapsed_time_logging( diff --git a/api/routes.py b/api/routes.py index 3a6a172c28..96b0df52b4 100644 --- a/api/routes.py +++ b/api/routes.py @@ -5,11 +5,10 @@ from flask import Response, make_response, request from flask_cors.core import get_cors_options, set_cors_headers from flask_pydantic_spec import Response as SpecResponse -from werkzeug.exceptions import HTTPException from api.model.patron_auth import PatronAuthAccessToken from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse -from core.app_server import ErrorHandler, compressible, returns_problem_detail +from core.app_server import compressible, returns_problem_detail from core.model import HasSessionCache from core.util.problem_detail import ProblemDetail @@ -108,20 +107,6 @@ def wrapped_function(*args, **kwargs): return update_wrapper(wrapped_function, f) -h = ErrorHandler(app, app.config["DEBUG"]) - - -@app.errorhandler(Exception) -@allows_patron_web -def exception_handler(exception): - if isinstance(exception, HTTPException): - # This isn't an exception we need to handle, it's werkzeug's way - # of interrupting normal control flow with a specific HTTP response. - # Return the exception and it will be used as the response. 
- return exception - return h.handle(exception) - - def has_library(f): """Decorator to extract the library short name from the arguments.""" diff --git a/core/app_server.py b/core/app_server.py index 5eaa9464b7..ea5c0c6b9f 100644 --- a/core/app_server.py +++ b/core/app_server.py @@ -1,4 +1,5 @@ """Implement logic common to more than one of the Simplified applications.""" +from __future__ import annotations import gzip import logging @@ -6,25 +7,28 @@ import traceback from functools import wraps from io import BytesIO +from typing import TYPE_CHECKING import flask -from flask import make_response, url_for -from flask_babel import lazy_gettext as _ +from flask import Response, make_response, url_for from flask_pydantic_spec import FlaskPydanticSpec from psycopg2 import DatabaseError -from sqlalchemy.exc import SQLAlchemyError +from werkzeug.exceptions import HTTPException import core from api.admin.config import Configuration as AdminUiConfig from core.feed.acquisition import LookupAcquisitionFeed, OPDSAcquisitionFeed from .lane import Facets, Pagination -from .log import LogConfiguration from .model import Identifier from .problem_details import * +from .service.logging.configuration import LogLevel from .util.opds_writer import OPDSMessage from .util.problem_detail import ProblemDetail +if TYPE_CHECKING: + from api.util.flask import PalaceFlask + def load_facets_from_request( facet_config=None, @@ -167,57 +171,38 @@ def compress(response): class ErrorHandler: - def __init__(self, app, debug=False): + def __init__(self, app: PalaceFlask, log_level: LogLevel): """Constructor. - :param app: A flask.app object. - :param debug: Set this to True to give detailed debugging - information on errors, even if the site is not configured - to do so. + :param app: The Flask application object. + :param log_level: The log level set for this application. """ self.app = app - self.debug = debug + self.debug = log_level == LogLevel.debug + self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") - def handle(self, exception): + def handle(self, exception: Exception) -> Response | HTTPException: """Something very bad has happened. Notify the client.""" - # By default, when reporting errors, err on the side of - # terseness, to avoid leaking sensitive information. - debug = self.app.config["DEBUG"] or self.debug + if isinstance(exception, HTTPException): + # This isn't an exception we need to handle, it's werkzeug's way + # of interrupting normal control flow with a specific HTTP response. + # Return the exception and it will be used as the response. + return exception if hasattr(self.app, "manager") and hasattr(self.app.manager, "_db"): - # There is an active database session. - - # Use it to determine whether we are in debug mode, in - # which case we _should_ provide the client with a lot of - # information about the problem, without worrying - # whether it contains sensitive information. - _db = self.app.manager._db - try: - LogConfiguration.from_configuration(_db) - ( - log_level, - database_log_level, - handlers, - errors, - ) = LogConfiguration.from_configuration(self.app.manager._db) - debug = debug or ( - LogConfiguration.DEBUG in (log_level, database_log_level) - ) - except SQLAlchemyError as e: - # The database session could not be used, possibly due to - # the very error under consideration. Go with the - # preexisting value for `debug`. - pass - - # Then roll the session back. + # If there is an active database session, then roll the session back. 
self.app.manager._db.rollback() - tb = traceback.format_exc() + # By default, when reporting errors, we err on the side of + # terseness, to avoid leaking sensitive information. We only + # log a stack trace in the case we have debugging turned on. + # Otherwise, we just display a generic error message. + tb = traceback.format_exc() if isinstance(exception, DatabaseError): # The database session may have become tainted. For now # the simplest thing to do is to kill the entire process # and let uwsgi restart it. - logging.error( + self.log.error( "Database error: %s Treating as fatal to avoid holding on to a tainted session!", exception, exc_info=exception, @@ -229,15 +214,15 @@ def handle(self, exception): sys.exit() # By default, the error will be logged at log level ERROR. - log_method = logging.error + log_method = self.log.error # Okay, it's not a database error. Turn it into a useful HTTP error # response. if hasattr(exception, "as_problem_detail_document"): # This exception can be turned directly into a problem # detail document. - document = exception.as_problem_detail_document(debug) - if not debug: + document = exception.as_problem_detail_document(self.debug) + if not self.debug: document.debug_message = None else: if document.debug_message: @@ -249,17 +234,14 @@ def handle(self, exception): # service. It's a serious problem, but probably not # indicative of a bug in our software. Log it at log level # WARN. - log_method = logging.warning + log_method = self.log.warning response = make_response(document.response) else: # There's no way to turn this exception into a problem # document. This is probably indicative of a bug in our # software. - if debug: - body = tb - else: - body = _("An internal error occured") - response = make_response(str(body), 500, {"Content-Type": "text/plain"}) + body = tb if self.debug else "An internal error occurred" + response = make_response(body, 500, {"Content-Type": "text/plain"}) log_method("Exception in web app: %s", exception, exc_info=exception) return response diff --git a/core/coverage.py b/core/coverage.py index b4d344e3fb..b2a86881c4 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -8,7 +8,6 @@ from core.model.coverage import EquivalencyCoverageRecord -from . import log # This sets the appropriate log format. from .metadata_layer import ReplacementPolicy, TimestampData from .model import ( BaseCoverageRecord, diff --git a/core/log.py b/core/log.py deleted file mode 100644 index 02a09c1a0a..0000000000 --- a/core/log.py +++ /dev/null @@ -1,470 +0,0 @@ -import json -import logging -import socket - -from boto3 import client as AwsClient -from flask_babel import lazy_gettext as _ -from watchtower import CloudWatchLogHandler - -from .config import CannotLoadConfiguration, Configuration -from .model import ConfigurationSetting, ExternalIntegration -from .util.datetime_helpers import utc_now - - -class JSONFormatter(logging.Formatter): - hostname = socket.gethostname() - fqdn = socket.getfqdn() - if len(fqdn) > len(hostname): - hostname = fqdn - - def __init__(self, app_name): - super().__init__() - self.app_name = app_name or LogConfiguration.DEFAULT_APP_NAME - - def format(self, record): - def ensure_str(s): - """Ensure that unicode strings are used for a record's message. - We don't want to try to interpolate an incompatible byte type; it - could lead to a UnicodeDecodeError. 
- """ - if isinstance(s, bytes): - s = s.decode("utf-8") - return s - - message = ensure_str(record.msg) - - if record.args: - record_args = tuple(ensure_str(arg) for arg in record.args) - try: - message = message % record_args - except Exception as e: - # There was a problem formatting the log message, - # which points to a bug. A problem with the logging - # code shouldn't break the code that actually does the - # work, but we can't just let this slide -- we need to - # report the problem so it can be fixed. - message = ( - "Log message could not be formatted. Exception: %r. Original message: message=%r args=%r" - % (e, message, record_args) - ) - data = dict( - host=self.hostname, - app=self.app_name, - name=record.name, - level=record.levelname, - filename=record.filename, - message=message, - timestamp=utc_now().isoformat(), - ) - if record.exc_info: - data["traceback"] = self.formatException(record.exc_info) - return json.dumps(data) - - -class StringFormatter(logging.Formatter): - """Encode all output as a string.""" - - def format(self, record): - data = super().format(record) - return str(data) - - -class Logger: - """Abstract base class for logging""" - - DEFAULT_APP_NAME = "simplified" - - JSON_LOG_FORMAT = "json" - TEXT_LOG_FORMAT = "text" - DEFAULT_MESSAGE_TEMPLATE = ( - "%(asctime)s:%(name)s:%(levelname)s:%(filename)s:%(message)s" - ) - - @classmethod - def set_formatter( - cls, handler, app_name=None, log_format=None, message_template=None - ): - """Tell the given `handler` to format its log messages in a - certain way. - """ - # Initialize defaults - if log_format is None: - log_format = cls.JSON_LOG_FORMAT - if message_template is None: - message_template = cls.DEFAULT_MESSAGE_TEMPLATE - - if log_format == cls.JSON_LOG_FORMAT: - formatter = JSONFormatter(app_name) - else: - formatter = StringFormatter(message_template) - handler.setFormatter(formatter) - - @classmethod - def from_configuration(cls, _db, testing=False): - """Should be implemented in each logging class.""" - raise NotImplementedError() - - -class SysLogger(Logger): - - NAME = "sysLog" - - # Settings for the integration with protocol=INTERNAL_LOGGING - LOG_FORMAT = "log_format" - LOG_MESSAGE_TEMPLATE = "message_template" - - SETTINGS = [ - { - "key": LOG_FORMAT, - "label": _("Log Format"), - "type": "select", - "options": [ - {"key": Logger.JSON_LOG_FORMAT, "label": _("json")}, - {"key": Logger.TEXT_LOG_FORMAT, "label": _("text")}, - ], - }, - { - "key": LOG_MESSAGE_TEMPLATE, - "label": _("template"), - "default": Logger.DEFAULT_MESSAGE_TEMPLATE, - "required": True, - }, - ] - - SITEWIDE = True - - @classmethod - def _defaults(cls, testing=False): - """Return default log configuration values.""" - if testing: - internal_log_format = cls.TEXT_LOG_FORMAT - else: - internal_log_format = cls.JSON_LOG_FORMAT - message_template = cls.DEFAULT_MESSAGE_TEMPLATE - return internal_log_format, message_template - - @classmethod - def from_configuration(cls, _db, testing=False): - (internal_log_format, message_template) = cls._defaults(testing) - app_name = cls.DEFAULT_APP_NAME - - if _db and not testing: - goal = ExternalIntegration.LOGGING_GOAL - internal = ExternalIntegration.lookup( - _db, ExternalIntegration.INTERNAL_LOGGING, goal - ) - - if internal: - internal_log_format = ( - internal.setting(cls.LOG_FORMAT).value or internal_log_format - ) - message_template = ( - internal.setting(cls.LOG_MESSAGE_TEMPLATE).value or message_template - ) - app_name = ( - ConfigurationSetting.sitewide(_db, 
Configuration.LOG_APP_NAME).value - or app_name - ) - - handler = logging.StreamHandler() - cls.set_formatter( - handler, - log_format=internal_log_format, - message_template=message_template, - app_name=app_name, - ) - return handler - - -class CloudwatchLogs(Logger): - - NAME = "AWS Cloudwatch Logs" - GROUP = "group" - STREAM = "stream" - INTERVAL = "interval" - CREATE_GROUP = "create_group" - REGION = "region" - DEFAULT_REGION = "us-west-2" - DEFAULT_INTERVAL = 60 - DEFAULT_CREATE_GROUP = "TRUE" - - # https://docs.aws.amazon.com/general/latest/gr/rande.html#cwl_region - REGIONS = [ - {"key": "us-east-2", "label": _("US East (Ohio)")}, - {"key": "us-east-1", "label": _("US East (N. Virginia)")}, - {"key": "us-west-1", "label": _("US West (N. California)")}, - {"key": "us-west-2", "label": _("US West (Oregon)")}, - {"key": "ap-south-1", "label": _("Asia Pacific (Mumbai)")}, - {"key": "ap-northeast-3", "label": _("Asia Pacific (Osaka-Local)")}, - {"key": "ap-northeast-2", "label": _("Asia Pacific (Seoul)")}, - {"key": "ap-southeast-1", "label": _("Asia Pacific (Singapore)")}, - {"key": "ap-southeast-2", "label": _("Asia Pacific (Sydney)")}, - {"key": "ap-northeast-1", "label": _("Asia Pacific (Tokyo)")}, - {"key": "ca-central-1", "label": _("Canada (Central)")}, - {"key": "cn-north-1", "label": _("China (Beijing)")}, - {"key": "cn-northwest-1", "label": _("China (Ningxia)")}, - {"key": "eu-central-1", "label": _("EU (Frankfurt)")}, - {"key": "eu-west-1", "label": _("EU (Ireland)")}, - {"key": "eu-west-2", "label": _("EU (London)")}, - {"key": "eu-west-3", "label": _("EU (Paris)")}, - {"key": "sa-east-1", "label": _("South America (Sao Paulo)")}, - ] - - SETTINGS = [ - { - "key": GROUP, - "label": _("Log Group"), - "default": Logger.DEFAULT_APP_NAME, - "required": True, - }, - { - "key": STREAM, - "label": _("Log Stream"), - "default": Logger.DEFAULT_APP_NAME, - "required": True, - }, - { - "key": INTERVAL, - "label": _("Update Interval Seconds"), - "default": DEFAULT_INTERVAL, - "required": True, - }, - { - "key": REGION, - "label": _("AWS Region"), - "type": "select", - "options": REGIONS, - "default": DEFAULT_REGION, - "required": True, - }, - { - "key": CREATE_GROUP, - "label": _("Automatically Create Log Group"), - "type": "select", - "options": [ - {"key": "TRUE", "label": _("Yes")}, - {"key": "FALSE", "label": _("No")}, - ], - "default": True, - "required": True, - }, - ] - - SITEWIDE = True - - @classmethod - def from_configuration(cls, _db, testing=False): - settings = None - cloudwatch = None - - app_name = cls.DEFAULT_APP_NAME - if _db and not testing: - goal = ExternalIntegration.LOGGING_GOAL - settings = ExternalIntegration.lookup( - _db, ExternalIntegration.CLOUDWATCH, goal - ) - app_name = ( - ConfigurationSetting.sitewide(_db, Configuration.LOG_APP_NAME).value - or app_name - ) - - if settings: - cloudwatch = cls.get_handler(settings, testing) - cls.set_formatter(cloudwatch, app_name) - - return cloudwatch - - @classmethod - def get_handler(cls, settings, testing=False): - """Turn ExternalIntegration into a log handler.""" - group = settings.setting(cls.GROUP).value or cls.DEFAULT_APP_NAME - stream = settings.setting(cls.STREAM).value or cls.DEFAULT_APP_NAME - interval = settings.setting(cls.INTERVAL).value or cls.DEFAULT_INTERVAL - region = settings.setting(cls.REGION).value or cls.DEFAULT_REGION - create_group = ( - settings.setting(cls.CREATE_GROUP).value or cls.DEFAULT_CREATE_GROUP - ) - - try: - interval = int(interval) - if interval <= 0: - raise 
CannotLoadConfiguration( - "AWS Cloudwatch Logs interval must be a positive integer." - ) - except ValueError: - raise CannotLoadConfiguration( - "AWS Cloudwatch Logs interval configuration must be an integer." - ) - client = AwsClient("logs", region_name=region) - handler = CloudWatchLogHandler( - log_group_name=group, - log_stream_name=stream, - send_interval=interval, - boto3_client=client, - create_log_group=create_group == "TRUE", - ) - # Add a filter that makes sure no messages from botocore are processed by - # the cloudwatch logs integration, as these messages can lead to an infinite loop. - class BotoFilter(logging.Filter): - def filter(self, record): - return not record.name.startswith("botocore") - - handler.addFilter(BotoFilter()) - return handler - - -class LogConfiguration: - """Configures the active Python logging handlers based on logging - configuration from the database. - """ - - DEBUG = "DEBUG" - INFO = "INFO" - WARN = "WARN" - ERROR = "ERROR" - - # The default value to put into the 'app' field of JSON-format logs, - # unless LOG_APP_NAME overrides it. - DEFAULT_APP_NAME = "simplified" - LOG_APP_NAME = "log_app" - - DEFAULT_LOG_LEVEL = INFO - DEFAULT_DATABASE_LOG_LEVEL = WARN - - # Settings for the integration with protocol=INTERNAL_LOGGING - LOG_LEVEL = "log_level" - DATABASE_LOG_LEVEL = "database_log_level" - LOG_LEVEL_UI = [ - {"key": DEBUG, "value": _("Debug")}, - {"key": INFO, "value": _("Info")}, - {"key": WARN, "value": _("Warn")}, - {"key": ERROR, "value": _("Error")}, - ] - - SITEWIDE_SETTINGS = [ - { - "key": LOG_LEVEL, - "label": _("Log Level"), - "type": "select", - "options": LOG_LEVEL_UI, - "default": INFO, - }, - { - "key": LOG_APP_NAME, - "label": _("Log Application name"), - "description": _( - "Log messages originating from this application will be tagged with this name. If you run multiple instances, giving each one a different application name will help you determine which instance is having problems." - ), - "default": DEFAULT_APP_NAME, - }, - { - "key": DATABASE_LOG_LEVEL, - "label": _("Database Log Level"), - "type": "select", - "options": LOG_LEVEL_UI, - "description": _( - "Database logs are extremely verbose, so unless you're diagnosing a database-related problem, it's a good idea to set a higher log level for database messages." - ), - "default": WARN, - }, - ] - - @classmethod - def initialize(cls, _db, testing=False): - """Make the logging handlers reflect the current logging rules - as configured in the database. - - :param _db: A database connection. If this is None, the default logging - configuration will be used. - - :param testing: True if unit tests are currently running; otherwise False. - """ - log_level, database_log_level, new_handlers, errors = cls.from_configuration( - _db, testing - ) - - # Replace the set of handlers associated with the root logger. - logger = logging.getLogger() - logger.setLevel(log_level) - old_handlers = list(logger.handlers) - for handler in new_handlers: - logger.addHandler(handler) - handler.setLevel(log_level) - for handler in old_handlers: - logger.removeHandler(handler) - - # Set the loggers for various verbose libraries to the database - # log level, which is probably higher than the normal log level. 
- for logger in ( - "sqlalchemy.engine", - "opensearch", - "requests.packages.urllib3.connectionpool", - "botocore", - ): - logging.getLogger(logger).setLevel(database_log_level) - - # These loggers can cause infinite loops if they're set to - # DEBUG, because their log is triggered during the process of - # logging something to Cloudwatch. These loggers will never have their - # log level set lower than WARN. - if database_log_level == cls.ERROR: - loop_prevention_log_level = cls.ERROR - else: - loop_prevention_log_level = cls.WARN - for logger in ["urllib3.connectionpool"]: - logging.getLogger(logger).setLevel(loop_prevention_log_level) - - # If we had an error creating any log handlers report it - for error in errors: - logging.getLogger().error(error) - - return log_level - - @classmethod - def from_configuration(cls, _db, testing=False): - """Return the logging policy as configured in the database. - - :param _db: A database connection. If None, the default - logging policy will be used. - - :param testing: A boolean indicating whether a unit test is - happening right now. If True, the database configuration will - be ignored in favor of a known test-friendly policy. (It's - okay to pass in False during a test *of this method*.) - - :return: A 3-tuple (internal_log_level, database_log_level, - handlers). `internal_log_level` is the log level to be used - for most log messages. `database_log_level` is the log level - to be applied to the loggers for the database connector and - other verbose third-party libraries. `handlers` is a list of - Handler objects that will be associated with the top-level - logger. - """ - log_level = cls.DEFAULT_LOG_LEVEL - database_log_level = cls.DEFAULT_DATABASE_LOG_LEVEL - - if _db and not testing: - log_level = ( - ConfigurationSetting.sitewide(_db, Configuration.LOG_LEVEL).value - or log_level - ) - database_log_level = ( - ConfigurationSetting.sitewide( - _db, Configuration.DATABASE_LOG_LEVEL - ).value - or database_log_level - ) - - loggers = [SysLogger, CloudwatchLogs] - handlers = [] - errors = [] - - for logger in loggers: - try: - handler = logger.from_configuration(_db, testing) - if handler: - handlers.append(handler) - except Exception as e: - errors.append(f"Error creating logger {logger.NAME} {str(e)}") - - return log_level, database_log_level, handlers, errors diff --git a/core/model/__init__.py b/core/model/__init__.py index 069c48d324..fe795d857b 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -476,17 +476,6 @@ def production_session(initialize_data=True) -> Session: url = url[1:] logging.debug("Database url: %s", url) _db = SessionManager.session(url, initialize_data=initialize_data) - - # The first thing to do after getting a database connection is to - # set up the logging configuration. - # - # If called during a unit test, this will configure logging - # incorrectly, but 1) this method isn't normally called during - # unit tests, and 2) package_setup() will call initialize() again - # with the right arguments. 
- from ..log import LogConfiguration - - LogConfiguration.initialize(_db) return _db diff --git a/core/model/configuration.py b/core/model/configuration.py index c98e822845..8ba3f249e9 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -162,10 +162,6 @@ class ExternalIntegration(Base): # Integrations with ANALYTICS_GOAL GOOGLE_ANALYTICS = "Google Analytics" - # Integrations with LOGGING_GOAL - INTERNAL_LOGGING = "Internal logging" - CLOUDWATCH = "AWS Cloudwatch Logs" - # Integrations with CATALOG_GOAL MARC_EXPORT = "MARC Export" diff --git a/core/monitor.py b/core/monitor.py index e0500d37a9..ef73459a4b 100644 --- a/core/monitor.py +++ b/core/monitor.py @@ -8,7 +8,6 @@ from sqlalchemy.orm import defer from sqlalchemy.sql.expression import and_, or_ -from . import log # This sets the appropriate log format and level. from .config import Configuration from .metadata_layer import TimestampData from .model import ( diff --git a/core/scripts.py b/core/scripts.py index a405ec1422..a48108644f 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -126,6 +126,9 @@ def __init__(self, _db=None, services: Optional[Services] = None, *args, **kwarg self._services = container_instance() if services is None else services + # Call init_resources() to initialize the logging configuration. + self._services.init_resources() + def run(self): DataSource.well_known_sources(self._db) start_time = utc_now() @@ -2727,7 +2730,7 @@ class GenerateOverdriveAdvantageAccountList(InputScript): """ def __init__(self, _db=None, *args, **kwargs): - super().__init__(_db, args, kwargs) + super().__init__(_db, *args, **kwargs) self._data: List[List[str]] = list() def _create_overdrive_api(self, collection: Collection): diff --git a/core/service/container.py b/core/service/container.py index b204df6462..44ebc645b8 100644 --- a/core/service/container.py +++ b/core/service/container.py @@ -1,6 +1,9 @@ from dependency_injector import providers from dependency_injector.containers import DeclarativeContainer +from dependency_injector.providers import Container +from core.service.logging.configuration import LoggingConfiguration +from core.service.logging.container import Logging from core.service.storage.configuration import StorageConfiguration from core.service.storage.container import Storage @@ -9,15 +12,25 @@ class Services(DeclarativeContainer): config = providers.Configuration() - storage = providers.Container( + storage = Container( Storage, config=config.storage, ) + logging = Container( + Logging, + config=config.logging, + ) + def create_container() -> Services: container = Services() - container.config.from_dict({"storage": StorageConfiguration().dict()}) + container.config.from_dict( + { + "storage": StorageConfiguration().dict(), + "logging": LoggingConfiguration().dict(), + } + ) return container diff --git a/core/service/logging/configuration.py b/core/service/logging/configuration.py new file mode 100644 index 0000000000..0a758c9720 --- /dev/null +++ b/core/service/logging/configuration.py @@ -0,0 +1,51 @@ +from enum import Enum +from typing import Any, Dict, Optional + +import boto3 +from pydantic import PositiveInt, validator +from watchtower import DEFAULT_LOG_STREAM_NAME + +from core.service.configuration import ServiceConfiguration + + +class LogLevel(Enum): + debug = "DEBUG" + info = "INFO" + warning = "WARNING" + error = "ERROR" + + +class LoggingConfiguration(ServiceConfiguration): + level: LogLevel = LogLevel.info + verbose_level: LogLevel = LogLevel.warning + + cloudwatch_enabled: 
bool = False + cloudwatch_region: Optional[str] = None + cloudwatch_group: str = "palace" + cloudwatch_stream: str = DEFAULT_LOG_STREAM_NAME + cloudwatch_interval: PositiveInt = 60 + cloudwatch_create_group: bool = True + cloudwatch_access_key: Optional[str] = None + cloudwatch_secret_key: Optional[str] = None + + @validator("cloudwatch_region") + def validate_cloudwatch_region( + cls, v: Optional[str], values: Dict[str, Any] + ) -> Optional[str]: + if not values.get("cloudwatch_enabled"): + # If cloudwatch is not enabled, no validation is needed. + return None + + if v is None: + raise ValueError(f"Region must be provided if cloudwatch is enabled.") + + session = boto3.session.Session() + regions = session.get_available_regions(service_name="logs") + if v not in regions: + raise ValueError( + f"Invalid region: {v}. Region must be one of: {' ,'.join(regions)}." + ) + return v + + class Config: + env_prefix = "PALACE_LOG_" diff --git a/core/service/logging/container.py b/core/service/logging/container.py new file mode 100644 index 0000000000..91f4fa281e --- /dev/null +++ b/core/service/logging/container.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +from logging import Handler +from typing import TYPE_CHECKING, Optional + +import boto3 +from dependency_injector import providers +from dependency_injector.containers import DeclarativeContainer +from dependency_injector.providers import Provider, Singleton + +from core.service.logging.log import ( + JSONFormatter, + create_cloudwatch_handler, + create_stream_handler, + setup_logging, +) + +if TYPE_CHECKING: + from mypy_boto3_logs import CloudWatchLogsClient + + +class Logging(DeclarativeContainer): + config = providers.Configuration() + + cloudwatch_client: Provider[CloudWatchLogsClient] = Singleton( + boto3.client, + service_name="logs", + aws_access_key_id=config.cloudwatch_access_key, + aws_secret_access_key=config.cloudwatch_secret_key, + region_name=config.cloudwatch_region, + ) + + json_formatter: Provider[JSONFormatter] = Singleton(JSONFormatter) + + cloudwatch_handler: Provider[Optional[Handler]] = providers.Singleton( + create_cloudwatch_handler, + formatter=json_formatter, + level=config.level, + client=cloudwatch_client, + group=config.cloudwatch_group, + stream=config.cloudwatch_stream, + interval=config.cloudwatch_interval, + create_group=config.cloudwatch_create_group, + ) + + stream_handler: Provider[Handler] = providers.Singleton( + create_stream_handler, formatter=json_formatter, level=config.level + ) + + logging = providers.Resource( + setup_logging, + level=config.level, + verbose_level=config.verbose_level, + stream=stream_handler, + cloudwatch_enabled=config.cloudwatch_enabled, + cloudwatch_callable=cloudwatch_handler.provider, + ) diff --git a/core/service/logging/log.py b/core/service/logging/log.py new file mode 100644 index 0000000000..23dc873e02 --- /dev/null +++ b/core/service/logging/log.py @@ -0,0 +1,135 @@ +from __future__ import annotations + +import json +import logging +import socket +from logging import Handler +from typing import TYPE_CHECKING, Any, Callable + +from watchtower import CloudWatchLogHandler + +from core.service.logging.configuration import LogLevel +from core.util.datetime_helpers import utc_now + +if TYPE_CHECKING: + from mypy_boto3_logs import CloudWatchLogsClient + + +class JSONFormatter(logging.Formatter): + def __init__(self) -> None: + super().__init__() + hostname = socket.gethostname() + fqdn = socket.getfqdn() + if len(fqdn) > len(hostname): + hostname = fqdn + 
self.hostname = hostname + + def format(self, record: logging.LogRecord) -> str: + def ensure_str(s: Any) -> Any: + """Ensure that unicode strings are used for a record's message. + We don't want to try to interpolate an incompatible byte type; it + could lead to a UnicodeDecodeError. + """ + if isinstance(s, bytes): + s = s.decode("utf-8") + return s + + message = ensure_str(record.msg) + if record.args: + record_args = tuple(ensure_str(arg) for arg in record.args) + try: + message = message % record_args + except Exception as e: + # There was a problem formatting the log message, + # which points to a bug. A problem with the logging + # code shouldn't break the code that actually does the + # work, but we can't just let this slide -- we need to + # report the problem so it can be fixed. + message = ( + "Log message could not be formatted. Exception: %r. Original message: message=%r args=%r" + % (e, message, record_args) + ) + data = dict( + host=self.hostname, + name=record.name, + level=record.levelname, + filename=record.filename, + message=message, + timestamp=utc_now().isoformat(), + ) + if record.exc_info: + data["traceback"] = self.formatException(record.exc_info) + return json.dumps(data) + + +class LogLoopPreventionFilter(logging.Filter): + """ + A filter that makes sure no messages from botocore or the urllib3 connection pool + are processed by the cloudwatch logs integration, as these messages can lead to an + infinite loop. + """ + + def filter(self, record: logging.LogRecord) -> bool: + if record.name.startswith("botocore"): + return False + elif record.name.startswith("urllib3.connectionpool"): + return False + + return True + + +def create_cloudwatch_handler( + formatter: logging.Formatter, + level: LogLevel, + client: CloudWatchLogsClient, + group: str, + stream: str, + interval: int, + create_group: bool, +) -> logging.Handler: + handler = CloudWatchLogHandler( + log_group_name=group, + log_stream_name=stream, + send_interval=interval, + boto3_client=client, + create_log_group=create_group, + ) + + handler.addFilter(LogLoopPreventionFilter()) + handler.setFormatter(formatter) + handler.setLevel(level.value) + return handler + + +def create_stream_handler( + formatter: logging.Formatter, level: LogLevel +) -> logging.Handler: + stream_handler = logging.StreamHandler() + stream_handler.setFormatter(formatter) + stream_handler.setLevel(level.value) + return stream_handler + + +def setup_logging( + level: LogLevel, + verbose_level: LogLevel, + stream: Handler, + cloudwatch_enabled: bool, + cloudwatch_callable: Callable[[], Handler], +) -> None: + # Set up the root logger + log_handlers = [stream] + if cloudwatch_enabled: + log_handlers.append(cloudwatch_callable()) + logging.basicConfig(force=True, level=level.value, handlers=log_handlers) + + # Set the loggers for various verbose libraries to the database + # log level, which is probably higher than the normal log level. 
+ for logger in ( + "sqlalchemy.engine", + "opensearch", + "requests.packages.urllib3.connectionpool", + "botocore", + "urllib3.connectionpool", + ): + logging.getLogger(logger).setLevel(verbose_level.value) diff --git a/core/service/storage/container.py b/core/service/storage/container.py index 54cf2db835..cf454d39a0 100644 --- a/core/service/storage/container.py +++ b/core/service/storage/container.py @@ -1,14 +1,22 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING, Optional + import boto3 from dependency_injector import providers from dependency_injector.containers import DeclarativeContainer +from dependency_injector.providers import Provider, Singleton from core.service.storage.s3 import S3Service +if TYPE_CHECKING: + from mypy_boto3_s3 import S3Client + class Storage(DeclarativeContainer): config = providers.Configuration() - s3_client = providers.Singleton( + s3_client: Provider[S3Client] = Singleton( boto3.client, service_name="s3", aws_access_key_id=config.access_key, @@ -17,7 +25,7 @@ class Storage(DeclarativeContainer): endpoint_url=config.endpoint_url, ) - analytics = providers.Singleton( + analytics: Provider[Optional[S3Service]] = providers.Singleton( S3Service.factory, client=s3_client, region=config.region, @@ -25,7 +33,7 @@ class Storage(DeclarativeContainer): url_template=config.url_template, ) - public = providers.Singleton( + public: Provider[Optional[S3Service]] = providers.Singleton( S3Service.factory, client=s3_client, region=config.region, diff --git a/poetry.lock b/poetry.lock index 697ddff890..a0825b41cd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -169,6 +169,7 @@ mypy-boto3-cloudformation = {version = ">=1.28.0,<1.29.0", optional = true, mark mypy-boto3-dynamodb = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} mypy-boto3-ec2 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} mypy-boto3-lambda = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-logs = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"logs\""} mypy-boto3-rds = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} mypy-boto3-s3 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} mypy-boto3-sqs = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} @@ -2588,6 +2589,20 @@ files = [ [package.dependencies] typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} +[[package]] +name = "mypy-boto3-logs" +version = "1.28.52" +description = "Type annotations for boto3.CloudWatchLogs 1.28.52 service generated with mypy-boto3-builder 7.19.0" +optional = false +python-versions = ">=3.7" +files = [ + {file = "mypy-boto3-logs-1.28.52.tar.gz", hash = "sha256:b51e9b97961223bfe2314ce16887bbd229857b3960a61d372480d6d688168b7e"}, + {file = "mypy_boto3_logs-1.28.52-py3-none-any.whl", hash = "sha256:d180d3ece8aeb349ae504fa9eddb1afb0d9574a237e9d728c6502a8b8e2d9147"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} + [[package]] name = "mypy-boto3-rds" version = "1.28.41" @@ -4637,4 +4652,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "543db762875b9735cacabcd6e288c82155e9bc0f0d707547ef803d4ccac9786e" +content-hash = 
"c2b590a718bdfee2f46331e169fbbeaed5458e659fdb77aaffb24b8dc8865264" diff --git a/pyproject.toml b/pyproject.toml index 7afd0b0d7e..c43b4c3b5a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -236,7 +236,7 @@ tox-docker = "^4.1" tox-gh-actions = "^3.0" [tool.poetry.group.dev.dependencies] -boto3-stubs = {version = "^1.28", extras = ["boto3", "essential", "s3"]} +boto3-stubs = {version = "^1.28", extras = ["boto3", "essential", "logs", "s3"]} freezegun = "~1.2.2" Jinja2 = "^3.1.2" mypy = "^1.4.1" diff --git a/scripts.py b/scripts.py index 2ea03abea3..fb563518a8 100644 --- a/scripts.py +++ b/scripts.py @@ -34,7 +34,6 @@ from core.external_search import ExternalSearchIndex from core.feed.acquisition import OPDSAcquisitionFeed from core.lane import Facets, FeaturedFacets, Lane, Pagination -from core.log import LogConfiguration from core.marc import MARCExporter from core.model import ( LOCK_ID_DB_INIT, @@ -64,6 +63,7 @@ ) from core.scripts import Script as CoreScript from core.scripts import TimestampScript +from core.service.container import container_instance from core.util import LanguageCodes from core.util.datetime_helpers import utc_now from core.util.opds_writer import OPDSFeed @@ -817,7 +817,10 @@ class InstanceInitializationScript: def __init__(self) -> None: self._log: Optional[logging.Logger] = None - LogConfiguration.initialize(None) + self._container = container_instance() + + # Call init_resources() to initialize the logging configuration. + self._container.init_resources() @property def log(self) -> logging.Logger: diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index 6dae466615..7d0b50cd56 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -749,29 +749,6 @@ def test_process_delete(self, fixture: AdminRouteFixture): fixture.assert_supported_methods(url, "DELETE") -class TestAdminLoggingServices: - CONTROLLER_NAME = "admin_logging_services_controller" - - @pytest.fixture(scope="function") - def fixture(self, admin_route_fixture: AdminRouteFixture) -> AdminRouteFixture: - admin_route_fixture.set_controller_name(self.CONTROLLER_NAME) - return admin_route_fixture - - def test_process_services(self, fixture: AdminRouteFixture): - url = "/admin/logging_services" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_services # type: ignore - ) - fixture.assert_supported_methods(url, "GET", "POST") - - def test_process_delete(self, fixture: AdminRouteFixture): - url = "/admin/logging_service/" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_delete, "", http_method="DELETE" # type: ignore - ) - fixture.assert_supported_methods(url, "DELETE") - - class TestAdminDiscoveryServiceLibraryRegistrations: CONTROLLER_NAME = "admin_discovery_service_library_registrations_controller" diff --git a/tests/api/test_routes.py b/tests/api/test_routes.py index 7d80e92bfb..6f9f9b11fd 100644 --- a/tests/api/test_routes.py +++ b/tests/api/test_routes.py @@ -1,11 +1,6 @@ import pytest -from flask import Response -from werkzeug.exceptions import MethodNotAllowed from api import routes -from api.routes import exception_handler -from api.routes import h as error_handler_object -from core.app_server import ErrorHandler from tests.fixtures.api_routes import RouteTestFixture @@ -411,44 +406,3 @@ def test_health_check(self, route_test: RouteTestFixture): # not a mock method -- the Response returned by the mock # system would have an explanatory message in its .data. 
assert "" == response.get_data(as_text=True) - - -class TestExceptionHandler: - def test_exception_handling(self, route_test: RouteTestFixture): - # The exception handler deals with most exceptions by running them - # through ErrorHandler.handle() - assert isinstance(error_handler_object, ErrorHandler) - - # Temporarily replace the ErrorHandler used by the - # exception_handler function -- this is what we imported as - # error_handler_object. - class MockErrorHandler: - def handle(self, exception): - self.handled = exception - return Response("handled it", 500) - - routes.h: MockErrorHandler = MockErrorHandler() # type: ignore[misc] - - # Simulate a request that causes an unhandled exception. - with route_test.controller_fixture.app.test_request_context(): - value_error = ValueError() - result = exception_handler(value_error) - - # The exception was passed into MockErrorHandler.handle. - assert value_error == routes.h.handled - - # The Response is created was passed along. - assert "handled it" == result.get_data(as_text=True) - assert 500 == result.status_code - - # werkzeug HTTPExceptions are _not_ run through - # handle(). werkzeug handles the conversion to a Response - # object representing a more specific (and possibly even - # non-error) HTTP response. - with route_test.controller_fixture.app.test_request_context(): - exception = MethodNotAllowed() - response = exception_handler(exception) - assert 405 == response.status_code - - # Restore the normal error handler. - routes.h = error_handler_object diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index a8952f09f4..883907683f 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -798,7 +798,7 @@ def test_initialize(self, db: DatabaseTransactionFixture): def test_initialize_alembic_exception(self, caplog: LogCaptureFixture): # Test that we handle a CommandError exception being returned by Alembic. with patch("scripts.inspect") as inspect: - with patch("scripts.LogConfiguration"): + with patch("scripts.container_instance"): script = InstanceInitializationScript() caplog.set_level(logging.ERROR) diff --git a/tests/core/service/logging/__init__.py b/tests/core/service/logging/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/core/service/logging/test_configuration.py b/tests/core/service/logging/test_configuration.py new file mode 100644 index 0000000000..606e269d44 --- /dev/null +++ b/tests/core/service/logging/test_configuration.py @@ -0,0 +1,32 @@ +import pytest + +from core.config import CannotLoadConfiguration +from core.service.logging.configuration import LoggingConfiguration + + +def test_cloudwatch_region_none() -> None: + # If cloudwatch is not enabled, no validation is needed. + config = LoggingConfiguration(cloudwatch_enabled=False, cloudwatch_region=None) + assert config.cloudwatch_region is None + assert config.cloudwatch_enabled is False + + # If cloudwatch is enabled, region must be provided. + with pytest.raises(CannotLoadConfiguration) as execinfo: + LoggingConfiguration(cloudwatch_enabled=True, cloudwatch_region=None) + + assert "Region must be provided if cloudwatch is enabled." in str(execinfo.value) + + +def test_cloudwatch_region_invalid() -> None: + with pytest.raises(CannotLoadConfiguration) as execinfo: + LoggingConfiguration(cloudwatch_enabled=True, cloudwatch_region="invalid") + + assert "Invalid region: invalid. 
Region must be one of:" in str(execinfo.value) + + +def test_cloudwatch_region_valid() -> None: + config = LoggingConfiguration( + cloudwatch_enabled=True, cloudwatch_region="us-east-2" + ) + assert config.cloudwatch_region == "us-east-2" + assert config.cloudwatch_enabled is True diff --git a/tests/core/service/logging/test_log.py b/tests/core/service/logging/test_log.py new file mode 100644 index 0000000000..45482289a6 --- /dev/null +++ b/tests/core/service/logging/test_log.py @@ -0,0 +1,149 @@ +from __future__ import annotations + +import json +import logging +import sys +from functools import partial +from unittest.mock import MagicMock, patch + +import pytest +from freezegun import freeze_time +from watchtower import CloudWatchLogHandler + +from core.service.logging.configuration import LogLevel +from core.service.logging.log import ( + JSONFormatter, + LogLoopPreventionFilter, + create_cloudwatch_handler, + create_stream_handler, + setup_logging, +) + + +class TestJSONFormatter: + @freeze_time("1990-05-05") + def test_format(self) -> None: + formatter = JSONFormatter() + + exc_info = None + # Cause an exception so we can capture its exc_info() + try: + raise ValueError("fake exception") + except ValueError as e: + exc_info = sys.exc_info() + + record = logging.LogRecord( + "some logger", + logging.DEBUG, + "pathname", + 104, + "A message", + {}, + exc_info, + None, + ) + data = json.loads(formatter.format(record)) + assert "some logger" == data["name"] + assert "1990-05-05T00:00:00+00:00" == data["timestamp"] + assert "DEBUG" == data["level"] + assert "A message" == data["message"] + assert "pathname" == data["filename"] + assert "ValueError: fake exception" in data["traceback"] + + @pytest.mark.parametrize( + "msg, args", + [ + ("An important snowman: %s", "☃"), + ("An important snowman: %s", "☃".encode()), + (b"An important snowman: %s", "☃"), + (b"An important snowman: %s", "☃".encode()), + ], + ) + def test_format_with_different_types_of_strings( + self, msg: str | bytes, args: str | bytes + ) -> None: + # As long as all data is either Unicode or UTF-8, any combination + # of Unicode and bytestrings can be combined in log messages. + formatter = JSONFormatter() + record = logging.LogRecord( + "some logger", logging.DEBUG, "pathname", 104, msg, (args,), None, None + ) + data = json.loads(formatter.format(record)) + # The resulting data is always a Unicode string. 
+ assert "An important snowman: ☃" == data["message"] + + +class TestLogLoopPreventionFilter: + @pytest.mark.parametrize( + "name, expected", + [ + ("requests.request", True), + ("palace.app", True), + ("palace.app.submodule", True), + ("botocore", False), + ("urllib3.connectionpool", False), + ], + ) + def test_filter(self, name: str, expected: bool) -> None: + filter = LogLoopPreventionFilter() + record = logging.LogRecord( + name, logging.DEBUG, "pathname", 104, "A message", {}, None, None + ) + assert expected == filter.filter(record) + + +def test_create_cloudwatch_handler() -> None: + mock_formatter = MagicMock() + mock_client = MagicMock() + + handler = create_cloudwatch_handler( + formatter=mock_formatter, + level=LogLevel.info, + client=mock_client, + group="test_group", + stream="test_stream", + interval=13, + create_group=True, + ) + + assert isinstance(handler, CloudWatchLogHandler) + assert handler.log_group_name == "test_group" + assert handler.log_stream_name == "test_stream" + assert handler.send_interval == 13 + assert any(isinstance(f, LogLoopPreventionFilter) for f in handler.filters) + assert handler.formatter == mock_formatter + assert handler.level == logging.INFO + + +def test_create_stream_handler() -> None: + mock_formatter = MagicMock() + + handler = create_stream_handler(formatter=mock_formatter, level=LogLevel.debug) + + assert isinstance(handler, logging.StreamHandler) + assert not any(isinstance(f, LogLoopPreventionFilter) for f in handler.filters) + assert handler.formatter == mock_formatter + assert handler.level == logging.DEBUG + + +def test_setup_logging_cloudwatch_disabled() -> None: + # If cloudwatch is disabled, no cloudwatch handler is created. + mock_cloudwatch_callable = MagicMock() + mock_stream_handler = MagicMock() + + setup = partial( + setup_logging, + level=LogLevel.info, + verbose_level=LogLevel.warning, + stream=mock_stream_handler, + cloudwatch_callable=mock_cloudwatch_callable, + ) + + # We patch logging so that we don't actually modify the global logging + # configuration. 
+ with patch("core.service.logging.log.logging"): + setup(cloudwatch_enabled=False) + assert mock_cloudwatch_callable.call_count == 0 + + setup(cloudwatch_enabled=True) + assert mock_cloudwatch_callable.call_count == 1 diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py index 7587de7a9e..fece838f9f 100644 --- a/tests/core/service/storage/test_s3.py +++ b/tests/core/service/storage/test_s3.py @@ -7,7 +7,6 @@ import pytest from botocore.exceptions import BotoCoreError, ClientError -from mypy_boto3_s3 import S3Client from pydantic import AnyHttpUrl from core.config import CannotLoadConfiguration @@ -16,6 +15,8 @@ from core.service.storage.s3 import S3Service if TYPE_CHECKING: + from mypy_boto3_s3 import S3Client + from tests.fixtures.s3 import S3ServiceFixture diff --git a/tests/core/test_app_server.py b/tests/core/test_app_server.py index da73365833..9c336084e6 100644 --- a/tests/core/test_app_server.py +++ b/tests/core/test_app_server.py @@ -1,7 +1,9 @@ import gzip import json +from functools import partial from io import BytesIO -from typing import Iterable +from typing import Callable, Iterable +from unittest.mock import MagicMock, PropertyMock import flask import pytest @@ -11,6 +13,7 @@ import core from api.admin.config import Configuration as AdminUiConfig +from api.util.flask import PalaceFlask from core.app_server import ( ApplicationVersionController, ErrorHandler, @@ -20,13 +23,12 @@ load_facets_from_request, load_pagination_from_request, ) -from core.config import Configuration from core.entrypoint import AudiobooksEntryPoint, EbooksEntryPoint from core.feed.annotator.base import Annotator from core.lane import Facets, Pagination, SearchFacets, WorkList -from core.log import LogConfiguration -from core.model import ConfigurationSetting, Identifier +from core.model import Identifier from core.problem_details import INVALID_INPUT, INVALID_URN +from core.service.logging.configuration import LogLevel from core.util.opds_writer import OPDSFeed, OPDSMessage from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture @@ -483,7 +485,8 @@ def as_problem_detail_document(self, debug): class ErrorHandlerFixture: transaction: DatabaseTransactionFixture - app: Flask + app: PalaceFlask + handler: Callable[..., ErrorHandler] @pytest.fixture() @@ -492,38 +495,25 @@ def error_handler_fixture( ) -> ErrorHandlerFixture: session = db.session - class MockManager: - """Simulate an application manager object such as - the circulation manager's CirculationManager. - - This gives ErrorHandler access to a database connection. - """ - - _db = session + mock_manager = MagicMock() + type(mock_manager)._db = PropertyMock(return_value=session) data = ErrorHandlerFixture() data.transaction = db - data.app = Flask(ErrorHandlerFixture.__name__) - data.app.manager = MockManager() # type: ignore[attr-defined] + data.app = PalaceFlask(ErrorHandlerFixture.__name__) Babel(data.app) + data.app.manager = mock_manager + data.handler = partial(ErrorHandler, app=data.app, log_level=LogLevel.error) return data class TestErrorHandler: - def activate_debug_mode(self, session): - """Set a site-wide setting that controls whether - detailed exception information is provided. 
- """ - ConfigurationSetting.sitewide( - session, Configuration.DATABASE_LOG_LEVEL - ).value = LogConfiguration.DEBUG - def raise_exception(self, cls=Exception): """Simulate an exception that happens deep within the stack.""" raise cls() def test_unhandled_error(self, error_handler_fixture: ErrorHandlerFixture): - handler = ErrorHandler(error_handler_fixture.app) + handler = error_handler_fixture.handler() with error_handler_fixture.app.test_request_context("/"): response = None try: @@ -532,13 +522,12 @@ def test_unhandled_error(self, error_handler_fixture: ErrorHandlerFixture): response = handler.handle(exception) assert isinstance(response, Response) assert 500 == response.status_code - assert "An internal error occured" == response.data.decode("utf8") + assert "An internal error occurred" == response.data.decode("utf8") def test_unhandled_error_debug(self, error_handler_fixture: ErrorHandlerFixture): # Set the sitewide log level to DEBUG to get a stack trace # instead of a generic error message. - handler = ErrorHandler(error_handler_fixture.app) - self.activate_debug_mode(error_handler_fixture.transaction.session) + handler = error_handler_fixture.handler(log_level=LogLevel.debug) with error_handler_fixture.app.test_request_context("/"): response = None @@ -553,13 +542,14 @@ def test_unhandled_error_debug(self, error_handler_fixture: ErrorHandlerFixture) def test_handle_error_as_problem_detail_document( self, error_handler_fixture: ErrorHandlerFixture ): - handler = ErrorHandler(error_handler_fixture.app) + handler = error_handler_fixture.handler() with error_handler_fixture.app.test_request_context("/"): try: self.raise_exception(CanBeProblemDetailDocument) except Exception as exception: response = handler.handle(exception) + assert isinstance(response, Response) assert 400 == response.status_code data = json.loads(response.data.decode("utf8")) assert INVALID_URN.title == data["title"] @@ -573,14 +563,14 @@ def test_handle_error_as_problem_detail_document_debug( ): # When in debug mode, the debug_message is preserved and a # stack trace is appended to it. 
- handler = ErrorHandler(error_handler_fixture.app) - self.activate_debug_mode(error_handler_fixture.transaction.session) + handler = error_handler_fixture.handler(log_level=LogLevel.debug) with error_handler_fixture.app.test_request_context("/"): try: self.raise_exception(CanBeProblemDetailDocument) except Exception as exception: response = handler.handle(exception) + assert isinstance(response, Response) assert 400 == response.status_code data = json.loads(response.data.decode("utf8")) assert INVALID_URN.title == data["title"] diff --git a/tests/core/test_log.py b/tests/core/test_log.py deleted file mode 100644 index 7966b699a6..0000000000 --- a/tests/core/test_log.py +++ /dev/null @@ -1,240 +0,0 @@ -import json -import logging -import sys - -import pytest - -from core.config import Configuration -from core.log import ( - CannotLoadConfiguration, - CloudWatchLogHandler, - CloudwatchLogs, - JSONFormatter, - LogConfiguration, - StringFormatter, - SysLogger, -) -from core.model import ConfigurationSetting, ExternalIntegration -from tests.fixtures.database import DatabaseTransactionFixture - - -class TestJSONFormatter: - def test_format(self): - formatter = JSONFormatter("some app") - assert "some app" == formatter.app_name - - exc_info = None - # Cause an exception so we can capture its exc_info() - try: - raise ValueError("fake exception") - except ValueError as e: - exc_info = sys.exc_info() - - record = logging.LogRecord( - "some logger", - logging.DEBUG, - "pathname", - 104, - "A message", - {}, - exc_info, - None, - ) - data = json.loads(formatter.format(record)) - assert "some logger" == data["name"] - assert "some app" == data["app"] - assert "DEBUG" == data["level"] - assert "A message" == data["message"] - assert "pathname" == data["filename"] - assert "ValueError: fake exception" in data["traceback"] - - def test_format_with_different_types_of_strings(self): - # As long as all data is either Unicode or UTF-8, any combination - # of Unicode and bytestrings can be combined in log messages. - - unicode_message = "An important snowman: %s" - byte_message = unicode_message.encode("utf8") - - unicode_snowman = "☃" - utf8_snowman = unicode_snowman.encode("utf8") - - # Test every combination of Unicode and bytestring message and - # argument. - formatter = JSONFormatter("some app") - for msg, args in ( - (unicode_message, utf8_snowman), - (unicode_message, unicode_snowman), - (byte_message, utf8_snowman), - (byte_message, unicode_snowman), - ): - record = logging.LogRecord( - "some logger", logging.DEBUG, "pathname", 104, msg, (args,), None, None - ) - data = json.loads(formatter.format(record)) - # The resulting data is always a Unicode string. - assert "An important snowman: ☃" == data["message"] - - -class TestLogConfiguration: - def cloudwatch_integration(self, database_transaction: DatabaseTransactionFixture): - """Create an ExternalIntegration for a Cloudwatch account.""" - integration = database_transaction.external_integration( - protocol=ExternalIntegration.CLOUDWATCH, - goal=ExternalIntegration.LOGGING_GOAL, - ) - - integration.set_setting(CloudwatchLogs.CREATE_GROUP, "FALSE") - return integration - - def test_from_configuration(self, db: DatabaseTransactionFixture): - cls = LogConfiguration - config = Configuration - m = cls.from_configuration - # When logging is configured on initial startup, with no - # database connection, these are the defaults. 
- internal_log_level, database_log_level, [handler], errors = m( - None, testing=False - ) - assert cls.INFO == internal_log_level - assert cls.WARN == database_log_level - assert [] == errors - assert isinstance(handler.formatter, JSONFormatter) - - # The same defaults hold when there is a database connection - # but nothing is actually configured. - internal_log_level, database_log_level, [handler], errors = m( - db.session, testing=False - ) - assert cls.INFO == internal_log_level - assert cls.WARN == database_log_level - assert [] == errors - assert isinstance(handler.formatter, JSONFormatter) - - # Let's set up a integrations and change the defaults. - self.cloudwatch_integration(db) - internal = db.external_integration( - protocol=ExternalIntegration.INTERNAL_LOGGING, - goal=ExternalIntegration.LOGGING_GOAL, - ) - ConfigurationSetting.sitewide(db.session, config.LOG_LEVEL).value = config.ERROR - internal.setting(SysLogger.LOG_FORMAT).value = SysLogger.TEXT_LOG_FORMAT - ConfigurationSetting.sitewide( - db.session, config.DATABASE_LOG_LEVEL - ).value = config.DEBUG - ConfigurationSetting.sitewide( - db.session, config.LOG_APP_NAME - ).value = "test app" - template = "%(filename)s:%(message)s" - internal.setting(SysLogger.LOG_MESSAGE_TEMPLATE).value = template - internal_log_level, database_log_level, handlers, errors = m( - db.session, testing=False - ) - assert cls.ERROR == internal_log_level - assert cls.DEBUG == database_log_level - assert len(errors) == 0 - - [cloudwatch_handler] = [ - x for x in handlers if isinstance(x, CloudWatchLogHandler) - ] - assert "simplified" == cloudwatch_handler.log_stream_name - assert "simplified" == cloudwatch_handler.log_group_name - assert 60 == cloudwatch_handler.send_interval - - [stream_handler] = [x for x in handlers if isinstance(x, logging.StreamHandler)] - assert isinstance(stream_handler.formatter, StringFormatter) - assert template == stream_handler.formatter._fmt - - # If testing=True, then the database configuration is ignored, - # and the log setup is one that's appropriate for display - # alongside unit test output. - internal_log_level, database_log_level, [handler], errors = m( - db.session, testing=True - ) - assert cls.INFO == internal_log_level - assert cls.WARN == database_log_level - assert SysLogger.DEFAULT_MESSAGE_TEMPLATE == handler.formatter._fmt - - def test_syslog_defaults(self): - cls = SysLogger - - # Normally log messages are emitted in JSON format. - assert ( - SysLogger.JSON_LOG_FORMAT, - SysLogger.DEFAULT_MESSAGE_TEMPLATE, - ) == cls._defaults(testing=False) - - # When we're running unit tests, log messages are emitted in text format. - assert ( - SysLogger.TEXT_LOG_FORMAT, - SysLogger.DEFAULT_MESSAGE_TEMPLATE, - ) == cls._defaults(testing=True) - - def test_set_formatter(self): - # Create a generic handler. - handler = logging.StreamHandler() - - # Configure it for text output. - template = "%(filename)s:%(message)s" - SysLogger.set_formatter( - handler, - log_format=SysLogger.TEXT_LOG_FORMAT, - message_template=template, - app_name="some app", - ) - formatter = handler.formatter - assert isinstance(formatter, StringFormatter) - assert template == formatter._fmt - - # Configure a similar handler for JSON output. 
- handler = logging.StreamHandler() - SysLogger.set_formatter( - handler, log_format=SysLogger.JSON_LOG_FORMAT, message_template=template - ) - formatter = handler.formatter - assert isinstance(formatter, JSONFormatter) - assert LogConfiguration.DEFAULT_APP_NAME == formatter.app_name - - # In this case the template is irrelevant. The JSONFormatter - # uses the default format template, but it doesn't matter, - # because JSONFormatter overrides the format() method. - assert "%(message)s" == formatter._fmt - - def test_cloudwatch_handler(self, db: DatabaseTransactionFixture): - """Turn an appropriate ExternalIntegration into a CloudWatchLogHandler.""" - - integration = self.cloudwatch_integration(db) - integration.set_setting(CloudwatchLogs.GROUP, "test_group") - integration.set_setting(CloudwatchLogs.STREAM, "test_stream") - integration.set_setting(CloudwatchLogs.INTERVAL, 120) - integration.set_setting(CloudwatchLogs.REGION, "us-east-2") - handler = CloudwatchLogs.get_handler(integration, testing=True) - assert isinstance(handler, CloudWatchLogHandler) - assert "test_stream" == handler.log_stream_name - assert "test_group" == handler.log_group_name - assert 120 == handler.send_interval - - integration.setting(CloudwatchLogs.INTERVAL).value = -10 - pytest.raises( - CannotLoadConfiguration, CloudwatchLogs.get_handler, integration, True - ) - integration.setting(CloudwatchLogs.INTERVAL).value = "a string" - pytest.raises( - CannotLoadConfiguration, CloudwatchLogs.get_handler, integration, True - ) - - def test_cloudwatch_initialization_exception(self, db: DatabaseTransactionFixture): - # Make sure if an exception is thrown during initalization its caught. - - integration = self.cloudwatch_integration(db) - integration.set_setting(CloudwatchLogs.CREATE_GROUP, "TRUE") - ( - internal_log_level, - database_log_level, - [handler], - [error], - ) = LogConfiguration.from_configuration(db.session, testing=False) - assert isinstance(handler, logging.StreamHandler) - assert ( - "Error creating logger AWS Cloudwatch Logs Unable to locate credentials" - == error - ) diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index ec1362ac24..01e26fa8ac 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -25,7 +25,6 @@ from core.config import Configuration from core.configuration.library import LibrarySettings from core.integration.goals import Goals -from core.log import LogConfiguration from core.model import ( Classification, Collection, @@ -80,9 +79,6 @@ def create(cls): # This will make sure we always connect to the test database. os.environ["TESTING"] = "true" - # Ensure that the log configuration starts in a known state. - LogConfiguration.initialize(None, testing=True) - # Drop any existing schema. It will be recreated when the database is initialized. 
_cls = cls() _cls.drop_existing_schema() diff --git a/tests/fixtures/s3.py b/tests/fixtures/s3.py index 7f802b0382..2a2f194cd5 100644 --- a/tests/fixtures/s3.py +++ b/tests/fixtures/s3.py @@ -3,11 +3,19 @@ import functools import sys from types import TracebackType -from typing import BinaryIO, List, Literal, NamedTuple, Optional, Protocol, Type +from typing import ( + TYPE_CHECKING, + BinaryIO, + List, + Literal, + NamedTuple, + Optional, + Protocol, + Type, +) from unittest.mock import MagicMock import pytest -from mypy_boto3_s3 import S3Client from core.service.storage.s3 import MultipartS3ContextManager, S3Service @@ -17,6 +25,10 @@ from typing_extensions import Self +if TYPE_CHECKING: + from mypy_boto3_s3.client import S3Client + + class MockS3ServiceUpload(NamedTuple): key: str content: bytes From 1e4af2b911cc37b682f4339d67f82824af7f7bec Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 3 Oct 2023 06:15:13 -0300 Subject: [PATCH 076/262] Bump pre-commit linters to latest versions (#1424) --- .pre-commit-config.yaml | 12 ++++++------ api/admin/announcement_list_validator.py | 1 - api/admin/config.py | 1 - api/admin/controller/individual_admin_settings.py | 4 ++-- api/admin/controller/sitewide_settings.py | 1 - api/admin/controller/work_editor.py | 1 - api/admin/opds.py | 1 - api/admin/password_admin_authentication_provider.py | 1 - api/annotations.py | 1 - api/axis.py | 1 - api/base_controller.py | 1 - api/bibliotheca.py | 1 - api/config.py | 1 - api/custom_index.py | 1 - api/custom_patron_catalog.py | 1 - api/google_analytics_provider.py | 2 -- api/lanes.py | 3 --- api/local_analytics_exporter.py | 1 - api/novelist.py | 1 - api/nyt.py | 2 -- api/saml/wayfless.py | 1 - api/sip/client.py | 1 - api/web_publication_manifest.py | 1 - core/classifier/__init__.py | 2 -- core/classifier/age.py | 1 - core/classifier/ddc.py | 1 - core/classifier/gutenberg.py | 1 - core/classifier/lcc.py | 1 - core/classifier/overdrive.py | 1 - core/classifier/simplified.py | 1 - core/config.py | 2 -- core/coverage.py | 2 +- core/equivalents_coverage.py | 1 - core/facets.py | 2 -- core/metadata_layer.py | 3 +-- core/model/__init__.py | 1 - core/model/admin.py | 4 ++-- core/model/cachedfeed.py | 1 - core/model/configuration.py | 3 --- core/model/customlist.py | 1 - core/model/datasource.py | 1 - core/model/identifier.py | 1 - core/model/licensing.py | 1 - core/model/patron.py | 1 - core/model/resource.py | 1 - core/model/work.py | 1 - core/opds2.py | 1 + core/opensearch.py | 1 - core/python_expression_dsl/util.py | 1 - core/scripts.py | 12 ++++-------- core/search/service.py | 3 ++- core/service/container.py | 1 - core/util/__init__.py | 3 --- core/util/cache.py | 3 ++- core/util/flask_util.py | 1 - core/util/opds_writer.py | 2 -- scripts.py | 4 ---- tests/api/discovery/test_opds_registration.py | 1 - tests/api/mockapi/overdrive.py | 1 - tests/api/sip/test_authentication_provider.py | 1 - tests/api/test_adobe_vendor_id.py | 1 - tests/api/test_authenticator.py | 1 - tests/api/test_axis.py | 11 ++++++++--- tests/api/test_bibliotheca.py | 3 --- tests/api/test_circulationapi.py | 2 -- tests/api/test_controller_base.py | 2 -- tests/api/test_controller_scopedsession.py | 3 ++- tests/api/test_firstbook2.py | 1 - tests/api/test_millenium_patron.py | 1 - tests/api/test_novelist.py | 1 - tests/api/test_nyt.py | 1 - tests/api/test_odilo.py | 1 - tests/api/test_odl.py | 2 -- tests/api/test_opds_for_distributors.py | 3 +-- tests/api/test_routes.py | 9 --------- tests/api/test_selftest.py | 1 - 
tests/core/classifiers/test_classifier.py | 2 -- tests/core/classifiers/test_ddc.py | 1 - tests/core/classifiers/test_lcc.py | 1 - tests/core/models/test_circulationevent.py | 1 - tests/core/models/test_contributor.py | 1 - tests/core/models/test_edition.py | 3 --- tests/core/models/test_hassessioncache.py | 1 - tests/core/models/test_patron.py | 1 - tests/core/models/test_resource.py | 3 --- tests/core/service/storage/test_s3.py | 6 +++--- tests/core/test_coverage.py | 1 + tests/core/test_lane.py | 4 ---- tests/core/test_metadata.py | 5 ----- tests/core/test_monitor.py | 1 - tests/core/test_opds_import.py | 1 + tests/core/test_scripts.py | 1 - tests/core/test_selftest.py | 3 ++- tests/core/test_summary_evaluator.py | 1 - tests/core/util/test_opds_writer.py | 1 - tests/core/util/test_util.py | 2 -- tests/fixtures/database.py | 6 +++--- tests/migration/test_20230531_0af587ff8595.py | 1 - 98 files changed, 42 insertions(+), 158 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4988613c65..5b0c891912 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: - id: mixed-line-ending - repo: https://github.com/asottile/pyupgrade - rev: v3.3.2 + rev: v3.14.0 hooks: - id: pyupgrade args: @@ -25,7 +25,7 @@ repos: - --keep-runtime-typing - repo: https://github.com/myint/autoflake - rev: v2.1.1 + rev: v2.2.1 hooks: - id: autoflake args: @@ -34,7 +34,7 @@ repos: - --ignore-init-module-imports - repo: https://github.com/psf/black - rev: 22.10.0 + rev: 23.9.1 hooks: - id: black name: Run black @@ -46,20 +46,20 @@ repos: name: Run isort - repo: https://github.com/sirosen/check-jsonschema - rev: 0.22.0 + rev: 0.27.0 hooks: - id: check-github-workflows - id: check-github-actions - repo: https://github.com/pappasam/toml-sort - rev: v0.23.0 + rev: v0.23.1 hooks: - id: toml-sort args: [] files: pyproject.toml - repo: https://github.com/jackdewinter/pymarkdown - rev: v0.9.9 + rev: v0.9.13.4 hooks: - id: pymarkdown args: diff --git a/api/admin/announcement_list_validator.py b/api/admin/announcement_list_validator.py index 44dacd6a86..d7bcceaf64 100644 --- a/api/admin/announcement_list_validator.py +++ b/api/admin/announcement_list_validator.py @@ -12,7 +12,6 @@ class AnnouncementListValidator: - DATE_FORMAT = "%Y-%m-%d" def __init__( diff --git a/api/admin/config.py b/api/admin/config.py index 4f109c0dee..789d8c3ad5 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -15,7 +15,6 @@ class OperationalMode(str, Enum): class Configuration: - APP_NAME = "Palace Collection Manager" PACKAGE_NAME = "@thepalaceproject/circulation-admin" PACKAGE_VERSION = "1.10.0" diff --git a/api/admin/controller/individual_admin_settings.py b/api/admin/controller/individual_admin_settings.py index 1693885666..7a3b67ccf5 100644 --- a/api/admin/controller/individual_admin_settings.py +++ b/api/admin/controller/individual_admin_settings.py @@ -74,7 +74,6 @@ def append_role(roles, role): roles = [] show_admin = True for role in admin.roles: - # System admin sees all if highest_role.role == AdminRole.SYSTEM_ADMIN: append_role(roles, role) @@ -318,7 +317,8 @@ def look_up_library_for_role(self, role): def handle_roles(self, admin, roles, settingUp): """Compare the admin's existing set of roles against the roles submitted in the form, and, - unless there's a problem with the roles or the permissions, modify the admin's roles accordingly""" + unless there's a problem with the roles or the permissions, modify the admin's roles accordingly + """ # User = person submitting 
the form; admin = person who the form is about diff --git a/api/admin/controller/sitewide_settings.py b/api/admin/controller/sitewide_settings.py index 17069d1e5f..156ae38b37 100644 --- a/api/admin/controller/sitewide_settings.py +++ b/api/admin/controller/sitewide_settings.py @@ -52,7 +52,6 @@ def process_delete(self, key): return Response(str(_("Deleted")), 200) def validate_form_fields(self, setting, fields): - MISSING_FIELD_MESSAGES = dict( key=MISSING_SITEWIDE_SETTING_KEY, value=MISSING_SITEWIDE_SETTING_VALUE ) diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index fc61840dd5..3b950f6167 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -34,7 +34,6 @@ class WorkController(CirculationManagerController, AdminPermissionsControllerMixin): - STAFF_WEIGHT = 1000 def details(self, identifier_type, identifier): diff --git a/api/admin/opds.py b/api/admin/opds.py index 2a4df51c95..b7d921cc56 100644 --- a/api/admin/opds.py +++ b/api/admin/opds.py @@ -14,7 +14,6 @@ def __init__(self, circulation, library, test_mode=False): def annotate_work_entry( self, work, active_license_pool, edition, identifier, feed, entry ): - super().annotate_work_entry( work, active_license_pool, edition, identifier, feed, entry ) diff --git a/api/admin/password_admin_authentication_provider.py b/api/admin/password_admin_authentication_provider.py index cb3770f496..bcc93ed9dd 100644 --- a/api/admin/password_admin_authentication_provider.py +++ b/api/admin/password_admin_authentication_provider.py @@ -22,7 +22,6 @@ class PasswordAdminAuthenticationProvider(AdminAuthenticationProvider): - NAME = "Password Auth" SIGN_IN_TEMPLATE = sign_in_template.format( diff --git a/api/annotations.py b/api/annotations.py index d2de031610..21a3a37a7e 100644 --- a/api/annotations.py +++ b/api/annotations.py @@ -37,7 +37,6 @@ def load_document(url, *args, **kargs): class AnnotationWriter: - CONTENT_TYPE = 'application/ld+json; profile="http://www.w3.org/ns/anno.jsonld"' JSONLD_CONTEXT = "http://www.w3.org/ns/anno.jsonld" diff --git a/api/axis.py b/api/axis.py index 1b0ad69a78..ae606674e9 100644 --- a/api/axis.py +++ b/api/axis.py @@ -138,7 +138,6 @@ class Axis360API( Axis360APIConstants, HasLibraryIntegrationConfiguration, ): - NAME = ExternalIntegration.AXIS_360 SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.BORROW_STEP diff --git a/api/base_controller.py b/api/base_controller.py index 55a9830440..24b2e6aad0 100644 --- a/api/base_controller.py +++ b/api/base_controller.py @@ -79,7 +79,6 @@ def authenticated_patron_from_request(self): return patron def authenticated_patron(self, authorization_header: Authorization): - """Look up the patron authenticated by the given authorization header. 
The header could contain a barcode and pin or a token for an diff --git a/api/bibliotheca.py b/api/bibliotheca.py index 2c8de42eb2..1c582a38f0 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -98,7 +98,6 @@ class BibliothecaSettings(BaseSettings): class BibliothecaLibrarySettings(BaseCirculationLoanSettings): - dont_display_reserves: Optional[str] = FormField( form=ConfigurationFormItem( label=_("Show/Hide Titles with No Available Loans"), diff --git a/api/config.py b/api/config.py index b77a568329..bb8a0609da 100644 --- a/api/config.py +++ b/api/config.py @@ -16,7 +16,6 @@ class Configuration(CoreConfiguration): - DEFAULT_OPDS_FORMAT = "simple_opds_entry" # The list of patron web urls allowed to access this CM diff --git a/api/custom_index.py b/api/custom_index.py index a05941c011..1f7c498d92 100644 --- a/api/custom_index.py +++ b/api/custom_index.py @@ -82,7 +82,6 @@ def __call__(self, library, annotator): class COPPAGate(CustomIndexView): - PROTOCOL = "COPPA Age Gate" URI = "http://librarysimplified.org/terms/restrictions/coppa" diff --git a/api/custom_patron_catalog.py b/api/custom_patron_catalog.py index a932e97e52..27e2174a1e 100644 --- a/api/custom_patron_catalog.py +++ b/api/custom_patron_catalog.py @@ -153,7 +153,6 @@ def annotate_authentication_document(self, library, doc, url_for): class COPPAGate(CustomPatronCatalog): - PROTOCOL = "COPPA Age Gate" AUTHENTICATION_TYPE = "http://librarysimplified.org/terms/authentication/gate/coppa" diff --git a/api/google_analytics_provider.py b/api/google_analytics_provider.py index 09c82a72c8..56e33e4f94 100644 --- a/api/google_analytics_provider.py +++ b/api/google_analytics_provider.py @@ -13,7 +13,6 @@ class GoogleAnalyticsProvider: - NAME = _("Google Analytics") DESCRIPTION = _("How to Configure a Google Analytics Integration") INSTRUCTIONS = _( @@ -86,7 +85,6 @@ def __init__(self, integration, services: Services, library=None): ) def collect_event(self, library, license_pool, event_type, time, **kwargs): - # Explicitly destroy any neighborhood information -- we don't # want to send this to third-party sources. kwargs.pop("neighborhood", None) diff --git a/api/lanes.py b/api/lanes.py index 400f7eee02..c2a8310526 100644 --- a/api/lanes.py +++ b/api/lanes.py @@ -1377,13 +1377,11 @@ def default_facet(cls, config, facet_group_name): class CrawlableLane(DynamicLane): - # By default, crawlable feeds are cached for 12 hours. MAX_CACHE_AGE = 12 * 60 * 60 class CrawlableCollectionBasedLane(CrawlableLane): - # Since these collections may be shared collections, for which # recent information is very important, these feeds are only # cached for 2 hours. @@ -1393,7 +1391,6 @@ class CrawlableCollectionBasedLane(CrawlableLane): COLLECTION_ROUTE = "crawlable_collection_feed" def initialize(self, library_or_collections): - self.collection_feed = False if isinstance(library_or_collections, Library): diff --git a/api/local_analytics_exporter.py b/api/local_analytics_exporter.py index 51c697c622..d5a608d038 100644 --- a/api/local_analytics_exporter.py +++ b/api/local_analytics_exporter.py @@ -22,7 +22,6 @@ class LocalAnalyticsExporter: """Export large numbers of analytics events in CSV format.""" def export(self, _db, start, end, locations=None, library=None): - # Get the results from the database. 
query = self.analytics_query(start, end, locations, library) results = _db.execute(query) diff --git a/api/novelist.py b/api/novelist.py index b72658a2ad..c910fa45c5 100644 --- a/api/novelist.py +++ b/api/novelist.py @@ -38,7 +38,6 @@ class NoveListAPI: - PROTOCOL = ExternalIntegration.NOVELIST NAME = _("Novelist API") diff --git a/api/nyt.py b/api/nyt.py index 4bd497d324..fd6c41076d 100644 --- a/api/nyt.py +++ b/api/nyt.py @@ -24,7 +24,6 @@ class NYTAPI: - DATE_FORMAT = "%Y-%m-%d" # NYT best-seller lists are associated with dates, but fields like @@ -60,7 +59,6 @@ def date_string(cls, d): class NYTBestSellerAPI(NYTAPI, HasSelfTests): - PROTOCOL = ExternalIntegration.NYT GOAL = ExternalIntegration.METADATA_GOAL NAME = _("NYT Best Seller API") diff --git a/api/saml/wayfless.py b/api/saml/wayfless.py index 726eb5cd84..24967679cd 100644 --- a/api/saml/wayfless.py +++ b/api/saml/wayfless.py @@ -76,7 +76,6 @@ def external_integration( def fulfill( self, patron, pin, licensepool, delivery_mechanism, fulfillment: FulfillmentInfo ) -> FulfillmentInfo: - self._logger.debug( f"WAYFless acquisition link template: {self._wayfless_url_template}" ) diff --git a/api/sip/client.py b/api/sip/client.py index cb1aa163c7..7bfd53f9f3 100644 --- a/api/sip/client.py +++ b/api/sip/client.py @@ -223,7 +223,6 @@ class Constants: class SIPClient(Constants): - log = client_logger # Maximum retries of a SIP message before failing. diff --git a/api/web_publication_manifest.py b/api/web_publication_manifest.py index 86c6b3e973..ca86751ea2 100644 --- a/api/web_publication_manifest.py +++ b/api/web_publication_manifest.py @@ -36,7 +36,6 @@ def sort_key(self, o): class FindawayManifest(AudiobookManifest): - # This URI prefix makes it clear when we are using a term coined # by Findaway in a JSON-LD document. FINDAWAY_EXTENSION_CONTEXT = ( diff --git a/core/classifier/__init__.py b/core/classifier/__init__.py index 3d9e7a083f..549157b2fc 100644 --- a/core/classifier/__init__.py +++ b/core/classifier/__init__.py @@ -383,7 +383,6 @@ def audience(cls, identifier, name, require_explicit_age_marker=False): @classmethod def target_age(cls, identifier, name, require_explicit_grade_marker=False): - if (identifier and "education" in identifier) or (name and "education" in name): # This is a book about teaching, e.g. fifth grade. return cls.range_tuple(None, None) @@ -498,7 +497,6 @@ class AgeClassifier(Classifier): @classmethod def audience(cls, identifier, name, require_explicit_age_marker=False): - target_age = cls.target_age(identifier, name, require_explicit_age_marker) return cls.default_audience_for_target_age(target_age) diff --git a/core/classifier/age.py b/core/classifier/age.py index c0cd514b92..30ad21567d 100644 --- a/core/classifier/age.py +++ b/core/classifier/age.py @@ -75,7 +75,6 @@ def audience(cls, identifier, name, require_explicit_age_marker=False): @classmethod def target_age(cls, identifier, name, require_explicit_grade_marker=False): - if (identifier and "education" in identifier) or (name and "education" in name): # This is a book about teaching, e.g. fifth grade. return cls.range_tuple(None, None) diff --git a/core/classifier/ddc.py b/core/classifier/ddc.py index 43af16c15e..934175ae5d 100644 --- a/core/classifier/ddc.py +++ b/core/classifier/ddc.py @@ -8,7 +8,6 @@ class DeweyDecimalClassifier(Classifier): - NAMES = json.load(open(os.path.join(resource_dir, "dewey_1000.json"))) # Add some other values commonly found in MARC records. 
diff --git a/core/classifier/gutenberg.py b/core/classifier/gutenberg.py index 8c2f6c7a63..2cf9028627 100644 --- a/core/classifier/gutenberg.py +++ b/core/classifier/gutenberg.py @@ -2,7 +2,6 @@ class GutenbergBookshelfClassifier(Classifier): - # Any classification that includes the string "Fiction" will be # counted as fiction. This is just the leftovers. FICTION = { diff --git a/core/classifier/lcc.py b/core/classifier/lcc.py index 2a3fec5d3c..9a69f262de 100644 --- a/core/classifier/lcc.py +++ b/core/classifier/lcc.py @@ -2,7 +2,6 @@ class LCCClassifier(Classifier): - TOP_LEVEL = re.compile("^([A-Z]{1,2})") FICTION = {"PN", "PQ", "PR", "PS", "PT", "PZ"} JUVENILE = {"PZ"} diff --git a/core/classifier/overdrive.py b/core/classifier/overdrive.py index 01098d1108..5bd4ba9108 100644 --- a/core/classifier/overdrive.py +++ b/core/classifier/overdrive.py @@ -2,7 +2,6 @@ class OverdriveClassifier(Classifier): - # These genres are only used to describe video titles. VIDEO_GENRES = [ "Action", diff --git a/core/classifier/simplified.py b/core/classifier/simplified.py index d5d8addc99..48996dda42 100644 --- a/core/classifier/simplified.py +++ b/core/classifier/simplified.py @@ -4,7 +4,6 @@ class SimplifiedGenreClassifier(Classifier): - NONE = NO_VALUE @classmethod diff --git a/core/config.py b/core/config.py index 9744a2ece1..e449da24f4 100644 --- a/core/config.py +++ b/core/config.py @@ -27,13 +27,11 @@ class CannotLoadConfiguration(IntegrationException): class ConfigurationConstants: - TRUE = "true" FALSE = "false" class Configuration(ConfigurationConstants): - log = logging.getLogger("Configuration file loader") # Environment variables that contain URLs to the database diff --git a/core/coverage.py b/core/coverage.py index b2a86881c4..0dd0d29adb 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -1277,7 +1277,7 @@ def work(self, identifier, license_pool=None, **calculate_work_kwargs): if license_pool: if not license_pool.work or not license_pool.work.presentation_ready: - for (v, default) in (("exclude_search", self.EXCLUDE_SEARCH_INDEX),): + for v, default in (("exclude_search", self.EXCLUDE_SEARCH_INDEX),): if not v in calculate_work_kwargs: calculate_work_kwargs[v] = default diff --git a/core/equivalents_coverage.py b/core/equivalents_coverage.py index 6a28e184f1..0f290a1fb4 100644 --- a/core/equivalents_coverage.py +++ b/core/equivalents_coverage.py @@ -103,7 +103,6 @@ def process_batch( recursive_equivs = [] for link_id, parent_id in chained_identifiers: - # First time around we MUST delete any chains formed from this identifier before if parent_id not in completed_identifiers: delete_stmt = delete(RecursiveEquivalencyCache).where( diff --git a/core/facets.py b/core/facets.py index 30bf841178..3dd7600879 100644 --- a/core/facets.py +++ b/core/facets.py @@ -2,7 +2,6 @@ class FacetConstants: - # A special constant, basically an additional rel, indicating that # an OPDS facet group represents different entry points into a # WorkList. 
@@ -154,7 +153,6 @@ class FacetConfig(FacetConstants): @classmethod def from_library(cls, library): - enabled_facets = dict() for group in list(FacetConstants.DEFAULT_ENABLED_FACETS.keys()): enabled_facets[group] = library.enabled_facets(group) diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 4fe5e06edf..52b1892d88 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -556,7 +556,6 @@ def add_to_pool(self, db: Session, pool: LicensePool): class TimestampData: - CLEAR_VALUE = Timestamp.CLEAR_VALUE def __init__( @@ -1998,7 +1997,7 @@ def row_to_metadata(self, row): primary_identifier = identifier subjects = [] - for (field_name, (subject_type, weight)) in list(self.subject_fields.items()): + for field_name, (subject_type, weight) in list(self.subject_fields.items()): values = self.list_field(row, field_name) for value in values: subjects.append( diff --git a/core/model/__init__.py b/core/model/__init__.py index fe795d857b..99c1edf25f 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -344,7 +344,6 @@ def json_serializer(*args, **kwargs) -> str: class SessionManager: - # A function that calculates recursively equivalent identifiers # is also defined in SQL. RECURSIVE_EQUIVALENTS_FUNCTION = "recursive_equivalents.sql" diff --git a/core/model/admin.py b/core/model/admin.py index 0f5fbbec40..76f3168c34 100644 --- a/core/model/admin.py +++ b/core/model/admin.py @@ -32,7 +32,6 @@ class Admin(Base, HasSessionCache): - __tablename__ = "admins" id = Column(Integer, primary_key=True) @@ -158,6 +157,7 @@ def is_library_manager(self, library): # First check if the admin is a manager of _all_ libraries. if self.is_sitewide_library_manager(): return True + # If not, they could still be a manager of _this_ library. def lookup_hook(): return ( @@ -186,6 +186,7 @@ def is_librarian(self, library): # Check if the admin is a librarian for _all_ libraries. if self.is_sitewide_librarian(): return True + # If not, they might be a librarian of _this_ library. def lookup_hook(): return ( @@ -272,7 +273,6 @@ def __repr__(self): class AdminRole(Base, HasSessionCache): - __tablename__ = "adminroles" id = Column(Integer, primary_key=True) diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py index 4de95da13a..c8c6d66458 100644 --- a/core/model/cachedfeed.py +++ b/core/model/cachedfeed.py @@ -35,7 +35,6 @@ class CachedFeed(Base): - __tablename__ = "cachedfeeds" id = Column(Integer, primary_key=True) diff --git a/core/model/configuration.py b/core/model/configuration.py index 8ba3f249e9..4c67ff82a9 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -28,7 +28,6 @@ class ExternalIntegrationLink(Base): - __tablename__ = "externalintegrationslinks" NO_MIRROR_INTEGRATION = "NO_MIRROR" @@ -298,7 +297,6 @@ def for_collection_and_purpose(cls, _db, collection, purpose): @classmethod def lookup(cls, _db, protocol, goal, library=None): - integrations = _db.query(cls).filter(cls.protocol == protocol, cls.goal == goal) if library: @@ -647,7 +645,6 @@ def create(): @hybrid_property def value(self): - """What's the current value of this configuration setting? If not present, the value may be inherited from some other ConfigurationSetting. 
diff --git a/core/model/customlist.py b/core/model/customlist.py index f716462c1e..6a74d5d48c 100644 --- a/core/model/customlist.py +++ b/core/model/customlist.py @@ -359,7 +359,6 @@ def update_size(self, db: Session): class CustomListEntry(Base): - __tablename__ = "customlistentries" id = Column(Integer, primary_key=True) list_id = Column(Integer, ForeignKey("customlists.id"), index=True) diff --git a/core/model/datasource.py b/core/model/datasource.py index cc75bd275d..4baf9b2825 100644 --- a/core/model/datasource.py +++ b/core/model/datasource.py @@ -291,7 +291,6 @@ def well_known_sources(cls, _db): (cls.ENKI, True, False, IdentifierConstants.ENKI_ID, None), (cls.PROQUEST, True, False, IdentifierConstants.PROQUEST_ID, None), ): - obj = DataSource.lookup( _db, name, diff --git a/core/model/identifier.py b/core/model/identifier.py index 295053181b..0aac66778c 100644 --- a/core/model/identifier.py +++ b/core/model/identifier.py @@ -431,7 +431,6 @@ class UnresolvableIdentifierException(Exception): @classmethod def type_and_identifier_for_urn(cls, identifier_string: str) -> tuple[str, str]: - for parser in Identifier.PARSERS: result = parser.parse(identifier_string) if result: diff --git a/core/model/licensing.py b/core/model/licensing.py index 3511d3b8bd..6fc0b716eb 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -1773,7 +1773,6 @@ def cache_key(self): return (self.content_type, self.drm_scheme) def __repr__(self): - if self.default_client_can_fulfill: fulfillable = "fulfillable" else: diff --git a/core/model/patron.py b/core/model/patron.py index f5e8ccf8dd..09cdf9e207 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -60,7 +60,6 @@ def library(self): class Patron(Base): - __tablename__ = "patrons" id = Column(Integer, primary_key=True) diff --git a/core/model/resource.py b/core/model/resource.py index 475bb2eabd..27be842477 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -281,7 +281,6 @@ def image_type_priority(cls, media_type): @classmethod def best_covers_among(cls, resources): - """Choose the best covers from a list of Resources.""" champions = [] champion_key = None diff --git a/core/model/work.py b/core/model/work.py index 063f7cd446..ad55cc256f 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -476,7 +476,6 @@ def open_access_for_permanent_work_id(cls, _db, pwid, medium, language): ) for needs_merge in list(licensepools_for_work.keys()): if needs_merge != work: - # Make sure that Work we're about to merge has # nothing but LicensePools whose permanent # work ID matches the permanent work ID of the diff --git a/core/opds2.py b/core/opds2.py index 913255e2d9..2cc0bceda7 100644 --- a/core/opds2.py +++ b/core/opds2.py @@ -232,6 +232,7 @@ def publications( max_age: Optional[int] = None, ): """The publication feed, cached""" + # do some caching magic # then do the publication def refresh(): diff --git a/core/opensearch.py b/core/opensearch.py index b2bde8500b..638938e775 100644 --- a/core/opensearch.py +++ b/core/opensearch.py @@ -11,7 +11,6 @@ class OpenSearchDocument: @classmethod def search_info(cls, lane): - d = dict(name="Search") tags = [] diff --git a/core/python_expression_dsl/util.py b/core/python_expression_dsl/util.py index 18f5734c7d..4f3f4ba167 100644 --- a/core/python_expression_dsl/util.py +++ b/core/python_expression_dsl/util.py @@ -57,7 +57,6 @@ def _parse_number(tokens: ParseResults) -> Number: def _parse_unary_expression( expression_type: Type[UE], tokens: ParseResults ) -> Optional[UE]: - """Transform 
the token into an unary expression. :param tokens: ParseResults objects diff --git a/core/scripts.py b/core/scripts.py index a48108644f..be1970570b 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -362,8 +362,8 @@ def run(self, pool=None): for collection in collections: provider = self.provider_class(collection, **self.provider_kwargs) - with ( - pool or DatabasePool(self.worker_size, self.session_factory) + with pool or DatabasePool( + self.worker_size, self.session_factory ) as job_queue: query_size, batch_size = self.get_query_and_batch_sizes(provider) # Without a commit, the query to count which items need @@ -798,7 +798,6 @@ def parse_command_line(cls, _db, cmd_args=None, stdin=sys.stdin, *args, **kwargs def __init__( self, provider, _db=None, cmd_args=None, *provider_args, **provider_kwargs ): - super().__init__(_db) parsed_args = self.parse_command_line(self._db, cmd_args) if parsed_args.identifier_type: @@ -1581,7 +1580,6 @@ def do_run(self): class WorkProcessingScript(IdentifierInputScript): - name = "Work processing script" def __init__( @@ -1812,7 +1810,6 @@ def paginate_query(self, query) -> Generator: the ordering of the rows follows all the joined tables""" for subject in self._unchecked_subjects(): - last_work: Optional[Work] = None # Last work object of the previous page # IDs of the last work, for paging work_id, license_id, iden_id, classn_id = ( @@ -2114,7 +2111,6 @@ def make_query(self, _db, identifier_type, identifiers, log=None): return query.order_by(Edition.id) def do_run(self, batch_size=10): - self.query = self.make_query( self._db, self.parsed_args.identifier_type, @@ -2846,7 +2842,8 @@ def process_custom_list(self, custom_list: CustomList): def _update_list_with_new_entries(self, custom_list: CustomList): """Run a search on a custom list, assuming we have auto_update_enabled with a valid query - Only json type queries are supported right now, without any support for additional facets""" + Only json type queries are supported right now, without any support for additional facets + """ start_page = 1 json_query = None @@ -2902,7 +2899,6 @@ class DeleteInvisibleLanesScript(LibraryInputScript): """Delete lanes that are flagged as invisible""" def process_library(self, library): - try: for lane in self._db.query(Lane).filter(Lane.library_id == library.id): if not lane.visible: diff --git a/core/search/service.py b/core/search/service.py index 8efb159a51..3a7122d8ec 100644 --- a/core/search/service.py +++ b/core/search/service.py @@ -69,7 +69,8 @@ def from_bulk_error(cls, error: dict): class SearchService(ABC): """The interface we need from services like Opensearch. Essentially, it provides the operations we want with - sensible types, rather than the untyped pile of JSON the actual search client provides.""" + sensible types, rather than the untyped pile of JSON the actual search client provides. 
+ """ @abstractmethod def read_pointer_name(self) -> str: diff --git a/core/service/container.py b/core/service/container.py index 44ebc645b8..e25f58deda 100644 --- a/core/service/container.py +++ b/core/service/container.py @@ -9,7 +9,6 @@ class Services(DeclarativeContainer): - config = providers.Configuration() storage = Container( diff --git a/core/util/__init__.py b/core/util/__init__.py index c3fd91159e..95eccfc990 100644 --- a/core/util/__init__.py +++ b/core/util/__init__.py @@ -232,7 +232,6 @@ def author_name_similarity(cls, authors1, authors2): class TitleProcessor: - title_stopwords = ["The ", "A ", "An "] @classmethod @@ -264,7 +263,6 @@ def extract_subtitle(cls, main_title, subtitled_title): class Bigrams: - all_letters = re.compile("^[a-z]+$") def __init__(self, bigrams): @@ -523,7 +521,6 @@ def process_data(cls, data, bigrams): class MoneyUtility: - DEFAULT_CURRENCY = "USD" @classmethod diff --git a/core/util/cache.py b/core/util/cache.py index 6cd1a3a70e..a5187be522 100644 --- a/core/util/cache.py +++ b/core/util/cache.py @@ -70,7 +70,8 @@ class CachedData: Cache data using the CachedData.cache instance This must be initialized somewhere in the vicinity of its usage with CacheData.initialize(_db) While writing methods to cache, always lock the body to the _db is used and updated in a threadsafe manner - Always expunge objects before returning the data, to avoid stale/cross-thread session usage""" + Always expunge objects before returning the data, to avoid stale/cross-thread session usage + """ # Instance of itself cache: Optional[CachedData] = None diff --git a/core/util/flask_util.py b/core/util/flask_util.py index a2e0e11a1d..b1eaa678ce 100644 --- a/core/util/flask_util.py +++ b/core/util/flask_util.py @@ -152,7 +152,6 @@ def __init__( max_age=None, private=None, ): - mimetype = mimetype or OPDSFeed.ACQUISITION_FEED_TYPE status = status or 200 if max_age is None: diff --git a/core/util/opds_writer.py b/core/util/opds_writer.py index a5930a8128..7340f7778e 100644 --- a/core/util/opds_writer.py +++ b/core/util/opds_writer.py @@ -20,7 +20,6 @@ def __dict__(self): class AtomFeed: - ATOM_TYPE = "application/atom+xml" ATOM_LIKE_TYPES = [ATOM_TYPE, "application/xml"] @@ -174,7 +173,6 @@ def __str__(self): class OPDSFeed(AtomFeed): - ACQUISITION_FEED_TYPE = ( AtomFeed.ATOM_TYPE + ";profile=opds-catalog;kind=acquisition" ) diff --git a/scripts.py b/scripts.py index fb563518a8..4636a370b1 100644 --- a/scripts.py +++ b/scripts.py @@ -150,7 +150,6 @@ def q(self): class CacheRepresentationPerLane(TimestampScript, LaneSweeperScript): - name = "Cache one representation per lane" @classmethod @@ -496,7 +495,6 @@ def do_generate(self, lane, facets, pagination, feed_class=None): class CacheOPDSGroupFeedPerLane(CacheRepresentationPerLane): - name = "Cache OPDS grouped feed for each lane" def should_process_lane(self, lane): @@ -1119,7 +1117,6 @@ def explain(self, licensepool): class NYTBestSellerListsScript(TimestampScript): - name = "Update New York Times best-seller lists" def __init__(self, include_history=False): @@ -1132,7 +1129,6 @@ def do_run(self): # For every best-seller list... 
names = self.api.list_of_lists() for l in sorted(names["results"], key=lambda x: x["list_name_encoded"]): - name = l["list_name_encoded"] self.log.info("Handling list %s" % name) best = self.api.best_seller_list(l) diff --git a/tests/api/discovery/test_opds_registration.py b/tests/api/discovery/test_opds_registration.py index 284f3163dc..94ee5ecda9 100644 --- a/tests/api/discovery/test_opds_registration.py +++ b/tests/api/discovery/test_opds_registration.py @@ -570,7 +570,6 @@ def test_create_registration_headers( def test__send_registration_request( self, remote_registry_fixture: RemoteRegistryFixture, requests_mock: Mocker ): - # If everything goes well, the return value of do_post is # passed through. url = "http://url.com" diff --git a/tests/api/mockapi/overdrive.py b/tests/api/mockapi/overdrive.py index ee4c33c3c7..ae7aa11192 100644 --- a/tests/api/mockapi/overdrive.py +++ b/tests/api/mockapi/overdrive.py @@ -123,7 +123,6 @@ def json(self): class MockOverdriveAPI(MockOverdriveCoreAPI, OverdriveAPI): - library_data = '{"id":1810,"name":"My Public Library (MA)","type":"Library","collectionToken":"1a09d9203","links":{"self":{"href":"http://api.overdrive.com/v1/libraries/1810","type":"application/vnd.overdrive.api+json"},"products":{"href":"http://api.overdrive.com/v1/collections/1a09d9203/products","type":"application/vnd.overdrive.api+json"},"dlrHomepage":{"href":"http://ebooks.nypl.org","type":"text/html"}},"formats":[{"id":"audiobook-wma","name":"OverDrive WMA Audiobook"},{"id":"ebook-pdf-adobe","name":"Adobe PDF eBook"},{"id":"ebook-mediado","name":"MediaDo eBook"},{"id":"ebook-epub-adobe","name":"Adobe EPUB eBook"},{"id":"ebook-kindle","name":"Kindle Book"},{"id":"audiobook-mp3","name":"OverDrive MP3 Audiobook"},{"id":"ebook-pdf-open","name":"Open PDF eBook"},{"id":"ebook-overdrive","name":"OverDrive Read"},{"id":"video-streaming","name":"Streaming Video"},{"id":"ebook-epub-open","name":"Open EPUB eBook"}]}' token_data = '{"access_token":"foo","token_type":"bearer","expires_in":3600,"scope":"LIB META AVAIL SRCH"}' diff --git a/tests/api/sip/test_authentication_provider.py b/tests/api/sip/test_authentication_provider.py index ba25df72d2..7173089339 100644 --- a/tests/api/sip/test_authentication_provider.py +++ b/tests/api/sip/test_authentication_provider.py @@ -121,7 +121,6 @@ def create_provider( class TestSIP2AuthenticationProvider: - # We feed sample data into the MockSIPClient, even though it adds # an extra step of indirection, because it lets us use as a # starting point the actual (albeit redacted) SIP2 messages we diff --git a/tests/api/test_adobe_vendor_id.py b/tests/api/test_adobe_vendor_id.py index 7f8714652d..18de4070d2 100644 --- a/tests/api/test_adobe_vendor_id.py +++ b/tests/api/test_adobe_vendor_id.py @@ -382,7 +382,6 @@ def test__encode_short_client_token_uses_adobe_base64_encoding( assert "lib|0|1234|IQlGTjZ:J0VzNTI;WCEjKVoqX1M@" == token def test_decode_two_part_short_client_token_uses_adobe_base64_encoding(self): - # The base64 encoding of this signature has a plus sign in it. 
signature = "LbU}66%\\-4zt>R>_)\n2Q" encoded_signature = AuthdataUtility.adobe_base64_encode(signature) diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index 8f2dc76ae2..a4b0f5638a 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -1322,7 +1322,6 @@ def annotate_authentication_document(library, doc, url_for): class TestBasicAuthenticationProvider: - credentials = dict(username="user", password="") def test_authenticated_patron_passes_on_none( diff --git a/tests/api/test_axis.py b/tests/api/test_axis.py index 0e67d7db0a..96e8d3cedc 100644 --- a/tests/api/test_axis.py +++ b/tests/api/test_axis.py @@ -74,7 +74,6 @@ class Axis360Fixture: - # Sample bibliographic and availability data you can use in a test # without having to parse it from an XML file. BIBLIOGRAPHIC_DATA = Metadata( @@ -595,7 +594,10 @@ def test_reap(self, axis360: Axis360Fixture): # If there is a LicensePool but it has no owned licenses, # it's already been reaped, so nothing happens. - edition, pool, = axis360.db.edition( + ( + edition, + pool, + ) = axis360.db.edition( data_source_name=DataSource.AXIS_360, identifier_type=id1.type, identifier_id=id1.identifier, @@ -607,7 +609,10 @@ def test_reap(self, axis360: Axis360Fixture): # collection from the collection associated with this # Axis360API object, so it's not affected. collection2 = axis360.db.collection() - edition2, pool2, = axis360.db.edition( + ( + edition2, + pool2, + ) = axis360.db.edition( data_source_name=DataSource.AXIS_360, identifier_type=id1.type, identifier_id=id1.identifier, diff --git a/tests/api/test_bibliotheca.py b/tests/api/test_bibliotheca.py index 38b95efb9a..3534bf1afc 100644 --- a/tests/api/test_bibliotheca.py +++ b/tests/api/test_bibliotheca.py @@ -875,7 +875,6 @@ def test_parse(self, bibliotheca_fixture: BibliothecaAPITestFixture): class TestErrorParser: - BIBLIOTHECA_ERROR_RESPONSE_BODY_TEMPLATE = ( '' "Gen-001" @@ -1048,7 +1047,6 @@ def test_remote_initiated_server_error( class TestBibliothecaEventParser: - # Sample event feed to test out the parser. TWO_EVENTS = """ 1b0d6667-a10e-424a-9f73-fb6f6d41308e @@ -2017,7 +2015,6 @@ def test_process_item_creates_presentation_ready_work( assert True == pool.work.presentation_ready def test_internal_formats(self): - m = ItemListParser.internal_formats def _check_format(input, expect_medium, expect_format, expect_drm): diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index cb42636d3d..69a3826ffe 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -1441,7 +1441,6 @@ def test_sync_bookshelf_updates_local_loan_and_hold_with_modified_timestamps( def test_sync_bookshelf_applies_locked_delivery_mechanism_to_loan( self, circulation_api: CirculationAPIFixture ): - # By the time we hear about the patron's loan, they've already # locked in an oddball delivery mechanism. mechanism = DeliveryMechanismInfo( @@ -1473,7 +1472,6 @@ def test_sync_bookshelf_applies_locked_delivery_mechanism_to_loan( def test_sync_bookshelf_respects_last_loan_activity_sync( self, circulation_api: CirculationAPIFixture ): - # We believe we have up-to-date loan activity for this patron. 
now = utc_now() circulation_api.patron.last_loan_activity_sync = now diff --git a/tests/api/test_controller_base.py b/tests/api/test_controller_base.py index b80e51c4f5..3e27c146da 100644 --- a/tests/api/test_controller_base.py +++ b/tests/api/test_controller_base.py @@ -32,7 +32,6 @@ class TestBaseController: def test_unscoped_session(self, circulation_fixture: CirculationControllerFixture): - """Compare to TestScopedSession.test_scoped_session to see how database sessions will be handled in production. """ @@ -200,7 +199,6 @@ def test_authentication_sends_proper_headers( def test_handle_conditional_request( self, circulation_fixture: CirculationControllerFixture ): - # First, test success: the client provides If-Modified-Since # and it is _not_ earlier than the 'last modified' date known by # the server. diff --git a/tests/api/test_controller_scopedsession.py b/tests/api/test_controller_scopedsession.py index d07c5b05a5..53afdaa5cd 100644 --- a/tests/api/test_controller_scopedsession.py +++ b/tests/api/test_controller_scopedsession.py @@ -22,7 +22,8 @@ class ScopedHolder: """A scoped holder used to store some state in the test. This is necessary because we want to do some unusual things with scoped sessions, and don't necessary have access - to a database transaction fixture in all of the various methods that will be called.""" + to a database transaction fixture in all of the various methods that will be called. + """ def __init__(self): self.identifiers = 0 diff --git a/tests/api/test_firstbook2.py b/tests/api/test_firstbook2.py index 62636fe43b..2515ce19e9 100644 --- a/tests/api/test_firstbook2.py +++ b/tests/api/test_firstbook2.py @@ -26,7 +26,6 @@ def __init__(self, status_code, content): class MockFirstBookAuthenticationAPI(FirstBookAuthenticationAPI): - SUCCESS = '"Valid Code Pin Pair"' FAILURE = '{"code":404,"message":"Access Code Pin Pair not found"}' diff --git a/tests/api/test_millenium_patron.py b/tests/api/test_millenium_patron.py index 2cdd3aed1d..a6664ab940 100644 --- a/tests/api/test_millenium_patron.py +++ b/tests/api/test_millenium_patron.py @@ -28,7 +28,6 @@ def __init__(self, content): class MockAPI(MilleniumPatronAPI): - queue: List[Any] requests_made: List[Any] diff --git a/tests/api/test_novelist.py b/tests/api/test_novelist.py index 6632b1f8aa..8ec8bad0e5 100644 --- a/tests/api/test_novelist.py +++ b/tests/api/test_novelist.py @@ -15,7 +15,6 @@ class NoveListFixture: - db: DatabaseTransactionFixture files: NoveListFilesFixture integration: ExternalIntegration diff --git a/tests/api/test_nyt.py b/tests/api/test_nyt.py index 9b00e122d0..8aea651c06 100644 --- a/tests/api/test_nyt.py +++ b/tests/api/test_nyt.py @@ -240,7 +240,6 @@ def test_fill_in_history(self, nyt_fixture: NYTBestSellerAPIFixture): class TestNYTBestSellerListTitle: - one_list_title = json.loads( r"""{"list_name":"Combined Print and E-Book Fiction","display_name":"Combined Print & E-Book Fiction","bestsellers_date":"2015-01-17","published_date":"2015-02-01","rank":1,"rank_last_week":0,"weeks_on_list":1,"asterisk":0,"dagger":0,"amazon_product_url":"http:\/\/www.amazon.com\/The-Girl-Train-A-Novel-ebook\/dp\/B00L9B7IKE?tag=thenewyorktim-20","isbns":[{"isbn10":"1594633665","isbn13":"9781594633669"},{"isbn10":"0698185390","isbn13":"9780698185395"}],"book_details":[{"title":"THE GIRL ON THE TRAIN","description":"A psychological thriller set in London is full of complications and betrayals.","contributor":"by Paula Hawkins","author":"Paula 
Hawkins","contributor_note":"","price":0,"age_group":"","publisher":"Riverhead","isbns":[{"isbn10":"1594633665","isbn13":"9781594633669"},{"isbn10":"0698185390","isbn13":"9780698185395"}],"primary_isbn13":"9780698185395","primary_isbn10":"0698185390"}],"reviews":[{"book_review_link":"","first_chapter_link":"","sunday_review_link":"","article_chapter_link":""}]}""" ) diff --git a/tests/api/test_odilo.py b/tests/api/test_odilo.py index 96c7eb53d5..6cb50703e9 100644 --- a/tests/api/test_odilo.py +++ b/tests/api/test_odilo.py @@ -369,7 +369,6 @@ def test_02_data_not_found(self, odilo: OdiloFixture): odilo.api.log.info("Test resource not found on remote ok!") def test_make_absolute_url(self, odilo: OdiloFixture): - # A relative URL is made absolute using the API's base URL. relative = "/relative-url" absolute = odilo.api._make_absolute_url(relative) diff --git a/tests/api/test_odl.py b/tests/api/test_odl.py index 518e6cf4de..733a948d95 100644 --- a/tests/api/test_odl.py +++ b/tests/api/test_odl.py @@ -1854,7 +1854,6 @@ def test_odl_importer_reimport_expired_licenses(self, import_templated): # First import the license when it is not expired with freeze_time(license_expiry - datetime.timedelta(days=1)): - # Import the test feed. ( imported_editions, @@ -1881,7 +1880,6 @@ def test_odl_importer_reimport_expired_licenses(self, import_templated): # Reimport the license when it is expired with freeze_time(license_expiry + datetime.timedelta(days=1)): - # Import the test feed. ( imported_editions, diff --git a/tests/api/test_opds_for_distributors.py b/tests/api/test_opds_for_distributors.py index 513fea0795..17ad4d879f 100644 --- a/tests/api/test_opds_for_distributors.py +++ b/tests/api/test_opds_for_distributors.py @@ -131,7 +131,7 @@ def test_supported_media_types( # BEARER_TOKEN access control scheme, then X is a supported # media type for an OPDS For Distributors collection. supported = opds_dist_api_fixture.api.SUPPORTED_MEDIA_TYPES - for (format, drm) in DeliveryMechanism.default_client_can_fulfill_lookup: + for format, drm in DeliveryMechanism.default_client_can_fulfill_lookup: if drm == (DeliveryMechanism.BEARER_TOKEN) and format is not None: assert format in supported @@ -624,7 +624,6 @@ def test_import(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): def test__add_format_data( self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture ): - # Mock SUPPORTED_MEDIA_TYPES for purposes of test. api = OPDSForDistributorsAPI old_value = api.SUPPORTED_MEDIA_TYPES diff --git a/tests/api/test_routes.py b/tests/api/test_routes.py index 6f9f9b11fd..1d38fe1eb4 100644 --- a/tests/api/test_routes.py +++ b/tests/api/test_routes.py @@ -5,14 +5,12 @@ class TestAppConfiguration: - # Test the configuration of the real Flask app. 
def test_configuration(self): assert False == routes.app.url_map.merge_slashes class TestIndex: - CONTROLLER_NAME = "index_controller" @pytest.fixture(scope="function") @@ -30,7 +28,6 @@ def test_authentication_document(self, fixture: RouteTestFixture): class TestOPDSFeed: - CONTROLLER_NAME = "opds_feeds" @pytest.fixture(scope="function") @@ -117,7 +114,6 @@ def test_marc_page(self, fixture: RouteTestFixture): class TestProfileController: - CONTROLLER_NAME = "profiles" @pytest.fixture(scope="function") @@ -134,7 +130,6 @@ def test_patron_profile(self, fixture: RouteTestFixture): class TestLoansController: - CONTROLLER_NAME = "loans" @pytest.fixture(scope="function") @@ -221,7 +216,6 @@ def test_loan_or_hold_detail(self, fixture: RouteTestFixture): class TestAnnotationsController: - CONTROLLER_NAME = "annotations" @pytest.fixture(scope="function") @@ -254,7 +248,6 @@ def test_annotations_for_work(self, fixture: RouteTestFixture): class TestURNLookupController: - CONTROLLER_NAME = "urn_lookup" @pytest.fixture(scope="function") @@ -268,7 +261,6 @@ def test_work(self, fixture: RouteTestFixture): class TestWorkController: - CONTROLLER_NAME = "work_controller" @pytest.fixture(scope="function") @@ -395,7 +387,6 @@ def test_heartbeat(self, fixture: RouteTestFixture): class TestHealthCheck: - # This code isn't in a controller, and it doesn't really do anything, # so we check that it returns a specific result. def test_health_check(self, route_test: RouteTestFixture): diff --git a/tests/api/test_selftest.py b/tests/api/test_selftest.py index b7eea047ee..b7eab7ae90 100644 --- a/tests/api/test_selftest.py +++ b/tests/api/test_selftest.py @@ -271,7 +271,6 @@ def run_self_tests(cls, _db, constructor_method, *constructor_args): assert ["result 1", "result 2"] == script.processed def test_process_result(self, db: DatabaseTransactionFixture): - # Test a successful test that returned a result. success = SelfTestResult("i succeeded") success.success = True diff --git a/tests/core/classifiers/test_classifier.py b/tests/core/classifiers/test_classifier.py index 211e6b4237..1976cfd4c6 100644 --- a/tests/core/classifiers/test_classifier.py +++ b/tests/core/classifiers/test_classifier.py @@ -34,7 +34,6 @@ class TestLowercased: def test_constructor(self): - l = Lowercased("A string") # A string is lowercased. 
@@ -65,7 +64,6 @@ def test_fiction_default(self): class TestClassifier: def test_default_target_age_for_audience(self): - assert (None, None) == Classifier.default_target_age_for_audience( Classifier.AUDIENCE_CHILDREN ) diff --git a/tests/core/classifiers/test_ddc.py b/tests/core/classifiers/test_ddc.py index 77b39c09b2..5af54023d5 100644 --- a/tests/core/classifiers/test_ddc.py +++ b/tests/core/classifiers/test_ddc.py @@ -14,7 +14,6 @@ def test_name_for(self): assert None == DDC.name_for("Fic") def test_audience(self): - child = Classifier.AUDIENCE_CHILDREN adult = Classifier.AUDIENCE_ADULT young_adult = Classifier.AUDIENCE_YOUNG_ADULT diff --git a/tests/core/classifiers/test_lcc.py b/tests/core/classifiers/test_lcc.py index 6bfca90230..8fad9cd1e6 100644 --- a/tests/core/classifiers/test_lcc.py +++ b/tests/core/classifiers/test_lcc.py @@ -4,7 +4,6 @@ class TestLCC: def test_name_for(self): - child = Classifier.AUDIENCE_CHILDREN adult = Classifier.AUDIENCE_ADULT diff --git a/tests/core/models/test_circulationevent.py b/tests/core/models/test_circulationevent.py index dcd5e60bec..f6bcffc9cf 100644 --- a/tests/core/models/test_circulationevent.py +++ b/tests/core/models/test_circulationevent.py @@ -78,7 +78,6 @@ def from_dict(self, data, db: DatabaseTransactionFixture): return event, was_new def test_new_title(self, db: DatabaseTransactionFixture): - # Here's a new title. collection = db.collection() data = self._event_data( diff --git a/tests/core/models/test_contributor.py b/tests/core/models/test_contributor.py index 6b4417817b..ecb34f9415 100644 --- a/tests/core/models/test_contributor.py +++ b/tests/core/models/test_contributor.py @@ -140,7 +140,6 @@ def _names(self, in_name, out_family, out_display, default_display_name=None): assert d == out_display def test_default_names(self, db: DatabaseTransactionFixture): - # Pass in a default display name and it will always be used. self._names( "Jones, Bob", "Jones", "Sally Smith", default_display_name="Sally Smith" diff --git a/tests/core/models/test_edition.py b/tests/core/models/test_edition.py index 2d107220a8..ebe47cf875 100644 --- a/tests/core/models/test_edition.py +++ b/tests/core/models/test_edition.py @@ -150,7 +150,6 @@ def test_missing_coverage_from(self, db: DatabaseTransactionFixture): ) def test_sort_by_priority(self, db: DatabaseTransactionFixture): - # Make editions created by the license source, the metadata # wrangler, and library staff. admin = db.edition( @@ -184,7 +183,6 @@ def ids(l): assert ids(expect) == ids(actual) def test_equivalent_identifiers(self, db: DatabaseTransactionFixture): - edition = db.edition() identifier = db.identifier() session = db.session @@ -203,7 +201,6 @@ def test_equivalent_identifiers(self, db: DatabaseTransactionFixture): ) def test_recursive_edition_equivalence(self, db: DatabaseTransactionFixture): - # Here's a Edition for a Project Gutenberg text. 
gutenberg, gutenberg_pool = db.edition( data_source_name=DataSource.GUTENBERG, diff --git a/tests/core/models/test_hassessioncache.py b/tests/core/models/test_hassessioncache.py index cda3071f97..131b770614 100644 --- a/tests/core/models/test_hassessioncache.py +++ b/tests/core/models/test_hassessioncache.py @@ -83,7 +83,6 @@ def test_by_id(self, mock_db, mock_class, mock): def test_by_cache_key_miss_triggers_cache_miss_hook( self, mock_db, mock_class, mock ): - db = mock_db() cache_miss_hook = MagicMock(side_effect=lambda: (mock, True)) created, is_new = mock_class.by_cache_key(db, mock.cache_key(), cache_miss_hook) diff --git a/tests/core/models/test_patron.py b/tests/core/models/test_patron.py index 4378544f20..bbca58fcd5 100644 --- a/tests/core/models/test_patron.py +++ b/tests/core/models/test_patron.py @@ -112,7 +112,6 @@ def test_work(self, db: DatabaseTransactionFixture): assert work == hold.work def test_until(self, db: DatabaseTransactionFixture): - one_day = datetime.timedelta(days=1) two_days = datetime.timedelta(days=2) diff --git a/tests/core/models/test_resource.py b/tests/core/models/test_resource.py index 8f38870115..73316f77ed 100644 --- a/tests/core/models/test_resource.py +++ b/tests/core/models/test_resource.py @@ -57,7 +57,6 @@ def test_default_filename(self): class TestResource: def test_as_delivery_mechanism_for(self, db: DatabaseTransactionFixture): - # Calling as_delivery_mechanism_for on a Resource that is used # to deliver a specific LicensePool returns the appropriate # LicensePoolDeliveryMechanism. @@ -447,7 +446,6 @@ def test_extension(self): assert "" == m("no/such-media-type") def test_default_filename(self, db: DatabaseTransactionFixture): - # Here's a common sort of URL. url = "http://example.com/foo/bar/baz.txt" representation, ignore = db.representation(url) @@ -498,7 +496,6 @@ def test_default_filename(self, db: DatabaseTransactionFixture): assert "cover.png" == filename def test_cautious_http_get(self): - h = DummyHTTPClient() h.queue_response(200, content="yay") diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py index fece838f9f..8b62e3c699 100644 --- a/tests/core/service/storage/test_s3.py +++ b/tests/core/service/storage/test_s3.py @@ -302,9 +302,9 @@ def close(self): @pytest.fixture -def s3_service_integration_fixture() -> Generator[ - S3ServiceIntegrationFixture, None, None -]: +def s3_service_integration_fixture() -> ( + Generator[S3ServiceIntegrationFixture, None, None] +): fixture = S3ServiceIntegrationFixture() yield fixture fixture.close() diff --git a/tests/core/test_coverage.py b/tests/core/test_coverage.py index 7e38ea5acd..0a7b095ed8 100644 --- a/tests/core/test_coverage.py +++ b/tests/core/test_coverage.py @@ -684,6 +684,7 @@ def test_input_identifier_types(self, db: DatabaseTransactionFixture): """Test various acceptable and unacceptable values for the class variable INPUT_IDENTIFIER_TYPES. """ + # It's okay to set INPUT_IDENTIFIER_TYPES to None it means you # will cover any and all identifier types. class Base(IdentifierCoverageProvider): diff --git a/tests/core/test_lane.py b/tests/core/test_lane.py index 0ac74afa62..9eac0b8699 100644 --- a/tests/core/test_lane.py +++ b/tests/core/test_lane.py @@ -328,7 +328,6 @@ def __init__(self, entrypoints): assert [] == m(None) def test_modify_search_filter(self): - # When an entry point is selected, search filters are modified so # that they only find works that fit that entry point. 
filter = Filter() @@ -949,7 +948,6 @@ def test_modify_database_query(self, db: DatabaseTransactionFixture): ), (Facets.AVAILABLE_NOT_NOW, [not_available]), ]: - facets = Facets(db.default_library(), None, availability, None, None, None) modified = facets.modify_database_query(db.session, qu) assert (availability, sorted(x.title for x in modified)) == ( @@ -2307,7 +2305,6 @@ def test_groups_propagates_facets(self, db: DatabaseTransactionFixture): # Verify that the Facets object passed into groups() is # propagated to the methods called by groups(). class MockWorkList(WorkList): - overview_facets_called_with = None def works(self, _db, pagination, facets): @@ -4576,7 +4573,6 @@ def modify_search_filter(cls, filter): # the WorkList. class MockWorkList: - display_name = "Mock" visible = True priority = 2 diff --git a/tests/core/test_metadata.py b/tests/core/test_metadata.py index 995eccd88a..49ea2b2c6b 100644 --- a/tests/core/test_metadata.py +++ b/tests/core/test_metadata.py @@ -696,7 +696,6 @@ def test_from_edition(self, db: DatabaseTransactionFixture): assert edition.series_position == metadata.series_position def test_update(self, db: DatabaseTransactionFixture): - # Tests that Metadata.update correctly prefers new fields to old, unless # new fields aren't defined. @@ -841,7 +840,6 @@ def assert_registered(full): assert_registered(full=False) def test_apply_identifier_equivalency(self, db: DatabaseTransactionFixture): - # Set up an Edition. edition, pool = db.edition(with_license_pool=True) @@ -893,7 +891,6 @@ def test_apply_identifier_equivalency(self, db: DatabaseTransactionFixture): assert equivalency.output.identifier == "def" def test_apply_no_value(self, db: DatabaseTransactionFixture): - edition_old, pool = db.edition(with_license_pool=True) metadata = Metadata( @@ -1152,7 +1149,6 @@ def _availability_needs_update(self, *args): class TestTimestampData: def test_constructor(self): - # By default, all fields are set to None d = TimestampData() for i in ( @@ -1235,7 +1231,6 @@ def test_finalize_minimal(self, db: DatabaseTransactionFixture): assert i == None def test_finalize_full(self, db: DatabaseTransactionFixture): - # You can call finalize() with a complete set of arguments. d = TimestampData() d.finalize( diff --git a/tests/core/test_monitor.py b/tests/core/test_monitor.py index 7c3c2ef4e5..ce0ff09e82 100644 --- a/tests/core/test_monitor.py +++ b/tests/core/test_monitor.py @@ -63,7 +63,6 @@ class MockMonitor(Monitor): - SERVICE_NAME = "Dummy monitor for test" def __init__(self, _db, collection=None): diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 2a4088b890..1b40710aab 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -2188,6 +2188,7 @@ def test_follow_one_link(self, opds_importer_fixture: OPDSImporterFixture): feed = data.content_server_mini_feed http = DummyHTTPClient() + # If there's new data, follow_one_link extracts the next links. 
def follow(): return monitor.follow_one_link("http://url", do_get=http.do_get) diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index e7d32e2199..bba5afac41 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -2112,7 +2112,6 @@ def test_do_run( class TestSearchIndexCoverageRemover: - SERVICE_NAME = "Search Index Coverage Remover" def test_do_run(self, db: DatabaseTransactionFixture): diff --git a/tests/core/test_selftest.py b/tests/core/test_selftest.py index 4eec7cefc0..910b0d04c5 100644 --- a/tests/core/test_selftest.py +++ b/tests/core/test_selftest.py @@ -19,7 +19,6 @@ class TestSelfTestResult: - now = utc_now() future = now + datetime.timedelta(seconds=5) @@ -228,6 +227,7 @@ def _run_self_tests(self, _db): def test_run_test_success(self): o = MockSelfTest() + # This self-test method will succeed. def successful_test(arg, kwarg): return arg, kwarg @@ -240,6 +240,7 @@ def successful_test(arg, kwarg): def test_run_test_failure(self): o = MockSelfTest() + # This self-test method will fail. def unsuccessful_test(arg, kwarg): raise IntegrationException(arg, kwarg) diff --git a/tests/core/test_summary_evaluator.py b/tests/core/test_summary_evaluator.py index d3c8b564ee..635e4a8a48 100644 --- a/tests/core/test_summary_evaluator.py +++ b/tests/core/test_summary_evaluator.py @@ -30,7 +30,6 @@ def test_shorter_is_better(self): assert s2 == self._best(s1, s2) def test_noun_phrase_coverage_is_important(self): - s1 = "The story of Alice and the White Rabbit." s2 = "The story of Alice and the Mock Turtle." s3 = "Alice meets the Mock Turtle and the White Rabbit." diff --git a/tests/core/util/test_opds_writer.py b/tests/core/util/test_opds_writer.py index 00022d000e..f2be7c81f3 100644 --- a/tests/core/util/test_opds_writer.py +++ b/tests/core/util/test_opds_writer.py @@ -9,7 +9,6 @@ class TestOPDSMessage: def test_equality(self): - a = OPDSMessage("urn", 200, "message") assert a == a assert a != None diff --git a/tests/core/util/test_util.py b/tests/core/util/test_util.py index 4110d7cad7..3fa4f39058 100644 --- a/tests/core/util/test_util.py +++ b/tests/core/util/test_util.py @@ -39,7 +39,6 @@ def test_identity(self): assert 1 == MetadataSimilarity.title_similarity("foo bar.", "FOO BAR") def test_histogram_distance(self): - # These two sets of titles generate exactly the same histogram. # Their distance is 0. a1 = ["The First Title", "The Second Title"] @@ -422,7 +421,6 @@ def test_limit(self, db: DatabaseTransactionFixture): class TestSlugify: def test_slugify(self): - # text are slugified. assert "hey-im-a-feed" == slugify("Hey! 
I'm a feed!!") assert "you-and-me-n-every_feed" == slugify("You & Me n Every_Feed") diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 01e26fa8ac..4b0d36a764 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -972,9 +972,9 @@ def directory(self) -> str: @pytest.fixture(scope="function") -def temporary_directory_configuration() -> Iterable[ - TemporaryDirectoryConfigurationFixture -]: +def temporary_directory_configuration() -> ( + Iterable[TemporaryDirectoryConfigurationFixture] +): fix = TemporaryDirectoryConfigurationFixture.create() yield fix fix.close() diff --git a/tests/migration/test_20230531_0af587ff8595.py b/tests/migration/test_20230531_0af587ff8595.py index 604b66c6c6..ae9001b396 100644 --- a/tests/migration/test_20230531_0af587ff8595.py +++ b/tests/migration/test_20230531_0af587ff8595.py @@ -101,7 +101,6 @@ def test_key_rename( create_config_setting: CreateConfigSetting, create_collection: CreateCollection, ) -> None: - alembic_runner.migrate_down_to("a9ed3f76d649") with alembic_engine.connect() as connection: integration_id = create_external_integration( From cd8ef179b0ec277151816522ebef7b34d3f2e403 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 3 Oct 2023 06:34:59 -0300 Subject: [PATCH 077/262] Absolufy our imports (#1423) --- .pre-commit-config.yaml | 5 ++ api/admin/controller/announcement_service.py | 5 +- api/admin/controller/library_settings.py | 11 ++- api/admin/controller/work_editor.py | 5 +- .../password_admin_authentication_provider.py | 17 ++-- api/admin/routes.py | 5 +- api/adobe_vendor_id.py | 7 +- api/annotations.py | 3 +- api/app.py | 21 +++-- api/authenticator.py | 7 +- api/axis.py | 23 +++--- api/base_controller.py | 5 +- api/bibliotheca.py | 21 +++-- api/circulation.py | 12 +-- api/circulation_exceptions.py | 3 +- api/controller.py | 51 ++++++------ api/custom_index.py | 3 +- api/custom_patron_catalog.py | 3 +- api/enki.py | 7 +- api/firstbook2.py | 11 ++- api/google_analytics_provider.py | 3 +- api/kansas_patron.py | 11 ++- api/lanes.py | 5 +- api/millenium_patron.py | 15 ++-- api/monitor.py | 3 +- api/nyt.py | 3 +- api/odilo.py | 7 +- api/odl.py | 19 +++-- api/opds.py | 11 ++- api/overdrive.py | 21 +++-- api/routes.py | 5 +- api/selftest.py | 2 +- api/simple_authentication.py | 15 ++-- core/analytics.py | 10 +-- core/app_server.py | 13 ++-- core/classifier/__init__.py | 29 ++++--- core/classifier/age.py | 2 +- core/classifier/bic.py | 2 +- core/classifier/bisac.py | 4 +- core/classifier/ddc.py | 2 +- core/classifier/gutenberg.py | 2 +- core/classifier/keyword.py | 2 +- core/classifier/lcc.py | 2 +- core/classifier/overdrive.py | 2 +- core/classifier/simplified.py | 2 +- core/config.py | 9 +-- core/coverage.py | 11 ++- core/entrypoint.py | 2 +- core/external_list.py | 6 +- core/external_search.py | 43 +++++----- core/feed/annotator/loan_and_hold.py | 3 +- core/lane.py | 53 +++++++------ core/lcp/credential.py | 5 +- core/lcp/exceptions.py | 2 +- core/local_analytics_provider.py | 4 +- core/marc.py | 16 ++-- core/metadata_layer.py | 16 ++-- core/model/__init__.py | 78 +++++++++++-------- core/model/admin.py | 5 +- core/model/cachedfeed.py | 12 +-- core/model/circulationevent.py | 4 +- core/model/classification.py | 10 +-- core/model/collection.py | 25 +++--- core/model/configuration.py | 13 ++-- core/model/contributor.py | 6 +- core/model/coverage.py | 24 +++--- core/model/credential.py | 12 +-- core/model/customlist.py | 16 ++-- core/model/datasource.py | 8 +- core/model/devicetokens.py | 3 +- 
core/model/edition.py | 23 +++--- core/model/hassessioncache.py | 2 +- core/model/identifier.py | 32 ++++---- core/model/library.py | 27 +++---- core/model/licensing.py | 37 +++++---- core/model/listeners.py | 17 ++-- core/model/measurement.py | 6 +- core/model/patron.py | 16 ++-- core/model/resource.py | 14 ++-- core/model/work.py | 63 ++++++++------- core/monitor.py | 12 +-- core/opds.py | 19 +++-- core/opds2_import.py | 17 ++-- core/opds_import.py | 31 ++++---- core/overdrive.py | 19 +++-- core/problem_details.py | 2 +- core/scripts.py | 52 +++++++------ core/selftest.py | 14 ++-- core/user_profile.py | 2 +- core/util/__init__.py | 2 +- core/util/cache.py | 2 +- core/util/flask_util.py | 6 +- core/util/http.py | 5 +- core/util/opds_writer.py | 2 +- core/util/personal_names.py | 2 +- core/util/problem_detail.py | 2 +- core/util/summary.py | 2 +- core/util/titles.py | 2 +- tests/api/test_annotations.py | 3 +- tests/api/test_authenticator.py | 13 ++-- tests/api/test_axis.py | 9 +-- tests/api/test_circulationapi.py | 7 +- tests/api/test_kansas_patron.py | 5 +- tests/api/test_novelist.py | 5 +- tests/api/test_odilo.py | 6 +- tests/api/test_overdrive.py | 11 ++- tests/core/test_opds2_import.py | 5 +- 107 files changed, 641 insertions(+), 656 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 5b0c891912..ef1fbb1adf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -24,6 +24,11 @@ repos: - --py38-plus - --keep-runtime-typing + - repo: https://github.com/MarcoGorelli/absolufy-imports + rev: v0.3.0 + hooks: + - id: absolufy-imports + - repo: https://github.com/myint/autoflake rev: v2.2.1 hooks: diff --git a/api/admin/controller/announcement_service.py b/api/admin/controller/announcement_service.py index 654d526ff6..68c320d29b 100644 --- a/api/admin/controller/announcement_service.py +++ b/api/admin/controller/announcement_service.py @@ -4,14 +4,13 @@ import flask +from api.admin.announcement_list_validator import AnnouncementListValidator +from api.admin.controller.settings import SettingsController from api.config import Configuration from core.model.announcements import Announcement from core.problem_details import INVALID_INPUT from core.util.problem_detail import ProblemDetail, ProblemError -from ..announcement_list_validator import AnnouncementListValidator -from .settings import SettingsController - class AnnouncementSettings(SettingsController): """Controller that manages global announcements for all libraries""" diff --git a/api/admin/controller/library_settings.py b/api/admin/controller/library_settings.py index 3b929da82b..706576d680 100644 --- a/api/admin/controller/library_settings.py +++ b/api/admin/controller/library_settings.py @@ -13,7 +13,12 @@ from PIL.Image import Resampling from werkzeug.datastructures import FileStorage +from api.admin.announcement_list_validator import AnnouncementListValidator +from api.admin.controller.base import AdminPermissionsControllerMixin +from api.admin.form_data import ProcessFormData from api.admin.problem_details import * +from api.config import Configuration +from api.controller import CirculationManager from api.lanes import create_default_lanes from core.configuration.library import LibrarySettings from core.model import ( @@ -29,12 +34,6 @@ from core.model.library import LibraryLogo from core.util.problem_detail import ProblemDetail, ProblemError -from ...config import Configuration -from ...controller import CirculationManager -from ..announcement_list_validator import 
AnnouncementListValidator -from ..form_data import ProcessFormData -from .base import AdminPermissionsControllerMixin - class LibrarySettingsController(AdminPermissionsControllerMixin): def __init__(self, manager: CirculationManager): diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index 3b950f6167..32357ec410 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -4,7 +4,9 @@ from flask import Response from flask_babel import lazy_gettext as _ +from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.problem_details import * +from api.controller import CirculationManagerController from core.classifier import NO_NUMBER, NO_VALUE, SimplifiedGenreClassifier, genres from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.admin import AdminAnnotator @@ -29,9 +31,6 @@ from core.util.datetime_helpers import strptime_utc, utc_now from core.util.problem_detail import ProblemDetail -from ...controller import CirculationManagerController -from .base import AdminPermissionsControllerMixin - class WorkController(CirculationManagerController, AdminPermissionsControllerMixin): STAFF_WEIGHT = 1000 diff --git a/api/admin/password_admin_authentication_provider.py b/api/admin/password_admin_authentication_provider.py index bcc93ed9dd..b6cd887955 100644 --- a/api/admin/password_admin_authentication_provider.py +++ b/api/admin/password_admin_authentication_provider.py @@ -3,22 +3,21 @@ from flask import render_template_string, url_for from sqlalchemy.orm.session import Session +from api.admin.admin_authentication_provider import AdminAuthenticationProvider from api.admin.config import Configuration as AdminClientConfig -from api.config import Configuration -from core.model import Admin, ConfigurationSetting -from core.util.email import EmailManager -from core.util.problem_detail import ProblemDetail - -from .admin_authentication_provider import AdminAuthenticationProvider -from .problem_details import INVALID_ADMIN_CREDENTIALS -from .template_styles import button_style, input_style, label_style -from .templates import ( +from api.admin.problem_details import INVALID_ADMIN_CREDENTIALS +from api.admin.template_styles import button_style, input_style, label_style +from api.admin.templates import ( forgot_password_template, reset_password_email_html, reset_password_email_text, reset_password_template, sign_in_template, ) +from api.config import Configuration +from core.model import Admin, ConfigurationSetting +from core.util.email import EmailManager +from core.util.problem_detail import ProblemDetail class PasswordAdminAuthenticationProvider(AdminAuthenticationProvider): diff --git a/api/admin/routes.py b/api/admin/routes.py index 3a35da6e3c..cc76269a15 100644 --- a/api/admin/routes.py +++ b/api/admin/routes.py @@ -8,16 +8,15 @@ from flask_pydantic_spec import Response as SpecResponse from api.admin.config import Configuration as AdminClientConfig +from api.admin.controller.custom_lists import CustomListsController from api.admin.dashboard_stats import generate_statistics from api.admin.model.dashboard_statistics import StatisticsResponse +from api.admin.templates import admin_sign_in_again as sign_in_again_template from api.app import api_spec, app from api.routes import allows_library, has_library, library_route from core.app_server import ensure_pydantic_after_problem_detail, returns_problem_detail from core.util.problem_detail import ProblemDetail, ProblemDetailModel -from 
.controller.custom_lists import CustomListsController -from .templates import admin_sign_in_again as sign_in_again_template - # An admin's session will expire after this amount of time and # the admin will have to log in again. app.permanent_session_lifetime = timedelta(hours=9) diff --git a/api/adobe_vendor_id.py b/api/adobe_vendor_id.py index 7ccd920d36..9af5bd615b 100644 --- a/api/adobe_vendor_id.py +++ b/api/adobe_vendor_id.py @@ -14,6 +14,9 @@ from sqlalchemy.orm import Query from sqlalchemy.orm.session import Session +from api.config import CannotLoadConfiguration +from api.discovery.opds_registration import OpdsRegistrationService +from api.integration.registry.discovery import DiscoveryRegistry from core.integration.goals import Goals from core.model import Credential, DataSource, IntegrationConfiguration, Library, Patron from core.model.discovery_service_registration import ( @@ -22,10 +25,6 @@ ) from core.util.datetime_helpers import datetime_utc, utc_now -from .config import CannotLoadConfiguration -from .discovery.opds_registration import OpdsRegistrationService -from .integration.registry.discovery import DiscoveryRegistry - if sys.version_info >= (3, 11): from typing import Self else: diff --git a/api/annotations.py b/api/annotations.py index 21a3a37a7e..86d01af32b 100644 --- a/api/annotations.py +++ b/api/annotations.py @@ -4,11 +4,10 @@ from flask import url_for from pyld import jsonld +from api.problem_details import * from core.model import Annotation, Identifier from core.util.datetime_helpers import utc_now -from .problem_details import * - def load_document(url, *args, **kargs): """Retrieves JSON-LD for the given URL from a local diff --git a/api/app.py b/api/app.py index e774655fa0..58e2f68fc6 100644 --- a/api/app.py +++ b/api/app.py @@ -6,7 +6,15 @@ from flask_babel import Babel from flask_pydantic_spec import FlaskPydanticSpec +from api.admin.controller import setup_admin_controllers from api.config import Configuration +from api.controller import CirculationManager +from api.util.flask import PalaceFlask +from api.util.profilers import ( + PalaceCProfileProfiler, + PalacePyInstrumentProfiler, + PalaceXrayProfiler, +) from core.app_server import ErrorHandler from core.flask_sqlalchemy_session import flask_scoped_session from core.local_analytics_provider import LocalAnalyticsProvider @@ -21,15 +29,6 @@ from core.util.cache import CachedData from scripts import InstanceInitializationScript -from .admin.controller import setup_admin_controllers -from .controller import CirculationManager -from .util.flask import PalaceFlask -from .util.profilers import ( - PalaceCProfileProfiler, - PalacePyInstrumentProfiler, - PalaceXrayProfiler, -) - app = PalaceFlask(__name__) app._db = None # type: ignore [assignment] app.config["BABEL_DEFAULT_LOCALE"] = LanguageCodes.three_to_two[ @@ -91,8 +90,8 @@ def initialize_database(): app._db = _db -from . 
import routes # noqa -from .admin import routes as admin_routes # noqa +from api import routes # noqa +from api.admin import routes as admin_routes # noqa def initialize_application() -> PalaceFlask: diff --git a/api/authenticator.py b/api/authenticator.py index 02ed93cfe0..2a334cd238 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -19,7 +19,10 @@ from api.authentication.base import AuthenticationProvider from api.authentication.basic import BasicAuthenticationProvider from api.authentication.basic_token import BasicTokenAuthenticationProvider +from api.config import CannotLoadConfiguration, Configuration from api.custom_patron_catalog import CustomPatronCatalog +from api.integration.registry.patron_auth import PatronAuthRegistry +from api.problem_details import * from core.analytics import Analytics from core.integration.goals import Goals from core.integration.registry import IntegrationRegistry @@ -33,10 +36,6 @@ from core.util.log import elapsed_time_logging from core.util.problem_detail import ProblemDetail, ProblemError -from .config import CannotLoadConfiguration, Configuration -from .integration.registry.patron_auth import PatronAuthRegistry -from .problem_details import * - if sys.version_info >= (3, 11): from typing import Self else: diff --git a/api/axis.py b/api/axis.py index ae606674e9..9308d6204f 100644 --- a/api/axis.py +++ b/api/axis.py @@ -15,6 +15,17 @@ from pydantic import validator from api.admin.validator import Validator +from api.circulation import ( + APIAwareFulfillmentInfo, + BaseCirculationAPI, + BaseCirculationLoanSettings, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) +from api.circulation_exceptions import * +from api.selftest import HasCollectionSelfTests, SelfTestResult +from api.web_publication_manifest import FindawayManifest, SpineItem from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider, CoverageFailure @@ -58,18 +69,6 @@ from core.util.string_helpers import base64 from core.util.xmlparser import XMLParser -from .circulation import ( - APIAwareFulfillmentInfo, - BaseCirculationAPI, - BaseCirculationLoanSettings, - FulfillmentInfo, - HoldInfo, - LoanInfo, -) -from .circulation_exceptions import * -from .selftest import HasCollectionSelfTests, SelfTestResult -from .web_publication_manifest import FindawayManifest, SpineItem - class Axis360APIConstants: VERIFY_SSL = "verify_certificate" diff --git a/api/base_controller.py b/api/base_controller.py index 24b2e6aad0..62dd1a000c 100644 --- a/api/base_controller.py +++ b/api/base_controller.py @@ -3,12 +3,11 @@ from flask_babel import lazy_gettext as _ from werkzeug.datastructures import Authorization +from api.circulation_exceptions import * +from api.problem_details import * from core.model import Library, Patron from core.util.problem_detail import ProblemDetail -from .circulation_exceptions import * -from .problem_details import * - class BaseCirculationManagerController: """Define minimal standards for a circulation manager controller, diff --git a/api/bibliotheca.py b/api/bibliotheca.py index 1c582a38f0..9c42557f62 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -16,6 +16,16 @@ from lxml import etree from pymarc import parse_xml_to_array +from api.circulation import ( + BaseCirculationAPI, + BaseCirculationLoanSettings, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) +from api.circulation_exceptions import * +from api.selftest import HasCollectionSelfTests, SelfTestResult +from 
api.web_publication_manifest import FindawayManifest, SpineItem from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider @@ -64,17 +74,6 @@ from core.util.string_helpers import base64 from core.util.xmlparser import XMLParser -from .circulation import ( - BaseCirculationAPI, - BaseCirculationLoanSettings, - FulfillmentInfo, - HoldInfo, - LoanInfo, -) -from .circulation_exceptions import * -from .selftest import HasCollectionSelfTests, SelfTestResult -from .web_publication_manifest import FindawayManifest, SpineItem - class BibliothecaSettings(BaseSettings): username: str = FormField( diff --git a/api/circulation.py b/api/circulation.py index 290405d9d1..f44734becf 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -26,6 +26,11 @@ from pydantic import PositiveInt from sqlalchemy.orm import Query +from api.circulation_exceptions import * +from api.integration.registry.license_providers import ( + CirculationLicenseProvidersRegistry, +) +from api.util.patron import PatronUtility from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.integration.base import HasLibraryIntegrationConfiguration @@ -55,10 +60,6 @@ from core.model.integration import IntegrationConfiguration from core.util.datetime_helpers import utc_now -from .circulation_exceptions import * -from .integration.registry.license_providers import CirculationLicenseProvidersRegistry -from .util.patron import PatronUtility - if TYPE_CHECKING: pass @@ -898,11 +899,10 @@ def default_fulfillment_post_processors_map( :return: Mapping of protocols to fulfillment post-processors. """ from api.opds2 import TokenAuthenticationFulfillmentProcessor + from api.saml.wayfless import SAMLWAYFlessAcquisitionLinkProcessor from core.opds2_import import OPDS2Importer from core.opds_import import OPDSImporter - from .saml.wayfless import SAMLWAYFlessAcquisitionLinkProcessor - return { OPDSImporter.NAME: SAMLWAYFlessAcquisitionLinkProcessor, OPDS2Importer.NAME: TokenAuthenticationFulfillmentProcessor, diff --git a/api/circulation_exceptions.py b/api/circulation_exceptions.py index 8b04aa6cc5..0b62206113 100644 --- a/api/circulation_exceptions.py +++ b/api/circulation_exceptions.py @@ -2,12 +2,11 @@ from flask_babel import lazy_gettext as _ +from api.problem_details import * from core.config import IntegrationException from core.problem_details import INTEGRATION_ERROR, INTERNAL_SERVER_ERROR from core.util.problem_detail import ProblemDetail -from .problem_details import * - class CirculationException(IntegrationException): """An exception occured when carrying out a circulation operation. 
diff --git a/api/controller.py b/api/controller.py index 5143e4adf8..7890e3bc3c 100644 --- a/api/controller.py +++ b/api/controller.py @@ -23,10 +23,35 @@ from sqlalchemy.orm.exc import NoResultFound from werkzeug.datastructures import MIMEAccept +from api.annotations import AnnotationParser, AnnotationWriter from api.authentication.access_token import AccessTokenProvider +from api.authenticator import Authenticator, CirculationPatronProfileStorage +from api.base_controller import BaseCirculationManagerController +from api.circulation import CirculationAPI +from api.circulation_exceptions import * +from api.config import CannotLoadConfiguration, Configuration +from api.custom_index import CustomIndexView +from api.lanes import ( + ContributorFacets, + ContributorLane, + CrawlableCollectionBasedLane, + CrawlableCustomListBasedLane, + CrawlableFacets, + HasSeriesFacets, + JackpotFacets, + JackpotWorkList, + RecommendationLane, + RelatedBooksLane, + SeriesFacets, + SeriesLane, + load_lanes, +) from api.model.patron_auth import PatronAuthAccessToken from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse +from api.odl import ODLAPI +from api.odl2 import ODL2API from api.opds2 import OPDS2NavigationsAnnotator +from api.problem_details import * from api.saml.controller import SAMLController from core.analytics import Analytics from core.app_server import ApplicationVersionController @@ -95,32 +120,6 @@ from core.util.opds_writer import OPDSFeed from core.util.problem_detail import ProblemError -from .annotations import AnnotationParser, AnnotationWriter -from .authenticator import Authenticator, CirculationPatronProfileStorage -from .base_controller import BaseCirculationManagerController -from .circulation import CirculationAPI -from .circulation_exceptions import * -from .config import CannotLoadConfiguration, Configuration -from .custom_index import CustomIndexView -from .lanes import ( - ContributorFacets, - ContributorLane, - CrawlableCollectionBasedLane, - CrawlableCustomListBasedLane, - CrawlableFacets, - HasSeriesFacets, - JackpotFacets, - JackpotWorkList, - RecommendationLane, - RelatedBooksLane, - SeriesFacets, - SeriesLane, - load_lanes, -) -from .odl import ODLAPI -from .odl2 import ODL2API -from .problem_details import * - if TYPE_CHECKING: from werkzeug import Response as wkResponse diff --git a/api/custom_index.py b/api/custom_index.py index 1f7c498d92..c74b119d8d 100644 --- a/api/custom_index.py +++ b/api/custom_index.py @@ -12,13 +12,12 @@ from flask_babel import lazy_gettext as _ from sqlalchemy.orm.session import Session +from api.config import CannotLoadConfiguration from core.lane import Lane from core.model import ConfigurationSetting, ExternalIntegration, get_one from core.util.datetime_helpers import utc_now from core.util.opds_writer import OPDSFeed -from .config import CannotLoadConfiguration - class CustomIndexView: """A custom view that replaces the default OPDS view for a diff --git a/api/custom_patron_catalog.py b/api/custom_patron_catalog.py index 27e2174a1e..af6a50e8aa 100644 --- a/api/custom_patron_catalog.py +++ b/api/custom_patron_catalog.py @@ -6,12 +6,11 @@ from flask_babel import lazy_gettext as _ from sqlalchemy.orm.session import Session +from api.config import CannotLoadConfiguration from core.lane import Lane from core.model import ConfigurationSetting, ExternalIntegration, get_one from core.util.opds_writer import OPDSFeed -from .config import CannotLoadConfiguration - class CustomPatronCatalog: """An annotator for a 
library's authentication document. diff --git a/api/enki.py b/api/enki.py index 0f97f804ce..aa00bba596 100644 --- a/api/enki.py +++ b/api/enki.py @@ -6,6 +6,9 @@ from flask_babel import lazy_gettext as _ from pydantic import HttpUrl +from api.circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo +from api.circulation_exceptions import * +from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import ( @@ -40,10 +43,6 @@ from core.util.datetime_helpers import from_timestamp, strptime_utc, utc_now from core.util.http import HTTP, RemoteIntegrationException, RequestTimedOut -from .circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo -from .circulation_exceptions import * -from .selftest import HasCollectionSelfTests, SelfTestResult - class EnkiConstants: PRODUCTION_BASE_URL = "https://enkilibrary.org/API/" diff --git a/api/firstbook2.py b/api/firstbook2.py index 79a5caeb5f..6a88da5b40 100644 --- a/api/firstbook2.py +++ b/api/firstbook2.py @@ -9,16 +9,15 @@ from flask_babel import lazy_gettext as _ from pydantic import HttpUrl -from core.integration.settings import ConfigurationFormItem, FormField -from core.model import Patron - -from .authentication.base import PatronData -from .authentication.basic import ( +from api.authentication.base import PatronData +from api.authentication.basic import ( BasicAuthenticationProvider, BasicAuthProviderLibrarySettings, BasicAuthProviderSettings, ) -from .circulation_exceptions import RemoteInitiatedServerError +from api.circulation_exceptions import RemoteInitiatedServerError +from core.integration.settings import ConfigurationFormItem, FormField +from core.model import Patron class FirstBookAuthSettings(BasicAuthProviderSettings): diff --git a/api/google_analytics_provider.py b/api/google_analytics_provider.py index 56e33e4f94..f2586ae0fc 100644 --- a/api/google_analytics_provider.py +++ b/api/google_analytics_provider.py @@ -5,12 +5,11 @@ from flask_babel import lazy_gettext as _ +from api.config import CannotLoadConfiguration from core.model import ConfigurationSetting, ExternalIntegration, Session from core.service.container import Services from core.util.http import HTTP -from .config import CannotLoadConfiguration - class GoogleAnalyticsProvider: NAME = _("Google Analytics") diff --git a/api/kansas_patron.py b/api/kansas_patron.py index c2b0f2f18c..9f6603974c 100644 --- a/api/kansas_patron.py +++ b/api/kansas_patron.py @@ -4,16 +4,15 @@ from lxml import etree from pydantic import HttpUrl -from core.integration.settings import ConfigurationFormItem, FormField -from core.model import Patron -from core.util.http import HTTP - -from .authentication.base import PatronData -from .authentication.basic import ( +from api.authentication.base import PatronData +from api.authentication.basic import ( BasicAuthenticationProvider, BasicAuthProviderLibrarySettings, BasicAuthProviderSettings, ) +from core.integration.settings import ConfigurationFormItem, FormField +from core.model import Patron +from core.util.http import HTTP class KansasAuthSettings(BasicAuthProviderSettings): diff --git a/api/lanes.py b/api/lanes.py index c2a8310526..41a0e32c54 100644 --- a/api/lanes.py +++ b/api/lanes.py @@ -2,6 +2,8 @@ from typing import Optional import core.classifier as genres +from api.config import CannotLoadConfiguration, Configuration +from api.novelist import NoveListAPI from core import classifier 
from core.classifier import Classifier, GenreData, fiction_genres, nonfiction_genres from core.lane import ( @@ -24,9 +26,6 @@ ) from core.util import LanguageCodes -from .config import CannotLoadConfiguration, Configuration -from .novelist import NoveListAPI - def load_lanes(_db, library): """Return a WorkList that reflects the current lane structure of the diff --git a/api/millenium_patron.py b/api/millenium_patron.py index ad881b6a56..36d9517f1a 100644 --- a/api/millenium_patron.py +++ b/api/millenium_patron.py @@ -10,6 +10,13 @@ from money import Money from pydantic import HttpUrl, validator +from api.authentication.base import PatronData +from api.authentication.basic import ( + BasicAuthenticationProvider, + BasicAuthProviderLibrarySettings, + BasicAuthProviderSettings, +) +from api.authenticator import BasicAuthenticationProvider from core.analytics import Analytics from core.integration.settings import ( ConfigurationFormItem, @@ -22,14 +29,6 @@ from core.util.http import HTTP from core.util.xmlparser import XMLParser -from .authentication.base import PatronData -from .authentication.basic import ( - BasicAuthenticationProvider, - BasicAuthProviderLibrarySettings, - BasicAuthProviderSettings, -) -from .authenticator import BasicAuthenticationProvider - class NeighborhoodMode(Enum): DISABLED = "disabled" diff --git a/api/monitor.py b/api/monitor.py index fc98715158..209f41b9b2 100644 --- a/api/monitor.py +++ b/api/monitor.py @@ -2,6 +2,7 @@ from sqlalchemy import and_, or_ +from api.odl import ODLAPI from core.model import ( Annotation, Collection, @@ -13,8 +14,6 @@ from core.monitor import ReaperMonitor from core.util.datetime_helpers import utc_now -from .odl import ODLAPI - class LoanlikeReaperMonitor(ReaperMonitor): SOURCE_OF_TRUTH_PROTOCOLS = [ diff --git a/api/nyt.py b/api/nyt.py index fd6c41076d..3fb22e39af 100644 --- a/api/nyt.py +++ b/api/nyt.py @@ -7,6 +7,7 @@ from flask_babel import lazy_gettext as _ from sqlalchemy.orm.session import Session +from api.config import CannotLoadConfiguration, IntegrationException from core.external_list import TitleFromExternalList from core.metadata_layer import ContributorData, IdentifierData, Metadata from core.model import ( @@ -20,8 +21,6 @@ ) from core.selftest import HasSelfTests -from .config import CannotLoadConfiguration, IntegrationException - class NYTAPI: DATE_FORMAT = "%Y-%m-%d" diff --git a/api/odilo.py b/api/odilo.py index 9b23bcdd73..7cd7a0d19a 100644 --- a/api/odilo.py +++ b/api/odilo.py @@ -8,6 +8,9 @@ from pydantic import HttpUrl from sqlalchemy.orm.session import Session +from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo +from api.circulation_exceptions import * +from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider @@ -42,10 +45,6 @@ from core.util.http import HTTP, BadResponseException from core.util.personal_names import sort_name_to_display_name -from .circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo -from .circulation_exceptions import * -from .selftest import HasCollectionSelfTests, SelfTestResult - class OdiloRepresentationExtractor: """Extract useful information from Odilo's JSON representations.""" diff --git a/api/odl.py b/api/odl.py index 4ee86279b2..ba79636f81 100644 --- a/api/odl.py +++ b/api/odl.py @@ -16,6 +16,15 @@ from sqlalchemy.sql.expression import or_ from uritemplate import URITemplate +from 
api.circulation import ( + BaseCirculationAPI, + BaseCirculationEbookLoanSettings, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) +from api.circulation_exceptions import * +from api.lcp.hash import Hasher, HasherFactory, HashingAlgorithm from core import util from core.analytics import Analytics from core.importers import BaseImporterSettings @@ -58,16 +67,6 @@ from core.util.http import HTTP, BadResponseException from core.util.string_helpers import base64 -from .circulation import ( - BaseCirculationAPI, - BaseCirculationEbookLoanSettings, - FulfillmentInfo, - HoldInfo, - LoanInfo, -) -from .circulation_exceptions import * -from .lcp.hash import Hasher, HasherFactory, HashingAlgorithm - class ODLAPIConstants: DEFAULT_PASSPHRASE_HINT = "View the help page for more information." diff --git a/api/opds.py b/api/opds.py index 1209ea139c..912ce848f0 100644 --- a/api/opds.py +++ b/api/opds.py @@ -10,7 +10,12 @@ from flask import url_for +from api.adobe_vendor_id import AuthdataUtility +from api.annotations import AnnotationWriter +from api.circulation import BaseCirculationAPI, FulfillmentInfo +from api.config import CannotLoadConfiguration, Configuration from api.lanes import DynamicLane +from api.novelist import NoveListAPI from api.problem_details import NOT_FOUND_ON_REMOTE from core.analytics import Analytics from core.classifier import Classifier @@ -42,12 +47,6 @@ from core.util.opds_writer import OPDSFeed from core.util.problem_detail import ProblemDetail -from .adobe_vendor_id import AuthdataUtility -from .annotations import AnnotationWriter -from .circulation import BaseCirculationAPI, FulfillmentInfo -from .config import CannotLoadConfiguration, Configuration -from .novelist import NoveListAPI - class CirculationManagerAnnotator(Annotator): hidden_content_types: list[str] diff --git a/api/overdrive.py b/api/overdrive.py index d15c03908f..bfb120b4e2 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -10,6 +10,16 @@ from flask_babel import lazy_gettext as _ from sqlalchemy.orm.exc import StaleDataError +from api.circulation import ( + BaseCirculationAPI, + BaseCirculationEbookLoanSettings, + DeliveryMechanismInfo, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) +from api.circulation_exceptions import * +from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics from core.integration.base import HasChildIntegrationConfiguration from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField @@ -38,17 +48,6 @@ from core.util.datetime_helpers import strptime_utc from core.util.http import HTTP -from .circulation import ( - BaseCirculationAPI, - BaseCirculationEbookLoanSettings, - DeliveryMechanismInfo, - FulfillmentInfo, - HoldInfo, - LoanInfo, -) -from .circulation_exceptions import * -from .selftest import HasCollectionSelfTests, SelfTestResult - class OverdriveAPIConstants: # These are not real Overdrive formats; we use them internally so diff --git a/api/routes.py b/api/routes.py index 96b0df52b4..fc24ac599d 100644 --- a/api/routes.py +++ b/api/routes.py @@ -6,15 +6,14 @@ from flask_cors.core import get_cors_options, set_cors_headers from flask_pydantic_spec import Response as SpecResponse +from api.app import api_spec, app, babel +from api.config import Configuration from api.model.patron_auth import PatronAuthAccessToken from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse from core.app_server import compressible, returns_problem_detail from core.model import HasSessionCache from 
core.util.problem_detail import ProblemDetail -from .app import api_spec, app, babel -from .config import Configuration - @babel.localeselector def get_locale(): diff --git a/api/selftest.py b/api/selftest.py index d86de07fe3..02bc5e3adb 100644 --- a/api/selftest.py +++ b/api/selftest.py @@ -84,7 +84,7 @@ def _determine_self_test_patron( :raise: _NoValidLibrarySelfTestPatron when a valid patron is not found. """ _db = _db or Session.object_session(library) - from .authenticator import LibraryAuthenticator + from api.authenticator import LibraryAuthenticator library_authenticator = LibraryAuthenticator.from_config(_db, library) auth = library_authenticator.basic_auth_provider diff --git a/api/simple_authentication.py b/api/simple_authentication.py index 96a1a95410..51789c6880 100644 --- a/api/simple_authentication.py +++ b/api/simple_authentication.py @@ -1,5 +1,12 @@ from typing import List, Optional, Type, Union +from api.authentication.base import PatronData +from api.authentication.basic import ( + BasicAuthenticationProvider, + BasicAuthProviderLibrarySettings, + BasicAuthProviderSettings, +) +from api.config import CannotLoadConfiguration from core.analytics import Analytics from core.integration.settings import ( ConfigurationFormItem, @@ -8,14 +15,6 @@ ) from core.model import Patron -from .authentication.base import PatronData -from .authentication.basic import ( - BasicAuthenticationProvider, - BasicAuthProviderLibrarySettings, - BasicAuthProviderSettings, -) -from .config import CannotLoadConfiguration - class SimpleAuthSettings(BasicAuthProviderSettings): test_identifier: str = FormField( diff --git a/core/analytics.py b/core/analytics.py index 4c1d9e27f3..ca288b9291 100644 --- a/core/analytics.py +++ b/core/analytics.py @@ -7,11 +7,11 @@ from sqlalchemy.orm.session import Session -from .config import CannotLoadConfiguration -from .model import ExternalIntegration -from .service.container import container_instance -from .util.datetime_helpers import utc_now -from .util.log import log_elapsed_time +from core.config import CannotLoadConfiguration +from core.model import ExternalIntegration +from core.service.container import container_instance +from core.util.datetime_helpers import utc_now +from core.util.log import log_elapsed_time class Analytics: diff --git a/core/app_server.py b/core/app_server.py index ea5c0c6b9f..7fbe03ce0d 100644 --- a/core/app_server.py +++ b/core/app_server.py @@ -18,13 +18,12 @@ import core from api.admin.config import Configuration as AdminUiConfig from core.feed.acquisition import LookupAcquisitionFeed, OPDSAcquisitionFeed - -from .lane import Facets, Pagination -from .model import Identifier -from .problem_details import * -from .service.logging.configuration import LogLevel -from .util.opds_writer import OPDSMessage -from .util.problem_detail import ProblemDetail +from core.lane import Facets, Pagination +from core.model import Identifier +from core.problem_details import * +from core.service.logging.configuration import LogLevel +from core.util.opds_writer import OPDSMessage +from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: from api.util.flask import PalaceFlask diff --git a/core/classifier/__init__.py b/core/classifier/__init__.py index 549157b2fc..873a5eda7e 100644 --- a/core/classifier/__init__.py +++ b/core/classifier/__init__.py @@ -1092,7 +1092,7 @@ def __init__(self, work, test_session=None, debug=False): def add(self, classification): """Prepare a single Classification for consideration.""" try: - from ..model import 
DataSource, Subject + from core.model import DataSource, Subject except ValueError: from model import DataSource, Subject @@ -1546,7 +1546,7 @@ def weigh_genre(self, genre_data, weight): objects, not GenreData objects, when weighting genres. """ try: - from ..model import Genre + from core.model import Genre except ValueError: from model import Genre genre, ignore = Genre.lookup(self._db, genre_data.name) @@ -1616,18 +1616,25 @@ def consolidate_genre_weights(cls, weights, subgenre_swallows_parent_at=0.03): Classifier.classifiers[Classifier.AXIS_360_AUDIENCE] = AgeOrGradeClassifier # Finally, import classifiers described in submodules. -from .age import AgeClassifier, GradeLevelClassifier, InterestLevelClassifier -from .bic import BICClassifier -from .bisac import BISACClassifier -from .ddc import DeweyDecimalClassifier -from .gutenberg import GutenbergBookshelfClassifier -from .keyword import ( +from core.classifier.age import ( + AgeClassifier, + GradeLevelClassifier, + InterestLevelClassifier, +) +from core.classifier.bic import BICClassifier +from core.classifier.bisac import BISACClassifier +from core.classifier.ddc import DeweyDecimalClassifier +from core.classifier.gutenberg import GutenbergBookshelfClassifier +from core.classifier.keyword import ( Eg, FASTClassifier, KeywordBasedClassifier, LCSHClassifier, TAGClassifier, ) -from .lcc import LCCClassifier -from .overdrive import OverdriveClassifier -from .simplified import SimplifiedFictionClassifier, SimplifiedGenreClassifier +from core.classifier.lcc import LCCClassifier +from core.classifier.overdrive import OverdriveClassifier +from core.classifier.simplified import ( + SimplifiedFictionClassifier, + SimplifiedGenreClassifier, +) diff --git a/core/classifier/age.py b/core/classifier/age.py index 30ad21567d..9345b5e9c8 100644 --- a/core/classifier/age.py +++ b/core/classifier/age.py @@ -1,6 +1,6 @@ import re -from . import Classifier +from core.classifier import Classifier class GradeLevelClassifier(Classifier): diff --git a/core/classifier/bic.py b/core/classifier/bic.py index bd46317122..c61519dacd 100644 --- a/core/classifier/bic.py +++ b/core/classifier/bic.py @@ -1,4 +1,4 @@ -from . import * +from core.classifier import * class BICClassifier(Classifier): diff --git a/core/classifier/bisac.py b/core/classifier/bisac.py index 9391a66bb6..d161d76261 100644 --- a/core/classifier/bisac.py +++ b/core/classifier/bisac.py @@ -2,8 +2,8 @@ import os import re -from . import * -from .keyword import KeywordBasedClassifier +from core.classifier import * +from core.classifier.keyword import KeywordBasedClassifier class CustomMatchToken: diff --git a/core/classifier/ddc.py b/core/classifier/ddc.py index 934175ae5d..016c34267e 100644 --- a/core/classifier/ddc.py +++ b/core/classifier/ddc.py @@ -1,7 +1,7 @@ import json import os -from . import * +from core.classifier import * base_dir = os.path.split(__file__)[0] resource_dir = os.path.join(base_dir, "..", "resources") diff --git a/core/classifier/gutenberg.py b/core/classifier/gutenberg.py index 2cf9028627..f2615190aa 100644 --- a/core/classifier/gutenberg.py +++ b/core/classifier/gutenberg.py @@ -1,4 +1,4 @@ -from . import * +from core.classifier import * class GutenbergBookshelfClassifier(Classifier): diff --git a/core/classifier/keyword.py b/core/classifier/keyword.py index 55a76b8633..08639b5fe1 100644 --- a/core/classifier/keyword.py +++ b/core/classifier/keyword.py @@ -1,4 +1,4 @@ -from . 
import * +from core.classifier import * def match_kw(*l): diff --git a/core/classifier/lcc.py b/core/classifier/lcc.py index 9a69f262de..14655cfc03 100644 --- a/core/classifier/lcc.py +++ b/core/classifier/lcc.py @@ -1,4 +1,4 @@ -from . import * +from core.classifier import * class LCCClassifier(Classifier): diff --git a/core/classifier/overdrive.py b/core/classifier/overdrive.py index 5bd4ba9108..a99398b4d0 100644 --- a/core/classifier/overdrive.py +++ b/core/classifier/overdrive.py @@ -1,4 +1,4 @@ -from . import * +from core.classifier import * class OverdriveClassifier(Classifier): diff --git a/core/classifier/simplified.py b/core/classifier/simplified.py index 48996dda42..f2ef322cd9 100644 --- a/core/classifier/simplified.py +++ b/core/classifier/simplified.py @@ -1,6 +1,6 @@ from urllib.parse import unquote -from . import * +from core.classifier import * class SimplifiedGenreClassifier(Classifier): diff --git a/core/config.py b/core/config.py index e449da24f4..941efc9d5d 100644 --- a/core/config.py +++ b/core/config.py @@ -10,9 +10,8 @@ # It's convenient for other modules import IntegrationException # from this module, alongside CannotLoadConfiguration. from core.exceptions import IntegrationException - -from .util import LanguageCodes, ansible_boolean -from .util.datetime_helpers import to_utc, utc_now +from core.util import LanguageCodes, ansible_boolean +from core.util.datetime_helpers import to_utc, utc_now class CannotLoadConfiguration(IntegrationException): @@ -342,7 +341,7 @@ def site_configuration_last_update(cls, _db, known_value=None, timeout=0): # NOTE: Currently we never check the database (because timeout is # never set to None). This code will hopefully be removed soon. if _db and timeout is None: - from .model import ConfigurationSetting + from core.model import ConfigurationSetting timeout = ConfigurationSetting.sitewide( _db, cls.SITE_CONFIGURATION_TIMEOUT @@ -371,7 +370,7 @@ def site_configuration_last_update(cls, _db, known_value=None, timeout=0): # site_configuration_was_changed() (defined in model.py) was # called. if not known_value: - from .model import Timestamp + from core.model import Timestamp known_value = Timestamp.value( _db, cls.SITE_CONFIGURATION_CHANGED, service_type=None, collection=None diff --git a/core/coverage.py b/core/coverage.py index 0dd0d29adb..38df3a7f3b 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -6,10 +6,8 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.functions import func -from core.model.coverage import EquivalencyCoverageRecord - -from .metadata_layer import ReplacementPolicy, TimestampData -from .model import ( +from core.metadata_layer import ReplacementPolicy, TimestampData +from core.model import ( BaseCoverageRecord, Collection, CollectionMissing, @@ -24,8 +22,9 @@ WorkCoverageRecord, get_one, ) -from .util.datetime_helpers import utc_now -from .util.worker_pools import DatabaseJob +from core.model.coverage import EquivalencyCoverageRecord +from core.util.datetime_helpers import utc_now +from core.util.worker_pools import DatabaseJob class CoverageFailure: diff --git a/core/entrypoint.py b/core/entrypoint.py index 0ddd499ccf..93b4686cdf 100644 --- a/core/entrypoint.py +++ b/core/entrypoint.py @@ -115,7 +115,7 @@ def modify_database_query(cls, _db, qu): """Modify a query against Work+LicensePool+Edition to match only items with the right medium. 
""" - from .model import Edition + from core.model import Edition return qu.filter(Edition.medium == cls.INTERNAL_NAME) diff --git a/core/external_list.py b/core/external_list.py index 4889360dad..13497dc781 100644 --- a/core/external_list.py +++ b/core/external_list.py @@ -4,8 +4,8 @@ from sqlalchemy import or_ from sqlalchemy.orm.session import Session -from .metadata_layer import ReplacementPolicy -from .model import ( +from core.metadata_layer import ReplacementPolicy +from core.model import ( Classification, CustomListEntry, Edition, @@ -13,7 +13,7 @@ Subject, get_one_or_create, ) -from .util.datetime_helpers import utc_now +from core.util.datetime_helpers import utc_now class TitleFromExternalList: diff --git a/core/external_search.py b/core/external_search.py index 458c35700f..493bf3ecd4 100644 --- a/core/external_search.py +++ b/core/external_search.py @@ -29,22 +29,18 @@ from opensearchpy import OpenSearch from spellchecker import SpellChecker -from core.search.coverage_remover import RemovesSearchCoverage -from core.util import Values -from core.util.languages import LanguageNames - -from .classifier import ( +from core.classifier import ( AgeClassifier, Classifier, GradeLevelClassifier, KeywordBasedClassifier, ) -from .config import CannotLoadConfiguration -from .coverage import CoverageFailure, WorkPresentationProvider -from .facets import FacetConstants -from .lane import Pagination -from .metadata_layer import IdentifierData -from .model import ( +from core.config import CannotLoadConfiguration +from core.coverage import CoverageFailure, WorkPresentationProvider +from core.facets import FacetConstants +from core.lane import Pagination +from core.metadata_layer import IdentifierData +from core.model import ( Collection, ConfigurationSetting, Contributor, @@ -57,22 +53,25 @@ WorkCoverageRecord, numericrange_to_tuple, ) -from .problem_details import INVALID_INPUT -from .search.migrator import ( +from core.problem_details import INVALID_INPUT +from core.search.coverage_remover import RemovesSearchCoverage +from core.search.migrator import ( SearchDocumentReceiver, SearchDocumentReceiverType, SearchMigrationInProgress, SearchMigrator, ) -from .search.revision import SearchSchemaRevision -from .search.revision_directory import SearchRevisionDirectory -from .search.service import SearchService, SearchServiceOpensearch1 -from .selftest import HasSelfTests -from .util.cache import CachedData -from .util.datetime_helpers import from_timestamp -from .util.personal_names import display_name_to_sort_name -from .util.problem_detail import ProblemDetail -from .util.stopwords import ENGLISH_STOPWORDS +from core.search.revision import SearchSchemaRevision +from core.search.revision_directory import SearchRevisionDirectory +from core.search.service import SearchService, SearchServiceOpensearch1 +from core.selftest import HasSelfTests +from core.util import Values +from core.util.cache import CachedData +from core.util.datetime_helpers import from_timestamp +from core.util.languages import LanguageNames +from core.util.personal_names import display_name_to_sort_name +from core.util.problem_detail import ProblemDetail +from core.util.stopwords import ENGLISH_STOPWORDS @contextlib.contextmanager diff --git a/core/feed/annotator/loan_and_hold.py b/core/feed/annotator/loan_and_hold.py index 8880326e2c..c395c59618 100644 --- a/core/feed/annotator/loan_and_hold.py +++ b/core/feed/annotator/loan_and_hold.py @@ -2,13 +2,12 @@ from datetime import datetime from typing import Any, Dict, List, Optional 
+from core.feed.annotator.circulation import LibraryAnnotator from core.feed.types import FeedData, Link, WorkEntry from core.model.configuration import ExternalIntegration from core.model.constants import EditionConstants, LinkRelations from core.model.patron import Hold, Patron -from .circulation import LibraryAnnotator - class LibraryLoanAndHoldAnnotator(LibraryAnnotator): @staticmethod diff --git a/core/lane.py b/core/lane.py index ff57db7bd5..740a5ef105 100644 --- a/core/lane.py +++ b/core/lane.py @@ -37,20 +37,11 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql import select -from core.model.before_flush_decorator import Listener -from core.model.configuration import ( - ConfigurationAttributeValue, - ConfigurationSetting, - ExternalIntegration, -) -from core.model.hybrid import hybrid_property -from core.model.listeners import site_configuration_has_changed - -from .classifier import Classifier -from .config import Configuration -from .entrypoint import EntryPoint, EverythingEntryPoint -from .facets import FacetConstants -from .model import ( +from core.classifier import Classifier +from core.config import Configuration +from core.entrypoint import EntryPoint, EverythingEntryPoint +from core.facets import FacetConstants +from core.model import ( Base, CachedFeed, Collection, @@ -67,13 +58,21 @@ get_one_or_create, tuple_to_numericrange, ) -from .model.constants import EditionConstants -from .problem_details import * -from .util import LanguageCodes -from .util.accept_language import parse_accept_language -from .util.datetime_helpers import utc_now -from .util.opds_writer import OPDSFeed -from .util.problem_detail import ProblemDetail +from core.model.before_flush_decorator import Listener +from core.model.configuration import ( + ConfigurationAttributeValue, + ConfigurationSetting, + ExternalIntegration, +) +from core.model.constants import EditionConstants +from core.model.hybrid import hybrid_property +from core.model.listeners import site_configuration_has_changed +from core.problem_details import * +from core.util import LanguageCodes +from core.util.accept_language import parse_accept_language +from core.util.datetime_helpers import utc_now +from core.util.opds_writer import OPDSFeed +from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: from core.model import CachedMARCFile # noqa: autoflake @@ -1929,7 +1928,7 @@ def works( that generates such a list when executed. """ - from .external_search import ExternalSearchIndex + from core.external_search import ExternalSearchIndex search_engine = search_engine or ExternalSearchIndex.load(_db) filter = self.filter(_db, facets) @@ -1944,7 +1943,7 @@ def filter(self, _db, facets): Using this ensures that modify_search_filter_hook() is always called. """ - from .external_search import Filter + from core.external_search import Filter filter = Filter.from_worklist(_db, self, facets) modified = self.modify_search_filter_hook(filter) @@ -1981,7 +1980,7 @@ def works_for_resultsets(self, _db, resultsets, facets=None): """Convert a list of lists of Hit objects into a list of lists of Work objects. 
""" - from .external_search import Filter, WorkSearchResult + from core.external_search import Filter, WorkSearchResult has_script_fields = None work_ids = set() @@ -2128,7 +2127,7 @@ def _groups_for_lanes( else: target_size = pagination.size - from .external_search import ExternalSearchIndex + from core.external_search import ExternalSearchIndex search_engine = search_engine or ExternalSearchIndex.load(_db) @@ -2250,7 +2249,7 @@ def _featured_works_with_lanes( queries = [] for lane in lanes: overview_facets = lane.overview_facets(_db, facets) - from .external_search import Filter + from core.external_search import Filter filter = Filter.from_worklist(_db, lane, overview_facets) queries.append((None, filter, pagination)) @@ -3052,7 +3051,7 @@ def max_cache_age(self, type): def update_size(self, _db, search_engine=None): """Update the stored estimate of the number of Works in this Lane.""" library = self.get_library(_db) - from .external_search import ExternalSearchIndex + from core.external_search import ExternalSearchIndex search_engine = search_engine or ExternalSearchIndex.load(_db) diff --git a/core/lcp/credential.py b/core/lcp/credential.py index 68c2732d97..f88224521c 100644 --- a/core/lcp/credential.py +++ b/core/lcp/credential.py @@ -5,9 +5,8 @@ from sqlalchemy.orm import Session from api.lcp.hash import Hasher - -from ..model import Credential, DataSource, Patron -from .exceptions import LCPError +from core.lcp.exceptions import LCPError +from core.model import Credential, DataSource, Patron class LCPCredentialType(Enum): diff --git a/core/lcp/exceptions.py b/core/lcp/exceptions.py index aa287a5979..dce068cccb 100644 --- a/core/lcp/exceptions.py +++ b/core/lcp/exceptions.py @@ -1,4 +1,4 @@ -from ..exceptions import BaseError +from core.exceptions import BaseError class LCPError(BaseError): diff --git a/core/local_analytics_provider.py b/core/local_analytics_provider.py index 4b40c497bf..2ded27a636 100644 --- a/core/local_analytics_provider.py +++ b/core/local_analytics_provider.py @@ -1,8 +1,8 @@ from flask_babel import lazy_gettext as _ from sqlalchemy.orm.session import Session -from .model import CirculationEvent, ExternalIntegration, create, get_one -from .service.container import Services +from core.model import CirculationEvent, ExternalIntegration, create, get_one +from core.service.container import Services class LocalAnalyticsProvider: diff --git a/core/marc.py b/core/marc.py index 1157e5c86c..b58e8a5689 100644 --- a/core/marc.py +++ b/core/marc.py @@ -6,11 +6,11 @@ from pymarc import Field, Record, Subfield from sqlalchemy.orm.session import Session -from .classifier import Classifier -from .config import CannotLoadConfiguration -from .external_search import ExternalSearchIndex, SortKeyPagination -from .lane import BaseFacets, Lane -from .model import ( +from core.classifier import Classifier +from core.config import CannotLoadConfiguration +from core.external_search import ExternalSearchIndex, SortKeyPagination +from core.lane import BaseFacets, Lane +from core.model import ( CachedMARCFile, DeliveryMechanism, Edition, @@ -20,9 +20,9 @@ Work, get_one_or_create, ) -from .service.storage.s3 import MultipartS3ContextManager, S3Service -from .util import LanguageCodes -from .util.datetime_helpers import utc_now +from core.service.storage.s3 import MultipartS3ContextManager, S3Service +from core.util import LanguageCodes +from core.util.datetime_helpers import utc_now class Annotator: diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 52b1892d88..44a185f06b 
100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -16,9 +16,9 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ -from .analytics import Analytics -from .classifier import NO_NUMBER, NO_VALUE -from .model import ( +from core.analytics import Analytics +from core.classifier import NO_NUMBER, NO_VALUE +from core.model import ( Classification, Collection, Contributor, @@ -41,11 +41,11 @@ get_one, get_one_or_create, ) -from .model.licensing import LicenseFunctions, LicenseStatus -from .util import LanguageCodes -from .util.datetime_helpers import to_utc, utc_now -from .util.median import median -from .util.personal_names import display_name_to_sort_name +from core.model.licensing import LicenseFunctions, LicenseStatus +from core.util import LanguageCodes +from core.util.datetime_helpers import to_utc, utc_now +from core.util.median import median +from core.util.personal_names import display_name_to_sort_name class ReplacementPolicy: diff --git a/core/model/__init__.py b/core/model/__init__.py index 99c1edf25f..df0021276c 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -20,8 +20,8 @@ Base = declarative_base() -from .. import classifier -from .constants import ( +from core import classifier +from core.model.constants import ( DataSourceConstants, EditionConstants, IdentifierConstants, @@ -425,9 +425,9 @@ def initialize_data(cls, session: Session): session.execute(text(sql)) # Create initial content. - from .classification import Genre - from .datasource import DataSource - from .licensing import DeliveryMechanism + from core.model.classification import Genre + from core.model.datasource import DataSource + from core.model.licensing import DeliveryMechanism list(DataSource.well_known_sources(session)) @@ -523,35 +523,46 @@ def _bulk_operation(self): SAMLFederatedIdentityProvider, SAMLFederation, ) - -from .admin import Admin, AdminRole -from .cachedfeed import CachedFeed, CachedMARCFile, WillNotGenerateExpensiveFeed -from .circulationevent import CirculationEvent -from .classification import Classification, Genre, Subject -from .collection import ( +from core.model.admin import Admin, AdminRole +from core.model.cachedfeed import ( + CachedFeed, + CachedMARCFile, + WillNotGenerateExpensiveFeed, +) +from core.model.circulationevent import CirculationEvent +from core.model.classification import Classification, Genre, Subject +from core.model.collection import ( Collection, CollectionIdentifier, CollectionMissing, collections_identifiers, ) -from .configuration import ( +from core.model.configuration import ( ConfigurationSetting, ExternalIntegration, ExternalIntegrationLink, ) -from .contributor import Contribution, Contributor -from .coverage import BaseCoverageRecord, CoverageRecord, Timestamp, WorkCoverageRecord -from .credential import Credential -from .customlist import CustomList, CustomListEntry -from .datasource import DataSource -from .devicetokens import DeviceToken -from .discovery_service_registration import DiscoveryServiceRegistration -from .edition import Edition -from .hassessioncache import HasSessionCache -from .identifier import Equivalency, Identifier -from .integration import IntegrationConfiguration, IntegrationLibraryConfiguration -from .library import Library -from .licensing import ( +from core.model.contributor import Contribution, Contributor +from core.model.coverage import ( + BaseCoverageRecord, + CoverageRecord, + Timestamp, + WorkCoverageRecord, +) +from core.model.credential import 
Credential +from core.model.customlist import CustomList, CustomListEntry +from core.model.datasource import DataSource +from core.model.devicetokens import DeviceToken +from core.model.discovery_service_registration import DiscoveryServiceRegistration +from core.model.edition import Edition +from core.model.hassessioncache import HasSessionCache +from core.model.identifier import Equivalency, Identifier +from core.model.integration import ( + IntegrationConfiguration, + IntegrationLibraryConfiguration, +) +from core.model.library import Library +from core.model.licensing import ( DeliveryMechanism, License, LicensePool, @@ -559,9 +570,9 @@ def _bulk_operation(self): PolicyException, RightsStatus, ) -from .listeners import * -from .measurement import Measurement -from .patron import ( +from core.model.listeners import * +from core.model.measurement import Measurement +from core.model.patron import ( Annotation, Hold, Loan, @@ -569,9 +580,14 @@ def _bulk_operation(self): Patron, PatronProfileStorage, ) -from .resource import Hyperlink, Representation, Resource, ResourceTransformation -from .time_tracking import PlaytimeEntry, PlaytimeSummary -from .work import Work, WorkGenre +from core.model.resource import ( + Hyperlink, + Representation, + Resource, + ResourceTransformation, +) +from core.model.time_tracking import PlaytimeEntry, PlaytimeSummary +from core.model.work import Work, WorkGenre # Import order important here to avoid an import cycle. from core.lane import Lane, LaneGenre # isort:skip diff --git a/core/model/admin.py b/core/model/admin.py index 76f3168c34..c9db3cce5d 100644 --- a/core/model/admin.py +++ b/core/model/admin.py @@ -20,13 +20,12 @@ from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm.session import Session +from core.model import Base, get_one, get_one_or_create +from core.model.hassessioncache import HasSessionCache from core.model.hybrid import hybrid_property from core.problem_details import INVALID_RESET_PASSWORD_TOKEN from core.util.problem_detail import ProblemDetail -from . import Base, get_one, get_one_or_create -from .hassessioncache import HasSessionCache - if TYPE_CHECKING: from core.model.library import Library # noqa: autoflake diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py index c8c6d66458..cc0ac2093f 100644 --- a/core/model/cachedfeed.py +++ b/core/model/cachedfeed.py @@ -10,13 +10,13 @@ from sqlalchemy.orm import Mapped, relationship from sqlalchemy.sql.expression import and_ -from ..util.datetime_helpers import utc_now -from ..util.flask_util import OPDSFeedResponse -from . import Base, flush, get_one, get_one_or_create -from .work import Work +from core.model import Base, flush, get_one, get_one_or_create +from core.model.work import Work +from core.util.datetime_helpers import utc_now +from core.util.flask_util import OPDSFeedResponse if TYPE_CHECKING: - from . import Representation + from core.model import Representation # This named tuple makes it easy to manage the return value of # CachedFeed._prepare_keys. @@ -332,7 +332,7 @@ def _prepare_keys(cls, _db, worklist, facets, pagination): work = getattr(worklist, "work", None) # Either lane_id or unique_key must be set, but not both. 
- from ..lane import Lane + from core.lane import Lane if isinstance(worklist, Lane): lane_id = worklist.id diff --git a/core/model/circulationevent.py b/core/model/circulationevent.py index 92ae0e133f..9070e5b4f3 100644 --- a/core/model/circulationevent.py +++ b/core/model/circulationevent.py @@ -5,8 +5,8 @@ from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, String, Unicode -from ..util.datetime_helpers import utc_now -from . import Base, get_one_or_create +from core.model import Base, get_one_or_create +from core.util.datetime_helpers import utc_now class CirculationEvent(Base): diff --git a/core/model/classification.py b/core/model/classification.py index 4dc152cc91..f225b1484d 100644 --- a/core/model/classification.py +++ b/core/model/classification.py @@ -19,14 +19,14 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.functions import func -from .. import classifier -from ..classifier import ( # type: ignore[attr-defined] +from core import classifier +from core.classifier import ( # type: ignore[attr-defined] COMICS_AND_GRAPHIC_NOVELS, Classifier, Erotica, GenreData, ) -from . import ( +from core.model import ( Base, get_one, get_one_or_create, @@ -34,8 +34,8 @@ numericrange_to_tuple, tuple_to_numericrange, ) -from .constants import DataSourceConstants -from .hassessioncache import HasSessionCache +from core.model.constants import DataSourceConstants +from core.model.hassessioncache import HasSessionCache if TYPE_CHECKING: # This is needed during type checking so we have the diff --git a/core/model/collection.py b/core/model/collection.py index a3a83329fd..1237ddfc8a 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -29,24 +29,23 @@ from sqlalchemy.sql.expression import and_, or_ from core.integration.goals import Goals +from core.model import Base, create, get_one, get_one_or_create +from core.model.configuration import ConfigurationSetting, ExternalIntegration +from core.model.constants import EditionConstants +from core.model.coverage import CoverageRecord, WorkCoverageRecord +from core.model.datasource import DataSource +from core.model.edition import Edition +from core.model.hassessioncache import HasSessionCache from core.model.hybrid import hybrid_property +from core.model.identifier import Identifier from core.model.integration import ( IntegrationConfiguration, IntegrationLibraryConfiguration, ) - -from ..util.string_helpers import base64 -from . 
import Base, create, get_one, get_one_or_create -from .configuration import ConfigurationSetting, ExternalIntegration -from .constants import EditionConstants -from .coverage import CoverageRecord, WorkCoverageRecord -from .datasource import DataSource -from .edition import Edition -from .hassessioncache import HasSessionCache -from .identifier import Identifier -from .library import Library -from .licensing import LicensePool, LicensePoolDeliveryMechanism -from .work import Work +from core.model.library import Library +from core.model.licensing import LicensePool, LicensePoolDeliveryMechanism +from core.model.work import Work +from core.util.string_helpers import base64 if TYPE_CHECKING: # This is needed during type checking so we have the diff --git a/core/model/configuration.py b/core/model/configuration.py index 4c67ff82a9..1056629d8f 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -12,14 +12,13 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_ +from core.config import CannotLoadConfiguration, Configuration +from core.model import Base, get_one, get_one_or_create +from core.model.constants import DataSourceConstants +from core.model.hassessioncache import HasSessionCache from core.model.hybrid import hybrid_property - -from ..config import CannotLoadConfiguration, Configuration -from ..util.string_helpers import random_string -from . import Base, get_one, get_one_or_create -from .constants import DataSourceConstants -from .hassessioncache import HasSessionCache -from .library import Library, externalintegrations_libraries +from core.model.library import Library, externalintegrations_libraries +from core.util.string_helpers import random_string if TYPE_CHECKING: # This is needed during type checking so we have the diff --git a/core/model/contributor.py b/core/model/contributor.py index 11577252e7..284d8e55b1 100644 --- a/core/model/contributor.py +++ b/core/model/contributor.py @@ -12,9 +12,9 @@ from sqlalchemy.orm import Mapped, relationship from sqlalchemy.orm.session import Session -from ..util.personal_names import display_name_to_sort_name -from . import Base, flush, get_one, get_one_or_create -from .hybrid import hybrid_property +from core.model import Base, flush, get_one, get_one_or_create +from core.model.hybrid import hybrid_property +from core.util.personal_names import display_name_to_sort_name if TYPE_CHECKING: from core.model import Edition # noqa: autoflake diff --git a/core/model/coverage.py b/core/model/coverage.py index c6d3426519..ef36fe784e 100644 --- a/core/model/coverage.py +++ b/core/model/coverage.py @@ -18,13 +18,11 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, literal, literal_column, or_ -from ..util.datetime_helpers import utc_now -from . import Base, SessionBulkOperation, get_one, get_one_or_create +from core.model import Base, SessionBulkOperation, get_one, get_one_or_create +from core.util.datetime_helpers import utc_now if TYPE_CHECKING: - from core.model import Collection, DataSource, Identifier, Work - - from . 
import Equivalency + from core.model import Collection, DataSource, Equivalency, Identifier, Work class BaseCoverageRecord: @@ -296,7 +294,7 @@ def update( def to_data(self): """Convert this Timestamp to an unfinalized TimestampData.""" - from ..metadata_layer import TimestampData + from core.metadata_layer import TimestampData return TimestampData( start=self.start, @@ -399,9 +397,9 @@ def assert_coverage_operation(cls, operation, collection): def lookup( cls, edition_or_identifier, data_source, operation=None, collection=None ): - from .datasource import DataSource - from .edition import Edition - from .identifier import Identifier + from core.model.datasource import DataSource + from core.model.edition import Edition + from core.model.identifier import Identifier cls.assert_coverage_operation(operation, collection) @@ -438,8 +436,8 @@ def add_for( status=BaseCoverageRecord.SUCCESS, collection=None, ): - from .edition import Edition - from .identifier import Identifier + from core.model.edition import Edition + from core.model.identifier import Identifier cls.assert_coverage_operation(operation, collection) @@ -479,7 +477,7 @@ def bulk_add( """Create and update CoverageRecords so that every Identifier in `identifiers` has an identical record. """ - from .identifier import Identifier + from core.model.identifier import Identifier if not identifiers: # Nothing to do. @@ -683,7 +681,7 @@ def bulk_add( """Create and update WorkCoverageRecords so that every Work in `works` has an identical record. """ - from .work import Work + from core.model.work import Work if not works: # Nothing to do. diff --git a/core/model/credential.py b/core/model/credential.py index ce1de6a47f..89d530232a 100644 --- a/core/model/credential.py +++ b/core/model/credential.py @@ -9,9 +9,9 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_ -from ..util import is_session -from ..util.datetime_helpers import utc_now -from . import Base, get_one, get_one_or_create +from core.model import Base, get_one, get_one_or_create +from core.util import is_session +from core.util.datetime_helpers import utc_now if TYPE_CHECKING: from core.model import Collection, DataSource, Patron @@ -127,7 +127,7 @@ def lookup( collection=None, force_refresh=False, ) -> Credential: - from .datasource import DataSource + from core.model.datasource import DataSource if isinstance(data_source, str): data_source = DataSource.lookup(_db, data_source) @@ -187,7 +187,7 @@ def lookup_by_patron( :param auto_create_datasource: Boolean value indicating whether a data source should be created in the case it doesn't """ - from .patron import Patron + from core.model.patron import Patron if not is_session(_db): raise ValueError('"_db" argument must be a valid SQLAlchemy session') @@ -202,7 +202,7 @@ def lookup_by_patron( if not isinstance(auto_create_datasource, bool): raise ValueError('"auto_create_datasource" argument must be boolean') - from .datasource import DataSource + from core.model.datasource import DataSource data_source = DataSource.lookup( _db, data_source_name, autocreate=auto_create_datasource diff --git a/core/model/customlist.py b/core/model/customlist.py index 6a74d5d48c..bf44e00d5f 100644 --- a/core/model/customlist.py +++ b/core/model/customlist.py @@ -21,15 +21,15 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import or_ -from ..util.datetime_helpers import utc_now -from . 
import Base, get_one_or_create -from .datasource import DataSource -from .identifier import Identifier -from .licensing import LicensePool -from .work import Work +from core.model import Base, get_one_or_create +from core.model.datasource import DataSource +from core.model.identifier import Identifier +from core.model.licensing import LicensePool +from core.model.work import Work +from core.util.datetime_helpers import utc_now if TYPE_CHECKING: - from . import Collection, Library + from core.model import Collection, Library @total_ordering @@ -390,7 +390,7 @@ def set_work(self, metadata=None, policy=None): new_work = None if not metadata: - from ..metadata_layer import Metadata + from core.metadata_layer import Metadata metadata = Metadata.from_edition(edition) diff --git a/core/model/datasource.py b/core/model/datasource.py index 4baf9b2825..37106ab127 100644 --- a/core/model/datasource.py +++ b/core/model/datasource.py @@ -10,10 +10,10 @@ from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.orm import Mapped, relationship -from . import Base, get_one, get_one_or_create -from .constants import DataSourceConstants, IdentifierConstants -from .hassessioncache import HasSessionCache -from .licensing import LicensePoolDeliveryMechanism +from core.model import Base, get_one, get_one_or_create +from core.model.constants import DataSourceConstants, IdentifierConstants +from core.model.hassessioncache import HasSessionCache +from core.model.licensing import LicensePoolDeliveryMechanism if TYPE_CHECKING: # This is needed during type checking so we have the diff --git a/core/model/devicetokens.py b/core/model/devicetokens.py index 2f8a2559fa..b324b65619 100644 --- a/core/model/devicetokens.py +++ b/core/model/devicetokens.py @@ -5,10 +5,9 @@ from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Mapped, backref, relationship +from core.model import Base from core.model.patron import Patron -from . import Base - if sys.version_info >= (3, 11): from typing import Self else: diff --git a/core/model/edition.py b/core/model/edition.py index 214e87a5f8..6d6307cc5c 100644 --- a/core/model/edition.py +++ b/core/model/edition.py @@ -11,15 +11,20 @@ from sqlalchemy.orm import Mapped, relationship from sqlalchemy.orm.session import Session -from ..util import LanguageCodes, TitleProcessor -from ..util.permanent_work_id import WorkIDCalculator -from . 
import Base, PresentationCalculationPolicy, get_one, get_one_or_create -from .constants import DataSourceConstants, EditionConstants, LinkRelations, MediaTypes -from .contributor import Contribution, Contributor -from .coverage import CoverageRecord -from .datasource import DataSource -from .identifier import Identifier -from .licensing import DeliveryMechanism, LicensePool +from core.model import Base, PresentationCalculationPolicy, get_one, get_one_or_create +from core.model.constants import ( + DataSourceConstants, + EditionConstants, + LinkRelations, + MediaTypes, +) +from core.model.contributor import Contribution, Contributor +from core.model.coverage import CoverageRecord +from core.model.datasource import DataSource +from core.model.identifier import Identifier +from core.model.licensing import DeliveryMechanism, LicensePool +from core.util import LanguageCodes, TitleProcessor +from core.util.permanent_work_id import WorkIDCalculator if TYPE_CHECKING: # This is needed during type checking so we have the diff --git a/core/model/hassessioncache.py b/core/model/hassessioncache.py index c18462992b..1f17ef9bd5 100644 --- a/core/model/hassessioncache.py +++ b/core/model/hassessioncache.py @@ -11,7 +11,7 @@ from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import Mapped, Session -from . import get_one +from core.model import get_one if sys.version_info >= (3, 11): from typing import Self diff --git a/core/model/identifier.py b/core/model/identifier.py index 0aac66778c..d922d381a4 100644 --- a/core/model/identifier.py +++ b/core/model/identifier.py @@ -28,15 +28,21 @@ from sqlalchemy.sql import select from sqlalchemy.sql.expression import and_, or_ -from ..util.datetime_helpers import utc_now -from ..util.summary import SummaryEvaluator -from . import Base, PresentationCalculationPolicy, create, get_one, get_one_or_create -from .classification import Classification, Subject -from .constants import IdentifierConstants, LinkRelations -from .coverage import CoverageRecord -from .datasource import DataSource -from .licensing import LicensePoolDeliveryMechanism, RightsStatus -from .measurement import Measurement +from core.model import ( + Base, + PresentationCalculationPolicy, + create, + get_one, + get_one_or_create, +) +from core.model.classification import Classification, Subject +from core.model.constants import IdentifierConstants, LinkRelations +from core.model.coverage import CoverageRecord +from core.model.datasource import DataSource +from core.model.licensing import LicensePoolDeliveryMechanism, RightsStatus +from core.model.measurement import Measurement +from core.util.datetime_helpers import utc_now +from core.util.summary import SummaryEvaluator if TYPE_CHECKING: from core.model import ( # noqa: autoflake @@ -747,7 +753,7 @@ def add_link( fetching, mirroring and scaling Representations as links are created. It might be good to move that code into here. """ - from .resource import Hyperlink, Representation, Resource + from core.model.resource import Hyperlink, Representation, Resource _db = Session.object_session(self) # Find or create the Resource. 
@@ -924,7 +930,7 @@ def classify( def resources_for_identifier_ids( self, _db, identifier_ids, rel=None, data_source=None ): - from .resource import Hyperlink, Resource + from core.model.resource import Hyperlink, Resource resources = ( _db.query(Resource) @@ -956,7 +962,7 @@ def classifications_for_identifier_ids(self, _db, identifier_ids): def best_cover_for(cls, _db, identifier_ids, rel=None): # Find all image resources associated with any of # these identifiers. - from .resource import Hyperlink, Resource + from core.model.resource import Hyperlink, Resource rel = rel or Hyperlink.IMAGE images = cls.resources_for_identifier_ids(_db, identifier_ids, rel) @@ -1116,7 +1122,7 @@ def opds_entry(self): most_recent_update = max(timestamps) quality = Measurement.overall_quality(self.measurements) - from ..opds import AcquisitionFeed + from core.opds import AcquisitionFeed return AcquisitionFeed.minimal_opds_entry( identifier=self, diff --git a/core/model/library.py b/core/model/library.py index 3b8ad88f91..b29b66d428 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -33,18 +33,17 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.functions import func +from core.configuration.library import LibrarySettings +from core.entrypoint import EntryPoint +from core.facets import FacetConstants +from core.model import Base, get_one +from core.model.announcements import Announcement +from core.model.customlist import customlist_sharedlibrary +from core.model.edition import Edition +from core.model.hassessioncache import HasSessionCache from core.model.hybrid import hybrid_property - -from ..configuration.library import LibrarySettings -from ..entrypoint import EntryPoint -from ..facets import FacetConstants -from . import Base, get_one -from .announcements import Announcement -from .customlist import customlist_sharedlibrary -from .edition import Edition -from .hassessioncache import HasSessionCache -from .licensing import LicensePool -from .work import Work +from core.model.licensing import LicensePool +from core.model.work import Work if TYPE_CHECKING: from core.lane import Lane @@ -60,8 +59,6 @@ Patron, ) - from ..lane import Lane - class Library(Base, HasSessionCache): """A library that uses this circulation manager to authenticate @@ -364,7 +361,7 @@ def has_root_lanes(self) -> bool: # a server restart. value = Library._has_root_lane_cache.get(self.id, None) if value is None: - from ..lane import Lane + from core.lane import Lane _db = Session.object_session(self) root_lanes = ( @@ -393,7 +390,7 @@ def restrict_to_ready_deliverable_works( :param show_suppressed: Include titles that have nothing but suppressed LicensePools. """ - from .collection import Collection + from core.model.collection import Collection collection_ids = collection_ids or [ x.id for x in self.all_collections if x.id is not None diff --git a/core/model/licensing.py b/core/model/licensing.py index 6fc0b716eb..29778be6ad 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -14,17 +14,22 @@ from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import or_ +from core.model import Base, flush, get_one, get_one_or_create +from core.model.circulationevent import CirculationEvent +from core.model.constants import ( + DataSourceConstants, + EditionConstants, + LinkRelations, + MediaTypes, +) +from core.model.hassessioncache import HasSessionCache from core.model.hybrid import hybrid_property - -from ..util.datetime_helpers import utc_now -from . 
import Base, flush, get_one, get_one_or_create -from .circulationevent import CirculationEvent -from .constants import DataSourceConstants, EditionConstants, LinkRelations, MediaTypes -from .hassessioncache import HasSessionCache -from .patron import Hold, Loan, Patron +from core.model.patron import Hold, Loan, Patron +from core.util.datetime_helpers import utc_now if TYPE_CHECKING: # Only import for type checking, since it creates an import cycle + from core.analytics import Analytics from core.model import ( # noqa: autoflake Collection, DataSource, @@ -32,8 +37,6 @@ Resource, ) - from ..analytics import Analytics - class PolicyException(Exception): pass @@ -359,9 +362,9 @@ def for_foreign_id( autocreate=True, ) -> Tuple[LicensePool | None, bool]: """Find or create a LicensePool for the given foreign ID.""" - from .collection import CollectionMissing - from .datasource import DataSource - from .identifier import Identifier + from core.model.collection import CollectionMissing + from core.model.datasource import DataSource + from core.model.identifier import Identifier if not collection: raise CollectionMissing() @@ -419,7 +422,7 @@ def for_foreign_id( @classmethod def with_no_work(cls, _db): """Find LicensePools that have no corresponding Work.""" - from .work import Work + from core.model.work import Work return _db.query(LicensePool).outerjoin(Work).filter(Work.id == None).all() @@ -547,7 +550,7 @@ def set_presentation_edition(self, equivalent_editions=None): :return: A boolean explaining whether any of the presentation information associated with this LicensePool actually changed. """ - from .edition import Edition + from core.model.edition import Edition _db = Session.object_session(self) old_presentation_edition = self.presentation_edition @@ -560,7 +563,7 @@ def set_presentation_edition(self, equivalent_editions=None): # Note: We can do a cleaner solution, if we refactor to not use metadata's # methods to update editions. For now, we're choosing to go with the below approach. - from ..metadata_layer import IdentifierData, Metadata, ReplacementPolicy + from core.metadata_layer import IdentifierData, Metadata, ReplacementPolicy if len(all_editions) == 1: # There's only one edition associated with this @@ -1157,7 +1160,7 @@ def calculate_work( from calling set_presentation_edition() and assumes we've already done that work. """ - from .work import Work + from core.model.work import Work if not self.identifier: # A LicensePool with no Identifier should never have a Work. 
@@ -1328,7 +1331,7 @@ def calculate_work( @property def open_access_links(self): """Yield all open-access Resources for this LicensePool.""" - from .identifier import Identifier + from core.model.identifier import Identifier open_access = LinkRelations.OPEN_ACCESS_DOWNLOAD _db = Session.object_session(self) diff --git a/core/model/listeners.py b/core/model/listeners.py index 2b46cfca7a..6148dc9999 100644 --- a/core/model/listeners.py +++ b/core/model/listeners.py @@ -7,18 +7,17 @@ from sqlalchemy import event, text from sqlalchemy.orm import Session +from core.config import Configuration +from core.model import Base from core.model.before_flush_decorator import Listener, ListenerState +from core.model.collection import Collection +from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.identifier import Equivalency, Identifier, RecursiveEquivalencyCache +from core.model.library import Library +from core.model.licensing import LicensePool +from core.model.work import Work, add_work_to_customlists_for_collection from core.query.coverage import EquivalencyCoverageQueries - -from ..config import Configuration -from ..util.datetime_helpers import utc_now -from . import Base -from .collection import Collection -from .configuration import ConfigurationSetting, ExternalIntegration -from .library import Library -from .licensing import LicensePool -from .work import Work, add_work_to_customlists_for_collection +from core.util.datetime_helpers import utc_now site_configuration_has_changed_lock = RLock() diff --git a/core/model/measurement.py b/core/model/measurement.py index 751fe1a52b..59b1a06ac6 100644 --- a/core/model/measurement.py +++ b/core/model/measurement.py @@ -8,11 +8,11 @@ from sqlalchemy import Boolean, Column, DateTime, Float, ForeignKey, Integer, Unicode from sqlalchemy.orm import Mapped, relationship -from . import Base -from .constants import DataSourceConstants +from core.model import Base +from core.model.constants import DataSourceConstants if TYPE_CHECKING: - from .datasource import DataSource + from core.model.datasource import DataSource class Measurement(Base): diff --git a/core/model/patron.py b/core/model/patron.py index 09cdf9e207..3e5a7ee8a5 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -22,20 +22,18 @@ from sqlalchemy.orm import Mapped, relationship from sqlalchemy.orm.session import Session +from core.classifier import Classifier +from core.model import Base, get_one_or_create, numericrange_to_tuple +from core.model.credential import Credential from core.model.hybrid import hybrid_property - -from ..classifier import Classifier -from ..user_profile import ProfileStorage -from ..util.datetime_helpers import utc_now -from . 
import Base, get_one_or_create, numericrange_to_tuple -from .credential import Credential +from core.user_profile import ProfileStorage +from core.util.datetime_helpers import utc_now if TYPE_CHECKING: + from core.model.devicetokens import DeviceToken from core.model.library import Library from core.model.licensing import License, LicensePool, LicensePoolDeliveryMechanism - from .devicetokens import DeviceToken - class LoanAndHoldMixin: @property @@ -327,7 +325,7 @@ def root_lane(self): return None _db = Session.object_session(self) - from ..lane import Lane + from core.lane import Lane qu = ( _db.query(Lane) diff --git a/core/model/resource.py b/core/model/resource.py index 27be842477..4aef497ddf 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -30,17 +30,17 @@ from sqlalchemy.orm import Mapped, backref, relationship from sqlalchemy.orm.session import Session -from ..util.datetime_helpers import utc_now -from ..util.http import HTTP -from . import Base, get_one, get_one_or_create -from .constants import ( +from core.model import Base, get_one, get_one_or_create +from core.model.constants import ( DataSourceConstants, IdentifierConstants, LinkRelations, MediaTypes, ) -from .edition import Edition -from .licensing import LicensePoolDeliveryMechanism +from core.model.edition import Edition +from core.model.licensing import LicensePoolDeliveryMechanism +from core.util.datetime_helpers import utc_now +from core.util.http import HTTP if TYPE_CHECKING: from core.model import CachedMARCFile @@ -75,7 +75,7 @@ class Resource(Base): # Many Works may use this resource (as opposed to other resources # linked to them with rel="description") as their summary. - from .work import Work + from core.model.work import Work summary_works: Mapped[List[Work]] = relationship( "Work", backref="summary", foreign_keys=[Work.summary_id] diff --git a/core/model/work.py b/core/model/work.py index ad55cc256f..f9db4ccd27 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -30,13 +30,9 @@ from sqlalchemy.sql.expression import and_, case, join, literal_column, or_, select from sqlalchemy.sql.functions import func -from core.model.classification import Classification, Subject - -from ..classifier import Classifier, WorkClassifier -from ..config import CannotLoadConfiguration -from ..util import LanguageCodes -from ..util.datetime_helpers import utc_now -from . 
import ( +from core.classifier import Classifier, WorkClassifier +from core.config import CannotLoadConfiguration +from core.model import ( Base, PresentationCalculationPolicy, flush, @@ -45,13 +41,16 @@ numericrange_to_tuple, tuple_to_numericrange, ) -from .constants import DataSourceConstants -from .contributor import Contribution, Contributor -from .coverage import CoverageRecord, WorkCoverageRecord -from .datasource import DataSource -from .edition import Edition -from .identifier import Identifier, RecursiveEquivalencyCache -from .measurement import Measurement +from core.model.classification import Classification, Subject +from core.model.constants import DataSourceConstants +from core.model.contributor import Contribution, Contributor +from core.model.coverage import CoverageRecord, WorkCoverageRecord +from core.model.datasource import DataSource +from core.model.edition import Edition +from core.model.identifier import Identifier, RecursiveEquivalencyCache +from core.model.measurement import Measurement +from core.util import LanguageCodes +from core.util.datetime_helpers import utc_now if sys.version_info >= (3, 11): from typing import Self @@ -360,8 +359,8 @@ def missing_coverage_from( @classmethod def for_unchecked_subjects(cls, _db): - from .classification import Classification, Subject - from .licensing import LicensePool + from core.model.classification import Classification, Subject + from core.model.licensing import LicensePool """Find all Works whose LicensePools have an Identifier that is classified under an unchecked Subject. @@ -389,7 +388,7 @@ def _potential_open_access_works_for_permanent_work_id( Counter tallying the number of affected LicensePools associated with a given work. """ - from .licensing import LicensePool + from core.model.licensing import LicensePool qu = ( _db.query(LicensePool) @@ -616,7 +615,7 @@ def set_summary(self, resource): @classmethod def with_genre(cls, _db, genre): """Find all Works classified under the given genre.""" - from .classification import Genre + from core.model.classification import Genre if isinstance(genre, (bytes, str)): genre, ignore = Genre.lookup(_db, genre) @@ -642,7 +641,7 @@ def from_identifiers(cls, _db, identifiers, base_query=None, policy=None): Identifiers. By default, this method will be very strict about equivalencies. """ - from .licensing import LicensePool + from core.model.licensing import LicensePool identifier_ids = [identifier.id for identifier in identifiers] if not identifier_ids: @@ -675,8 +674,8 @@ def from_identifiers(cls, _db, identifiers, base_query=None, policy=None): @classmethod def reject_covers(cls, _db, works_or_identifiers, search_index_client=None): """Suppresses the currently visible covers of a number of Works""" - from .licensing import LicensePool - from .resource import Hyperlink, Resource + from core.model.licensing import LicensePool + from core.model.resource import Hyperlink, Resource works = list(set(works_or_identifiers)) if not isinstance(works[0], cls): @@ -751,7 +750,7 @@ def all_editions(self, policy=None): determine how far to go when looking for equivalent Identifiers. 
""" - from .licensing import LicensePool + from core.model.licensing import LicensePool _db = Session.object_session(self) identifier_ids_subquery = ( @@ -1174,7 +1173,7 @@ def _ensure(s): return "\n".join(l) def calculate_opds_entries(self, verbose=True): - from ..opds import AcquisitionFeed, Annotator, VerboseAnnotator + from core.opds import AcquisitionFeed, Annotator, VerboseAnnotator _db = Session.object_session(self) simple = AcquisitionFeed.single_entry(_db, self, Annotator, force_create=True) @@ -1187,7 +1186,7 @@ def calculate_opds_entries(self, verbose=True): ) def calculate_marc_record(self): - from ..marc import Annotator, MARCExporter + from core.marc import Annotator, MARCExporter _db = Session.object_session(self) record = MARCExporter.create_record( @@ -1386,7 +1385,7 @@ def assign_genres( def assign_genres_from_weights(self, genre_weights): # Assign WorkGenre objects to the remainder. - from .classification import Genre + from core.model.classification import Genre changed = False _db = Session.object_session(self) @@ -1844,9 +1843,9 @@ def query_to_json_array(query): # This subquery gets Collection IDs for collections # that own more than zero licenses for this book. - from .classification import Genre, Subject - from .customlist import CustomListEntry - from .licensing import LicensePool + from core.model.classification import Genre, Subject + from core.model.customlist import CustomListEntry + from core.model.licensing import LicensePool # We need information about LicensePools for a few reasons: # @@ -2016,7 +2015,7 @@ def explicit_bool(label, t): ) # Normalize by dividing each weight by the sum of the weights for that Identifier's Classifications. - from .classification import Classification + from core.model.classification import Classification weight_column = ( func.sum(Classification.weight) @@ -2204,7 +2203,7 @@ def _restrict_to_customlist_subquery_condition( return qu def classifications_with_genre(self): - from .classification import Classification, Subject + from core.model.classification import Classification, Subject _db = Session.object_session(self) identifier = self.presentation_edition.primary_identifier @@ -2217,7 +2216,7 @@ def classifications_with_genre(self): ) def top_genre(self): - from .classification import Genre + from core.model.classification import Genre _db = Session.object_session(self) genre = ( @@ -2234,7 +2233,7 @@ def delete(self, search_index=None): _db = Session.object_session(self) if search_index is None: try: - from ..external_search import ExternalSearchIndex + from core.external_search import ExternalSearchIndex search_index = ExternalSearchIndex(_db) except CannotLoadConfiguration as e: diff --git a/core/monitor.py b/core/monitor.py index ef73459a4b..79e64c7c63 100644 --- a/core/monitor.py +++ b/core/monitor.py @@ -8,9 +8,9 @@ from sqlalchemy.orm import defer from sqlalchemy.sql.expression import and_, or_ -from .config import Configuration -from .metadata_layer import TimestampData -from .model import ( +from core.config import Configuration +from core.metadata_layer import TimestampData +from core.model import ( Base, CachedFeed, CirculationEvent, @@ -31,8 +31,8 @@ get_one, get_one_or_create, ) -from .model.configuration import ConfigurationSetting -from .util.datetime_helpers import utc_now +from core.model.configuration import ConfigurationSetting +from core.util.datetime_helpers import utc_now if TYPE_CHECKING: from sqlalchemy.orm import Query @@ -942,7 +942,7 @@ class WorkReaper(ReaperMonitor): MODEL_CLASS = Work def 
__init__(self, *args, **kwargs): - from .external_search import ExternalSearchIndex + from core.external_search import ExternalSearchIndex search_index_client = kwargs.pop("search_index_client", None) super().__init__(*args, **kwargs) diff --git a/core/opds.py b/core/opds.py index a429d37b6e..58d5543e5c 100644 --- a/core/opds.py +++ b/core/opds.py @@ -10,13 +10,11 @@ from sqlalchemy.orm import joinedload from sqlalchemy.orm.session import Session +from core.classifier import Classifier +from core.entrypoint import EntryPoint from core.external_search import ExternalSearchIndex, QueryParseException -from core.problem_details import INVALID_INPUT - -from .classifier import Classifier -from .entrypoint import EntryPoint -from .facets import FacetConstants -from .lane import ( +from core.facets import FacetConstants +from core.lane import ( Facets, FacetsWithEntryPoint, FeaturedFacets, @@ -24,7 +22,7 @@ Pagination, SearchFacets, ) -from .model import ( +from core.model import ( CachedFeed, Contributor, DataSource, @@ -36,9 +34,10 @@ Subject, Work, ) -from .util.datetime_helpers import utc_now -from .util.flask_util import OPDSEntryResponse, OPDSFeedResponse -from .util.opds_writer import AtomFeed, OPDSFeed, OPDSMessage +from core.problem_details import INVALID_INPUT +from core.util.datetime_helpers import utc_now +from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse +from core.util.opds_writer import AtomFeed, OPDSFeed, OPDSMessage # Import related models when doing type checking if TYPE_CHECKING: diff --git a/core/opds2_import.py b/core/opds2_import.py index f2128e9841..234834b0e4 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -21,16 +21,13 @@ from webpub_manifest_parser.utils import encode, first_or_default from core.configuration.ignored_identifier import IgnoredIdentifierImporterMixin +from core.coverage import CoverageFailure from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, FormField, ) -from core.model.configuration import ConfigurationSetting, HasExternalIntegration -from core.model.integration import IntegrationConfiguration - -from .coverage import CoverageFailure -from .metadata_layer import ( +from core.metadata_layer import ( CirculationData, ContributorData, FormatData, @@ -39,7 +36,7 @@ Metadata, SubjectData, ) -from .model import ( +from core.model import ( Collection, Contributor, DeliveryMechanism, @@ -55,9 +52,11 @@ Subject, get_one, ) -from .opds_import import OPDSImporter, OPDSImporterSettings, OPDSImportMonitor -from .util.http import BadResponseException -from .util.opds_writer import OPDSFeed +from core.model.configuration import ConfigurationSetting, HasExternalIntegration +from core.model.integration import IntegrationConfiguration +from core.opds_import import OPDSImporter, OPDSImporterSettings, OPDSImportMonitor +from core.util.http import BadResponseException +from core.util.opds_writer import OPDSFeed if TYPE_CHECKING: from webpub_manifest_parser.core import ast as core_ast diff --git a/core/opds_import.py b/core/opds_import.py index 3eeebcfd4a..fb627098d8 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -33,18 +33,17 @@ from api.circulation import CirculationConfigurationMixin from api.selftest import HasCollectionSelfTests +from core.classifier import Classifier +from core.config import IntegrationException +from core.coverage import CoverageFailure +from core.importers import BaseImporterSettings from core.integration.settings import ( BaseSettings, ConfigurationFormItem, 
ConfigurationFormItemType, FormField, ) - -from .classifier import Classifier -from .config import IntegrationException -from .coverage import CoverageFailure -from .importers import BaseImporterSettings -from .metadata_layer import ( +from core.metadata_layer import ( CirculationData, ContributorData, IdentifierData, @@ -55,7 +54,7 @@ SubjectData, TimestampData, ) -from .model import ( +from core.model import ( Collection, CoverageRecord, DataSource, @@ -71,17 +70,17 @@ Subject, get_one, ) -from .model.configuration import HasExternalIntegration -from .monitor import CollectionMonitor -from .selftest import SelfTestResult -from .util.datetime_helpers import datetime_utc, to_utc, utc_now -from .util.http import HTTP, BadResponseException -from .util.opds_writer import OPDSFeed, OPDSMessage -from .util.string_helpers import base64 -from .util.xmlparser import XMLParser +from core.model.configuration import HasExternalIntegration +from core.monitor import CollectionMonitor +from core.selftest import SelfTestResult +from core.util.datetime_helpers import datetime_utc, to_utc, utc_now +from core.util.http import HTTP, BadResponseException +from core.util.opds_writer import OPDSFeed, OPDSMessage +from core.util.string_helpers import base64 +from core.util.xmlparser import XMLParser if TYPE_CHECKING: - from .model import Work + from core.model import Work @overload diff --git a/core/overdrive.py b/core/overdrive.py index ec58f361b1..5513bcb11e 100644 --- a/core/overdrive.py +++ b/core/overdrive.py @@ -15,16 +15,15 @@ from api.circulation import CirculationConfigurationMixin from api.circulation_exceptions import CannotFulfill +from core.config import CannotLoadConfiguration, Configuration +from core.coverage import BibliographicCoverageProvider +from core.importers import BaseImporterSettings from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, FormField, ) - -from .config import CannotLoadConfiguration, Configuration -from .coverage import BibliographicCoverageProvider -from .importers import BaseImporterSettings -from .metadata_layer import ( +from core.metadata_layer import ( CirculationData, ContributorData, FormatData, @@ -34,7 +33,7 @@ Metadata, SubjectData, ) -from .model import ( +from core.model import ( Classification, Collection, Contributor, @@ -51,10 +50,10 @@ Subject, get_one_or_create, ) -from .model.configuration import HasExternalIntegration -from .util.datetime_helpers import strptime_utc, utc_now -from .util.http import HTTP, BadResponseException -from .util.string_helpers import base64 +from core.model.configuration import HasExternalIntegration +from core.util.datetime_helpers import strptime_utc, utc_now +from core.util.http import HTTP, BadResponseException +from core.util.string_helpers import base64 if TYPE_CHECKING: pass diff --git a/core/problem_details.py b/core/problem_details.py index b7cbf1c7c1..492a625d4f 100644 --- a/core/problem_details.py +++ b/core/problem_details.py @@ -1,6 +1,6 @@ from flask_babel import lazy_gettext as _ -from .util.problem_detail import ProblemDetail as pd +from core.util.problem_detail import ProblemDetail as pd # Generic problem detail documents that recapitulate HTTP errors. # call detailed() to add more specific information. 
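For reference, every hunk in this series applies the same normalization: implicit relative imports ("from . import X", "from ..util import Y") become absolute "core."-prefixed imports, and imports that must stay deferred to avoid a cycle (core.model <-> core.lane, core.lane <-> core.external_search) remain inside the functions that need them, now also in absolute form. The sketch below is illustrative only and is not part of the patch; it is self-contained and uses stdlib modules as stand-ins for the hypothetical cyclic core modules.

# Module-level imports in the patched code are written in the absolute form
# ("from core.util.datetime_helpers import utc_now"); the relative forms
# ("from .util.datetime_helpers import utc_now") are removed.
#
# When a module-level import would create a cycle, the import is deferred
# into the function body, mirroring the rewritten method bodies above.

def utc_stamp() -> str:
    # Deferred import: resolved on first call, not at module import time.
    # "datetime" stands in here for a module that would otherwise complete a cycle.
    from datetime import datetime, timezone

    return datetime.now(timezone.utc).isoformat()


if __name__ == "__main__":
    print(utc_stamp())
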
diff --git a/core/scripts.py b/core/scripts.py index be1970570b..0c39c190c5 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -17,19 +17,16 @@ from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound -from core.model.classification import Classification -from core.model.devicetokens import DeviceToken, DeviceTokenTypes -from core.model.patron import Loan -from core.query.customlist import CustomListQueries -from core.search.coverage_remover import RemovesSearchCoverage -from core.util.notifications import PushNotifications - -from .config import CannotLoadConfiguration, Configuration, ConfigurationConstants -from .coverage import CollectionCoverageProviderJob, CoverageProviderProgress -from .external_search import ExternalSearchIndex, Filter, SearchIndexCoverageProvider -from .lane import Lane -from .metadata_layer import TimestampData -from .model import ( +from core.config import CannotLoadConfiguration, Configuration, ConfigurationConstants +from core.coverage import CollectionCoverageProviderJob, CoverageProviderProgress +from core.external_search import ( + ExternalSearchIndex, + Filter, + SearchIndexCoverageProvider, +) +from core.lane import Lane +from core.metadata_layer import TimestampData +from core.model import ( BaseCoverageRecord, CachedFeed, Collection, @@ -54,15 +51,24 @@ get_one_or_create, production_session, ) -from .model.listeners import site_configuration_has_changed -from .monitor import CollectionMonitor, ReaperMonitor -from .opds_import import OPDSImporter, OPDSImportMonitor -from .overdrive import OverdriveCoreAPI -from .service.container import Services, container_instance -from .util import fast_query_count -from .util.datetime_helpers import strptime_utc, utc_now -from .util.personal_names import contributor_name_match_ratio, display_name_to_sort_name -from .util.worker_pools import DatabasePool +from core.model.classification import Classification +from core.model.devicetokens import DeviceToken, DeviceTokenTypes +from core.model.listeners import site_configuration_has_changed +from core.model.patron import Loan +from core.monitor import CollectionMonitor, ReaperMonitor +from core.opds_import import OPDSImporter, OPDSImportMonitor +from core.overdrive import OverdriveCoreAPI +from core.query.customlist import CustomListQueries +from core.search.coverage_remover import RemovesSearchCoverage +from core.service.container import Services, container_instance +from core.util import fast_query_count +from core.util.datetime_helpers import strptime_utc, utc_now +from core.util.notifications import PushNotifications +from core.util.personal_names import ( + contributor_name_match_ratio, + display_name_to_sort_name, +) +from core.util.worker_pools import DatabasePool class Script: @@ -718,7 +724,7 @@ class LaneSweeperScript(LibraryInputScript): """Do something to each lane in a library.""" def process_library(self, library): - from .lane import WorkList + from core.lane import WorkList top_level = WorkList.top_level_for_library(self._db, library) queue = [top_level] diff --git a/core/selftest.py b/core/selftest.py index 0b870bbd43..c1d01f4443 100644 --- a/core/selftest.py +++ b/core/selftest.py @@ -23,11 +23,11 @@ from sqlalchemy.orm import Session -from .model import Collection, ExternalIntegration -from .model.integration import IntegrationConfiguration -from .util.datetime_helpers import utc_now -from .util.http import IntegrationException -from .util.opds_writer import AtomFeed +from core.model import 
Collection, ExternalIntegration +from core.model.integration import IntegrationConfiguration +from core.util.datetime_helpers import utc_now +from core.util.http import IntegrationException +from core.util.opds_writer import AtomFeed if sys.version_info >= (3, 10): from typing import ParamSpec @@ -310,7 +310,7 @@ def store_self_test_results( ) -> None: """Store the results of a self-test in the database.""" integration: Optional[ExternalIntegration] - from .external_search import ExternalSearchIndex + from core.external_search import ExternalSearchIndex if isinstance(self, ExternalSearchIndex): integration = self.search_integration(_db) @@ -341,7 +341,7 @@ def prior_test_results( instance = constructor_method(*args, **kwargs) integration: Optional[ExternalIntegration] - from .external_search import ExternalSearchIndex + from core.external_search import ExternalSearchIndex if isinstance(instance, ExternalSearchIndex): integration = instance.search_integration(_db) diff --git a/core/user_profile.py b/core/user_profile.py index e2c9df0f26..6f744c169a 100644 --- a/core/user_profile.py +++ b/core/user_profile.py @@ -2,7 +2,7 @@ from flask_babel import lazy_gettext as _ -from .problem_details import * +from core.problem_details import * class ProfileController: diff --git a/core/util/__init__.py b/core/util/__init__.py index 95eccfc990..7e7acbbabd 100644 --- a/core/util/__init__.py +++ b/core/util/__init__.py @@ -16,7 +16,7 @@ # For backwards compatibility, import items that were moved to # languages.py -from .languages import LanguageCodes, LookupTable +from core.util.languages import LanguageCodes, LookupTable def batch(iterable, size=1): diff --git a/core/util/cache.py b/core/util/cache.py index a5187be522..986d03e348 100644 --- a/core/util/cache.py +++ b/core/util/cache.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Session -from ..model.datasource import DataSource +from core.model.datasource import DataSource # TODO: Remove this when we drop support for Python 3.9 if sys.version_info >= (3, 10): diff --git a/core/util/flask_util.py b/core/util/flask_util.py index b1eaa678ce..b96f69088f 100644 --- a/core/util/flask_util.py +++ b/core/util/flask_util.py @@ -8,9 +8,9 @@ from lxml import etree from pydantic import BaseModel, Extra -from . 
import problem_detail -from .datetime_helpers import utc_now -from .opds_writer import OPDSFeed +from core.util import problem_detail +from core.util.datetime_helpers import utc_now +from core.util.opds_writer import OPDSFeed def problem_raw(type, status, title, detail=None, instance=None, headers={}): diff --git a/core/util/http.py b/core/util/http.py index b52e16ef85..43707df0e4 100644 --- a/core/util/http.py +++ b/core/util/http.py @@ -12,9 +12,8 @@ import core from core.exceptions import IntegrationException from core.problem_details import INTEGRATION_ERROR - -from .problem_detail import JSON_MEDIA_TYPE as PROBLEM_DETAIL_JSON_MEDIA_TYPE -from .problem_detail import ProblemError +from core.util.problem_detail import JSON_MEDIA_TYPE as PROBLEM_DETAIL_JSON_MEDIA_TYPE +from core.util.problem_detail import ProblemError class RemoteIntegrationException(IntegrationException): diff --git a/core/util/opds_writer.py b/core/util/opds_writer.py index 7340f7778e..c5b36ace4f 100644 --- a/core/util/opds_writer.py +++ b/core/util/opds_writer.py @@ -3,7 +3,7 @@ import pytz from lxml import builder, etree -from .datetime_helpers import utc_now +from core.util.datetime_helpers import utc_now class ElementMaker(builder.ElementMaker): diff --git a/core/util/personal_names.py b/core/util/personal_names.py index 8efcaa2a4c..e65417fcbf 100644 --- a/core/util/personal_names.py +++ b/core/util/personal_names.py @@ -4,7 +4,7 @@ from fuzzywuzzy import fuzz from nameparser import HumanName -from .permanent_work_id import WorkIDCalculator +from core.util.permanent_work_id import WorkIDCalculator """Fallback algorithms for dealing with personal names when VIAF fails us.""" diff --git a/core/util/problem_detail.py b/core/util/problem_detail.py index e2eed52e2c..72b4f507e1 100644 --- a/core/util/problem_detail.py +++ b/core/util/problem_detail.py @@ -11,7 +11,7 @@ from flask_babel import LazyString from pydantic import BaseModel -from ..exceptions import BaseError +from core.exceptions import BaseError JSON_MEDIA_TYPE = "application/api-problem+json" diff --git a/core/util/summary.py b/core/util/summary.py index ee1a2371b0..0e39f1d578 100644 --- a/core/util/summary.py +++ b/core/util/summary.py @@ -5,7 +5,7 @@ from textblob import TextBlob from textblob.exceptions import MissingCorpusError -from . 
import Bigrams, english_bigrams +from core.util import Bigrams, english_bigrams class SummaryEvaluator: diff --git a/core/util/titles.py b/core/util/titles.py index 21cdf55f4e..b2b85af0b2 100644 --- a/core/util/titles.py +++ b/core/util/titles.py @@ -2,7 +2,7 @@ from fuzzywuzzy import fuzz -from .permanent_work_id import WorkIDCalculator +from core.util.permanent_work_id import WorkIDCalculator def normalize_title_for_matching(title): diff --git a/tests/api/test_annotations.py b/tests/api/test_annotations.py index a185067f0d..eae2d7547c 100644 --- a/tests/api/test_annotations.py +++ b/tests/api/test_annotations.py @@ -9,8 +9,7 @@ from api.problem_details import * from core.model import Annotation, create from core.util.datetime_helpers import utc_now - -from ..fixtures.api_controller import ControllerFixture +from tests.fixtures.api_controller import ControllerFixture class AnnotationFixture: diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index a4b0f5638a..52930fdb5f 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -63,18 +63,17 @@ from core.util.datetime_helpers import utc_now from core.util.http import IntegrationException, RemoteIntegrationException from core.util.problem_detail import ProblemDetail - -from ..fixtures.announcements import AnnouncementFixture -from ..fixtures.library import LibraryFixture +from tests.fixtures.announcements import AnnouncementFixture +from tests.fixtures.library import LibraryFixture if TYPE_CHECKING: - from ..fixtures.api_controller import ControllerFixture - from ..fixtures.authenticator import ( + from tests.fixtures.api_controller import ControllerFixture + from tests.fixtures.authenticator import ( CreateAuthIntegrationFixture, MilleniumAuthIntegrationFixture, ) - from ..fixtures.database import DatabaseTransactionFixture - from ..fixtures.vendor_id import VendorIDFixture + from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.vendor_id import VendorIDFixture class MockBasic(BasicAuthenticationProvider): diff --git a/tests/api/test_axis.py b/tests/api/test_axis.py index 96e8d3cedc..706a98f6ae 100644 --- a/tests/api/test_axis.py +++ b/tests/api/test_axis.py @@ -64,13 +64,12 @@ from core.util.http import RemoteIntegrationException from core.util.problem_detail import ProblemDetail, ProblemError from tests.api.mockapi.axis import MockAxis360API - -from ..fixtures.library import LibraryFixture +from tests.fixtures.library import LibraryFixture if TYPE_CHECKING: - from ..fixtures.api_axis_files import AxisFilesFixture - from ..fixtures.authenticator import SimpleAuthIntegrationFixture - from ..fixtures.database import DatabaseTransactionFixture + from tests.fixtures.api_axis_files import AxisFilesFixture + from tests.fixtures.authenticator import SimpleAuthIntegrationFixture + from tests.fixtures.database import DatabaseTransactionFixture class Axis360Fixture: diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index 69a3826ffe..965bed8b5e 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -42,10 +42,9 @@ MockCirculationAPI, MockRemoteAPI, ) - -from ..fixtures.api_bibliotheca_files import BibliothecaFilesFixture -from ..fixtures.database import DatabaseTransactionFixture -from ..fixtures.library import LibraryFixture +from tests.fixtures.api_bibliotheca_files import BibliothecaFilesFixture +from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.library import 
LibraryFixture class CirculationAPIFixture: diff --git a/tests/api/test_kansas_patron.py b/tests/api/test_kansas_patron.py index d4bd906965..7a4a50b809 100644 --- a/tests/api/test_kansas_patron.py +++ b/tests/api/test_kansas_patron.py @@ -7,9 +7,8 @@ from api.authentication.base import PatronData from api.authentication.basic import BasicAuthProviderLibrarySettings from api.kansas_patron import KansasAuthenticationAPI, KansasAuthSettings - -from ..fixtures.api_kansas_files import KansasPatronFilesFixture -from ..fixtures.database import DatabaseTransactionFixture +from tests.fixtures.api_kansas_files import KansasPatronFilesFixture +from tests.fixtures.database import DatabaseTransactionFixture class MockResponse: diff --git a/tests/api/test_novelist.py b/tests/api/test_novelist.py index 8ec8bad0e5..0fa3e25602 100644 --- a/tests/api/test_novelist.py +++ b/tests/api/test_novelist.py @@ -9,9 +9,8 @@ from core.model import DataSource, ExternalIntegration, Identifier from core.util.http import HTTP from tests.core.mock import DummyHTTPClient, MockRequestsResponse - -from ..fixtures.api_novelist_files import NoveListFilesFixture -from ..fixtures.database import DatabaseTransactionFixture +from tests.fixtures.api_novelist_files import NoveListFilesFixture +from tests.fixtures.database import DatabaseTransactionFixture class NoveListFixture: diff --git a/tests/api/test_odilo.py b/tests/api/test_odilo.py index 6cb50703e9..d4db703055 100644 --- a/tests/api/test_odilo.py +++ b/tests/api/test_odilo.py @@ -33,9 +33,9 @@ from tests.core.mock import MockRequestsResponse if TYPE_CHECKING: - from ..fixtures.api_odilo_files import OdiloFilesFixture - from ..fixtures.authenticator import SimpleAuthIntegrationFixture - from ..fixtures.database import DatabaseTransactionFixture + from tests.fixtures.api_odilo_files import OdiloFilesFixture + from tests.fixtures.authenticator import SimpleAuthIntegrationFixture + from tests.fixtures.database import DatabaseTransactionFixture class OdiloFixture: diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index bb9caf8b4e..c54acd9f19 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -43,14 +43,13 @@ from core.util.datetime_helpers import datetime_utc, utc_now from tests.api.mockapi.overdrive import MockOverdriveAPI from tests.core.mock import DummyHTTPClient, MockRequestsResponse - -from ..fixtures.database import DatabaseTransactionFixture -from ..fixtures.library import LibraryFixture +from tests.fixtures.database import DatabaseTransactionFixture +from tests.fixtures.library import LibraryFixture if TYPE_CHECKING: - from ..fixtures.api_overdrive_files import OverdriveAPIFilesFixture - from ..fixtures.authenticator import SimpleAuthIntegrationFixture - from ..fixtures.time import Time + from tests.fixtures.api_overdrive_files import OverdriveAPIFilesFixture + from tests.fixtures.authenticator import SimpleAuthIntegrationFixture + from tests.fixtures.time import Time class OverdriveAPIFixture: diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index e1d2c5a8bb..fcae09a314 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -20,9 +20,8 @@ from core.model.collection import Collection from core.model.constants import IdentifierType from core.opds2_import import OPDS2Importer, RWPMManifestParser - -from ..fixtures.database import DatabaseTransactionFixture -from ..fixtures.opds2_files import OPDS2FilesFixture +from tests.fixtures.database import 
DatabaseTransactionFixture +from tests.fixtures.opds2_files import OPDS2FilesFixture class OPDS2Test: From 19d490ab674d6c54eafc113625981e487ea56839 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 3 Oct 2023 15:07:17 +0530 Subject: [PATCH 078/262] PP-482 Notifications logs (#1419) * Added additional logs to the hold notifications * Additional logging for all notification methods * Standard logging for classes --- core/util/log.py | 9 +++++++++ core/util/notifications.py | 23 ++++++++++++++++++++++- 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/core/util/log.py b/core/util/log.py index 40a5af45d3..69cc930be9 100644 --- a/core/util/log.py +++ b/core/util/log.py @@ -1,4 +1,5 @@ import functools +import logging import time from contextlib import contextmanager from typing import Callable, Optional @@ -55,3 +56,11 @@ def elapsed_time_logging( toc = time.perf_counter() elapsed_time = toc - tic log_method(f"{prefix}Completed. (elapsed time: {elapsed_time:0.4f} seconds)") + + +class LoggerMixin: + """Mixin that adds a standardized logger""" + + @classmethod + def logger(cls): + return logging.getLogger(f"{cls.__module__}.{cls.__name__}") diff --git a/core/util/notifications.py b/core/util/notifications.py index 00a49246bb..f9e191c615 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -14,12 +14,13 @@ from core.model.identifier import Identifier from core.model.patron import Hold, Loan, Patron from core.model.work import Work +from core.util.log import LoggerMixin if TYPE_CHECKING: from firebase_admin.messaging import SendResponse -class PushNotifications: +class PushNotifications(LoggerMixin): # Should be set to true while unit testing TESTING_MODE = False _fcm_app = None @@ -82,6 +83,9 @@ def send_loan_expiry_message( if loan.patron.authorization_identifier: data["authorization_identifier"] = loan.patron.authorization_identifier + cls.logger().info( + f"Patron {loan.patron.authorization_identifier} has {len(tokens)} device tokens." + ) for token in tokens: msg = messaging.Message( token=token.device_token, @@ -89,6 +93,9 @@ def send_loan_expiry_message( data=data, ) resp = messaging.send(msg, dry_run=cls.TESTING_MODE, app=cls.fcm_app()) + cls.logger().info( + f"Sent loan expiry notification for {loan.patron.authorization_identifier} ID: {resp}" + ) responses.append(resp) return responses @@ -115,6 +122,10 @@ def send_activity_sync_message(cls, patrons: list[Patron]) -> list[str]: if patron.authorization_identifier: data["authorization_identifier"] = patron.authorization_identifier + cls.logger().info( + f"Must sync patron activity for {patron.authorization_identifier}, has {len(tokens)} device tokens." + ) + for token in tokens: msg = messaging.Message( token=token.device_token, @@ -124,6 +135,9 @@ def send_activity_sync_message(cls, patrons: list[Patron]) -> list[str]: batch: messaging.BatchResponse = messaging.send_all( msgs, dry_run=cls.TESTING_MODE, app=cls.fcm_app() ) + cls.logger().info( + f"Activity Sync Notifications: Successes {batch.success_count}, failures {batch.failure_count}." + ) return [resp.message_id for resp in batch.responses] @classmethod @@ -137,6 +151,10 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: url = cls.base_url(_db) for hold in holds: tokens = cls.notifiable_tokens(hold.patron) + cls.logger().info( + f"Notifying patron {hold.patron.authorization_identifier or hold.patron.username} for hold: {hold.work.title}. 
" + f"Patron has {len(tokens)} device tokens." + ) loans_api = f"{url}/{hold.patron.library.short_name}/loans" work: Work = hold.work identifier: Identifier = hold.license_pool.identifier @@ -164,4 +182,7 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: batch: messaging.BatchResponse = messaging.send_all( msgs, dry_run=cls.TESTING_MODE, app=cls.fcm_app() ) + cls.logger().info( + f"Hold Notifications: Successes {batch.success_count}, failures {batch.failure_count}." + ) return [resp.message_id for resp in batch.responses] From 922fa7b889975619f5e978e6705b019ea5afe632 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Oct 2023 10:15:51 +0000 Subject: [PATCH 079/262] Bump werkzeug from 2.3.7 to 3.0.0 (#1422) --- api/admin/controller/sign_in.py | 4 ++-- api/admin/controller/view.py | 9 ++++++--- api/authenticator.py | 2 +- poetry.lock | 8 ++++---- pyproject.toml | 2 +- tests/api/test_controller_opdsfeed.py | 6 ++++-- 6 files changed, 18 insertions(+), 13 deletions(-) diff --git a/api/admin/controller/sign_in.py b/api/admin/controller/sign_in.py index a137c3cf47..e61aa2e002 100644 --- a/api/admin/controller/sign_in.py +++ b/api/admin/controller/sign_in.py @@ -2,12 +2,12 @@ import logging from typing import Tuple +from urllib.parse import urlsplit import flask from flask import Response, redirect, url_for from flask_babel import lazy_gettext as _ from werkzeug import Response as WerkzeugResponse -from werkzeug.urls import BaseURL, url_parse from api.admin.config import Configuration as AdminClientConfig from api.admin.controller.base import AdminController @@ -153,7 +153,7 @@ def _check_redirect(target: str) -> Tuple[bool, str]: we extract the URL path and forbid redirecting to external hosts. """ - redirect_url: BaseURL = url_parse(target) + redirect_url = urlsplit(target) # If the redirect isn't asking for a particular host, then it's safe. if redirect_url.netloc in (None, ""): diff --git a/api/admin/controller/view.py b/api/admin/controller/view.py index bf0f071555..6683ee8e74 100644 --- a/api/admin/controller/view.py +++ b/api/admin/controller/view.py @@ -1,9 +1,10 @@ from __future__ import annotations +from urllib.parse import quote_plus + import flask from flask import Response, redirect, url_for from flask_babel import lazy_gettext as _ -from werkzeug.urls import url_quote_plus from api.admin.config import Configuration as AdminClientConfig from api.admin.controller.base import AdminController @@ -25,10 +26,12 @@ def __call__(self, collection, book, path=None): redirect_url = flask.request.url if collection: redirect_url = redirect_url.replace( - collection, url_quote_plus(collection) + collection, quote_plus(collection, safe="()") ) if book: - redirect_url = redirect_url.replace(book, url_quote_plus(book)) + redirect_url = redirect_url.replace( + book, quote_plus(book, safe="()") + ) return redirect( url_for("admin_sign_in", redirect=redirect_url, _external=True) ) diff --git a/api/authenticator.py b/api/authenticator.py index 2a334cd238..41f749ede3 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -464,7 +464,7 @@ def authenticated_patron( ProblemDetail if an error occurs. """ provider: AuthenticationProvider | None = None - provider_token: Dict[str, str] | str | None = None + provider_token: Dict[str, str | None] | str | None = None if self.basic_auth_provider and auth.type.lower() == "basic": # The patron wants to authenticate with the # BasicAuthenticationProvider. 
diff --git a/poetry.lock b/poetry.lock index a0825b41cd..85607ef213 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4436,13 +4436,13 @@ test = ["websockets"] [[package]] name = "werkzeug" -version = "2.3.7" +version = "3.0.0" description = "The comprehensive WSGI web application library." optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-2.3.7-py3-none-any.whl", hash = "sha256:effc12dba7f3bd72e605ce49807bbe692bd729c3bb122a3b91747a6ae77df528"}, - {file = "werkzeug-2.3.7.tar.gz", hash = "sha256:2b8c0e447b4b9dbcc85dd97b6eeb4dcbaf6c8b6c3be0bd654e25553e0a2157d8"}, + {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"}, + {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"}, ] [package.dependencies] @@ -4652,4 +4652,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "c2b590a718bdfee2f46331e169fbbeaed5458e659fdb77aaffb24b8dc8865264" +content-hash = "988bd03fe15b58ba0f02c5484e32563399325876828d2f1c68b739371ab56e06" diff --git a/pyproject.toml b/pyproject.toml index c43b4c3b5a..b6240e13ea 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -226,7 +226,7 @@ urllib3 = "~1.26.14" uWSGI = "~2.0.21" watchtower = "3.0.1" # watchtower is for Cloudwatch logging integration wcag-contrast-ratio = "0.9" -Werkzeug = "^2.2.3" +Werkzeug = "^3.0.0" [tool.poetry.group.ci.dependencies] dunamai = "^1.16" diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/test_controller_opdsfeed.py index 22a21471f8..585b2a4315 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/test_controller_opdsfeed.py @@ -1,10 +1,10 @@ import json from typing import Any, Dict from unittest.mock import MagicMock +from urllib.parse import quote_plus import feedparser from flask import url_for -from werkzeug.urls import url_quote_plus from api.controller import CirculationManager from api.lanes import HasSeriesFacets, JackpotFacets, JackpotWorkList @@ -148,7 +148,9 @@ def test_feed( last_item.sort_author, last_item.id, ] - expect = "key=%s" % url_quote_plus(json.dumps(expected_pagination_key)) + expect = "key=%s" % quote_plus( + json.dumps(expected_pagination_key), safe="," + ) assert expect in next_link search_link = by_rel["search"] From 8794e72d4cbd8965541b1851f46ee5c43a2c5423 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 3 Oct 2023 11:18:58 +0000 Subject: [PATCH 080/262] Bump urllib3 from 1.26.16 to 1.26.17 (#1425) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 85607ef213..7f698ae7d9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4347,17 +4347,17 @@ files = [ [[package]] name = "urllib3" -version = "1.26.16" +version = "1.26.17" description = "HTTP library with thread-safe connection pooling, file post, and more." 
optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.16-py2.py3-none-any.whl", hash = "sha256:8d36afa7616d8ab714608411b4a3b13e58f463aee519024578e062e141dce20f"}, - {file = "urllib3-1.26.16.tar.gz", hash = "sha256:8f135f6502756bde6b2a9b28989df5fbe87c9970cecaa69041edcce7f0589b14"}, + {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"}, + {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"}, ] [package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +brotli = ["brotli (==1.0.9)", "brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)", "urllib3-secure-extra"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] @@ -4652,4 +4652,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "988bd03fe15b58ba0f02c5484e32563399325876828d2f1c68b739371ab56e06" +content-hash = "968a965763de22669aa48e98378805806e8adab6f7acbb1148bad53b0ebb2d6c" diff --git a/pyproject.toml b/pyproject.toml index b6240e13ea..4a87b109a3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -222,7 +222,7 @@ types-pyyaml = "^6.0.12.9" typing_extensions = {version = "^4.5.0", python = "<3.11"} unicodecsv = "0.14.1" # this is used, but can probably be removed on py3 uritemplate = "4.1.1" -urllib3 = "~1.26.14" +urllib3 = "^1.26.17" uWSGI = "~2.0.21" watchtower = "3.0.1" # watchtower is for Cloudwatch logging integration wcag-contrast-ratio = "0.9" From c0aa504f11a5dcba30669e47c0a70f9a4c548926 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 3 Oct 2023 09:55:27 -0300 Subject: [PATCH 081/262] Use LoggerMixin (#1426) * Use new logging mixin more widely. * Add support for python 3.8. * Add some comments. --- api/admin/config.py | 8 ++---- api/admin/controller/integration_settings.py | 5 ++-- api/authentication/base.py | 7 ++--- api/authentication/basic.py | 3 --- api/authenticator.py | 11 +++----- api/opds2.py | 10 +++----- api/saml/configuration/model.py | 8 ++---- api/selftest.py | 5 ---- core/app_server.py | 5 ++-- core/integration/settings.py | 9 ++----- core/selftest.py | 8 ++---- core/service/storage/s3.py | 7 +++-- core/util/log.py | 27 ++++++++++++++++++-- 13 files changed, 49 insertions(+), 64 deletions(-) diff --git a/api/admin/config.py b/api/admin/config.py index 789d8c3ad5..e9cce39b10 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -1,4 +1,3 @@ -import logging import os from enum import Enum from typing import Optional @@ -7,6 +6,7 @@ from requests import RequestException from core.util.http import HTTP, RequestNetworkException +from core.util.log import LoggerMixin class OperationalMode(str, Enum): @@ -14,7 +14,7 @@ class OperationalMode(str, Enum): development = "development" -class Configuration: +class Configuration(LoggerMixin): APP_NAME = "Palace Collection Manager" PACKAGE_NAME = "@thepalaceproject/circulation-admin" PACKAGE_VERSION = "1.10.0" @@ -62,10 +62,6 @@ def operational_mode(cls) -> OperationalMode: else OperationalMode.production ) - @classmethod - def logger(cls) -> logging.Logger: - return logging.getLogger(f"{cls.__module__}.{cls.__name__}") - @classmethod def package_name(cls) -> str: """Get the effective package name. 
diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py index b1e8840f2f..804ae73220 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -1,5 +1,4 @@ import json -import logging from abc import ABC, abstractmethod from typing import Any, Dict, Generic, List, NamedTuple, Optional, Type, TypeVar @@ -28,6 +27,7 @@ ) from core.problem_details import INTERNAL_SERVER_ERROR, INVALID_INPUT from core.util.cache import memoize +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemError T = TypeVar("T", bound=HasIntegrationConfiguration) @@ -44,7 +44,7 @@ class ChangedLibrariesTuple(NamedTuple): removed: List[IntegrationLibraryConfiguration] -class IntegrationSettingsController(ABC, Generic[T]): +class IntegrationSettingsController(ABC, Generic[T], LoggerMixin): def __init__( self, manager: CirculationManager, @@ -52,7 +52,6 @@ def __init__( ): self._db = manager._db self.registry = registry or self.default_registry() - self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") @abstractmethod def default_registry(self) -> IntegrationRegistry[T]: diff --git a/api/authentication/base.py b/api/authentication/base.py index 0ffa125f86..9624ed4a13 100644 --- a/api/authentication/base.py +++ b/api/authentication/base.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging from abc import ABC, abstractmethod from money import Money @@ -16,6 +15,7 @@ from core.selftest import HasSelfTestsIntegrationConfiguration from core.util.authentication_for_opds import OPDSAuthenticationFlow from core.util.datetime_helpers import utc_now +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail @@ -31,6 +31,7 @@ class AuthenticationProvider( OPDSAuthenticationFlow, HasLibraryIntegrationConfiguration, HasSelfTestsIntegrationConfiguration, + LoggerMixin, ABC, ): """Handle a specific patron authentication scheme.""" @@ -47,10 +48,6 @@ def __init__( self.integration_id = integration_id self.analytics = analytics - @classmethod - def logger(cls) -> logging.Logger: - return logging.getLogger(f"{cls.__module__}.{cls.__name__}") - def library(self, _db: Session) -> Library | None: return Library.by_id(_db, self.library_id) diff --git a/api/authentication/basic.py b/api/authentication/basic.py index 06e40f84a9..da98cd989e 100644 --- a/api/authentication/basic.py +++ b/api/authentication/basic.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging import re from abc import ABC, abstractmethod from enum import Enum @@ -278,8 +277,6 @@ def __init__( library_id, integration_id, settings, library_settings, analytics ) - self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") - self.identifier_re = settings.identifier_regular_expression self.password_re = settings.password_regular_expression self.test_username = settings.test_identifier diff --git a/api/authenticator.py b/api/authenticator.py index 41f749ede3..6d8b358a5b 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -33,7 +33,7 @@ from core.user_profile import ProfileController from core.util.authentication_for_opds import AuthenticationForOPDSDocument from core.util.http import RemoteIntegrationException -from core.util.log import elapsed_time_logging +from core.util.log import LoggerMixin, elapsed_time_logging from core.util.problem_detail import ProblemDetail, ProblemError if sys.version_info >= (3, 11): @@ -82,16 +82,13 @@ def 
profile_document(self): return doc -class Authenticator: +class Authenticator(LoggerMixin): """Route requests to the appropriate LibraryAuthenticator.""" def __init__( self, _db, libraries: Iterable[Library], analytics: Analytics | None = None ): # Create authenticators - self.log = logging.getLogger( - f"{self.__class__.__module__}.{self.__class__.__name__}" - ) self.library_authenticators: dict[str, LibraryAuthenticator] = {} self.populate_authenticators(_db, libraries, analytics) @@ -145,7 +142,7 @@ def decode_bearer_token(self, *args, **kwargs): return self.invoke_authenticator_method("decode_bearer_token", *args, **kwargs) -class LibraryAuthenticator: +class LibraryAuthenticator(LoggerMixin): """Use the registered AuthenticationProviders to turn incoming credentials into Patron objects. """ @@ -264,8 +261,6 @@ def __init__( if basic_auth_provider: self.register_basic_auth_provider(basic_auth_provider) - self.log = logging.getLogger("Authenticator") - if saml_providers: for provider in saml_providers: self.saml_providers_by_name[provider.label()] = provider diff --git a/api/opds2.py b/api/opds2.py index 9f26dddf4e..f52f39efc1 100644 --- a/api/opds2.py +++ b/api/opds2.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging from typing import TYPE_CHECKING from flask import url_for @@ -17,6 +16,7 @@ from core.opds2 import OPDS2Annotator from core.problem_details import INVALID_CREDENTIALS from core.util.http import HTTP +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: @@ -85,7 +85,9 @@ def feed_links(self): ] -class TokenAuthenticationFulfillmentProcessor(CirculationFulfillmentPostProcessor): +class TokenAuthenticationFulfillmentProcessor( + CirculationFulfillmentPostProcessor, LoggerMixin +): """In case a feed has a token auth endpoint and the content_link requires an authentication token Then we must fetch the required authentication token from the token_auth endpoint and expand the templated url with the received token. 
@@ -94,10 +96,6 @@ class TokenAuthenticationFulfillmentProcessor(CirculationFulfillmentPostProcesso def __init__(self, collection) -> None: pass - @classmethod - def logger(cls) -> logging.Logger: - return logging.getLogger(f"{cls.__module__}.{cls.__name__}") - def fulfill( self, patron: Patron, diff --git a/api/saml/configuration/model.py b/api/saml/configuration/model.py index c4e0c6b7c0..d67239be7c 100644 --- a/api/saml/configuration/model.py +++ b/api/saml/configuration/model.py @@ -1,5 +1,4 @@ import html -import logging from datetime import datetime from enum import Enum from threading import Lock @@ -39,6 +38,7 @@ ) from core.python_expression_dsl.evaluator import DSLEvaluationVisitor, DSLEvaluator from core.python_expression_dsl.parser import DSLParser +from core.util.log import LoggerMixin class SAMLConfigurationError(BaseError): @@ -92,7 +92,7 @@ def _fetch(db: Session) -> Dict[Union[Enum, str], str]: return {entity_id: label for entity_id, label in identity_providers} -class SAMLWebSSOAuthSettings(AuthProviderSettings): +class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): """SAML Web SSO Authentication settings""" service_provider_xml_metadata: str = FormField( @@ -276,10 +276,6 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings): alias="debug", ) - @classmethod - def logger(cls): - return logging.getLogger(f"{cls.__module__}.{cls.__name__}") - @classmethod def validate_xml_metadata(cls, v: str, metadata_type: str): metadata_parser = SAMLMetadataParser() diff --git a/api/selftest.py b/api/selftest.py index 02bc5e3adb..c3eb5e5bef 100644 --- a/api/selftest.py +++ b/api/selftest.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging from abc import ABC from typing import Generator, Iterable, Optional, Tuple, Union @@ -132,10 +131,6 @@ class HasCollectionSelfTests(HasSelfTestsIntegrationConfiguration, HasPatronSelf def integration(self, _db: Session) -> IntegrationConfiguration | None: return self.collection.integration_configuration - @classmethod - def logger(cls) -> logging.Logger: - return logging.Logger(cls.__name__) - def _no_delivery_mechanisms_test(self): # Find works in the tested collection that have no delivery # mechanisms. diff --git a/core/app_server.py b/core/app_server.py index 7fbe03ce0d..1a604bac9f 100644 --- a/core/app_server.py +++ b/core/app_server.py @@ -2,7 +2,6 @@ from __future__ import annotations import gzip -import logging import sys import traceback from functools import wraps @@ -22,6 +21,7 @@ from core.model import Identifier from core.problem_details import * from core.service.logging.configuration import LogLevel +from core.util.log import LoggerMixin from core.util.opds_writer import OPDSMessage from core.util.problem_detail import ProblemDetail @@ -169,7 +169,7 @@ def compress(response): return compressor -class ErrorHandler: +class ErrorHandler(LoggerMixin): def __init__(self, app: PalaceFlask, log_level: LogLevel): """Constructor. @@ -178,7 +178,6 @@ def __init__(self, app: PalaceFlask, log_level: LogLevel): """ self.app = app self.debug = log_level == LogLevel.debug - self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") def handle(self, exception: Exception) -> Response | HTTPException: """Something very bad has happened. 
Notify the client.""" diff --git a/core/integration/settings.py b/core/integration/settings.py index 15f22c099e..49aaa2b5a1 100644 --- a/core/integration/settings.py +++ b/core/integration/settings.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging from dataclasses import dataclass from enum import Enum from typing import ( @@ -29,6 +28,7 @@ INCOMPLETE_CONFIGURATION, INVALID_CONFIGURATION_OPTION, ) +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail, ProblemError if TYPE_CHECKING: @@ -244,7 +244,7 @@ def to_dict( return self.weight, form_entry -class BaseSettings(BaseModel): +class BaseSettings(BaseModel, LoggerMixin): """ Base class for all our database backed pydantic settings classes @@ -309,11 +309,6 @@ class Config: # not the alias. allow_population_by_field_name = True - @classmethod - def logger(cls) -> logging.Logger: - """Get the logger for this class""" - return logging.getLogger(f"{cls.__module__}.{cls.__name__}") - @classmethod def configuration_form(cls, db: Session) -> List[Dict[str, Any]]: """Get the configuration dictionary for this class""" diff --git a/core/selftest.py b/core/selftest.py index c1d01f4443..a01c16c2c1 100644 --- a/core/selftest.py +++ b/core/selftest.py @@ -27,6 +27,7 @@ from core.model.integration import IntegrationConfiguration from core.util.datetime_helpers import utc_now from core.util.http import IntegrationException +from core.util.log import LoggerMixin from core.util.opds_writer import AtomFeed if sys.version_info >= (3, 10): @@ -371,7 +372,7 @@ def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: return None -class HasSelfTestsIntegrationConfiguration(BaseHasSelfTests, ABC): +class HasSelfTestsIntegrationConfiguration(BaseHasSelfTests, LoggerMixin, ABC): # Typing specific collection: Any @@ -424,8 +425,3 @@ def prior_test_results( @abstractmethod def integration(self, _db: Session) -> Optional[IntegrationConfiguration]: ... - - @classmethod - @abstractmethod - def logger(cls) -> logging.Logger: - ... diff --git a/core/service/storage/s3.py b/core/service/storage/s3.py index e73c428004..65cb5539ba 100644 --- a/core/service/storage/s3.py +++ b/core/service/storage/s3.py @@ -12,6 +12,7 @@ from botocore.exceptions import BotoCoreError, ClientError from core.config import CannotLoadConfiguration +from core.util.log import LoggerMixin if sys.version_info >= (3, 11): from typing import Self @@ -29,7 +30,7 @@ class MultipartS3UploadPart: PartNumber: int -class MultipartS3ContextManager: +class MultipartS3ContextManager(LoggerMixin): def __init__( self, client: S3Client, @@ -43,7 +44,6 @@ def __init__( self.bucket = bucket self.part_number = 1 self.parts: List[MultipartS3UploadPart] = [] - self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") self.media_type = media_type self.upload: Optional[CreateMultipartUploadOutputTypeDef] = None self.upload_id: Optional[str] = None @@ -137,7 +137,7 @@ def exception(self) -> Optional[BaseException]: return self._exception -class S3Service: +class S3Service(LoggerMixin): def __init__( self, client: S3Client, @@ -149,7 +149,6 @@ def __init__( self.region = region self.bucket = bucket self.url_template = url_template - self.log = logging.getLogger(f"{self.__module__}.{self.__class__.__name__}") # Validate the URL template. 
formatter = Formatter() diff --git a/core/util/log.py b/core/util/log.py index 69cc930be9..2ac04a4220 100644 --- a/core/util/log.py +++ b/core/util/log.py @@ -1,5 +1,6 @@ import functools import logging +import sys import time from contextlib import contextmanager from typing import Callable, Optional @@ -58,9 +59,31 @@ def elapsed_time_logging( log_method(f"{prefix}Completed. (elapsed time: {elapsed_time:0.4f} seconds)") +# Once we drop python 3.8 this can go away +if sys.version_info >= (3, 9): + cache_decorator = functools.cache +else: + cache_decorator = functools.lru_cache + + class LoggerMixin: - """Mixin that adds a standardized logger""" + """Mixin that adds a logger with a standardized name""" @classmethod - def logger(cls): + @cache_decorator + def logger(cls) -> logging.Logger: + """ + Returns a logger named after the module and name of the class. + + This is cached so that we don't create a new logger every time + it is called. + """ return logging.getLogger(f"{cls.__module__}.{cls.__name__}") + + @property + def log(self) -> logging.Logger: + """ + A convenience property that returns the logger for the class, + so it is easier to access the logger from an instance. + """ + return self.logger() From 2d8603b0ef2f5ec1974d908e72585942c871c5dc Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 4 Oct 2023 14:09:27 +0530 Subject: [PATCH 082/262] Fixed the OPDS facetGroupType attribute namespace (#1432) --- core/feed/serializer/opds.py | 1 + 1 file changed, 1 insertion(+) diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index 8a1f25146b..a803ee4439 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -38,6 +38,7 @@ "rights": f"{{{OPDSFeed.DCTERMS_NS}}}rights", "ProviderName": f"{{{OPDSFeed.BIBFRAME_NS}}}ProviderName", "facetGroup": f"{{{OPDSFeed.OPDS_NS}}}facetGroup", + "facetGroupType": f"{{{OPDSFeed.SIMPLIFIED_NS}}}facetGroupType", "activeFacet": f"{{{OPDSFeed.OPDS_NS}}}activeFacet", "ratingValue": f"{{{OPDSFeed.SCHEMA_NS}}}ratingValue", } From 8dafd1c33bef74517f4b73f3c8e567c56a65ff26 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 4 Oct 2023 10:07:46 -0300 Subject: [PATCH 083/262] OPDS Importer Base Classes (PP-500) (#1428) Refactor the OPDS importer class hierarchy so that we have some base classes, that the specific importers implement. There are two new base classes: BaseOPDSImporter and BaseODLImporter. These classes contain all of the shared functionality across OPDSImporter, OPDS2Importer, ODLImporter, ODL2Importer and OPDSForDistributorsImporter, so its a little easier to see what it overridden and when looking at the classes. This also tried to make the tests for ODL and ODL2 a bit easier to understand by not having them inherit from each other and instead use a parameterized fixture that picks the correct API for the tests. This refactor is setting up for PP-502 to replace external integrations with configurationsettings in the collection integrations. 
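For orientation, the resulting importer hierarchy can be sketched roughly as follows. This is an illustrative outline only, not part of the patch itself: mixin bases and all method bodies are elided, the parent classes follow the class statements in the diff below, and OPDSImporter's placement is inferred from the description above rather than from an explicit line in this excerpt.

# Illustrative sketch only -- not part of the patch below.
# Bases follow the class statements in the diff; mixins such as
# CirculationConfigurationMixin and LoggerMixin are elided.
from abc import ABC


class BaseOPDSImporter(ABC):  # shared OPDS import logic (core/opds_import.py)
    ...


class OPDSImporter(BaseOPDSImporter):  # OPDS 1.x feeds (placement inferred from the text above)
    ...


class OPDS2Importer(BaseOPDSImporter):  # OPDS 2.0 feeds (core/opds2_import.py)
    ...


class BaseODLImporter(BaseOPDSImporter, ABC):  # shared ODL license handling (api/odl.py)
    ...


class ODLImporter(OPDSImporter, BaseODLImporter):  # ODL on top of OPDS 1.x (api/odl.py)
    ...


class ODL2Importer(OPDS2Importer, BaseODLImporter):  # ODL 2.x on top of OPDS 2.0 (api/odl2.py)
    ...

OPDSForDistributorsImporter is described as sharing the same base-class functionality, though its class statement is not shown in this excerpt.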
--- api/admin/controller/collection_settings.py | 3 - api/circulation.py | 4 +- api/integration/registry/license_providers.py | 6 +- api/odl.py | 42 +- api/odl2.py | 38 +- api/opds_for_distributors.py | 2 +- core/metadata_layer.py | 2 - core/model/integration.py | 2 +- core/opds2_import.py | 146 ++-- core/opds_import.py | 672 ++++++++--------- .../api/admin/controller/test_collections.py | 39 +- tests/api/conftest.py | 3 +- tests/api/test_odl.py | 313 +++----- tests/api/test_odl2.py | 127 +--- tests/core/test_opds_import.py | 698 +++++++----------- tests/fixtures/api_odl.py | 227 ++++++ tests/fixtures/api_odl2_files.py | 16 - tests/fixtures/api_odl_files.py | 16 - tests/fixtures/odl.py | 3 +- 19 files changed, 1042 insertions(+), 1317 deletions(-) create mode 100644 tests/fixtures/api_odl.py delete mode 100644 tests/fixtures/api_odl2_files.py delete mode 100644 tests/fixtures/api_odl_files.py diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index d5a596d044..6474d2ef4d 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -84,9 +84,6 @@ def process_get(self): continue collection_dict = self.collection_to_dict(collection_object) - [protocol] = [ - p for p in protocols if p["name"] == collection_object.protocol - ] if collection_object.integration_configuration: libraries = self.load_libraries(collection_object, user) collection_dict["libraries"] = libraries diff --git a/api/circulation.py b/api/circulation.py index f44734becf..40fe01d5ba 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -567,11 +567,11 @@ def library_configuration(self, library_id: int) -> LibrarySettingsType | None: libconfig = self.integration_configuration().for_library(library_id=library_id) if libconfig: config = self.library_settings_class()(**libconfig.settings_dict) - return config # type: ignore [return-value] + return config # type: ignore[return-value] return None def configuration(self) -> SettingsType: - return self.settings_class()(**self.integration_configuration().settings_dict) # type: ignore [return-value] + return self.settings_class()(**self.integration_configuration().settings_dict) # type: ignore[return-value] class BaseCirculationAPI( diff --git a/api/integration/registry/license_providers.py b/api/integration/registry/license_providers.py index eee96900e9..9338cc0283 100644 --- a/api/integration/registry/license_providers.py +++ b/api/integration/registry/license_providers.py @@ -9,12 +9,12 @@ if TYPE_CHECKING: from api.circulation import BaseCirculationAPI # noqa: autoflake from core.integration.settings import BaseSettings # noqa: autoflake - from core.opds_import import OPDSImporter # noqa: autoflake + from core.opds_import import BaseOPDSImporter # noqa: autoflake class LicenseProvidersRegistry( IntegrationRegistry[ - Union["BaseCirculationAPI[BaseSettings, BaseSettings]", "OPDSImporter"] + Union["BaseCirculationAPI[BaseSettings, BaseSettings]", "BaseOPDSImporter"] ] ): def __init__(self) -> None: @@ -48,7 +48,7 @@ def __init__(self) -> None: self.register(ODL2API, canonical=ODL2API.NAME) -class OpenAccessLicenseProvidersRegistry(IntegrationRegistry["OPDSImporter"]): +class OpenAccessLicenseProvidersRegistry(IntegrationRegistry["BaseOPDSImporter"]): def __init__(self) -> None: super().__init__(Goals.LICENSE_GOAL) from core.opds2_import import OPDS2Importer diff --git a/api/odl.py b/api/odl.py index ba79636f81..a0334594f4 100644 --- a/api/odl.py +++ b/api/odl.py @@ -5,6 +5,7 @@ 
import json import logging import uuid +from abc import ABC from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, Union import dateutil @@ -28,7 +29,6 @@ from core import util from core.analytics import Analytics from core.importers import BaseImporterSettings -from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, @@ -58,11 +58,15 @@ Session, get_one, ) -from core.model.configuration import HasExternalIntegration from core.model.licensing import LicenseStatus from core.model.patron import Patron from core.monitor import CollectionMonitor -from core.opds_import import OPDSImporter, OPDSImportMonitor, OPDSXMLParser +from core.opds_import import ( + BaseOPDSImporter, + OPDSImporter, + OPDSImportMonitor, + OPDSXMLParser, +) from core.util.datetime_helpers import to_utc, utc_now from core.util.http import HTTP, BadResponseException from core.util.string_helpers import base64 @@ -167,8 +171,6 @@ class ODLLibrarySettings(BaseCirculationEbookLoanSettings): class ODLAPI( BaseCirculationAPI[ODLSettings, ODLLibrarySettings], - HasExternalIntegration, - HasLibraryIntegrationConfiguration, ): """ODL (Open Distribution to Libraries) is a specification that allows libraries to manage their own loans and holds. It offers a deeper level @@ -974,20 +976,7 @@ class ODLXMLParser(OPDSXMLParser): NAMESPACES = dict(OPDSXMLParser.NAMESPACES, odl="http://opds-spec.org/odl") -class ODLImporter(OPDSImporter): - """Import information and formats from an ODL feed. - - The only change from OPDSImporter is that this importer extracts - format information from 'odl:license' tags. - """ - - NAME = ODLAPI.NAME - PARSER_CLASS = ODLXMLParser - - # The media type for a License Info Document, used to get information - # about the license. - LICENSE_INFO_DOCUMENT_MEDIA_TYPE = "application/vnd.odl.info+json" - +class BaseODLImporter(BaseOPDSImporter, ABC): FEEDBOOKS_AUDIO = "{}; protection={}".format( MediaTypes.AUDIOBOOK_MANIFEST_MEDIA_TYPE, DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_DRM, @@ -1159,6 +1148,21 @@ def get_license_data( return parsed_license + +class ODLImporter(OPDSImporter, BaseODLImporter): + """Import information and formats from an ODL feed. + + The only change from OPDSImporter is that this importer extracts + format information from 'odl:license' tags. + """ + + NAME = ODLAPI.NAME + PARSER_CLASS = ODLXMLParser + + # The media type for a License Info Document, used to get information + # about the license. 
+ LICENSE_INFO_DOCUMENT_MEDIA_TYPE = "application/vnd.odl.info+json" + @classmethod def _detail_for_elementtree_entry( cls, diff --git a/api/odl2.py b/api/odl2.py index 946d71eb63..0c2296eb11 100644 --- a/api/odl2.py +++ b/api/odl2.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Type from flask_babel import lazy_gettext as _ from pydantic import PositiveInt @@ -10,7 +10,7 @@ from webpub_manifest_parser.opds2.registry import OPDS2LinkRelationsRegistry from api.circulation_exceptions import PatronHoldLimitReached, PatronLoanLimitReached -from api.odl import ODLAPI, ODLImporter, ODLSettings +from api.odl import ODLAPI, BaseODLImporter, ODLSettings from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, @@ -18,7 +18,7 @@ ) from core.metadata_layer import FormatData from core.model import Edition, RightsStatus -from core.model.configuration import ExternalIntegration, HasExternalIntegration +from core.model.configuration import ExternalIntegration from core.opds2_import import OPDS2Importer, OPDS2ImportMonitor, RWPMManifestParser from core.util import first_or_default from core.util.datetime_helpers import to_utc @@ -28,7 +28,7 @@ from webpub_manifest_parser.opds2.ast import OPDS2Feed, OPDS2Publication from api.circulation import HoldInfo - from core.model import Collection, Identifier, LicensePool + from core.model import Collection, LicensePool from core.model.patron import Hold, Loan, Patron @@ -115,7 +115,7 @@ def _place_hold(self, patron: Patron, licensepool: LicensePool) -> HoldInfo: return super()._place_hold(patron, licensepool) -class ODL2Importer(OPDS2Importer, HasExternalIntegration): +class ODL2Importer(OPDS2Importer, BaseODLImporter): """Import information and formats from an ODL feed. The only change from OPDS2Importer is that this importer extracts @@ -134,10 +134,7 @@ def __init__( collection: Collection, parser: Optional[RWPMManifestParser] = None, data_source_name: str | None = None, - identifier_mapping: Dict[Identifier, Identifier] | None = None, http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - content_modifier: Optional[Callable[..., None]] = None, - map_from_collection: Optional[bool] = None, ): """Initialize a new instance of ODL2Importer class. @@ -158,26 +155,13 @@ def __init__( NOTE: If `collection` is provided, its .data_source will take precedence over any value provided here. This is only for use when you are importing OPDS metadata without any particular Collection in mind. :type data_source_name: str - - :param identifier_mapping: Dictionary used for mapping external identifiers into a set of internal ones - :type identifier_mapping: Dict - - :param content_modifier: A function that may modify-in-place representations (such as images and EPUB documents) - as they come in from the network. 
- :type content_modifier: Callable - - :param map_from_collection: Identifier mapping - :type map_from_collection: Dict """ super().__init__( db, collection, parser if parser else RWPMManifestParser(ODLFeedParserFactory()), data_source_name, - identifier_mapping, http_get, - content_modifier, - map_from_collection, ) self._logger = logging.getLogger(__name__) @@ -235,7 +219,7 @@ def _extract_publication_metadata( if not license_info_document_link: parsed_license = None else: - parsed_license = ODLImporter.get_license_data( + parsed_license = self.get_license_data( license_info_document_link, checkout_link, identifier, @@ -259,19 +243,17 @@ def _extract_publication_metadata( medium = Edition.medium_from_media_type(license_format) drm_schemes: List[str | None] - if license_format in ODLImporter.LICENSE_FORMATS: + if license_format in self.LICENSE_FORMATS: # Special case to handle DeMarque audiobooks which include the protection # in the content type. When we see a license format of # application/audiobook+json; protection=http://www.feedbooks.com/audiobooks/access-restriction # it means that this audiobook title is available through the DeMarque streaming manifest # endpoint. drm_schemes = [ - ODLImporter.LICENSE_FORMATS[license_format][ - ODLImporter.DRM_SCHEME - ] + self.LICENSE_FORMATS[license_format][self.DRM_SCHEME] ] - license_format = ODLImporter.LICENSE_FORMATS[license_format][ - ODLImporter.CONTENT_TYPE + license_format = self.LICENSE_FORMATS[license_format][ + self.CONTENT_TYPE ] else: drm_schemes = ( diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index be535dada3..cebe3a63e5 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -492,7 +492,7 @@ def feed_contains_new_data(self, feed: bytes | str) -> bool: def import_one_feed( self, feed: bytes | str - ) -> Tuple[List[Edition], Dict[str, CoverageFailure | List[CoverageFailure]]]: + ) -> Tuple[List[Edition], Dict[str, List[CoverageFailure]]]: # Collect all the identifiers in the feed. parsed_feed = feedparser.parse(feed) identifiers = [entry.get("id") for entry in parsed_feed.get("entries", [])] diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 44a185f06b..1b02c7534a 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -62,7 +62,6 @@ def __init__( formats=False, rights=False, link_content=False, - content_modifier=None, analytics=None, even_if_not_apparently_updated=False, presentation_calculation_policy=None, @@ -75,7 +74,6 @@ def __init__( self.formats = formats self.link_content = link_content self.even_if_not_apparently_updated = even_if_not_apparently_updated - self.content_modifier = content_modifier self.analytics = analytics self.presentation_calculation_policy = ( presentation_calculation_policy or PresentationCalculationPolicy() diff --git a/core/model/integration.py b/core/model/integration.py index baae134f48..eaa4e43f6a 100644 --- a/core/model/integration.py +++ b/core/model/integration.py @@ -66,7 +66,7 @@ class IntegrationConfiguration(Base): @overload def for_library( - self, library_id: int, create: Literal[True] = True + self, library_id: int, create: Literal[True] ) -> IntegrationLibraryConfiguration: ... 
diff --git a/core/opds2_import.py b/core/opds2_import.py index 234834b0e4..12a3e3da93 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -3,10 +3,19 @@ import logging from datetime import datetime from io import BytesIO, StringIO -from typing import TYPE_CHECKING, Any, Callable, Dict, Iterable, Optional, Tuple, Type +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + Literal, + Optional, + Tuple, + Type, +) from urllib.parse import urljoin, urlparse -import sqlalchemy import webpub_manifest_parser.opds2.ast as opds2_ast from flask_babel import lazy_gettext as _ from sqlalchemy.orm import Session @@ -50,11 +59,14 @@ Representation, RightsStatus, Subject, - get_one, ) -from core.model.configuration import ConfigurationSetting, HasExternalIntegration -from core.model.integration import IntegrationConfiguration -from core.opds_import import OPDSImporter, OPDSImporterSettings, OPDSImportMonitor +from core.model.configuration import ConfigurationSetting +from core.opds_import import ( + BaseOPDSImporter, + OPDSImporterLibrarySettings, + OPDSImporterSettings, + OPDSImportMonitor, +) from core.util.http import BadResponseException from core.util.opds_writer import OPDSFeed @@ -133,11 +145,11 @@ class OPDS2ImporterSettings(OPDSImporterSettings): ) -class OPDS2Importer( - IgnoredIdentifierImporterMixin, - OPDSImporter, - HasExternalIntegration, -): +class OPDS2ImporterLibrarySettings(OPDSImporterLibrarySettings): + pass + + +class OPDS2Importer(IgnoredIdentifierImporterMixin, BaseOPDSImporter): """Imports editions and license pools from an OPDS 2.0 feed.""" NAME: str = ExternalIntegration.OPDS2_IMPORT @@ -148,6 +160,10 @@ class OPDS2Importer( def settings_class(cls) -> Type[OPDS2ImporterSettings]: return OPDS2ImporterSettings + @classmethod + def library_settings_class(cls) -> Type[OPDS2ImporterLibrarySettings]: + return OPDS2ImporterLibrarySettings + @classmethod def label(cls) -> str: return cls.NAME @@ -162,10 +178,7 @@ def __init__( collection: Collection, parser: RWPMManifestParser, data_source_name: str | None = None, - identifier_mapping: Dict[Identifier, Identifier] | None = None, http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - content_modifier: Optional[Callable[..., None]] = None, - map_from_collection: Optional[bool] = None, ): """Initialize a new instance of OPDS2Importer class. @@ -180,31 +193,14 @@ def __init__( If there is no DataSource with this name, one will be created. NOTE: If `collection` is provided, its .data_source will take precedence over any value provided here. This is only for use when you are importing OPDS metadata without any particular Collection in mind. - :param identifier_mapping: Dictionary used for mapping external identifiers into a set of internal ones - :param content_modifier: A function that may modify-in-place representations (such as images and EPUB documents) - as they come in from the network. 
- :param map_from_collection: Identifier mapping """ - super().__init__( - db, - collection, - data_source_name, - identifier_mapping, - http_get, - content_modifier, - map_from_collection, - ) - - if not isinstance(parser, RWPMManifestParser): - raise ValueError( - f"Argument 'parser' must be an instance of {RWPMManifestParser}" - ) + super().__init__(db, collection, data_source_name, http_get) + self._parser = parser - self._parser: RWPMManifestParser = parser - self._logger: logging.Logger = logging.getLogger(__name__) - - self._external_integration_id = collection.external_integration.id - self._integration_configuration_id = collection.integration_configuration_id + def assert_importable_content( + self, feed: str, feed_url: str, max_get_attempts: int = 5 + ) -> Literal[True]: + raise NotImplementedError("OPDS2Importer does not support this method") def _is_identifier_allowed(self, identifier: Identifier) -> bool: """Check the identifier and return a boolean value indicating whether CM can import it. @@ -226,12 +222,12 @@ def _extract_subjects(self, subjects: list[core_ast.Subject]) -> list[SubjectDat :param subjects: Parsed subject object :return: List of subjects metadata """ - self._logger.debug("Started extracting subjects metadata") + self.log.debug("Started extracting subjects metadata") subject_metadata_list = [] for subject in subjects: - self._logger.debug( + self.log.debug( f"Started extracting subject metadata from {encode(subject)}" ) @@ -249,13 +245,13 @@ def _extract_subjects(self, subjects: list[core_ast.Subject]) -> list[SubjectDat subject_metadata_list.append(subject_metadata) - self._logger.debug( + self.log.debug( "Finished extracting subject metadata from {}: {}".format( encode(subject), encode(subject_metadata) ) ) - self._logger.debug( + self.log.debug( "Finished extracting subjects metadata: {}".format( encode(subject_metadata_list) ) @@ -274,12 +270,12 @@ def _extract_contributors( :param default_role: Default role :return: List of contributors metadata """ - self._logger.debug("Started extracting contributors metadata") + self.log.debug("Started extracting contributors metadata") contributor_metadata_list = [] for contributor in contributors: - self._logger.debug( + self.log.debug( "Started extracting contributor metadata from {}".format( encode(contributor) ) @@ -293,7 +289,7 @@ def _extract_contributors( roles=contributor.roles if contributor.roles else default_role, ) - self._logger.debug( + self.log.debug( "Finished extracting contributor metadata from {}: {}".format( encode(contributor), encode(contributor_metadata) ) @@ -301,7 +297,7 @@ def _extract_contributors( contributor_metadata_list.append(contributor_metadata) - self._logger.debug( + self.log.debug( "Finished extracting contributors metadata: {}".format( encode(contributor_metadata_list) ) @@ -320,7 +316,7 @@ def _extract_link( :return: Link metadata """ - self._logger.debug(f"Started extracting link metadata from {encode(link)}") + self.log.debug(f"Started extracting link metadata from {encode(link)}") # FIXME: It seems that OPDS 2.0 spec doesn't contain information about rights so we use the default one. 
rights_uri = RightsStatus.rights_uri_from_string("") @@ -340,7 +336,7 @@ def _extract_link( content=None, ) - self._logger.debug( + self.log.debug( "Finished extracting link metadata from {}: {}".format( encode(link), encode(link_metadata) ) @@ -356,7 +352,7 @@ def _extract_description_link( :param publication: Publication object :return: LinkData object containing publication's description """ - self._logger.debug( + self.log.debug( "Started extracting a description link from {}".format( encode(publication.metadata.description) ) @@ -371,7 +367,7 @@ def _extract_description_link( content=publication.metadata.description, ) - self._logger.debug( + self.log.debug( "Finished extracting a description link from {}: {}".format( encode(publication.metadata.description), encode(description_link) ) @@ -388,7 +384,7 @@ def _extract_image_links( :param feed_self_url: Feed's self URL :return: List of links metadata """ - self._logger.debug( + self.log.debug( f"Started extracting image links from {encode(publication.images)}" ) @@ -431,7 +427,7 @@ def _extract_image_links( ) image_links.append(cover_link) - self._logger.debug( + self.log.debug( "Finished extracting image links from {}: {}".format( encode(publication.images), encode(image_links) ) @@ -448,7 +444,7 @@ def _extract_links( :param feed_self_url: Feed's self URL :return: List of links metadata """ - self._logger.debug(f"Started extracting links from {encode(publication.links)}") + self.log.debug(f"Started extracting links from {encode(publication.links)}") links = [] @@ -464,7 +460,7 @@ def _extract_links( if image_links: links.extend(image_links) - self._logger.debug( + self.log.debug( "Finished extracting links from {}: {}".format( encode(publication.links), encode(links) ) @@ -480,7 +476,7 @@ def _extract_media_types_and_drm_scheme_from_link( :param link: Link object :return: 2-tuple containing information about the content's media type and its DRM schema """ - self._logger.debug( + self.log.debug( "Started extracting media types and a DRM scheme from {}".format( encode(link) ) @@ -494,7 +490,7 @@ def _extract_media_types_and_drm_scheme_from_link( and link.properties.availability.state != opds2_ast.OPDS2AvailabilityType.AVAILABLE.value ): - self._logger.info(f"Link unavailable. Skipping. {encode(link)}") + self.log.info(f"Link unavailable. Skipping. 
{encode(link)}") return [] # We need to take into account indirect acquisition links @@ -533,7 +529,7 @@ def _extract_media_types_and_drm_scheme_from_link( ): media_types_and_drm_scheme.append((link.type, DeliveryMechanism.NO_DRM)) - self._logger.debug( + self.log.debug( "Finished extracting media types and a DRM scheme from {}: {}".format( encode(link), encode(media_types_and_drm_scheme) ) @@ -591,7 +587,7 @@ def _extract_identifier( :param publication: Publication object :return: Identifier object """ - return self._parse_identifier(publication.metadata.identifier) + return self.parse_identifier(publication.metadata.identifier) # type: ignore[no-any-return] def _extract_publication_metadata( self, @@ -606,7 +602,7 @@ def _extract_publication_metadata( :param data_source_name: Data source's name :return: Publication's metadata """ - self._logger.debug( + self.log.debug( "Started extracting metadata from publication {}".format( encode(publication) ) @@ -728,7 +724,7 @@ def _extract_publication_metadata( circulation=circulation_data, ) - self._logger.debug( + self.log.debug( "Finished extracting metadata from publication {}: {}".format( encode(publication), encode(metadata) ) @@ -774,9 +770,7 @@ def _find_formats_in_non_open_access_acquisition_links( return formats - def external_integration( - self, db: sqlalchemy.orm.session.Session - ) -> ExternalIntegration: + def external_integration(self, db: Session) -> ExternalIntegration: """Return an external integration associated with this object. :param db: Database session :return: External integration associated with this object @@ -785,20 +779,6 @@ def external_integration( raise ValueError("Collection is not set") return self.collection.external_integration - def integration_configuration(self) -> IntegrationConfiguration: - """Return an external integration associated with this object. - :param db: Database session - :return: External integration associated with this object - """ - ext = get_one( - self._db, IntegrationConfiguration, id=self._integration_configuration_id - ) - if not ext: - raise ValueError( - f"Integration Configuration not found {self._integration_configuration_id}" - ) - return ext - @staticmethod def _get_publications( feed: opds2_ast.OPDS2Feed, @@ -854,7 +834,7 @@ def _is_open_access_link_( def _record_coverage_failure( self, - failures: dict[str, list[CoverageFailure] | CoverageFailure], + failures: dict[str, list[CoverageFailure]], identifier: Identifier, error_message: str, transient: bool = True, @@ -880,7 +860,7 @@ def _record_coverage_failure( transient=transient, collection=self.collection, ) - failures[identifier.identifier].append(failure) # type: ignore[union-attr] + failures[identifier.identifier].append(failure) return failure @@ -896,9 +876,9 @@ def _record_publication_unrecognizable_identifier( title = publication.metadata.title if original_identifier is None: - self._logger.warning(f"Publication '{title}' does not have an identifier.") + self.log.warning(f"Publication '{title}' does not have an identifier.") else: - self._logger.warning( + self.log.warning( f"Publication # {original_identifier} ('{title}') has an unrecognizable identifier." 
) @@ -953,7 +933,7 @@ def _parse_feed_links(self, links: list[core_ast.Link]) -> None: def extract_feed_data( self, feed: str | opds2_ast.OPDS2Feed, feed_url: str | None = None - ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure] | CoverageFailure]]: + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure]]]: """Turn an OPDS 2.0 feed into lists of Metadata and CirculationData objects. :param feed: OPDS 2.0 feed :param feed_url: Feed URL used to resolve relative links @@ -961,7 +941,7 @@ def extract_feed_data( parser_result = self._parser.parse_manifest(feed) feed = parser_result.root publication_metadata_dictionary = {} - failures: dict[str, list[CoverageFailure] | CoverageFailure] = {} + failures: dict[str, list[CoverageFailure]] = {} if feed.links: self._parse_feed_links(feed.links) @@ -1002,7 +982,7 @@ def extract_feed_data( failures, recognized_identifier, error.error_message ) else: - self._logger.warning(f"{error.error_message}") + self.log.warning(f"{error.error_message}") return publication_metadata_dictionary, failures diff --git a/core/opds_import.py b/core/opds_import.py index fb627098d8..ce3eb33561 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -2,6 +2,8 @@ import logging import traceback +from abc import ABC, abstractmethod +from collections import defaultdict from datetime import datetime from io import BytesIO from typing import ( @@ -28,7 +30,6 @@ from flask_babel import lazy_gettext as _ from lxml import etree from pydantic import HttpUrl -from sqlalchemy.orm import aliased from sqlalchemy.orm.session import Session from api.circulation import CirculationConfigurationMixin @@ -59,7 +60,6 @@ CoverageRecord, DataSource, Edition, - Equivalency, ExternalIntegration, Hyperlink, Identifier, @@ -75,6 +75,7 @@ from core.selftest import SelfTestResult from core.util.datetime_helpers import datetime_utc, to_utc, utc_now from core.util.http import HTTP, BadResponseException +from core.util.log import LoggerMixin from core.util.opds_writer import OPDSFeed, OPDSMessage from core.util.string_helpers import base64 from core.util.xmlparser import XMLParser @@ -83,43 +84,6 @@ from core.model import Work -@overload -def parse_identifier(db: Session, identifier: str) -> Identifier: - ... - - -@overload -def parse_identifier(db: Session, identifier: Optional[str]) -> Optional[Identifier]: - ... - - -def parse_identifier(db: Session, identifier: Optional[str]) -> Optional[Identifier]: - """Parse the identifier and return an Identifier object representing it. - - :param db: Database session - :type db: sqlalchemy.orm.session.Session - - :param identifier: String containing the identifier - :type identifier: str - - :return: Identifier object - :rtype: Optional[core.model.identifier.Identifier] - """ - parsed_identifier = None - - try: - result = Identifier.parse_urn(db, identifier) - - if result is not None: - parsed_identifier, _ = result - except Exception: - logging.error( - f"An unexpected exception occurred during parsing identifier {identifier}" - ) - - return parsed_identifier - - class OPDSXMLParser(XMLParser): NAMESPACES = { "simplified": "http://librarysimplified.org/terms/", @@ -226,91 +190,26 @@ class OPDSImporterLibrarySettings(BaseSettings): pass -class OPDSImporter( - CirculationConfigurationMixin[OPDSImporterSettings, OPDSImporterLibrarySettings] +class BaseOPDSImporter( + CirculationConfigurationMixin[OPDSImporterSettings, OPDSImporterLibrarySettings], + LoggerMixin, + ABC, ): - """Imports editions and license pools from an OPDS feed. 
- Creates Edition, LicensePool and Work rows in the database, if those - don't already exist. - - Should be used when a circulation server asks for data from - our internal content server, and also when our content server asks for data - from external content servers. - """ - - COULD_NOT_CREATE_LICENSE_POOL = ( - "No existing license pool for this identifier and no way of creating one." - ) - - NAME = ExternalIntegration.OPDS_IMPORT - DESCRIPTION = _("Import books from a publicly-accessible OPDS feed.") - - NO_DEFAULT_AUDIENCE = "" - - # Subclasses of OPDSImporter may define a different parser class that's - # a subclass of OPDSXMLParser. For example, a subclass may want to use - # tags from an additional namespace. - PARSER_CLASS = OPDSXMLParser - - # Subclasses of OPDSImporter may define a list of status codes - # that should be treated as indicating success, rather than failure, - # when they show up in tags. - SUCCESS_STATUS_CODES: list[int] | None = None - - @classmethod - def settings_class(cls) -> Type[OPDSImporterSettings]: - return OPDSImporterSettings - - @classmethod - def library_settings_class(cls) -> Type[OPDSImporterLibrarySettings]: - return OPDSImporterLibrarySettings - - @classmethod - def label(cls) -> str: - return "OPDS Importer" - - @classmethod - def description(cls) -> str: - return cls.DESCRIPTION # type: ignore[no-any-return] - def __init__( self, _db: Session, - collection: Optional[Collection], - data_source_name: Optional[str] = None, - identifier_mapping: Optional[Dict[Identifier, Identifier]] = None, + collection: Collection, + data_source_name: Optional[str], http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - content_modifier: Optional[Callable[..., None]] = None, - map_from_collection: Optional[bool] = None, ): - """:param collection: LicensePools created by this OPDS import - will be associated with the given Collection. If this is None, - no LicensePools will be created -- only Editions. - - :param data_source_name: Name of the source of this OPDS feed. - All Editions created by this import will be associated with - this DataSource. If there is no DataSource with this name, one - will be created. NOTE: If `collection` is provided, its - .data_source will take precedence over any value provided - here. This is only for use when you are importing OPDS - metadata without any particular Collection in mind. - - :param http_get: Use this method to make an HTTP GET request. This - can be replaced with a stub method for testing purposes. - - :param content_modifier: A function that may modify-in-place - representations (such as images and EPUB documents) as they - come in from the network. - - :param map_from_collection - """ self._db = _db - self.log = logging.getLogger("OPDS Importer") - self._collection_id = collection.id if collection else None - self._integration_configuration_id = ( - collection.integration_configuration.id if collection else None - ) - if self.collection and not data_source_name: + if collection.id is None: + raise ValueError( + f"Unable to create importer for Collection with id = None. Collection: {collection.name}." + ) + self._collection_id = collection.id + self._integration_configuration_id = collection.integration_configuration_id + if data_source_name is None: # Use the Collection data_source for OPDS import. data_source = self.collection.data_source if data_source: @@ -319,36 +218,62 @@ def __init__( raise ValueError( "Cannot perform an OPDS import on a Collection that has no associated DataSource!" 
) - else: - # Use the given data_source or default to the Metadata - # Wrangler. - data_source_name = data_source_name or DataSource.METADATA_WRANGLER self.data_source_name = data_source_name - self.identifier_mapping = identifier_mapping - - self.primary_identifier_source = None - if collection: - self.primary_identifier_source = collection.primary_identifier_source - - self.content_modifier = content_modifier # In general, we are cautious when mirroring resources so that # we don't, e.g. accidentally get our IP banned from # gutenberg.org. self.http_get = http_get or Representation.cautious_http_get - self.map_from_collection = map_from_collection - @property - def collection(self) -> Optional[Collection]: - """Returns an associated Collection object + @abstractmethod + def extract_feed_data( + self, feed: str | bytes, feed_url: Optional[str] = None + ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: + ... - :return: Associated Collection object - :rtype: Optional[Collection] + @abstractmethod + def extract_last_update_dates( + self, feed: str | bytes | FeedParserDict + ) -> List[Tuple[Optional[str], Optional[datetime]]]: + ... + + @abstractmethod + def extract_next_links(self, feed: str | bytes) -> List[str]: + ... + + @abstractmethod + def assert_importable_content( + self, feed: str, feed_url: str, max_get_attempts: int = 5 + ) -> Literal[True]: + ... + + @overload + def parse_identifier(self, identifier: str) -> Identifier: + ... + + @overload + def parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: + ... + + def parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: + """Parse the identifier and return an Identifier object representing it. + + :param identifier: String containing the identifier + + :return: Identifier object """ - if self._collection_id: - return Collection.by_id(self._db, id=self._collection_id) + parsed_identifier = None - return None + try: + result = Identifier.parse_urn(self._db, identifier) + if result is not None: + parsed_identifier, _ = result + except Exception: + self.log.error( + f"An unexpected exception occurred during parsing identifier {identifier}" + ) + + return parsed_identifier @property def data_source(self) -> DataSource: @@ -363,93 +288,91 @@ def data_source(self) -> DataSource: offers_licenses=offers_licenses, ) - def assert_importable_content( - self, feed: str, feed_url: str, max_get_attempts: int = 5 - ) -> Literal[True]: - """Raise an exception if the given feed contains nothing that can, - even theoretically, be turned into a LicensePool. + @property + def collection(self) -> Collection: + collection = Collection.by_id(self._db, self._collection_id) + if collection is None: + raise ValueError("Unable to load collection.") + return collection - By default, this means the feed must link to open-access content - that can actually be retrieved. + def import_edition_from_metadata(self, metadata: Metadata) -> Edition: + """For the passed-in Metadata object, see if can find or create an Edition + in the database. Also create a LicensePool if the Metadata has + CirculationData in it. """ - metadata, failures = self.extract_feed_data(feed, feed_url) - get_attempts = 0 - - # Find an open-access link, and try to GET it just to make - # sure OPDS feed isn't hiding non-open-access stuff behind an - # open-access link. - # - # To avoid taking forever or antagonizing API providers, we'll - # give up after `max_get_attempts` failures. 
- for link in self._open_access_links(list(metadata.values())): - url = link.href - success = self._is_open_access_link(url, link.media_type) - if success: - return True - get_attempts += 1 - if get_attempts >= max_get_attempts: - error = ( - "Was unable to GET supposedly open-access content such as %s (tried %s times)" - % (url, get_attempts) - ) - explanation = "This might be an OPDS For Distributors feed, or it might require different authentication credentials." - raise IntegrationException(error, explanation) + # Locate or create an Edition for this book. + edition, is_new_edition = metadata.edition(self._db) - raise IntegrationException( - "No open-access links were found in the OPDS feed.", - "This might be an OPDS for Distributors feed.", + policy = ReplacementPolicy( + subjects=True, + links=True, + contributions=True, + rights=True, + link_content=True, + formats=True, + even_if_not_apparently_updated=True, + ) + metadata.apply( + edition=edition, + collection=self.collection, + replace=policy, ) - @classmethod - def _open_access_links( - cls, metadatas: List[Metadata] - ) -> Generator[LinkData, None, None]: - """Find all open-access links in a list of Metadata objects. + return edition # type: ignore[no-any-return] - :param metadatas: A list of Metadata objects. - :yield: A sequence of `LinkData` objects. + def update_work_for_edition( + self, + edition: Edition, + is_open_access: bool = True, + ) -> tuple[LicensePool | None, Work | None]: + """If possible, ensure that there is a presentation-ready Work for the + given edition's primary identifier. + + :param edition: The edition whose license pool and work we're interested in. + :param is_open_access: Whether this is an open access edition. + :return: 2-Tuple of license pool (optional) and work (optional) for edition. """ - for item in metadatas: - if not item.circulation: - continue - for link in item.circulation.links: - if link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: - yield link - def _is_open_access_link( - self, url: str, type: Optional[str] - ) -> str | Literal[False]: - """Is `url` really an open-access link? + work = None - That is, can we make a normal GET request and get something - that looks like a book? - """ - headers = {} - if type: - headers["Accept"] = type - status, headers, body = self.http_get(url, headers=headers) - if status == 200 and len(body) > 1024 * 10: - # We could also check the media types, but this is good - # enough for now. - return "Found a book-like thing at %s" % url - self.log.error( - "Supposedly open-access link %s didn't give us a book. Status=%s, body length=%s", - url, - status, - len(body), + # Looks up a license pool for the primary identifier associated with + # the given edition. If this is not an open access book, then the + # collection is also used as criteria for the lookup. Open access + # books don't require a collection match, according to this explanation + # from prior work: + # Find a LicensePool for the primary identifier. Any LicensePool will + # do--the collection doesn't have to match, since all + # LicensePools for a given identifier have the same Work. + # + # If we have CirculationData, a pool was created when we + # imported the edition. If there was already a pool from a + # different data source or a different collection, that's fine + # too. 
+ collection_criteria = {} if is_open_access else {"collection": self.collection} + pool = get_one( + self._db, + LicensePool, + identifier=edition.primary_identifier, + on_multiple="interchangeable", + **collection_criteria, ) - return False - def _parse_identifier(self, identifier: str) -> Identifier: - """Parse the identifier and return an Identifier object representing it. - - :param identifier: String containing the identifier - :type identifier: str + if pool: + if not pool.work or not pool.work.presentation_ready: + # There is no presentation-ready Work for this + # LicensePool. Try to create one. + work, ignore = pool.calculate_work() + else: + # There is a presentation-ready Work for this LicensePool. + # Use it. + work = pool.work - :return: Identifier object - :rtype: Identifier - """ - return parse_identifier(self._db, identifier) + # If a presentation-ready Work already exists, there's no + # rush. We might have new metadata that will change the Work's + # presentation, but when we called Metadata.apply() the work + # was set up to have its presentation recalculated in the + # background, and that's good enough. + return pool, work def import_from_feed( self, feed: str | bytes, feed_url: Optional[str] = None @@ -457,7 +380,7 @@ def import_from_feed( List[Edition], List[LicensePool], List[Work], - Dict[str, CoverageFailure | List[CoverageFailure]], + Dict[str, List[CoverageFailure]], ]: # Keep track of editions that were imported. Pools and works # for those editions may be looked up or created. @@ -467,7 +390,8 @@ def import_from_feed( # If parsing the overall feed throws an exception, we should address that before # moving on. Let the exception propagate. - metadata_objs, failures = self.extract_feed_data(feed, feed_url) + metadata_objs, extracted_failures = self.extract_feed_data(feed, feed_url) + failures = defaultdict(list, extracted_failures) # make editions. if have problem, make sure associated pool and work aren't created. for key, metadata in metadata_objs.items(): # key is identifier.urn here @@ -497,7 +421,7 @@ def import_from_feed( transient=False, collection=self.collection, ) - failures[key] = failure + failures[key].append(failure) # clean up any edition might have created if key in imported_editions: del imported_editions[key] @@ -527,7 +451,7 @@ def import_from_feed( transient=False, collection=self.collection, ) - failures[key] = failure + failures[key].append(failure) return ( list(imported_editions.values()), @@ -536,85 +460,150 @@ def import_from_feed( failures, ) - def import_edition_from_metadata(self, metadata: Metadata) -> Edition: - """For the passed-in Metadata object, see if can find or create an Edition - in the database. Also create a LicensePool if the Metadata has - CirculationData in it. - """ - # Locate or create an Edition for this book. - edition, is_new_edition = metadata.edition(self._db) - policy = ReplacementPolicy( - subjects=True, - links=True, - contributions=True, - rights=True, - link_content=True, - formats=True, - even_if_not_apparently_updated=True, - content_modifier=self.content_modifier, - ) - metadata.apply( - edition=edition, - collection=self.collection, - replace=policy, - ) +class OPDSImporter(BaseOPDSImporter): + """Imports editions and license pools from an OPDS feed. + Creates Edition, LicensePool and Work rows in the database, if those + don't already exist. 
- return edition # type: ignore[no-any-return] + Should be used when a circulation server asks for data from + our internal content server, and also when our content server asks for data + from external content servers. + """ - def update_work_for_edition( + NAME = ExternalIntegration.OPDS_IMPORT + DESCRIPTION = _("Import books from a publicly-accessible OPDS feed.") + + # Subclasses of OPDSImporter may define a different parser class that's + # a subclass of OPDSXMLParser. For example, a subclass may want to use + # tags from an additional namespace. + PARSER_CLASS = OPDSXMLParser + + @classmethod + def settings_class(cls) -> Type[OPDSImporterSettings]: + return OPDSImporterSettings + + @classmethod + def library_settings_class(cls) -> Type[OPDSImporterLibrarySettings]: + return OPDSImporterLibrarySettings + + @classmethod + def label(cls) -> str: + return "OPDS Importer" + + @classmethod + def description(cls) -> str: + return cls.DESCRIPTION # type: ignore[no-any-return] + + def __init__( self, - edition: Edition, - is_open_access: bool = True, - ) -> tuple[LicensePool | None, Work | None]: - """If possible, ensure that there is a presentation-ready Work for the - given edition's primary identifier. + _db: Session, + collection: Collection, + data_source_name: Optional[str] = None, + http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + ): + """:param collection: LicensePools created by this OPDS import + will be associated with the given Collection. If this is None, + no LicensePools will be created -- only Editions. - :param edition: The edition whose license pool and work we're interested in. - :param is_open_access: Whether this is an open access edition. - :return: 2-Tuple of license pool (optional) and work (optional) for edition. + :param data_source_name: Name of the source of this OPDS feed. + All Editions created by this import will be associated with + this DataSource. If there is no DataSource with this name, one + will be created. NOTE: If `collection` is provided, its + .data_source will take precedence over any value provided + here. This is only for use when you are importing OPDS + metadata without any particular Collection in mind. + + :param http_get: Use this method to make an HTTP GET request. This + can be replaced with a stub method for testing purposes. """ + super().__init__(_db, collection, data_source_name) - work = None + self.primary_identifier_source = None + if collection: + self.primary_identifier_source = collection.primary_identifier_source - # Looks up a license pool for the primary identifier associated with - # the given edition. If this is not an open access book, then the - # collection is also used as criteria for the lookup. Open access - # books don't require a collection match, according to this explanation - # from prior work: - # Find a LicensePool for the primary identifier. Any LicensePool will - # do--the collection doesn't have to match, since all - # LicensePools for a given identifier have the same Work. + # In general, we are cautious when mirroring resources so that + # we don't, e.g. accidentally get our IP banned from + # gutenberg.org. + self.http_get = http_get or Representation.cautious_http_get + + def assert_importable_content( + self, feed: str, feed_url: str, max_get_attempts: int = 5 + ) -> Literal[True]: + """Raise an exception if the given feed contains nothing that can, + even theoretically, be turned into a LicensePool. 
+ + By default, this means the feed must link to open-access content + that can actually be retrieved. + """ + metadata, failures = self.extract_feed_data(feed, feed_url) + get_attempts = 0 + + # Find an open-access link, and try to GET it just to make + # sure OPDS feed isn't hiding non-open-access stuff behind an + # open-access link. # - # If we have CirculationData, a pool was created when we - # imported the edition. If there was already a pool from a - # different data source or a different collection, that's fine - # too. - collection_criteria = {} if is_open_access else {"collection": self.collection} - pool = get_one( - self._db, - LicensePool, - identifier=edition.primary_identifier, - on_multiple="interchangeable", - **collection_criteria, + # To avoid taking forever or antagonizing API providers, we'll + # give up after `max_get_attempts` failures. + for link in self._open_access_links(list(metadata.values())): + url = link.href + success = self._is_open_access_link(url, link.media_type) + if success: + return True + get_attempts += 1 + if get_attempts >= max_get_attempts: + error = ( + "Was unable to GET supposedly open-access content such as %s (tried %s times)" + % (url, get_attempts) + ) + explanation = "This might be an OPDS For Distributors feed, or it might require different authentication credentials." + raise IntegrationException(error, explanation) + + raise IntegrationException( + "No open-access links were found in the OPDS feed.", + "This might be an OPDS for Distributors feed.", ) - if pool: - if not pool.work or not pool.work.presentation_ready: - # There is no presentation-ready Work for this - # LicensePool. Try to create one. - work, ignore = pool.calculate_work() - else: - # There is a presentation-ready Work for this LicensePool. - # Use it. - work = pool.work + @classmethod + def _open_access_links( + cls, metadatas: List[Metadata] + ) -> Generator[LinkData, None, None]: + """Find all open-access links in a list of Metadata objects. - # If a presentation-ready Work already exists, there's no - # rush. We might have new metadata that will change the Work's - # presentation, but when we called Metadata.apply() the work - # was set up to have its presentation recalculated in the - # background, and that's good enough. - return pool, work + :param metadatas: A list of Metadata objects. + :yield: A sequence of `LinkData` objects. + """ + for item in metadatas: + if not item.circulation: + continue + for link in item.circulation.links: + if link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: + yield link + + def _is_open_access_link( + self, url: str, type: Optional[str] + ) -> str | Literal[False]: + """Is `url` really an open-access link? + + That is, can we make a normal GET request and get something + that looks like a book? + """ + headers = {} + if type: + headers["Accept"] = type + status, headers, body = self.http_get(url, headers=headers) + if status == 200 and len(body) > 1024 * 10: + # We could also check the media types, but this is good + # enough for now. + return "Found a book-like thing at %s" % url + self.log.error( + "Supposedly open-access link %s didn't give us a book. 
Status=%s, body length=%s", + url, + status, + len(body), + ) + return False def extract_next_links(self, feed: str | bytes | FeedParserDict) -> List[str]: if isinstance(feed, (bytes, str)): @@ -642,44 +631,9 @@ def extract_last_update_dates( ] return [x for x in dates if x and x[1]] - def build_identifier_mapping(self, external_urns: List[str]) -> None: - """Uses the given Collection and a list of URNs to reverse - engineer an identifier mapping. - - NOTE: It would be better if .identifier_mapping weren't - instance data, since a single OPDSImporter might import - multiple pages of a feed. However, the code as written should - work. - """ - if not self.collection: - return - - mapping = dict() - identifiers_by_urn, failures = Identifier.parse_urns( - self._db, external_urns, autocreate=False - ) - external_identifiers = list(identifiers_by_urn.values()) - - internal_identifier = aliased(Identifier) - qu = ( - self._db.query(Identifier, internal_identifier) - .join(Identifier.inbound_equivalencies) - .join(internal_identifier, Equivalency.input) - .join(internal_identifier.licensed_through) - .filter( - Identifier.id.in_([x.id for x in external_identifiers]), - LicensePool.collection == self.collection, - ) - ) - - for external_identifier, internal_identifier in qu: - mapping[external_identifier] = internal_identifier - - self.identifier_mapping = mapping - def extract_feed_data( self, feed: str | bytes, feed_url: Optional[str] = None - ) -> Tuple[Dict[str, Metadata], Dict[str, CoverageFailure | List[CoverageFailure]]]: + ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: """Turn an OPDS feed into lists of Metadata and CirculationData objects, with associated messages and next_links. """ @@ -692,17 +646,11 @@ def extract_feed_data( feed, data_source=data_source, feed_url=feed_url, do_get=self.http_get ) - if self.map_from_collection: - # Build the identifier_mapping based on the Collection. - self.build_identifier_mapping( - list(fp_metadata.keys()) + list(fp_failures.keys()) - ) - # translate the id in failures to identifier.urn - identified_failures: Dict[str, CoverageFailure | List[CoverageFailure]] = {} + identified_failures = {} for urn, failure in list(fp_failures.items()) + list(xml_failures.items()): identifier, failure = self.handle_failure(urn, failure) - identified_failures[identifier.urn] = failure + identified_failures[identifier.urn] = [failure] # Use one loop for both, since the id will be the same for both dictionaries. 
metadata = {} @@ -734,20 +682,12 @@ def extract_feed_data( if external_identifier is None: external_identifier, ignore = Identifier.parse_urn(self._db, _id) - internal_identifier: Optional[Identifier] - if self.identifier_mapping and external_identifier is not None: - internal_identifier = self.identifier_mapping.get( - external_identifier, external_identifier - ) - else: - internal_identifier = external_identifier - # Don't process this item if there was already an error - if internal_identifier.urn in list(identified_failures.keys()): + if external_identifier.urn in list(identified_failures.keys()): continue identifier_obj = IdentifierData( - type=internal_identifier.type, identifier=internal_identifier.identifier + type=external_identifier.type, identifier=external_identifier.identifier ) # form the Metadata object @@ -757,7 +697,7 @@ def extract_feed_data( combined_meta["primary_identifier"] = identifier_obj - metadata[internal_identifier.urn] = Metadata(**combined_meta) + metadata[external_identifier.urn] = Metadata(**combined_meta) # Form the CirculationData that would correspond to this Metadata, # assuming there is a Collection to hold the LicensePool that @@ -772,7 +712,7 @@ def extract_feed_data( # not going to put anything under metadata.circulation, # and any partial data that got added to # metadata.circulation is going to be removed. - metadata[internal_identifier.urn].circulation = None + metadata[external_identifier.urn].circulation = None if c_data_dict: circ_links_dict = {} # extract just the links to pass to CirculationData constructor @@ -788,7 +728,7 @@ def extract_feed_data( self._add_format_data(circulation) if circulation.formats: - metadata[internal_identifier.urn].circulation = circulation + metadata[external_identifier.urn].circulation = circulation else: # If the CirculationData has no formats, it # doesn't really offer any way to actually get the @@ -819,32 +759,22 @@ def handle_failure( """Convert a URN and a failure message that came in through an OPDS feed into an Identifier and a CoverageFailure object. - The Identifier may not be the one designated by `urn` (if it's - found in self.identifier_mapping) and the 'failure' may turn out not - to be a CoverageFailure at all -- if it's an Identifier, that means - that what a normal OPDSImporter would consider 'failure' is - considered success. + The 'failure' may turn out not to be a CoverageFailure at + all -- if it's an Identifier, that means that what a normal + OPDSImporter would consider 'failure' is considered success. """ external_identifier, ignore = Identifier.parse_urn(self._db, urn) - if self.identifier_mapping: - # The identifier found in the OPDS feed is different from - # the identifier we want to export. - internal_identifier = self.identifier_mapping.get( - external_identifier, external_identifier - ) - else: - internal_identifier = external_identifier if isinstance(failure, Identifier): # The OPDSImporter does not actually consider this a # failure. Signal success by returning the internal # identifier as the 'failure' object. - failure = internal_identifier + failure = external_identifier else: # This really is a failure. Associate the internal # identifier with the CoverageFailure object. 
- failure.obj = internal_identifier + failure.obj = external_identifier failure.collection = self.collection - return internal_identifier, failure + return external_identifier, failure @classmethod def _add_format_data(cls, circulation: CirculationData) -> None: @@ -1228,11 +1158,6 @@ def coveragefailure_from_message( # Identifier so we can't turn it into a CoverageFailure. return None - if cls.SUCCESS_STATUS_CODES and message.status_code in cls.SUCCESS_STATUS_CODES: - # This message is telling us that nothing went wrong. It - # should be treated as a success. - return identifier # type: ignore[no-any-return] - if message.status_code == 200: # By default, we treat a message with a 200 status code # as though nothing had been returned at all. @@ -1647,7 +1572,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[OPDSImporter], + import_class: Type[BaseOPDSImporter], force_reimport: bool = False, **import_class_kwargs: Any, ) -> None: @@ -1764,17 +1689,6 @@ def _update_headers(self, headers: Optional[Dict[str, str]]) -> Dict[str, str]: return headers - def _parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: - """Extract the publication's identifier from its metadata. - - :param identifier: String containing the identifier - :type identifier: str - - :return: Identifier object - :rtype: Identifier - """ - return parse_identifier(self._db, identifier) - def opds_url(self, collection: Collection) -> Optional[str]: """Returns the OPDS import URL for the given collection. @@ -1806,7 +1720,7 @@ def feed_contains_new_data(self, feed: bytes | str) -> bool: new_data = False for raw_identifier, remote_updated in last_update_dates: - identifier = self._parse_identifier(raw_identifier) + identifier = self.importer.parse_identifier(raw_identifier) if not identifier: # Maybe this is new, maybe not, but we can't associate # the information with an Identifier, so we can't do @@ -1932,7 +1846,7 @@ def follow_one_link( def import_one_feed( self, feed: bytes | str - ) -> Tuple[List[Edition], Dict[str, CoverageFailure | List[CoverageFailure]]]: + ) -> Tuple[List[Edition], Dict[str, List[CoverageFailure]]]: """Import every book mentioned in an OPDS feed.""" # Because we are importing into a Collection, we will immediately @@ -1952,13 +1866,7 @@ def import_one_feed( ) # Create CoverageRecords for the failures. 
- for urn, failure in list(failures.items()): - failure_items: List[CoverageFailure] - if isinstance(failure, list): - failure_items = failure - else: - failure_items = [failure] - + for urn, failure_items in list(failures.items()): for failure_item in failure_items: failure_item.to_coverage_record( operation=CoverageRecord.IMPORT_OPERATION diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index 575b870229..1417bb7226 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -432,19 +432,13 @@ def test_collections_post_create( ] ) assert isinstance(l1.id, int) - assert ( - "l1_ils" - == collection.integration_configuration.for_library(l1.id).settings_dict[ - "ils_name" - ] - ) + l1_settings = collection.integration_configuration.for_library(l1.id) + assert l1_settings is not None + assert "l1_ils" == l1_settings.settings_dict["ils_name"] assert isinstance(l2.id, int) - assert ( - "l2_ils" - == collection.integration_configuration.for_library(l2.id).settings_dict[ - "ils_name" - ] - ) + l2_settings = collection.integration_configuration.for_library(l2.id) + assert l2_settings is not None + assert "l2_ils" == l2_settings.settings_dict["ils_name"] # This collection will be a child of the first collection. with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): @@ -482,12 +476,9 @@ def test_collections_post_create( # One library has access to the collection. assert [child] == l3.collections assert isinstance(l3.id, int) - assert ( - "l3_ils" - == child.integration_configuration.for_library(l3.id).settings_dict[ - "ils_name" - ] - ) + l3_settings = child.integration_configuration.for_library(l3.id) + assert l3_settings is not None + assert "l3_ils" == l3_settings.settings_dict["ils_name"] def test_collections_post_edit( self, settings_ctrl_fixture: SettingsControllerFixture @@ -544,9 +535,9 @@ def test_collections_post_edit( "overdrive_website_id" ) assert isinstance(l1.id, int) - assert "the_ils" == collection.integration_configuration.for_library( - l1.id - ).settings_dict.get("ils_name") + l1_settings = collection.integration_configuration.for_library(l1.id) + assert l1_settings is not None + assert "the_ils" == l1_settings.settings_dict.get("ils_name") with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( @@ -698,9 +689,9 @@ def test_collections_post_edit_library_specific_configuration( # Additional settings were set on the collection+library. assert isinstance(l1.id, int) - assert "14" == collection.integration_configuration.for_library( - l1.id - ).settings_dict.get("ebook_loan_duration") + l1_settings = collection.integration_configuration.for_library(l1.id) + assert l1_settings is not None + assert "14" == l1_settings.settings_dict.get("ebook_loan_duration") # Remove the connection between collection and library. 
with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 603e67b2c6..3122398d97 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -18,8 +18,7 @@ "tests.fixtures.api_novelist_files", "tests.fixtures.api_nyt_files", "tests.fixtures.api_odilo_files", - "tests.fixtures.api_odl2_files", - "tests.fixtures.api_odl_files", + "tests.fixtures.api_odl", "tests.fixtures.api_onix_files", "tests.fixtures.api_opds_dist_files", "tests.fixtures.api_opds_files", diff --git a/tests/api/test_odl.py b/tests/api/test_odl.py index 733a948d95..a701667bab 100644 --- a/tests/api/test_odl.py +++ b/tests/api/test_odl.py @@ -1,13 +1,13 @@ +from __future__ import annotations + import datetime import json import urllib.parse -import uuid -from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any, Dict import dateutil import pytest from freezegun import freeze_time -from jinja2 import Template from api.circulation import HoldInfo from api.circulation_exceptions import ( @@ -36,80 +36,23 @@ from core.util import datetime_helpers from core.util.datetime_helpers import datetime_utc, utc_now from core.util.http import BadResponseException -from tests.fixtures.api_odl_files import ODLAPIFilesFixture +from tests.fixtures.api_odl import ( + LicenseHelper, + LicenseInfoHelper, + MockGet, + OdlImportTemplatedFixture, +) from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.files import APIFilesFixture from tests.fixtures.odl import ODLAPITestFixture, ODLTestFixture if TYPE_CHECKING: from core.model import LicensePool -class LicenseHelper: - """Represents an ODL license.""" - - def __init__( - self, - identifier: Optional[str] = None, - checkouts: Optional[int] = None, - concurrency: Optional[int] = None, - expires: Optional[Union[datetime.datetime, str]] = None, - ) -> None: - """Initialize a new instance of LicenseHelper class. 
- - :param identifier: License's identifier - :param checkouts: Total number of checkouts before a license expires - :param concurrency: Number of concurrent checkouts allowed - :param expires: Date & time when a license expires - """ - self.identifier: str = identifier if identifier else f"urn:uuid:{uuid.uuid1()}" - self.checkouts: Optional[int] = checkouts - self.concurrency: Optional[int] = concurrency - if isinstance(expires, datetime.datetime): - self.expires = expires.isoformat() - else: - self.expires: Optional[str] = expires # type: ignore - - -class LicenseInfoHelper: - """Represents information about the current state of a license stored in the License Info Document.""" - - def __init__( - self, - license: LicenseHelper, - available: int, - status: str = "available", - left: Optional[int] = None, - ) -> None: - """Initialize a new instance of LicenseInfoHelper class.""" - self.license: LicenseHelper = license - self.status: str = status - self.left: Optional[int] = left - self.available: int = available - - def __str__(self) -> str: - """Return a JSON representation of a part of the License Info Document.""" - output = { - "identifier": self.license.identifier, - "status": self.status, - "terms": { - "concurrency": self.license.concurrency, - }, - "checkouts": { - "available": self.available, - }, - } - if self.license.expires is not None: - output["terms"]["expires"] = self.license.expires # type: ignore - if self.left is not None: - output["checkouts"]["left"] = self.left # type: ignore - return json.dumps(output) - - class TestODLAPI: def test_get_license_status_document_success( self, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # With a new loan. loan, _ = odl_api_test_fixture.license.loan_to(odl_api_test_fixture.patron) odl_api_test_fixture.api.queue_response( @@ -172,7 +115,7 @@ def test_get_license_status_document_success( def test_get_license_status_document_errors( self, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: loan, _ = odl_api_test_fixture.license.loan_to(odl_api_test_fixture.patron) odl_api_test_fixture.api.queue_response(200, content="not json") @@ -193,7 +136,7 @@ def test_get_license_status_document_errors( def test_checkin_success( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # A patron has a copy of this book checked out. odl_api_test_fixture.license.setup(concurrency=7, available=6) # type: ignore[attr-defined] @@ -218,7 +161,7 @@ def test_checkin_success( def test_checkin_success_with_holds_queue( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # A patron has the only copy of this book checked out. odl_api_test_fixture.license.setup(concurrency=1, available=0) # type: ignore[attr-defined] loan, _ = odl_api_test_fixture.license.loan_to(odl_api_test_fixture.patron) @@ -248,7 +191,7 @@ def test_checkin_success_with_holds_queue( def test_checkin_already_fulfilled( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # The loan is already fulfilled. odl_api_test_fixture.license.setup(concurrency=7, available=6) # type: ignore[attr-defined] loan, _ = odl_api_test_fixture.license.loan_to(odl_api_test_fixture.patron) @@ -272,7 +215,7 @@ def test_checkin_already_fulfilled( def test_checkin_not_checked_out( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # Not checked out locally. 
pytest.raises( NotCheckedOut, @@ -304,7 +247,7 @@ def test_checkin_not_checked_out( def test_checkin_cannot_return( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # Not fulfilled yet, but no return link from the distributor. loan, ignore = odl_api_test_fixture.license.loan_to(odl_api_test_fixture.patron) loan.external_identifier = db.fresh_str() @@ -345,7 +288,7 @@ def test_checkin_cannot_return( def test_checkout_success( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # This book is available to check out. odl_api_test_fixture.license.setup(concurrency=6, available=6, left=30) # type: ignore[attr-defined] @@ -378,7 +321,7 @@ def test_checkout_success( def test_checkout_success_with_hold( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # A patron has this book on hold, and the book just became available to check out. odl_api_test_fixture.pool.on_hold_to( odl_api_test_fixture.patron, @@ -420,7 +363,7 @@ def test_checkout_success_with_hold( def test_checkout_already_checked_out( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: odl_api_test_fixture.license.setup(concurrency=2, available=1) # type: ignore[attr-defined] # Checkout succeeds the first time @@ -433,7 +376,7 @@ def test_checkout_already_checked_out( def test_checkout_expired_hold( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # The patron was at the beginning of the hold queue, but the hold already expired. yesterday = utc_now() - datetime.timedelta(days=1) hold, _ = odl_api_test_fixture.pool.on_hold_to( @@ -455,7 +398,7 @@ def test_checkout_expired_hold( def test_checkout_no_available_copies( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # A different patron has the only copy checked out. odl_api_test_fixture.license.setup(concurrency=1, available=0) # type: ignore[attr-defined] existing_loan, _ = odl_api_test_fixture.license.loan_to(db.patron()) @@ -529,7 +472,7 @@ def test_checkout_no_available_copies( def test_checkout_no_licenses( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: odl_api_test_fixture.license.setup(concurrency=1, available=1, left=0) # type: ignore[attr-defined] pytest.raises( @@ -545,7 +488,7 @@ def test_checkout_no_licenses( def test_checkout_when_all_licenses_expired( self, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # license expired by expiration date odl_api_test_fixture.license.setup( # type: ignore[attr-defined] concurrency=1, @@ -582,7 +525,7 @@ def test_checkout_when_all_licenses_expired( def test_checkout_cannot_loan( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: lsd = json.dumps( { "status": "revoked", @@ -671,11 +614,11 @@ def test_fulfill_success( self, odl_api_test_fixture: ODLAPITestFixture, db: DatabaseTransactionFixture, - delivery_mechanism, - correct_type, - correct_link, - links, - ): + delivery_mechanism: str, + correct_type: str, + correct_link: str, + links: Dict[str, Any], + ) -> None: # Fulfill a loan in a way that gives access to a license file. 
odl_api_test_fixture.license.setup(concurrency=1, available=1) # type: ignore[attr-defined] odl_api_test_fixture.checkout() @@ -708,7 +651,7 @@ def test_fulfill_success( def test_fulfill_cannot_fulfill( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: odl_api_test_fixture.license.setup(concurrency=7, available=7) # type: ignore[attr-defined] odl_api_test_fixture.checkout() @@ -751,7 +694,7 @@ def _holdinfo_from_hold(self, hold: Hold) -> HoldInfo: def test_count_holds_before( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: now = utc_now() yesterday = now - datetime.timedelta(days=1) tomorrow = now + datetime.timedelta(days=1) @@ -803,7 +746,7 @@ def test_count_holds_before( def test_update_hold_end_date( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: now = utc_now() tomorrow = now + datetime.timedelta(days=1) yesterday = now - datetime.timedelta(days=1) @@ -823,6 +766,7 @@ def test_update_hold_end_date( config = odl_api_test_fixture.collection.integration_configuration.for_library( library.id ) + assert config is not None DatabaseTransactionFixture.set_settings( config, **{ @@ -989,7 +933,7 @@ def test_update_hold_end_date( def test_update_hold_position( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: now = utc_now() yesterday = now - datetime.timedelta(days=1) tomorrow = now + datetime.timedelta(days=1) @@ -1044,7 +988,7 @@ def test_update_hold_position( def test_update_hold_data( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: hold, is_new = odl_api_test_fixture.pool.on_hold_to( odl_api_test_fixture.patron, utc_now(), @@ -1057,7 +1001,7 @@ def test_update_hold_data( def test_update_hold_queue( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: licenses = [odl_api_test_fixture.license] DatabaseTransactionFixture.set_settings( @@ -1191,7 +1135,7 @@ def test_update_hold_queue( def test_place_hold_success( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: loan, _ = odl_api_test_fixture.checkout(patron=db.patron()) hold = odl_api_test_fixture.api.place_hold( @@ -1211,7 +1155,9 @@ def test_place_hold_success( assert loan.end_date == hold.end_date assert 1 == hold.hold_position - def test_place_hold_already_on_hold(self, odl_api_test_fixture: ODLAPITestFixture): + def test_place_hold_already_on_hold( + self, odl_api_test_fixture: ODLAPITestFixture + ) -> None: odl_api_test_fixture.license.setup(concurrency=1, available=0) # type: ignore[attr-defined] odl_api_test_fixture.pool.on_hold_to(odl_api_test_fixture.patron) pytest.raises( @@ -1225,7 +1171,7 @@ def test_place_hold_already_on_hold(self, odl_api_test_fixture: ODLAPITestFixtur def test_place_hold_currently_available( self, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: pytest.raises( CurrentlyAvailable, odl_api_test_fixture.api.place_hold, @@ -1237,7 +1183,7 @@ def test_place_hold_currently_available( def test_release_hold_success( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: loan_patron = db.patron() odl_api_test_fixture.checkout(patron=loan_patron) odl_api_test_fixture.pool.on_hold_to(odl_api_test_fixture.patron, position=1) @@ -1275,7 +1221,9 @@ def test_release_hold_success( assert 1 == db.session.query(Hold).count() assert 0 == other_hold.position - 
def test_release_hold_not_on_hold(self, odl_api_test_fixture: ODLAPITestFixture): + def test_release_hold_not_on_hold( + self, odl_api_test_fixture: ODLAPITestFixture + ) -> None: pytest.raises( NotOnHold, odl_api_test_fixture.api.release_hold, @@ -1286,7 +1234,7 @@ def test_release_hold_not_on_hold(self, odl_api_test_fixture: ODLAPITestFixture) def test_patron_activity_loan( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: # No loans yet. assert [] == odl_api_test_fixture.api.patron_activity( odl_api_test_fixture.patron, "pin" @@ -1383,7 +1331,7 @@ def test_patron_activity_loan( def test_update_loan_still_active( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: odl_api_test_fixture.license.setup(concurrency=6, available=6) # type: ignore[attr-defined] loan, _ = odl_api_test_fixture.license.loan_to(odl_api_test_fixture.patron) loan.external_identifier = db.fresh_str() @@ -1398,7 +1346,7 @@ def test_update_loan_still_active( def test_update_loan_removes_loan( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: odl_api_test_fixture.license.setup(concurrency=7, available=7) # type: ignore[attr-defined] _, loan = odl_api_test_fixture.checkout() @@ -1417,7 +1365,7 @@ def test_update_loan_removes_loan( def test_update_loan_removes_loan_with_hold_queue( self, db: DatabaseTransactionFixture, odl_api_test_fixture: ODLAPITestFixture - ): + ) -> None: _, loan = odl_api_test_fixture.checkout() hold, _ = odl_api_test_fixture.pool.on_hold_to(db.patron(), position=1) odl_api_test_fixture.pool.update_availability_from_licenses() @@ -1441,92 +1389,13 @@ def test_update_loan_removes_loan_with_hold_queue( class TestODLImporter: - class MockGet: - def __init__(self): - self.responses = [] - - def get(self, *args, **kwargs): - return 200, {}, str(self.responses.pop(0)) - - def add(self, item): - return self.responses.append(item) - - class MockMetadataClient: - def canonicalize_author_name(self, identifier, working_display_name): - return working_display_name - - @pytest.fixture() - def mock_get(self) -> MockGet: - return self.MockGet() - - @pytest.fixture() - def importer( + @freeze_time("2019-01-01T00:00:00+00:00") + def test_import( self, - db: DatabaseTransactionFixture, + odl_importer: ODLImporter, + odl_mock_get: MockGet, odl_test_fixture: ODLTestFixture, - mock_get, - ) -> ODLImporter: - library = odl_test_fixture.library() - return ODLImporter( - db.session, - collection=odl_test_fixture.collection(library), - http_get=mock_get.get, - ) - - @pytest.fixture() - def datasource( - self, db: DatabaseTransactionFixture, odl_test_fixture: ODLTestFixture - ) -> DataSource: - collection = odl_test_fixture.collection(odl_test_fixture.library()) - data_source = DataSource.lookup(db.session, "Feedbooks", autocreate=True) - DatabaseTransactionFixture.set_settings( - collection.integration_configuration, - **{Collection.DATA_SOURCE_NAME_SETTING: data_source.name}, - ) - return data_source - - @pytest.fixture() - def feed_template(self): - return "feed_template.xml.jinja" - - @pytest.fixture() - def import_templated( - self, - mock_get, - importer, - feed_template: str, - api_odl_files_fixture: ODLAPIFilesFixture, - ) -> Callable: - def i(licenses: List[LicenseInfoHelper]) -> Tuple[List, List, List, List]: - feed_licenses = [l.license for l in licenses] - [mock_get.add(l) for l in licenses] - feed = self.get_templated_feed( - files=api_odl_files_fixture, - filename=feed_template, 
- licenses=feed_licenses, - ) - return importer.import_from_feed(feed) - - return i - - def get_templated_feed( - self, files: APIFilesFixture, filename: str, licenses: List[LicenseHelper] - ) -> str: - """Get the test ODL feed with specific licensing information. - - :param files: Access to test files - :param filename: Name of template to load - :param licenses: List of ODL licenses - - :return: Test ODL feed - """ - text = files.sample_text(filename) - template = Template(text) - feed = template.render(licenses=licenses) - return feed - - @freeze_time("2019-01-01T00:00:00+00:00") - def test_import(self, importer, mock_get, odl_test_fixture: ODLTestFixture): + ) -> None: """Ensure that ODLImporter correctly processes and imports the ODL feed encoded using OPDS 1.x. NOTE: `freeze_time` decorator is required to treat the licenses in the ODL feed as non-expired. @@ -1566,24 +1435,22 @@ def test_import(self, importer, mock_get, odl_test_fixture: ODLTestFixture): available=5, ) - [ - mock_get.add(r) - for r in [ - warrior_time_limited, - canadianity_loan_limited, - canadianity_perpetual, - midnight_loan_limited_1, - midnight_loan_limited_2, - dragons_loan, - ] - ] + for r in [ + warrior_time_limited, + canadianity_loan_limited, + canadianity_perpetual, + midnight_loan_limited_1, + midnight_loan_limited_2, + dragons_loan, + ]: + odl_mock_get.add(r) ( imported_editions, imported_pools, imported_works, failures, - ) = importer.import_from_feed(feed) + ) = odl_importer.import_from_feed(feed) # This importer works the same as the base OPDSImporter, except that # it extracts format information from 'odl:license' tags and creates @@ -1602,7 +1469,7 @@ def test_import(self, importer, mock_get, odl_test_fixture: ODLTestFixture): warrior, blazing, midnight, - ] = sorted(imported_editions, key=lambda x: x.title) + ] = sorted(imported_editions, key=lambda x: str(x.title)) assert "The Blazing World" == blazing.title assert "Sun Warrior" == warrior.title assert "Canadianity" == canadianity.title @@ -1707,7 +1574,7 @@ def test_import(self, importer, mock_get, odl_test_fixture: ODLTestFixture): ) # 40 remaining checkouts + 1 perpetual license in the License Info Documents assert 11 == canadianity_pool.licenses_available [license1, license2] = sorted( - canadianity_pool.licenses, key=lambda x: x.identifier + canadianity_pool.licenses, key=lambda x: str(x.identifier) ) assert "2" == license1.identifier assert ( @@ -1756,7 +1623,7 @@ def test_import(self, importer, mock_get, odl_test_fixture: ODLTestFixture): ) # 20 + 52 remaining checkouts in corresponding License Info Documents assert 2 == midnight_pool.licenses_available [license1, license2] = sorted( - midnight_pool.licenses, key=lambda x: x.identifier + midnight_pool.licenses, key=lambda x: str(x.identifier) ) assert "4" == license1.identifier assert ( @@ -1783,6 +1650,8 @@ def test_import(self, importer, mock_get, odl_test_fixture: ODLTestFixture): assert 52 == license2.checkouts_left assert 1 == license2.checkouts_available + +class TestOdlAndOdl2Importer: @pytest.mark.parametrize( "license", [ @@ -1819,12 +1688,19 @@ def test_import(self, importer, mock_get, odl_test_fixture: ODLTestFixture): ], ) @freeze_time("2021-01-01T00:00:00+00:00") - def test_odl_importer_expired_licenses(self, import_templated, license): + def test_odl_importer_expired_licenses( + self, + odl_import_templated: OdlImportTemplatedFixture, + license: LicenseInfoHelper, + ): """Ensure ODLImporter imports expired licenses, but does not count them.""" # Import the test feed with an 
expired ODL license. - imported_editions, imported_pools, imported_works, failures = import_templated( - [license] - ) + ( + imported_editions, + imported_pools, + imported_works, + failures, + ) = odl_import_templated([license]) # The importer created 1 edition and 1 work with no failures. assert failures == {} @@ -1843,7 +1719,9 @@ def test_odl_importer_expired_licenses(self, import_templated, license): [imported_license] = imported_pool.licenses assert imported_license.is_inactive is True - def test_odl_importer_reimport_expired_licenses(self, import_templated): + def test_odl_importer_reimport_expired_licenses( + self, odl_import_templated: OdlImportTemplatedFixture + ): license_expiry = dateutil.parser.parse("2021-01-01T00:01:00+00:00") licenses = [ LicenseInfoHelper( @@ -1860,7 +1738,7 @@ def test_odl_importer_reimport_expired_licenses(self, import_templated): imported_pools, imported_works, failures, - ) = import_templated(licenses) + ) = odl_import_templated(licenses) # The importer created 1 edition and 1 work with no failures. assert failures == {} @@ -1886,7 +1764,7 @@ def test_odl_importer_reimport_expired_licenses(self, import_templated): imported_pools, imported_works, failures, - ) = import_templated(licenses) + ) = odl_import_templated(licenses) # The importer created 1 edition and 1 work with no failures. assert failures == {} @@ -1905,7 +1783,9 @@ def test_odl_importer_reimport_expired_licenses(self, import_templated): assert imported_license.is_inactive is True @freeze_time("2021-01-01T00:00:00+00:00") - def test_odl_importer_multiple_expired_licenses(self, import_templated): + def test_odl_importer_multiple_expired_licenses( + self, odl_import_templated: OdlImportTemplatedFixture + ): """Ensure ODLImporter imports expired licenses and does not count them in the total number of available licenses.""" @@ -1948,9 +1828,12 @@ def test_odl_importer_multiple_expired_licenses(self, import_templated): left=40, ), ] - imported_editions, imported_pools, imported_works, failures = import_templated( - active + inactive - ) + ( + imported_editions, + imported_pools, + imported_works, + failures, + ) = odl_import_templated(active + inactive) assert failures == {} @@ -1969,7 +1852,9 @@ def test_odl_importer_multiple_expired_licenses(self, import_templated): assert sum(not l.is_inactive for l in imported_pool.licenses) == len(active) assert sum(l.is_inactive for l in imported_pool.licenses) == len(inactive) - def test_odl_importer_reimport_multiple_licenses(self, import_templated): + def test_odl_importer_reimport_multiple_licenses( + self, odl_import_templated: OdlImportTemplatedFixture + ): """Ensure ODLImporter correctly imports licenses that have already been imported.""" # 1.1. Import the test feed with ODL licenses that are not expired. 
@@ -1995,7 +1880,7 @@ def test_odl_importer_reimport_multiple_licenses(self, import_templated): imported_pools, imported_works, failures, - ) = import_templated(licenses) + ) = odl_import_templated(licenses) # No failures in the import assert failures == {} @@ -2028,7 +1913,7 @@ def test_odl_importer_reimport_multiple_licenses(self, import_templated): imported_pools, imported_works, failures, - ) = import_templated(licenses) + ) = odl_import_templated(licenses) # No failures in the import assert failures == {} diff --git a/tests/api/test_odl2.py b/tests/api/test_odl2.py index dd2cdf0778..444e9b5fbe 100644 --- a/tests/api/test_odl2.py +++ b/tests/api/test_odl2.py @@ -1,5 +1,5 @@ import datetime -from typing import Callable, List, Tuple +from typing import List, Optional import pytest from freezegun import freeze_time @@ -10,7 +10,7 @@ ) from api.circulation_exceptions import PatronHoldLimitReached, PatronLoanLimitReached -from api.odl2 import ODL2API, ODL2Importer +from api.odl2 import ODL2Importer from core.coverage import CoverageFailure from core.model import ( Contribution, @@ -19,6 +19,7 @@ Edition, EditionConstants, LicensePool, + LicensePoolDeliveryMechanism, MediaTypes, Work, create, @@ -26,93 +27,49 @@ from core.model.constants import IdentifierConstants from core.model.patron import Hold from core.model.resource import Hyperlink -from tests.api.test_odl import LicenseHelper, LicenseInfoHelper, TestODLImporter -from tests.fixtures.api_odl2_files import ODL2APIFilesFixture +from tests.fixtures.api_odl import ( + LicenseHelper, + LicenseInfoHelper, + MockGet, + ODL2APIFilesFixture, +) from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.odl import ODL2APITestFixture, ODLTestFixture +from tests.fixtures.odl import ODL2APITestFixture -class TestODL2Importer(TestODLImporter): +class TestODL2Importer: @staticmethod def _get_delivery_mechanism_by_drm_scheme_and_content_type( - delivery_mechanisms, content_type, drm_scheme - ): + delivery_mechanisms: List[LicensePoolDeliveryMechanism], + content_type: str, + drm_scheme: str, + ) -> Optional[DeliveryMechanism]: """Find a license pool in the list by its identifier. 
:param delivery_mechanisms: List of delivery mechanisms - :type delivery_mechanisms: List[DeliveryMechanism] - :param content_type: Content type - :type content_type: str - :param drm_scheme: DRM scheme - :type drm_scheme: str - :return: Delivery mechanism with the the specified DRM scheme and content type (if any) - :rtype: Optional[DeliveryMechanism] + :return: Delivery mechanism with the specified DRM scheme and content type (if any) """ for delivery_mechanism in delivery_mechanisms: - delivery_mechanism = delivery_mechanism.delivery_mechanism + mechanism = delivery_mechanism.delivery_mechanism if ( - delivery_mechanism.drm_scheme == drm_scheme - and delivery_mechanism.content_type == content_type + mechanism.drm_scheme == drm_scheme + and mechanism.content_type == content_type ): - return delivery_mechanism + return mechanism return None - @pytest.fixture - def integration_protocol(self): - return ODL2API.NAME - - @pytest.fixture() - def import_templated( # type: ignore - self, - mock_get, - importer, - feed_template: str, - api_odl2_files_fixture: ODL2APIFilesFixture, - ) -> Callable: - def i(licenses: List[LicenseInfoHelper]) -> Tuple[List, List, List, List]: - feed_licenses = [l.license for l in licenses] - [mock_get.add(l) for l in licenses] - feed = self.get_templated_feed( - files=api_odl2_files_fixture, - filename=feed_template, - licenses=feed_licenses, - ) - return importer.import_from_feed(feed) - - return i - - @pytest.fixture() - def importer( # type: ignore[override] - self, - db: DatabaseTransactionFixture, - odl_test_fixture: ODLTestFixture, - mock_get, - ) -> ODL2Importer: - library = odl_test_fixture.library() - return ODL2Importer( - db.session, - collection=odl_test_fixture.collection(library), - http_get=mock_get.get, - ) - - @pytest.fixture() - def feed_template(self): - return "feed_template.json.jinja" - @freeze_time("2016-01-01T00:00:00+00:00") def test_import( self, - db: DatabaseTransactionFixture, - importer, - mock_get, - datasource, + odl2_importer: ODL2Importer, + odl_mock_get: MockGet, api_odl2_files_fixture: ODL2APIFilesFixture, - ): + ) -> None: """Ensure that ODL2Importer2 correctly processes and imports the ODL feed encoded using OPDS 2.x. NOTE: `freeze_time` decorator is required to treat the licenses in the ODL feed as non-expired. 
@@ -129,17 +86,17 @@ def test_import( available=10, ) - mock_get.add(moby_dick_license) + odl_mock_get.add(moby_dick_license) feed = api_odl2_files_fixture.sample_text("feed.json") - config = importer.collection.integration_configuration - importer.set_ignored_identifier_types([IdentifierConstants.URI], config) + config = odl2_importer.collection.integration_configuration + odl2_importer.set_ignored_identifier_types([IdentifierConstants.URI], config) DatabaseTransactionFixture.set_settings( config, odl2_skipped_license_formats=["text/html"] ) # Act - imported_editions, pools, works, failures = importer.import_from_feed(feed) + imported_editions, pools, works, failures = odl2_importer.import_from_feed(feed) # Assert @@ -174,7 +131,7 @@ def test_import( assert moby_dick_edition == moby_dick_author_author_contribution.edition assert Contributor.AUTHOR_ROLE == moby_dick_author_author_contribution.role - assert datasource == moby_dick_edition.data_source + assert "Feedbooks" == moby_dick_edition.data_source.name assert "Test Publisher" == moby_dick_edition.publisher assert datetime.date(2015, 9, 29) == moby_dick_edition.published @@ -249,6 +206,7 @@ def test_import( assert 1 == len(failures) huck_finn_failures = failures["9781234567897"] + assert isinstance(huck_finn_failures, list) assert 1 == len(huck_finn_failures) [huck_finn_failure] = huck_finn_failures assert isinstance(huck_finn_failure, CoverageFailure) @@ -268,22 +226,21 @@ def test_import( def test_import_audiobook_with_streaming( self, db: DatabaseTransactionFixture, - importer, - mock_get, - datasource, + odl2_importer: ODL2Importer, + odl_mock_get: MockGet, api_odl2_files_fixture: ODL2APIFilesFixture, - ): + ) -> None: """Ensure that ODL2Importer2 correctly processes and imports a feed with an audiobook.""" license = api_odl2_files_fixture.sample_text("license-audiobook.json") feed = api_odl2_files_fixture.sample_text("feed-audiobook-streaming.json") - mock_get.add(license) + odl_mock_get.add(license) - DatabaseTransactionFixture.set_settings( - importer.collection.integration_configuration, + db.set_settings( + odl2_importer.collection.integration_configuration, odl2_skipped_license_formats=["text/html"], ) - imported_editions, pools, works, failures = importer.import_from_feed(feed) + imported_editions, pools, works, failures = odl2_importer.import_from_feed(feed) # Make sure we imported one edition and it is an audiobook assert isinstance(imported_editions, list) @@ -327,21 +284,19 @@ def test_import_audiobook_with_streaming( @freeze_time("2016-01-01T00:00:00+00:00") def test_import_audiobook_no_streaming( self, - db: DatabaseTransactionFixture, - importer, - mock_get, - datasource, + odl2_importer: ODL2Importer, + odl_mock_get: MockGet, api_odl2_files_fixture: ODL2APIFilesFixture, - ): + ) -> None: """ Ensure that ODL2Importer2 correctly processes and imports a feed with an audiobook that is not available for streaming. 
""" license = api_odl2_files_fixture.sample_text("license-audiobook.json") feed = api_odl2_files_fixture.sample_text("feed-audiobook-no-streaming.json") - mock_get.add(license) + odl_mock_get.add(license) - imported_editions, pools, works, failures = importer.import_from_feed(feed) + imported_editions, pools, works, failures = odl2_importer.import_from_feed(feed) # Make sure we imported one edition and it is an audiobook assert isinstance(imported_editions, list) diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 1b40710aab..2aa3fee55c 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -1,4 +1,5 @@ import random +from functools import partial from io import StringIO from typing import Optional from unittest.mock import MagicMock, patch @@ -70,13 +71,27 @@ def update_work_for_edition(self, edition, *args, **kwargs): class OPDSImporterFixture: - transaction: DatabaseTransactionFixture - content_server_feed: bytes - content_server_mini_feed: str - audiobooks_opds: bytes - wayfless_feed: bytes - feed_with_id_and_dcterms_identifier: bytes - service: ExternalIntegration + def __init__( + self, db: DatabaseTransactionFixture, opds_files_fixture: OPDSFilesFixture + ): + self.db = db + self.content_server_feed = opds_files_fixture.sample_data("content_server.opds") + self.content_server_mini_feed = opds_files_fixture.sample_text( + "content_server_mini.opds" + ) + self.audiobooks_opds = opds_files_fixture.sample_data("audiobooks.opds") + self.wayfless_feed = opds_files_fixture.sample_data("wayfless.opds") + self.feed_with_id_and_dcterms_identifier = opds_files_fixture.sample_data( + "feed_with_id_and_dcterms_identifier.opds" + ) + self.importer = partial( + OPDSImporter, _db=self.db.session, collection=self.db.default_collection() + ) + db.set_settings( + db.default_collection().integration_configuration, + "data_source", + DataSource.OA_CONTENT_SERVER, + ) @pytest.fixture() @@ -84,66 +99,50 @@ def opds_importer_fixture( db: DatabaseTransactionFixture, opds_files_fixture: OPDSFilesFixture, ) -> OPDSImporterFixture: - data = OPDSImporterFixture() - data.transaction = db - data.content_server_feed = opds_files_fixture.sample_data("content_server.opds") - data.content_server_mini_feed = opds_files_fixture.sample_text( - "content_server_mini.opds" - ) - data.audiobooks_opds = opds_files_fixture.sample_data("audiobooks.opds") - data.wayfless_feed = opds_files_fixture.sample_data("wayfless.opds") - data.feed_with_id_and_dcterms_identifier = opds_files_fixture.sample_data( - "feed_with_id_and_dcterms_identifier.opds" - ) - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - "data_source", - DataSource.OA_CONTENT_SERVER, - ) - + data = OPDSImporterFixture(db, opds_files_fixture) return data class TestOPDSImporter: def test_constructor(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) # The default way of making HTTP requests is with # Representation.cautious_http_get. - importer = OPDSImporter(session, collection=None) + importer = opds_importer_fixture.importer() assert Representation.cautious_http_get == importer.http_get # But you can pass in anything you want. 
do_get = MagicMock() - importer = OPDSImporter(session, collection=None, http_get=do_get) + importer = OPDSImporter( + session, collection=db.default_collection(), http_get=do_get + ) assert do_get == importer.http_get def test_data_source_autocreated(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - name = "New data source " + transaction.fresh_str() - importer = OPDSImporter(session, collection=None, data_source_name=name) + name = "New data source " + db.fresh_str() + importer = opds_importer_fixture.importer(data_source_name=name) source1 = importer.data_source assert name == source1.name def test_extract_next_links(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - importer = OPDSImporter( - session, collection=None, data_source_name=DataSource.NYT - ) + importer = opds_importer_fixture.importer() next_links = importer.extract_next_links(data.content_server_mini_feed) assert 1 == len(next_links) @@ -152,15 +151,13 @@ def test_extract_next_links(self, opds_importer_fixture: OPDSImporterFixture): def test_extract_last_update_dates( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - importer = OPDSImporter( - session, collection=None, data_source_name=DataSource.NYT - ) + importer = opds_importer_fixture.importer() # This file has two tags and one tag. # The tags have their last update dates extracted, @@ -183,15 +180,13 @@ def test_extract_last_update_dates( def test_extract_last_update_dates_ignores_entries_with_no_update( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - importer = OPDSImporter( - session, collection=None, data_source_name=DataSource.NYT - ) + importer = opds_importer_fixture.importer() # Rename the and tags in the content # server so they don't show up. 
@@ -203,16 +198,14 @@ def test_extract_last_update_dates_ignores_entries_with_no_update( assert [] == last_update_dates def test_extract_metadata(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - data_source_name = "Data source name " + transaction.fresh_str() - importer = OPDSImporter( - session, collection=None, data_source_name=data_source_name - ) + data_source_name = "Data source name " + db.fresh_str() + importer = opds_importer_fixture.importer(data_source_name=data_source_name) metadata, failures = importer.extract_feed_data(data.content_server_mini_feed) m1 = metadata["http://www.gutenberg.org/ebooks/10441"] @@ -228,7 +221,7 @@ def test_extract_metadata(self, opds_importer_fixture: OPDSImporterFixture): assert data_source_name == c1._data_source assert data_source_name == c2._data_source - [failure] = list(failures.values()) + [[failure]] = list(failures.values()) assert isinstance(failure, CoverageFailure) assert ( "202: I'm working to locate a source for this identifier." @@ -238,22 +231,17 @@ def test_extract_metadata(self, opds_importer_fixture: OPDSImporterFixture): def test_use_dcterm_identifier_as_id_with_id_and_dcterms_identifier( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - data_source_name = "Data source name " + transaction.fresh_str() - collection_to_test = transaction.default_collection() + collection_to_test = db.default_collection() collection_to_test.primary_identifier_source = ( ExternalIntegration.DCTERMS_IDENTIFIER ) - importer = OPDSImporter( - session, - collection=collection_to_test, - data_source_name=data_source_name, - ) + importer = opds_importer_fixture.importer(collection=collection_to_test) metadata, failures = importer.extract_feed_data( data.feed_with_id_and_dcterms_identifier @@ -290,20 +278,15 @@ def test_use_dcterm_identifier_as_id_with_id_and_dcterms_identifier( def test_use_id_with_existing_dcterms_identifier( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - data_source_name = "Data source name " + transaction.fresh_str() - collection_to_test = transaction.default_collection() + collection_to_test = db.default_collection() collection_to_test.primary_identifier_source = None - importer = OPDSImporter( - session, - collection=collection_to_test, - data_source_name=data_source_name, - ) + importer = opds_importer_fixture.importer(collection=collection_to_test) metadata, failures = importer.extract_feed_data( data.feed_with_id_and_dcterms_identifier @@ -372,14 +355,14 @@ def test_extract_link_rights_uri(self): def test_extract_data_from_feedparser( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) data_source = 
DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) - importer = OPDSImporter(session, None, data_source_name=data_source.name) + importer = opds_importer_fixture.importer(data_source_name=data_source.name) values, failures = importer.extract_data_from_feedparser( data.content_server_mini_feed, data_source ) @@ -402,10 +385,10 @@ def test_extract_data_from_feedparser( def test_extract_data_from_feedparser_handles_exception( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) class DoomedFeedparserOPDSImporter(OPDSImporter): @@ -417,7 +400,7 @@ def _data_detail_for_feedparser_entry(cls, entry, data_source): data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) importer = DoomedFeedparserOPDSImporter( - session, None, data_source_name=data_source.name + session, db.default_collection(), data_source_name=data_source.name ) values, failures = importer.extract_data_from_feedparser( data.content_server_mini_feed, data_source @@ -446,10 +429,10 @@ def _data_detail_for_feedparser_entry(cls, entry, data_source): def test_extract_metadata_from_elementtree( self, opds_importer_fixture: OPDSImporterFixture ): - fixture, transaction, session = ( + fixture, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) @@ -541,10 +524,10 @@ def test_extract_metadata_from_elementtree_treats_message_as_failure( opds_importer_fixture: OPDSImporterFixture, opds_files_fixture: OPDSFilesFixture, ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) @@ -616,20 +599,16 @@ def medium(additional_type, format, default="Default"): assert "Default" == medium("something-else", "image/jpeg") def test_handle_failure(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - axis_id = transaction.identifier(identifier_type=Identifier.AXIS_360_ID) - axis_isbn = transaction.identifier(Identifier.ISBN, "9781453219539") - identifier_mapping = {axis_isbn: axis_id} - importer = OPDSImporter( - session, - collection=None, + axis_id = db.identifier(identifier_type=Identifier.AXIS_360_ID) + axis_isbn = db.identifier(Identifier.ISBN, "9781453219539") + importer = opds_importer_fixture.importer( data_source_name=DataSource.OA_CONTENT_SERVER, - identifier_mapping=identifier_mapping, ) # The simplest case -- an identifier associated with a @@ -647,7 +626,7 @@ def test_handle_failure(self, opds_importer_fixture: OPDSImporterFixture): # because the 'failure' is an Identifier, not a # CoverageFailure, we're going to treat it as a success. 
identifier, not_a_failure = importer.handle_failure( - "urn:isbn:9781449358068", transaction.identifier() + "urn:isbn:9781449358068", db.identifier() ) assert expect_identifier == identifier assert identifier == not_a_failure @@ -655,30 +634,13 @@ def test_handle_failure(self, opds_importer_fixture: OPDSImporterFixture): # was passed in, not the Identifier that substituted as the 'failure'. # (In real usage, though, they should be the same.) - # An identifier that maps to some other identifier, - # associated with a CoverageFailure. - identifier, output_failure = importer.handle_failure( - axis_isbn.urn, input_failure - ) - assert axis_id == identifier - assert input_failure == output_failure - - # An identifier that maps to some other identifier, - # in a scenario where what OPDSImporter considers failure - # is considered success. - identifier, not_a_failure = importer.handle_failure( - axis_isbn.urn, transaction.identifier() - ) - assert axis_id == identifier - assert axis_id == not_a_failure - def test_coveragefailure_from_message( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) """Test all the different ways a tag might @@ -694,7 +656,7 @@ def f(*args): invalid_urn = f("urnblah", "500", "description") assert invalid_urn == None - identifier = transaction.identifier() + identifier = db.identifier() # If the 'message' is that everything is fine, no CoverageFailure # is created. @@ -716,56 +678,15 @@ def f(*args): no_information = f(identifier.urn, None, None) assert "No detail provided." == no_information.exception - def test_coveragefailure_from_message_with_success_status_codes( - self, opds_importer_fixture: OPDSImporterFixture - ): - data, transaction, session = ( - opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, - ) - - """When an OPDSImporter defines SUCCESS_STATUS_CODES, messages with - those status codes are always treated as successes. - """ - - class Mock(OPDSImporter): - SUCCESS_STATUS_CODES = [200, 999] - - data_source = DataSource.lookup(session, DataSource.OVERDRIVE) - - def f(*args): - message = OPDSMessage(*args) - return Mock.coveragefailure_from_message(data_source, message) - - identifier = transaction.identifier() - - # If the status code is 999, then the identifier is returned - # instead of a CoverageFailure -- we know that 999 means - # coverage was in fact provided. - failure = f(identifier.urn, "999", "hooray!") - assert identifier == failure - - # If the status code is 200, then the identifier is returned - # instead of None. - failure = f(identifier.urn, "200", "ok!") - assert identifier == failure - - # If the status code is anything else, a CoverageFailure - # is returned. - failure = f(identifier.urn, 500, "hooray???") - assert isinstance(failure, CoverageFailure) - assert "500: hooray???" 
== failure.exception - def test_extract_metadata_from_elementtree_handles_messages_that_become_identifiers( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) - not_a_failure = transaction.identifier() + not_a_failure = db.identifier() class MockOPDSImporter(OPDSImporter): @classmethod @@ -788,10 +709,10 @@ def coveragefailures_from_messages( def test_extract_metadata_from_elementtree_handles_exception( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) class DoomedElementtreeOPDSImporter(OPDSImporter): @@ -839,41 +760,41 @@ def _detail_for_elementtree_entry(cls, *args, **kwargs): assert "Utter failure!" in failure.exception def test_import_exception_if_unable_to_parse_feed( - self, db: DatabaseTransactionFixture + self, opds_importer_fixture: OPDSImporterFixture ): feed = "I am not a feed." - importer = OPDSImporter(db.session, collection=None) + importer = opds_importer_fixture.importer() pytest.raises(etree.XMLSyntaxError, importer.import_from_feed, feed) def test_import(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) feed = data.content_server_mini_feed - imported_editions, pools, works, failures = OPDSImporter( - session, collection=None - ).import_from_feed(feed) + collection = db.default_collection() + importer = opds_importer_fixture.importer( + collection=collection, data_source_name=DataSource.METADATA_WRANGLER + ) + imported_editions, pools, works, failures = importer.import_from_feed(feed) [crow, mouse] = sorted(imported_editions, key=lambda x: str(x.title)) - # By default, this feed is treated as though it came from the - # metadata wrangler. No Work has been created. - assert DataSource.METADATA_WRANGLER == crow.data_source.name - assert None == crow.work - assert [] == crow.license_pools - assert Edition.BOOK_MEDIUM == crow.medium + # Work was created for both books. + assert crow.data_source.name == DataSource.METADATA_WRANGLER + assert crow.work is not None + assert crow.medium == Edition.BOOK_MEDIUM + assert crow.license_pools[0].collection == db.default_collection() - # not even the 'mouse' - assert None == mouse.work - assert Edition.PERIODICAL_MEDIUM == mouse.medium + assert mouse.work is not None + assert mouse.medium == Edition.PERIODICAL_MEDIUM - # Three links have been added to the identifier of the 'mouse' + # Four links have been added to the identifier of the 'mouse' # edition. - image, thumbnail, description = sorted( + acquisition, image, thumbnail, description = sorted( mouse.primary_identifier.links, key=lambda x: str(x.rel) ) @@ -895,8 +816,8 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): assert Representation.PNG_MEDIA_TYPE == thumbnail_rep.media_type assert image_rep == thumbnail_rep.thumbnail_of - # Two links were added to the identifier of the 'crow' edition. 
- [broken_image, working_image] = sorted( + # Three links were added to the identifier of the 'crow' edition. + broken_image, working_image, acquisition = sorted( crow.primary_identifier.links, key=lambda x: str(x.resource.url) ) @@ -949,32 +870,14 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): classifier = Classifier.classifiers.get(seven.subject.type, None) classifier.classify(seven.subject) - # If we import the same file again, we get the same list of Editions. - imported_editions_2, pools_2, works_2, failures_2 = OPDSImporter( - session, collection=None - ).import_from_feed(feed) - assert imported_editions_2 == imported_editions - - # importing with a collection and a lendable data source makes - # license pools and works. - imported_editions, pools, works, failures = OPDSImporter( - session, - collection=transaction.default_collection(), - data_source_name=DataSource.OA_CONTENT_SERVER, - ).import_from_feed(feed) - [crow_pool, mouse_pool] = sorted( pools, key=lambda x: x.presentation_edition.title ) - assert transaction.default_collection() == crow_pool.collection - assert transaction.default_collection() == mouse_pool.collection - # Work was created for both books. + assert db.default_collection() == crow_pool.collection + assert db.default_collection() == mouse_pool.collection assert crow_pool.work is not None - assert Edition.BOOK_MEDIUM == crow_pool.presentation_edition.medium - assert mouse_pool.work is not None - assert Edition.PERIODICAL_MEDIUM == mouse_pool.presentation_edition.medium work = mouse_pool.work work.calculate_presentation() @@ -988,61 +891,42 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): assert DeliveryMechanism.NO_DRM == mech.delivery_mechanism.drm_scheme assert "http://www.gutenberg.org/ebooks/10441.epub.images" == mech.resource.url + # If we import the same file again, we get the same list of Editions. + imported_editions_2, pools_2, works_2, failures_2 = importer.import_from_feed( + feed + ) + assert imported_editions_2 == imported_editions + def test_import_with_lendability(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) """Test that OPDS import creates Edition, LicensePool, and Work objects, as appropriate. - - When there is no Collection, it is appropriate to create - Editions, but not LicensePools or Works. When there is a - Collection, it is appropriate to create all three. """ feed = data.content_server_mini_feed # This import will create Editions, but not LicensePools or # Works, because there is no Collection. - importer_mw = OPDSImporter( - session, collection=None, data_source_name=DataSource.METADATA_WRANGLER - ) - ( - imported_editions_mw, - pools_mw, - works_mw, - failures_mw, - ) = importer_mw.import_from_feed(feed) + importer = opds_importer_fixture.importer() + imported_editions, pools, works, failures = importer.import_from_feed(feed) # Both editions were imported, because they were new. - assert 2 == len(imported_editions_mw) + assert 2 == len(imported_editions) - # But pools and works weren't created, because there is no Collection. - assert 0 == len(pools_mw) - assert 0 == len(works_mw) + # And pools and works were created + assert 2 == len(pools) + assert 2 == len(works) # 1 error message, corresponding to the tag # at the end of content_server_mini.opds. 
- assert 1 == len(failures_mw) - - # Try again, with a Collection to contain the LicensePools. - importer_g = OPDSImporter( - session, - collection=transaction.default_collection(), - ) - imported_editions_g, pools_g, works_g, failures_g = importer_g.import_from_feed( - feed - ) - - # now pools and works are in, too - assert 1 == len(failures_g) - assert 2 == len(pools_g) - assert 2 == len(works_g) + assert 1 == len(failures) # The pools have presentation editions. assert {"The Green Mouse", "Johnny Crow's Party"} == { - x.presentation_edition.title for x in pools_g + x.presentation_edition.title for x in pools } # The information used to create the first LicensePool said @@ -1052,7 +936,7 @@ def test_import_with_lendability(self, opds_importer_fixture: OPDSImporterFixtur # so the source of the OPDS feed (the open-access content server) # was used. assert {DataSource.GUTENBERG, DataSource.OA_CONTENT_SERVER} == { - pool.data_source.name for pool in pools_g + pool.data_source.name for pool in pools } def test_import_with_unrecognized_distributor_creates_distributor( @@ -1060,10 +944,10 @@ def test_import_with_unrecognized_distributor_creates_distributor( opds_importer_fixture: OPDSImporterFixture, opds_files_fixture: OPDSFilesFixture, ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) """We get a book from a previously unknown data source, with a license that comes from a second previously unknown data source. The @@ -1071,14 +955,11 @@ def test_import_with_unrecognized_distributor_creates_distributor( """ feed = opds_files_fixture.sample_data("unrecognized_distributor.opds") DatabaseTransactionFixture.set_settings( - transaction.default_collection().integration_configuration, + db.default_collection().integration_configuration, "data_source", "some new source", ) - importer = OPDSImporter( - session, - collection=transaction.default_collection(), - ) + importer = opds_importer_fixture.importer() imported_editions, pools, works, failures = importer.import_from_feed(feed) assert {} == failures @@ -1101,15 +982,15 @@ def test_import_updates_metadata( opds_importer_fixture: OPDSImporterFixture, opds_files_fixture: OPDSFilesFixture, ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) feed = opds_files_fixture.sample_text("metadata_wrangler_overdrive.opds") - edition, is_new = transaction.edition( + edition, is_new = db.edition( DataSource.OVERDRIVE, Identifier.OVERDRIVE_ID, with_license_pool=True ) [old_license_pool] = edition.license_pools @@ -1119,14 +1000,16 @@ def test_import_updates_metadata( feed = feed.replace("{OVERDRIVE ID}", edition.primary_identifier.identifier) DatabaseTransactionFixture.set_settings( - transaction.default_collection().integration_configuration, + db.default_collection().integration_configuration, "data_source", DataSource.OVERDRIVE, ) - imported_editions, imported_pools, imported_works, failures = OPDSImporter( - session, - collection=transaction.default_collection(), - ).import_from_feed(feed) + ( + imported_editions, + imported_pools, + imported_works, + failures, + ) = opds_importer_fixture.importer().import_from_feed(feed) # The edition we created has had its metadata updated. 
[new_edition] = imported_editions @@ -1141,19 +1024,16 @@ def test_import_updates_metadata( def test_import_from_license_source( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) # Instead of importing this data as though it came from the # metadata wrangler, let's import it as though it came from the # open-access content server. feed = data.content_server_mini_feed - importer = OPDSImporter( - session, - collection=transaction.default_collection(), - ) + importer = opds_importer_fixture.importer() ( imported_editions, @@ -1206,17 +1086,20 @@ def test_import_from_feed_treats_message_as_failure( opds_importer_fixture: OPDSImporterFixture, opds_files_fixture: OPDSFilesFixture, ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) feed = opds_files_fixture.sample_data("unrecognized_identifier.opds") - imported_editions, imported_pools, imported_works, failures = OPDSImporter( - session, collection=transaction.default_collection() - ).import_from_feed(feed) + ( + imported_editions, + imported_pools, + imported_works, + failures, + ) = opds_importer_fixture.importer().import_from_feed(feed) - [failure] = list(failures.values()) + [[failure]] = list(failures.values()) assert isinstance(failure, CoverageFailure) assert True == failure.transient assert "404: I've never heard of this work." == failure.exception @@ -1224,10 +1107,10 @@ def test_import_from_feed_treats_message_as_failure( def test_import_edition_failure_becomes_coverage_failure( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) # Make sure that an exception during import generates a # meaningful error message. @@ -1235,14 +1118,14 @@ def test_import_edition_failure_becomes_coverage_failure( feed = data.content_server_mini_feed imported_editions, pools, works, failures = DoomedOPDSImporter( session, - collection=transaction.default_collection(), + collection=db.default_collection(), ).import_from_feed(feed) # Only one book was imported, the other failed. assert 1 == len(imported_editions) # The other failed to import, and became a CoverageFailure - failure = failures["http://www.gutenberg.org/ebooks/10441"] + [failure] = failures["http://www.gutenberg.org/ebooks/10441"] assert isinstance(failure, CoverageFailure) assert False == failure.transient assert "Utter failure!" in failure.exception @@ -1250,23 +1133,21 @@ def test_import_edition_failure_becomes_coverage_failure( def test_import_work_failure_becomes_coverage_failure( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) # Make sure that an exception while updating a work for an # imported edition generates a meaningful error message. 
feed = data.content_server_mini_feed DatabaseTransactionFixture.set_settings( - transaction.default_collection().integration_configuration, + db.default_collection().integration_configuration, "data_source", DataSource.OA_CONTENT_SERVER, ) - importer = DoomedWorkOPDSImporter( - session, collection=transaction.default_collection() - ) + importer = DoomedWorkOPDSImporter(session, collection=db.default_collection()) imported_editions, pools, works, failures = importer.import_from_feed(feed) @@ -1274,16 +1155,16 @@ def test_import_work_failure_becomes_coverage_failure( assert 1 == len(works) # There's an error message for the work that failed. - failure = failures["http://www.gutenberg.org/ebooks/10441"] + [failure] = failures["http://www.gutenberg.org/ebooks/10441"] assert isinstance(failure, CoverageFailure) assert False == failure.transient assert "Utter work failure!" in failure.exception def test_consolidate_links(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) # If a link turns out to be a dud, consolidate_links() # gets rid of it. @@ -1291,7 +1172,7 @@ def test_consolidate_links(self, opds_importer_fixture: OPDSImporterFixture): assert [] == OPDSImporter.consolidate_links(none_links) links = [ - LinkData(href=transaction.fresh_url(), rel=rel, media_type="image/jpeg") + LinkData(href=db.fresh_url(), rel=rel, media_type="image/jpeg") for rel in [ Hyperlink.OPEN_ACCESS_DOWNLOAD, Hyperlink.IMAGE, @@ -1310,7 +1191,7 @@ def test_consolidate_links(self, opds_importer_fixture: OPDSImporterFixture): assert old_link == link.thumbnail links = [ - LinkData(href=transaction.fresh_url(), rel=rel, media_type="image/jpeg") + LinkData(href=db.fresh_url(), rel=rel, media_type="image/jpeg") for rel in [ Hyperlink.THUMBNAIL_IMAGE, Hyperlink.IMAGE, @@ -1325,7 +1206,7 @@ def test_consolidate_links(self, opds_importer_fixture: OPDSImporterFixture): assert t2 == i2.thumbnail links = [ - LinkData(href=transaction.fresh_url(), rel=rel, media_type="image/jpeg") + LinkData(href=db.fresh_url(), rel=rel, media_type="image/jpeg") for rel in [Hyperlink.THUMBNAIL_IMAGE, Hyperlink.IMAGE, Hyperlink.IMAGE] ] t1, i1, i2 = links @@ -1339,14 +1220,14 @@ def test_import_book_that_offers_no_license( opds_importer_fixture: OPDSImporterFixture, opds_files_fixture: OPDSFilesFixture, ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) feed = opds_files_fixture.sample_data("book_without_license.opds") - importer = OPDSImporter(session, transaction.default_collection()) + importer = OPDSImporter(session, db.default_collection()) ( imported_editions, imported_pools, @@ -1364,59 +1245,14 @@ def test_import_book_that_offers_no_license( # based on its tag. 
assert Edition.AUDIO_MEDIUM == edition.medium - def test_build_identifier_mapping(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( - opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, - ) - """Reverse engineers an identifier_mapping based on a list of URNs""" - - collection = transaction.collection(protocol=ExternalIntegration.AXIS_360) - lp = transaction.licensepool( - None, collection=collection, data_source_name=DataSource.AXIS_360 - ) - - # Create a couple of ISBN equivalencies. - isbn1 = transaction.identifier( - identifier_type=Identifier.ISBN, foreign_id=transaction.isbn_take() - ) - isbn2 = transaction.identifier( - identifier_type=Identifier.ISBN, foreign_id=transaction.isbn_take() - ) - source = DataSource.lookup(session, DataSource.AXIS_360) - [lp.identifier.equivalent_to(source, isbn, 1) for isbn in [isbn1, isbn2]] - - # The importer is initialized without an identifier mapping. - importer = OPDSImporter(session, collection) - assert None == importer.identifier_mapping - - # We can build one. - importer.build_identifier_mapping([isbn1.urn]) - expected = {isbn1: lp.identifier} - assert expected == importer.identifier_mapping - - # If we already have one, it's overwritten. - importer.build_identifier_mapping([isbn2.urn]) - overwrite = {isbn2: lp.identifier} - assert importer.identifier_mapping == overwrite - - # If the importer doesn't have a collection, we can't build - # its mapping. - importer = OPDSImporter(session, None) - importer.build_identifier_mapping([isbn1]) - assert None == importer.identifier_mapping - def test_update_work_for_edition_having_no_work( - self, db: DatabaseTransactionFixture + self, db: DatabaseTransactionFixture, opds_importer_fixture: OPDSImporterFixture ): - session = db.session - # We have an Edition and a LicensePool but no Work. edition, lp = db.edition(with_license_pool=True) assert None == lp.work - importer = OPDSImporter(session, None) + importer = opds_importer_fixture.importer() returned_pool, returned_work = importer.update_work_for_edition(edition) # We now have a presentation-ready work. @@ -1440,7 +1276,7 @@ def explode(): importer.update_work_for_edition(edition) def test_update_work_for_edition_having_incomplete_work( - self, db: DatabaseTransactionFixture + self, db: DatabaseTransactionFixture, opds_importer_fixture: OPDSImporterFixture ): session = db.session @@ -1461,7 +1297,7 @@ def test_update_work_for_edition_having_incomplete_work( title="A working title", ) - importer = OPDSImporter(session, None) + importer = opds_importer_fixture.importer() returned_pool, returned_work = importer.update_work_for_edition(edition) assert returned_pool == pool assert returned_work == work @@ -1471,7 +1307,7 @@ def test_update_work_for_edition_having_incomplete_work( assert True == work.presentation_ready def test_update_work_for_edition_having_presentation_ready_work( - self, db: DatabaseTransactionFixture + self, db: DatabaseTransactionFixture, opds_importer_fixture: OPDSImporterFixture ): session = db.session @@ -1493,7 +1329,7 @@ def test_update_work_for_edition_having_presentation_ready_work( title="A new title", ) - importer = OPDSImporter(session, None) + importer = opds_importer_fixture.importer() returned_pool, returned_work = importer.update_work_for_edition(new_edition) # The existing LicensePool and Work were returned. 
@@ -1504,7 +1340,7 @@ def test_update_work_for_edition_having_presentation_ready_work( assert True == work.presentation_ready def test_update_work_for_edition_having_multiple_license_pools( - self, db: DatabaseTransactionFixture + self, db: DatabaseTransactionFixture, opds_importer_fixture: OPDSImporterFixture ): session = db.session @@ -1513,7 +1349,7 @@ def test_update_work_for_edition_having_multiple_license_pools( edition, lp = db.edition(with_license_pool=True) collection2 = db.collection() lp2 = db.licensepool(edition=edition, collection=collection2) - importer = OPDSImporter(session, None) + importer = opds_importer_fixture.importer() # Calling update_work_for_edition creates a Work and associates # it with the edition. @@ -1563,7 +1399,7 @@ class NoLinks(Mock): do_get = MagicMock() # Here, there are no links at all. - importer = NoLinks(session, None, do_get) + importer = NoLinks(session, db.default_collection(), do_get) with pytest.raises(IntegrationException) as excinfo: importer.assert_importable_content("feed", "url") assert "No open-access links were found in the OPDS feed." in str(excinfo.value) @@ -1590,7 +1426,7 @@ class BadLinks(Mock): ), ] - bad_links_importer = BadLinks(session, None, do_get) + bad_links_importer = BadLinks(session, db.default_collection(), do_get) with pytest.raises(IntegrationException) as excinfo: bad_links_importer.assert_importable_content( "feed", "url", max_get_attempts=2 @@ -1627,7 +1463,7 @@ def _is_open_access_link(self, url, type): return False return "this is a book" - good_link_importer = GoodLink(session, None, do_get) + good_link_importer = GoodLink(session, db.default_collection(), do_get) result = good_link_importer.assert_importable_content( "feed", "url", max_get_attempts=5 ) @@ -1670,7 +1506,9 @@ def test__open_access_links(self, db: DatabaseTransactionFixture): m([no_circulation, two_open_access_links, no_open_access_links]) ) - def test__is_open_access_link(self, db: DatabaseTransactionFixture): + def test__is_open_access_link( + self, db: DatabaseTransactionFixture, opds_importer_fixture: OPDSImporterFixture + ): session = db.session http = DummyHTTPClient() @@ -1681,7 +1519,7 @@ def test__is_open_access_link(self, db: DatabaseTransactionFixture): # Set up an HTTP response that looks enough like a book # to convince _is_open_access_link. http.queue_response(200, content=enough_content) - monitor = OPDSImporter(session, None, http_get=http.do_get) + monitor = opds_importer_fixture.importer(http_get=http.do_get) url = db.fresh_url() type = "text/html" @@ -1695,21 +1533,21 @@ def test__is_open_access_link(self, db: DatabaseTransactionFixture): # This HTTP response looks OK but it's not big enough to be # any kind of book. http.queue_response(200, content="not enough content") - monitor = OPDSImporter(session, None, http_get=http.do_get) + monitor = opds_importer_fixture.importer(http_get=http.do_get) assert False == monitor._is_open_access_link(url, None) # This HTTP response is clearly an error page. 
http.queue_response(404, content=enough_content) - monitor = OPDSImporter(session, None, http_get=http.do_get) + monitor = opds_importer_fixture.importer(http_get=http.do_get) assert False == monitor._is_open_access_link(url, None) def test_import_open_access_audiobook( self, opds_importer_fixture: OPDSImporterFixture ): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) feed = data.audiobooks_opds @@ -1717,7 +1555,7 @@ def test_import_open_access_audiobook( importer = OPDSImporter( session, - collection=transaction.default_collection(), + collection=db.default_collection(), ) ( @@ -1745,9 +1583,9 @@ def test_import_open_access_audiobook( @pytest.fixture() def wayfless_circulation_api(self, opds_importer_fixture: OPDSImporterFixture): - transaction, session = ( - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + db, session = ( + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) def _wayfless_circulation_api( @@ -1761,10 +1599,8 @@ def _wayfless_circulation_api( ) feed = opds_importer_fixture.wayfless_feed - library = transaction.library( - "Test library with SAML authentication", "SAML" - ) - patron = transaction.patron(library=library) + library = db.library("Test library with SAML authentication", "SAML") + patron = db.patron(library=library) saml_subject = SAMLSubject( idp_entityID, SAMLNameID( @@ -1776,7 +1612,7 @@ def _wayfless_circulation_api( if has_saml_credential: saml_credential_manager.create_saml_token(session, patron, saml_subject) - collection = transaction.collection( + collection = db.collection( "OPDS collection with a WAYFless acquisition link", ExternalIntegration.OPDS_IMPORT, data_source_name="test", @@ -2000,27 +1836,27 @@ def test_get(self, db: DatabaseTransactionFixture): ) def test_external_integration(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) monitor = OPDSImportMonitor( session, - transaction.default_collection(), + db.default_collection(), import_class=OPDSImporter, ) assert ( - transaction.default_collection().external_integration + db.default_collection().external_integration == monitor.external_integration(session) ) def test__run_self_tests(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) """Verify the self-tests of an OPDS collection.""" @@ -2037,11 +1873,9 @@ def follow_one_link(self, url): self.follow_one_link_called_with.append(url) return ([], "some content") - feed_url = transaction.fresh_url() - transaction.default_collection().external_account_id = feed_url - monitor = Mock( - session, transaction.default_collection(), import_class=MockImporter - ) + feed_url = db.fresh_url() + db.default_collection().external_account_id = feed_url + monitor = Mock(session, db.default_collection(), import_class=MockImporter) [first_page, found_content] = monitor._run_self_tests(session) expect = "Retrieve the first page of the OPDS feed (%s)" % feed_url assert expect == first_page.name @@ 
-2062,32 +1896,32 @@ def follow_one_link(self, url): assert "looks good" == found_content.result def test_hook_methods(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) """By default, the OPDS URL and data source used by the importer come from the collection configuration. """ monitor = OPDSImportMonitor( session, - transaction.default_collection(), + db.default_collection(), import_class=OPDSImporter, ) - assert transaction.default_collection().external_account_id == monitor.opds_url( - transaction.default_collection() + assert db.default_collection().external_account_id == monitor.opds_url( + db.default_collection() ) - assert transaction.default_collection().data_source == monitor.data_source( - transaction.default_collection() + assert db.default_collection().data_source == monitor.data_source( + db.default_collection() ) def test_feed_contains_new_data(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) feed = data.content_server_mini_feed @@ -2098,7 +1932,7 @@ def _get(self, url, headers): monitor = OPDSImportMonitor( session, - transaction.default_collection(), + db.default_collection(), import_class=OPDSImporter, ) timestamp = monitor.timestamp() @@ -2110,7 +1944,7 @@ def _get(self, url, headers): # Now import the editions. monitor = MockOPDSImportMonitor( session, - collection=transaction.default_collection(), + collection=db.default_collection(), import_class=OPDSImporter, ) monitor.run() @@ -2131,7 +1965,7 @@ def _get(self, url, headers): editions[0], data_source, CoverageRecord.IMPORT_OPERATION, - collection=transaction.default_collection(), + collection=db.default_collection(), ) record.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) @@ -2139,7 +1973,7 @@ def _get(self, url, headers): editions[1], data_source, CoverageRecord.IMPORT_OPERATION, - collection=transaction.default_collection(), + collection=db.default_collection(), ) record2.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) @@ -2174,15 +2008,15 @@ def _get(self, url, headers): assert True == monitor.feed_contains_new_data(feed) def test_follow_one_link(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) monitor = OPDSImportMonitor( session, - collection=transaction.default_collection(), + collection=db.default_collection(), import_class=OPDSImporter, ) feed = data.content_server_mini_feed @@ -2212,7 +2046,7 @@ def follow(): edition, data_source, CoverageRecord.IMPORT_OPERATION, - collection=transaction.default_collection(), + collection=db.default_collection(), ) record.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) @@ -2245,21 +2079,19 @@ def follow(): assert "Expected Atom feed, got not/atom" in str(excinfo.value) def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + 
opds_importer_fixture.db, + opds_importer_fixture.db.session, ) # Check coverage records are created. monitor = OPDSImportMonitor( session, - collection=transaction.default_collection(), + collection=db.default_collection(), import_class=DoomedOPDSImporter, ) - transaction.default_collection().external_account_id = ( - "http://root-url/index.xml" - ) + db.default_collection().external_account_id = "http://root-url/index.xml" data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) feed = data.content_server_mini_feed @@ -2281,7 +2113,7 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): editions[0].primary_identifier, data_source, operation=CoverageRecord.IMPORT_OPERATION, - collection=transaction.default_collection(), + collection=db.default_collection(), ) assert CoverageRecord.SUCCESS == record.status assert None == record.exception @@ -2317,7 +2149,7 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): identifier, data_source, operation=CoverageRecord.IMPORT_OPERATION, - collection=transaction.default_collection(), + collection=db.default_collection(), ) assert "Utter failure!" in failure.exception @@ -2326,10 +2158,10 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): assert 2 == len(failures) def test_run_once(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) class MockOPDSImportMonitor(OPDSImportMonitor): @@ -2351,7 +2183,7 @@ def import_one_feed(self, feed): monitor = MockOPDSImportMonitor( session, - collection=transaction.default_collection(), + collection=db.default_collection(), import_class=OPDSImporter, ) @@ -2373,16 +2205,16 @@ def import_one_feed(self, feed): assert None == progress.finish def test_update_headers(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) # Test the _update_headers helper method. monitor = OPDSImportMonitor( session, - collection=transaction.default_collection(), + collection=db.default_collection(), import_class=OPDSImporter, ) @@ -2420,10 +2252,10 @@ def test_update_headers(self, opds_importer_fixture: OPDSImporterFixture): assert headers == expect def test_retry(self, opds_importer_fixture: OPDSImporterFixture): - data, transaction, session = ( + data, db, session = ( opds_importer_fixture, - opds_importer_fixture.transaction, - opds_importer_fixture.transaction.session, + opds_importer_fixture.db, + opds_importer_fixture.db.session, ) retry_count = 15 @@ -2433,13 +2265,13 @@ def test_retry(self, opds_importer_fixture: OPDSImporterFixture): # After we overrode the value of configuration setting we can instantiate OPDSImportMonitor. # It'll load new "Max retry count"'s value from the database. 
DatabaseTransactionFixture.set_settings( - transaction.default_collection().integration_configuration, + db.default_collection().integration_configuration, "connection_max_retry_count", retry_count, ) monitor = OPDSImportMonitor( session, - collection=transaction.default_collection(), + collection=db.default_collection(), import_class=OPDSImporter, ) diff --git a/tests/fixtures/api_odl.py b/tests/fixtures/api_odl.py new file mode 100644 index 0000000000..5aeffb0e76 --- /dev/null +++ b/tests/fixtures/api_odl.py @@ -0,0 +1,227 @@ +from __future__ import annotations + +import datetime +import json +import uuid +from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union + +import pytest +from jinja2 import Template + +from api.odl import BaseODLImporter, ODLImporter +from api.odl2 import ODL2Importer +from core.coverage import CoverageFailure +from core.model import Edition, LicensePool, Work +from tests.fixtures.files import APIFilesFixture + +if TYPE_CHECKING: + from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.odl import ODLTestFixture + + +class LicenseHelper: + """Represents an ODL license.""" + + def __init__( + self, + identifier: Optional[str] = None, + checkouts: Optional[int] = None, + concurrency: Optional[int] = None, + expires: Optional[Union[datetime.datetime, str]] = None, + ) -> None: + """Initialize a new instance of LicenseHelper class. + + :param identifier: License's identifier + :param checkouts: Total number of checkouts before a license expires + :param concurrency: Number of concurrent checkouts allowed + :param expires: Date & time when a license expires + """ + self.identifier: str = identifier if identifier else f"urn:uuid:{uuid.uuid1()}" + self.checkouts: Optional[int] = checkouts + self.concurrency: Optional[int] = concurrency + if isinstance(expires, datetime.datetime): + self.expires = expires.isoformat() + else: + self.expires: Optional[str] = expires # type: ignore + + +class LicenseInfoHelper: + """Represents information about the current state of a license stored in the License Info Document.""" + + def __init__( + self, + license: LicenseHelper, + available: int, + status: str = "available", + left: Optional[int] = None, + ) -> None: + """Initialize a new instance of LicenseInfoHelper class.""" + self.license: LicenseHelper = license + self.status: str = status + self.left: Optional[int] = left + self.available: int = available + + def __str__(self) -> str: + """Return a JSON representation of a part of the License Info Document.""" + output = { + "identifier": self.license.identifier, + "status": self.status, + "terms": { + "concurrency": self.license.concurrency, + }, + "checkouts": { + "available": self.available, + }, + } + if self.license.expires is not None: + output["terms"]["expires"] = self.license.expires # type: ignore + if self.left is not None: + output["checkouts"]["left"] = self.left # type: ignore + return json.dumps(output) + + +class ODLAPIFilesFixture(APIFilesFixture): + """A fixture providing access to ODL files.""" + + def __init__(self): + super().__init__("odl") + + +@pytest.fixture() +def api_odl_files_fixture() -> ODLAPIFilesFixture: + """A fixture providing access to ODL files.""" + return ODLAPIFilesFixture() + + +class ODL2APIFilesFixture(APIFilesFixture): + """A fixture providing access to ODL2 files.""" + + def __init__(self): + super().__init__("odl2") + + +@pytest.fixture() +def api_odl2_files_fixture() -> ODL2APIFilesFixture: + """A fixture providing access to ODL2 files.""" + 
return ODL2APIFilesFixture() + + +class MockGet: + def __init__(self): + self.responses = [] + + def get(self, *args: Any, **kwargs: Any) -> Tuple[int, Dict[str, str], bytes]: + return 200, {}, self.responses.pop(0) + + def add(self, item: LicenseInfoHelper | str | bytes) -> None: + if isinstance(item, LicenseInfoHelper): + self.responses.append(str(item).encode("utf-8")) + elif isinstance(item, str): + self.responses.append(item.encode("utf-8")) + elif isinstance(item, bytes): + self.responses.append(item) + + +@pytest.fixture() +def odl_mock_get() -> MockGet: + return MockGet() + + +@pytest.fixture() +def odl_importer( + db: DatabaseTransactionFixture, + odl_test_fixture: ODLTestFixture, + odl_mock_get: MockGet, +) -> ODLImporter: + library = odl_test_fixture.library() + return ODLImporter( + db.session, + collection=odl_test_fixture.collection(library), + http_get=odl_mock_get.get, + ) + + +@pytest.fixture() +def odl2_importer( + db: DatabaseTransactionFixture, + odl_test_fixture: ODLTestFixture, + odl_mock_get: MockGet, +) -> ODL2Importer: + library = odl_test_fixture.library() + return ODL2Importer( + db.session, + collection=odl_test_fixture.collection(library), + http_get=odl_mock_get.get, + ) + + +class OdlImportTemplatedFixture: + def __init__( + self, + odl_mock_get: MockGet, + importer: BaseODLImporter, + files_fixture: APIFilesFixture, + feed_template: str, + ): + self.mock_get = odl_mock_get + self.importer = importer + self.files_fixture = files_fixture + self.feed_template = feed_template + + def __call__( + self, licenses: List[LicenseInfoHelper] + ) -> Tuple[ + List[Edition], + List[LicensePool], + List[Work], + Dict[str, List[CoverageFailure]], + ]: + feed_licenses = [l.license for l in licenses] + for _license in licenses: + self.mock_get.add(_license) + feed = self.get_templated_feed( + files=self.files_fixture, + filename=self.feed_template, + licenses=feed_licenses, + ) + return self.importer.import_from_feed(feed) + + def get_templated_feed( + self, files: APIFilesFixture, filename: str, licenses: List[LicenseHelper] + ) -> str: + """Get the test ODL feed with specific licensing information. 
+ + :param files: Access to test files + :param filename: Name of template to load + :param licenses: List of ODL licenses + + :return: Test ODL feed + """ + text = files.sample_text(filename) + template = Template(text) + feed = template.render(licenses=licenses) + return feed + + +@pytest.fixture(params=["odl", "odl2"]) +def odl_import_templated( + request: pytest.FixtureRequest, + odl_mock_get: MockGet, + odl_importer: ODLImporter, + odl2_importer: ODL2Importer, + api_odl_files_fixture: ODLAPIFilesFixture, + api_odl2_files_fixture: ODL2APIFilesFixture, +) -> OdlImportTemplatedFixture: + if request.param == "odl": + return OdlImportTemplatedFixture( + odl_mock_get, odl_importer, api_odl_files_fixture, "feed_template.xml.jinja" + ) + elif request.param == "odl2": + return OdlImportTemplatedFixture( + odl_mock_get, + odl2_importer, + api_odl2_files_fixture, + "feed_template.json.jinja", + ) + + raise ValueError("Unknown param") diff --git a/tests/fixtures/api_odl2_files.py b/tests/fixtures/api_odl2_files.py deleted file mode 100644 index c3c7cd9ab4..0000000000 --- a/tests/fixtures/api_odl2_files.py +++ /dev/null @@ -1,16 +0,0 @@ -import pytest - -from tests.fixtures.files import APIFilesFixture - - -class ODL2APIFilesFixture(APIFilesFixture): - """A fixture providing access to ODL2 files.""" - - def __init__(self): - super().__init__("odl2") - - -@pytest.fixture() -def api_odl2_files_fixture() -> ODL2APIFilesFixture: - """A fixture providing access to ODL2 files.""" - return ODL2APIFilesFixture() diff --git a/tests/fixtures/api_odl_files.py b/tests/fixtures/api_odl_files.py deleted file mode 100644 index f33d4a8ea9..0000000000 --- a/tests/fixtures/api_odl_files.py +++ /dev/null @@ -1,16 +0,0 @@ -import pytest - -from tests.fixtures.files import APIFilesFixture - - -class ODLAPIFilesFixture(APIFilesFixture): - """A fixture providing access to ODL files.""" - - def __init__(self): - super().__init__("odl") - - -@pytest.fixture() -def api_odl_files_fixture() -> ODLAPIFilesFixture: - """A fixture providing access to ODL files.""" - return ODLAPIFilesFixture() diff --git a/tests/fixtures/odl.py b/tests/fixtures/odl.py index 9ef40a2519..4e8119387e 100644 --- a/tests/fixtures/odl.py +++ b/tests/fixtures/odl.py @@ -22,8 +22,7 @@ from core.model.configuration import ExternalIntegration from core.util.http import HTTP from tests.core.mock import MockRequestsResponse -from tests.fixtures.api_odl2_files import ODL2APIFilesFixture -from tests.fixtures.api_odl_files import ODLAPIFilesFixture +from tests.fixtures.api_odl import ODL2APIFilesFixture, ODLAPIFilesFixture from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.files import APIFilesFixture From cfcc76634dc1b02ba8a9ecc4072e3344d6942f23 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Oct 2023 13:08:27 +0000 Subject: [PATCH 084/262] Bump psycopg2 from 2.9.8 to 2.9.9 (#1429) --- poetry.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7f698ae7d9..7f4b1bb4a8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2931,22 +2931,22 @@ files = [ [[package]] name = "psycopg2" -version = "2.9.8" +version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "psycopg2-2.9.8-cp310-cp310-win32.whl", hash = "sha256:2f8594f92bbb5d8b59ffec04e2686c416401e2d4297de1193f8e75235937e71d"}, - 
{file = "psycopg2-2.9.8-cp310-cp310-win_amd64.whl", hash = "sha256:f9ecbf504c4eaff90139d5c9b95d47275f2b2651e14eba56392b4041fbf4c2b3"}, - {file = "psycopg2-2.9.8-cp311-cp311-win32.whl", hash = "sha256:65f81e72136d8b9ac8abf5206938d60f50da424149a43b6073f1546063c0565e"}, - {file = "psycopg2-2.9.8-cp311-cp311-win_amd64.whl", hash = "sha256:f7e62095d749359b7854143843f27edd7dccfcd3e1d833b880562aa5702d92b0"}, - {file = "psycopg2-2.9.8-cp37-cp37m-win32.whl", hash = "sha256:81b21424023a290a40884c7f8b0093ba6465b59bd785c18f757e76945f65594c"}, - {file = "psycopg2-2.9.8-cp37-cp37m-win_amd64.whl", hash = "sha256:67c2f32f3aba79afb15799575e77ee2db6b46b8acf943c21d34d02d4e1041d50"}, - {file = "psycopg2-2.9.8-cp38-cp38-win32.whl", hash = "sha256:287a64ef168ef7fb9f382964705ff664b342bfff47e7242bf0a04ef203269dd5"}, - {file = "psycopg2-2.9.8-cp38-cp38-win_amd64.whl", hash = "sha256:dcde3cad4920e29e74bf4e76c072649764914facb2069e6b7fa1ddbebcd49e9f"}, - {file = "psycopg2-2.9.8-cp39-cp39-win32.whl", hash = "sha256:d4ad050ea50a16731d219c3a85e8f2debf49415a070f0b8331ccc96c81700d9b"}, - {file = "psycopg2-2.9.8-cp39-cp39-win_amd64.whl", hash = "sha256:d39bb3959788b2c9d7bf5ff762e29f436172b241cd7b47529baac77746fd7918"}, - {file = "psycopg2-2.9.8.tar.gz", hash = "sha256:3da6488042a53b50933244085f3f91803f1b7271f970f3e5536efa69314f6a49"}, + {file = "psycopg2-2.9.9-cp310-cp310-win32.whl", hash = "sha256:38a8dcc6856f569068b47de286b472b7c473ac7977243593a288ebce0dc89516"}, + {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, + {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, + {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, + {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, + {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, + {file = "psycopg2-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:bac58c024c9922c23550af2a581998624d6e02350f4ae9c5f0bc642c633a2d5e"}, + {file = "psycopg2-2.9.9-cp39-cp39-win32.whl", hash = "sha256:c92811b2d4c9b6ea0285942b2e7cac98a59e166d59c588fe5cfe1eda58e72d59"}, + {file = "psycopg2-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:de80739447af31525feddeb8effd640782cf5998e1a4e9192ebdf829717e3913"}, + {file = "psycopg2-2.9.9.tar.gz", hash = "sha256:d1454bde93fb1e224166811694d600e746430c006fbb031ea06ecc2ea41bf156"}, ] [[package]] From 61f921c3b90df17348c9ec0c7e1dc8fc82327c09 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Oct 2023 13:08:41 +0000 Subject: [PATCH 085/262] Bump psycopg2-binary from 2.9.8 to 2.9.9 (#1430) --- poetry.lock | 133 ++++++++++++++++++++++++++++------------------------ 1 file changed, 71 insertions(+), 62 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7f4b1bb4a8..ac5d6d72fe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2951,71 +2951,80 @@ files = [ [[package]] name = "psycopg2-binary" -version = "2.9.8" +version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = 
"psycopg2-binary-2.9.8.tar.gz", hash = "sha256:80451e6b6b7c486828d5c7ed50769532bbb04ec3a411f1e833539d5c10eb691c"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e271ad6692d50d70ca75db3bd461bfc26316de78de8fe1f504ef16dcea8f2312"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3ae22a0fa5c516b84ddb189157fabfa3f12eded5d630e1ce260a18e1771f8707"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9a971086db0069aef2fd22ccffb670baac427f4ee2174c4f5c7206254f1e6794"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b6928a502af71ca2ac9aad535e78c8309892ed3bfa7933182d4c760580c8af4"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f955fe6301b84b6fd13970a05f3640fbb62ca3a0d19342356585006c830e038"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3723c3f009e2b2771f2491b330edb7091846f1aad0c08fbbd9a1383d6a0c0841"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e3142c7e51b92855cff300580de949e36a94ab3bfa8f353b27fe26535e9b3542"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:de85105c568dc5f0f0efe793209ba83e4675d53d00faffc7a7c7a8bea9e0e19a"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c7ff2b6a79a92b1b169b03bb91b41806843f0cdf6055256554495bffed1d496d"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59f45cca0765aabb52a5822c72d5ff2ec46a28b1c1702de90dc0d306ec5c2001"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-win32.whl", hash = "sha256:1dbad789ebd1e61201256a19dc2e90fed4706bc966ccad4f374648e5336b1ab4"}, - {file = "psycopg2_binary-2.9.8-cp310-cp310-win_amd64.whl", hash = "sha256:15458c81b0d199ab55825007115f697722831656e6477a427783fe75c201c82b"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:395c217156723fe21809dfe8f7a433c5bf8e9bce229944668e4ec709c37c5442"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:14f85ff2d5d826a7ce9e6c31e803281ed5a096789f47f52cb728c88f488de01b"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e11373d8e4f1f46cf3065bf613f0df9854803dc95aa4a35354ffac19f8c52127"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01f9731761f711e42459f87bd2ad5d744b9773b5dd05446f3b579a0f077e78e3"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:54bf5c27bd5867a5fa5341fad29f0d5838e2fed617ef5346884baf8b8b16dd82"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4bfabbd7e70785af726cc0209e8e64b926abf91741eca80678b221aad9e72135"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6369f4bd4d27944498094dccced1ae7ca43376a59dbfe4c8b6a16e9e3dc3ccce"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4879ee1d07a6b2c232ae6a74570f4788cd7a29b3cd38bc39bf60225b1d075c78"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:4336afc0e81726350bd5863e3c3116d8c12aa7f457d3d0b3b3dc36137fec6feb"}, - {file = 
"psycopg2_binary-2.9.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:63ce1dccfd08d9c5341ac82d62aa04345bc4bf41b5e5b7b2c6c172a28e0eda27"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-win32.whl", hash = "sha256:59421806c1a0803ea7de9ed061d656c041a84db0da7e73266b98db4c7ba263da"}, - {file = "psycopg2_binary-2.9.8-cp311-cp311-win_amd64.whl", hash = "sha256:ccaa2ae03990cedde1f618ff11ec89fefa84622da73091a67b44553ca8be6711"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5aa0c99c12075c593dcdccbb8a7aaa714b716560cc99ef9206f9e75b77520801"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91719f53ed2a95ebecefac48d855d811cba9d9fe300acc162993bdfde9bc1c3b"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c68a2e1afb4f2a5bb4b7bb8f90298d21196ac1c66418523e549430b8c4b7cb1e"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:278ebd63ced5a5f3af5394cb75a9a067243eee21f42f0126c6f1cf85eaeb90f9"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c84ff9682bc4520504c474e189b3de7c4a4029e529c8b775e39c95c33073767"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6f5e70e40dae47a4dc7f8eb390753bb599b0f4ede314580e6faa3b7383695d19"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:673eafbdaa4ed9f5164c90e191c3895cc5f866b9b379fdb59f3a2294e914d9bd"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:5a0a6e4004697ec98035ff3b8dfc4dba8daa477b23ee891d831cd3cd65ace6be"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d29efab3c5d6d978115855a0f2643e0ee8c6450dc536d5b4afec6f52ab99e99e"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-win32.whl", hash = "sha256:d4a19a3332f2ac6d093e60a6f1c589f97eb9f9de7e27ea80d67f188384e31572"}, - {file = "psycopg2_binary-2.9.8-cp37-cp37m-win_amd64.whl", hash = "sha256:5262713988d97a9d4cd54b682dec4a413b87b76790e5b16f480450550d11a8f7"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e46b0f4683539965ce849f2c13fc53e323bb08d84d4ba2e4b3d976f364c84210"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3fd44b52bc9c74c1512662e8da113a1c55127adeeacebaf460babe766517b049"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b6c607ecb6a9c245ebe162d63ccd9222d38efa3c858bbe38d32810b08b8f87e"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6ef615d48fa60361e57f998327046bd89679c25d06eee9e78156be5a7a76e03"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65403113ac3a4813a1409fb6a1e43c658b459cc8ed8afcc5f4baf02ec8be4334"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debcb23a052f3fb4c165789ea513b562b2fac0f0f4f53eaf3cf4dc648907ff8"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dc145a241e1f6381efb924bcf3e3462d6020b8a147363f9111eb0a9c89331ad7"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1d669887df169a9b0c09e0f5b46891511850a9ddfcde3593408af9d9774c5c3a"}, - {file = 
"psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:19d40993701e39c49b50e75cd690a6af796d7e7210941ee0fe49cf12b25840e5"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b8b2cdf3bce4dd91dc035fbff4eb812f5607dda91364dc216b0920b97b521c7"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-win32.whl", hash = "sha256:4960c881471ca710b81a67ef148c33ee121c1f8e47a639cf7e06537fe9fee337"}, - {file = "psycopg2_binary-2.9.8-cp38-cp38-win_amd64.whl", hash = "sha256:aeb09db95f38e75ae04e947d283e07be34d03c4c2ace4f0b73dbb9143d506e67"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5aef3296d44d05805e634dbbd2972aa8eb7497926dd86047f5e39a79c3ecc086"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4d6b592ecc8667e608b9e7344259fbfb428cc053df0062ec3ac75d8270cd5a9f"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:693a4e7641556f0b421a7d6c6a74058aead407d860ac1cb9d0bf25be0ca73de8"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf60c599c40c266a01c458e9c71db7132b11760f98f08233f19b3e0a2153cbf1"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cbe1e19f59950afd66764e3c905ecee9f2aee9f8df2ef35af6f7948ad93f620"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc37de7e3a87f5966965fc874d33c9b68d638e6c3718fdf32a5083de563428b0"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e1bb4eb0d9925d65dabaaabcbb279fab444ba66d73f86d4c07dfd11f0139c06"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e7bdc94217ae20ad03b375a991e107a31814053bee900ad8c967bf82ef3ff02e"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:565edaf9f691b17a7fdbabd368b5b3e67d0fdc8f7f6b52177c1d3289f4e763fd"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0e3071c947bda6afc6fe2e7b64ebd64fb2cad1bc0e705a3594cb499291f2dfec"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-win32.whl", hash = "sha256:205cecdd81ff4f1ddd687ce7d06879b9b80cccc428d8d6ebf36fcba08bb6d361"}, - {file = "psycopg2_binary-2.9.8-cp39-cp39-win_amd64.whl", hash = "sha256:1f279ba74f0d6b374526e5976c626d2ac3b8333b6a7b08755c513f4d380d3add"}, + {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, + {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, + {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, + {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, + {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = "sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, + {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, ] [[package]] From 54befd0556920ebac221022fbc79bbddcae21d33 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 4 Oct 2023 15:17:11 +0000 Subject: [PATCH 086/262] Bump dunamai from 1.18.1 to 1.19.0 (#1433) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index ac5d6d72fe..a9f3ad3a1a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1025,13 +1025,13 @@ tls = ["cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=17.5.0)"] [[package]] name = "dunamai" -version = "1.18.1" +version = "1.19.0" description = "Dynamic version generation" optional = false 
python-versions = ">=3.5,<4.0" files = [ - {file = "dunamai-1.18.1-py3-none-any.whl", hash = "sha256:ee7b042f7a687fa04fc383258eb93bd819c7bd8aec62e0974f3c69747e5958f2"}, - {file = "dunamai-1.18.1.tar.gz", hash = "sha256:5e9a91e43d16bb56fa8fcddcf92fa31b2e1126e060c3dcc8d094d9b508061f9d"}, + {file = "dunamai-1.19.0-py3-none-any.whl", hash = "sha256:1ed948676bbf0812bfaafe315a134634f8d6eb67138513c75aa66e747404b9c6"}, + {file = "dunamai-1.19.0.tar.gz", hash = "sha256:6ad99ae34f7cd290550a2ef1305d2e0292e6e6b5b1b830dfc07ceb7fd35fec09"}, ] [package.dependencies] From 3961f6c1ec4574de37d5d929b4e0d96ed4f29c9c Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 4 Oct 2023 12:44:43 -0300 Subject: [PATCH 087/262] Update webpub manifest parser to the latest. (#1434) --- poetry.lock | 458 ++++++++++++++++++++++--------------------------- pyproject.toml | 6 +- 2 files changed, 207 insertions(+), 257 deletions(-) diff --git a/poetry.lock b/poetry.lock index a9f3ad3a1a..41475dcdf1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -24,20 +24,21 @@ tz = ["python-dateutil"] [[package]] name = "attrs" -version = "21.4.0" +version = "23.1.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.7" files = [ - {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, - {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, ] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "aws-xray-sdk" @@ -1969,24 +1970,41 @@ files = [ [[package]] name = "jsonschema" -version = "3.2.0" +version = "4.19.1" description = "An implementation of JSON Schema validation for Python" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, - {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = 
"sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, ] [package.dependencies] -attrs = ">=17.4.0" -pyrsistent = ">=0.14.0" -setuptools = "*" -six = ">=1.11.0" +attrs = ">=22.2.0" +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +jsonschema-specifications = ">=2023.03.6" +pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} +referencing = ">=0.28.4" +rpds-py = ">=0.7.1" [package.extras] -format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] -format-nongpl = ["idna", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "webcolors"] +format = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3987", "uri-template", "webcolors (>=1.11)"] +format-nongpl = ["fqdn", "idna", "isoduration", "jsonpointer (>1.13)", "rfc3339-validator", "rfc3986-validator (>0.1.0)", "uri-template", "webcolors (>=1.11)"] + +[[package]] +name = "jsonschema-specifications" +version = "2023.7.1" +description = "The JSON Schema meta-schemas and vocabularies, exposed as a Registry" +optional = false +python-versions = ">=3.8" +files = [ + {file = "jsonschema_specifications-2023.7.1-py3-none-any.whl", hash = "sha256:05adf340b659828a004220a9613be00fa3f223f2b82002e273dee62fd50524b1"}, + {file = "jsonschema_specifications-2023.7.1.tar.gz", hash = "sha256:c91a50404e88a1f6ba40636778e2ee08f6e24c5613fe4c53ac24578a5a7f72bb"}, +] + +[package.dependencies] +importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} +referencing = ">=0.28.0" [[package]] name = "jwcrypto" @@ -2389,104 +2407,17 @@ files = [ {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, ] -[[package]] -name = "multidict" -version = "6.0.4" -description = "multidict implementation" -optional = false -python-versions = ">=3.7" -files = [ - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, - {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, - {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, - {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, - {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, - {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, - {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, - {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, - {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, - {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, - {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, - {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash 
= "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, - {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, - {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, - {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, - {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, - {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, - {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, - {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, - {file = 
"multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, - {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, - {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, - {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, - {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, - {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, - {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, - {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, - {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, -] - [[package]] name = "multipledispatch" -version = "0.6.0" +version = "1.0.0" description = "Multiple dispatch" optional = false python-versions = "*" files = [ - {file = "multipledispatch-0.6.0-py2-none-any.whl", hash = "sha256:407e6d8c5fa27075968ba07c4db3ef5f02bea4e871e959570eeb69ee39a6565b"}, - {file = "multipledispatch-0.6.0-py3-none-any.whl", hash = "sha256:a55c512128fb3f7c2efd2533f2550accb93c35f1045242ef74645fc92a2c3cba"}, - {file = "multipledispatch-0.6.0.tar.gz", hash = "sha256:a7ab1451fd0bf9b92cab3edbd7b205622fb767aeefb4fb536c2e3de9e0a38bea"}, + {file = "multipledispatch-1.0.0-py3-none-any.whl", hash = "sha256:0c53cd8b077546da4e48869f49b13164bebafd0c2a5afceb6bb6a316e7fb46e4"}, + {file = 
"multipledispatch-1.0.0.tar.gz", hash = "sha256:5c839915465c68206c3e9c473357908216c28383b425361e5d144594bf85a7e0"}, ] -[package.dependencies] -six = "*" - [[package]] name = "mypy" version = "1.5.1" @@ -2756,24 +2687,24 @@ files = [ [[package]] name = "palace-webpub-manifest-parser" -version = "3.0.1" +version = "3.1.0" description = "A parser for the Readium Web Publication Manifest, OPDS 2.0 and ODL formats." optional = false python-versions = ">=3.8,<4" files = [ - {file = "palace_webpub_manifest_parser-3.0.1-py3-none-any.whl", hash = "sha256:7f32c5e88d0a0e0789ccc1cbf5f3cea0e5605031f568ce1835b1ee105a579a38"}, - {file = "palace_webpub_manifest_parser-3.0.1.tar.gz", hash = "sha256:e2e194cbf7aea97876ba869d106448ad81858b46bb113292e10eaa4863f64ba4"}, + {file = "palace_webpub_manifest_parser-3.1.0-py3-none-any.whl", hash = "sha256:2d65fbfddafd70d0e571d8e0cee4ad726baf187180742bbab026ef9066068b5c"}, + {file = "palace_webpub_manifest_parser-3.1.0.tar.gz", hash = "sha256:9ca52be816ade5812e4f2cc1a3bd0892ba10c16f8497896aed43038ad831ee02"}, ] [package.dependencies] -jsonschema = ">=3.2,<5.0" -multipledispatch = ">=0.6.0,<0.7.0" -pyrsistent = "0.18.1" -python-dateutil = ">=2.8.2,<3.0.0" -pytz = ">=2021.1,<2022.0" -requests = ">=2.27.1,<3.0.0" -rfc3987 = ">=1.3.8,<2.0.0" -uritemplate = ">=3.0.1,<5.0.0" +jsonschema = ">=4.19,<5.0" +multipledispatch = ">=1.0,<2.0" +pyrsistent = ">=0.19,<0.20" +python-dateutil = ">=2.8,<3.0" +pytz = ">=2023.3,<2024.0" +requests = ">=2.27,<3.0" +rfc3987 = ">=1.3,<2.0" +uritemplate = ">=4.1,<5.0" [[package]] name = "pillow" @@ -2842,6 +2773,17 @@ files = [ docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "pkgutil-resolve-name" +version = "1.3.10" +description = "Resolve a name to an object." 
+optional = false +python-versions = ">=3.6" +files = [ + {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, + {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, +] + [[package]] name = "platformdirs" version = "3.10.0" @@ -3364,32 +3306,38 @@ testing = ["covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytes [[package]] name = "pyrsistent" -version = "0.18.1" +version = "0.19.3" description = "Persistent/Functional/Immutable data structures" optional = false python-versions = ">=3.7" files = [ - {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, - {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, - {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, - {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, - {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, - {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, - {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, - {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, - {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, - {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, - {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, + {file = "pyrsistent-0.19.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:20460ac0ea439a3e79caa1dbd560344b64ed75e85d8703943e0b66c2a6150e4a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c18264cb84b5e68e7085a43723f9e4c1fd1d935ab240ce02c0324a8e01ccb64"}, + {file = "pyrsistent-0.19.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b774f9288dda8d425adb6544e5903f1fb6c273ab3128a355c6b972b7df39dcf"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win32.whl", hash = "sha256:5a474fb80f5e0d6c9394d8db0fc19e90fa540b82ee52dba7d246a7791712f74a"}, + {file = "pyrsistent-0.19.3-cp310-cp310-win_amd64.whl", hash = "sha256:49c32f216c17148695ca0e02a5c521e28a4ee6c5089f97e34fe24163113722da"}, + {file = "pyrsistent-0.19.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f0774bf48631f3a20471dd7c5989657b639fd2d285b861237ea9e82c36a415a9"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ab2204234c0ecd8b9368dbd6a53e83c3d4f3cab10ecaf6d0e772f456c442393"}, + {file = "pyrsistent-0.19.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e42296a09e83028b3476f7073fcb69ffebac0e66dbbfd1bd847d61f74db30f19"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win32.whl", hash = "sha256:64220c429e42a7150f4bfd280f6f4bb2850f95956bde93c6fda1b70507af6ef3"}, + {file = "pyrsistent-0.19.3-cp311-cp311-win_amd64.whl", hash = "sha256:016ad1afadf318eb7911baa24b049909f7f3bb2c5b1ed7b6a8f21db21ea3faa8"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c4db1bd596fefd66b296a3d5d943c94f4fac5bcd13e99bffe2ba6a759d959a28"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aeda827381f5e5d65cced3024126529ddc4289d944f75e090572c77ceb19adbf"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:42ac0b2f44607eb92ae88609eda931a4f0dfa03038c44c772e07f43e738bcac9"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win32.whl", hash = "sha256:e8f2b814a3dc6225964fa03d8582c6e0b6650d68a232df41e3cc1b66a5d2f8d1"}, + {file = "pyrsistent-0.19.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c9bb60a40a0ab9aba40a59f68214eed5a29c6274c83b2cc206a359c4a89fa41b"}, + {file = "pyrsistent-0.19.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a2471f3f8693101975b1ff85ffd19bb7ca7dd7c38f8a81701f67d6b4f97b87d8"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc5d149f31706762c1f8bda2e8c4f8fead6e80312e3692619a75301d3dbb819a"}, + {file = "pyrsistent-0.19.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3311cb4237a341aa52ab8448c27e3a9931e2ee09561ad150ba94e4cfd3fc888c"}, + {file = "pyrsistent-0.19.3-cp38-cp38-win32.whl", hash = "sha256:f0e7c4b2f77593871e918be000b96c8107da48444d57005b6a6bc61fb4331b2c"}, + {file = 
"pyrsistent-0.19.3-cp38-cp38-win_amd64.whl", hash = "sha256:c147257a92374fde8498491f53ffa8f4822cd70c0d85037e09028e478cababb7"}, + {file = "pyrsistent-0.19.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b735e538f74ec31378f5a1e3886a26d2ca6351106b4dfde376a26fc32a044edc"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99abb85579e2165bd8522f0c0138864da97847875ecbd45f3e7e2af569bfc6f2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a8cb235fa6d3fd7aae6a4f1429bbb1fec1577d978098da1252f0489937786f3"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win32.whl", hash = "sha256:c74bed51f9b41c48366a286395c67f4e894374306b197e62810e0fdaf2364da2"}, + {file = "pyrsistent-0.19.3-cp39-cp39-win_amd64.whl", hash = "sha256:878433581fc23e906d947a6814336eee031a00e6defba224234169ae3d3d6a98"}, + {file = "pyrsistent-0.19.3-py3-none-any.whl", hash = "sha256:ccf0d6bd208f8111179f0c26fdf84ed7c3891982f2edaeae7422575f47e66b64"}, + {file = "pyrsistent-0.19.3.tar.gz", hash = "sha256:1a2994773706bbb4995c31a97bc94f1418314923bd1048c6d964837040376440"}, ] [[package]] @@ -3523,13 +3471,13 @@ test = ["coverage (>=4.5.2)", "flake8 (>=3.6.0,<=5.0.0)", "freezegun (>=0.3.11,< [[package]] name = "pytz" -version = "2021.3" +version = "2023.3.post1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ - {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, - {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, + {file = "pytz-2023.3.post1-py2.py3-none-any.whl", hash = "sha256:ce42d816b81b68506614c11e8937d3aa9e41007ceb50bfdcb0749b921bf646c7"}, + {file = "pytz-2023.3.post1.tar.gz", hash = "sha256:7b4fddbeb94a1eba4b557da24f19fdf9db575192544270a9101d8509f9f43d7b"}, ] [[package]] @@ -3748,19 +3696,18 @@ test = ["Pillow", "css-inline", "jinja2", "matplotlib", "openpyxl", "pandas", "p [[package]] name = "referencing" -version = "0.8.11" +version = "0.30.2" description = "JSON Referencing + Python" optional = false python-versions = ">=3.8" files = [ - {file = "referencing-0.8.11-py3-none-any.whl", hash = "sha256:f7edae1893624cd85f3370920ee1f863440ec9c349daba5da197a2fc713e98ec"}, - {file = "referencing-0.8.11.tar.gz", hash = "sha256:7cca400800f8bde596ad88159c25407fed907da2b1254402ff2c7a8e61eb229e"}, + {file = "referencing-0.30.2-py3-none-any.whl", hash = "sha256:449b6669b6121a9e96a7f9e410b245d471e8d48964c67113ce9afe50c8dd7bdf"}, + {file = "referencing-0.30.2.tar.gz", hash = "sha256:794ad8003c65938edcdbc027f1933215e0d0ccc0291e3ce20a4d87432b59efc0"}, ] [package.dependencies] -attrs = "*" -pyrsistent = "*" -yarl = "*" +attrs = ">=22.2.0" +rpds-py = ">=0.7.0" [[package]] name = "regex" @@ -3896,6 +3843,112 @@ files = [ {file = "rfc3987-1.3.8.tar.gz", hash = "sha256:d3c4d257a560d544e9826b38bc81db676890c79ab9d7ac92b39c7a253d5ca733"}, ] +[[package]] +name = "rpds-py" +version = "0.10.3" +description = "Python bindings to Rust's persistent data structures (rpds)" +optional = false +python-versions = ">=3.8" +files = [ + {file = "rpds_py-0.10.3-cp310-cp310-macosx_10_7_x86_64.whl", hash = "sha256:485747ee62da83366a44fbba963c5fe017860ad408ccd6cd99aa66ea80d32b2e"}, + {file = "rpds_py-0.10.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c55f9821f88e8bee4b7a72c82cfb5ecd22b6aad04033334f33c329b29bfa4da0"}, + {file = 
"rpds_py-0.10.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3b52a67ac66a3a64a7e710ba629f62d1e26ca0504c29ee8cbd99b97df7079a8"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3aed39db2f0ace76faa94f465d4234aac72e2f32b009f15da6492a561b3bbebd"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:271c360fdc464fe6a75f13ea0c08ddf71a321f4c55fc20a3fe62ea3ef09df7d9"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef5fddfb264e89c435be4adb3953cef5d2936fdeb4463b4161a6ba2f22e7b740"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a771417c9c06c56c9d53d11a5b084d1de75de82978e23c544270ab25e7c066ff"}, + {file = "rpds_py-0.10.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:52b5cbc0469328e58180021138207e6ec91d7ca2e037d3549cc9e34e2187330a"}, + {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:6ac3fefb0d168c7c6cab24fdfc80ec62cd2b4dfd9e65b84bdceb1cb01d385c33"}, + {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:8d54bbdf5d56e2c8cf81a1857250f3ea132de77af543d0ba5dce667183b61fec"}, + {file = "rpds_py-0.10.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cd2163f42868865597d89399a01aa33b7594ce8e2c4a28503127c81a2f17784e"}, + {file = "rpds_py-0.10.3-cp310-none-win32.whl", hash = "sha256:ea93163472db26ac6043e8f7f93a05d9b59e0505c760da2a3cd22c7dd7111391"}, + {file = "rpds_py-0.10.3-cp310-none-win_amd64.whl", hash = "sha256:7cd020b1fb41e3ab7716d4d2c3972d4588fdfbab9bfbbb64acc7078eccef8860"}, + {file = "rpds_py-0.10.3-cp311-cp311-macosx_10_7_x86_64.whl", hash = "sha256:1d9b5ee46dcb498fa3e46d4dfabcb531e1f2e76b477e0d99ef114f17bbd38453"}, + {file = "rpds_py-0.10.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:563646d74a4b4456d0cf3b714ca522e725243c603e8254ad85c3b59b7c0c4bf0"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e626b864725680cd3904414d72e7b0bd81c0e5b2b53a5b30b4273034253bb41f"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:485301ee56ce87a51ccb182a4b180d852c5cb2b3cb3a82f7d4714b4141119d8c"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42f712b4668831c0cd85e0a5b5a308700fe068e37dcd24c0062904c4e372b093"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6c9141af27a4e5819d74d67d227d5047a20fa3c7d4d9df43037a955b4c748ec5"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef750a20de1b65657a1425f77c525b0183eac63fe7b8f5ac0dd16f3668d3e64f"}, + {file = "rpds_py-0.10.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e1a0ffc39f51aa5f5c22114a8f1906b3c17eba68c5babb86c5f77d8b1bba14d1"}, + {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f4c179a7aeae10ddf44c6bac87938134c1379c49c884529f090f9bf05566c836"}, + {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:176287bb998fd1e9846a9b666e240e58f8d3373e3bf87e7642f15af5405187b8"}, + {file = "rpds_py-0.10.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6446002739ca29249f0beaaf067fcbc2b5aab4bc7ee8fb941bd194947ce19aff"}, + {file = "rpds_py-0.10.3-cp311-none-win32.whl", hash = 
"sha256:c7aed97f2e676561416c927b063802c8a6285e9b55e1b83213dfd99a8f4f9e48"}, + {file = "rpds_py-0.10.3-cp311-none-win_amd64.whl", hash = "sha256:8bd01ff4032abaed03f2db702fa9a61078bee37add0bd884a6190b05e63b028c"}, + {file = "rpds_py-0.10.3-cp312-cp312-macosx_10_7_x86_64.whl", hash = "sha256:4cf0855a842c5b5c391dd32ca273b09e86abf8367572073bd1edfc52bc44446b"}, + {file = "rpds_py-0.10.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:69b857a7d8bd4f5d6e0db4086da8c46309a26e8cefdfc778c0c5cc17d4b11e08"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:975382d9aa90dc59253d6a83a5ca72e07f4ada3ae3d6c0575ced513db322b8ec"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:35fbd23c1c8732cde7a94abe7fb071ec173c2f58c0bd0d7e5b669fdfc80a2c7b"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:106af1653007cc569d5fbb5f08c6648a49fe4de74c2df814e234e282ebc06957"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ce5e7504db95b76fc89055c7f41e367eaadef5b1d059e27e1d6eabf2b55ca314"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5aca759ada6b1967fcfd4336dcf460d02a8a23e6abe06e90ea7881e5c22c4de6"}, + {file = "rpds_py-0.10.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b5d4bdd697195f3876d134101c40c7d06d46c6ab25159ed5cbd44105c715278a"}, + {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a657250807b6efd19b28f5922520ae002a54cb43c2401e6f3d0230c352564d25"}, + {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:177c9dd834cdf4dc39c27436ade6fdf9fe81484758885f2d616d5d03c0a83bd2"}, + {file = "rpds_py-0.10.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e22491d25f97199fc3581ad8dd8ce198d8c8fdb8dae80dea3512e1ce6d5fa99f"}, + {file = "rpds_py-0.10.3-cp38-cp38-macosx_10_7_x86_64.whl", hash = "sha256:2f3e1867dd574014253b4b8f01ba443b9c914e61d45f3674e452a915d6e929a3"}, + {file = "rpds_py-0.10.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c22211c165166de6683de8136229721f3d5c8606cc2c3d1562da9a3a5058049c"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40bc802a696887b14c002edd43c18082cb7b6f9ee8b838239b03b56574d97f71"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e271dd97c7bb8eefda5cca38cd0b0373a1fea50f71e8071376b46968582af9b"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:95cde244e7195b2c07ec9b73fa4c5026d4a27233451485caa1cd0c1b55f26dbd"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08a80cf4884920863623a9ee9a285ee04cef57ebedc1cc87b3e3e0f24c8acfe5"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:763ad59e105fca09705d9f9b29ecffb95ecdc3b0363be3bb56081b2c6de7977a"}, + {file = "rpds_py-0.10.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:187700668c018a7e76e89424b7c1042f317c8df9161f00c0c903c82b0a8cac5c"}, + {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:5267cfda873ad62591b9332fd9472d2409f7cf02a34a9c9cb367e2c0255994bf"}, + {file = "rpds_py-0.10.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:2ed83d53a8c5902ec48b90b2ac045e28e1698c0bea9441af9409fc844dc79496"}, + {file = 
"rpds_py-0.10.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:255f1a10ae39b52122cce26ce0781f7a616f502feecce9e616976f6a87992d6b"}, + {file = "rpds_py-0.10.3-cp38-none-win32.whl", hash = "sha256:a019a344312d0b1f429c00d49c3be62fa273d4a1094e1b224f403716b6d03be1"}, + {file = "rpds_py-0.10.3-cp38-none-win_amd64.whl", hash = "sha256:efb9ece97e696bb56e31166a9dd7919f8f0c6b31967b454718c6509f29ef6fee"}, + {file = "rpds_py-0.10.3-cp39-cp39-macosx_10_7_x86_64.whl", hash = "sha256:570cc326e78ff23dec7f41487aa9c3dffd02e5ee9ab43a8f6ccc3df8f9327623"}, + {file = "rpds_py-0.10.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cff7351c251c7546407827b6a37bcef6416304fc54d12d44dbfecbb717064717"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:177914f81f66c86c012311f8c7f46887ec375cfcfd2a2f28233a3053ac93a569"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:448a66b8266de0b581246ca7cd6a73b8d98d15100fb7165974535fa3b577340e"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bbac1953c17252f9cc675bb19372444aadf0179b5df575ac4b56faaec9f6294"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9dd9d9d9e898b9d30683bdd2b6c1849449158647d1049a125879cb397ee9cd12"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8c71ea77536149e36c4c784f6d420ffd20bea041e3ba21ed021cb40ce58e2c9"}, + {file = "rpds_py-0.10.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16a472300bc6c83fe4c2072cc22b3972f90d718d56f241adabc7ae509f53f154"}, + {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9255e7165083de7c1d605e818025e8860636348f34a79d84ec533546064f07e"}, + {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:53d7a3cd46cdc1689296348cb05ffd4f4280035770aee0c8ead3bbd4d6529acc"}, + {file = "rpds_py-0.10.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:22da15b902f9f8e267020d1c8bcfc4831ca646fecb60254f7bc71763569f56b1"}, + {file = "rpds_py-0.10.3-cp39-none-win32.whl", hash = "sha256:850c272e0e0d1a5c5d73b1b7871b0a7c2446b304cec55ccdb3eaac0d792bb065"}, + {file = "rpds_py-0.10.3-cp39-none-win_amd64.whl", hash = "sha256:de61e424062173b4f70eec07e12469edde7e17fa180019a2a0d75c13a5c5dc57"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_10_7_x86_64.whl", hash = "sha256:af247fd4f12cca4129c1b82090244ea5a9d5bb089e9a82feb5a2f7c6a9fe181d"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3ad59efe24a4d54c2742929001f2d02803aafc15d6d781c21379e3f7f66ec842"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:642ed0a209ced4be3a46f8cb094f2d76f1f479e2a1ceca6de6346a096cd3409d"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:37d0c59548ae56fae01c14998918d04ee0d5d3277363c10208eef8c4e2b68ed6"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aad6ed9e70ddfb34d849b761fb243be58c735be6a9265b9060d6ddb77751e3e8"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8f94fdd756ba1f79f988855d948ae0bad9ddf44df296770d9a58c774cfbcca72"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:77076bdc8776a2b029e1e6ffbe6d7056e35f56f5e80d9dc0bad26ad4a024a762"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:87d9b206b1bd7a0523375dc2020a6ce88bca5330682ae2fe25e86fd5d45cea9c"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:8efaeb08ede95066da3a3e3c420fcc0a21693fcd0c4396d0585b019613d28515"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_i686.whl", hash = "sha256:a4d9bfda3f84fc563868fe25ca160c8ff0e69bc4443c5647f960d59400ce6557"}, + {file = "rpds_py-0.10.3-pp310-pypy310_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:d27aa6bbc1f33be920bb7adbb95581452cdf23005d5611b29a12bb6a3468cc95"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_10_7_x86_64.whl", hash = "sha256:ed8313809571a5463fd7db43aaca68ecb43ca7a58f5b23b6e6c6c5d02bdc7882"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:e10e6a1ed2b8661201e79dff5531f8ad4cdd83548a0f81c95cf79b3184b20c33"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:015de2ce2af1586ff5dc873e804434185199a15f7d96920ce67e50604592cae9"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ae87137951bb3dc08c7d8bfb8988d8c119f3230731b08a71146e84aaa919a7a9"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0bb4f48bd0dd18eebe826395e6a48b7331291078a879295bae4e5d053be50d4c"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:09362f86ec201288d5687d1dc476b07bf39c08478cde837cb710b302864e7ec9"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:821392559d37759caa67d622d0d2994c7a3f2fb29274948ac799d496d92bca73"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7170cbde4070dc3c77dec82abf86f3b210633d4f89550fa0ad2d4b549a05572a"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:5de11c041486681ce854c814844f4ce3282b6ea1656faae19208ebe09d31c5b8"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_i686.whl", hash = "sha256:4ed172d0c79f156c1b954e99c03bc2e3033c17efce8dd1a7c781bc4d5793dfac"}, + {file = "rpds_py-0.10.3-pp38-pypy38_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:11fdd1192240dda8d6c5d18a06146e9045cb7e3ba7c06de6973000ff035df7c6"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_10_7_x86_64.whl", hash = "sha256:f602881d80ee4228a2355c68da6b296a296cd22bbb91e5418d54577bbf17fa7c"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:691d50c99a937709ac4c4cd570d959a006bd6a6d970a484c84cc99543d4a5bbb"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24cd91a03543a0f8d09cb18d1cb27df80a84b5553d2bd94cba5979ef6af5c6e7"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fc2200e79d75b5238c8d69f6a30f8284290c777039d331e7340b6c17cad24a5a"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea65b59882d5fa8c74a23f8960db579e5e341534934f43f3b18ec1839b893e41"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:829e91f3a8574888b73e7a3feb3b1af698e717513597e23136ff4eba0bc8387a"}, + {file = 
"rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eab75a8569a095f2ad470b342f2751d9902f7944704f0571c8af46bede438475"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:061c3ff1f51ecec256e916cf71cc01f9975af8fb3af9b94d3c0cc8702cfea637"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:39d05e65f23a0fe897b6ac395f2a8d48c56ac0f583f5d663e0afec1da89b95da"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_i686.whl", hash = "sha256:4eca20917a06d2fca7628ef3c8b94a8c358f6b43f1a621c9815243462dcccf97"}, + {file = "rpds_py-0.10.3-pp39-pypy39_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:e8d0f0eca087630d58b8c662085529781fd5dc80f0a54eda42d5c9029f812599"}, + {file = "rpds_py-0.10.3.tar.gz", hash = "sha256:fcc1ebb7561a3e24a6588f7c6ded15d80aec22c66a070c757559b57b17ffd1cb"}, +] + [[package]] name = "rsa" version = "4.9" @@ -3927,22 +3980,6 @@ botocore = ">=1.12.36,<2.0a.0" [package.extras] crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] -[[package]] -name = "setuptools" -version = "65.5.1" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "setuptools-65.5.1-py3-none-any.whl", hash = "sha256:d0b9a8433464d5800cbe05094acf5c6d52a91bfac9b52bcfc4d41382be5d5d31"}, - {file = "setuptools-65.5.1.tar.gz", hash = "sha256:e197a19aa8ec9722928f2206f8de752def0e4c9fc6953527360d1c36d94ddb2f"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - [[package]] name = "sgmllib3k" version = "1.0.0" @@ -4556,93 +4593,6 @@ files = [ [package.dependencies] lxml = ">=3.8" -[[package]] -name = "yarl" -version = "1.9.2" -description = "Yet another URL library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, - {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, - {file = 
"yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, - {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, - {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, - {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, - {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, - {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, - {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, - {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, - {file = "yarl-1.9.2-cp311-cp311-win32.whl", 
hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, - {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, - {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, - {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, - {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, - {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, - {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, - {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, - {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = 
"sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, - {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, - {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, - {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, - {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, - {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, - {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, - {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, - {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, - {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, -] - -[package.dependencies] -idna = ">=2.0" -multidict = ">=4.0" - [[package]] name = "zipp" version = "3.11.0" @@ -4661,4 +4611,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = 
"968a965763de22669aa48e98378805806e8adab6f7acbb1148bad53b0ebb2d6c" +content-hash = "2dcbabab70b53c06157b3366b30fc07f8ca677dac8e7c262d82d9617fdfc46b2" diff --git a/pyproject.toml b/pyproject.toml index 4a87b109a3..9927499db7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -189,12 +189,12 @@ jwcrypto = "^1.4.2" levenshtein = "^0.22" lxml = "^4.9.3" money = "1.3.0" -multipledispatch = "0.6.0" +multipledispatch = "^1.0" nameparser = "^1.1" # nameparser is for author name manipulations nltk = "3.8.1" # nltk is a textblob dependency. opensearch-dsl = "~1.0" opensearch-py = "~1.1" -palace-webpub-manifest-parser = "~3.0.1" +palace-webpub-manifest-parser = "^3.1" pillow = "^10.0" pycryptodome = "^3.18" pydantic = {version = "^1.10.9", extras = ["dotenv", "email"]} @@ -208,7 +208,7 @@ pyspellchecker = "0.7.2" python = ">=3.8,<4" python-dateutil = "2.8.2" python3-saml = "~1.15.0" # python-saml is required for SAML authentication -pytz = "2021.3" +pytz = "^2023.3" pyyaml = "^6.0" redmail = "^0.6.0" requests = "^2.29" From 5484028ff4fc04be9a7c6c6bc7ead4bb22b8d640 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 5 Oct 2023 07:10:59 -0300 Subject: [PATCH 088/262] =?UTF-8?q?Crush=20=F0=9F=AA=97=20core/overdrive.p?= =?UTF-8?q?y=20and=20api/overdrive.py=20together=20(PP-500)=20(#1436)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This refactors the core/overdrive.py and api/overdrive.py files to smash them together. In the process it combines the test fixtures for overdrive from overdrive_fixture, overdrive_with_api_fixture, and overdrive_api_fixture, to just overdrive_api_fixture. --- api/overdrive.py | 1603 ++++++++++++++++- .../overdrive-advantage-accounts | 2 +- bin/informational/overdrive-advantage-list | 2 +- bin/repair/overdrive_bibliographic_coverage | 2 +- core/overdrive.py | 1502 --------------- core/scripts.py | 120 +- tests/api/conftest.py | 1 - .../files/overdrive/advantage_accounts.json | 0 .../files/overdrive/audiobook.json | 0 .../files/overdrive/has_awards.json | 0 .../files/overdrive/has_grade_levels.json | 0 .../files/overdrive/has_sample.json | 0 .../overdrive_availability_advantage.json | 0 ...overdrive_availability_information_2.json} | 0 .../files/overdrive/overdrive_book_list.json | 0 .../overdrive_book_list_missing_data.json | 0 .../files/overdrive/overdrive_metadata.json | 0 tests/api/mockapi/overdrive.py | 62 +- tests/api/test_overdrive.py | 1206 ++++++++++++- tests/core/conftest.py | 1 - .../overdrive_availability_not_found.json | 5 - tests/core/test_overdrive.py | 1127 ------------ tests/core/test_scripts.py | 111 +- tests/fixtures/overdrive.py | 73 - 24 files changed, 2818 insertions(+), 2999 deletions(-) delete mode 100644 core/overdrive.py rename tests/{core => api}/files/overdrive/advantage_accounts.json (100%) rename tests/{core => api}/files/overdrive/audiobook.json (100%) rename tests/{core => api}/files/overdrive/has_awards.json (100%) rename tests/{core => api}/files/overdrive/has_grade_levels.json (100%) rename tests/{core => api}/files/overdrive/has_sample.json (100%) rename tests/{core => api}/files/overdrive/overdrive_availability_advantage.json (100%) rename tests/{core/files/overdrive/overdrive_availability_information.json => api/files/overdrive/overdrive_availability_information_2.json} (100%) rename tests/{core => api}/files/overdrive/overdrive_book_list.json (100%) rename tests/{core => api}/files/overdrive/overdrive_book_list_missing_data.json (100%) rename tests/{core => 
api}/files/overdrive/overdrive_metadata.json (100%) delete mode 100644 tests/core/files/overdrive/overdrive_availability_not_found.json delete mode 100644 tests/core/test_overdrive.py delete mode 100644 tests/fixtures/overdrive.py diff --git a/api/overdrive.py b/api/overdrive.py index bfb120b4e2..358e6461d4 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -1,13 +1,25 @@ +from __future__ import annotations + +import argparse +import csv import datetime import json +import logging import re import time import urllib.parse -from typing import Any, Dict, Optional, Tuple, Union +from threading import RLock +from typing import Any, Dict, List, Set, Tuple, Union, cast +from urllib.parse import quote, urlsplit, urlunsplit import dateutil import flask +import isbnlib from flask_babel import lazy_gettext as _ +from requests import Response +from requests.structures import CaseInsensitiveDict +from sqlalchemy.exc import NoResultFound +from sqlalchemy.orm import Query, Session from sqlalchemy.orm.exc import StaleDataError from api.circulation import ( @@ -19,37 +31,79 @@ LoanInfo, ) from api.circulation_exceptions import * +from api.circulation_exceptions import CannotFulfill from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics +from core.config import CannotLoadConfiguration, Configuration +from core.coverage import BibliographicCoverageProvider +from core.importers import BaseImporterSettings from core.integration.base import HasChildIntegrationConfiguration -from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField -from core.metadata_layer import ReplacementPolicy, TimestampData +from core.integration.settings import ( + BaseSettings, + ConfigurationFormItem, + ConfigurationFormItemType, + FormField, +) +from core.metadata_layer import ( + CirculationData, + ContributorData, + FormatData, + IdentifierData, + LinkData, + MeasurementData, + Metadata, + ReplacementPolicy, + SubjectData, + TimestampData, +) from core.model import ( + Classification, Collection, + Contributor, Credential, DataSource, DeliveryMechanism, Edition, ExternalIntegration, + Hyperlink, Identifier, LicensePool, + Measurement, MediaTypes, Patron, Representation, + Subject, + get_one_or_create, ) from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor -from core.overdrive import ( - OverdriveBibliographicCoverageProvider, - OverdriveCoreAPI, - OverdriveRepresentationExtractor, - OverdriveSettings, -) -from core.scripts import Script -from core.util.datetime_helpers import strptime_utc -from core.util.http import HTTP +from core.scripts import InputScript, Script +from core.util.datetime_helpers import strptime_utc, utc_now +from core.util.http import HTTP, BadResponseException +from core.util.string_helpers import base64 + + +class OverdriveConstants: + OVERDRIVE_CLIENT_KEY = "overdrive_client_key" + OVERDRIVE_CLIENT_SECRET = "overdrive_client_secret" + OVERDRIVE_SERVER_NICKNAME = "overdrive_server_nickname" + OVERDRIVE_WEBSITE_ID = "overdrive_website_id" + + # Note that the library ID is not included here because it is not Overdrive-specific + OVERDRIVE_CONFIGURATION_KEYS = { + OVERDRIVE_CLIENT_KEY, + OVERDRIVE_CLIENT_SECRET, + OVERDRIVE_SERVER_NICKNAME, + OVERDRIVE_WEBSITE_ID, + } + + PRODUCTION_SERVERS = "production" + TESTING_SERVERS = "testing" + # The formats we care about. 
+ FORMATS = "ebook-epub-open,ebook-epub-adobe,ebook-pdf-adobe,ebook-pdf-open,audiobook-overdrive".split( + "," + ) -class OverdriveAPIConstants: # These are not real Overdrive formats; we use them internally so # we can distinguish between (e.g.) using "audiobook-overdrive" # to get into Overdrive Read, and using it to get a link to a @@ -66,10 +120,68 @@ class OverdriveAPIConstants: "audiobook-overdrive", ] + # When associating an Overdrive account with a library, it's + # necessary to also specify an "ILS name" obtained from + # Overdrive. Components that don't authenticate patrons (such as + # the metadata wrangler) don't need to set this value. + ILS_NAME_KEY = "ils_name" + ILS_NAME_DEFAULT = "default" + + +class OverdriveSettings(BaseImporterSettings): + """The basic Overdrive configuration""" + + external_account_id: Optional[str] = FormField( + form=ConfigurationFormItem( + label=_("Library ID"), + type=ConfigurationFormItemType.TEXT, + description="The library identifier.", + required=True, + ), + ) + overdrive_website_id: str = FormField( + form=ConfigurationFormItem( + label=_("Website ID"), + type=ConfigurationFormItemType.TEXT, + description="The web site identifier.", + required=True, + ) + ) + overdrive_client_key: str = FormField( + form=ConfigurationFormItem( + label=_("Client Key"), + type=ConfigurationFormItemType.TEXT, + description="The Overdrive client key.", + required=True, + ) + ) + overdrive_client_secret: str = FormField( + form=ConfigurationFormItem( + label=_("Client Secret"), + type=ConfigurationFormItemType.TEXT, + description="The Overdrive client secret.", + required=True, + ) + ) + + overdrive_server_nickname: str = FormField( + default=OverdriveConstants.PRODUCTION_SERVERS, + form=ConfigurationFormItem( + label=_("Server family"), + type=ConfigurationFormItemType.SELECT, + required=False, + description="Unless you hear otherwise from Overdrive, your integration should use their production servers.", + options={ + OverdriveConstants.PRODUCTION_SERVERS: ("Production"), + OverdriveConstants.TESTING_SERVERS: _("Testing"), + }, + ), + ) + class OverdriveLibrarySettings(BaseCirculationEbookLoanSettings): ils_name: str = FormField( - default=OverdriveCoreAPI.ILS_NAME_DEFAULT, + default=OverdriveConstants.ILS_NAME_DEFAULT, form=ConfigurationFormItem( label=_("ILS Name"), description=_( @@ -89,11 +201,10 @@ class OverdriveChildSettings(BaseSettings): class OverdriveAPI( - OverdriveCoreAPI, BaseCirculationAPI, HasCollectionSelfTests, HasChildIntegrationConfiguration, - OverdriveAPIConstants, + OverdriveConstants, ): NAME = ExternalIntegration.OVERDRIVE DESCRIPTION = _( @@ -130,9 +241,9 @@ class OverdriveAPI( # use other formats. LOCK_IN_FORMATS = [ x - for x in OverdriveCoreAPI.FORMATS - if x not in OverdriveAPIConstants.STREAMING_FORMATS - and x not in OverdriveAPIConstants.MANIFEST_INTERNAL_FORMATS + for x in OverdriveConstants.FORMATS + if x not in OverdriveConstants.STREAMING_FORMATS + and x not in OverdriveConstants.MANIFEST_INTERNAL_FORMATS ] # TODO: This is a terrible choice but this URL should never be @@ -146,6 +257,91 @@ class OverdriveAPI( "PatronHasExceededCheckoutLimit_ForCPC": PatronLoanLimitReached, } + # An OverDrive defined constant indicating the "main" or parent account + # associated with an OverDrive collection. + OVERDRIVE_MAIN_ACCOUNT_ID = -1 + + log = logging.getLogger("Overdrive API") + + # A lock for threaded usage. + lock = RLock() + + # Production and testing have different host names for some of the + # API endpoints. 
This is configurable on the collection level. + HOSTS = { + OverdriveConstants.PRODUCTION_SERVERS: dict( + host="https://api.overdrive.com", + patron_host="https://patron.api.overdrive.com", + ), + OverdriveConstants.TESTING_SERVERS: dict( + host="https://integration.api.overdrive.com", + patron_host="https://integration-patron.api.overdrive.com", + ), + } + + # Production and testing setups use the same URLs for Client + # Authentication and Patron Authentication, but we use the same + # system as for other hostnames to give a consistent look to the + # templates. + for host in list(HOSTS.values()): + host["oauth_patron_host"] = "https://oauth-patron.overdrive.com" + host["oauth_host"] = "https://oauth.overdrive.com" + + # Each of these endpoint URLs has a slot to plug in one of the + # appropriate servers. This will be filled in either by a call to + # the endpoint() method (if there are other variables in the + # template), or by the _do_get or _do_post methods (if there are + # no other variables). + TOKEN_ENDPOINT = "%(oauth_host)s/token" + PATRON_TOKEN_ENDPOINT = "%(oauth_patron_host)s/patrontoken" + + LIBRARY_ENDPOINT = "%(host)s/v1/libraries/%(library_id)s" + ADVANTAGE_LIBRARY_ENDPOINT = ( + "%(host)s/v1/libraries/%(parent_library_id)s/advantageAccounts/%(library_id)s" + ) + ALL_PRODUCTS_ENDPOINT = ( + "%(host)s/v1/collections/%(collection_token)s/products?sort=%(sort)s" + ) + METADATA_ENDPOINT = ( + "%(host)s/v1/collections/%(collection_token)s/products/%(item_id)s/metadata" + ) + EVENTS_ENDPOINT = "%(host)s/v1/collections/%(collection_token)s/products?lastUpdateTime=%(lastupdatetime)s&sort=%(sort)s&limit=%(limit)s" + AVAILABILITY_ENDPOINT = "%(host)s/v2/collections/%(collection_token)s/products/%(product_id)s/availability" + + PATRON_INFORMATION_ENDPOINT = "%(patron_host)s/v1/patrons/me" + CHECKOUTS_ENDPOINT = "%(patron_host)s/v1/patrons/me/checkouts" + CHECKOUT_ENDPOINT = "%(patron_host)s/v1/patrons/me/checkouts/%(overdrive_id)s" + FORMATS_ENDPOINT = ( + "%(patron_host)s/v1/patrons/me/checkouts/%(overdrive_id)s/formats" + ) + HOLDS_ENDPOINT = "%(patron_host)s/v1/patrons/me/holds" + HOLD_ENDPOINT = "%(patron_host)s/v1/patrons/me/holds/%(product_id)s" + ME_ENDPOINT = "%(patron_host)s/v1/patrons/me" + + MAX_CREDENTIAL_AGE = 50 * 60 + + PAGE_SIZE_LIMIT = 300 + EVENT_SOURCE = "Overdrive" + + EVENT_DELAY = datetime.timedelta(minutes=120) + + # The formats that can be read by the default Library Simplified reader. + DEFAULT_READABLE_FORMATS = { + "ebook-epub-open", + "ebook-epub-adobe", + "ebook-pdf-open", + "audiobook-overdrive", + } + + # The formats that indicate the book has been fulfilled on an + # incompatible platform and just can't be fulfilled on Simplified + # in any format. + INCOMPATIBLE_PLATFORM_FORMATS = {"ebook-kindle"} + + OVERDRIVE_READ_FORMAT = "ebook-overdrive" + + TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" + @classmethod def settings_class(cls): return OverdriveSettings @@ -166,12 +362,487 @@ def description(self): def __init__(self, _db, collection): super().__init__(_db, collection) + if collection.protocol != ExternalIntegration.OVERDRIVE: + raise ValueError( + "Collection protocol is %s, but passed into OverdriveAPI!" 
+ % collection.protocol + ) + + _library_id = collection.external_account_id + if not _library_id: + raise ValueError( + "Collection %s must have an external account ID" % collection.id + ) + else: + self._library_id = _library_id + + self._db = _db + self._external_integration = collection.external_integration + if collection.id is None: + raise ValueError( + "Collection passed into OverdriveAPI must have an ID, but %s does not" + % collection.name + ) + self._collection_id = collection.id + + # Initialize configuration information. + self._integration_configuration_id = cast( + int, collection.integration_configuration.id + ) + self._configuration = OverdriveData() + + if collection.parent: + # This is an Overdrive Advantage account. + self.parent_library_id = collection.parent.external_account_id + + # We're going to inherit all of the Overdrive credentials + # from the parent (the main Overdrive account), except for the + # library ID, which we already set. + parent_integration = collection.parent.integration_configuration + parent_config = self.settings_class()(**parent_integration.settings_dict) + for key in OverdriveConstants.OVERDRIVE_CONFIGURATION_KEYS: + parent_value = getattr(parent_config, key, None) + setattr(self._configuration, key, parent_value) + else: + self.parent_library_id = None + + # Self settings should override parent settings where available + settings = collection.integration_configuration.settings_dict + for name, schema in self.settings_class().schema()["properties"].items(): + if name in settings or not hasattr(self._configuration, name): + setattr( + self._configuration, name, settings.get(name, schema.get("default")) + ) + + if not self._configuration.overdrive_client_key: + raise CannotLoadConfiguration("Overdrive client key is not configured") + if not self._configuration.overdrive_client_secret: + raise CannotLoadConfiguration( + "Overdrive client password/secret is not configured" + ) + if not self._configuration.overdrive_website_id: + raise CannotLoadConfiguration("Overdrive website ID is not configured") + + self._server_nickname = self._configuration.overdrive_server_nickname + + self._hosts = self._determine_hosts(server_nickname=self._server_nickname) + + # This is set by an access to .token, or by a call to + # check_creds() or refresh_creds(). + self._token = None + + # This is set by an access to .collection_token + self._collection_token = None self.overdrive_bibliographic_coverage_provider = ( OverdriveBibliographicCoverageProvider(collection, api_class=self) ) - def external_integration(self, _db): - return self.collection.external_integration + def configuration(self): + """Overdrive has a different implementation for configuration""" + return self._configuration + + def _determine_hosts(self, *, server_nickname: str) -> Dict[str, str]: + # Figure out which hostnames we'll be using when constructing + # endpoint URLs. + if server_nickname not in self.HOSTS: + server_nickname = OverdriveConstants.PRODUCTION_SERVERS + + return dict(self.HOSTS[server_nickname]) + + def external_integration(self, db: Session) -> ExternalIntegration: + return self._external_integration + + def endpoint(self, url: str, **kwargs) -> str: + """Create the URL to an Overdrive API endpoint. + + :param url: A template for the URL. + :param kwargs: Arguments to be interpolated into the template. + The server hostname will be interpolated automatically; you + don't have to pass it in. + """ + if not "%(" in url: + # Nothing to interpolate. 
+ return url + kwargs.update(self._hosts) + return url % kwargs + + @property + def token(self): + if not self._token: + self.check_creds() + return self._token + + @property + def collection_token(self): + """Get the token representing this particular Overdrive collection. + + As a side effect, this will verify that the Overdrive + credentials are working. + """ + if not self._collection_token: + self.check_creds() + library = self.get_library() + error = library.get("errorCode") + if error: + message = library.get("message") + raise CannotLoadConfiguration( + "Overdrive credentials are valid but could not fetch library: %s" + % message + ) + self._collection_token = library["collectionToken"] + return self._collection_token + + @property + def collection(self) -> Optional[Collection]: + return Collection.by_id(self._db, id=self._collection_id) + + @property + def source(self): + return DataSource.lookup(self._db, DataSource.OVERDRIVE) + + def ils_name(self, library): + """Determine the ILS name to use for the given Library.""" + config = self.integration_configuration().for_library(library.id) + if not config: + return self.ILS_NAME_DEFAULT + return config.settings_dict.get(self.ILS_NAME_KEY, self.ILS_NAME_DEFAULT) + + @property + def advantage_library_id(self): + """The library ID for this library, as we should look for it in + certain API documents served by Overdrive. + + For ordinary collections (ie non-Advantage) with or without associated + Advantage (ie child) collections shared among libraries, this will be + equal to the OVERDRIVE_MAIN_ACCOUNT_ID. + + For Overdrive Advantage accounts, this will be the numeric + value of the Overdrive library ID. + """ + if self.parent_library_id is None: + # This is not an Overdrive Advantage collection. + # + # Instead of looking for the library ID itself in these + # documents, we should look for the constant main account id. + return self.OVERDRIVE_MAIN_ACCOUNT_ID + return int(self._library_id) + + def check_creds(self, force_refresh=False): + """If the Bearer Token has expired, update it.""" + with self.lock: + refresh_on_lookup = self.refresh_creds + if force_refresh: + refresh_on_lookup = lambda x: x + + credential = self.credential_object(refresh_on_lookup) + if force_refresh: + self.refresh_creds(credential) + self._token = credential.credential + + def credential_object(self, refresh): + """Look up the Credential object that allows us to use + the Overdrive API. + """ + return Credential.lookup( + self._db, + DataSource.OVERDRIVE, + None, + None, + refresh, + collection=self.collection, + ) + + def refresh_creds(self, credential): + """Fetch a new Bearer Token and update the given Credential object.""" + response = self.token_post( + self.TOKEN_ENDPOINT, + dict(grant_type="client_credentials"), + allowed_response_codes=[200], + ) + data = response.json() + self._update_credential(credential, data) + self._token = credential.credential + + def get( + self, url: str, extra_headers={}, exception_on_401=False + ) -> Tuple[int, CaseInsensitiveDict, bytes]: + """Make an HTTP GET request using the active Bearer Token.""" + request_headers = dict(Authorization="Bearer %s" % self.token) + request_headers.update(extra_headers) + + response: Response = self._do_get( + url, request_headers, allowed_response_codes=["2xx", "3xx", "401", "404"] + ) + status_code: int = response.status_code + headers: CaseInsensitiveDict = response.headers + content: bytes = response.content + + if status_code == 401: + if exception_on_401: + # This is our second try. 
Give up. + raise BadResponseException.from_response( + url, + "Something's wrong with the Overdrive OAuth Bearer Token!", + (status_code, headers, content), + ) + else: + # Refresh the token and try again. + self.check_creds(True) + return self.get(url, extra_headers, True) + else: + return status_code, headers, content + + @property + def token_authorization_header(self) -> str: + s = b"%s:%s" % (self.client_key(), self.client_secret()) + return "Basic " + base64.standard_b64encode(s).strip() + + @property + def fulfillment_authorization_header(self) -> str: + is_test_mode = ( + True + if self._server_nickname == OverdriveConstants.TESTING_SERVERS + else False + ) + try: + client_credentials = Configuration.overdrive_fulfillment_keys( + testing=is_test_mode + ) + except CannotLoadConfiguration as e: + raise CannotFulfill(*e.args) + + s = b"%s:%s" % ( + client_credentials["key"].encode(), + client_credentials["secret"].encode(), + ) + return "Basic " + base64.standard_b64encode(s).strip() + + def token_post( + self, + url: str, + payload: Dict[str, str], + is_fulfillment=False, + headers={}, + **kwargs, + ) -> Response: + """Make an HTTP POST request for purposes of getting an OAuth token.""" + headers = dict(headers) + headers["Authorization"] = ( + self.token_authorization_header + if not is_fulfillment + else self.fulfillment_authorization_header + ) + return self._do_post(url, payload, headers, **kwargs) + + @staticmethod + def _update_credential(credential, overdrive_data): + """Copy Overdrive OAuth data into a Credential object.""" + credential.credential = overdrive_data["access_token"] + expires_in = overdrive_data["expires_in"] * 0.9 + credential.expires = utc_now() + datetime.timedelta(seconds=expires_in) + + @property + def _library_endpoint(self) -> str: + """Which URL should we go to to get information about this collection? + + If this is an ordinary Overdrive account, we get information + from LIBRARY_ENDPOINT. + + If this is an Overdrive Advantage account, we get information + from LIBRARY_ADVANTAGE_ENDPOINT. + """ + args = dict(library_id=self._library_id) + if self.parent_library_id: + # This is an Overdrive advantage account. + args["parent_library_id"] = self.parent_library_id + endpoint = self.ADVANTAGE_LIBRARY_ENDPOINT + else: + endpoint = self.LIBRARY_ENDPOINT + return self.endpoint(endpoint, **args) + + def get_library(self): + """Get basic information about the collection, including + a link to the titles in the collection. + """ + url = self._library_endpoint + with self.lock: + representation, cached = Representation.get( + self._db, + url, + self.get, + exception_handler=Representation.reraise_exception, + ) + return json.loads(representation.content) + + def get_advantage_accounts(self): + """Find all the Overdrive Advantage accounts managed by this library. + + :yield: A sequence of OverdriveAdvantageAccount objects. + """ + library = self.get_library() + links = library.get("links", {}) + advantage = links.get("advantageAccounts") + if not advantage: + return [] + if advantage: + # This library has Overdrive Advantage accounts, or at + # least a link where some may be found. 
+ advantage_url = advantage.get("href") + if not advantage_url: + return + representation, cached = Representation.get( + self._db, + advantage_url, + self.get, + exception_handler=Representation.reraise_exception, + ) + return OverdriveAdvantageAccount.from_representation(representation.content) + + def all_ids(self): + """Get IDs for every book in the system, with the most recently added + ones at the front. + """ + next_link = self._all_products_link + while next_link: + page_inventory, next_link = self._get_book_list_page(next_link, "next") + + yield from page_inventory + + @property + def _all_products_link(self) -> str: + url = self.endpoint( + self.ALL_PRODUCTS_ENDPOINT, + collection_token=self.collection_token, + sort="dateAdded:desc", + ) + return self.make_link_safe(url) + + def _get_book_list_page(self, link, rel_to_follow="next", extractor_class=None): + """Process a page of inventory whose circulation we need to check. + + Returns a 2-tuple: (availability_info, next_link). + `availability_info` is a list of dictionaries, each containing + basic availability and bibliographic information about + one book. + `next_link` is a link to the next page of results. + """ + extractor_class = extractor_class or OverdriveRepresentationExtractor + # We don't cache this because it changes constantly. + status_code, headers, content = self.get(link, {}) + if isinstance(content, (bytes, str)): + content = json.loads(content) + + # Find the link to the next page of results, if any. + next_link = extractor_class.link(content, rel_to_follow) + + # Prepare to get availability information for all the books on + # this page. + availability_queue = extractor_class.availability_link_list(content) + return availability_queue, next_link + + def recently_changed_ids(self, start, cutoff): + """Get IDs of books whose status has changed between the start time + and now. + """ + # `cutoff` is not supported by Overdrive, so we ignore it. All + # we can do is get events between the start time and now. + + last_update_time = start - self.EVENT_DELAY + self.log.info("Asking for circulation changes since %s", last_update_time) + last_update = last_update_time.strftime(self.TIME_FORMAT) + + next_link = self.endpoint( + self.EVENTS_ENDPOINT, + lastupdatetime=last_update, + sort="popularity:desc", + limit=self.PAGE_SIZE_LIMIT, + collection_token=self.collection_token, + ) + next_link = self.make_link_safe(next_link) + while next_link: + page_inventory, next_link = self._get_book_list_page(next_link) + # We won't be sending out any events for these books yet, + # because we don't know if anything changed, but we will + # be putting them on the list of inventory items to + # refresh. At that point we will send out events. 
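
A worked example (outside the diff) of the lastupdatetime window that recently_changed_ids() builds above; the start time is invented.

    from datetime import datetime, timedelta, timezone

    TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
    EVENT_DELAY = timedelta(minutes=120)

    start = datetime(2023, 8, 23, 12, 0, tzinfo=timezone.utc)  # hypothetical run time

    # Back the window up by EVENT_DELAY, then format it for the events endpoint.
    last_update = (start - EVENT_DELAY).strftime(TIME_FORMAT)
    print(last_update)  # 2023-08-23T10:00:00Z
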
+ yield from page_inventory + + def metadata_lookup(self, identifier): + """Look up metadata for an Overdrive identifier.""" + url = self.endpoint( + self.METADATA_ENDPOINT, + collection_token=self.collection_token, + item_id=identifier.identifier, + ) + status_code, headers, content = self.get(url, {}) + if isinstance(content, (bytes, str)): + content = json.loads(content) + return content + + def metadata_lookup_obj(self, identifier): + url = self.endpoint( + self.METADATA_ENDPOINT, + collection_token=self.collection_token, + item_id=identifier, + ) + status_code, headers, content = self.get(url, {}) + if isinstance(content, (bytes, str)): + content = json.loads(content) + return OverdriveRepresentationExtractor.book_info_to_metadata(content) + + @classmethod + def make_link_safe(cls, url: str) -> str: + """Turn a server-provided link into a link the server will accept! + + The {} part is completely obnoxious and I have complained about it to + Overdrive. + + The availability part is to make sure we always use v2 of the + availability API, even if Overdrive sent us a link to v1. + """ + parts = list(urlsplit(url)) + parts[2] = quote(parts[2]) + endings = ("/availability", "/availability/") + if parts[2].startswith("/v1/collections/") and any( + parts[2].endswith(x) for x in endings + ): + parts[2] = parts[2].replace("/v1/collections/", "/v2/collections/", 1) + query_string = parts[3] + query_string = query_string.replace("+", "%2B") + query_string = query_string.replace(":", "%3A") + query_string = query_string.replace("{", "%7B") + query_string = query_string.replace("}", "%7D") + parts[3] = query_string + return urlunsplit(tuple(parts)) + + def _do_get(self, url: str, headers, **kwargs) -> Response: + """This method is overridden in MockOverdriveAPI.""" + url = self.endpoint(url) + kwargs["max_retry_count"] = int(self._configuration.max_retry_count) + kwargs["timeout"] = 120 + return HTTP.get_with_timeout(url, headers=headers, **kwargs) + + def _do_post(self, url: str, payload, headers, **kwargs) -> Response: + """This method is overridden in MockOverdriveAPI.""" + url = self.endpoint(url) + kwargs["max_retry_count"] = int(self._configuration.max_retry_count) + kwargs["timeout"] = 120 + return HTTP.post_with_timeout(url, payload, headers=headers, **kwargs) + + def website_id(self) -> bytes: + return self._configuration.overdrive_website_id.encode("utf-8") + + def client_key(self) -> bytes: + return self._configuration.overdrive_client_key.encode("utf-8") + + def client_secret(self) -> bytes: + return self._configuration.overdrive_client_secret.encode("utf-8") + + def library_id(self) -> str: + return self._library_id + + def hosts(self) -> Dict[str, str]: + return dict(self._hosts) def _run_self_tests(self, _db): result = self.run_test( @@ -616,7 +1287,7 @@ def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): def get_fulfillment_link( self, patron: Patron, pin: Optional[str], overdrive_id: str, format_type: str - ) -> Union["OverdriveManifestFulfillmentInfo", Tuple[str, str]]: + ) -> Union[OverdriveManifestFulfillmentInfo, Tuple[str, str]]: """Get the link to the ACSM or manifest for an existing loan.""" try: loan = self.get_loan(patron, pin, overdrive_id) @@ -1469,6 +2140,894 @@ def process_item(self, identifier): break +class OverdriveData: + overdrive_client_key: str + overdrive_client_secret: str + overdrive_website_id: str + overdrive_server_nickname: str = OverdriveConstants.PRODUCTION_SERVERS + max_retry_count: int = 0 + + +class 
OverdriveRepresentationExtractor: + """Extract useful information from Overdrive's JSON representations.""" + + log = logging.getLogger("Overdrive representation extractor") + + def __init__(self, api): + """Constructor. + + :param api: An OverdriveAPI object. This will be used when deciding + which portions of a JSON representation are relevant to the active + Overdrive collection. + """ + self.library_id = api.advantage_library_id + + @classmethod + def availability_link_list(cls, book_list): + """:return: A list of dictionaries with keys `id`, `title`, `availability_link`.""" + l = [] + if not "products" in book_list: + return [] + + products = book_list["products"] + for product in products: + if not "id" in product: + cls.log.warning("No ID found in %r", product) + continue + book_id = product["id"] + data = dict( + id=book_id, + title=product.get("title"), + author_name=None, + date_added=product.get("dateAdded"), + ) + if "primaryCreator" in product: + creator = product["primaryCreator"] + if creator.get("role") == "Author": + data["author_name"] = creator.get("name") + links = product.get("links", []) + if "availability" in links: + link = links["availability"]["href"] + data["availability_link"] = OverdriveAPI.make_link_safe(link) + else: + logging.getLogger("Overdrive API").warning( + "No availability link for %s", book_id + ) + l.append(data) + return l + + @classmethod + def link(self, page, rel): + if "links" in page and rel in page["links"]: + raw_link = page["links"][rel]["href"] + link = OverdriveAPI.make_link_safe(raw_link) + else: + link = None + return link + + format_data_for_overdrive_format = { + "ebook-pdf-adobe": (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM), + "ebook-pdf-open": (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.NO_DRM), + "ebook-epub-adobe": ( + Representation.EPUB_MEDIA_TYPE, + DeliveryMechanism.ADOBE_DRM, + ), + "ebook-epub-open": (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM), + "audiobook-mp3": ("application/x-od-media", DeliveryMechanism.OVERDRIVE_DRM), + "music-mp3": ("application/x-od-media", DeliveryMechanism.OVERDRIVE_DRM), + "ebook-overdrive": [ + ( + MediaTypes.OVERDRIVE_EBOOK_MANIFEST_MEDIA_TYPE, + DeliveryMechanism.LIBBY_DRM, + ), + ( + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, + DeliveryMechanism.STREAMING_DRM, + ), + ], + "audiobook-overdrive": [ + ( + MediaTypes.OVERDRIVE_AUDIOBOOK_MANIFEST_MEDIA_TYPE, + DeliveryMechanism.LIBBY_DRM, + ), + ( + DeliveryMechanism.STREAMING_AUDIO_CONTENT_TYPE, + DeliveryMechanism.STREAMING_DRM, + ), + ], + "video-streaming": ( + DeliveryMechanism.STREAMING_VIDEO_CONTENT_TYPE, + DeliveryMechanism.STREAMING_DRM, + ), + "ebook-kindle": ( + DeliveryMechanism.KINDLE_CONTENT_TYPE, + DeliveryMechanism.KINDLE_DRM, + ), + "periodicals-nook": ( + DeliveryMechanism.NOOK_CONTENT_TYPE, + DeliveryMechanism.NOOK_DRM, + ), + } + + # A mapping of the overdrive format name to end sample content type + # Overdrive samples are not DRM protected so the links should be + # stored as the end sample content type + sample_format_to_content_type = { + "ebook-overdrive": "text/html", + "audiobook-wma": "audio/x-ms-wma", + "audiobook-mp3": "audio/mpeg", + "audiobook-overdrive": "text/html", + "ebook-epub-adobe": "application/epub+zip", + "magazine-overdrive": "text/html", + } + + @classmethod + def internal_formats(cls, overdrive_format): + """Yield all internal formats for the given Overdrive format. + + Some Overdrive formats become multiple internal formats. 
+ + :yield: A sequence of (content type, DRM system) 2-tuples + """ + result = cls.format_data_for_overdrive_format.get(overdrive_format) + if not result: + return + if isinstance(result, list): + yield from result + else: + yield result + + ignorable_overdrive_formats: Set[str] = set() + + overdrive_role_to_simplified_role = { + "actor": Contributor.ACTOR_ROLE, + "artist": Contributor.ARTIST_ROLE, + "book producer": Contributor.PRODUCER_ROLE, + "associated name": Contributor.ASSOCIATED_ROLE, + "author": Contributor.AUTHOR_ROLE, + "author of introduction": Contributor.INTRODUCTION_ROLE, + "author of foreword": Contributor.FOREWORD_ROLE, + "author of afterword": Contributor.AFTERWORD_ROLE, + "contributor": Contributor.CONTRIBUTOR_ROLE, + "colophon": Contributor.COLOPHON_ROLE, + "adapter": Contributor.ADAPTER_ROLE, + "etc.": Contributor.UNKNOWN_ROLE, + "cast member": Contributor.ACTOR_ROLE, + "collaborator": Contributor.COLLABORATOR_ROLE, + "compiler": Contributor.COMPILER_ROLE, + "composer": Contributor.COMPOSER_ROLE, + "copyright holder": Contributor.COPYRIGHT_HOLDER_ROLE, + "director": Contributor.DIRECTOR_ROLE, + "editor": Contributor.EDITOR_ROLE, + "engineer": Contributor.ENGINEER_ROLE, + "executive producer": Contributor.EXECUTIVE_PRODUCER_ROLE, + "illustrator": Contributor.ILLUSTRATOR_ROLE, + "musician": Contributor.MUSICIAN_ROLE, + "narrator": Contributor.NARRATOR_ROLE, + "other": Contributor.UNKNOWN_ROLE, + "performer": Contributor.PERFORMER_ROLE, + "producer": Contributor.PRODUCER_ROLE, + "translator": Contributor.TRANSLATOR_ROLE, + "photographer": Contributor.PHOTOGRAPHER_ROLE, + "lyricist": Contributor.LYRICIST_ROLE, + "transcriber": Contributor.TRANSCRIBER_ROLE, + "designer": Contributor.DESIGNER_ROLE, + } + + overdrive_medium_to_simplified_medium = { + "eBook": Edition.BOOK_MEDIUM, + "Video": Edition.VIDEO_MEDIUM, + "Audiobook": Edition.AUDIO_MEDIUM, + "Music": Edition.MUSIC_MEDIUM, + "Periodicals": Edition.PERIODICAL_MEDIUM, + } + + DATE_FORMAT = "%Y-%m-%d" + + @classmethod + def parse_roles(cls, id, rolestring): + rolestring = rolestring.lower() + roles = [x.strip() for x in rolestring.split(",")] + if " and " in roles[-1]: + roles = roles[:-1] + [x.strip() for x in roles[-1].split(" and ")] + processed = [] + for x in roles: + if x not in cls.overdrive_role_to_simplified_role: + cls.log.error("Could not process role %s for %s", x, id) + else: + processed.append(cls.overdrive_role_to_simplified_role[x]) + return processed + + def book_info_to_circulation(self, book): + """Note: The json data passed into this method is from a different file/stream + from the json data that goes into the book_info_to_metadata() method. + """ + # In Overdrive, 'reserved' books show up as books on + # hold. There is no separate notion of reserved books. + licenses_reserved = 0 + + licenses_owned = None + licenses_available = None + patrons_in_hold_queue = None + + # TODO: The only reason this works for a NotFound error is the + # circulation code sticks the known book ID into `book` ahead + # of time. That's a code smell indicating that this system + # needs to be refactored. 
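
A standalone sketch of the role-string splitting performed by parse_roles() above; the real method then maps each piece through overdrive_role_to_simplified_role, which is omitted here for brevity.

    def split_roles(rolestring):
        # Lowercase, split on commas, then split a trailing "x and y" pair.
        rolestring = rolestring.lower()
        roles = [x.strip() for x in rolestring.split(",")]
        if " and " in roles[-1]:
            roles = roles[:-1] + [x.strip() for x in roles[-1].split(" and ")]
        return roles

    print(split_roles("Author, Narrator and Illustrator"))
    # ['author', 'narrator', 'illustrator']
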
+ if "reserveId" in book and not "id" in book: + book["id"] = book["reserveId"] + if not "id" in book: + return None + overdrive_id = book["id"] + primary_identifier = IdentifierData(Identifier.OVERDRIVE_ID, overdrive_id) + # TODO: We might be able to use this information to avoid the + # need for explicit configuration of Advantage collections, or + # at least to keep Advantage collections more up-to-date than + # they would be otherwise, as a side effect of updating + # regular Overdrive collections. + + # TODO: this would be the place to handle simultaneous use + # titles -- these can be detected with + # availabilityType="AlwaysAvailable" and have their + # .licenses_owned set to LicensePool.UNLIMITED_ACCESS. + # see http://developer.overdrive.com/apis/library-availability-new + + # TODO: Cost-per-circ titles + # (availabilityType="LimitedAvailablility") can be handled + # similarly, though those can abruptly become unavailable, so + # UNLIMITED_ACCESS is probably not appropriate. + + error_code = book.get("errorCode") + # TODO: It's not clear what other error codes there might be. + # The current behavior will respond to errors other than + # NotFound by leaving the book alone, but this might not be + # the right behavior. + if error_code == "NotFound": + licenses_owned = 0 + licenses_available = 0 + patrons_in_hold_queue = 0 + elif book.get("isOwnedByCollections") is not False: + # We own this book. + licenses_owned = 0 + licenses_available = 0 + + for account in self._get_applicable_accounts(book.get("accounts", [])): + licenses_owned += int(account.get("copiesOwned", 0)) + licenses_available += int(account.get("copiesAvailable", 0)) + + if "numberOfHolds" in book: + if patrons_in_hold_queue is None: + patrons_in_hold_queue = 0 + patrons_in_hold_queue += book["numberOfHolds"] + + return CirculationData( + data_source=DataSource.OVERDRIVE, + primary_identifier=primary_identifier, + licenses_owned=licenses_owned, + licenses_available=licenses_available, + licenses_reserved=licenses_reserved, + patrons_in_hold_queue=patrons_in_hold_queue, + ) + + def _get_applicable_accounts( + self, accounts: List[Dict[str, Any]] + ) -> List[Dict[str, Any]]: + """ + Returns those accounts from the accounts array that apply the + current overdrive collection context. + + If this is an overdrive parent collection, we want to return accounts + associated with the main OverDrive "library" and any non-main account + with sharing enabled. + + If this is a child OverDrive collection, then we return only the + account associated with that child's OverDrive Advantage "library". + Additionally, we want to exclude the account if it is "shared" since + we will be counting it with the parent collection. + """ + + if self.library_id == OverdriveAPI.OVERDRIVE_MAIN_ACCOUNT_ID: + # this is a parent collection + filtered_result = filter( + lambda account: account.get("id") + == OverdriveAPI.OVERDRIVE_MAIN_ACCOUNT_ID + or account.get("shared", False), + accounts, + ) + else: + # this is child collection + filtered_result = filter( + lambda account: account.get("id") == self.library_id + and not account.get("shared", False), + accounts, + ) + + return list(filtered_result) + + @classmethod + def image_link_to_linkdata(cls, link, rel): + if not link or not "href" in link: + return None + href = link["href"] + if "00000000-0000-0000-0000" in href: + # This is a stand-in cover for preorders. It's better not + # to have a cover at all -- we might be able to get one + # later, or from another source. 
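
A worked example (outside the diff) of the account filtering described in _get_applicable_accounts() above, with invented account data; -1 stands in for OVERDRIVE_MAIN_ACCOUNT_ID.

    MAIN_ACCOUNT_ID = -1  # OverdriveAPI.OVERDRIVE_MAIN_ACCOUNT_ID

    accounts = [
        {"id": -1, "copiesOwned": 5},                   # main account
        {"id": 61, "shared": True, "copiesOwned": 2},   # Advantage, shared
        {"id": 62, "shared": False, "copiesOwned": 3},  # Advantage, not shared
    ]

    def applicable(accounts, library_id):
        if library_id == MAIN_ACCOUNT_ID:
            # Parent collection: the main account plus any shared Advantage accounts.
            return [a for a in accounts
                    if a.get("id") == MAIN_ACCOUNT_ID or a.get("shared", False)]
        # Child (Advantage) collection: only its own, non-shared account.
        return [a for a in accounts
                if a.get("id") == library_id and not a.get("shared", False)]

    assert [a["id"] for a in applicable(accounts, MAIN_ACCOUNT_ID)] == [-1, 61]
    assert [a["id"] for a in applicable(accounts, 62)] == [62]
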
+ return None + href = OverdriveAPI.make_link_safe(href) + media_type = link.get("type", None) + return LinkData(rel=rel, href=href, media_type=media_type) + + @classmethod + def book_info_to_metadata( + cls, book, include_bibliographic=True, include_formats=True + ): + """Turn Overdrive's JSON representation of a book into a Metadata + object. + + Note: The json data passed into this method is from a different file/stream + from the json data that goes into the book_info_to_circulation() method. + """ + if not "id" in book: + return None + overdrive_id = book["id"] + primary_identifier = IdentifierData(Identifier.OVERDRIVE_ID, overdrive_id) + + # If we trust classification data, we'll give it this weight. + # Otherwise we'll probably give it a fraction of this weight. + trusted_weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT + + if include_bibliographic: + title = book.get("title", None) + sort_title = book.get("sortTitle") + subtitle = book.get("subtitle", None) + series = book.get("series", None) + publisher = book.get("publisher", None) + imprint = book.get("imprint", None) + + if "publishDate" in book: + published = strptime_utc(book["publishDate"][:10], cls.DATE_FORMAT) + else: + published = None + + languages = [l["code"] for l in book.get("languages", [])] + if "eng" in languages or not languages: + language = "eng" + else: + language = sorted(languages)[0] + + contributors = [] + for creator in book.get("creators", []): + sort_name = creator["fileAs"] + display_name = creator["name"] + role = creator["role"] + roles = cls.parse_roles(overdrive_id, role) or [ + Contributor.UNKNOWN_ROLE + ] + contributor = ContributorData( + sort_name=sort_name, + display_name=display_name, + roles=roles, + biography=creator.get("bioText", None), + ) + contributors.append(contributor) + + subjects = [] + for sub in book.get("subjects", []): + subject = SubjectData( + type=Subject.OVERDRIVE, + identifier=sub["value"], + weight=trusted_weight, + ) + subjects.append(subject) + + for sub in book.get("keywords", []): + subject = SubjectData( + type=Subject.TAG, + identifier=sub["value"], + # We don't use TRUSTED_DISTRIBUTOR_WEIGHT because + # we don't know where the tags come from -- + # probably Overdrive users -- and they're + # frequently wrong. + weight=1, + ) + subjects.append(subject) + + extra = dict() + if "grade_levels" in book: + # n.b. Grade levels are measurements of reading level, not + # age appropriateness. We can use them as a measure of age + # appropriateness in a pinch, but we weight them less + # heavily than TRUSTED_DISTRIBUTOR_WEIGHT. 
+ for i in book["grade_levels"]: + subject = SubjectData( + type=Subject.GRADE_LEVEL, + identifier=i["value"], + weight=trusted_weight / 10, + ) + subjects.append(subject) + + overdrive_medium = book.get("mediaType", None) + if ( + overdrive_medium + and overdrive_medium not in cls.overdrive_medium_to_simplified_medium + ): + cls.log.error( + "Could not process medium %s for %s", overdrive_medium, overdrive_id + ) + + medium = cls.overdrive_medium_to_simplified_medium.get( + overdrive_medium, Edition.BOOK_MEDIUM + ) + + measurements = [] + if "awards" in book: + extra["awards"] = book.get("awards", []) + num_awards = len(extra["awards"]) + measurements.append( + MeasurementData(Measurement.AWARDS, str(num_awards)) + ) + + for name, subject_type in ( + ("ATOS", Subject.ATOS_SCORE), + ("lexileScore", Subject.LEXILE_SCORE), + ("interestLevel", Subject.INTEREST_LEVEL), + ): + if not name in book: + continue + identifier = str(book[name]) + subjects.append( + SubjectData( + type=subject_type, identifier=identifier, weight=trusted_weight + ) + ) + + for grade_level_info in book.get("gradeLevels", []): + grade_level = grade_level_info.get("value") + subjects.append( + SubjectData( + type=Subject.GRADE_LEVEL, + identifier=grade_level, + weight=trusted_weight, + ) + ) + + identifiers = [] + links = [] + sample_hrefs = set() + for format in book.get("formats", []): + for new_id in format.get("identifiers", []): + t = new_id["type"] + v = new_id["value"] + orig_v = v + type_key = None + if t == "ASIN": + type_key = Identifier.ASIN + elif t == "ISBN": + type_key = Identifier.ISBN + if len(v) == 10: + v = isbnlib.to_isbn13(v) + if v is None or not isbnlib.is_isbn13(v): + # Overdrive sometimes uses invalid values + # like "n/a" as placeholders. Ignore such + # values to avoid a situation where hundreds of + # books appear to have the same ISBN. ISBNs + # which fail check digit checks or are invalid + # also can occur. Log them for review. + cls.log.info("Bad ISBN value provided: %s", orig_v) + continue + elif t == "DOI": + type_key = Identifier.DOI + elif t == "UPC": + type_key = Identifier.UPC + elif t == "PublisherCatalogNumber": + continue + if type_key and v: + identifiers.append(IdentifierData(type_key, v, 1)) + + # Samples become links. + if "samples" in format: + for sample_info in format["samples"]: + href = sample_info["url"] + # Have we already parsed this sample? Overdrive repeats samples per format + if href in sample_hrefs: + continue + + # Every sample has its own format type + overdrive_format_name = sample_info.get("formatType") + if not overdrive_format_name: + # Malformed sample + continue + content_type = cls.sample_format_to_content_type.get( + overdrive_format_name + ) + if not content_type: + # Unusable by us. + cls.log.warning( + f"Did not find a sample format mapping for '{overdrive_format_name}': {href}" + ) + continue + + if Representation.is_media_type(content_type): + links.append( + LinkData( + rel=Hyperlink.SAMPLE, + href=href, + media_type=content_type, + ) + ) + sample_hrefs.add(href) + + # A cover and its thumbnail become a single LinkData. + if "images" in book: + images = book["images"] + image_data = cls.image_link_to_linkdata( + images.get("cover"), Hyperlink.IMAGE + ) + for name in ["cover300Wide", "cover150Wide", "thumbnail"]: + # Try to get a thumbnail that's as close as possible + # to the size we use. 
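
A small sketch of the ISBN clean-up in the identifiers loop above, using isbnlib as the production code does; the sample values are invented or public examples.

    import isbnlib

    def normalize_isbn(value):
        # Mirrors the handling above: upgrade ISBN-10s, drop "n/a" placeholders
        # and values that fail the ISBN-13 check.
        if len(value) == 10:
            value = isbnlib.to_isbn13(value)
        if value is None or not isbnlib.is_isbn13(value):
            return None
        return value

    print(normalize_isbn("0439023483"))  # '9780439023481'
    print(normalize_isbn("n/a"))         # None
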
+ image = images.get(name) + thumbnail_data = cls.image_link_to_linkdata( + image, Hyperlink.THUMBNAIL_IMAGE + ) + if not image_data: + image_data = cls.image_link_to_linkdata(image, Hyperlink.IMAGE) + if thumbnail_data: + break + + if image_data: + if thumbnail_data: + image_data.thumbnail = thumbnail_data + links.append(image_data) + + # Descriptions become links. + short = book.get("shortDescription") + full = book.get("fullDescription") + if full: + links.append( + LinkData( + rel=Hyperlink.DESCRIPTION, + content=full, + media_type="text/html", + ) + ) + + if short and (not full or not full.startswith(short)): + links.append( + LinkData( + rel=Hyperlink.SHORT_DESCRIPTION, + content=short, + media_type="text/html", + ) + ) + + # Add measurements: rating and popularity + if book.get("starRating") is not None and book["starRating"] > 0: + measurements.append( + MeasurementData( + quantity_measured=Measurement.RATING, value=book["starRating"] + ) + ) + + if book.get("popularity"): + measurements.append( + MeasurementData( + quantity_measured=Measurement.POPULARITY, + value=book["popularity"], + ) + ) + + metadata = Metadata( + data_source=DataSource.OVERDRIVE, + title=title, + subtitle=subtitle, + sort_title=sort_title, + language=language, + medium=medium, + series=series, + publisher=publisher, + imprint=imprint, + published=published, + primary_identifier=primary_identifier, + identifiers=identifiers, + subjects=subjects, + contributors=contributors, + measurements=measurements, + links=links, + ) + else: + metadata = Metadata( + data_source=DataSource.OVERDRIVE, + primary_identifier=primary_identifier, + ) + + if include_formats: + formats = [] + for format in book.get("formats", []): + format_id = format["id"] + internal_formats = list(cls.internal_formats(format_id)) + if internal_formats: + for content_type, drm_scheme in internal_formats: + formats.append(FormatData(content_type, drm_scheme)) + elif format_id not in cls.ignorable_overdrive_formats: + cls.log.error( + "Could not process Overdrive format %s for %s", + format_id, + overdrive_id, + ) + + # Also make a CirculationData so we can write the formats, + circulationdata = CirculationData( + data_source=DataSource.OVERDRIVE, + primary_identifier=primary_identifier, + formats=formats, + ) + + metadata.circulation = circulationdata + + return metadata + + +class OverdriveAdvantageAccount: + """Holder and parser for data associated with Overdrive Advantage.""" + + def __init__(self, parent_library_id: str, library_id: str, name: str, token: str): + """Constructor. + + :param parent_library_id: The library ID of the parent Overdrive + account. + :param library_id: The library ID of the Overdrive Advantage account. + :param name: The name of the library whose Advantage account this is. + :param token: The collection token for this Advantage account + """ + self.parent_library_id = parent_library_id + self.library_id = library_id + self.name = name + self.token = token + + @classmethod + def from_representation(cls, content): + """Turn the representation of an advantageAccounts link into a list of + OverdriveAdvantageAccount objects. + + :param content: The data obtained by following an advantageAccounts + link. + :yield: A sequence of OverdriveAdvantageAccount objects. 
+ """ + data = json.loads(content) + parent_id = str(data.get("id")) + accounts = data.get("advantageAccounts", {}) + for account in accounts: + name = account["name"] + products_link = account["links"]["products"]["href"] + library_id = str(account.get("id")) + name = account.get("name") + token = account.get("collectionToken") + yield cls( + parent_library_id=parent_id, + library_id=library_id, + name=name, + token=token, + ) + + def to_collection(self, _db): + """Find or create a Collection object for this Overdrive Advantage + account. + + :return: a 2-tuple of Collections (primary Overdrive + collection, Overdrive Advantage collection) + """ + # First find the parent Collection. + try: + parent = ( + Collection.by_protocol(_db, ExternalIntegration.OVERDRIVE) + .filter(Collection.external_account_id == self.parent_library_id) + .one() + ) + except NoResultFound as e: + # Without the parent's credentials we can't access the child. + raise ValueError( + "Cannot create a Collection whose parent does not already exist." + ) + name = parent.name + " / " + self.name + child, is_new = get_one_or_create( + _db, + Collection, + parent_id=parent.id, + external_account_id=self.library_id, + create_method_kwargs=dict(name=name), + ) + if is_new: + # Make sure the child has its protocol set appropriately. + integration = child.create_external_integration( + ExternalIntegration.OVERDRIVE + ) + configuration = child.create_integration_configuration( + ExternalIntegration.OVERDRIVE + ) + + # Set or update the name of the collection to reflect the name of + # the library, just in case that name has changed. + child.name = name + return parent, child + + +class OverdriveBibliographicCoverageProvider(BibliographicCoverageProvider): + """Fill in bibliographic metadata for Overdrive records. + + This will occasionally fill in some availability information for a + single Collection, but we rely on Monitors to keep availability + information up to date for all Collections. + """ + + SERVICE_NAME = "Overdrive Bibliographic Coverage Provider" + DATA_SOURCE_NAME = DataSource.OVERDRIVE + PROTOCOL = ExternalIntegration.OVERDRIVE + INPUT_IDENTIFIER_TYPES = Identifier.OVERDRIVE_ID + + def __init__(self, collection, api_class=OverdriveAPI, **kwargs): + """Constructor. + + :param collection: Provide bibliographic coverage to all + Overdrive books in the given Collection. + :param api_class: Instantiate this class with the given Collection, + rather than instantiating OverdriveAPI. + """ + super().__init__(collection, **kwargs) + if isinstance(api_class, OverdriveAPI): + # Use a previously instantiated OverdriveAPI instance + # rather than creating a new one. + self.api = api_class + else: + # A web application should not use this option because it + # will put a non-scoped session in the mix. 
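
A minimal sketch of the class-or-instance pattern used in this constructor: tests pass a ready-made API object, which is used as-is, while normal callers pass the class, which is instantiated with the collection's session. FakeAPI and the type() check are illustrative simplifications, not the project's actual isinstance(api_class, OverdriveAPI) test.

    class FakeAPI:
        """Stand-in for OverdriveAPI / MockOverdriveAPI in this sketch."""

        def __init__(self, _db, collection):
            self.collection = collection

    def resolve_api(api_class, _db, collection):
        if not isinstance(api_class, type):
            return api_class               # already an instance: use it directly
        return api_class(_db, collection)  # a class: instantiate it ourselves

    prebuilt = FakeAPI(None, "collection A")
    assert resolve_api(prebuilt, None, "collection B") is prebuilt
    assert isinstance(resolve_api(FakeAPI, None, "collection A"), FakeAPI)
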
+ _db = Session.object_session(collection) + self.api = api_class(_db, collection) + + def process_item(self, identifier): + info = self.api.metadata_lookup(identifier) + error = None + if info.get("errorCode") == "NotFound": + error = "ID not recognized by Overdrive: %s" % identifier.identifier + elif info.get("errorCode") == "InvalidGuid": + error = "Invalid Overdrive ID: %s" % identifier.identifier + + if error: + return self.failure(identifier, error, transient=False) + + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + + if not metadata: + e = "Could not extract metadata from Overdrive data: %r" % info + return self.failure(identifier, e) + + self.metadata_pre_hook(metadata) + return self.set_metadata(identifier, metadata) + + def metadata_pre_hook(self, metadata): + """A hook method that allows subclasses to modify a Metadata + object derived from Overdrive before it's applied. + """ + return metadata + + +class GenerateOverdriveAdvantageAccountList(InputScript): + """Generates a CSV containing the following fields: + circulation manager + collection + client_key + external_account_id + library_token + advantage_name + advantage_id + advantage_token + already_configured + """ + + def __init__(self, _db=None, *args, **kwargs): + super().__init__(_db, *args, **kwargs) + self._data: List[List[str]] = list() + + def _create_overdrive_api(self, collection: Collection): + return OverdriveAPI(_db=self._db, collection=collection) + + def do_run(self, *args, **kwargs): + parsed = GenerateOverdriveAdvantageAccountList.parse_command_line( + _db=self._db, *args, **kwargs + ) + query: Query = Collection.by_protocol( + self._db, protocol=ExternalIntegration.OVERDRIVE + ) + for c in query.filter(Collection.parent_id == None): + collection: Collection = c + api = self._create_overdrive_api(collection=collection) + client_key = api.client_key().decode() + client_secret = api.client_secret().decode() + + try: + library_token = api.collection_token + advantage_accounts = api.get_advantage_accounts() + + for aa in advantage_accounts: + existing_child_collections = query.filter( + Collection.parent_id == collection.id + ) + already_configured_aa_libraries = [ + e.external_account_id for e in existing_child_collections + ] + self._data.append( + [ + collection.name, + collection.external_account_id, + client_key, + client_secret, + library_token, + aa.name, + aa.library_id, + aa.token, + aa.library_id in already_configured_aa_libraries, + ] + ) + except Exception as e: + logging.error( + f"Could not connect to collection {c.name}: reason: {str(e)}." 
+ ) + + file_path = parsed.output_file_path[0] + circ_manager_name = parsed.circulation_manager_name[0] + self.write_csv(output_file_path=file_path, circ_manager_name=circ_manager_name) + + def write_csv(self, output_file_path: str, circ_manager_name: str): + with open(output_file_path, "w", newline="") as csvfile: + writer = csv.writer(csvfile) + writer.writerow( + [ + "cm", + "collection", + "overdrive_library_id", + "client_key", + "client_secret", + "library_token", + "advantage_name", + "advantage_id", + "advantage_token", + "already_configured", + ] + ) + for i in self._data: + i.insert(0, circ_manager_name) + writer.writerow(i) + + @classmethod + def arg_parser(cls): + parser = argparse.ArgumentParser() + parser.add_argument( + "--output-file-path", + help="The path of an output file", + metavar="o", + nargs=1, + ) + + parser.add_argument( + "--circulation-manager-name", + help="The name of the circulation-manager", + metavar="c", + nargs=1, + required=True, + ) + + parser.add_argument( + "--file-format", + help="The file format of the output file", + metavar="f", + nargs=1, + default="csv", + ) + + return parser + + class OverdriveAdvantageAccountListScript(Script): def run(self): """Explain every Overdrive collection and, for each one, all of its diff --git a/bin/informational/overdrive-advantage-accounts b/bin/informational/overdrive-advantage-accounts index 269167f4a6..dff66cf3b6 100755 --- a/bin/informational/overdrive-advantage-accounts +++ b/bin/informational/overdrive-advantage-accounts @@ -6,6 +6,6 @@ bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..", "..") sys.path.append(os.path.abspath(package_dir)) -from core.scripts import GenerateOverdriveAdvantageAccountList +from api.overdrive import GenerateOverdriveAdvantageAccountList GenerateOverdriveAdvantageAccountList().run() diff --git a/bin/informational/overdrive-advantage-list b/bin/informational/overdrive-advantage-list index f4bc9b92c6..4f7964c099 100755 --- a/bin/informational/overdrive-advantage-list +++ b/bin/informational/overdrive-advantage-list @@ -6,6 +6,6 @@ bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..", "..") sys.path.append(os.path.abspath(package_dir)) -from api.overdrive import OverdriveAdvantageAccountListScript # noqa: E402 +from api.overdrive import OverdriveAdvantageAccountListScript OverdriveAdvantageAccountListScript().run() diff --git a/bin/repair/overdrive_bibliographic_coverage b/bin/repair/overdrive_bibliographic_coverage index 9c4218134e..9ac3242ae6 100755 --- a/bin/repair/overdrive_bibliographic_coverage +++ b/bin/repair/overdrive_bibliographic_coverage @@ -6,7 +6,7 @@ import sys bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..", "..") sys.path.append(os.path.abspath(package_dir)) -from core.overdrive import OverdriveBibliographicCoverageProvider +from api.overdrive import OverdriveBibliographicCoverageProvider from core.scripts import RunCoverageProviderScript RunCoverageProviderScript(OverdriveBibliographicCoverageProvider).run() diff --git a/core/overdrive.py b/core/overdrive.py deleted file mode 100644 index 5513bcb11e..0000000000 --- a/core/overdrive.py +++ /dev/null @@ -1,1502 +0,0 @@ -from __future__ import annotations - -import datetime -import json -import logging -from threading import RLock -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Tuple, cast -from urllib.parse import quote, urlsplit, urlunsplit - -import isbnlib -from flask_babel import lazy_gettext as _ -from 
requests.adapters import CaseInsensitiveDict, Response -from sqlalchemy.orm.exc import NoResultFound -from sqlalchemy.orm.session import Session - -from api.circulation import CirculationConfigurationMixin -from api.circulation_exceptions import CannotFulfill -from core.config import CannotLoadConfiguration, Configuration -from core.coverage import BibliographicCoverageProvider -from core.importers import BaseImporterSettings -from core.integration.settings import ( - ConfigurationFormItem, - ConfigurationFormItemType, - FormField, -) -from core.metadata_layer import ( - CirculationData, - ContributorData, - FormatData, - IdentifierData, - LinkData, - MeasurementData, - Metadata, - SubjectData, -) -from core.model import ( - Classification, - Collection, - Contributor, - Credential, - DataSource, - DeliveryMechanism, - Edition, - ExternalIntegration, - Hyperlink, - Identifier, - Measurement, - MediaTypes, - Representation, - Subject, - get_one_or_create, -) -from core.model.configuration import HasExternalIntegration -from core.util.datetime_helpers import strptime_utc, utc_now -from core.util.http import HTTP, BadResponseException -from core.util.string_helpers import base64 - -if TYPE_CHECKING: - pass - - -class OverdriveConstants: - OVERDRIVE_CLIENT_KEY = "overdrive_client_key" - OVERDRIVE_CLIENT_SECRET = "overdrive_client_secret" - OVERDRIVE_SERVER_NICKNAME = "overdrive_server_nickname" - OVERDRIVE_WEBSITE_ID = "overdrive_website_id" - - # Note that the library ID is not included here because it is not Overdrive-specific - OVERDRIVE_CONFIGURATION_KEYS = { - OVERDRIVE_CLIENT_KEY, - OVERDRIVE_CLIENT_SECRET, - OVERDRIVE_SERVER_NICKNAME, - OVERDRIVE_WEBSITE_ID, - } - - PRODUCTION_SERVERS = "production" - TESTING_SERVERS = "testing" - - -class OverdriveSettings(BaseImporterSettings): - """The basic Overdrive configuration""" - - external_account_id: Optional[str] = FormField( - form=ConfigurationFormItem( - label=_("Library ID"), - type=ConfigurationFormItemType.TEXT, - description="The library identifier.", - required=True, - ), - ) - overdrive_website_id: str = FormField( - form=ConfigurationFormItem( - label=_("Website ID"), - type=ConfigurationFormItemType.TEXT, - description="The web site identifier.", - required=True, - ) - ) - overdrive_client_key: str = FormField( - form=ConfigurationFormItem( - label=_("Client Key"), - type=ConfigurationFormItemType.TEXT, - description="The Overdrive client key.", - required=True, - ) - ) - overdrive_client_secret: str = FormField( - form=ConfigurationFormItem( - label=_("Client Secret"), - type=ConfigurationFormItemType.TEXT, - description="The Overdrive client secret.", - required=True, - ) - ) - - overdrive_server_nickname: str = FormField( - default=OverdriveConstants.PRODUCTION_SERVERS, - form=ConfigurationFormItem( - label=_("Server family"), - type=ConfigurationFormItemType.SELECT, - required=False, - description="Unless you hear otherwise from Overdrive, your integration should use their production servers.", - options={ - OverdriveConstants.PRODUCTION_SERVERS: ("Production"), - OverdriveConstants.TESTING_SERVERS: _("Testing"), - }, - ), - ) - - -class OverdriveData: - overdrive_client_key: str - overdrive_client_secret: str - overdrive_website_id: str - overdrive_server_nickname: str = OverdriveConstants.PRODUCTION_SERVERS - max_retry_count: int = 0 - - -class OverdriveCoreAPI( - HasExternalIntegration, - CirculationConfigurationMixin, -): - # An OverDrive defined constant indicating the "main" or parent account - # associated with an 
OverDrive collection. - OVERDRIVE_MAIN_ACCOUNT_ID = -1 - - log = logging.getLogger("Overdrive API") - - # A lock for threaded usage. - lock = RLock() - - # Production and testing have different host names for some of the - # API endpoints. This is configurable on the collection level. - HOSTS = { - OverdriveConstants.PRODUCTION_SERVERS: dict( - host="https://api.overdrive.com", - patron_host="https://patron.api.overdrive.com", - ), - OverdriveConstants.TESTING_SERVERS: dict( - host="https://integration.api.overdrive.com", - patron_host="https://integration-patron.api.overdrive.com", - ), - } - - # Production and testing setups use the same URLs for Client - # Authentication and Patron Authentication, but we use the same - # system as for other hostnames to give a consistent look to the - # templates. - for host in list(HOSTS.values()): - host["oauth_patron_host"] = "https://oauth-patron.overdrive.com" - host["oauth_host"] = "https://oauth.overdrive.com" - - # Each of these endpoint URLs has a slot to plug in one of the - # appropriate servers. This will be filled in either by a call to - # the endpoint() method (if there are other variables in the - # template), or by the _do_get or _do_post methods (if there are - # no other variables). - TOKEN_ENDPOINT = "%(oauth_host)s/token" - PATRON_TOKEN_ENDPOINT = "%(oauth_patron_host)s/patrontoken" - - LIBRARY_ENDPOINT = "%(host)s/v1/libraries/%(library_id)s" - ADVANTAGE_LIBRARY_ENDPOINT = ( - "%(host)s/v1/libraries/%(parent_library_id)s/advantageAccounts/%(library_id)s" - ) - ALL_PRODUCTS_ENDPOINT = ( - "%(host)s/v1/collections/%(collection_token)s/products?sort=%(sort)s" - ) - METADATA_ENDPOINT = ( - "%(host)s/v1/collections/%(collection_token)s/products/%(item_id)s/metadata" - ) - EVENTS_ENDPOINT = "%(host)s/v1/collections/%(collection_token)s/products?lastUpdateTime=%(lastupdatetime)s&sort=%(sort)s&limit=%(limit)s" - AVAILABILITY_ENDPOINT = "%(host)s/v2/collections/%(collection_token)s/products/%(product_id)s/availability" - - PATRON_INFORMATION_ENDPOINT = "%(patron_host)s/v1/patrons/me" - CHECKOUTS_ENDPOINT = "%(patron_host)s/v1/patrons/me/checkouts" - CHECKOUT_ENDPOINT = "%(patron_host)s/v1/patrons/me/checkouts/%(overdrive_id)s" - FORMATS_ENDPOINT = ( - "%(patron_host)s/v1/patrons/me/checkouts/%(overdrive_id)s/formats" - ) - HOLDS_ENDPOINT = "%(patron_host)s/v1/patrons/me/holds" - HOLD_ENDPOINT = "%(patron_host)s/v1/patrons/me/holds/%(product_id)s" - ME_ENDPOINT = "%(patron_host)s/v1/patrons/me" - - MAX_CREDENTIAL_AGE = 50 * 60 - - PAGE_SIZE_LIMIT = 300 - EVENT_SOURCE = "Overdrive" - - EVENT_DELAY = datetime.timedelta(minutes=120) - - # The formats we care about. - FORMATS = "ebook-epub-open,ebook-epub-adobe,ebook-pdf-adobe,ebook-pdf-open,audiobook-overdrive".split( - "," - ) - - # The formats that can be read by the default Library Simplified reader. - DEFAULT_READABLE_FORMATS = { - "ebook-epub-open", - "ebook-epub-adobe", - "ebook-pdf-open", - "audiobook-overdrive", - } - - # The formats that indicate the book has been fulfilled on an - # incompatible platform and just can't be fulfilled on Simplified - # in any format. - INCOMPATIBLE_PLATFORM_FORMATS = {"ebook-kindle"} - - OVERDRIVE_READ_FORMAT = "ebook-overdrive" - - TIME_FORMAT = "%Y-%m-%dT%H:%M:%SZ" - - # When associating an Overdrive account with a library, it's - # necessary to also specify an "ILS name" obtained from - # Overdrive. Components that don't authenticate patrons (such as - # the metadata wrangler) don't need to set this value. 
- ILS_NAME_KEY = "ils_name" - ILS_NAME_DEFAULT = "default" - - _external_integration: ExternalIntegration - _db: Session - _hosts: Dict[str, str] - _library_id: str - _collection_id: int - - def label(self): - return "Overdrive Core API" - - def description(self): - return "" - - @classmethod - def library_settings_class(cls): - raise NotImplementedError() - - @classmethod - def settings_class(cls): - return OverdriveSettings - - def __init__(self, _db: Session, collection: Collection): - if collection.protocol != ExternalIntegration.OVERDRIVE: - raise ValueError( - "Collection protocol is %s, but passed into OverdriveAPI!" - % collection.protocol - ) - - _library_id = collection.external_account_id - if not _library_id: - raise ValueError( - "Collection %s must have an external account ID" % collection.id - ) - else: - self._library_id = _library_id - - self._db = _db - self._external_integration = collection.external_integration - if collection.id is None: - raise ValueError( - "Collection passed into OverdriveAPI must have an ID, but %s does not" - % collection.name - ) - self._collection_id = collection.id - - # Initialize configuration information. - self._integration_configuration_id = cast( - int, collection.integration_configuration.id - ) - self._configuration = OverdriveData() - - if collection.parent: - # This is an Overdrive Advantage account. - self.parent_library_id = collection.parent.external_account_id - - # We're going to inherit all of the Overdrive credentials - # from the parent (the main Overdrive account), except for the - # library ID, which we already set. - parent_integration = collection.parent.integration_configuration - parent_config = self.settings_class()(**parent_integration.settings_dict) - for key in OverdriveConstants.OVERDRIVE_CONFIGURATION_KEYS: - parent_value = getattr(parent_config, key, None) - setattr(self._configuration, key, parent_value) - else: - self.parent_library_id = None - - # Self settings should override parent settings where available - settings = collection.integration_configuration.settings_dict - for name, schema in self.settings_class().schema()["properties"].items(): - if name in settings or not hasattr(self._configuration, name): - setattr( - self._configuration, name, settings.get(name, schema.get("default")) - ) - - if not self._configuration.overdrive_client_key: - raise CannotLoadConfiguration("Overdrive client key is not configured") - if not self._configuration.overdrive_client_secret: - raise CannotLoadConfiguration( - "Overdrive client password/secret is not configured" - ) - if not self._configuration.overdrive_website_id: - raise CannotLoadConfiguration("Overdrive website ID is not configured") - - self._server_nickname = self._configuration.overdrive_server_nickname - - self._hosts = self._determine_hosts(server_nickname=self._server_nickname) - - # This is set by an access to .token, or by a call to - # check_creds() or refresh_creds(). - self._token = None - - # This is set by an access to .collection_token - self._collection_token = None - - def configuration(self): - """Overdrive has a different implementation for configuration""" - return self._configuration - - def _determine_hosts(self, *, server_nickname: str) -> Dict[str, str]: - # Figure out which hostnames we'll be using when constructing - # endpoint URLs. 
- if server_nickname not in self.HOSTS: - server_nickname = OverdriveConstants.PRODUCTION_SERVERS - - return dict(self.HOSTS[server_nickname]) - - def external_integration(self, db: Session) -> ExternalIntegration: - return self._external_integration - - def endpoint(self, url: str, **kwargs) -> str: - """Create the URL to an Overdrive API endpoint. - - :param url: A template for the URL. - :param kwargs: Arguments to be interpolated into the template. - The server hostname will be interpolated automatically; you - don't have to pass it in. - """ - if not "%(" in url: - # Nothing to interpolate. - return url - kwargs.update(self._hosts) - return url % kwargs - - @property - def token(self): - if not self._token: - self.check_creds() - return self._token - - @property - def collection_token(self): - """Get the token representing this particular Overdrive collection. - - As a side effect, this will verify that the Overdrive - credentials are working. - """ - if not self._collection_token: - self.check_creds() - library = self.get_library() - error = library.get("errorCode") - if error: - message = library.get("message") - raise CannotLoadConfiguration( - "Overdrive credentials are valid but could not fetch library: %s" - % message - ) - self._collection_token = library["collectionToken"] - return self._collection_token - - @property - def collection(self) -> Optional[Collection]: - return Collection.by_id(self._db, id=self._collection_id) - - @property - def source(self): - return DataSource.lookup(self._db, DataSource.OVERDRIVE) - - def ils_name(self, library): - """Determine the ILS name to use for the given Library.""" - config = self.integration_configuration().for_library(library.id) - if not config: - return self.ILS_NAME_DEFAULT - return config.settings_dict.get(self.ILS_NAME_KEY, self.ILS_NAME_DEFAULT) - - @property - def advantage_library_id(self): - """The library ID for this library, as we should look for it in - certain API documents served by Overdrive. - - For ordinary collections (ie non-Advantage) with or without associated - Advantage (ie child) collections shared among libraries, this will be - equal to the OVERDRIVE_MAIN_ACCOUNT_ID. - - For Overdrive Advantage accounts, this will be the numeric - value of the Overdrive library ID. - """ - if self.parent_library_id is None: - # This is not an Overdrive Advantage collection. - # - # Instead of looking for the library ID itself in these - # documents, we should look for the constant main account id. - return self.OVERDRIVE_MAIN_ACCOUNT_ID - return int(self._library_id) - - def check_creds(self, force_refresh=False): - """If the Bearer Token has expired, update it.""" - with self.lock: - refresh_on_lookup = self.refresh_creds - if force_refresh: - refresh_on_lookup = lambda x: x - - credential = self.credential_object(refresh_on_lookup) - if force_refresh: - self.refresh_creds(credential) - self._token = credential.credential - - def credential_object(self, refresh): - """Look up the Credential object that allows us to use - the Overdrive API. 
- """ - return Credential.lookup( - self._db, - DataSource.OVERDRIVE, - None, - None, - refresh, - collection=self.collection, - ) - - def refresh_creds(self, credential): - """Fetch a new Bearer Token and update the given Credential object.""" - response = self.token_post( - self.TOKEN_ENDPOINT, - dict(grant_type="client_credentials"), - allowed_response_codes=[200], - ) - data = response.json() - self._update_credential(credential, data) - self._token = credential.credential - - def get( - self, url: str, extra_headers={}, exception_on_401=False - ) -> Tuple[int, CaseInsensitiveDict, bytes]: - """Make an HTTP GET request using the active Bearer Token.""" - request_headers = dict(Authorization="Bearer %s" % self.token) - request_headers.update(extra_headers) - - response: Response = self._do_get( - url, request_headers, allowed_response_codes=["2xx", "3xx", "401", "404"] - ) - status_code: int = response.status_code - headers: CaseInsensitiveDict = response.headers - content: bytes = response.content - - if status_code == 401: - if exception_on_401: - # This is our second try. Give up. - raise BadResponseException.from_response( - url, - "Something's wrong with the Overdrive OAuth Bearer Token!", - (status_code, headers, content), - ) - else: - # Refresh the token and try again. - self.check_creds(True) - return self.get(url, extra_headers, True) - else: - return status_code, headers, content - - @property - def token_authorization_header(self) -> str: - s = b"%s:%s" % (self.client_key(), self.client_secret()) - return "Basic " + base64.standard_b64encode(s).strip() - - @property - def fulfillment_authorization_header(self) -> str: - is_test_mode = ( - True - if self._server_nickname == OverdriveConstants.TESTING_SERVERS - else False - ) - try: - client_credentials = Configuration.overdrive_fulfillment_keys( - testing=is_test_mode - ) - except CannotLoadConfiguration as e: - raise CannotFulfill(*e.args) - - s = b"%s:%s" % ( - client_credentials["key"].encode(), - client_credentials["secret"].encode(), - ) - return "Basic " + base64.standard_b64encode(s).strip() - - def token_post( - self, - url: str, - payload: Dict[str, str], - is_fulfillment=False, - headers={}, - **kwargs, - ) -> Response: - """Make an HTTP POST request for purposes of getting an OAuth token.""" - headers = dict(headers) - headers["Authorization"] = ( - self.token_authorization_header - if not is_fulfillment - else self.fulfillment_authorization_header - ) - return self._do_post(url, payload, headers, **kwargs) - - @staticmethod - def _update_credential(credential, overdrive_data): - """Copy Overdrive OAuth data into a Credential object.""" - credential.credential = overdrive_data["access_token"] - expires_in = overdrive_data["expires_in"] * 0.9 - credential.expires = utc_now() + datetime.timedelta(seconds=expires_in) - - @property - def _library_endpoint(self) -> str: - """Which URL should we go to to get information about this collection? - - If this is an ordinary Overdrive account, we get information - from LIBRARY_ENDPOINT. - - If this is an Overdrive Advantage account, we get information - from LIBRARY_ADVANTAGE_ENDPOINT. - """ - args = dict(library_id=self._library_id) - if self.parent_library_id: - # This is an Overdrive advantage account. 
- args["parent_library_id"] = self.parent_library_id - endpoint = self.ADVANTAGE_LIBRARY_ENDPOINT - else: - endpoint = self.LIBRARY_ENDPOINT - return self.endpoint(endpoint, **args) - - def get_library(self): - """Get basic information about the collection, including - a link to the titles in the collection. - """ - url = self._library_endpoint - with self.lock: - representation, cached = Representation.get( - self._db, - url, - self.get, - exception_handler=Representation.reraise_exception, - ) - return json.loads(representation.content) - - def get_advantage_accounts(self): - """Find all the Overdrive Advantage accounts managed by this library. - - :yield: A sequence of OverdriveAdvantageAccount objects. - """ - library = self.get_library() - links = library.get("links", {}) - advantage = links.get("advantageAccounts") - if not advantage: - return [] - if advantage: - # This library has Overdrive Advantage accounts, or at - # least a link where some may be found. - advantage_url = advantage.get("href") - if not advantage_url: - return - representation, cached = Representation.get( - self._db, - advantage_url, - self.get, - exception_handler=Representation.reraise_exception, - ) - return OverdriveAdvantageAccount.from_representation(representation.content) - - def all_ids(self): - """Get IDs for every book in the system, with the most recently added - ones at the front. - """ - next_link = self._all_products_link - while next_link: - page_inventory, next_link = self._get_book_list_page(next_link, "next") - - yield from page_inventory - - @property - def _all_products_link(self) -> str: - url = self.endpoint( - self.ALL_PRODUCTS_ENDPOINT, - collection_token=self.collection_token, - sort="dateAdded:desc", - ) - return self.make_link_safe(url) - - def _get_book_list_page(self, link, rel_to_follow="next", extractor_class=None): - """Process a page of inventory whose circulation we need to check. - - Returns a 2-tuple: (availability_info, next_link). - `availability_info` is a list of dictionaries, each containing - basic availability and bibliographic information about - one book. - `next_link` is a link to the next page of results. - """ - extractor_class = extractor_class or OverdriveRepresentationExtractor - # We don't cache this because it changes constantly. - status_code, headers, content = self.get(link, {}) - if isinstance(content, (bytes, str)): - content = json.loads(content) - - # Find the link to the next page of results, if any. - next_link = extractor_class.link(content, rel_to_follow) - - # Prepare to get availability information for all the books on - # this page. - availability_queue = extractor_class.availability_link_list(content) - return availability_queue, next_link - - def recently_changed_ids(self, start, cutoff): - """Get IDs of books whose status has changed between the start time - and now. - """ - # `cutoff` is not supported by Overdrive, so we ignore it. All - # we can do is get events between the start time and now. 
- - last_update_time = start - self.EVENT_DELAY - self.log.info("Asking for circulation changes since %s", last_update_time) - last_update = last_update_time.strftime(self.TIME_FORMAT) - - next_link = self.endpoint( - self.EVENTS_ENDPOINT, - lastupdatetime=last_update, - sort="popularity:desc", - limit=self.PAGE_SIZE_LIMIT, - collection_token=self.collection_token, - ) - next_link = self.make_link_safe(next_link) - while next_link: - page_inventory, next_link = self._get_book_list_page(next_link) - # We won't be sending out any events for these books yet, - # because we don't know if anything changed, but we will - # be putting them on the list of inventory items to - # refresh. At that point we will send out events. - yield from page_inventory - - def metadata_lookup(self, identifier): - """Look up metadata for an Overdrive identifier.""" - url = self.endpoint( - self.METADATA_ENDPOINT, - collection_token=self.collection_token, - item_id=identifier.identifier, - ) - status_code, headers, content = self.get(url, {}) - if isinstance(content, (bytes, str)): - content = json.loads(content) - return content - - def metadata_lookup_obj(self, identifier): - url = self.endpoint( - self.METADATA_ENDPOINT, - collection_token=self.collection_token, - item_id=identifier, - ) - status_code, headers, content = self.get(url, {}) - if isinstance(content, (bytes, str)): - content = json.loads(content) - return OverdriveRepresentationExtractor.book_info_to_metadata(content) - - @classmethod - def make_link_safe(cls, url: str) -> str: - """Turn a server-provided link into a link the server will accept! - - The {} part is completely obnoxious and I have complained about it to - Overdrive. - - The availability part is to make sure we always use v2 of the - availability API, even if Overdrive sent us a link to v1. 
- """ - parts = list(urlsplit(url)) - parts[2] = quote(parts[2]) - endings = ("/availability", "/availability/") - if parts[2].startswith("/v1/collections/") and any( - parts[2].endswith(x) for x in endings - ): - parts[2] = parts[2].replace("/v1/collections/", "/v2/collections/", 1) - query_string = parts[3] - query_string = query_string.replace("+", "%2B") - query_string = query_string.replace(":", "%3A") - query_string = query_string.replace("{", "%7B") - query_string = query_string.replace("}", "%7D") - parts[3] = query_string - return urlunsplit(tuple(parts)) - - def _do_get(self, url: str, headers, **kwargs) -> Response: - """This method is overridden in MockOverdriveAPI.""" - url = self.endpoint(url) - kwargs["max_retry_count"] = int(self._configuration.max_retry_count) - kwargs["timeout"] = 120 - return HTTP.get_with_timeout(url, headers=headers, **kwargs) - - def _do_post(self, url: str, payload, headers, **kwargs) -> Response: - """This method is overridden in MockOverdriveAPI.""" - url = self.endpoint(url) - kwargs["max_retry_count"] = int(self._configuration.max_retry_count) - kwargs["timeout"] = 120 - return HTTP.post_with_timeout(url, payload, headers=headers, **kwargs) - - def website_id(self) -> bytes: - return self._configuration.overdrive_website_id.encode("utf-8") - - def client_key(self) -> bytes: - return self._configuration.overdrive_client_key.encode("utf-8") - - def client_secret(self) -> bytes: - return self._configuration.overdrive_client_secret.encode("utf-8") - - def library_id(self) -> str: - return self._library_id - - def hosts(self) -> Dict[str, str]: - return dict(self._hosts) - - -class OverdriveRepresentationExtractor: - """Extract useful information from Overdrive's JSON representations.""" - - log = logging.getLogger("Overdrive representation extractor") - - def __init__(self, api): - """Constructor. - - :param api: An OverdriveAPI object. This will be used when deciding - which portions of a JSON representation are relevant to the active - Overdrive collection. 
- """ - self.library_id = api.advantage_library_id - - @classmethod - def availability_link_list(cls, book_list): - """:return: A list of dictionaries with keys `id`, `title`, `availability_link`.""" - l = [] - if not "products" in book_list: - return [] - - products = book_list["products"] - for product in products: - if not "id" in product: - cls.log.warning("No ID found in %r", product) - continue - book_id = product["id"] - data = dict( - id=book_id, - title=product.get("title"), - author_name=None, - date_added=product.get("dateAdded"), - ) - if "primaryCreator" in product: - creator = product["primaryCreator"] - if creator.get("role") == "Author": - data["author_name"] = creator.get("name") - links = product.get("links", []) - if "availability" in links: - link = links["availability"]["href"] - data["availability_link"] = OverdriveCoreAPI.make_link_safe(link) - else: - logging.getLogger("Overdrive API").warning( - "No availability link for %s", book_id - ) - l.append(data) - return l - - @classmethod - def link(self, page, rel): - if "links" in page and rel in page["links"]: - raw_link = page["links"][rel]["href"] - link = OverdriveCoreAPI.make_link_safe(raw_link) - else: - link = None - return link - - format_data_for_overdrive_format = { - "ebook-pdf-adobe": (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM), - "ebook-pdf-open": (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.NO_DRM), - "ebook-epub-adobe": ( - Representation.EPUB_MEDIA_TYPE, - DeliveryMechanism.ADOBE_DRM, - ), - "ebook-epub-open": (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM), - "audiobook-mp3": ("application/x-od-media", DeliveryMechanism.OVERDRIVE_DRM), - "music-mp3": ("application/x-od-media", DeliveryMechanism.OVERDRIVE_DRM), - "ebook-overdrive": [ - ( - MediaTypes.OVERDRIVE_EBOOK_MANIFEST_MEDIA_TYPE, - DeliveryMechanism.LIBBY_DRM, - ), - ( - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, - DeliveryMechanism.STREAMING_DRM, - ), - ], - "audiobook-overdrive": [ - ( - MediaTypes.OVERDRIVE_AUDIOBOOK_MANIFEST_MEDIA_TYPE, - DeliveryMechanism.LIBBY_DRM, - ), - ( - DeliveryMechanism.STREAMING_AUDIO_CONTENT_TYPE, - DeliveryMechanism.STREAMING_DRM, - ), - ], - "video-streaming": ( - DeliveryMechanism.STREAMING_VIDEO_CONTENT_TYPE, - DeliveryMechanism.STREAMING_DRM, - ), - "ebook-kindle": ( - DeliveryMechanism.KINDLE_CONTENT_TYPE, - DeliveryMechanism.KINDLE_DRM, - ), - "periodicals-nook": ( - DeliveryMechanism.NOOK_CONTENT_TYPE, - DeliveryMechanism.NOOK_DRM, - ), - } - - # A mapping of the overdrive format name to end sample content type - # Overdrive samples are not DRM protected so the links should be - # stored as the end sample content type - sample_format_to_content_type = { - "ebook-overdrive": "text/html", - "audiobook-wma": "audio/x-ms-wma", - "audiobook-mp3": "audio/mpeg", - "audiobook-overdrive": "text/html", - "ebook-epub-adobe": "application/epub+zip", - "magazine-overdrive": "text/html", - } - - @classmethod - def internal_formats(cls, overdrive_format): - """Yield all internal formats for the given Overdrive format. - - Some Overdrive formats become multiple internal formats. 
- - :yield: A sequence of (content type, DRM system) 2-tuples - """ - result = cls.format_data_for_overdrive_format.get(overdrive_format) - if not result: - return - if isinstance(result, list): - yield from result - else: - yield result - - ignorable_overdrive_formats: Set[str] = set() - - overdrive_role_to_simplified_role = { - "actor": Contributor.ACTOR_ROLE, - "artist": Contributor.ARTIST_ROLE, - "book producer": Contributor.PRODUCER_ROLE, - "associated name": Contributor.ASSOCIATED_ROLE, - "author": Contributor.AUTHOR_ROLE, - "author of introduction": Contributor.INTRODUCTION_ROLE, - "author of foreword": Contributor.FOREWORD_ROLE, - "author of afterword": Contributor.AFTERWORD_ROLE, - "contributor": Contributor.CONTRIBUTOR_ROLE, - "colophon": Contributor.COLOPHON_ROLE, - "adapter": Contributor.ADAPTER_ROLE, - "etc.": Contributor.UNKNOWN_ROLE, - "cast member": Contributor.ACTOR_ROLE, - "collaborator": Contributor.COLLABORATOR_ROLE, - "compiler": Contributor.COMPILER_ROLE, - "composer": Contributor.COMPOSER_ROLE, - "copyright holder": Contributor.COPYRIGHT_HOLDER_ROLE, - "director": Contributor.DIRECTOR_ROLE, - "editor": Contributor.EDITOR_ROLE, - "engineer": Contributor.ENGINEER_ROLE, - "executive producer": Contributor.EXECUTIVE_PRODUCER_ROLE, - "illustrator": Contributor.ILLUSTRATOR_ROLE, - "musician": Contributor.MUSICIAN_ROLE, - "narrator": Contributor.NARRATOR_ROLE, - "other": Contributor.UNKNOWN_ROLE, - "performer": Contributor.PERFORMER_ROLE, - "producer": Contributor.PRODUCER_ROLE, - "translator": Contributor.TRANSLATOR_ROLE, - "photographer": Contributor.PHOTOGRAPHER_ROLE, - "lyricist": Contributor.LYRICIST_ROLE, - "transcriber": Contributor.TRANSCRIBER_ROLE, - "designer": Contributor.DESIGNER_ROLE, - } - - overdrive_medium_to_simplified_medium = { - "eBook": Edition.BOOK_MEDIUM, - "Video": Edition.VIDEO_MEDIUM, - "Audiobook": Edition.AUDIO_MEDIUM, - "Music": Edition.MUSIC_MEDIUM, - "Periodicals": Edition.PERIODICAL_MEDIUM, - } - - DATE_FORMAT = "%Y-%m-%d" - - @classmethod - def parse_roles(cls, id, rolestring): - rolestring = rolestring.lower() - roles = [x.strip() for x in rolestring.split(",")] - if " and " in roles[-1]: - roles = roles[:-1] + [x.strip() for x in roles[-1].split(" and ")] - processed = [] - for x in roles: - if x not in cls.overdrive_role_to_simplified_role: - cls.log.error("Could not process role %s for %s", x, id) - else: - processed.append(cls.overdrive_role_to_simplified_role[x]) - return processed - - def book_info_to_circulation(self, book): - """Note: The json data passed into this method is from a different file/stream - from the json data that goes into the book_info_to_metadata() method. - """ - # In Overdrive, 'reserved' books show up as books on - # hold. There is no separate notion of reserved books. - licenses_reserved = 0 - - licenses_owned = None - licenses_available = None - patrons_in_hold_queue = None - - # TODO: The only reason this works for a NotFound error is the - # circulation code sticks the known book ID into `book` ahead - # of time. That's a code smell indicating that this system - # needs to be refactored. 
- if "reserveId" in book and not "id" in book: - book["id"] = book["reserveId"] - if not "id" in book: - return None - overdrive_id = book["id"] - primary_identifier = IdentifierData(Identifier.OVERDRIVE_ID, overdrive_id) - # TODO: We might be able to use this information to avoid the - # need for explicit configuration of Advantage collections, or - # at least to keep Advantage collections more up-to-date than - # they would be otherwise, as a side effect of updating - # regular Overdrive collections. - - # TODO: this would be the place to handle simultaneous use - # titles -- these can be detected with - # availabilityType="AlwaysAvailable" and have their - # .licenses_owned set to LicensePool.UNLIMITED_ACCESS. - # see http://developer.overdrive.com/apis/library-availability-new - - # TODO: Cost-per-circ titles - # (availabilityType="LimitedAvailablility") can be handled - # similarly, though those can abruptly become unavailable, so - # UNLIMITED_ACCESS is probably not appropriate. - - error_code = book.get("errorCode") - # TODO: It's not clear what other error codes there might be. - # The current behavior will respond to errors other than - # NotFound by leaving the book alone, but this might not be - # the right behavior. - if error_code == "NotFound": - licenses_owned = 0 - licenses_available = 0 - patrons_in_hold_queue = 0 - elif book.get("isOwnedByCollections") is not False: - # We own this book. - licenses_owned = 0 - licenses_available = 0 - - for account in self._get_applicable_accounts(book.get("accounts", [])): - licenses_owned += int(account.get("copiesOwned", 0)) - licenses_available += int(account.get("copiesAvailable", 0)) - - if "numberOfHolds" in book: - if patrons_in_hold_queue is None: - patrons_in_hold_queue = 0 - patrons_in_hold_queue += book["numberOfHolds"] - - return CirculationData( - data_source=DataSource.OVERDRIVE, - primary_identifier=primary_identifier, - licenses_owned=licenses_owned, - licenses_available=licenses_available, - licenses_reserved=licenses_reserved, - patrons_in_hold_queue=patrons_in_hold_queue, - ) - - def _get_applicable_accounts( - self, accounts: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: - """ - Returns those accounts from the accounts array that apply the - current overdrive collection context. - - If this is an overdrive parent collection, we want to return accounts - associated with the main OverDrive "library" and any non-main account - with sharing enabled. - - If this is a child OverDrive collection, then we return only the - account associated with that child's OverDrive Advantage "library". - Additionally, we want to exclude the account if it is "shared" since - we will be counting it with the parent collection. - """ - - if self.library_id == OverdriveCoreAPI.OVERDRIVE_MAIN_ACCOUNT_ID: - # this is a parent collection - filtered_result = filter( - lambda account: account.get("id") - == OverdriveCoreAPI.OVERDRIVE_MAIN_ACCOUNT_ID - or account.get("shared", False), - accounts, - ) - else: - # this is child collection - filtered_result = filter( - lambda account: account.get("id") == self.library_id - and not account.get("shared", False), - accounts, - ) - - return list(filtered_result) - - @classmethod - def image_link_to_linkdata(cls, link, rel): - if not link or not "href" in link: - return None - href = link["href"] - if "00000000-0000-0000-0000" in href: - # This is a stand-in cover for preorders. It's better not - # to have a cover at all -- we might be able to get one - # later, or from another source. 
- return None - href = OverdriveCoreAPI.make_link_safe(href) - media_type = link.get("type", None) - return LinkData(rel=rel, href=href, media_type=media_type) - - @classmethod - def book_info_to_metadata( - cls, book, include_bibliographic=True, include_formats=True - ): - """Turn Overdrive's JSON representation of a book into a Metadata - object. - - Note: The json data passed into this method is from a different file/stream - from the json data that goes into the book_info_to_circulation() method. - """ - if not "id" in book: - return None - overdrive_id = book["id"] - primary_identifier = IdentifierData(Identifier.OVERDRIVE_ID, overdrive_id) - - # If we trust classification data, we'll give it this weight. - # Otherwise we'll probably give it a fraction of this weight. - trusted_weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT - - if include_bibliographic: - title = book.get("title", None) - sort_title = book.get("sortTitle") - subtitle = book.get("subtitle", None) - series = book.get("series", None) - publisher = book.get("publisher", None) - imprint = book.get("imprint", None) - - if "publishDate" in book: - published = strptime_utc(book["publishDate"][:10], cls.DATE_FORMAT) - else: - published = None - - languages = [l["code"] for l in book.get("languages", [])] - if "eng" in languages or not languages: - language = "eng" - else: - language = sorted(languages)[0] - - contributors = [] - for creator in book.get("creators", []): - sort_name = creator["fileAs"] - display_name = creator["name"] - role = creator["role"] - roles = cls.parse_roles(overdrive_id, role) or [ - Contributor.UNKNOWN_ROLE - ] - contributor = ContributorData( - sort_name=sort_name, - display_name=display_name, - roles=roles, - biography=creator.get("bioText", None), - ) - contributors.append(contributor) - - subjects = [] - for sub in book.get("subjects", []): - subject = SubjectData( - type=Subject.OVERDRIVE, - identifier=sub["value"], - weight=trusted_weight, - ) - subjects.append(subject) - - for sub in book.get("keywords", []): - subject = SubjectData( - type=Subject.TAG, - identifier=sub["value"], - # We don't use TRUSTED_DISTRIBUTOR_WEIGHT because - # we don't know where the tags come from -- - # probably Overdrive users -- and they're - # frequently wrong. - weight=1, - ) - subjects.append(subject) - - extra = dict() - if "grade_levels" in book: - # n.b. Grade levels are measurements of reading level, not - # age appropriateness. We can use them as a measure of age - # appropriateness in a pinch, but we weight them less - # heavily than TRUSTED_DISTRIBUTOR_WEIGHT. 
- for i in book["grade_levels"]: - subject = SubjectData( - type=Subject.GRADE_LEVEL, - identifier=i["value"], - weight=trusted_weight / 10, - ) - subjects.append(subject) - - overdrive_medium = book.get("mediaType", None) - if ( - overdrive_medium - and overdrive_medium not in cls.overdrive_medium_to_simplified_medium - ): - cls.log.error( - "Could not process medium %s for %s", overdrive_medium, overdrive_id - ) - - medium = cls.overdrive_medium_to_simplified_medium.get( - overdrive_medium, Edition.BOOK_MEDIUM - ) - - measurements = [] - if "awards" in book: - extra["awards"] = book.get("awards", []) - num_awards = len(extra["awards"]) - measurements.append( - MeasurementData(Measurement.AWARDS, str(num_awards)) - ) - - for name, subject_type in ( - ("ATOS", Subject.ATOS_SCORE), - ("lexileScore", Subject.LEXILE_SCORE), - ("interestLevel", Subject.INTEREST_LEVEL), - ): - if not name in book: - continue - identifier = str(book[name]) - subjects.append( - SubjectData( - type=subject_type, identifier=identifier, weight=trusted_weight - ) - ) - - for grade_level_info in book.get("gradeLevels", []): - grade_level = grade_level_info.get("value") - subjects.append( - SubjectData( - type=Subject.GRADE_LEVEL, - identifier=grade_level, - weight=trusted_weight, - ) - ) - - identifiers = [] - links = [] - sample_hrefs = set() - for format in book.get("formats", []): - for new_id in format.get("identifiers", []): - t = new_id["type"] - v = new_id["value"] - orig_v = v - type_key = None - if t == "ASIN": - type_key = Identifier.ASIN - elif t == "ISBN": - type_key = Identifier.ISBN - if len(v) == 10: - v = isbnlib.to_isbn13(v) - if v is None or not isbnlib.is_isbn13(v): - # Overdrive sometimes uses invalid values - # like "n/a" as placeholders. Ignore such - # values to avoid a situation where hundreds of - # books appear to have the same ISBN. ISBNs - # which fail check digit checks or are invalid - # also can occur. Log them for review. - cls.log.info("Bad ISBN value provided: %s", orig_v) - continue - elif t == "DOI": - type_key = Identifier.DOI - elif t == "UPC": - type_key = Identifier.UPC - elif t == "PublisherCatalogNumber": - continue - if type_key and v: - identifiers.append(IdentifierData(type_key, v, 1)) - - # Samples become links. - if "samples" in format: - for sample_info in format["samples"]: - href = sample_info["url"] - # Have we already parsed this sample? Overdrive repeats samples per format - if href in sample_hrefs: - continue - - # Every sample has its own format type - overdrive_format_name = sample_info.get("formatType") - if not overdrive_format_name: - # Malformed sample - continue - content_type = cls.sample_format_to_content_type.get( - overdrive_format_name - ) - if not content_type: - # Unusable by us. - cls.log.warning( - f"Did not find a sample format mapping for '{overdrive_format_name}': {href}" - ) - continue - - if Representation.is_media_type(content_type): - links.append( - LinkData( - rel=Hyperlink.SAMPLE, - href=href, - media_type=content_type, - ) - ) - sample_hrefs.add(href) - - # A cover and its thumbnail become a single LinkData. - if "images" in book: - images = book["images"] - image_data = cls.image_link_to_linkdata( - images.get("cover"), Hyperlink.IMAGE - ) - for name in ["cover300Wide", "cover150Wide", "thumbnail"]: - # Try to get a thumbnail that's as close as possible - # to the size we use. 
- image = images.get(name) - thumbnail_data = cls.image_link_to_linkdata( - image, Hyperlink.THUMBNAIL_IMAGE - ) - if not image_data: - image_data = cls.image_link_to_linkdata(image, Hyperlink.IMAGE) - if thumbnail_data: - break - - if image_data: - if thumbnail_data: - image_data.thumbnail = thumbnail_data - links.append(image_data) - - # Descriptions become links. - short = book.get("shortDescription") - full = book.get("fullDescription") - if full: - links.append( - LinkData( - rel=Hyperlink.DESCRIPTION, - content=full, - media_type="text/html", - ) - ) - - if short and (not full or not full.startswith(short)): - links.append( - LinkData( - rel=Hyperlink.SHORT_DESCRIPTION, - content=short, - media_type="text/html", - ) - ) - - # Add measurements: rating and popularity - if book.get("starRating") is not None and book["starRating"] > 0: - measurements.append( - MeasurementData( - quantity_measured=Measurement.RATING, value=book["starRating"] - ) - ) - - if book.get("popularity"): - measurements.append( - MeasurementData( - quantity_measured=Measurement.POPULARITY, - value=book["popularity"], - ) - ) - - metadata = Metadata( - data_source=DataSource.OVERDRIVE, - title=title, - subtitle=subtitle, - sort_title=sort_title, - language=language, - medium=medium, - series=series, - publisher=publisher, - imprint=imprint, - published=published, - primary_identifier=primary_identifier, - identifiers=identifiers, - subjects=subjects, - contributors=contributors, - measurements=measurements, - links=links, - ) - else: - metadata = Metadata( - data_source=DataSource.OVERDRIVE, - primary_identifier=primary_identifier, - ) - - if include_formats: - formats = [] - for format in book.get("formats", []): - format_id = format["id"] - internal_formats = list(cls.internal_formats(format_id)) - if internal_formats: - for content_type, drm_scheme in internal_formats: - formats.append(FormatData(content_type, drm_scheme)) - elif format_id not in cls.ignorable_overdrive_formats: - cls.log.error( - "Could not process Overdrive format %s for %s", - format_id, - overdrive_id, - ) - - # Also make a CirculationData so we can write the formats, - circulationdata = CirculationData( - data_source=DataSource.OVERDRIVE, - primary_identifier=primary_identifier, - formats=formats, - ) - - metadata.circulation = circulationdata - - return metadata - - -class OverdriveAdvantageAccount: - """Holder and parser for data associated with Overdrive Advantage.""" - - def __init__(self, parent_library_id: str, library_id: str, name: str, token: str): - """Constructor. - - :param parent_library_id: The library ID of the parent Overdrive - account. - :param library_id: The library ID of the Overdrive Advantage account. - :param name: The name of the library whose Advantage account this is. - :param token: The collection token for this Advantage account - """ - self.parent_library_id = parent_library_id - self.library_id = library_id - self.name = name - self.token = token - - @classmethod - def from_representation(cls, content): - """Turn the representation of an advantageAccounts link into a list of - OverdriveAdvantageAccount objects. - - :param content: The data obtained by following an advantageAccounts - link. - :yield: A sequence of OverdriveAdvantageAccount objects. 
- """ - data = json.loads(content) - parent_id = str(data.get("id")) - accounts = data.get("advantageAccounts", {}) - for account in accounts: - name = account["name"] - products_link = account["links"]["products"]["href"] - library_id = str(account.get("id")) - name = account.get("name") - token = account.get("collectionToken") - yield cls( - parent_library_id=parent_id, - library_id=library_id, - name=name, - token=token, - ) - - def to_collection(self, _db): - """Find or create a Collection object for this Overdrive Advantage - account. - - :return: a 2-tuple of Collections (primary Overdrive - collection, Overdrive Advantage collection) - """ - # First find the parent Collection. - try: - parent = ( - Collection.by_protocol(_db, ExternalIntegration.OVERDRIVE) - .filter(Collection.external_account_id == self.parent_library_id) - .one() - ) - except NoResultFound as e: - # Without the parent's credentials we can't access the child. - raise ValueError( - "Cannot create a Collection whose parent does not already exist." - ) - name = parent.name + " / " + self.name - child, is_new = get_one_or_create( - _db, - Collection, - parent_id=parent.id, - external_account_id=self.library_id, - create_method_kwargs=dict(name=name), - ) - if is_new: - # Make sure the child has its protocol set appropriately. - integration = child.create_external_integration( - ExternalIntegration.OVERDRIVE - ) - configuration = child.create_integration_configuration( - ExternalIntegration.OVERDRIVE - ) - - # Set or update the name of the collection to reflect the name of - # the library, just in case that name has changed. - child.name = name - return parent, child - - -class OverdriveBibliographicCoverageProvider(BibliographicCoverageProvider): - """Fill in bibliographic metadata for Overdrive records. - - This will occasionally fill in some availability information for a - single Collection, but we rely on Monitors to keep availability - information up to date for all Collections. - """ - - SERVICE_NAME = "Overdrive Bibliographic Coverage Provider" - DATA_SOURCE_NAME = DataSource.OVERDRIVE - PROTOCOL = ExternalIntegration.OVERDRIVE - INPUT_IDENTIFIER_TYPES = Identifier.OVERDRIVE_ID - - def __init__(self, collection, api_class=OverdriveCoreAPI, **kwargs): - """Constructor. - - :param collection: Provide bibliographic coverage to all - Overdrive books in the given Collection. - :param api_class: Instantiate this class with the given Collection, - rather than instantiating OverdriveAPI. - """ - super().__init__(collection, **kwargs) - if isinstance(api_class, OverdriveCoreAPI): - # Use a previously instantiated OverdriveAPI instance - # rather than creating a new one. - self.api = api_class - else: - # A web application should not use this option because it - # will put a non-scoped session in the mix. 
- _db = Session.object_session(collection) - self.api = api_class(_db, collection) - - def process_item(self, identifier): - info = self.api.metadata_lookup(identifier) - error = None - if info.get("errorCode") == "NotFound": - error = "ID not recognized by Overdrive: %s" % identifier.identifier - elif info.get("errorCode") == "InvalidGuid": - error = "Invalid Overdrive ID: %s" % identifier.identifier - - if error: - return self.failure(identifier, error, transient=False) - - metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) - - if not metadata: - e = "Could not extract metadata from Overdrive data: %r" % info - return self.failure(identifier, e) - - self.metadata_pre_hook(metadata) - return self.set_metadata(identifier, metadata) - - def metadata_pre_hook(self, metadata): - """A hook method that allows subclasses to modify a Metadata - object derived from Overdrive before it's applied. - """ - return metadata diff --git a/core/scripts.py b/core/scripts.py index 0c39c190c5..7e55eee156 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -1,5 +1,4 @@ import argparse -import csv import datetime import json import logging @@ -10,7 +9,7 @@ import unicodedata import uuid from enum import Enum -from typing import Generator, List, Optional, Type +from typing import Generator, Optional, Type from sqlalchemy import and_, exists, tuple_ from sqlalchemy.orm import Query, Session, defer @@ -57,7 +56,6 @@ from core.model.patron import Loan from core.monitor import CollectionMonitor, ReaperMonitor from core.opds_import import OPDSImporter, OPDSImportMonitor -from core.overdrive import OverdriveCoreAPI from core.query.customlist import CustomListQueries from core.search.coverage_remover import RemovesSearchCoverage from core.service.container import Services, container_instance @@ -2718,122 +2716,6 @@ def do_run(self): ) -class GenerateOverdriveAdvantageAccountList(InputScript): - """Generates a CSV containing the following fields: - circulation manager - collection - client_key - external_account_id - library_token - advantage_name - advantage_id - advantage_token - already_configured - """ - - def __init__(self, _db=None, *args, **kwargs): - super().__init__(_db, *args, **kwargs) - self._data: List[List[str]] = list() - - def _create_overdrive_api(self, collection: Collection): - return OverdriveCoreAPI(_db=self._db, collection=collection) - - def do_run(self, *args, **kwargs): - parsed = GenerateOverdriveAdvantageAccountList.parse_command_line( - _db=self._db, *args, **kwargs - ) - query: Query = Collection.by_protocol( - self._db, protocol=ExternalIntegration.OVERDRIVE - ) - for c in query.filter(Collection.parent_id == None): - collection: Collection = c - api = self._create_overdrive_api(collection=collection) - client_key = api.client_key().decode() - client_secret = api.client_secret().decode() - - try: - library_token = api.collection_token - advantage_accounts = api.get_advantage_accounts() - - for aa in advantage_accounts: - existing_child_collections = query.filter( - Collection.parent_id == collection.id - ) - already_configured_aa_libraries = [ - e.external_account_id for e in existing_child_collections - ] - self._data.append( - [ - collection.name, - collection.external_account_id, - client_key, - client_secret, - library_token, - aa.name, - aa.library_id, - aa.token, - aa.library_id in already_configured_aa_libraries, - ] - ) - except Exception as e: - logging.error( - f"Could not connect to collection {c.name}: reason: {str(e)}." 
- ) - - file_path = parsed.output_file_path[0] - circ_manager_name = parsed.circulation_manager_name[0] - self.write_csv(output_file_path=file_path, circ_manager_name=circ_manager_name) - - def write_csv(self, output_file_path: str, circ_manager_name: str): - with open(output_file_path, "w", newline="") as csvfile: - writer = csv.writer(csvfile) - writer.writerow( - [ - "cm", - "collection", - "overdrive_library_id", - "client_key", - "client_secret", - "library_token", - "advantage_name", - "advantage_id", - "advantage_token", - "already_configured", - ] - ) - for i in self._data: - i.insert(0, circ_manager_name) - writer.writerow(i) - - @classmethod - def arg_parser(cls): - parser = argparse.ArgumentParser() - parser.add_argument( - "--output-file-path", - help="The path of an output file", - metavar="o", - nargs=1, - ) - - parser.add_argument( - "--circulation-manager-name", - help="The name of the circulation-manager", - metavar="c", - nargs=1, - required=True, - ) - - parser.add_argument( - "--file-format", - help="The file format of the output file", - metavar="f", - nargs=1, - default="csv", - ) - - return parser - - class CustomListUpdateEntriesScript(CustomListSweeperScript): """Traverse all entries and update lists if they have auto_update_enabled""" diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 3122398d97..7ced547870 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -33,7 +33,6 @@ "tests.fixtures.odl", "tests.fixtures.opds2_files", "tests.fixtures.opds_files", - "tests.fixtures.overdrive", "tests.fixtures.sample_covers", "tests.fixtures.search", "tests.fixtures.time", diff --git a/tests/core/files/overdrive/advantage_accounts.json b/tests/api/files/overdrive/advantage_accounts.json similarity index 100% rename from tests/core/files/overdrive/advantage_accounts.json rename to tests/api/files/overdrive/advantage_accounts.json diff --git a/tests/core/files/overdrive/audiobook.json b/tests/api/files/overdrive/audiobook.json similarity index 100% rename from tests/core/files/overdrive/audiobook.json rename to tests/api/files/overdrive/audiobook.json diff --git a/tests/core/files/overdrive/has_awards.json b/tests/api/files/overdrive/has_awards.json similarity index 100% rename from tests/core/files/overdrive/has_awards.json rename to tests/api/files/overdrive/has_awards.json diff --git a/tests/core/files/overdrive/has_grade_levels.json b/tests/api/files/overdrive/has_grade_levels.json similarity index 100% rename from tests/core/files/overdrive/has_grade_levels.json rename to tests/api/files/overdrive/has_grade_levels.json diff --git a/tests/core/files/overdrive/has_sample.json b/tests/api/files/overdrive/has_sample.json similarity index 100% rename from tests/core/files/overdrive/has_sample.json rename to tests/api/files/overdrive/has_sample.json diff --git a/tests/core/files/overdrive/overdrive_availability_advantage.json b/tests/api/files/overdrive/overdrive_availability_advantage.json similarity index 100% rename from tests/core/files/overdrive/overdrive_availability_advantage.json rename to tests/api/files/overdrive/overdrive_availability_advantage.json diff --git a/tests/core/files/overdrive/overdrive_availability_information.json b/tests/api/files/overdrive/overdrive_availability_information_2.json similarity index 100% rename from tests/core/files/overdrive/overdrive_availability_information.json rename to tests/api/files/overdrive/overdrive_availability_information_2.json diff --git a/tests/core/files/overdrive/overdrive_book_list.json 
b/tests/api/files/overdrive/overdrive_book_list.json similarity index 100% rename from tests/core/files/overdrive/overdrive_book_list.json rename to tests/api/files/overdrive/overdrive_book_list.json diff --git a/tests/core/files/overdrive/overdrive_book_list_missing_data.json b/tests/api/files/overdrive/overdrive_book_list_missing_data.json similarity index 100% rename from tests/core/files/overdrive/overdrive_book_list_missing_data.json rename to tests/api/files/overdrive/overdrive_book_list_missing_data.json diff --git a/tests/core/files/overdrive/overdrive_metadata.json b/tests/api/files/overdrive/overdrive_metadata.json similarity index 100% rename from tests/core/files/overdrive/overdrive_metadata.json rename to tests/api/files/overdrive/overdrive_metadata.json diff --git a/tests/api/mockapi/overdrive.py b/tests/api/mockapi/overdrive.py index ae7aa11192..d7ea7d7f0f 100644 --- a/tests/api/mockapi/overdrive.py +++ b/tests/api/mockapi/overdrive.py @@ -2,20 +2,45 @@ from sqlalchemy.orm import Session -from api.overdrive import OverdriveAPI +from api.overdrive import OverdriveAPI, OverdriveConstants from core.model import Library, get_one_or_create from core.model.collection import Collection from core.model.configuration import ExternalIntegration -from core.overdrive import OverdriveConstants, OverdriveCoreAPI from core.util.http import HTTP from tests.core.mock import MockRequestsResponse from tests.fixtures.database import DatabaseTransactionFixture -class MockOverdriveCoreAPI(OverdriveCoreAPI): +class MockOverdriveResponse: + def __init__(self, status_code, headers, content): + self.status_code = status_code + self.headers = headers + self.content = content + + def json(self): + return json.loads(self.content) + + +class MockOverdriveAPI(OverdriveAPI): + library_data = '{"id":1810,"name":"My Public Library (MA)","type":"Library","collectionToken":"1a09d9203","links":{"self":{"href":"http://api.overdrive.com/v1/libraries/1810","type":"application/vnd.overdrive.api+json"},"products":{"href":"http://api.overdrive.com/v1/collections/1a09d9203/products","type":"application/vnd.overdrive.api+json"},"dlrHomepage":{"href":"http://ebooks.nypl.org","type":"text/html"}},"formats":[{"id":"audiobook-wma","name":"OverDrive WMA Audiobook"},{"id":"ebook-pdf-adobe","name":"Adobe PDF eBook"},{"id":"ebook-mediado","name":"MediaDo eBook"},{"id":"ebook-epub-adobe","name":"Adobe EPUB eBook"},{"id":"ebook-kindle","name":"Kindle Book"},{"id":"audiobook-mp3","name":"OverDrive MP3 Audiobook"},{"id":"ebook-pdf-open","name":"Open PDF eBook"},{"id":"ebook-overdrive","name":"OverDrive Read"},{"id":"video-streaming","name":"Streaming Video"},{"id":"ebook-epub-open","name":"Open EPUB eBook"}]}' + + token_data = '{"access_token":"foo","token_type":"bearer","expires_in":3600,"scope":"LIB META AVAIL SRCH"}' + + def __init__(self, _db, collection): + self.access_token_requests = [] + self.requests = [] + self.responses = [] + + # Almost all tests will try to request the access token, so + # set the response that will be returned if an attempt is + # made. 
+ self.access_token_response = self.mock_access_token_response("bearer token") + super().__init__(_db, collection) + self._collection_token = "fake token" + @classmethod def mock_collection( - self, + cls, _db: Session, library: Library, name: str = "Test Overdrive Collection", @@ -49,17 +74,6 @@ def mock_collection( _db.refresh(config) return collection - def __init__(self, _db, collection, *args, **kwargs): - self.access_token_requests = [] - self.requests = [] - self.responses = [] - - # Almost all tests will try to request the access token, so - # set the response that will be returned if an attempt is - # made. - self.access_token_response = self.mock_access_token_response("bearer token") - super().__init__(_db, collection, *args, **kwargs) - def queue_collection_token(self): # Many tests immediately try to access the # collection token. This is a helper method to make it easy to @@ -111,24 +125,6 @@ def _make_request(self, url, *args, **kwargs): kwargs.get("disallowed_response_codes"), ) - -class MockOverdriveResponse: - def __init__(self, status_code, headers, content): - self.status_code = status_code - self.headers = headers - self.content = content - - def json(self): - return json.loads(self.content) - - -class MockOverdriveAPI(MockOverdriveCoreAPI, OverdriveAPI): - library_data = '{"id":1810,"name":"My Public Library (MA)","type":"Library","collectionToken":"1a09d9203","links":{"self":{"href":"http://api.overdrive.com/v1/libraries/1810","type":"application/vnd.overdrive.api+json"},"products":{"href":"http://api.overdrive.com/v1/collections/1a09d9203/products","type":"application/vnd.overdrive.api+json"},"dlrHomepage":{"href":"http://ebooks.nypl.org","type":"text/html"}},"formats":[{"id":"audiobook-wma","name":"OverDrive WMA Audiobook"},{"id":"ebook-pdf-adobe","name":"Adobe PDF eBook"},{"id":"ebook-mediado","name":"MediaDo eBook"},{"id":"ebook-epub-adobe","name":"Adobe EPUB eBook"},{"id":"ebook-kindle","name":"Kindle Book"},{"id":"audiobook-mp3","name":"OverDrive MP3 Audiobook"},{"id":"ebook-pdf-open","name":"Open PDF eBook"},{"id":"ebook-overdrive","name":"OverDrive Read"},{"id":"video-streaming","name":"Streaming Video"},{"id":"ebook-epub-open","name":"Open EPUB eBook"}]}' - - token_data = '{"access_token":"foo","token_type":"bearer","expires_in":3600,"scope":"LIB META AVAIL SRCH"}' - - collection_token = "fake token" - def patron_request(self, patron, pin, *args, **kwargs): response = self._make_request(*args, **kwargs) diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index c54acd9f19..86ba258711 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -1,12 +1,14 @@ from __future__ import annotations import base64 +import csv import json +import logging import os import random from datetime import timedelta from typing import TYPE_CHECKING, Any, Dict -from unittest.mock import MagicMock, create_autospec +from unittest.mock import MagicMock, PropertyMock, create_autospec, patch import pytest from requests import Response @@ -16,33 +18,46 @@ from api.circulation_exceptions import * from api.config import Configuration from api.overdrive import ( + GenerateOverdriveAdvantageAccountList, NewTitlesOverdriveCollectionMonitor, + OverdriveAdvantageAccount, OverdriveAPI, + OverdriveBibliographicCoverageProvider, OverdriveCirculationMonitor, OverdriveCollectionReaper, + OverdriveConstants, OverdriveFormatSweep, OverdriveManifestFulfillmentInfo, + OverdriveRepresentationExtractor, RecentOverdriveCollectionMonitor, ) from core.config import 
CannotLoadConfiguration +from core.coverage import CoverageFailure from core.integration.goals import Goals from core.integration.registry import IntegrationRegistry -from core.metadata_layer import TimestampData +from core.metadata_layer import LinkData, TimestampData from core.model import ( + Collection, + Contributor, DataSource, DeliveryMechanism, Edition, ExternalIntegration, + Hyperlink, Identifier, LicensePool, + Measurement, MediaTypes, Representation, RightsStatus, + Subject, ) -from core.overdrive import OverdriveConstants +from core.scripts import RunCollectionCoverageProviderScript from core.util.datetime_helpers import datetime_utc, utc_now +from core.util.http import BadResponseException from tests.api.mockapi.overdrive import MockOverdriveAPI from tests.core.mock import DummyHTTPClient, MockRequestsResponse +from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture @@ -52,6 +67,19 @@ from tests.fixtures.time import Time +@pytest.fixture +def mock_web_server(): + """A test fixture that yields a usable mock web server for the lifetime of the test.""" + _server = MockAPIServer("127.0.0.1", 10256) + _server.start() + logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") + yield _server + logging.info( + f"shutting down mock web server on {_server.address()}:{_server.port()}" + ) + _server.stop() + + class OverdriveAPIFixture: def __init__(self, db: DatabaseTransactionFixture, data: OverdriveAPIFilesFixture): self.db = db @@ -98,6 +126,446 @@ def overdrive_api_fixture( class TestOverdriveAPI: + def test_errors_not_retried( + self, + overdrive_api_fixture: OverdriveAPIFixture, + mock_web_server: MockAPIServer, + ): + session = overdrive_api_fixture.db.session + library = overdrive_api_fixture.db.default_library() + collection = MockOverdriveAPI.mock_collection(session, library) + + # Enqueue a response for the request that the server will make for a token. + _r = MockAPIServerResponse() + _r.status_code = 200 + _r.set_content( + b"""{ + "access_token": "x", + "expires_in": 23 + } + """ + ) + mock_web_server.enqueue_response("POST", "/oauth/token", _r) + + api = OverdriveAPI(session, collection) + api._hosts["oauth_host"] = mock_web_server.url("/oauth") + + # Try a get() call for each error code + for code in [404]: + _r = MockAPIServerResponse() + _r.status_code = code + mock_web_server.enqueue_response("GET", "/a/b/c", _r) + _status, _, _ = api.get(mock_web_server.url("/a/b/c")) + assert _status == code + + for code in [400, 403, 500, 501, 502, 503]: + _r = MockAPIServerResponse() + _r.status_code = code + + # The default is to retry 5 times, so enqueue 5 responses. + for i in range(0, 6): + mock_web_server.enqueue_response("GET", "/a/b/c", _r) + try: + api.get(mock_web_server.url("/a/b/c")) + except BadResponseException: + pass + + # Exactly one request was made for each error code, plus one for a token + assert len(mock_web_server.requests()) == 8 + + def test_constructor_makes_no_requests( + self, + overdrive_api_fixture: OverdriveAPIFixture, + ): + session = overdrive_api_fixture.db.session + library = overdrive_api_fixture.db.default_library() + # Invoking the OverdriveAPI constructor does not, by itself, + # make any HTTP requests. + collection = MockOverdriveAPI.mock_collection(session, library) + + class NoRequests(OverdriveAPI): + MSG = "This is a unit test, you can't make HTTP requests!" 
+ + def no_requests(self, *args, **kwargs): + raise Exception(self.MSG) + + _do_get = no_requests + _do_post = no_requests + _make_request = no_requests + + api = NoRequests(session, collection) + + # Attempting to access .token or .collection_token _will_ + # try to make an HTTP request. + for field in "token", "collection_token": + with pytest.raises(Exception) as excinfo: + getattr(api, field) + assert api.MSG in str(excinfo.value) + + def test_ils_name(self, overdrive_api_fixture: OverdriveAPIFixture): + fixture = overdrive_api_fixture + transaction = overdrive_api_fixture.db + + """The 'ils_name' setting (defined in + MockOverdriveAPI.mock_collection) is available through + OverdriveAPI.ils_name(). + """ + assert "e" == fixture.api.ils_name(transaction.default_library()) + + # The value must be explicitly set for a given library, or + # else the default will be used. + l2 = transaction.library() + assert "default" == fixture.api.ils_name(l2) + + def test_make_link_safe(self): + # Unsafe characters are escaped. + assert "http://foo.com?q=%2B%3A%7B%7D" == OverdriveAPI.make_link_safe( + "http://foo.com?q=+:{}" + ) + + # Links to version 1 of the availability API are converted + # to links to version 2. + v1 = "https://qa.api.overdrive.com/v1/collections/abcde/products/12345/availability" + v2 = "https://qa.api.overdrive.com/v2/collections/abcde/products/12345/availability" + assert v2 == OverdriveAPI.make_link_safe(v1) + + # We also handle the case of a trailing slash, just in case Overdrive + # starts serving links with trailing slashes. + v1 = v1 + "/" + v2 = v2 + "/" + assert v2 == OverdriveAPI.make_link_safe(v1) + + # Links to other endpoints are not converted + leave_alone = "https://qa.api.overdrive.com/v1/collections/abcde/products/12345" + assert leave_alone == OverdriveAPI.make_link_safe(leave_alone) + + def test_hosts(self, overdrive_api_fixture: OverdriveAPIFixture): + fixture = overdrive_api_fixture + session = overdrive_api_fixture.db.session + c = OverdriveAPI + + # By default, OverdriveAPI is initialized with the production + # set of hostnames. + assert fixture.api.hosts() == c.HOSTS[OverdriveConstants.PRODUCTION_SERVERS] + + # You can instead initialize it to use the testing set of + # hostnames. + def api_with_setting(x): + config = fixture.collection.integration_configuration + DatabaseTransactionFixture.set_settings(config, overdrive_server_nickname=x) + return c(session, fixture.collection) + + testing = api_with_setting(OverdriveConstants.TESTING_SERVERS) + assert testing.hosts() == c.HOSTS[OverdriveConstants.TESTING_SERVERS] + + # If the setting doesn't make sense, we default to production + # hostnames. + bad = api_with_setting("nonsensical") + assert bad.hosts() == c.HOSTS[OverdriveConstants.PRODUCTION_SERVERS] + + def test_endpoint(self, overdrive_api_fixture: OverdriveAPIFixture): + fixture = overdrive_api_fixture + + # The .endpoint() method performs string interpolation, including + # the names of servers. + template = ( + "%(host)s %(patron_host)s %(oauth_host)s %(oauth_patron_host)s %(extra)s" + ) + result = fixture.api.endpoint(template, extra="val") + + # The host names and the 'extra' argument have been used to + # fill in the string interpolations. + expect_args = dict(fixture.api.hosts()) + expect_args["extra"] = "val" + assert result == template % expect_args + + # The string has been completely interpolated. + assert "%" not in result + + # Once interpolation has happened, doing it again has no effect. 
+ assert result == fixture.api.endpoint(result, extra="something else") + + # This is important because an interpolated URL may superficially + # appear to contain extra formatting characters. + assert result + "%3A" == fixture.api.endpoint( + result + "%3A", extra="something else" + ) + + def test_token_authorization_header( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + fixture = overdrive_api_fixture + + # Verify that the Authorization header needed to get an access + # token for a given collection is encoded properly. + assert fixture.api.token_authorization_header == "Basic YTpi" + assert ( + fixture.api.token_authorization_header + == "Basic " + + base64.standard_b64encode( + b"%s:%s" % (fixture.api.client_key(), fixture.api.client_secret()) + ).decode("utf8") + ) + + def test_token_post_success(self, overdrive_api_fixture: OverdriveAPIFixture): + fixture = overdrive_api_fixture + transaction = fixture.db + + fixture.api.queue_response(200, content="some content") + response = fixture.api.token_post(transaction.fresh_url(), "the payload") + assert 200 == response.status_code + assert fixture.api.access_token_response.content == response.content + + def test_get_success(self, overdrive_api_fixture: OverdriveAPIFixture): + fixture = overdrive_api_fixture + transaction = fixture.db + + fixture.api.queue_response(200, content="some content") + status_code, headers, content = fixture.api.get(transaction.fresh_url(), {}) + assert 200 == status_code + assert b"some content" == content + + def test_failure_to_get_library_is_fatal( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + fixture = overdrive_api_fixture + + fixture.api.queue_response(500) + with pytest.raises(BadResponseException) as excinfo: + fixture.api.get_library() + assert "Got status code 500" in str(excinfo.value) + + def test_error_getting_library(self, overdrive_api_fixture: OverdriveAPIFixture): + fixture = overdrive_api_fixture + session = fixture.db.session + + class MisconfiguredOverdriveAPI(MockOverdriveAPI): + """This Overdrive client has valid credentials but the library + can't be found -- probably because the library ID is wrong.""" + + def get_library(self): + return { + "errorCode": "Some error", + "message": "Some message.", + "token": "abc-def-ghi", + } + + # Just instantiating the API doesn't cause this error. + api = MisconfiguredOverdriveAPI(session, fixture.collection) + api._collection_token = None + + # But trying to access the collection token will cause it. + with pytest.raises(CannotLoadConfiguration) as excinfo: + api.collection_token() + assert ( + "Overdrive credentials are valid but could not fetch library: Some message." + in str(excinfo.value) + ) + + def test_401_on_get_refreshes_bearer_token( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + fixture = overdrive_api_fixture + transaction = fixture.db + + # We have a token. + assert "bearer token" == fixture.api.token + + # But then we try to GET, and receive a 401. + fixture.api.queue_response(401) + + # We refresh the bearer token. (This happens in + # MockOverdriveAPI.token_post, so we don't mock the response + # in the normal way.) + fixture.api.access_token_response = fixture.api.mock_access_token_response( + "new bearer token" + ) + + # Then we retry the GET and it succeeds this time. 
+ fixture.api.queue_response(200, content="at last, the content") + + status_code, headers, content = fixture.api.get(transaction.fresh_url(), {}) + + assert 200 == status_code + assert b"at last, the content" == content + + # The bearer token has been updated. + assert "new bearer token" == fixture.api.token + + def test_credential_refresh_success( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + fixture = overdrive_api_fixture + + """Verify the process of refreshing the Overdrive bearer token.""" + # Perform the initial credential check. + fixture.api.check_creds() + credential = fixture.api.credential_object(lambda x: x) + assert "bearer token" == credential.credential + assert fixture.api.token == credential.credential + + fixture.api.access_token_response = fixture.api.mock_access_token_response( + "new bearer token" + ) + + # Refresh the credentials and the token will change to + # the mocked value. + fixture.api.refresh_creds(credential) + assert "new bearer token" == credential.credential + assert fixture.api.token == credential.credential + + def test_401_after_token_refresh_raises_error( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + fixture = overdrive_api_fixture + + assert "bearer token" == fixture.api.token + + # We try to GET and receive a 401. + fixture.api.queue_response(401) + + # We refresh the bearer token. + fixture.api.access_token_response = fixture.api.mock_access_token_response( + "new bearer token" + ) + + # Then we retry the GET but we get another 401. + fixture.api.queue_response(401) + + credential = fixture.api.credential_object(lambda x: x) + fixture.api.refresh_creds(credential) + + # That raises a BadResponseException + with pytest.raises(BadResponseException) as excinfo: + fixture.api.get_library() + assert "Bad response from" in str(excinfo.value) + assert "Something's wrong with the Overdrive OAuth Bearer Token!" in str( + excinfo.value + ) + + def test_401_during_refresh_raises_error( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + fixture = overdrive_api_fixture + + """If we fail to refresh the OAuth bearer token, an exception is + raised. + """ + fixture.api.access_token_response = MockRequestsResponse(401, {}, "") + with pytest.raises(BadResponseException) as excinfo: + fixture.api.refresh_creds(None) + assert "Got status code 401" in str(excinfo.value) + assert "can only continue on: 200." in str(excinfo.value) + + def test_advantage_differences(self, overdrive_api_fixture: OverdriveAPIFixture): + transaction = overdrive_api_fixture.db + session = transaction.session + + # Test the differences between Advantage collections and + # regular Overdrive collections. + + # Here's a regular Overdrive collection. + main = transaction.collection( + protocol=ExternalIntegration.OVERDRIVE, + external_account_id="1", + ) + DatabaseTransactionFixture.set_settings( + main.integration_configuration, "overdrive_client_key", "user" + ) + DatabaseTransactionFixture.set_settings( + main.integration_configuration, "overdrive_client_secret", "password" + ) + DatabaseTransactionFixture.set_settings( + main.integration_configuration, "overdrive_website_id", "100" + ) + DatabaseTransactionFixture.set_settings( + main.integration_configuration, "ils_name", "default" + ) + + # Here's an Overdrive API client for that collection. + overdrive_main = MockOverdriveAPI(session, main) + + # Note the "library" endpoint. 
+ assert ( + "https://api.overdrive.com/v1/libraries/1" + == overdrive_main._library_endpoint + ) + + # The advantage_library_id of a non-Advantage Overdrive account + # is always -1. + assert "1" == overdrive_main.library_id() + assert -1 == overdrive_main.advantage_library_id + + # Here's an Overdrive Advantage collection associated with the + # main Overdrive collection. + child = transaction.collection( + protocol=ExternalIntegration.OVERDRIVE, + external_account_id="2", + ) + child.parent = main + overdrive_child = MockOverdriveAPI(session, child) + + # In URL-space, the "library" endpoint for the Advantage + # collection is beneath the the parent collection's "library" + # endpoint. + assert ( + "https://api.overdrive.com/v1/libraries/1/advantageAccounts/2" + == overdrive_child._library_endpoint + ) + + # The advantage_library_id of an Advantage collection is the + # numeric value of its external_account_id. + assert "2" == overdrive_child.library_id() + assert 2 == overdrive_child.advantage_library_id + + def test__get_book_list_page(self, overdrive_api_fixture: OverdriveAPIFixture): + fixture = overdrive_api_fixture + + # Test the internal method that retrieves a list of books and + # preprocesses it. + + class MockExtractor: + def link(self, content, rel_to_follow): + self.link_called_with = (content, rel_to_follow) + return "http://next-page/" + + def availability_link_list(self, content): + self.availability_link_list_called_with = content + return ["an availability queue"] + + original_data = {"key": "value"} + for content in ( + original_data, + json.dumps(original_data), + json.dumps(original_data).encode("utf8"), + ): + extractor = MockExtractor() + fixture.api.queue_response(200, content=content) + result = fixture.api._get_book_list_page( + "http://first-page/", "some-rel", extractor + ) + + # A single request was made to the requested page. + (url, headers, body) = fixture.api.requests.pop() + assert len(fixture.api.requests) == 0 + assert url == "http://first-page/" + + # The extractor was used to extract a link to the page + # with rel="some-rel". + # + # Note that the Python data structure (`original_data`) is passed in, + # regardless of whether the mock response body is a Python + # data structure, a bytestring, or a Unicode string. + assert extractor.link_called_with == (original_data, "some-rel") + + # The data structure was also passed into the extractor's + # availability_link_list() method. + assert extractor.availability_link_list_called_with == original_data + + # The final result is a queue of availability data (from + # this page) and a link to the next page. + assert result == (["an availability queue"], "http://next-page/") + def test_external_integration(self, overdrive_api_fixture: OverdriveAPIFixture): assert ( overdrive_api_fixture.collection.external_integration @@ -2820,3 +3288,735 @@ def test_instantiate(self, overdrive_api_fixture: OverdriveAPIFixture): monitor = OverdriveCollectionReaper( db.session, overdrive_api_fixture.collection, api_class=MockOverdriveAPI ) + + +class TestOverdriveRepresentationExtractor: + def test_availability_info(self, overdrive_api_fixture: OverdriveAPIFixture): + data, raw = overdrive_api_fixture.sample_json("overdrive_book_list.json") + availability = OverdriveRepresentationExtractor.availability_link_list(raw) + # Every item in the list has a few important values. 
+ for item in availability: + for key in "availability_link", "author_name", "id", "title", "date_added": + assert key in item + + # Also run a spot check on the actual values. + spot = availability[0] + assert "210bdcad-29b7-445f-8d05-cdbb40abc03a" == spot["id"] + assert "King and Maxwell" == spot["title"] + assert "David Baldacci" == spot["author_name"] + assert "2013-11-12T14:13:00-05:00" == spot["date_added"] + + def test_availability_info_missing_data( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + # overdrive_book_list_missing_data.json has two products. One + # only has a title, the other only has an ID. + data, raw = overdrive_api_fixture.sample_json( + "overdrive_book_list_missing_data.json" + ) + [item] = OverdriveRepresentationExtractor.availability_link_list(raw) + + # We got a data structure -- full of missing data -- for the + # item that has an ID. + assert "i only have an id" == item["id"] + assert None == item["title"] + assert None == item["author_name"] + assert None == item["date_added"] + + # We did not get a data structure for the item that only has a + # title, because an ID is required -- otherwise we don't know + # what book we're talking about. + + def test_link(self, overdrive_api_fixture: OverdriveAPIFixture): + data, raw = overdrive_api_fixture.sample_json("overdrive_book_list.json") + expect = OverdriveAPI.make_link_safe( + "http://api.overdrive.com/v1/collections/collection-id/products?limit=300&offset=0&lastupdatetime=2014-04-28%2009:25:09&sort=popularity:desc&formats=ebook-epub-open,ebook-epub-adobe,ebook-pdf-adobe,ebook-pdf-open" + ) + assert expect == OverdriveRepresentationExtractor.link(raw, "first") + + def test_book_info_to_circulation(self, overdrive_api_fixture: OverdriveAPIFixture): + # Tests that can convert an overdrive json block into a CirculationData object. + fixture = overdrive_api_fixture + session = overdrive_api_fixture.db.session + + raw, info = fixture.sample_json("overdrive_availability_information_2.json") + extractor = OverdriveRepresentationExtractor(fixture.api) + circulationdata = extractor.book_info_to_circulation(info) + + # NOTE: It's not realistic for licenses_available and + # patrons_in_hold_queue to both be nonzero; this is just to + # verify that the test picks up whatever data is in the + # document. + assert 3 == circulationdata.licenses_owned + assert 1 == circulationdata.licenses_available + assert 10 == circulationdata.patrons_in_hold_queue + + # Related IDs. + identifier = circulationdata.primary_identifier(session) + assert (Identifier.OVERDRIVE_ID, "2a005d55-a417-4053-b90d-7a38ca6d2065") == ( + identifier.type, + identifier.identifier, + ) + + def test_book_info_to_circulation_advantage( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + # Overdrive Advantage accounts (a.k.a. "child" or "sub" accounts derive + # different information from the same API responses as "main" Overdrive + # accounts. + fixture = overdrive_api_fixture + raw, info = fixture.sample_json("overdrive_availability_advantage.json") + + extractor = OverdriveRepresentationExtractor(fixture.api) + # Calling in the context of a main account should return a count of + # the main account and any shared sub account owned and available. + consortial_data = extractor.book_info_to_circulation(info) + assert 10 == consortial_data.licenses_owned + assert 10 == consortial_data.licenses_available + + class MockAPI: + # Pretend to be an API for an Overdrive Advantage collection with + # library ID 61. 
+ advantage_library_id = 61 + + extractor = OverdriveRepresentationExtractor(MockAPI()) + advantage_data = extractor.book_info_to_circulation(info) + assert 1 == advantage_data.licenses_owned + assert 1 == advantage_data.licenses_available + + # Both collections have the same information about active + # holds, because that information is not split out by + # collection. + assert 0 == advantage_data.patrons_in_hold_queue + assert 0 == consortial_data.patrons_in_hold_queue + + # If for whatever reason Overdrive doesn't mention the + # relevant collection at all, no collection-specific + # information is gleaned. + # + # TODO: It would probably be better not to return a + # CirculationData object at all, but this shouldn't happen in + # a real scenario. + class MockAPI2: + # Pretend to be an API for an Overdrive Advantage collection with + # library ID 62. + advantage_library_id = 62 + + extractor = OverdriveRepresentationExtractor(MockAPI2()) + advantage_data = extractor.book_info_to_circulation(info) + assert 0 == advantage_data.licenses_owned + assert 0 == advantage_data.licenses_available + + class MockAPI3: + # Pretend to be an API for an Overdrive Advantage collection with + # library ID 63 which contains shared copies. + advantage_library_id = 63 + + extractor = OverdriveRepresentationExtractor(MockAPI3()) + advantage_data = extractor.book_info_to_circulation(info) + # since these copies are shared and counted as part of the main + # context we do not count them here. + assert 0 == advantage_data.licenses_owned + assert 0 == advantage_data.licenses_available + + def test_not_found_error_to_circulationdata( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + fixture = overdrive_api_fixture + transaction = fixture.db + raw, info = fixture.sample_json("overdrive_availability_not_found.json") + + # By default, a "NotFound" error can't be converted to a + # CirculationData object, because we don't know _which_ book it + # was that wasn't found. + extractor = OverdriveRepresentationExtractor(fixture.api) + m = extractor.book_info_to_circulation + assert None == m(info) + + # However, if an ID was added to `info` ahead of time (as the + # circulation code does), we do know, and we can create a + # CirculationData. + identifier = transaction.identifier(identifier_type=Identifier.OVERDRIVE_ID) + info["id"] = identifier.identifier + data = m(info) + assert identifier == data.primary_identifier(transaction.session) + assert 0 == data.licenses_owned + assert 0 == data.licenses_available + assert 0 == data.patrons_in_hold_queue + + def test_book_info_with_metadata(self, overdrive_api_fixture: OverdriveAPIFixture): + # Tests that can convert an overdrive json block into a Metadata object. + + raw, info = overdrive_api_fixture.sample_json("overdrive_metadata.json") + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + + assert "Agile Documentation" == metadata.title + assert ( + "Agile Documentation A Pattern Guide to Producing Lightweight Documents for Software Projects" + == metadata.sort_title + ) + assert ( + "A Pattern Guide to Producing Lightweight Documents for Software Projects" + == metadata.subtitle + ) + assert Edition.BOOK_MEDIUM == metadata.medium + assert "Wiley Software Patterns" == metadata.series + assert "eng" == metadata.language + assert "Wiley" == metadata.publisher + assert "John Wiley & Sons, Inc." 
== metadata.imprint + assert 2005 == metadata.published.year + assert 1 == metadata.published.month + assert 31 == metadata.published.day + + [author] = metadata.contributors + assert "Rüping, Andreas" == author.sort_name + assert "Andreas Rüping" == author.display_name + assert [Contributor.AUTHOR_ROLE] == author.roles + + subjects = sorted(metadata.subjects, key=lambda x: x.identifier) + + assert [ + ("Computer Technology", Subject.OVERDRIVE, 100), + ("Nonfiction", Subject.OVERDRIVE, 100), + ("Object Technologies - Miscellaneous", "tag", 1), + ] == [(x.identifier, x.type, x.weight) for x in subjects] + + # Related IDs. + assert (Identifier.OVERDRIVE_ID, "3896665d-9d81-4cac-bd43-ffc5066de1f5") == ( + metadata.primary_identifier.type, + metadata.primary_identifier.identifier, + ) + + ids = [(x.type, x.identifier) for x in metadata.identifiers] + + # The original data contains an actual ASIN and ISBN, plus a blank + # ASIN and three invalid ISBNs: one which is common placeholder + # text, one which is mis-typed and has a bad check digit, and one + # which has an invalid character; the bad identifiers do not show + # up here. + assert [ + (Identifier.ASIN, "B000VI88N2"), + (Identifier.ISBN, "9780470856246"), + (Identifier.OVERDRIVE_ID, "3896665d-9d81-4cac-bd43-ffc5066de1f5"), + ] == sorted(ids) + + # Available formats. + [kindle, pdf] = sorted( + metadata.circulation.formats, key=lambda x: x.content_type + ) + assert DeliveryMechanism.KINDLE_CONTENT_TYPE == kindle.content_type + assert DeliveryMechanism.KINDLE_DRM == kindle.drm_scheme + + assert Representation.PDF_MEDIA_TYPE == pdf.content_type + assert DeliveryMechanism.ADOBE_DRM == pdf.drm_scheme + + # Links to various resources. + shortd, image, longd = sorted(metadata.links, key=lambda x: x.rel) + + assert Hyperlink.DESCRIPTION == longd.rel + assert longd.content.startswith("
Software documentation") + + assert Hyperlink.SHORT_DESCRIPTION == shortd.rel + assert shortd.content.startswith("
Software documentation") + assert len(shortd.content) < len(longd.content) + + assert Hyperlink.IMAGE == image.rel + assert ( + "http://images.contentreserve.com/ImageType-100/0128-1/%7B3896665D-9D81-4CAC-BD43-FFC5066DE1F5%7DImg100.jpg" + == image.href + ) + + thumbnail = image.thumbnail + + assert Hyperlink.THUMBNAIL_IMAGE == thumbnail.rel + assert ( + "http://images.contentreserve.com/ImageType-200/0128-1/%7B3896665D-9D81-4CAC-BD43-FFC5066DE1F5%7DImg200.jpg" + == thumbnail.href + ) + + # Measurements associated with the book. + + measurements = metadata.measurements + popularity = [ + x for x in measurements if x.quantity_measured == Measurement.POPULARITY + ][0] + assert 2 == popularity.value + + rating = [x for x in measurements if x.quantity_measured == Measurement.RATING][ + 0 + ] + assert 1 == rating.value + + # Request only the bibliographic information. + metadata = OverdriveRepresentationExtractor.book_info_to_metadata( + info, include_bibliographic=True, include_formats=False + ) + + assert "Agile Documentation" == metadata.title + assert None == metadata.circulation + + # Request only the format information. + metadata = OverdriveRepresentationExtractor.book_info_to_metadata( + info, include_bibliographic=False, include_formats=True + ) + + assert None == metadata.title + + [kindle, pdf] = sorted( + metadata.circulation.formats, key=lambda x: x.content_type + ) + assert DeliveryMechanism.KINDLE_CONTENT_TYPE == kindle.content_type + assert DeliveryMechanism.KINDLE_DRM == kindle.drm_scheme + + assert Representation.PDF_MEDIA_TYPE == pdf.content_type + assert DeliveryMechanism.ADOBE_DRM == pdf.drm_scheme + + def test_audiobook_info(self, overdrive_api_fixture: OverdriveAPIFixture): + # This book will be available in three formats: a link to the + # Overdrive Read website, a manifest file that SimplyE can + # download, and the legacy format used by the mobile app + # called 'Overdrive'. + raw, info = overdrive_api_fixture.sample_json("audiobook.json") + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + streaming, manifest, legacy = sorted( + metadata.circulation.formats, key=lambda x: x.content_type + ) + assert DeliveryMechanism.STREAMING_AUDIO_CONTENT_TYPE == streaming.content_type + assert ( + MediaTypes.OVERDRIVE_AUDIOBOOK_MANIFEST_MEDIA_TYPE == manifest.content_type + ) + assert "application/x-od-media" == legacy.content_type + + def test_book_info_with_sample(self, overdrive_api_fixture: OverdriveAPIFixture): + # This book has two samples; one available as a direct download and + # one available through a manifest file. + raw, info = overdrive_api_fixture.sample_json("has_sample.json") + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + samples = [x for x in metadata.links if x.rel == Hyperlink.SAMPLE] + epub_sample, manifest_sample = sorted(samples, key=lambda x: x.media_type) + + # Here's the direct download. + assert ( + "http://excerpts.contentreserve.com/FormatType-410/1071-1/9BD/24F/82/BridesofConvenienceBundle9781426803697.epub" + == epub_sample.href + ) + assert MediaTypes.EPUB_MEDIA_TYPE == epub_sample.media_type + + # Here's the manifest. 
+ assert ( + "https://samples.overdrive.com/?crid=9BD24F82-35C0-4E0A-B5E7-BCFED07835CF&.epub-sample.overdrive.com" + == manifest_sample.href + ) + # Assert we have the end content type of the sample, no DRM formats + assert "text/html" == manifest_sample.media_type + + def test_book_info_with_unknown_sample( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + raw, info = overdrive_api_fixture.sample_json("has_sample.json") + + # Just use one format, and change a sample type to unknown + # Only one (known sample) should be extracted then + info["formats"] = [info["formats"][1]] + info["formats"][0]["samples"][1]["formatType"] = "overdrive-unknown" + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + samples = [x for x in metadata.links if x.rel == Hyperlink.SAMPLE] + + assert 1 == len(samples) + assert samples[0].media_type == MediaTypes.EPUB_MEDIA_TYPE + + def test_book_info_with_grade_levels( + self, overdrive_api_fixture: OverdriveAPIFixture + ): + raw, info = overdrive_api_fixture.sample_json("has_grade_levels.json") + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + + grade_levels = sorted( + x.identifier for x in metadata.subjects if x.type == Subject.GRADE_LEVEL + ) + assert ["Grade 4", "Grade 5", "Grade 6", "Grade 7", "Grade 8"] == grade_levels + + def test_book_info_with_awards(self, overdrive_api_fixture: OverdriveAPIFixture): + raw, info = overdrive_api_fixture.sample_json("has_awards.json") + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + + [awards] = [ + x + for x in metadata.measurements + if Measurement.AWARDS == x.quantity_measured + ] + assert 1 == awards.value + assert 1 == awards.weight + + def test_image_link_to_linkdata(self): + def m(link): + return OverdriveRepresentationExtractor.image_link_to_linkdata(link, "rel") + + # Test missing data. + assert None == m(None) + assert None == m(dict()) + + # Test an ordinary success case. + url = "http://images.overdrive.com/image.png" + type = "image/type" + data = m(dict(href=url, type=type)) + assert isinstance(data, LinkData) + assert url == data.href + assert type == data.media_type + + # Test a case where no media type is provided. + data = m(dict(href=url)) + assert None == data.media_type + + # Verify that invalid URLs are made link-safe. + data = m(dict(href="http://api.overdrive.com/v1/foo:bar")) + assert "http://api.overdrive.com/v1/foo%3Abar" == data.href + + # Stand-in cover images are detected and filtered out. + data = m( + dict( + href="https://img1.od-cdn.com/ImageType-100/0293-1/{00000000-0000-0000-0000-000000000002}Img100.jpg" + ) + ) + assert None == data + + def test_internal_formats(self): + # Overdrive's internal format names may correspond to one or more + # delivery mechanisms. + def assert_formats(overdrive_name, *expect): + actual = OverdriveRepresentationExtractor.internal_formats(overdrive_name) + assert list(expect) == list(actual) + + # Most formats correspond to one delivery mechanism. + assert_formats( + "ebook-pdf-adobe", (MediaTypes.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM) + ) + + assert_formats( + "ebook-epub-open", (MediaTypes.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM) + ) + + # ebook-overdrive and audiobook-overdrive each correspond to + # two delivery mechanisms. 
+ assert_formats( + "ebook-overdrive", + ( + MediaTypes.OVERDRIVE_EBOOK_MANIFEST_MEDIA_TYPE, + DeliveryMechanism.LIBBY_DRM, + ), + ( + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, + DeliveryMechanism.STREAMING_DRM, + ), + ) + + assert_formats( + "audiobook-overdrive", + ( + MediaTypes.OVERDRIVE_AUDIOBOOK_MANIFEST_MEDIA_TYPE, + DeliveryMechanism.LIBBY_DRM, + ), + ( + DeliveryMechanism.STREAMING_AUDIO_CONTENT_TYPE, + DeliveryMechanism.STREAMING_DRM, + ), + ) + + # An unrecognized format does not correspond to any delivery + # mechanisms. + assert_formats("no-such-format") + + +class TestOverdriveAdvantageAccount: + def test_no_advantage_accounts(self, overdrive_api_fixture: OverdriveAPIFixture): + """When there are no Advantage accounts, get_advantage_accounts() + returns an empty list. + """ + fixture = overdrive_api_fixture + fixture.api.queue_collection_token() + assert [] == fixture.api.get_advantage_accounts() + + def test_from_representation(self, overdrive_api_fixture: OverdriveAPIFixture): + """Test the creation of OverdriveAdvantageAccount objects + from Overdrive's representation of a list of accounts. + """ + fixture = overdrive_api_fixture + raw, data = fixture.sample_json("advantage_accounts.json") + [ac1, ac2] = OverdriveAdvantageAccount.from_representation(raw) + + # The two Advantage accounts have the same parent library ID. + assert "1225" == ac1.parent_library_id + assert "1225" == ac2.parent_library_id + + # But they have different names and library IDs. + assert "3" == ac1.library_id + assert "The Other Side of Town Library" == ac1.name + + assert "9" == ac2.library_id + assert "The Common Community Library" == ac2.name + + def test_to_collection(self, overdrive_api_fixture: OverdriveAPIFixture): + # Test that we can turn an OverdriveAdvantageAccount object into + # a Collection object. + fixture = overdrive_api_fixture + transaction, session = ( + fixture.db, + fixture.db.session, + ) + + account = OverdriveAdvantageAccount( + "parent_id", + "child_id", + "Library Name", + "token value", + ) + + # We can't just create a Collection object for this object because + # the parent doesn't exist. + with pytest.raises(ValueError) as excinfo: + account.to_collection(session) + assert "Cannot create a Collection whose parent does not already exist." in str( + excinfo.value + ) + + # So, create a Collection to be the parent. + parent = transaction.collection( + name="Parent", + protocol=ExternalIntegration.OVERDRIVE, + external_account_id="parent_id", + ) + + # Now it works. + p, collection = account.to_collection(session) + assert p == parent + assert parent == collection.parent + assert collection.external_account_id == account.library_id + assert ExternalIntegration.LICENSE_GOAL == collection.external_integration.goal + assert ExternalIntegration.OVERDRIVE == collection.protocol + assert Goals.LICENSE_GOAL == collection.integration_configuration.goal + assert ExternalIntegration.OVERDRIVE == collection.protocol + + # To ensure uniqueness, the collection was named after its + # parent. 
+ assert f"{parent.name} / {account.name}" == collection.name + + +class OverdriveBibliographicCoverageProviderFixture: + overdrive: OverdriveAPIFixture + provider: OverdriveBibliographicCoverageProvider + api: MockOverdriveAPI + + +@pytest.fixture +def overdrive_biblio_provider_fixture( + overdrive_api_fixture: OverdriveAPIFixture, +) -> OverdriveBibliographicCoverageProviderFixture: + fix = OverdriveBibliographicCoverageProviderFixture() + fix.overdrive = overdrive_api_fixture + fix.provider = OverdriveBibliographicCoverageProvider( + overdrive_api_fixture.collection, api_class=MockOverdriveAPI + ) + fix.api = fix.provider.api + return fix + + +class TestOverdriveBibliographicCoverageProvider: + """Test the code that looks up bibliographic information from Overdrive.""" + + def test_script_instantiation( + self, + overdrive_biblio_provider_fixture: OverdriveBibliographicCoverageProviderFixture, + ): + """Test that RunCoverageProviderScript can instantiate + the coverage provider. + """ + + fixture = overdrive_biblio_provider_fixture + db = fixture.overdrive.db + + script = RunCollectionCoverageProviderScript( + OverdriveBibliographicCoverageProvider, + db.session, + api_class=MockOverdriveAPI, + ) + [provider] = script.providers + assert isinstance(provider, OverdriveBibliographicCoverageProvider) + assert isinstance(provider.api, MockOverdriveAPI) + assert fixture.overdrive.collection == provider.collection + + def test_invalid_or_unrecognized_guid( + self, + overdrive_biblio_provider_fixture: OverdriveBibliographicCoverageProviderFixture, + ): + """A bad or malformed GUID can't get coverage.""" + fixture = overdrive_biblio_provider_fixture + db = fixture.overdrive.db + + identifier = db.identifier() + identifier.identifier = "bad guid" + + error = '{"errorCode": "InvalidGuid", "message": "An invalid guid was given.", "token": "7aebce0e-2e88-41b3-b6d3-82bf15f8e1a2"}' + fixture.api.queue_response(200, content=error) + + failure = fixture.provider.process_item(identifier) + assert isinstance(failure, CoverageFailure) + assert False == failure.transient + assert "Invalid Overdrive ID: bad guid" == failure.exception + + # This is for when the GUID is well-formed but doesn't + # correspond to any real Overdrive book. + error = '{"errorCode": "NotFound", "message": "Not found in Overdrive collection.", "token": "7aebce0e-2e88-41b3-b6d3-82bf15f8e1a2"}' + fixture.api.queue_response(200, content=error) + + failure = fixture.provider.process_item(identifier) + assert isinstance(failure, CoverageFailure) + assert False == failure.transient + assert "ID not recognized by Overdrive: bad guid" == failure.exception + + def test_process_item_creates_presentation_ready_work( + self, + overdrive_biblio_provider_fixture: OverdriveBibliographicCoverageProviderFixture, + ): + """Test the normal workflow where we ask Overdrive for data, + Overdrive provides it, and we create a presentation-ready work. + """ + fixture = overdrive_biblio_provider_fixture + db = fixture.overdrive.db + + # Here's the book mentioned in overdrive_metadata.json. + identifier = db.identifier(identifier_type=Identifier.OVERDRIVE_ID) + identifier.identifier = "3896665d-9d81-4cac-bd43-ffc5066de1f5" + + # This book has no LicensePool. 
+ assert [] == identifier.licensed_through + + # Run it through the OverdriveBibliographicCoverageProvider + raw, info = fixture.overdrive.sample_json("overdrive_metadata.json") + fixture.api.queue_response(200, content=raw) + + [result] = fixture.provider.process_batch([identifier]) + assert identifier == result + + # A LicensePool was created, not because we know anything + # about how we've licensed this book, but to have a place to + # store the information about what formats the book is + # available in. + [pool] = identifier.licensed_through + assert 0 == pool.licenses_owned + [lpdm1, lpdm2] = pool.delivery_mechanisms + names = [x.delivery_mechanism.name for x in pool.delivery_mechanisms] + assert sorted( + [ + "application/pdf (application/vnd.adobe.adept+xml)", + "Kindle via Amazon (Kindle DRM)", + ] + ) == sorted(names) + + # A Work was created and made presentation ready. + assert "Agile Documentation" == pool.work.title + assert True == pool.work.presentation_ready + + +class TestGenerateOverdriveAdvantageAccountList: + def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture): + output_file_path = "test-output.csv" + circ_manager_name = "circ_man_name" + parent_library_name = "Parent" + parent_od_library_id = "parent_id" + child1_library_name = "child1" + child1_advantage_library_id = "1" + child1_token = "token1" + child2_library_name = "child2" + child2_advantage_library_id = "2" + child2_token = "token2" + client_key = "ck" + client_secret = "cs" + library_token = "lt" + + parent: Collection = db.collection( + name=parent_library_name, + protocol=ExternalIntegration.OVERDRIVE, + external_account_id=parent_od_library_id, + ) + child1: Collection = db.collection( + name=child1_library_name, + protocol=ExternalIntegration.OVERDRIVE, + external_account_id=child1_advantage_library_id, + ) + child1.parent = parent + overdrive_api = MagicMock() + overdrive_api.get_advantage_accounts.return_value = [ + OverdriveAdvantageAccount( + parent_od_library_id, + child1_advantage_library_id, + child1_library_name, + child1_token, + ), + OverdriveAdvantageAccount( + parent_od_library_id, + child2_advantage_library_id, + child2_library_name, + child2_token, + ), + ] + + overdrive_api.client_key.return_value = bytes(client_key, "utf-8") + overdrive_api.client_secret.return_value = bytes(client_secret, "utf-8") + type(overdrive_api).collection_token = PropertyMock(return_value=library_token) + + with patch( + "api.overdrive.GenerateOverdriveAdvantageAccountList._create_overdrive_api" + ) as create_od_api: + create_od_api.return_value = overdrive_api + GenerateOverdriveAdvantageAccountList(db.session).do_run( + cmd_args=[ + "--output-file-path", + output_file_path, + "--circulation-manager-name", + circ_manager_name, + ] + ) + + with open(output_file_path, newline="") as csv_file: + csvreader = csv.reader(csv_file) + for index, row in enumerate(csvreader): + if index == 0: + assert "cm" == row[0] + assert "collection" == row[1] + assert "overdrive_library_id" == row[2] + assert "client_key" == row[3] + assert "client_secret" == row[4] + assert "library_token" == row[5] + assert "advantage_name" == row[6] + assert "advantage_id" == row[7] + assert "advantage_token" == row[8] + assert "already_configured" == row[9] + elif index == 1: + assert circ_manager_name == row[0] + assert parent_library_name == row[1] + assert parent_od_library_id == row[2] + assert client_key == row[3] + assert client_secret == row[4] + assert library_token == row[5] + assert child1_library_name == 
row[6] + assert child1_advantage_library_id == row[7] + assert child1_token == row[8] + assert "True" == row[9] + else: + assert circ_manager_name == row[0] + assert parent_library_name == row[1] + assert parent_od_library_id == row[2] + assert client_key == row[3] + assert client_secret == row[4] + assert library_token == row[5] + assert child2_library_name == row[6] + assert child2_advantage_library_id == row[7] + assert child2_token == row[8] + assert "False" == row[9] + last_index = index + + os.remove(output_file_path) + assert last_index == 2 + overdrive_api.client_key.assert_called_once() + overdrive_api.client_secret.assert_called_once() + overdrive_api.get_advantage_accounts.assert_called_once() diff --git a/tests/core/conftest.py b/tests/core/conftest.py index 9ea0933198..15b69bb34a 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -5,7 +5,6 @@ "tests.fixtures.library", "tests.fixtures.opds2_files", "tests.fixtures.opds_files", - "tests.fixtures.overdrive", "tests.fixtures.s3", "tests.fixtures.sample_covers", "tests.fixtures.search", diff --git a/tests/core/files/overdrive/overdrive_availability_not_found.json b/tests/core/files/overdrive/overdrive_availability_not_found.json deleted file mode 100644 index 99a61e65f1..0000000000 --- a/tests/core/files/overdrive/overdrive_availability_not_found.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "errorCode": "NotFound", - "message": "The requested resource could not be found.", - "token": "60a18218-0d25-42b8-80c3-0bf9df782f1b" -} diff --git a/tests/core/test_overdrive.py b/tests/core/test_overdrive.py deleted file mode 100644 index eb1cc1e5cc..0000000000 --- a/tests/core/test_overdrive.py +++ /dev/null @@ -1,1127 +0,0 @@ -import json -import logging - -import pytest - -from core.config import CannotLoadConfiguration -from core.coverage import CoverageFailure -from core.integration.goals import Goals -from core.metadata_layer import LinkData -from core.model import ( - Contributor, - DeliveryMechanism, - Edition, - ExternalIntegration, - Hyperlink, - Identifier, - Measurement, - MediaTypes, - Representation, - Subject, -) -from core.overdrive import ( - OverdriveAdvantageAccount, - OverdriveBibliographicCoverageProvider, - OverdriveConstants, - OverdriveCoreAPI, - OverdriveRepresentationExtractor, -) -from core.scripts import RunCollectionCoverageProviderScript -from core.util.http import BadResponseException -from core.util.string_helpers import base64 -from tests.api.mockapi.overdrive import MockOverdriveCoreAPI -from tests.core.mock import MockRequestsResponse -from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.overdrive import OverdriveFixture, OverdriveWithAPIFixture - - -@pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() - - -class TestOverdriveCoreAPI: - def test_errors_not_retried( - self, - overdrive_with_api_fixture: OverdriveWithAPIFixture, - mock_web_server: MockAPIServer, - ): - session = overdrive_with_api_fixture.overdrive.transaction.session - library = overdrive_with_api_fixture.overdrive.transaction.default_library() - collection = 
MockOverdriveCoreAPI.mock_collection(session, library) - - # Enqueue a response for the request that the server will make for a token. - _r = MockAPIServerResponse() - _r.status_code = 200 - _r.set_content( - b"""{ - "access_token": "x", - "expires_in": 23 - } - """ - ) - mock_web_server.enqueue_response("POST", "/oauth/token", _r) - - api = OverdriveCoreAPI(session, collection) - api._hosts["oauth_host"] = mock_web_server.url("/oauth") - - # Try a get() call for each error code - for code in [404]: - _r = MockAPIServerResponse() - _r.status_code = code - mock_web_server.enqueue_response("GET", "/a/b/c", _r) - _status, _, _ = api.get(mock_web_server.url("/a/b/c")) - assert _status == code - - for code in [400, 403, 500, 501, 502, 503]: - _r = MockAPIServerResponse() - _r.status_code = code - - # The default is to retry 5 times, so enqueue 5 responses. - for i in range(0, 6): - mock_web_server.enqueue_response("GET", "/a/b/c", _r) - try: - api.get(mock_web_server.url("/a/b/c")) - except BadResponseException: - pass - - # Exactly one request was made for each error code, plus one for a token - assert len(mock_web_server.requests()) == 8 - - def test_constructor_makes_no_requests( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - session = overdrive_with_api_fixture.overdrive.transaction.session - library = overdrive_with_api_fixture.overdrive.transaction.default_library() - # Invoking the OverdriveCoreAPI constructor does not, by itself, - # make any HTTP requests. - collection = MockOverdriveCoreAPI.mock_collection(session, library) - - class NoRequests(OverdriveCoreAPI): - MSG = "This is a unit test, you can't make HTTP requests!" - - def no_requests(self, *args, **kwargs): - raise Exception(self.MSG) - - _do_get = no_requests - _do_post = no_requests - _make_request = no_requests - - api = NoRequests(session, collection) - - # Attempting to access .token or .collection_token _will_ - # try to make an HTTP request. - for field in "token", "collection_token": - with pytest.raises(Exception) as excinfo: - getattr(api, field) - assert api.MSG in str(excinfo.value) - - def test_ils_name(self, overdrive_with_api_fixture: OverdriveWithAPIFixture): - fixture = overdrive_with_api_fixture - transaction = fixture.overdrive.transaction - - """The 'ils_name' setting (defined in - MockOverdriveCoreAPI.mock_collection) is available through - OverdriveCoreAPI.ils_name(). - """ - assert "e" == fixture.api.ils_name(transaction.default_library()) - - # The value must be explicitly set for a given library, or - # else the default will be used. - l2 = transaction.library() - assert "default" == fixture.api.ils_name(l2) - - def test_make_link_safe(self): - # Unsafe characters are escaped. - assert "http://foo.com?q=%2B%3A%7B%7D" == OverdriveCoreAPI.make_link_safe( - "http://foo.com?q=+:{}" - ) - - # Links to version 1 of the availability API are converted - # to links to version 2. - v1 = "https://qa.api.overdrive.com/v1/collections/abcde/products/12345/availability" - v2 = "https://qa.api.overdrive.com/v2/collections/abcde/products/12345/availability" - assert v2 == OverdriveCoreAPI.make_link_safe(v1) - - # We also handle the case of a trailing slash, just in case Overdrive - # starts serving links with trailing slashes. 
- v1 = v1 + "/" - v2 = v2 + "/" - assert v2 == OverdriveCoreAPI.make_link_safe(v1) - - # Links to other endpoints are not converted - leave_alone = "https://qa.api.overdrive.com/v1/collections/abcde/products/12345" - assert leave_alone == OverdriveCoreAPI.make_link_safe(leave_alone) - - def test_hosts(self, overdrive_with_api_fixture: OverdriveWithAPIFixture): - fixture = overdrive_with_api_fixture - session = fixture.overdrive.transaction.session - c = OverdriveCoreAPI - - # By default, OverdriveCoreAPI is initialized with the production - # set of hostnames. - assert fixture.api.hosts() == c.HOSTS[OverdriveConstants.PRODUCTION_SERVERS] - - # You can instead initialize it to use the testing set of - # hostnames. - def api_with_setting(x): - config = fixture.overdrive.collection.integration_configuration - DatabaseTransactionFixture.set_settings(config, overdrive_server_nickname=x) - return c(session, fixture.overdrive.collection) - - testing = api_with_setting(OverdriveConstants.TESTING_SERVERS) - assert testing.hosts() == c.HOSTS[OverdriveConstants.TESTING_SERVERS] - - # If the setting doesn't make sense, we default to production - # hostnames. - bad = api_with_setting("nonsensical") - assert bad.hosts() == c.HOSTS[OverdriveConstants.PRODUCTION_SERVERS] - - def test_endpoint(self, overdrive_with_api_fixture: OverdriveWithAPIFixture): - fixture = overdrive_with_api_fixture - - # The .endpoint() method performs string interpolation, including - # the names of servers. - template = ( - "%(host)s %(patron_host)s %(oauth_host)s %(oauth_patron_host)s %(extra)s" - ) - result = fixture.api.endpoint(template, extra="val") - - # The host names and the 'extra' argument have been used to - # fill in the string interpolations. - expect_args = dict(fixture.api.hosts()) - expect_args["extra"] = "val" - assert result == template % expect_args - - # The string has been completely interpolated. - assert "%" not in result - - # Once interpolation has happened, doing it again has no effect. - assert result == fixture.api.endpoint(result, extra="something else") - - # This is important because an interpolated URL may superficially - # appear to contain extra formatting characters. - assert result + "%3A" == fixture.api.endpoint( - result + "%3A", extra="something else" - ) - - def test_token_authorization_header( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - - # Verify that the Authorization header needed to get an access - # token for a given collection is encoded properly. 
- assert fixture.api.token_authorization_header == "Basic YTpi" - assert ( - fixture.api.token_authorization_header - == "Basic " - + base64.standard_b64encode( - b"%s:%s" % (fixture.api.client_key(), fixture.api.client_secret()) - ) - ) - - def test_token_post_success( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - transaction = fixture.overdrive.transaction - - fixture.api.queue_response(200, content="some content") - response = fixture.api.token_post(transaction.fresh_url(), "the payload") - assert 200 == response.status_code - assert fixture.api.access_token_response.content == response.content - - def test_get_success(self, overdrive_with_api_fixture: OverdriveWithAPIFixture): - fixture = overdrive_with_api_fixture - transaction = fixture.overdrive.transaction - - fixture.api.queue_response(200, content="some content") - status_code, headers, content = fixture.api.get(transaction.fresh_url(), {}) - assert 200 == status_code - assert b"some content" == content - - def test_failure_to_get_library_is_fatal( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - - fixture.api.queue_response(500) - with pytest.raises(BadResponseException) as excinfo: - fixture.api.get_library() - assert "Got status code 500" in str(excinfo.value) - - def test_error_getting_library( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - session = fixture.overdrive.transaction.session - - class MisconfiguredOverdriveCoreAPI(MockOverdriveCoreAPI): - """This Overdrive client has valid credentials but the library - can't be found -- probably because the library ID is wrong.""" - - def get_library(self): - return { - "errorCode": "Some error", - "message": "Some message.", - "token": "abc-def-ghi", - } - - # Just instantiating the API doesn't cause this error. - api = MisconfiguredOverdriveCoreAPI(session, fixture.overdrive.collection) - - # But trying to access the collection token will cause it. - with pytest.raises(CannotLoadConfiguration) as excinfo: - api.collection_token() - assert ( - "Overdrive credentials are valid but could not fetch library: Some message." - in str(excinfo.value) - ) - - def test_401_on_get_refreshes_bearer_token( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - transaction = fixture.overdrive.transaction - - # We have a token. - assert "bearer token" == fixture.api.token - - # But then we try to GET, and receive a 401. - fixture.api.queue_response(401) - - # We refresh the bearer token. (This happens in - # MockOverdriveCoreAPI.token_post, so we don't mock the response - # in the normal way.) - fixture.api.access_token_response = fixture.api.mock_access_token_response( - "new bearer token" - ) - - # Then we retry the GET and it succeeds this time. - fixture.api.queue_response(200, content="at last, the content") - - status_code, headers, content = fixture.api.get(transaction.fresh_url(), {}) - - assert 200 == status_code - assert b"at last, the content" == content - - # The bearer token has been updated. - assert "new bearer token" == fixture.api.token - - def test_credential_refresh_success( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - - """Verify the process of refreshing the Overdrive bearer token.""" - # Perform the initial credential check. 
- fixture.api.check_creds() - credential = fixture.api.credential_object(lambda x: x) - assert "bearer token" == credential.credential - assert fixture.api.token == credential.credential - - fixture.api.access_token_response = fixture.api.mock_access_token_response( - "new bearer token" - ) - - # Refresh the credentials and the token will change to - # the mocked value. - fixture.api.refresh_creds(credential) - assert "new bearer token" == credential.credential - assert fixture.api.token == credential.credential - - def test_401_after_token_refresh_raises_error( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - - assert "bearer token" == fixture.api.token - - # We try to GET and receive a 401. - fixture.api.queue_response(401) - - # We refresh the bearer token. - fixture.api.access_token_response = fixture.api.mock_access_token_response( - "new bearer token" - ) - - # Then we retry the GET but we get another 401. - fixture.api.queue_response(401) - - credential = fixture.api.credential_object(lambda x: x) - fixture.api.refresh_creds(credential) - - # That raises a BadResponseException - with pytest.raises(BadResponseException) as excinfo: - fixture.api.get_library() - assert "Bad response from" in str(excinfo.value) - assert "Something's wrong with the Overdrive OAuth Bearer Token!" in str( - excinfo.value - ) - - def test_401_during_refresh_raises_error( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - - """If we fail to refresh the OAuth bearer token, an exception is - raised. - """ - fixture.api.access_token_response = MockRequestsResponse(401, {}, "") - with pytest.raises(BadResponseException) as excinfo: - fixture.api.refresh_creds(None) - assert "Got status code 401" in str(excinfo.value) - assert "can only continue on: 200." in str(excinfo.value) - - def test_advantage_differences( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - transaction = overdrive_with_api_fixture.overdrive.transaction - session = transaction.session - - # Test the differences between Advantage collections and - # regular Overdrive collections. - - # Here's a regular Overdrive collection. - main = transaction.collection( - protocol=ExternalIntegration.OVERDRIVE, - external_account_id="1", - ) - DatabaseTransactionFixture.set_settings( - main.integration_configuration, "overdrive_client_key", "user" - ) - DatabaseTransactionFixture.set_settings( - main.integration_configuration, "overdrive_client_secret", "password" - ) - DatabaseTransactionFixture.set_settings( - main.integration_configuration, "overdrive_website_id", "100" - ) - DatabaseTransactionFixture.set_settings( - main.integration_configuration, "ils_name", "default" - ) - - # Here's an Overdrive API client for that collection. - overdrive_main = MockOverdriveCoreAPI(session, main) - - # Note the "library" endpoint. - assert ( - "https://api.overdrive.com/v1/libraries/1" - == overdrive_main._library_endpoint - ) - - # The advantage_library_id of a non-Advantage Overdrive account - # is always -1. - assert "1" == overdrive_main.library_id() - assert -1 == overdrive_main.advantage_library_id - - # Here's an Overdrive Advantage collection associated with the - # main Overdrive collection. 
- child = transaction.collection( - protocol=ExternalIntegration.OVERDRIVE, - external_account_id="2", - ) - child.parent = main - overdrive_child = MockOverdriveCoreAPI(session, child) - - # In URL-space, the "library" endpoint for the Advantage - # collection is beneath the the parent collection's "library" - # endpoint. - assert ( - "https://api.overdrive.com/v1/libraries/1/advantageAccounts/2" - == overdrive_child._library_endpoint - ) - - # The advantage_library_id of an Advantage collection is the - # numeric value of its external_account_id. - assert "2" == overdrive_child.library_id() - assert 2 == overdrive_child.advantage_library_id - - def test__get_book_list_page( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - fixture = overdrive_with_api_fixture - - # Test the internal method that retrieves a list of books and - # preprocesses it. - - class MockExtractor: - def link(self, content, rel_to_follow): - self.link_called_with = (content, rel_to_follow) - return "http://next-page/" - - def availability_link_list(self, content): - self.availability_link_list_called_with = content - return ["an availability queue"] - - original_data = {"key": "value"} - for content in ( - original_data, - json.dumps(original_data), - json.dumps(original_data).encode("utf8"), - ): - extractor = MockExtractor() - fixture.api.queue_response(200, content=content) - result = fixture.api._get_book_list_page( - "http://first-page/", "some-rel", extractor - ) - - # A single request was made to the requested page. - (url, headers, body) = fixture.api.requests.pop() - assert len(fixture.api.requests) == 0 - assert url == "http://first-page/" - - # The extractor was used to extract a link to the page - # with rel="some-rel". - # - # Note that the Python data structure (`original_data`) is passed in, - # regardless of whether the mock response body is a Python - # data structure, a bytestring, or a Unicode string. - assert extractor.link_called_with == (original_data, "some-rel") - - # The data structure was also passed into the extractor's - # availability_link_list() method. - assert extractor.availability_link_list_called_with == original_data - - # The final result is a queue of availability data (from - # this page) and a link to the next page. - assert result == (["an availability queue"], "http://next-page/") - - -class TestOverdriveRepresentationExtractor: - def test_availability_info(self, overdrive_fixture: OverdriveFixture): - data, raw = overdrive_fixture.sample_json("overdrive_book_list.json") - availability = OverdriveRepresentationExtractor.availability_link_list(raw) - # Every item in the list has a few important values. - for item in availability: - for key in "availability_link", "author_name", "id", "title", "date_added": - assert key in item - - # Also run a spot check on the actual values. - spot = availability[0] - assert "210bdcad-29b7-445f-8d05-cdbb40abc03a" == spot["id"] - assert "King and Maxwell" == spot["title"] - assert "David Baldacci" == spot["author_name"] - assert "2013-11-12T14:13:00-05:00" == spot["date_added"] - - def test_availability_info_missing_data(self, overdrive_fixture: OverdriveFixture): - # overdrive_book_list_missing_data.json has two products. One - # only has a title, the other only has an ID. - data, raw = overdrive_fixture.sample_json( - "overdrive_book_list_missing_data.json" - ) - [item] = OverdriveRepresentationExtractor.availability_link_list(raw) - - # We got a data structure -- full of missing data -- for the - # item that has an ID. 
- assert "i only have an id" == item["id"] - assert None == item["title"] - assert None == item["author_name"] - assert None == item["date_added"] - - # We did not get a data structure for the item that only has a - # title, because an ID is required -- otherwise we don't know - # what book we're talking about. - - def test_link(self, overdrive_fixture: OverdriveFixture): - data, raw = overdrive_fixture.sample_json("overdrive_book_list.json") - expect = OverdriveCoreAPI.make_link_safe( - "http://api.overdrive.com/v1/collections/collection-id/products?limit=300&offset=0&lastupdatetime=2014-04-28%2009:25:09&sort=popularity:desc&formats=ebook-epub-open,ebook-epub-adobe,ebook-pdf-adobe,ebook-pdf-open" - ) - assert expect == OverdriveRepresentationExtractor.link(raw, "first") - - def test_book_info_to_circulation( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - # Tests that can convert an overdrive json block into a CirculationData object. - overdrive = overdrive_with_api_fixture.overdrive - session = overdrive.transaction.session - - raw, info = overdrive.sample_json("overdrive_availability_information.json") - extractor = OverdriveRepresentationExtractor(overdrive_with_api_fixture.api) - circulationdata = extractor.book_info_to_circulation(info) - - # NOTE: It's not realistic for licenses_available and - # patrons_in_hold_queue to both be nonzero; this is just to - # verify that the test picks up whatever data is in the - # document. - assert 3 == circulationdata.licenses_owned - assert 1 == circulationdata.licenses_available - assert 10 == circulationdata.patrons_in_hold_queue - - # Related IDs. - identifier = circulationdata.primary_identifier(session) - assert (Identifier.OVERDRIVE_ID, "2a005d55-a417-4053-b90d-7a38ca6d2065") == ( - identifier.type, - identifier.identifier, - ) - - def test_book_info_to_circulation_advantage( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - # Overdrive Advantage accounts (a.k.a. "child" or "sub" accounts derive - # different information from the same API responses as "main" Overdrive - # accounts. - overdrive = overdrive_with_api_fixture.overdrive - raw, info = overdrive.sample_json("overdrive_availability_advantage.json") - - extractor = OverdriveRepresentationExtractor(overdrive_with_api_fixture.api) - # Calling in the context of a main account should return a count of - # the main account and any shared sub account owned and available. - consortial_data = extractor.book_info_to_circulation(info) - assert 10 == consortial_data.licenses_owned - assert 10 == consortial_data.licenses_available - - class MockAPI: - # Pretend to be an API for an Overdrive Advantage collection with - # library ID 61. - advantage_library_id = 61 - - extractor = OverdriveRepresentationExtractor(MockAPI()) - advantage_data = extractor.book_info_to_circulation(info) - assert 1 == advantage_data.licenses_owned - assert 1 == advantage_data.licenses_available - - # Both collections have the same information about active - # holds, because that information is not split out by - # collection. - assert 0 == advantage_data.patrons_in_hold_queue - assert 0 == consortial_data.patrons_in_hold_queue - - # If for whatever reason Overdrive doesn't mention the - # relevant collection at all, no collection-specific - # information is gleaned. - # - # TODO: It would probably be better not to return a - # CirculationData object at all, but this shouldn't happen in - # a real scenario. 
- class MockAPI2: - # Pretend to be an API for an Overdrive Advantage collection with - # library ID 62. - advantage_library_id = 62 - - extractor = OverdriveRepresentationExtractor(MockAPI2()) - advantage_data = extractor.book_info_to_circulation(info) - assert 0 == advantage_data.licenses_owned - assert 0 == advantage_data.licenses_available - - class MockAPI3: - # Pretend to be an API for an Overdrive Advantage collection with - # library ID 63 which contains shared copies. - advantage_library_id = 63 - - extractor = OverdriveRepresentationExtractor(MockAPI3()) - advantage_data = extractor.book_info_to_circulation(info) - # since these copies are shared and counted as part of the main - # context we do not count them here. - assert 0 == advantage_data.licenses_owned - assert 0 == advantage_data.licenses_available - - def test_not_found_error_to_circulationdata( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - overdrive = overdrive_with_api_fixture.overdrive - transaction = overdrive.transaction - raw, info = overdrive.sample_json("overdrive_availability_not_found.json") - - # By default, a "NotFound" error can't be converted to a - # CirculationData object, because we don't know _which_ book it - # was that wasn't found. - extractor = OverdriveRepresentationExtractor(overdrive_with_api_fixture.api) - m = extractor.book_info_to_circulation - assert None == m(info) - - # However, if an ID was added to `info` ahead of time (as the - # circulation code does), we do know, and we can create a - # CirculationData. - identifier = transaction.identifier(identifier_type=Identifier.OVERDRIVE_ID) - info["id"] = identifier.identifier - data = m(info) - assert identifier == data.primary_identifier(transaction.session) - assert 0 == data.licenses_owned - assert 0 == data.licenses_available - assert 0 == data.patrons_in_hold_queue - - def test_book_info_with_metadata(self, overdrive_fixture: OverdriveFixture): - # Tests that can convert an overdrive json block into a Metadata object. - - raw, info = overdrive_fixture.sample_json("overdrive_metadata.json") - metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) - - assert "Agile Documentation" == metadata.title - assert ( - "Agile Documentation A Pattern Guide to Producing Lightweight Documents for Software Projects" - == metadata.sort_title - ) - assert ( - "A Pattern Guide to Producing Lightweight Documents for Software Projects" - == metadata.subtitle - ) - assert Edition.BOOK_MEDIUM == metadata.medium - assert "Wiley Software Patterns" == metadata.series - assert "eng" == metadata.language - assert "Wiley" == metadata.publisher - assert "John Wiley & Sons, Inc." == metadata.imprint - assert 2005 == metadata.published.year - assert 1 == metadata.published.month - assert 31 == metadata.published.day - - [author] = metadata.contributors - assert "Rüping, Andreas" == author.sort_name - assert "Andreas Rüping" == author.display_name - assert [Contributor.AUTHOR_ROLE] == author.roles - - subjects = sorted(metadata.subjects, key=lambda x: x.identifier) - - assert [ - ("Computer Technology", Subject.OVERDRIVE, 100), - ("Nonfiction", Subject.OVERDRIVE, 100), - ("Object Technologies - Miscellaneous", "tag", 1), - ] == [(x.identifier, x.type, x.weight) for x in subjects] - - # Related IDs. 
- assert (Identifier.OVERDRIVE_ID, "3896665d-9d81-4cac-bd43-ffc5066de1f5") == ( - metadata.primary_identifier.type, - metadata.primary_identifier.identifier, - ) - - ids = [(x.type, x.identifier) for x in metadata.identifiers] - - # The original data contains an actual ASIN and ISBN, plus a blank - # ASIN and three invalid ISBNs: one which is common placeholder - # text, one which is mis-typed and has a bad check digit, and one - # which has an invalid character; the bad identifiers do not show - # up here. - assert [ - (Identifier.ASIN, "B000VI88N2"), - (Identifier.ISBN, "9780470856246"), - (Identifier.OVERDRIVE_ID, "3896665d-9d81-4cac-bd43-ffc5066de1f5"), - ] == sorted(ids) - - # Available formats. - [kindle, pdf] = sorted( - metadata.circulation.formats, key=lambda x: x.content_type - ) - assert DeliveryMechanism.KINDLE_CONTENT_TYPE == kindle.content_type - assert DeliveryMechanism.KINDLE_DRM == kindle.drm_scheme - - assert Representation.PDF_MEDIA_TYPE == pdf.content_type - assert DeliveryMechanism.ADOBE_DRM == pdf.drm_scheme - - # Links to various resources. - shortd, image, longd = sorted(metadata.links, key=lambda x: x.rel) - - assert Hyperlink.DESCRIPTION == longd.rel - assert longd.content.startswith("
Software documentation") - - assert Hyperlink.SHORT_DESCRIPTION == shortd.rel - assert shortd.content.startswith("
Software documentation") - assert len(shortd.content) < len(longd.content) - - assert Hyperlink.IMAGE == image.rel - assert ( - "http://images.contentreserve.com/ImageType-100/0128-1/%7B3896665D-9D81-4CAC-BD43-FFC5066DE1F5%7DImg100.jpg" - == image.href - ) - - thumbnail = image.thumbnail - - assert Hyperlink.THUMBNAIL_IMAGE == thumbnail.rel - assert ( - "http://images.contentreserve.com/ImageType-200/0128-1/%7B3896665D-9D81-4CAC-BD43-FFC5066DE1F5%7DImg200.jpg" - == thumbnail.href - ) - - # Measurements associated with the book. - - measurements = metadata.measurements - popularity = [ - x for x in measurements if x.quantity_measured == Measurement.POPULARITY - ][0] - assert 2 == popularity.value - - rating = [x for x in measurements if x.quantity_measured == Measurement.RATING][ - 0 - ] - assert 1 == rating.value - - # Request only the bibliographic information. - metadata = OverdriveRepresentationExtractor.book_info_to_metadata( - info, include_bibliographic=True, include_formats=False - ) - - assert "Agile Documentation" == metadata.title - assert None == metadata.circulation - - # Request only the format information. - metadata = OverdriveRepresentationExtractor.book_info_to_metadata( - info, include_bibliographic=False, include_formats=True - ) - - assert None == metadata.title - - [kindle, pdf] = sorted( - metadata.circulation.formats, key=lambda x: x.content_type - ) - assert DeliveryMechanism.KINDLE_CONTENT_TYPE == kindle.content_type - assert DeliveryMechanism.KINDLE_DRM == kindle.drm_scheme - - assert Representation.PDF_MEDIA_TYPE == pdf.content_type - assert DeliveryMechanism.ADOBE_DRM == pdf.drm_scheme - - def test_audiobook_info(self, overdrive_fixture: OverdriveFixture): - # This book will be available in three formats: a link to the - # Overdrive Read website, a manifest file that SimplyE can - # download, and the legacy format used by the mobile app - # called 'Overdrive'. - raw, info = overdrive_fixture.sample_json("audiobook.json") - metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) - streaming, manifest, legacy = sorted( - metadata.circulation.formats, key=lambda x: x.content_type - ) - assert DeliveryMechanism.STREAMING_AUDIO_CONTENT_TYPE == streaming.content_type - assert ( - MediaTypes.OVERDRIVE_AUDIOBOOK_MANIFEST_MEDIA_TYPE == manifest.content_type - ) - assert "application/x-od-media" == legacy.content_type - - def test_book_info_with_sample(self, overdrive_fixture: OverdriveFixture): - # This book has two samples; one available as a direct download and - # one available through a manifest file. - raw, info = overdrive_fixture.sample_json("has_sample.json") - metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) - samples = [x for x in metadata.links if x.rel == Hyperlink.SAMPLE] - epub_sample, manifest_sample = sorted(samples, key=lambda x: x.media_type) - - # Here's the direct download. - assert ( - "http://excerpts.contentreserve.com/FormatType-410/1071-1/9BD/24F/82/BridesofConvenienceBundle9781426803697.epub" - == epub_sample.href - ) - assert MediaTypes.EPUB_MEDIA_TYPE == epub_sample.media_type - - # Here's the manifest. 
- assert ( - "https://samples.overdrive.com/?crid=9BD24F82-35C0-4E0A-B5E7-BCFED07835CF&.epub-sample.overdrive.com" - == manifest_sample.href - ) - # Assert we have the end content type of the sample, no DRM formats - assert "text/html" == manifest_sample.media_type - - def test_book_info_with_unknown_sample(self, overdrive_fixture: OverdriveFixture): - raw, info = overdrive_fixture.sample_json("has_sample.json") - - # Just use one format, and change a sample type to unknown - # Only one (known sample) should be extracted then - info["formats"] = [info["formats"][1]] - info["formats"][0]["samples"][1]["formatType"] = "overdrive-unknown" - metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) - samples = [x for x in metadata.links if x.rel == Hyperlink.SAMPLE] - - assert 1 == len(samples) - assert samples[0].media_type == MediaTypes.EPUB_MEDIA_TYPE - - def test_book_info_with_grade_levels(self, overdrive_fixture: OverdriveFixture): - raw, info = overdrive_fixture.sample_json("has_grade_levels.json") - metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) - - grade_levels = sorted( - x.identifier for x in metadata.subjects if x.type == Subject.GRADE_LEVEL - ) - assert ["Grade 4", "Grade 5", "Grade 6", "Grade 7", "Grade 8"] == grade_levels - - def test_book_info_with_awards(self, overdrive_fixture: OverdriveFixture): - raw, info = overdrive_fixture.sample_json("has_awards.json") - metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) - - [awards] = [ - x - for x in metadata.measurements - if Measurement.AWARDS == x.quantity_measured - ] - assert 1 == awards.value - assert 1 == awards.weight - - def test_image_link_to_linkdata(self): - def m(link): - return OverdriveRepresentationExtractor.image_link_to_linkdata(link, "rel") - - # Test missing data. - assert None == m(None) - assert None == m(dict()) - - # Test an ordinary success case. - url = "http://images.overdrive.com/image.png" - type = "image/type" - data = m(dict(href=url, type=type)) - assert isinstance(data, LinkData) - assert url == data.href - assert type == data.media_type - - # Test a case where no media type is provided. - data = m(dict(href=url)) - assert None == data.media_type - - # Verify that invalid URLs are made link-safe. - data = m(dict(href="http://api.overdrive.com/v1/foo:bar")) - assert "http://api.overdrive.com/v1/foo%3Abar" == data.href - - # Stand-in cover images are detected and filtered out. - data = m( - dict( - href="https://img1.od-cdn.com/ImageType-100/0293-1/{00000000-0000-0000-0000-000000000002}Img100.jpg" - ) - ) - assert None == data - - def test_internal_formats(self): - # Overdrive's internal format names may correspond to one or more - # delivery mechanisms. - def assert_formats(overdrive_name, *expect): - actual = OverdriveRepresentationExtractor.internal_formats(overdrive_name) - assert list(expect) == list(actual) - - # Most formats correspond to one delivery mechanism. - assert_formats( - "ebook-pdf-adobe", (MediaTypes.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM) - ) - - assert_formats( - "ebook-epub-open", (MediaTypes.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM) - ) - - # ebook-overdrive and audiobook-overdrive each correspond to - # two delivery mechanisms. 
- assert_formats( - "ebook-overdrive", - ( - MediaTypes.OVERDRIVE_EBOOK_MANIFEST_MEDIA_TYPE, - DeliveryMechanism.LIBBY_DRM, - ), - ( - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, - DeliveryMechanism.STREAMING_DRM, - ), - ) - - assert_formats( - "audiobook-overdrive", - ( - MediaTypes.OVERDRIVE_AUDIOBOOK_MANIFEST_MEDIA_TYPE, - DeliveryMechanism.LIBBY_DRM, - ), - ( - DeliveryMechanism.STREAMING_AUDIO_CONTENT_TYPE, - DeliveryMechanism.STREAMING_DRM, - ), - ) - - # An unrecognized format does not correspond to any delivery - # mechanisms. - assert_formats("no-such-format") - - -class TestOverdriveAdvantageAccount: - def test_no_advantage_accounts( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - """When there are no Advantage accounts, get_advantage_accounts() - returns an empty list. - """ - fixture = overdrive_with_api_fixture - fixture.api.queue_collection_token() - assert [] == fixture.api.get_advantage_accounts() - - def test_from_representation( - self, overdrive_with_api_fixture: OverdriveWithAPIFixture - ): - """Test the creation of OverdriveAdvantageAccount objects - from Overdrive's representation of a list of accounts. - """ - fixture = overdrive_with_api_fixture - raw, data = fixture.overdrive.sample_json("advantage_accounts.json") - [ac1, ac2] = OverdriveAdvantageAccount.from_representation(raw) - - # The two Advantage accounts have the same parent library ID. - assert "1225" == ac1.parent_library_id - assert "1225" == ac2.parent_library_id - - # But they have different names and library IDs. - assert "3" == ac1.library_id - assert "The Other Side of Town Library" == ac1.name - - assert "9" == ac2.library_id - assert "The Common Community Library" == ac2.name - - def test_to_collection(self, overdrive_with_api_fixture: OverdriveWithAPIFixture): - # Test that we can turn an OverdriveAdvantageAccount object into - # a Collection object. - fixture = overdrive_with_api_fixture - transaction, session = ( - fixture.overdrive.transaction, - fixture.overdrive.transaction.session, - ) - - account = OverdriveAdvantageAccount( - "parent_id", - "child_id", - "Library Name", - "token value", - ) - - # We can't just create a Collection object for this object because - # the parent doesn't exist. - with pytest.raises(ValueError) as excinfo: - account.to_collection(session) - assert "Cannot create a Collection whose parent does not already exist." in str( - excinfo.value - ) - - # So, create a Collection to be the parent. - parent = transaction.collection( - name="Parent", - protocol=ExternalIntegration.OVERDRIVE, - external_account_id="parent_id", - ) - - # Now it works. - p, collection = account.to_collection(session) - assert p == parent - assert parent == collection.parent - assert collection.external_account_id == account.library_id - assert ExternalIntegration.LICENSE_GOAL == collection.external_integration.goal - assert ExternalIntegration.OVERDRIVE == collection.protocol - assert Goals.LICENSE_GOAL == collection.integration_configuration.goal - assert ExternalIntegration.OVERDRIVE == collection.protocol - - # To ensure uniqueness, the collection was named after its - # parent. 
- assert f"{parent.name} / {account.name}" == collection.name - - -class OverdriveBibliographicCoverageProviderFixture: - overdrive: OverdriveFixture - provider: OverdriveBibliographicCoverageProvider - api: MockOverdriveCoreAPI - - -@pytest.fixture -def overdrive_biblio_provider_fixture( - overdrive_fixture: OverdriveFixture, -) -> OverdriveBibliographicCoverageProviderFixture: - fix = OverdriveBibliographicCoverageProviderFixture() - fix.overdrive = overdrive_fixture - fix.provider = OverdriveBibliographicCoverageProvider( - overdrive_fixture.collection, api_class=MockOverdriveCoreAPI - ) - fix.api = fix.provider.api - return fix - - -class TestOverdriveBibliographicCoverageProvider: - """Test the code that looks up bibliographic information from Overdrive.""" - - def test_script_instantiation( - self, - overdrive_biblio_provider_fixture: OverdriveBibliographicCoverageProviderFixture, - ): - """Test that RunCoverageProviderScript can instantiate - the coverage provider. - """ - - fixture = overdrive_biblio_provider_fixture - transaction = fixture.overdrive.transaction - - script = RunCollectionCoverageProviderScript( - OverdriveBibliographicCoverageProvider, - transaction.session, - api_class=MockOverdriveCoreAPI, - ) - [provider] = script.providers - assert isinstance(provider, OverdriveBibliographicCoverageProvider) - assert isinstance(provider.api, MockOverdriveCoreAPI) - assert fixture.overdrive.collection == provider.collection - - def test_invalid_or_unrecognized_guid( - self, - overdrive_biblio_provider_fixture: OverdriveBibliographicCoverageProviderFixture, - ): - """A bad or malformed GUID can't get coverage.""" - fixture = overdrive_biblio_provider_fixture - transaction = fixture.overdrive.transaction - - identifier = transaction.identifier() - identifier.identifier = "bad guid" - fixture.api.queue_collection_token() - - error = '{"errorCode": "InvalidGuid", "message": "An invalid guid was given.", "token": "7aebce0e-2e88-41b3-b6d3-82bf15f8e1a2"}' - fixture.api.queue_response(200, content=error) - - failure = fixture.provider.process_item(identifier) - assert isinstance(failure, CoverageFailure) - assert False == failure.transient - assert "Invalid Overdrive ID: bad guid" == failure.exception - - # This is for when the GUID is well-formed but doesn't - # correspond to any real Overdrive book. - error = '{"errorCode": "NotFound", "message": "Not found in Overdrive collection.", "token": "7aebce0e-2e88-41b3-b6d3-82bf15f8e1a2"}' - fixture.api.queue_response(200, content=error) - - failure = fixture.provider.process_item(identifier) - assert isinstance(failure, CoverageFailure) - assert False == failure.transient - assert "ID not recognized by Overdrive: bad guid" == failure.exception - - def test_process_item_creates_presentation_ready_work( - self, - overdrive_biblio_provider_fixture: OverdriveBibliographicCoverageProviderFixture, - ): - """Test the normal workflow where we ask Overdrive for data, - Overdrive provides it, and we create a presentation-ready work. - """ - fixture = overdrive_biblio_provider_fixture - transaction = fixture.overdrive.transaction - - fixture.api.queue_collection_token() - - # Here's the book mentioned in overdrive_metadata.json. - identifier = transaction.identifier(identifier_type=Identifier.OVERDRIVE_ID) - identifier.identifier = "3896665d-9d81-4cac-bd43-ffc5066de1f5" - - # This book has no LicensePool. 
- assert [] == identifier.licensed_through - - # Run it through the OverdriveBibliographicCoverageProvider - raw, info = fixture.overdrive.sample_json("overdrive_metadata.json") - fixture.api.queue_response(200, content=raw) - - [result] = fixture.provider.process_batch([identifier]) - assert identifier == result - - # A LicensePool was created, not because we know anything - # about how we've licensed this book, but to have a place to - # store the information about what formats the book is - # available in. - [pool] = identifier.licensed_through - assert 0 == pool.licenses_owned - [lpdm1, lpdm2] = pool.delivery_mechanisms - names = [x.delivery_mechanism.name for x in pool.delivery_mechanisms] - assert sorted( - [ - "application/pdf (application/vnd.adobe.adept+xml)", - "Kindle via Amazon (Kindle DRM)", - ] - ) == sorted(names) - - # A Work was created and made presentation ready. - assert "Agile Documentation" == pool.work.title - assert True == pool.work.presentation_ready diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index bba5afac41..38154fb1c2 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -1,12 +1,10 @@ from __future__ import annotations -import csv import datetime import json -import os import random from io import StringIO -from unittest.mock import MagicMock, PropertyMock, call, patch +from unittest.mock import MagicMock, call, patch import pytest from freezegun import freeze_time @@ -42,7 +40,6 @@ from core.model.patron import Patron from core.monitor import CollectionMonitor, Monitor, ReaperMonitor from core.opds_import import OPDSImportMonitor -from core.overdrive import OverdriveAdvantageAccount from core.scripts import ( AddClassificationScript, CheckContributorNamesInDB, @@ -56,7 +53,6 @@ CustomListUpdateEntriesScript, DeleteInvisibleLanesScript, Explain, - GenerateOverdriveAdvantageAccountList, IdentifierInputScript, LaneSweeperScript, LibraryInputScript, @@ -2568,111 +2564,6 @@ def test_do_run(self, db: DatabaseTransactionFixture): assert self.script.process_loan.call_count == 0 -class TestGenerateOverdriveAdvantageAccountList: - def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture): - output_file_path = "test-output.csv" - circ_manager_name = "circ_man_name" - parent_library_name = "Parent" - parent_od_library_id = "parent_id" - child1_library_name = "child1" - child1_advantage_library_id = "1" - child1_token = "token1" - child2_library_name = "child2" - child2_advantage_library_id = "2" - child2_token = "token2" - client_key = "ck" - client_secret = "cs" - library_token = "lt" - - parent: Collection = db.collection( - name=parent_library_name, - protocol=ExternalIntegration.OVERDRIVE, - external_account_id=parent_od_library_id, - ) - child1: Collection = db.collection( - name=child1_library_name, - protocol=ExternalIntegration.OVERDRIVE, - external_account_id=child1_advantage_library_id, - ) - child1.parent = parent - overdrive_api = MagicMock() - overdrive_api.get_advantage_accounts.return_value = [ - OverdriveAdvantageAccount( - parent_od_library_id, - child1_advantage_library_id, - child1_library_name, - child1_token, - ), - OverdriveAdvantageAccount( - parent_od_library_id, - child2_advantage_library_id, - child2_library_name, - child2_token, - ), - ] - - overdrive_api.client_key.return_value = bytes(client_key, "utf-8") - overdrive_api.client_secret.return_value = bytes(client_secret, "utf-8") - type(overdrive_api).collection_token = PropertyMock(return_value=library_token) - - with 
patch( - "core.scripts.GenerateOverdriveAdvantageAccountList._create_overdrive_api" - ) as create_od_api: - create_od_api.return_value = overdrive_api - GenerateOverdriveAdvantageAccountList(db.session).do_run( - cmd_args=[ - "--output-file-path", - output_file_path, - "--circulation-manager-name", - circ_manager_name, - ] - ) - - with open(output_file_path, newline="") as csv_file: - csvreader = csv.reader(csv_file) - for index, row in enumerate(csvreader): - if index == 0: - assert "cm" == row[0] - assert "collection" == row[1] - assert "overdrive_library_id" == row[2] - assert "client_key" == row[3] - assert "client_secret" == row[4] - assert "library_token" == row[5] - assert "advantage_name" == row[6] - assert "advantage_id" == row[7] - assert "advantage_token" == row[8] - assert "already_configured" == row[9] - elif index == 1: - assert circ_manager_name == row[0] - assert parent_library_name == row[1] - assert parent_od_library_id == row[2] - assert client_key == row[3] - assert client_secret == row[4] - assert library_token == row[5] - assert child1_library_name == row[6] - assert child1_advantage_library_id == row[7] - assert child1_token == row[8] - assert "True" == row[9] - else: - assert circ_manager_name == row[0] - assert parent_library_name == row[1] - assert parent_od_library_id == row[2] - assert client_key == row[3] - assert client_secret == row[4] - assert library_token == row[5] - assert child2_library_name == row[6] - assert child2_advantage_library_id == row[7] - assert child2_token == row[8] - assert "False" == row[9] - last_index = index - - os.remove(output_file_path) - assert last_index == 2 - overdrive_api.client_key.assert_called_once() - overdrive_api.client_secret.assert_called_once() - overdrive_api.get_advantage_accounts.assert_called_once() - - class TestWorkConsolidationScript: """TODO""" diff --git a/tests/fixtures/overdrive.py b/tests/fixtures/overdrive.py deleted file mode 100644 index 1aa8c98163..0000000000 --- a/tests/fixtures/overdrive.py +++ /dev/null @@ -1,73 +0,0 @@ -import json -import os -from pathlib import Path - -import pytest - -from core.model import Collection -from tests.api.mockapi.overdrive import MockOverdriveCoreAPI -from tests.fixtures.database import DatabaseTransactionFixture - - -class OverdriveFixture: - """A basic fixture for Overdrive tests.""" - - transaction: DatabaseTransactionFixture - collection: Collection - _resource_path: str - _base_path: str - - @classmethod - def create(cls, transaction: DatabaseTransactionFixture) -> "OverdriveFixture": - fix = OverdriveFixture() - fix._base_path = str(Path(__file__).parent.parent) - fix._resource_path = os.path.join(fix._base_path, "core", "files", "overdrive") - fix.transaction = transaction - fix.collection = MockOverdriveCoreAPI.mock_collection( - transaction.session, transaction.default_library() - ) - return fix - - def sample_json(self, filename): - path = os.path.join(self._resource_path, filename) - data = open(path).read() - return data, json.loads(data) - - -@pytest.fixture() -def overdrive_fixture( - db, -) -> OverdriveFixture: - """A basic fixture for Overdrive tests.""" - return OverdriveFixture.create(db) - - -class OverdriveWithAPIFixture: - overdrive: OverdriveFixture - api: MockOverdriveCoreAPI - - """Automatically create a MockOverdriveCoreAPI class during setup. 
-
-    We don't always do this because
-    TestOverdriveBibliographicCoverageProvider needs to create a
-    MockOverdriveCoreAPI during the test, and at the moment the second
-    MockOverdriveCoreAPI request created in a test behaves differently
-    from the first one.
-    """
-
-    @classmethod
-    def create(
-        cls, transaction: DatabaseTransactionFixture
-    ) -> "OverdriveWithAPIFixture":
-        fix = OverdriveWithAPIFixture()
-        fix.overdrive = OverdriveFixture.create(transaction)
-        fix.api = MockOverdriveCoreAPI(transaction.session, fix.overdrive.collection)
-        return fix
-
-
-@pytest.fixture()
-def overdrive_with_api_fixture(
-    db,
-) -> OverdriveWithAPIFixture:
-    """A fixture for Overdrive tests that includes a mocked API."""
-    return OverdriveWithAPIFixture.create(db)

From 714dbd5993a5e76b0b8574fee17f802f6aaaf59b Mon Sep 17 00:00:00 2001
From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com>
Date: Fri, 6 Oct 2023 15:21:04 +0530
Subject: [PATCH 089/262] PP-337 Quicksight dashboard embed URL generation (#1378)

* Quicksight dashboard embed URL generation

* Pydantic 1.1 under python 3.8 does not allow custom datatypes with Generics
Re-implemented as a functional_validator

* Switched from using arrays to a descriptive dict for quicksight ARNs
Added a /names API to get the dashboard names available
---
 README.md                                     |  10 +
 api/admin/controller/__init__.py              |   3 +
 api/admin/controller/quicksight.py            | 117 ++++++++++
 api/admin/model/quicksight.py                 |  23 ++
 api/admin/routes.py                           |  38 +++-
 api/controller.py                             |   2 +
 core/config.py                                |  12 +-
 core/util/flask_util.py                       |  11 +
 tests/api/admin/controller/test_quicksight.py | 204 ++++++++++++++++++
 tests/api/admin/test_routes.py                |  21 ++
 tests/core/util/test_flask_util.py            |  12 ++
 11 files changed, 450 insertions(+), 3 deletions(-)
 create mode 100644 api/admin/controller/quicksight.py
 create mode 100644 api/admin/model/quicksight.py
 create mode 100644 tests/api/admin/controller/test_quicksight.py

diff --git a/README.md b/README.md
index ef92d5cdcb..39eb39cb93 100644
--- a/README.md
+++ b/README.md
@@ -239,8 +239,18 @@ export SIMPLIFIED_FCM_CREDENTIALS_FILE="/opt/credentials/fcm_credentials.json"
 The FCM credentials can be downloaded once a Google Service account has been created.
 More details in the [FCM documentation](https://firebase.google.com/docs/admin/setup#set-up-project-and-service-account)
 
+##### Quicksight Dashboards
+
+For generating quicksight dashboard links, the following environment variable is required:
+`QUICKSIGHT_AUTHORIZED_ARNS` - A dictionary of the format `"<dashboard name>": ["arn:aws:quicksight:...", ...]`,
+where each quicksight dashboard is given an arbitrary "name" and a list of "authorized arns".
+The first of the "authorized arns" is always used as the `InitialDashboardID` when creating an embed URL
+for the respective "dashboard name". 
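[Editor's note: the following is an illustrative sketch, not part of this patch. The dashboard names, AWS account id, region, and dashboard UUIDs are placeholders; the snippet mirrors the `json.loads` parsing that `Configuration.quicksight_authorized_arns()` in `core/config.py` performs on this environment variable.]

```python
import json
import os

# Hypothetical dashboard names and placeholder ARNs. Each key is an arbitrary
# dashboard name; each value is a list of authorized ARNs, and the first ARN
# in the list is the one used as the initial dashboard when an embed URL is
# generated for that name.
authorized_arns = {
    "library": [
        "arn:aws:quicksight:us-west-2:111122223333:dashboard/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee",
    ],
    "collection": [
        "arn:aws:quicksight:us-west-2:111122223333:dashboard/ffffffff-0000-1111-2222-333333333333",
    ],
}

# The environment variable holds this structure as a JSON string ...
os.environ["QUICKSIGHT_AUTHORIZED_ARNS"] = json.dumps(authorized_arns)

# ... and the server reads it back with json.loads, as
# Configuration.quicksight_authorized_arns() does.
assert json.loads(os.environ["QUICKSIGHT_AUTHORIZED_ARNS"]) == authorized_arns
```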
+ #### Email +### Email sending + To use the features that require sending emails, for example to reset the password for logged-out users, you will need to have a working SMTP server and set some environment variables: diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index fb03af81bd..cb7b581a11 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -2,6 +2,8 @@ from typing import TYPE_CHECKING +from api.admin.controller.quicksight import QuickSightController + if TYPE_CHECKING: from api.controller import CirculationManager @@ -100,3 +102,4 @@ def setup_admin_controllers(manager: CirculationManager): manager.admin_catalog_services_controller = CatalogServicesController(manager) manager.admin_announcement_service = AnnouncementSettings(manager) manager.admin_search_controller = AdminSearchController(manager) + manager.admin_quicksight_controller = QuickSightController(manager) diff --git a/api/admin/controller/quicksight.py b/api/admin/controller/quicksight.py new file mode 100644 index 0000000000..2e36d23523 --- /dev/null +++ b/api/admin/controller/quicksight.py @@ -0,0 +1,117 @@ +import logging +from typing import Dict + +import boto3 +import flask + +from api.admin.model.quicksight import ( + QuicksightDashboardNamesResponse, + QuicksightGenerateUrlRequest, + QuicksightGenerateUrlResponse, +) +from api.controller import CirculationManagerController +from api.problem_details import NOT_FOUND_ON_REMOTE +from core.config import Configuration +from core.model.admin import Admin +from core.model.library import Library +from core.problem_details import INTERNAL_SERVER_ERROR, INVALID_INPUT +from core.util.problem_detail import ProblemError + + +class QuickSightController(CirculationManagerController): + def generate_quicksight_url(self, dashboard_name) -> Dict: + log = logging.getLogger(self.__class__.__name__) + admin: Admin = getattr(flask.request, "admin") + request_data = QuicksightGenerateUrlRequest(**flask.request.args) + + all_authorized_arns = Configuration.quicksight_authorized_arns() + if not all_authorized_arns: + log.error("No Quicksight ARNs were configured for this server.") + raise ProblemError( + INTERNAL_SERVER_ERROR.detailed( + "Quicksight has not been configured for this server." + ) + ) + + authorized_arns = all_authorized_arns.get(dashboard_name) + if not authorized_arns: + raise ProblemError( + INVALID_INPUT.detailed( + "The requested Dashboard ARN is not recognized by this server." + ) + ) + + # The first dashboard id is the primary ARN + dashboard_arn = authorized_arns[0] + # format aws:arn:quicksight::: + arn_parts = dashboard_arn.split(":") + # Pull the region and account id from the ARN + aws_account_id = arn_parts[4] + region = arn_parts[3] + dashboard_id = arn_parts[5].split("/", 1)[1] # drop the "dashboard/" part + + allowed_libraries = [] + for library in self._db.query(Library).all(): + if admin.is_librarian(library): + allowed_libraries.append(library) + + if request_data.library_ids: + allowed_library_ids = list( + set(request_data.library_ids).intersection( + {l.id for l in allowed_libraries} + ) + ) + else: + allowed_library_ids = [l.id for l in allowed_libraries] + + if not allowed_library_ids: + raise ProblemError( + NOT_FOUND_ON_REMOTE.detailed( + "No library was found for this Admin that matched the request." 
+ ) + ) + + libraries = ( + self._db.query(Library).filter(Library.id.in_(allowed_library_ids)).all() + ) + + try: + delimiter = "|" + client = boto3.client("quicksight", region_name=region) + response = client.generate_embed_url_for_anonymous_user( + AwsAccountId=aws_account_id, + Namespace="default", # Default namespace only + AuthorizedResourceArns=authorized_arns, + ExperienceConfiguration={ + "Dashboard": {"InitialDashboardId": dashboard_id} + }, + SessionTags=[ + dict( + Key="library_name", + Value=delimiter.join([l.name for l in libraries]), + ) + ], + ) + except Exception as ex: + log.error(f"Error while fetching the Quisksight Embed url: {ex}") + raise ProblemError( + INTERNAL_SERVER_ERROR.detailed( + "Error while fetching the Quisksight Embed url." + ) + ) + + embed_url = response.get("EmbedUrl") + if response.get("Status") // 100 != 2 or embed_url is None: + log.error(f"QuiskSight Embed url error response {response}") + raise ProblemError( + INTERNAL_SERVER_ERROR.detailed( + "Error while fetching the Quisksight Embed url." + ) + ) + + return QuicksightGenerateUrlResponse(embed_url=embed_url).api_dict() + + def get_dashboard_names(self): + """Get the named dashboard IDs defined in the configuration""" + config = Configuration.quicksight_authorized_arns() + return QuicksightDashboardNamesResponse(names=list(config.keys())).api_dict() diff --git a/api/admin/model/quicksight.py b/api/admin/model/quicksight.py new file mode 100644 index 0000000000..752f889e37 --- /dev/null +++ b/api/admin/model/quicksight.py @@ -0,0 +1,23 @@ +from typing import List + +from pydantic import Field, validator + +from core.util.flask_util import CustomBaseModel, str_comma_list_validator + + +class QuicksightGenerateUrlRequest(CustomBaseModel): + library_ids: List[int] = Field( + description="The list of libraries to include in the dataset, an empty list is equivalent to all the libraries the user is allowed to access." + ) + + @validator("library_ids", pre=True) + def parse_library_ids(cls, value): + return str_comma_list_validator(value) + + +class QuicksightGenerateUrlResponse(CustomBaseModel): + embed_url: str = Field(description="The dashboard embed url.") + + +class QuicksightDashboardNamesResponse(CustomBaseModel): + names: List[str] = Field(description="The named quicksight dashboard ids") diff --git a/api/admin/routes.py b/api/admin/routes.py index cc76269a15..c334bdb32a 100644 --- a/api/admin/routes.py +++ b/api/admin/routes.py @@ -11,11 +11,16 @@ from api.admin.controller.custom_lists import CustomListsController from api.admin.dashboard_stats import generate_statistics from api.admin.model.dashboard_statistics import StatisticsResponse +from api.admin.model.quicksight import ( + QuicksightDashboardNamesResponse, + QuicksightGenerateUrlRequest, + QuicksightGenerateUrlResponse, +) from api.admin.templates import admin_sign_in_again as sign_in_again_template from api.app import api_spec, app from api.routes import allows_library, has_library, library_route from core.app_server import ensure_pydantic_after_problem_detail, returns_problem_detail -from core.util.problem_detail import ProblemDetail, ProblemDetailModel +from core.util.problem_detail import ProblemDetail, ProblemDetailModel, ProblemError # An admin's session will expire after this amount of time and # the admin will have to log in again. 
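[Editor's note: the controller above raises `ProblemError` instead of returning a `ProblemDetail`, and the decorator change in the next hunk converts that exception back into the usual problem-detail response. A rough sketch of the flow follows; `embed_url_or_problem` and `example_route` are hypothetical, while the decorator, `ProblemError`, and `INTERNAL_SERVER_ERROR` are the ones shown in this patch.]

```python
from api.admin.routes import returns_json_or_response_or_problem_detail
from core.problem_details import INTERNAL_SERVER_ERROR
from core.util.problem_detail import ProblemError


def embed_url_or_problem() -> dict:
    # Hypothetical helper: code deep in a controller can signal a problem by
    # raising ProblemError instead of threading a ProblemDetail return value
    # back up through every caller.
    raise ProblemError(
        INTERNAL_SERVER_ERROR.detailed("Error while fetching the embed url.")
    )


@returns_json_or_response_or_problem_detail
def example_route():
    # The decorator catches the ProblemError and returns
    # ex.problem_detail.response, exactly as if the ProblemDetail itself had
    # been returned from this function.
    return embed_url_or_problem()
```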
@@ -82,7 +87,11 @@ def returns_json_or_response_or_problem_detail(f): @wraps(f) def decorated(*args, **kwargs): - v = f(*args, **kwargs) + try: + v = f(*args, **kwargs) + except ProblemError as ex: + # A ProblemError is the same as a ProblemDetail + v = ex.problem_detail if isinstance(v, ProblemDetail): return v.response if isinstance(v, Response): @@ -313,6 +322,31 @@ def stats(): return statistics_response.api_dict() +@app.route("/admin/quicksight_embed/") +@api_spec.validate( + resp=SpecResponse(HTTP_200=QuicksightGenerateUrlResponse), + tags=["admin.quicksight"], + query=QuicksightGenerateUrlRequest, +) +@returns_json_or_response_or_problem_detail +@requires_admin +def generate_quicksight_url(dashboard_name: str): + return app.manager.admin_quicksight_controller.generate_quicksight_url( + dashboard_name + ) + + +@app.route("/admin/quicksight_embed/names") +@api_spec.validate( + resp=SpecResponse(HTTP_200=QuicksightDashboardNamesResponse), + tags=["admin.quicksight"], +) +@returns_json_or_response_or_problem_detail +@requires_admin +def get_quicksight_names(): + return app.manager.admin_quicksight_controller.get_dashboard_names() + + @app.route("/admin/libraries", methods=["GET", "POST"]) @returns_json_or_response_or_problem_detail @requires_admin diff --git a/api/controller.py b/api/controller.py index 7890e3bc3c..e382f5ff15 100644 --- a/api/controller.py +++ b/api/controller.py @@ -150,6 +150,7 @@ PatronAuthServiceSelfTestsController, ) from api.admin.controller.patron_auth_services import PatronAuthServicesController + from api.admin.controller.quicksight import QuickSightController from api.admin.controller.reset_password import ResetPasswordController from api.admin.controller.search_service_self_tests import ( SearchServiceSelfTestsController, @@ -220,6 +221,7 @@ class CirculationManager: admin_announcement_service: AnnouncementSettings admin_search_controller: AdminSearchController admin_view_controller: ViewController + admin_quicksight_controller: QuickSightController def __init__(self, _db, services: Services): self._db = _db diff --git a/core/config.py b/core/config.py index 941efc9d5d..00b17048e3 100644 --- a/core/config.py +++ b/core/config.py @@ -1,7 +1,7 @@ import json import logging import os -from typing import Dict +from typing import Dict, List from flask_babel import lazy_gettext as _ from sqlalchemy.engine.url import make_url @@ -51,6 +51,10 @@ class Configuration(ConfigurationConstants): OD_FULFILLMENT_CLIENT_KEY_SUFFIX = "OVERDRIVE_FULFILLMENT_CLIENT_KEY" OD_FULFILLMENT_CLIENT_SECRET_SUFFIX = "OVERDRIVE_FULFILLMENT_CLIENT_SECRET" + # Quicksight + # Comma separated aws arns + QUICKSIGHT_AUTHORIZED_ARNS_KEY = "QUICKSIGHT_AUTHORIZED_ARNS" + # Environment variable for SirsiDynix Auth SIRSI_DYNIX_APP_ID = "SIMPLIFIED_SIRSI_DYNIX_APP_ID" @@ -284,6 +288,12 @@ def overdrive_fulfillment_keys(cls, testing=False) -> Dict[str, str]: raise CannotLoadConfiguration("Invalid fulfillment credentials.") return {"key": key, "secret": secret} + @classmethod + def quicksight_authorized_arns(cls) -> Dict[str, List[str]]: + """Split the comma separated arns""" + arns_str = os.environ.get(cls.QUICKSIGHT_AUTHORIZED_ARNS_KEY, "") + return json.loads(arns_str) + @classmethod def localization_languages(cls): return [LanguageCodes.three_to_two["eng"]] diff --git a/core/util/flask_util.py b/core/util/flask_util.py index b96f69088f..d728af5e0c 100644 --- a/core/util/flask_util.py +++ b/core/util/flask_util.py @@ -205,3 +205,14 @@ def api_dict( rather than their Python class member names. 
""" return self.dict(*args, by_alias=by_alias, **kwargs) + + +def str_comma_list_validator(value): + """Validate a comma separated string and parse it into a list, generally used for query parameters""" + if isinstance(value, (int, float)): + # A single number shows up as an int + value = str(value) + elif not isinstance(value, str): + raise TypeError("string required") + + return value.split(",") diff --git a/tests/api/admin/controller/test_quicksight.py b/tests/api/admin/controller/test_quicksight.py new file mode 100644 index 0000000000..8f007c88b5 --- /dev/null +++ b/tests/api/admin/controller/test_quicksight.py @@ -0,0 +1,204 @@ +from unittest import mock + +import pytest + +from core.model import create +from core.model.admin import Admin, AdminRole +from core.util.problem_detail import ProblemError +from tests.fixtures.api_admin import AdminControllerFixture +from tests.fixtures.api_controller import ControllerFixture + + +class QuickSightControllerFixture(AdminControllerFixture): + def __init__(self, controller_fixture: ControllerFixture): + super().__init__(controller_fixture) + + +@pytest.fixture +def quicksight_fixture( + controller_fixture: ControllerFixture, +) -> QuickSightControllerFixture: + return QuickSightControllerFixture(controller_fixture) + + +class TestQuicksightController: + def test_generate_quicksight_url( + self, quicksight_fixture: QuickSightControllerFixture + ): + ctrl = quicksight_fixture.manager.admin_quicksight_controller + db = quicksight_fixture.ctrl.db + + system_admin, _ = create(db.session, Admin, email="admin@email.com") + system_admin.add_role(AdminRole.SYSTEM_ADMIN) + default = db.default_library() + library1 = db.library() + + with mock.patch( + "api.admin.controller.quicksight.boto3" + ) as mock_boto, mock.patch( + "api.admin.controller.quicksight.Configuration.quicksight_authorized_arns" + ) as mock_qs_arns: + arns = dict( + primary=[ + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid1", + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid2", + ], + secondary=[ + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid2", + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid1", + ], + ) + mock_qs_arns.return_value = arns + generate_method: mock.MagicMock = ( + mock_boto.client().generate_embed_url_for_anonymous_user + ) + generate_method.return_value = {"Status": 201, "EmbedUrl": "https://embed"} + + with quicksight_fixture.request_context_with_admin( + f"/?library_ids={default.id},{library1.id},30000", + admin=system_admin, + ) as ctx: + response = ctrl.generate_quicksight_url("primary") + + # Assert the right client was created, with a region + assert mock_boto.client.call_args == mock.call( + "quicksight", region_name="us-west-1" + ) + # Assert the reqest and response formats + assert response["embedUrl"] == "https://embed" + assert generate_method.call_args == mock.call( + AwsAccountId="aws-account-id", + Namespace="default", + AuthorizedResourceArns=arns["primary"], + ExperienceConfiguration={ + "Dashboard": {"InitialDashboardId": "uuid1"} + }, + SessionTags=[ + dict(Key="library_name", Value="|".join([default.name, library1.name])) # type: ignore[list-item] + ], + ) + + # Specific library roles + admin1, _ = create(db.session, Admin, email="admin1@email.com") + admin1.add_role(AdminRole.LIBRARY_MANAGER, library1) + + with quicksight_fixture.request_context_with_admin( + f"/?library_ids=1,{library1.id}", + admin=admin1, + ) as ctx: + generate_method.reset_mock() + ctrl.generate_quicksight_url("secondary") + + 
assert generate_method.call_args == mock.call( + AwsAccountId="aws-account-id", + Namespace="default", + AuthorizedResourceArns=arns["secondary"], + ExperienceConfiguration={ + "Dashboard": {"InitialDashboardId": "uuid2"} + }, + SessionTags=[ + dict(Key="library_name", Value="|".join([library1.name])) # type: ignore[list-item] + ], + ) + + def test_generate_quicksight_url_errors( + self, quicksight_fixture: QuickSightControllerFixture + ): + ctrl = quicksight_fixture.manager.admin_quicksight_controller + db = quicksight_fixture.ctrl.db + + library = db.library() + library_not_allowed = db.library() + admin, _ = create(db.session, Admin, email="admin@email.com") + admin.add_role(AdminRole.LIBRARY_MANAGER, library=library) + + with mock.patch( + "api.admin.controller.quicksight.boto3" + ) as mock_boto, mock.patch( + "api.admin.controller.quicksight.Configuration.quicksight_authorized_arns" + ) as mock_qs_arns: + arns = dict( + primary=[ + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid1", + "arn:aws:quicksight:us-west-1:aws-account-id:dashboard/uuid2", + ] + ) + mock_qs_arns.return_value = arns + + with quicksight_fixture.request_context_with_admin( + f"/?library_ids={library.id}", + admin=admin, + ) as ctx: + with pytest.raises(ProblemError) as raised: + ctrl.generate_quicksight_url("secondary") + assert ( + raised.value.problem_detail.detail + == "The requested Dashboard ARN is not recognized by this server." + ) + + mock_qs_arns.return_value = [] + with pytest.raises(ProblemError) as raised: + ctrl.generate_quicksight_url("primary") + assert ( + raised.value.problem_detail.detail + == "Quicksight has not been configured for this server." + ) + + with quicksight_fixture.request_context_with_admin( + f"/?library_ids={library_not_allowed.id}", + admin=admin, + ) as ctx: + mock_qs_arns.return_value = arns + with pytest.raises(ProblemError) as raised: + ctrl.generate_quicksight_url("primary") + assert ( + raised.value.problem_detail.detail + == "No library was found for this Admin that matched the request." + ) + + with quicksight_fixture.request_context_with_admin( + f"/?library_ids={library.id}", + admin=admin, + ) as ctx: + # Bad response from boto + mock_boto.generate_embed_url_for_anonymous_user.return_value = dict( + status=400, embed_url="http://embed" + ) + with pytest.raises(ProblemError) as raised: + ctrl.generate_quicksight_url("primary") + assert ( + raised.value.problem_detail.detail + == "Error while fetching the Quisksight Embed url." + ) + + # 200 status, but no url + mock_boto.generate_embed_url_for_anonymous_user.return_value = dict( + status=200, + ) + with pytest.raises(ProblemError) as raised: + ctrl.generate_quicksight_url("primary") + assert ( + raised.value.problem_detail.detail + == "Error while fetching the Quisksight Embed url." + ) + + # Boto threw an error + mock_boto.generate_embed_url_for_anonymous_user.side_effect = Exception( + "" + ) + with pytest.raises(ProblemError) as raised: + ctrl.generate_quicksight_url("primary") + assert ( + raised.value.problem_detail.detail + == "Error while fetching the Quisksight Embed url." 
+ ) + + def test_get_dashboard_names(self, quicksight_fixture: QuickSightControllerFixture): + with mock.patch( + "api.admin.controller.quicksight.Configuration.quicksight_authorized_arns" + ) as mock_qs_arns: + mock_qs_arns.return_value = dict(primary=[], secondary=[], tertiary=[]) + ctrl = quicksight_fixture.manager.admin_quicksight_controller + assert ctrl.get_dashboard_names() == { + "names": ["primary", "secondary", "tertiary"] + } diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index 7d0b50cd56..a86fd39b4a 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -12,6 +12,7 @@ from api.admin.controller import setup_admin_controllers from api.admin.problem_details import * from api.controller import CirculationManagerController +from core.util.problem_detail import ProblemDetail, ProblemError from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.api_controller import ControllerFixture from tests.fixtures.api_routes import MockApp, MockController, MockManager @@ -908,3 +909,23 @@ def test_static_file(self, fixture: AdminRouteFixture): str(local_path), "circulation-admin.css", ) + + +def test_returns_json_or_response_or_problem_detail(): + @routes.returns_json_or_response_or_problem_detail + def mock_responses(response): + if isinstance(response, ProblemError): + raise response + return response + + problem = ProblemDetail( + "http://problem", status_code=400, title="Title", detail="Is a detail" + ) + + # Both raising an error and responding with a problem detail are equivalent + assert mock_responses(ProblemError(problem)) == problem.response + assert mock_responses(problem) == problem.response + + # A json provides a response object + with flask.app.Flask(__name__).test_request_context(): + assert mock_responses({"status": True}).json == {"status": True} diff --git a/tests/core/util/test_flask_util.py b/tests/core/util/test_flask_util.py index cb074178bf..a8ffb8551e 100644 --- a/tests/core/util/test_flask_util.py +++ b/tests/core/util/test_flask_util.py @@ -16,6 +16,7 @@ Response, _snake_to_camel_case, boolean_value, + str_comma_list_validator, ) from core.util.opds_writer import OPDSFeed @@ -206,3 +207,14 @@ def test_snake_to_camel_case(): # Error case with pytest.raises(ValueError): _snake_to_camel_case("_") + + +def test_str_comma_list_validator(): + assert str_comma_list_validator(5) == ["5"] + assert str_comma_list_validator(1.2) == ["1.2"] + assert str_comma_list_validator("1,2,3") == ["1", "2", "3"] + assert str_comma_list_validator("") == [""] + + # Unsupported types + assert pytest.raises(TypeError, str_comma_list_validator, None) + assert pytest.raises(TypeError, str_comma_list_validator, []) From aba61b727168d933b70a29198a9d429f71cfec89 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 6 Oct 2023 08:54:18 -0300 Subject: [PATCH 090/262] Remove the externalintegrationslinks table (#1379) --- ...a80073d5_remove_externalintegrationlink.py | 90 ++++++++++++ api/admin/controller/settings.py | 91 +----------- core/model/__init__.py | 6 +- core/model/collection.py | 8 -- core/model/configuration.py | 80 ----------- tests/core/models/test_configuration.py | 133 +----------------- tests/fixtures/database.py | 26 ---- 7 files changed, 99 insertions(+), 335 deletions(-) create mode 100644 alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py diff --git a/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py 
b/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py new file mode 100644 index 0000000000..1bff2f4e0a --- /dev/null +++ b/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py @@ -0,0 +1,90 @@ +"""Remove ExternalIntegrationLink. + +Revision ID: 5d71a80073d5 +Revises: 1c566151741f +Create Date: 2023-09-13 15:23:07.566404+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "5d71a80073d5" +down_revision = "1c566151741f" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.drop_index( + "ix_externalintegrationslinks_external_integration_id", + table_name="externalintegrationslinks", + ) + op.drop_index( + "ix_externalintegrationslinks_library_id", + table_name="externalintegrationslinks", + ) + op.drop_index( + "ix_externalintegrationslinks_other_integration_id", + table_name="externalintegrationslinks", + ) + op.drop_index( + "ix_externalintegrationslinks_purpose", table_name="externalintegrationslinks" + ) + op.drop_table("externalintegrationslinks") + + +def downgrade() -> None: + op.create_table( + "externalintegrationslinks", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column( + "external_integration_id", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "other_integration_id", sa.INTEGER(), autoincrement=False, nullable=True + ), + sa.Column("purpose", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["external_integration_id"], + ["externalintegrations.id"], + name="externalintegrationslinks_external_integration_id_fkey", + ), + sa.ForeignKeyConstraint( + ["library_id"], + ["libraries.id"], + name="externalintegrationslinks_library_id_fkey", + ), + sa.ForeignKeyConstraint( + ["other_integration_id"], + ["externalintegrations.id"], + name="externalintegrationslinks_other_integration_id_fkey", + ), + sa.PrimaryKeyConstraint("id", name="externalintegrationslinks_pkey"), + ) + op.create_index( + "ix_externalintegrationslinks_purpose", + "externalintegrationslinks", + ["purpose"], + unique=False, + ) + op.create_index( + "ix_externalintegrationslinks_other_integration_id", + "externalintegrationslinks", + ["other_integration_id"], + unique=False, + ) + op.create_index( + "ix_externalintegrationslinks_library_id", + "externalintegrationslinks", + ["library_id"], + unique=False, + ) + op.create_index( + "ix_externalintegrationslinks_external_integration_id", + "externalintegrationslinks", + ["external_integration_id"], + unique=False, + ) diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index 3b75a54431..3e84dc5eda 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -16,7 +16,6 @@ INTEGRATION_NAME_ALREADY_IN_USE, INTEGRATION_URL_ALREADY_IN_USE, INVALID_CONFIGURATION_OPTION, - MISSING_INTEGRATION, MISSING_SERVICE, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, @@ -37,7 +36,6 @@ from core.model import ( ConfigurationSetting, ExternalIntegration, - ExternalIntegrationLink, IntegrationConfiguration, IntegrationLibraryConfiguration, Library, @@ -58,66 +56,6 @@ class SettingsController(CirculationManagerController, AdminPermissionsControlle NO_MIRROR_INTEGRATION = "NO_MIRROR" - def _set_storage_external_integration_link( - self, service: ExternalIntegration, purpose: str, setting_key: str - ) -> Optional[ProblemDetail]: - """Either set or delete the 
external integration link between the - service and the storage integration. - - :param service: Service's ExternalIntegration object - - :param purpose: Service's purpose - - :param setting_key: Key of the configuration setting that must be set in the storage integration. - For example, a specific bucket (MARC, Analytics, etc.). - - :return: ProblemDetail object if the operation failed - """ - mirror_integration_id = flask.request.form.get("mirror_integration_id") - - if not mirror_integration_id: - return None - - # If no storage integration was selected, then delete the existing - # external integration link. - if mirror_integration_id == self.NO_MIRROR_INTEGRATION: - current_integration_link = get_one( - self._db, - ExternalIntegrationLink, - library_id=None, - external_integration_id=service.id, - purpose=purpose, - ) - - if current_integration_link: - self._db.delete(current_integration_link) - else: - storage_integration = get_one( - self._db, ExternalIntegration, id=mirror_integration_id - ) - - # Only get storage integrations that have a specific configuration setting set. - # For example: a specific bucket. - if ( - not storage_integration - or not storage_integration.setting(setting_key).value - ): - return MISSING_INTEGRATION - - current_integration_link_created, ignore = get_one_or_create( - self._db, - ExternalIntegrationLink, - library_id=None, - external_integration_id=service.id, - purpose=purpose, - ) - - current_integration_link_created.other_integration_id = ( - storage_integration.id - ) - - return None - def _get_settings_class( self, registry: IntegrationRegistry, protocol_name: str, is_child=False ) -> Type[BaseSettings] | ProblemDetail | None: @@ -233,29 +171,14 @@ def _get_integration_info(self, goal, protocols): settings = dict() for setting in protocol.get("settings", []): key = setting.get("key") - - # If the setting is a covers or books mirror, we need to get - # the value from ExternalIntegrationLink and - # not from a ConfigurationSetting. 
- if key.endswith("mirror_integration_id"): - storage_integration = get_one( - self._db, - ExternalIntegrationLink, - external_integration_id=service.id, - ) - if storage_integration: - value = str(storage_integration.other_integration_id) - else: - value = self.NO_MIRROR_INTEGRATION + if setting.get("type") in ("list", "menu"): + value = ConfigurationSetting.for_externalintegration( + key, service + ).json_value else: - if setting.get("type") in ("list", "menu"): - value = ConfigurationSetting.for_externalintegration( - key, service - ).json_value - else: - value = ConfigurationSetting.for_externalintegration( - key, service - ).value + value = ConfigurationSetting.for_externalintegration( + key, service + ).value settings[key] = value service_info = dict( diff --git a/core/model/__init__.py b/core/model/__init__.py index df0021276c..3c1bf65684 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -537,11 +537,7 @@ def _bulk_operation(self): CollectionMissing, collections_identifiers, ) -from core.model.configuration import ( - ConfigurationSetting, - ExternalIntegration, - ExternalIntegrationLink, -) +from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.contributor import Contribution, Contributor from core.model.coverage import ( BaseCoverageRecord, diff --git a/core/model/collection.py b/core/model/collection.py index 1237ddfc8a..dec8041d3f 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -1,7 +1,6 @@ # Collection, CollectionIdentifier, CollectionMissing from __future__ import annotations -import logging from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING, List, Optional @@ -946,13 +945,6 @@ def delete(self, search_index=None): # Delete the ExternalIntegration associated with this # Collection, assuming it wasn't deleted already. if self.external_integration: - for link in self.external_integration.links: - if link.other_integration and link.other_integration.goal == "storage": - logging.info( - f"Deletion of collection {self.name} is disassociating " - f"storage integration {link.other_integration.name}." - ) - _db.delete(self.external_integration) # Now delete the Collection itself. diff --git a/core/model/configuration.py b/core/model/configuration.py index 1056629d8f..a47218a8db 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -26,38 +26,6 @@ from core.model import Collection # noqa: autoflake -class ExternalIntegrationLink(Base): - __tablename__ = "externalintegrationslinks" - - NO_MIRROR_INTEGRATION = "NO_MIRROR" - # Possible purposes that a storage external integration can be used for. - # These string literals may be stored in the database, so changes to them - # may need to be accompanied by a DB migration. 
- COVERS = "covers_mirror" - COVERS_KEY = f"{COVERS}_integration_id" - - OPEN_ACCESS_BOOKS = "books_mirror" - OPEN_ACCESS_BOOKS_KEY = f"{OPEN_ACCESS_BOOKS}_integration_id" - - PROTECTED_ACCESS_BOOKS = "protected_access_books_mirror" - PROTECTED_ACCESS_BOOKS_KEY = f"{PROTECTED_ACCESS_BOOKS}_integration_id" - - ANALYTICS = "analytics_mirror" - ANALYTICS_KEY = f"{ANALYTICS}_integration_id" - - MARC = "MARC_mirror" - - id = Column(Integer, primary_key=True) - external_integration_id = Column( - Integer, ForeignKey("externalintegrations.id"), index=True - ) - library_id = Column(Integer, ForeignKey("libraries.id"), index=True) - other_integration_id = Column( - Integer, ForeignKey("externalintegrations.id"), index=True - ) - purpose = Column(Unicode, index=True) - - class ExternalIntegration(Base): """An external integration contains configuration for connecting @@ -224,20 +192,6 @@ class ExternalIntegration(Base): foreign_keys="Collection.external_integration_id", ) - links: Mapped[List[ExternalIntegrationLink]] = relationship( - "ExternalIntegrationLink", - backref="integration", - foreign_keys="ExternalIntegrationLink.external_integration_id", - cascade="all, delete-orphan", - ) - - other_links: Mapped[List[ExternalIntegrationLink]] = relationship( - "ExternalIntegrationLink", - backref="other_integration", - foreign_keys="ExternalIntegrationLink.other_integration_id", - cascade="all, delete-orphan", - ) - libraries: Mapped[List[Library]] = relationship( "Library", back_populates="integrations", @@ -260,40 +214,6 @@ def for_goal(cls, _db, goal): return integrations - @classmethod - def for_collection_and_purpose(cls, _db, collection, purpose): - """Find the ExternalIntegration for the collection. - - :param collection: Use the mirror configuration for this Collection. - :param purpose: Use the purpose of the mirror configuration. - """ - qu = ( - _db.query(cls) - .join( - ExternalIntegrationLink, - ExternalIntegrationLink.other_integration_id == cls.id, - ) - .filter( - ExternalIntegrationLink.external_integration_id - == collection.external_integration_id, - ExternalIntegrationLink.purpose == purpose, - ) - ) - integrations = qu.all() - if not integrations: - raise CannotLoadConfiguration( - "No storage integration for collection '%s' and purpose '%s' is configured." 
- % (collection.name, purpose) - ) - if len(integrations) > 1: - raise CannotLoadConfiguration( - "Multiple integrations found for collection '%s' and purpose '%s'" - % (collection.name, purpose) - ) - - [integration] = integrations - return integration - @classmethod def lookup(cls, _db, protocol, goal, library=None): integrations = _db.query(cls).filter(cls.protocol == protocol, cls.goal == goal) diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index 4d53c70b0d..7d10ba373d 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -4,11 +4,7 @@ from core.config import CannotLoadConfiguration, Configuration from core.model import create, get_one from core.model.collection import Collection -from core.model.configuration import ( - ConfigurationSetting, - ExternalIntegration, - ExternalIntegrationLink, -) +from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.datasource import DataSource from tests.fixtures.database import DatabaseTransactionFixture @@ -399,61 +395,6 @@ def test_duplicate_library_integration_setting( pytest.raises(IntegrityError, db.session.flush) -class TestExternalIntegrationLink: - def test_relationships(self, db: DatabaseTransactionFixture): - # Create a collection with two storage external integrations. - collection = db.collection( - name="Collection", - protocol=ExternalIntegration.OVERDRIVE, - ) - - storage1 = db.external_integration( - name="integration1", - protocol="protocol", - ) - storage2 = db.external_integration( - name="integration2", - protocol="protocol", - goal="storage", - username="username", - password="password", - ) - - # Two external integration links need to be created to associate - # the collection's external integration with the two storage - # external integrations. - s1_external_integration_link = db.external_integration_link( - integration=collection.external_integration, - other_integration=storage1, - purpose="covers_mirror", - ) - s2_external_integration_link = db.external_integration_link( - integration=collection.external_integration, - other_integration=storage2, - purpose="books_mirror", - ) - - qu = db.session.query(ExternalIntegrationLink).order_by( - ExternalIntegrationLink.other_integration_id - ) - external_integration_links = qu.all() - - assert len(external_integration_links) == 2 - assert external_integration_links[0].other_integration_id == storage1.id - assert external_integration_links[1].other_integration_id == storage2.id - - # When a storage integration is deleted, the related external - # integration link row is deleted, and the relationship with the - # collection is removed. 
- db.session.delete(storage1) - - qu = db.session.query(ExternalIntegrationLink) - external_integration_links = qu.all() - - assert len(external_integration_links) == 1 - assert external_integration_links[0].other_integration_id == storage2.id - - class ExampleExternalIntegrationFixture: external_integration: ExternalIntegration database_fixture: DatabaseTransactionFixture @@ -519,33 +460,6 @@ def test_for_library_and_goal( db.default_library().name, goal ) in str(excinfo.value) - def test_for_collection_and_purpose( - self, example_externalintegration_fixture: ExampleExternalIntegrationFixture - ): - db = example_externalintegration_fixture.database_fixture - wrong_purpose = "isbn" - collection = db.collection() - - with pytest.raises(CannotLoadConfiguration) as excinfo: - ExternalIntegration.for_collection_and_purpose( - db.session, collection, wrong_purpose - ) - assert ( - "No storage integration for collection '%s' and purpose '%s' is configured" - % (collection.name, wrong_purpose) - in str(excinfo.value) - ) - - external_integration = db.external_integration("some protocol") - collection.external_integration_id = external_integration.id - purpose = "covers_mirror" - db.external_integration_link(integration=external_integration, purpose=purpose) - - integration = ExternalIntegration.for_collection_and_purpose( - db.session, collection=collection, purpose=purpose - ) - assert isinstance(integration, ExternalIntegration) - def test_with_setting_value( self, example_externalintegration_fixture: ExampleExternalIntegrationFixture ): @@ -700,48 +614,3 @@ def test_custom_accept_header( # Must be the same value if set integration.custom_accept_header = "custom header" assert integration.custom_accept_header == "custom header" - - def test_delete( - self, example_externalintegration_fixture: ExampleExternalIntegrationFixture - ): - """Ensure that ExternalIntegration.delete clears all orphan ExternalIntegrationLinks.""" - session = example_externalintegration_fixture.database_fixture.session - db = example_externalintegration_fixture.database_fixture - - integration1 = db.external_integration( - "protocol", - ExternalIntegration.LICENSE_GOAL, - libraries=[db.default_library()], - ) - integration2 = db.external_integration( - "storage", - "storage goal", - libraries=[db.default_library()], - ) - - # Set up a link associating integration2 with integration1. - link1 = db.external_integration_link( - integration1, - db.default_library(), - integration2, - ExternalIntegrationLink.PROTECTED_ACCESS_BOOKS, - ) - link2 = db.external_integration_link( - integration1, - db.default_library(), - integration2, - ExternalIntegrationLink.COVERS, - ) - - # Delete integration1. - session.delete(integration1) - - # Ensure that there are no orphan links. - links = session.query(ExternalIntegrationLink).all() - for link in (link1, link2): - assert link not in links - - # Ensure that the first integration was successfully removed. 
- external_integrations = session.query(ExternalIntegration).all() - assert integration1 not in external_integrations - assert integration2 in external_integrations diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 4b0d36a764..54d1c98f7a 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -36,7 +36,6 @@ DeliveryMechanism, Edition, ExternalIntegration, - ExternalIntegrationLink, Genre, Hyperlink, Identifier, @@ -723,31 +722,6 @@ def external_integration( return integration - def external_integration_link( - self, - integration=None, - library=None, - other_integration=None, - purpose="covers_mirror", - ): - integration = integration or self.external_integration("some protocol") - other_integration = other_integration or self.external_integration( - "some other protocol" - ) - - library_id = library.id if library else None - - external_integration_link, ignore = get_one_or_create( - self.session, - ExternalIntegrationLink, - library_id=library_id, - external_integration_id=integration.id, - other_integration_id=other_integration.id, - purpose=purpose, - ) - - return external_integration_link - def integration_configuration( self, protocol: str, goal=None, libraries=None, name=None, **kwargs ): From 3f8ca562daa4fdd5ccce3b4e8e9dc12a6a683659 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 6 Oct 2023 10:01:44 -0300 Subject: [PATCH 091/262] Refactor CirculationAPI to remove unused parameters (PP-500) (#1437) Remove the unused part and fulfill_part_url parameters from the fulfill call in CirculationAPI. Instead of passing internal_format into each API, let the APIs opt into this with a CirculationInternalFormatsMixin mixin class. Removing internal_format makes sense because only a couple APIs actually use it, the others ignore it. The forthcoming OPDS and OPDS2 APIs will also ignore this, so it made sense to refactor it out. 
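[Editor's note: a rough sketch, not code from this patch, of what opting into vendor-specific formats looks like after this change; `ExampleVendorAPI` is hypothetical, while the mixin and constants are the ones shown in the diff below.]

```python
from api.circulation import CirculationInternalFormatsMixin
from core.model import DeliveryMechanism, Representation


class ExampleVendorAPI(CirculationInternalFormatsMixin):  # hypothetical vendor API
    # (content type, DRM scheme) pairs mapped to the vendor's own format names;
    # internal_format() looks a LicensePoolDeliveryMechanism up in this table
    # and raises DeliveryMechanismError for combinations the vendor can't serve.
    delivery_mechanism_to_internal_format = {
        (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM): "ePub",
    }
```

APIs that have no vendor-specific format names (such as the OPDS importers mentioned above) simply do not inherit the mixin and receive the LicensePoolDeliveryMechanism directly in checkout() and fulfill().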
--- api/axis.py | 44 +++++--- api/bibliotheca.py | 53 +++++----- api/circulation.py | 127 ++++++++---------------- api/controller.py | 23 +---- api/enki.py | 53 +++++----- api/odilo.py | 68 ++++++++----- api/odl.py | 46 +++------ api/opds_for_distributors.py | 103 ++++++++++--------- api/overdrive.py | 41 +++++--- api/routes.py | 6 +- pyproject.toml | 2 +- tests/api/mockapi/circulation.py | 18 ++-- tests/api/test_axis.py | 22 ++-- tests/api/test_bibliotheca.py | 16 ++- tests/api/test_controller_loan.py | 67 ++----------- tests/api/test_enki.py | 11 +- tests/api/test_odilo.py | 47 ++++++--- tests/api/test_odl.py | 9 +- tests/api/test_opds_for_distributors.py | 30 ++++-- tests/api/test_overdrive.py | 38 ++++--- tests/api/test_routes.py | 13 +-- 21 files changed, 394 insertions(+), 443 deletions(-) diff --git a/api/axis.py b/api/axis.py index 9308d6204f..7ef7c2f0fc 100644 --- a/api/axis.py +++ b/api/axis.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import html import json import logging @@ -19,6 +21,7 @@ APIAwareFulfillmentInfo, BaseCirculationAPI, BaseCirculationLoanSettings, + CirculationInternalFormatsMixin, FulfillmentInfo, HoldInfo, LoanInfo, @@ -29,7 +32,6 @@ from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider, CoverageFailure -from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -55,8 +57,11 @@ ExternalIntegration, Hyperlink, Identifier, + LicensePool, + LicensePoolDeliveryMechanism, LinkRelations, MediaTypes, + Patron, Representation, Session, Subject, @@ -134,8 +139,8 @@ class Axis360LibrarySettings(BaseCirculationLoanSettings): class Axis360API( BaseCirculationAPI[Axis360Settings, Axis360LibrarySettings], HasCollectionSelfTests, + CirculationInternalFormatsMixin, Axis360APIConstants, - HasLibraryIntegrationConfiguration, ): NAME = ExternalIntegration.AXIS_360 @@ -149,8 +154,6 @@ class Axis360API( fulfillment_endpoint = "getfullfillmentInfo/v2" audiobook_metadata_endpoint = "getaudiobookmetadata/v2" - log = logging.getLogger("Axis 360 API") - # Create a lookup table between common DeliveryMechanism identifiers # and Axis 360 format types. epub = Representation.EPUB_MEDIA_TYPE @@ -384,10 +387,18 @@ def _checkin(self, title_id, patron_id): ) return self.request(url, method="GET", verbose=True) - def checkout(self, patron, pin, licensepool, internal_format): + def checkout( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> LoanInfo: title_id = licensepool.identifier.identifier patron_id = patron.authorization_identifier - response = self._checkout(title_id, patron_id, internal_format) + response = self._checkout( + title_id, patron_id, self.internal_format(delivery_mechanism) + ) try: return CheckoutResponseParser(licensepool.collection).process_all( response.content @@ -401,18 +412,21 @@ def _checkout(self, title_id, patron_id, internal_format): response = self.request(url, data=args, method="POST") return response - def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): - """Fulfill a patron's request for a specific book. - - :param kwargs: A container for arguments to fulfill() - which are not relevant to this vendor. - - :return: a FulfillmentInfo object. 
- """ + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> FulfillmentInfo: + """Fulfill a patron's request for a specific book.""" identifier = licensepool.identifier # This should include only one 'activity'. activities = self.patron_activity( - patron, pin, licensepool.identifier, internal_format + patron, + pin, + licensepool.identifier, + self.internal_format(delivery_mechanism), ) for loan in activities: if not isinstance(loan, LoanInfo): diff --git a/api/bibliotheca.py b/api/bibliotheca.py index 9c42557f62..eae9d108a1 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import hashlib import hmac import html @@ -29,7 +31,6 @@ from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider -from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -59,7 +60,9 @@ Hyperlink, Identifier, LicensePool, + LicensePoolDeliveryMechanism, Measurement, + Patron, Representation, Session, Subject, @@ -116,7 +119,6 @@ class BibliothecaLibrarySettings(BaseCirculationLoanSettings): class BibliothecaAPI( BaseCirculationAPI[BibliothecaSettings, BibliothecaLibrarySettings], HasCollectionSelfTests, - HasLibraryIntegrationConfiguration, ): NAME = ExternalIntegration.BIBLIOTHECA AUTH_TIME_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" @@ -127,8 +129,6 @@ class BibliothecaAPI( AUTHORIZATION_HEADER = "3mcl-Authorization" VERSION_HEADER = "3mcl-Version" - log = logging.getLogger("Bibliotheca API") - DEFAULT_VERSION = "2.0" DEFAULT_BASE_URL = "https://partner.yourcloudlibrary.com/" @@ -138,19 +138,6 @@ class BibliothecaAPI( SERVICE_NAME = "Bibliotheca" - # Create a lookup table between common DeliveryMechanism identifiers - # and Overdrive format types. - adobe_drm = DeliveryMechanism.ADOBE_DRM - findaway_drm = DeliveryMechanism.FINDAWAY_DRM - delivery_mechanism_to_internal_format = { - (Representation.EPUB_MEDIA_TYPE, adobe_drm): "ePub", - (Representation.PDF_MEDIA_TYPE, adobe_drm): "PDF", - (None, findaway_drm): "MP3", - } - internal_format_to_delivery_mechanism = { - v: k for k, v in list(delivery_mechanism_to_internal_format.items()) - } - @classmethod def settings_class(cls): return BibliothecaSettings @@ -418,7 +405,13 @@ def patron_activity(self, patron, pin): TEMPLATE = "<%(request_type)s>%(item_id)s%(patron_id)s" - def checkout(self, patron_obj, patron_password, licensepool, delivery_mechanism): + def checkout( + self, + patron_obj: Patron, + patron_password: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> LoanInfo: """Check out a book on behalf of a patron. :param patron_obj: a Patron object for the patron who wants @@ -467,18 +460,18 @@ def checkout(self, patron_obj, patron_password, licensepool, delivery_mechanism) ) return loan - def fulfill(self, patron, password, pool, internal_format, **kwargs): - """Get the actual resource file to the patron. - - :param kwargs: A container for standard arguments to fulfill() - which are not relevant to this implementation. - - :return: a FulfillmentInfo object. 
- """ - media_type, drm_scheme = self.internal_format_to_delivery_mechanism.get( - internal_format, internal_format - ) - if drm_scheme == DeliveryMechanism.FINDAWAY_DRM: + def fulfill( + self, + patron: Patron, + password: str, + pool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> FulfillmentInfo: + """Get the actual resource file to the patron.""" + if ( + delivery_mechanism.delivery_mechanism.drm_scheme + == DeliveryMechanism.FINDAWAY_DRM + ): fulfill_method = self.get_audio_fulfillment_file content_transformation = self.findaway_license_to_webpub_manifest else: diff --git a/api/circulation.py b/api/circulation.py index 40fe01d5ba..b0a3b0409e 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -10,7 +10,6 @@ from typing import ( TYPE_CHECKING, Any, - Callable, Dict, Generic, List, @@ -59,6 +58,7 @@ ) from core.model.integration import IntegrationConfiguration from core.util.datetime_helpers import utc_now +from core.util.log import LoggerMixin if TYPE_CHECKING: pass @@ -574,9 +574,43 @@ def configuration(self) -> SettingsType: return self.settings_class()(**self.integration_configuration().settings_dict) # type: ignore[return-value] +class CirculationInternalFormatsMixin: + """A mixin for CirculationAPIs that have internal formats.""" + + # Different APIs have different internal names for delivery + # mechanisms. This is a mapping of (content_type, drm_type) + # 2-tuples to those internal names. + # + # For instance, the combination ("application/epub+zip", + # "vnd.adobe/adept+xml") is called "ePub" in Axis 360 and 3M, but + # is called "ebook-epub-adobe" in Overdrive. + delivery_mechanism_to_internal_format: Dict[ + Tuple[Optional[str], Optional[str]], str + ] = {} + + def internal_format(self, delivery_mechanism: LicensePoolDeliveryMechanism) -> str: + """Look up the internal format for this delivery mechanism or + raise an exception. + + :param delivery_mechanism: A LicensePoolDeliveryMechanism + """ + d = delivery_mechanism.delivery_mechanism + key = (d.content_type, d.drm_scheme) + internal_format = self.delivery_mechanism_to_internal_format.get(key) + if internal_format is None: + raise DeliveryMechanismError( + _( + "Could not map delivery mechanism %(mechanism_name)s to internal delivery mechanism!", + mechanism_name=d.name, + ) + ) + return internal_format + + class BaseCirculationAPI( CirculationConfigurationMixin[SettingsType, LibrarySettingsType], HasLibraryIntegrationConfiguration, + LoggerMixin, ABC, ): """Encapsulates logic common to all circulation APIs.""" @@ -595,17 +629,6 @@ class BaseCirculationAPI( # delivery mechanisms (3M), set this to None. SET_DELIVERY_MECHANISM_AT: Optional[str] = FULFILL_STEP - # Different APIs have different internal names for delivery - # mechanisms. This is a mapping of (content_type, drm_type) - # 2-tuples to those internal names. - # - # For instance, the combination ("application/epub+zip", - # "vnd.adobe/adept+xml") is called "ePub" in Axis 360 and 3M, but - # is called "ebook-epub-adobe" in Overdrive. 
- delivery_mechanism_to_internal_format: Dict[ - Tuple[Optional[str], Optional[str]], str - ] = {} - def __init__(self, _db: Session, collection: Collection): self._db = _db self._integration_configuration_id = collection.integration_configuration.id @@ -617,28 +640,6 @@ def collection(self) -> Collection | None: return None return Collection.by_id(self._db, id=self.collection_id) - def internal_format( - self, delivery_mechanism: Optional[LicensePoolDeliveryMechanism] - ) -> Optional[str]: - """Look up the internal format for this delivery mechanism or - raise an exception. - - :param delivery_mechanism: A LicensePoolDeliveryMechanism - """ - if not delivery_mechanism: - return None - d = delivery_mechanism.delivery_mechanism - key = (d.content_type, d.drm_scheme) - internal_format = self.delivery_mechanism_to_internal_format.get(key) - if not internal_format: - raise DeliveryMechanismError( - _( - "Could not map Simplified delivery mechanism %(mechanism_name)s to internal delivery mechanism!", - mechanism_name=d.name, - ) - ) - return internal_format - @classmethod def default_notification_email_address( self, library_or_patron: Library | Patron, pin: str @@ -670,14 +671,14 @@ def checkout( patron: Patron, pin: str, licensepool: LicensePool, - internal_format: Optional[str], + delivery_mechanism: LicensePoolDeliveryMechanism, ) -> LoanInfo | HoldInfo: """Check out a book on behalf of a patron. :param patron: a Patron object for the patron who wants to check out the book. :param pin: The patron's alleged password. :param licensepool: Contains lending info as well as link to parent Identifier. - :param internal_format: Represents the patron's desired book format. + :param delivery_mechanism: Represents the patron's desired book format. :return: a LoanInfo object. """ @@ -698,29 +699,9 @@ def fulfill( patron: Patron, pin: str, licensepool: LicensePool, - internal_format: Optional[str] = None, - part: Optional[str] = None, - fulfill_part_url: Optional[Callable[[Optional[str]], str]] = None, + delivery_mechanism: LicensePoolDeliveryMechanism, ) -> FulfillmentInfo: - """Get the actual resource file to the patron. - - Implementations are encouraged to define ``**kwargs`` as a container - for vendor-specific arguments, so that they don't have to change - as new arguments are added. - - :param internal_format: A vendor-specific name indicating - the format requested by the patron. - - :param part: A vendor-specific identifier indicating that the - patron wants to fulfill one specific part of the book - (e.g. one chapter of an audiobook), not the whole thing. - - :param fulfill_part_url: A function that takes one argument (a - vendor-specific part identifier) and returns the URL to use - when fulfilling that part. - - :return: a FulfillmentInfo object. - """ + """Get the actual resource file to the patron.""" ... @abstractmethod @@ -1089,10 +1070,6 @@ def borrow( if must_set_delivery_mechanism and not delivery_mechanism: raise DeliveryMechanismMissing() - content_link = content_expires = None - - internal_format = api.internal_format(delivery_mechanism) - # Do we (think we) already have this book out on loan? existing_loan = get_one( self._db, @@ -1154,7 +1131,9 @@ def borrow( # available -- someone else may have checked it in since we # last looked. 
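# Illustrative sketch of how a vendor API adopts the CirculationInternalFormatsMixin
# defined above: it declares the (content_type, drm_scheme) -> vendor-format table and
# lets internal_format() resolve each LicensePoolDeliveryMechanism, raising
# DeliveryMechanismError for combinations missing from the table. The class name here
# is hypothetical; the mapping values mirror the Bibliotheca table removed earlier.
from api.circulation import CirculationInternalFormatsMixin
from core.model import DeliveryMechanism, Representation


class ExampleVendorFormats(CirculationInternalFormatsMixin):
    delivery_mechanism_to_internal_format = {
        (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM): "ePub",
        (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM): "PDF",
    }


# For an Adobe-DRM EPUB mechanism `lpdm`, ExampleVendorFormats().internal_format(lpdm)
# returns "ePub"; an unmapped mechanism raises DeliveryMechanismError instead.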
try: - loan_info = api.checkout(patron, pin, licensepool, internal_format) + loan_info = api.checkout( + patron, pin, licensepool, delivery_mechanism=delivery_mechanism + ) if isinstance(loan_info, HoldInfo): # If the API couldn't give us a loan, it may have given us @@ -1426,8 +1405,6 @@ def fulfill( pin: str, licensepool: LicensePool, delivery_mechanism: LicensePoolDeliveryMechanism, - part: Optional[str] = None, - fulfill_part_url: Optional[Callable[[Optional[str]], str]] = None, sync_on_failure: bool = True, ) -> FulfillmentInfo: """Fulfil a book that a patron has previously checked out. @@ -1438,14 +1415,6 @@ def fulfill( mechanism, this parameter is ignored and the previously used mechanism takes precedence. - :param part: A vendor-specific identifier indicating that the - patron wants to fulfill one specific part of the book - (e.g. one chapter of an audiobook), not the whole thing. - - :param fulfill_part_url: A function that takes one argument (a - vendor-specific part identifier) and returns the URL to use - when fulfilling that part. - :return: A FulfillmentInfo object. """ @@ -1470,8 +1439,6 @@ def fulfill( pin, licensepool=licensepool, delivery_mechanism=delivery_mechanism, - part=part, - fulfill_part_url=fulfill_part_url, sync_on_failure=False, ) else: @@ -1508,19 +1475,11 @@ def fulfill( if not api: raise CannotFulfill() - internal_format = api.internal_format(delivery_mechanism) - - # Here we _do_ pass in the vendor-specific arguments, but - # we pass them in as keyword arguments, to minimize the - # impact on implementation signatures. Most vendor APIs - # will ignore one or more of these arguments. fulfillment = api.fulfill( patron, pin, licensepool, - internal_format=internal_format, - part=part, - fulfill_part_url=fulfill_part_url, + delivery_mechanism=delivery_mechanism, ) if not fulfillment or not (fulfillment.content_link or fulfillment.content): raise NoAcceptableFormat() diff --git a/api/controller.py b/api/controller.py index e382f5ff15..e19aa1aed7 100644 --- a/api/controller.py +++ b/api/controller.py @@ -1663,7 +1663,6 @@ def fulfill( self, license_pool_id: int, mechanism_id: int | None = None, - part: str | None = None, do_get: Any | None = None, ) -> wkResponse | ProblemDetail: """Fulfill a book that has already been checked out, @@ -1676,10 +1675,6 @@ def fulfill( :param license_pool_id: Database ID of a LicensePool. :param mechanism_id: Database ID of a DeliveryMechanism. - - :param part: Vendor-specific identifier used when fulfilling a - specific part of a book rather than the whole thing (e.g. a - single chapter of an audiobook). """ do_get = do_get or Representation.simple_http_get @@ -1751,26 +1746,12 @@ def fulfill( _("You must specify a delivery mechanism to fulfill this loan.") ) - # Define a function that, given a part identifier, will create - # an appropriate link to this controller. 
- def fulfill_part_url(part): - return url_for( - "fulfill", - license_pool_id=requested_license_pool.id, - mechanism_id=mechanism.delivery_mechanism.id, - library_short_name=library.short_name, - part=str(part), - _external=True, - ) - try: fulfillment = self.circulation.fulfill( patron, credential, requested_license_pool, mechanism, - part=part, - fulfill_part_url=fulfill_part_url, ) except DeliveryMechanismConflict as e: return DELIVERY_CONFLICT.detailed(str(e)) @@ -1779,10 +1760,10 @@ def fulfill_part_url(part): _("Can't fulfill loan because you have no active loan for this book."), status_code=e.status_code, ) - except CannotFulfill as e: - return CANNOT_FULFILL.with_debug(str(e), status_code=e.status_code) except FormatNotAvailable as e: return NO_ACCEPTABLE_FORMAT.with_debug(str(e), status_code=e.status_code) + except CannotFulfill as e: + return CANNOT_FULFILL.with_debug(str(e), status_code=e.status_code) except DeliveryMechanismError as e: return BAD_DELIVERY_MECHANISM.with_debug(str(e), status_code=e.status_code) diff --git a/api/enki.py b/api/enki.py index aa00bba596..db586ecad5 100644 --- a/api/enki.py +++ b/api/enki.py @@ -10,7 +10,6 @@ from api.circulation_exceptions import * from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics -from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -35,6 +34,9 @@ Edition, Hyperlink, Identifier, + LicensePool, + LicensePoolDeliveryMechanism, + Patron, Representation, Subject, ) @@ -81,7 +83,6 @@ class EnkiAPI( BaseCirculationAPI[EnkiSettings, EnkiLibrarySettings], HasCollectionSelfTests, EnkiConstants, - HasLibraryIntegrationConfiguration, ): ENKI_LIBRARY_ID_KEY = "enki_library_id" DESCRIPTION = _("Integrate an Enki collection.") @@ -95,17 +96,6 @@ class EnkiAPI( ENKI_EXTERNAL = NAME ENKI_ID = "Enki ID" - # Create a lookup table between common DeliveryMechanism identifiers - # and Enki format types. - epub = Representation.EPUB_MEDIA_TYPE - adobe_drm = DeliveryMechanism.ADOBE_DRM - no_drm = DeliveryMechanism.NO_DRM - - delivery_mechanism_to_internal_format = { - (epub, no_drm): "free", - (epub, adobe_drm): "acs", - } - # Enki API serves all responses with a 200 error code and a # text/html Content-Type. However, there's a string that # reliably shows up in error pages which is unlikely to show up @@ -114,7 +104,6 @@ class EnkiAPI( SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.FULFILL_STEP SERVICE_NAME = "Enki" - log = logging.getLogger("Enki API") @classmethod def settings_class(cls): @@ -368,7 +357,13 @@ def _epoch_to_struct(cls, epoch_string): time.strftime(time_format, time.gmtime(float(epoch_string))), time_format ) - def checkout(self, patron, pin, licensepool, internal_format): + def checkout( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> LoanInfo: identifier = licensepool.identifier enki_id = identifier.identifier enki_library_id = self.enki_library_id(patron.library) @@ -421,14 +416,14 @@ def loan_request(self, barcode, pin, book_id, enki_library_id): response = self.request(url, method="get", params=args) return response - def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): - """Get the actual resource file to the patron. - - :param kwargs: A container for arguments to fulfill() - which are not relevant to this vendor. - - :return: a FulfillmentInfo object. 
- """ + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> FulfillmentInfo: + """Get the actual resource file to the patron.""" book_id = licensepool.identifier.identifier enki_library_id = self.enki_library_id(patron.library) response = self.loan_request( @@ -454,11 +449,11 @@ def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): # whether the content link points to the actual book or an # ACSM file) but since Enki titles only have a single delivery # mechanism, it's easy to make a guess. - drm_type = self.no_drm + drm_type = DeliveryMechanism.NO_DRM for lpdm in licensepool.delivery_mechanisms: - delivery_mechanism = lpdm.delivery_mechanism - if delivery_mechanism: - drm_type = delivery_mechanism.drm_scheme + mechanism = lpdm.delivery_mechanism + if mechanism: + drm_type = mechanism.drm_scheme break return FulfillmentInfo( @@ -683,9 +678,9 @@ def extract_circulation(self, primary_identifier, availability, formattype): licenses_owned = availability.get("totalCopies", 0) licenses_available = availability.get("availableCopies", 0) hold = availability.get("onHold", 0) - drm_type = EnkiAPI.no_drm + drm_type = DeliveryMechanism.NO_DRM if availability.get("accessType") == "acs": - drm_type = EnkiAPI.adobe_drm + drm_type = DeliveryMechanism.ADOBE_DRM formats = [] content_type = None diff --git a/api/odilo.py b/api/odilo.py index 7cd7a0d19a..e92d9c9cce 100644 --- a/api/odilo.py +++ b/api/odilo.py @@ -8,13 +8,18 @@ from pydantic import HttpUrl from sqlalchemy.orm.session import Session -from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo +from api.circulation import ( + BaseCirculationAPI, + CirculationInternalFormatsMixin, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) from api.circulation_exceptions import * from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider -from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField from core.metadata_layer import ( CirculationData, @@ -37,6 +42,9 @@ ExternalIntegration, Hyperlink, Identifier, + LicensePool, + LicensePoolDeliveryMechanism, + Patron, Representation, Subject, ) @@ -343,9 +351,8 @@ class OdiloLibrarySettings(BaseSettings): class OdiloAPI( BaseCirculationAPI[OdiloSettings, OdiloLibrarySettings], HasCollectionSelfTests, - HasLibraryIntegrationConfiguration, + CirculationInternalFormatsMixin, ): - log = logging.getLogger("Odilo API") LIBRARY_API_BASE_URL = "library_api_base_url" NAME = ExternalIntegration.ODILO @@ -621,7 +628,13 @@ def token_post(self, url, payload, headers={}, **kwargs): self.library_api_base_url + url, payload, headers, **kwargs ) - def checkout(self, patron, pin, licensepool, internal_format): + def checkout( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> LoanInfo: """Check out a book on behalf of a patron. :param patron: a Patron object for the patron who wants @@ -632,17 +645,18 @@ def checkout(self, patron, pin, licensepool, internal_format): :param licensepool: Identifier of the book to be checked out is attached to this licensepool. - :param internal_format: Represents the patron's desired book format. 
+ :param delivery_mechanism: Represents the patron's desired book format. :return: a LoanInfo object. """ record_id = licensepool.identifier.identifier + internal_format = self.internal_format(delivery_mechanism) # Data just as 'x-www-form-urlencoded', no JSON payload = dict( patronId=patron.authorization_identifier, - format=internal_format, + format=self.internal_format, ) response = self.patron_request( @@ -756,14 +770,15 @@ def get_hold(self, patron, pin, record_id): ) ) - def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): - """Get the actual resource file to the patron. - - :param kwargs: A container for arguments to fulfill() - which are not relevant to this vendor. - - :return: a FulfillmentInfo object. - """ + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> FulfillmentInfo: + """Get the actual resource file to the patron.""" + internal_format = self.internal_format(delivery_mechanism) record_id = licensepool.identifier.identifier content_link, content, content_type = self.get_fulfillment_link( patron, pin, record_id, internal_format @@ -774,17 +789,18 @@ def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): "Odilo record_id %s was not available as %s" % (record_id, internal_format) ) - else: - return FulfillmentInfo( - licensepool.collection, - DataSource.ODILO, - Identifier.ODILO_ID, - record_id, - content_link=content_link, - content=content, - content_type=content_type, - content_expires=None, - ) + raise CannotFulfill() + + return FulfillmentInfo( + licensepool.collection, + DataSource.ODILO, + Identifier.ODILO_ID, + record_id, + content_link=content_link, + content=content, + content_type=content_type, + content_expires=None, + ) def get_fulfillment_link(self, patron, pin, record_id, format_type): """Get the link corresponding to an existing checkout.""" diff --git a/api/odl.py b/api/odl.py index a0334594f4..62e224e0fe 100644 --- a/api/odl.py +++ b/api/odl.py @@ -3,10 +3,9 @@ import binascii import datetime import json -import logging import uuid from abc import ABC -from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, Union +from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type import dateutil from flask import url_for @@ -266,14 +265,6 @@ def external_integration(self, db: Session) -> ExternalIntegration: """ return self.collection.external_integration - def internal_format( # type: ignore[override] - self, delivery_mechanism: Optional[LicensePoolDeliveryMechanism] - ) -> Optional[LicensePoolDeliveryMechanism]: - """Each consolidated copy is only available in one format, so we don't need - a mapping to internal formats. - """ - return delivery_mechanism - @property def collection(self) -> Collection: """Return a collection associated with this object. 
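# The per-API `log = logging.getLogger(...)` attributes removed in this patch are
# replaced by core.util.log.LoggerMixin, which BaseCirculationAPI now inherits.
# A rough sketch of the convention, assuming the mixin provides both the `log`
# attribute and the `logger()` classmethod seen at the call sites in this diff;
# the class below is hypothetical.
from core.util.log import LoggerMixin


class ExampleImporter(LoggerMixin):
    def run(self) -> None:
        self.log.info("instance call sites keep using self.log")

    @classmethod
    def parse(cls) -> None:
        cls.logger().warning("classmethods use cls.logger() instead of module-level logging")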
@@ -455,7 +446,7 @@ def checkout( patron: Patron, pin: str, licensepool: LicensePool, - internal_format: Optional[str], + delivery_mechanism: LicensePoolDeliveryMechanism, ) -> LoanInfo: """Create a new loan.""" _db = Session.object_session(patron) @@ -555,9 +546,7 @@ def fulfill( patron: Patron, pin: str, licensepool: LicensePool, - internal_format: Optional[str] = None, - part: Optional[str] = None, - fulfill_part_url: Optional[Callable[[Optional[str]], str]] = None, + delivery_mechanism: LicensePoolDeliveryMechanism, ) -> FulfillmentInfo: """Get the actual resource file to the patron.""" _db = Session.object_session(patron) @@ -567,7 +556,7 @@ def fulfill( .filter(Loan.patron == patron) .filter(Loan.license_pool_id == licensepool.id) ).one() - return self._fulfill(loan, internal_format) + return self._fulfill(loan, delivery_mechanism) @staticmethod def _find_content_link_and_type( @@ -595,11 +584,6 @@ def _find_content_link_and_type( # No candidates return None, None - if not drm_scheme: - # If we don't have a requested DRM scheme, so we use the first one. - # TODO: Can this just be dropped? - return candidates[0] - # For DeMarque audiobook content, we need to translate the type property # to reflect what we have stored in our delivery mechanisms. if drm_scheme == DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_DRM: @@ -610,7 +594,7 @@ def _find_content_link_and_type( def _fulfill( self, loan: Loan, - delivery_mechanism: Optional[Union[str, LicensePoolDeliveryMechanism]] = None, + delivery_mechanism: LicensePoolDeliveryMechanism, ) -> FulfillmentInfo: licensepool = loan.license_pool doc = self.get_license_status_document(loan) @@ -628,11 +612,9 @@ def _fulfill( expires = dateutil.parser.parse(expires) links = doc.get("links", []) - if isinstance(delivery_mechanism, LicensePoolDeliveryMechanism): - delivery_mechanism = delivery_mechanism.delivery_mechanism.drm_scheme content_link, content_type = self._find_content_link_and_type( - links, delivery_mechanism + links, delivery_mechanism.delivery_mechanism.drm_scheme ) return FulfillmentInfo( @@ -1001,7 +983,7 @@ def fetch_license_info( license_info_document = json.loads(response) return license_info_document # type: ignore[no-any-return] else: - logging.warning( + cls.logger().warning( f"License Info Document is not available. " f"Status link {document_link} failed with {status_code} code." ) @@ -1036,7 +1018,7 @@ def parse_license_info( document_format = license_info_document.get("format") if identifier is None: - logging.error("License info document has no identifier.") + cls.logger().error("License info document has no identifier.") return None expires = None @@ -1047,13 +1029,13 @@ def parse_license_info( if document_status is not None: status = LicenseStatus.get(document_status) if status.value != document_status: - logging.warning( + cls.logger().warning( f"Identifier # {identifier} unknown status value " f"{document_status} defaulting to {status.value}." ) else: status = LicenseStatus.unavailable - logging.warning( + cls.logger().warning( f"Identifier # {identifier} license info document does not have " f"required key 'status'." ) @@ -1062,7 +1044,7 @@ def parse_license_info( available = int(document_available) else: available = 0 - logging.warning( + cls.logger().warning( f"Identifier # {identifier} license info document does not have " f"required key 'checkouts.available'." ) @@ -1120,7 +1102,7 @@ def get_license_data( # There is a mismatch between the license info document and # the feed we are importing. 
Since we don't know which to believe # we log an error and continue. - logging.error( + cls.logger().error( f"Mismatch between license identifier in the feed ({feed_license_identifier}) " f"and the identifier in the license info document " f"({parsed_license.identifier}) ignoring license completely." @@ -1128,7 +1110,7 @@ def get_license_data( return None if parsed_license.expires != feed_license_expires: - logging.error( + cls.logger().error( f"License identifier {feed_license_identifier}. Mismatch between license " f"expiry in the feed ({feed_license_expires}) and the expiry in the license " f"info document ({parsed_license.expires}) setting license status " @@ -1137,7 +1119,7 @@ def get_license_data( parsed_license.status = LicenseStatus.unavailable if parsed_license.terms_concurrency != feed_concurrency: - logging.error( + cls.logger().error( f"License identifier {feed_license_identifier}. Mismatch between license " f"concurrency in the feed ({feed_concurrency}) and the " f"concurrency in the license info document (" diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index cebe3a63e5..24b7234024 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -2,18 +2,7 @@ import datetime import json -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - List, - Optional, - Set, - Tuple, - Type, -) +from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Set, Tuple, Type import feedparser from flask_babel import lazy_gettext as _ @@ -21,6 +10,7 @@ from api.circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo from api.circulation_exceptions import ( CannotFulfill, + DeliveryMechanismError, LibraryAuthorizationFailedException, ) from api.selftest import HasCollectionSelfTests @@ -98,12 +88,6 @@ class OPDSForDistributorsAPI( if drm == (DeliveryMechanism.BEARER_TOKEN) and format is not None ] - # ...and we should map requests for delivery of that media type to - # the (type, BEARER_TOKEN) DeliveryMechanism. - delivery_mechanism_to_internal_format = { - (type, DeliveryMechanism.BEARER_TOKEN): type for type in SUPPORTED_MEDIA_TYPES - } - @classmethod def settings_class(cls) -> Type[OPDSForDistributorsSettings]: return OPDSForDistributorsSettings @@ -283,7 +267,7 @@ def checkout( patron: Patron, pin: str, licensepool: LicensePool, - internal_format: Optional[str], + delivery_mechanism: LicensePoolDeliveryMechanism, ) -> LoanInfo: now = utc_now() return LoanInfo( @@ -300,57 +284,70 @@ def fulfill( patron: Patron, pin: str, licensepool: LicensePool, - internal_format: Optional[str] = None, - part: Optional[str] = None, - fulfill_part_url: Optional[Callable[[Optional[str]], str]] = None, + delivery_mechanism: LicensePoolDeliveryMechanism, ) -> FulfillmentInfo: """Retrieve a bearer token that can be used to download the book. - :param kwargs: A container for arguments to fulfill() - which are not relevant to this vendor. - :return: a FulfillmentInfo object. """ + if ( + delivery_mechanism.delivery_mechanism.drm_scheme + != DeliveryMechanism.BEARER_TOKEN + ): + raise DeliveryMechanismError( + "Cannot fulfill a loan through OPDS For Distributors using a delivery mechanism with DRM scheme %s" + % delivery_mechanism.delivery_mechanism.drm_scheme + ) links = licensepool.identifier.links + # Find the acquisition link with the right media type. 
+ url = None for link in links: media_type = link.resource.representation.media_type if ( link.rel == Hyperlink.GENERIC_OPDS_ACQUISITION - and media_type == internal_format + and media_type == delivery_mechanism.delivery_mechanism.content_type ): url = link.resource.representation.url + break - # Obtain a Credential with the information from our - # bearer token. - _db = Session.object_session(licensepool) - credential = self._get_token(_db) - - # Build a application/vnd.librarysimplified.bearer-token - # document using information from the credential. - now = utc_now() - expiration = int((credential.expires - now).total_seconds()) # type: ignore[operator] - token_document = dict( - token_type="Bearer", - access_token=credential.credential, - expires_in=expiration, - location=url, - ) + if url is None: + # We couldn't find an acquisition link for this book. + raise CannotFulfill() + + # Obtain a Credential with the information from our + # bearer token. + _db = Session.object_session(licensepool) + credential = self._get_token(_db) + if credential.expires is None: + self.log.error( + f"Credential ({credential.id}) for patron ({patron.authorization_identifier}/{patron.id}) " + "has no expiration date. Cannot fulfill loan." + ) + raise CannotFulfill() - return FulfillmentInfo( - licensepool.collection, - licensepool.data_source.name, - licensepool.identifier.type, - licensepool.identifier.identifier, - content_link=None, - content_type=DeliveryMechanism.BEARER_TOKEN, - content=json.dumps(token_document), - content_expires=credential.expires, - ) + # Build a application/vnd.librarysimplified.bearer-token + # document using information from the credential. + now = utc_now() + expiration = int((credential.expires - now).total_seconds()) + token_document = dict( + token_type="Bearer", + access_token=credential.credential, + expires_in=expiration, + location=url, + ) - # We couldn't find an acquisition link for this book. - raise CannotFulfill() + return FulfillmentInfo( + licensepool.collection, + licensepool.data_source.name, + licensepool.identifier.type, + licensepool.identifier.identifier, + content_link=None, + content_type=DeliveryMechanism.BEARER_TOKEN, + content=json.dumps(token_document), + content_expires=credential.expires, + ) def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: # Look up loans for this collection in the database. diff --git a/api/overdrive.py b/api/overdrive.py index 358e6461d4..48d0cd45b2 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -25,6 +25,7 @@ from api.circulation import ( BaseCirculationAPI, BaseCirculationEbookLoanSettings, + CirculationInternalFormatsMixin, DeliveryMechanismInfo, FulfillmentInfo, HoldInfo, @@ -68,6 +69,7 @@ Hyperlink, Identifier, LicensePool, + LicensePoolDeliveryMechanism, Measurement, MediaTypes, Patron, @@ -202,6 +204,7 @@ class OverdriveChildSettings(BaseSettings): class OverdriveAPI( BaseCirculationAPI, + CirculationInternalFormatsMixin, HasCollectionSelfTests, HasChildIntegrationConfiguration, OverdriveConstants, @@ -261,8 +264,6 @@ class OverdriveAPI( # associated with an OverDrive collection. OVERDRIVE_MAIN_ACCOUNT_ID = -1 - log = logging.getLogger("Overdrive API") - # A lock for threaded usage. 
lock = RLock() @@ -1010,7 +1011,13 @@ def refresh_patron_access_token( raise PatronAuthorizationFailedException(message, debug) return credential - def checkout(self, patron, pin, licensepool, internal_format): + def checkout( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> LoanInfo: """Check out a book on behalf of a patron. :param patron: a Patron object for the patron who wants @@ -1021,7 +1028,7 @@ def checkout(self, patron, pin, licensepool, internal_format): :param licensepool: Identifier of the book to be checked out is attached to this licensepool. - :param internal_format: Represents the patron's desired book format. + :param delivery_mechanism: Represents the patron's desired book format. :return: a LoanInfo object. """ @@ -1029,8 +1036,8 @@ def checkout(self, patron, pin, licensepool, internal_format): identifier = licensepool.identifier overdrive_id = identifier.identifier headers = {"Content-Type": "application/json"} - payload = dict(fields=[dict(name="reserveId", value=overdrive_id)]) - payload = json.dumps(payload) + payload_dict = dict(fields=[dict(name="reserveId", value=overdrive_id)]) + payload = json.dumps(payload_dict) response = self.patron_request( patron, pin, self.CHECKOUTS_ENDPOINT, extra_headers=headers, data=payload @@ -1228,14 +1235,20 @@ def get_hold(self, patron, pin, overdrive_id): self.raise_exception_on_error(data) return data - def fulfill(self, patron, pin, licensepool, internal_format, **kwargs): - """Get the actual resource file to the patron. - - :param kwargs: A container for arguments to fulfill() - which are not relevant to this vendor. - - :return: a FulfillmentInfo object. - """ + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> FulfillmentInfo: + """Get the actual resource file to the patron.""" + internal_format = self.internal_format(delivery_mechanism) + if licensepool.identifier.identifier is None: + self.log.error( + f"Cannot fulfill licensepool with no identifier. 
Licensepool.id: {licensepool.id}" + ) + raise CannotFulfill() try: result = self.get_fulfillment_link( patron, pin, licensepool.identifier.identifier, internal_format diff --git a/api/routes.py b/api/routes.py index fc24ac599d..375df01cd8 100644 --- a/api/routes.py +++ b/api/routes.py @@ -445,12 +445,12 @@ def borrow(identifier_type, identifier, mechanism_id=None): @library_route("/works//fulfill") @library_route("/works//fulfill/") -@library_route("/works//fulfill//") +@library_route("/works//fulfill/") @has_library @allows_patron_web @returns_problem_detail -def fulfill(license_pool_id, mechanism_id=None, part=None): - return app.manager.loans.fulfill(license_pool_id, mechanism_id, part) +def fulfill(license_pool_id, mechanism_id=None): + return app.manager.loans.fulfill(license_pool_id, mechanism_id) @library_route("/loans//revoke", methods=["GET", "PUT"]) diff --git a/pyproject.toml b/pyproject.toml index 9927499db7..95ab13f327 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -144,7 +144,7 @@ module = [ "opensearch_dsl.*", "pyfakefs.*", "pyld", - "pymarc", + "pymarc.*", "pyparsing", "spellchecker", "textblob.*", diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index ff1392c83a..77a5d13e55 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -1,7 +1,6 @@ -import logging from abc import ABC from collections import defaultdict -from typing import Optional +from typing import Optional, Type from unittest.mock import MagicMock from sqlalchemy.orm import Session @@ -17,18 +16,20 @@ class MockBaseCirculationAPI(BaseCirculationAPI, ABC): - def label(self): + @classmethod + def label(cls) -> str: return "" - def description(self): + @classmethod + def description(cls) -> str: return "" @classmethod - def settings_class(cls): + def settings_class(cls) -> Type[BaseSettings]: return BaseSettings @classmethod - def library_settings_class(cls): + def library_settings_class(cls) -> Type[BaseSettings]: return BaseSettings @@ -39,7 +40,6 @@ def __init__( self.SET_DELIVERY_MECHANISM_AT = set_delivery_mechanism_at self.CAN_REVOKE_HOLD_WHEN_RESERVED = can_revoke_hold_when_reserved self.responses = defaultdict(list) - self.log = logging.getLogger("Mock remote API") self.availability_updated_for = [] def checkout(self, patron_obj, patron_password, licensepool, delivery_mechanism): @@ -59,9 +59,7 @@ def fulfill( patron, pin, licensepool, - internal_format=None, - part=None, - fulfill_part_url=None, + delivery_mechanism, ): # Should be a FulfillmentInfo. return self._return_or_raise("fulfill") diff --git a/tests/api/test_axis.py b/tests/api/test_axis.py index 706a98f6ae..83fa987400 100644 --- a/tests/api/test_axis.py +++ b/tests/api/test_axis.py @@ -423,11 +423,15 @@ def test_fulfill(self, axis360: Axis360Fixture): patron = axis360.db.patron() patron.authorization_identifier = "a barcode" - - def fulfill(internal_format="not AxisNow"): - return axis360.api.fulfill( - patron, "pin", licensepool=pool, internal_format=internal_format - ) + delivery_mechanism = pool.delivery_mechanisms[0] + + fulfill = partial( + axis360.api.fulfill, + patron, + "pin", + licensepool=pool, + delivery_mechanism=delivery_mechanism, + ) # If Axis 360 says a patron does not have a title checked out, # an attempt to fulfill that title will fail with NoActiveLoan. @@ -440,7 +444,7 @@ def fulfill(internal_format="not AxisNow"): # object with a content link. 
data = axis360.sample_data("availability_with_loan_and_hold.xml") axis360.api.queue_response(200, content=data) - fulfillment = fulfill(internal_format="ePub") + fulfillment = fulfill() assert isinstance(fulfillment, FulfillmentInfo) assert not isinstance(fulfillment, Axis360FulfillmentInfo) assert DeliveryMechanism.ADOBE_DRM == fulfillment.content_type @@ -452,7 +456,8 @@ def fulfill(internal_format="not AxisNow"): data = axis360.sample_data("availability_with_axisnow_fulfillment.xml") data = data.replace(b"0016820953", pool.identifier.identifier.encode("utf8")) axis360.api.queue_response(200, content=data) - fulfillment = fulfill("AxisNow") + delivery_mechanism.drm_scheme = DeliveryMechanism.AXISNOW_DRM + fulfillment = fulfill() assert isinstance(fulfillment, Axis360FulfillmentInfo) # Looking up the details of the Axis360FulfillmentInfo will @@ -475,7 +480,8 @@ def fulfill(internal_format="not AxisNow"): ) data = axis360.sample_data("availability_with_audiobook_fulfillment.xml") axis360.api.queue_response(200, content=data) - fulfillment = fulfill(internal_format="irrelevant") + delivery_mechanism.drm_scheme = DeliveryMechanism.FINDAWAY_DRM + fulfillment = fulfill() assert isinstance(fulfillment, Axis360FulfillmentInfo) def test_patron_activity(self, axis360: Axis360Fixture): diff --git a/tests/api/test_bibliotheca.py b/tests/api/test_bibliotheca.py index 3534bf1afc..f42d691f25 100644 --- a/tests/api/test_bibliotheca.py +++ b/tests/api/test_bibliotheca.py @@ -6,11 +6,11 @@ from io import BytesIO, StringIO from typing import TYPE_CHECKING, Any, ClassVar, Optional, Protocol, runtime_checkable from unittest import mock -from unittest.mock import MagicMock +from unittest.mock import MagicMock, create_autospec import pytest from pymarc import parse_xml_to_array -from pymarc.record import Record # type: ignore +from pymarc.record import Record from api.bibliotheca import ( BibliothecaAPI, @@ -54,6 +54,7 @@ Hyperlink, Identifier, LicensePool, + LicensePoolDeliveryMechanism, Measurement, Representation, Subject, @@ -576,8 +577,10 @@ def test_fulfill(self, bibliotheca_fixture: BibliothecaAPITestFixture): headers={"Content-Type": "presumably/an-acsm"}, content="this is an ACSM", ) + delivery_mechanism = create_autospec(LicensePoolDeliveryMechanism) + delivery_mechanism.delivery_mechanism.drm_scheme = DeliveryMechanism.ADOBE_DRM fulfillment = bibliotheca_fixture.api.fulfill( - patron, "password", pool, internal_format="ePub" + patron, "password", pool, delivery_mechanism=delivery_mechanism ) assert isinstance(fulfillment, FulfillmentInfo) assert b"this is an ACSM" == fulfillment.content @@ -595,8 +598,11 @@ def test_fulfill(self, bibliotheca_fixture: BibliothecaAPITestFixture): bibliotheca_fixture.api.queue_response( 200, headers={"Content-Type": "application/json"}, content=license ) + delivery_mechanism.delivery_mechanism.drm_scheme = ( + DeliveryMechanism.FINDAWAY_DRM + ) fulfillment = bibliotheca_fixture.api.fulfill( - patron, "password", pool, internal_format="MP3" + patron, "password", pool, delivery_mechanism=delivery_mechanism ) assert isinstance(fulfillment, FulfillmentInfo) @@ -627,7 +633,7 @@ def test_fulfill(self, bibliotheca_fixture: BibliothecaAPITestFixture): 200, headers={"Content-Type": bad_media_type}, content=bad_content ) fulfillment = bibliotheca_fixture.api.fulfill( - patron, "password", pool, internal_format="MP3" + patron, "password", pool, delivery_mechanism=delivery_mechanism ) assert isinstance(fulfillment, FulfillmentInfo) diff --git a/tests/api/test_controller_loan.py 
b/tests/api/test_controller_loan.py index e0eb5032bf..4af9690604 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/test_controller_loan.py @@ -14,7 +14,6 @@ from api.circulation import CirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo from api.circulation_exceptions import ( AlreadyOnHold, - CannotFulfill, NoAvailableCopies, NoLicenses, NotFoundOnRemote, @@ -672,29 +671,9 @@ def test_borrow_succeeds_when_work_already_checked_out( def test_fulfill(self, loan_fixture: LoanFixture): # Verify that arguments to the fulfill() method are propagated # correctly to the CirculationAPI. - class MockCirculationAPI: - def fulfill( - self, - patron, - credential, - requested_license_pool, - mechanism, - part, - fulfill_part_url, - ): - self.called_with = ( - patron, - credential, - requested_license_pool, - mechanism, - part, - fulfill_part_url, - ) - raise CannotFulfill() controller = loan_fixture.manager.loans - mock = MockCirculationAPI() - library_short_name = loan_fixture.db.default_library().short_name + mock = MagicMock(spec=CirculationAPI) controller.manager.circulation_apis[loan_fixture.db.default_library().id] = mock with loan_fixture.request_context_with_library( @@ -703,50 +682,20 @@ def fulfill( authenticated = controller.authenticated_patron_from_request() loan, ignore = loan_fixture.pool.loan_to(authenticated) - # Try to fulfill a certain part of the loan. + # Try to fulfill the loan. assert isinstance(loan_fixture.pool.id, int) - part = "part 1 million" controller.fulfill( - loan_fixture.pool.id, loan_fixture.mech2.delivery_mechanism.id, part + loan_fixture.pool.id, loan_fixture.mech2.delivery_mechanism.id ) # Verify that the right arguments were passed into # CirculationAPI. - ( - patron, - credential, - pool, - mechanism, - part, - fulfill_part_url, - ) = mock.called_with - assert authenticated == patron - assert loan_fixture.valid_credentials["password"] == credential - assert loan_fixture.pool == pool - assert loan_fixture.mech2 == mechanism - assert "part 1 million" == part - - # The last argument is complicated -- it's a function for - # generating partial fulfillment URLs. Let's try it out - # and make sure it gives the result we expect. - expect = url_for( - "fulfill", - license_pool_id=loan_fixture.pool.id, - mechanism_id=mechanism.delivery_mechanism.id, - library_short_name=library_short_name, - part=part, - _external=True, + mock.fulfill.assert_called_once_with( + authenticated, + loan_fixture.valid_credentials["password"], + loan_fixture.pool, + loan_fixture.mech2, ) - part_url = fulfill_part_url(part) - assert expect == part_url - - # Ensure that the library short name is the first segment - # of the path of the fulfillment url. We cannot perform - # patron authentication without it. 
- expected_path = urllib.parse.urlparse(expect).path - part_url_path = urllib.parse.urlparse(part_url).path - assert expected_path.startswith(f"/{library_short_name}/") - assert part_url_path.startswith(f"/{library_short_name}/") @pytest.mark.parametrize( "as_response_value", diff --git a/tests/api/test_enki.py b/tests/api/test_enki.py index 87acf751bd..e29cf94b2d 100644 --- a/tests/api/test_enki.py +++ b/tests/api/test_enki.py @@ -3,6 +3,7 @@ import datetime import json from typing import TYPE_CHECKING +from unittest.mock import MagicMock import pytest @@ -410,7 +411,7 @@ def test_checkout_success(self, enki_test_fixture: EnkiTestFixure): data = enki_test_fixture.files.sample_data("checked_out_acs.json") enki_test_fixture.api.queue_response(200, content=data) - loan = enki_test_fixture.api.checkout(patron, "pin", pool, "internal format") + loan = enki_test_fixture.api.checkout(patron, "pin", pool, MagicMock()) # An appropriate request to the "getSELink" endpoint was made., [ @@ -455,7 +456,7 @@ def test_checkout_bad_authorization(self, enki_test_fixture: EnkiTestFixure): patron = db.patron(external_identifier="notabarcode") - loan = enki_test_fixture.api.checkout(patron, "notapin", pool, None) + loan = enki_test_fixture.api.checkout(patron, "notapin", pool, MagicMock()) def test_checkout_not_available(self, enki_test_fixture: EnkiTestFixure): """Test that the correct exception is thrown upon an unsuccessful login.""" @@ -473,7 +474,7 @@ def test_checkout_not_available(self, enki_test_fixture: EnkiTestFixure): pool.identifier.identifier = "econtentRecord1" patron = db.patron(external_identifier="12345678901234") - loan = enki_test_fixture.api.checkout(patron, "1234", pool, None) + loan = enki_test_fixture.api.checkout(patron, "1234", pool, MagicMock()) def test_fulfillment_open_access_parser(self, enki_test_fixture: EnkiTestFixure): """Test that fulfillment info for non-ACS Enki books is parsed correctly.""" @@ -506,9 +507,7 @@ def test_fulfill_success(self, enki_test_fixture: EnkiTestFixure): data = enki_test_fixture.files.sample_data("checked_out_acs.json") enki_test_fixture.api.queue_response(200, content=data) - fulfillment = enki_test_fixture.api.fulfill( - patron, "pin", pool, "internal format" - ) + fulfillment = enki_test_fixture.api.fulfill(patron, "pin", pool, MagicMock()) # An appropriate request to the "getSELink" endpoint was made., [ diff --git a/tests/api/test_odilo.py b/tests/api/test_odilo.py index d4db703055..0b144a4ec1 100644 --- a/tests/api/test_odilo.py +++ b/tests/api/test_odilo.py @@ -25,6 +25,7 @@ ExternalIntegration, Hyperlink, Identifier, + MediaTypes, Representation, ) from core.util.datetime_helpers import datetime_utc, utc_now @@ -83,6 +84,8 @@ def __init__(self, db: DatabaseTransactionFixture, files: OdiloFilesFixture): with_license_pool=True, ) + self.delivery_mechanism = self.licensepool.delivery_mechanisms[0] + @pytest.fixture(scope="function") def odilo( @@ -346,7 +349,7 @@ def test_01_patron_not_found(self, odilo: OdiloFixture): patron, odilo.PIN, odilo.licensepool, - "ACSM_EPUB", + odilo.delivery_mechanism, ) odilo.api.log.info("Test patron not found ok!") @@ -364,7 +367,7 @@ def test_02_data_not_found(self, odilo: OdiloFixture): odilo.patron, odilo.PIN, odilo.licensepool, - "ACSM_EPUB", + odilo.delivery_mechanism, ) odilo.api.log.info("Test resource not found on remote ok!") @@ -392,34 +395,43 @@ def test_11_checkout_fake_format(self, odilo: OdiloFixture): odilo.patron, odilo.PIN, odilo.licensepool, - "FAKE_FORMAT", + odilo.delivery_mechanism, ) 
odilo.api.log.info("Test invalid format for resource ok!") def test_12_checkout_acsm_epub(self, odilo: OdiloFixture): checkout_data, checkout_json = odilo.sample_json("checkout_acsm_epub_ok.json") odilo.api.queue_response(200, content=checkout_json) - self.perform_and_validate_checkout("ACSM_EPUB", odilo) + self.perform_and_validate_checkout(odilo) def test_13_checkout_acsm_pdf(self, odilo: OdiloFixture): + odilo.delivery_mechanism.delivery_mechanism.content_type = ( + MediaTypes.PDF_MEDIA_TYPE + ) checkout_data, checkout_json = odilo.sample_json("checkout_acsm_pdf_ok.json") odilo.api.queue_response(200, content=checkout_json) - self.perform_and_validate_checkout("ACSM_PDF", odilo) + self.perform_and_validate_checkout(odilo) def test_14_checkout_ebook_streaming(self, odilo: OdiloFixture): + odilo.delivery_mechanism.delivery_mechanism.content_type = ( + Representation.TEXT_HTML_MEDIA_TYPE + ) + odilo.delivery_mechanism.delivery_mechanism.drm_scheme = ( + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE + ) checkout_data, checkout_json = odilo.sample_json( "checkout_ebook_streaming_ok.json" ) odilo.api.queue_response(200, content=checkout_json) - self.perform_and_validate_checkout("EBOOK_STREAMING", odilo) + self.perform_and_validate_checkout(odilo) def test_mechanism_set_on_borrow(self, odilo: OdiloFixture): """The delivery mechanism for an Odilo title is set on checkout.""" assert OdiloAPI.SET_DELIVERY_MECHANISM_AT == OdiloAPI.BORROW_STEP - def perform_and_validate_checkout(self, internal_format, odilo: OdiloFixture): + def perform_and_validate_checkout(self, odilo: OdiloFixture): loan_info = odilo.api.checkout( - odilo.patron, odilo.PIN, odilo.licensepool, internal_format + odilo.patron, odilo.PIN, odilo.licensepool, odilo.delivery_mechanism ) assert loan_info, "LoanInfo null --> checkout failed!" 
odilo.api.log.info("Loan ok: %s" % loan_info.identifier) @@ -435,36 +447,45 @@ def test_21_fulfill_acsm_epub(self, odilo: OdiloFixture): acsm_data = odilo.sample_data("fulfill_ok_acsm_epub.acsm") odilo.api.queue_response(200, content=acsm_data) - fulfillment_info = self.fulfill("ACSM_EPUB", odilo) + fulfillment_info = self.fulfill(odilo) assert fulfillment_info.content_type[0] == Representation.EPUB_MEDIA_TYPE assert fulfillment_info.content_type[1] == DeliveryMechanism.ADOBE_DRM def test_22_fulfill_acsm_pdf(self, odilo: OdiloFixture): + odilo.delivery_mechanism.delivery_mechanism.content_type = ( + MediaTypes.PDF_MEDIA_TYPE + ) checkout_data, checkout_json = odilo.sample_json("patron_checkouts.json") odilo.api.queue_response(200, content=checkout_json) acsm_data = odilo.sample_data("fulfill_ok_acsm_pdf.acsm") odilo.api.queue_response(200, content=acsm_data) - fulfillment_info = self.fulfill("ACSM_PDF", odilo) + fulfillment_info = self.fulfill(odilo) assert fulfillment_info.content_type[0] == Representation.PDF_MEDIA_TYPE assert fulfillment_info.content_type[1] == DeliveryMechanism.ADOBE_DRM def test_23_fulfill_ebook_streaming(self, odilo: OdiloFixture): + odilo.delivery_mechanism.delivery_mechanism.content_type = ( + Representation.TEXT_HTML_MEDIA_TYPE + ) + odilo.delivery_mechanism.delivery_mechanism.drm_scheme = ( + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE + ) checkout_data, checkout_json = odilo.sample_json("patron_checkouts.json") odilo.api.queue_response(200, content=checkout_json) odilo.licensepool.identifier.identifier = "00011055" - fulfillment_info = self.fulfill("EBOOK_STREAMING", odilo) + fulfillment_info = self.fulfill(odilo) assert fulfillment_info.content_type[0] == Representation.TEXT_HTML_MEDIA_TYPE assert ( fulfillment_info.content_type[1] == DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE ) - def fulfill(self, internal_format, odilo: OdiloFixture): + def fulfill(self, odilo: OdiloFixture): fulfillment_info = odilo.api.fulfill( - odilo.patron, odilo.PIN, odilo.licensepool, internal_format + odilo.patron, odilo.PIN, odilo.licensepool, odilo.delivery_mechanism ) assert fulfillment_info, "Cannot Fulfill !!" 
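# The updated tests no longer pass vendor format strings to checkout()/fulfill();
# they hand over a LicensePoolDeliveryMechanism instead. A condensed sketch of the
# two patterns used in the surrounding test modules (values are illustrative):
# either reuse a real mechanism from the pool and mutate it, or build a spec'd mock.
from unittest.mock import MagicMock

from core.model import DeliveryMechanism, LicensePoolDeliveryMechanism, Representation

# Pattern 1: take the pool's first mechanism and adjust its type/scheme as needed.
# delivery_mechanism = licensepool.delivery_mechanisms[0]
# delivery_mechanism.delivery_mechanism.drm_scheme = DeliveryMechanism.FINDAWAY_DRM

# Pattern 2: a fully mocked mechanism, as in the ODL and Bibliotheca tests.
lpdm = MagicMock(spec=LicensePoolDeliveryMechanism)
lpdm.delivery_mechanism = MagicMock(spec=DeliveryMechanism)
lpdm.delivery_mechanism.content_type = Representation.EPUB_MEDIA_TYPE
lpdm.delivery_mechanism.drm_scheme = DeliveryMechanism.ADOBE_DRM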
diff --git a/tests/api/test_odl.py b/tests/api/test_odl.py index a701667bab..62635d6717 100644 --- a/tests/api/test_odl.py +++ b/tests/api/test_odl.py @@ -4,6 +4,7 @@ import json import urllib.parse from typing import TYPE_CHECKING, Any, Dict +from unittest.mock import MagicMock import dateutil import pytest @@ -28,6 +29,7 @@ DeliveryMechanism, Edition, Hold, + LicensePoolDeliveryMechanism, Loan, MediaTypes, Representation, @@ -623,6 +625,11 @@ def test_fulfill_success( odl_api_test_fixture.license.setup(concurrency=1, available=1) # type: ignore[attr-defined] odl_api_test_fixture.checkout() + lpdm = MagicMock(spec=LicensePoolDeliveryMechanism) + lpdm.delivery_mechanism = MagicMock(spec=DeliveryMechanism) + lpdm.delivery_mechanism.content_type = "ignored/format" + lpdm.delivery_mechanism.drm_scheme = delivery_mechanism + lsd = json.dumps( { "status": "ready", @@ -636,7 +643,7 @@ def test_fulfill_success( odl_api_test_fixture.patron, "pin", odl_api_test_fixture.pool, - delivery_mechanism, + lpdm, ) assert odl_api_test_fixture.collection == fulfillment.collection(db.session) diff --git a/tests/api/test_opds_for_distributors.py b/tests/api/test_opds_for_distributors.py index 17ad4d879f..8d440b4b33 100644 --- a/tests/api/test_opds_for_distributors.py +++ b/tests/api/test_opds_for_distributors.py @@ -401,7 +401,7 @@ def test_checkout(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): ) loan_info = opds_dist_api_fixture.api.checkout( - patron, "1234", pool, Representation.EPUB_MEDIA_TYPE + patron, "1234", pool, MagicMock() ) assert opds_dist_api_fixture.collection.id == loan_info.collection_id assert data_source.name == loan_info.data_source_name @@ -428,6 +428,23 @@ def test_fulfill(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): with_license_pool=True, collection=opds_dist_api_fixture.collection, ) + pool.set_delivery_mechanism( + Representation.EPUB_MEDIA_TYPE, + DeliveryMechanism.BEARER_TOKEN, + RightsStatus.IN_COPYRIGHT, + None, + ) + + # Find the correct delivery mechanism + delivery_mechanism = None + for mechanism in pool.delivery_mechanisms: + if ( + mechanism.delivery_mechanism.drm_scheme + == DeliveryMechanism.BEARER_TOKEN + ): + delivery_mechanism = mechanism + assert delivery_mechanism is not None + # This pool doesn't have an acquisition link, so # we can't fulfill it yet. pytest.raises( @@ -436,7 +453,7 @@ def test_fulfill(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): patron, "1234", pool, - Representation.EPUB_MEDIA_TYPE, + delivery_mechanism, ) # Set up an epub acquisition link for the pool. @@ -447,12 +464,7 @@ def test_fulfill(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): data_source, Representation.EPUB_MEDIA_TYPE, ) - pool.set_delivery_mechanism( - Representation.EPUB_MEDIA_TYPE, - DeliveryMechanism.NO_DRM, - RightsStatus.IN_COPYRIGHT, - link.resource, - ) + delivery_mechanism.resource = link.resource # Set the API's auth url so it doesn't have to get it - # that's tested in test_get_token. 
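# For reference, the body that OPDSForDistributorsAPI.fulfill() (changed earlier in
# this patch) returns to the client: a JSON bearer-token document built from the
# collection-wide Credential and served with content type
# DeliveryMechanism.BEARER_TOKEN. The values below are illustrative only.
import json

token_document = {
    "token_type": "Bearer",
    "access_token": "abc123",                         # credential.credential
    "expires_in": 3600,                               # seconds until credential.expires
    "location": "https://example.org/content.epub",   # the matched acquisition link
}
fulfillment_body = json.dumps(token_document)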
@@ -463,7 +475,7 @@ def test_fulfill(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): fulfillment_time = utc_now() fulfillment_info = opds_dist_api_fixture.api.fulfill( - patron, "1234", pool, Representation.EPUB_MEDIA_TYPE + patron, "1234", pool, delivery_mechanism ) assert opds_dist_api_fixture.collection.id == fulfillment_info.collection_id assert data_source.name == fulfillment_info.data_source_name diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index 86ba258711..c665e6af37 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -46,6 +46,7 @@ Hyperlink, Identifier, LicensePool, + LicensePoolDeliveryMechanism, Measurement, MediaTypes, Representation, @@ -785,8 +786,9 @@ def test_scope_string(self, overdrive_api_fixture: OverdriveAPIFixture): def test_checkout(self, overdrive_api_fixture: OverdriveAPIFixture): # Verify the process of checking out a book. db = overdrive_api_fixture.db - patron = object() - pin = object() + patron = MagicMock() + pin = MagicMock() + delivery_mechanism = MagicMock() pool = db.licensepool(edition=None, collection=overdrive_api_fixture.collection) identifier = pool.identifier @@ -818,7 +820,7 @@ def _process_checkout_error(self, patron, pin, licensepool, data): api = Mock(db.session, overdrive_api_fixture.collection) api_response = json.dumps("some data") api.queue_response(201, content=api_response) - loan = api.checkout(patron, pin, pool, "internal format is ignored") + loan = api.checkout(patron, pin, pool, delivery_mechanism) # Verify that a good-looking patron request went out. endpoint, ignore, kwargs = api.requests.pop() @@ -854,7 +856,7 @@ def _process_checkout_error(self, patron, pin, licensepool, data): # Most of the time, an error simply results in an exception. api.queue_response(400, content=api_response) with pytest.raises(Exception) as excinfo: - api.checkout(patron, pin, pool, "internal format is ignored") + api.checkout(patron, pin, pool, delivery_mechanism) assert "exception in _process_checkout_error" in str(excinfo.value) assert ( patron, @@ -869,7 +871,7 @@ def _process_checkout_error(self, patron, pin, licensepool, data): api.PROCESS_CHECKOUT_ERROR_RESULT = "Actually, I was able to recover" # type: ignore[assignment] api.queue_response(400, content=api_response) assert "Actually, I was able to recover" == api.checkout( - patron, pin, pool, "internal format is ignored" + patron, pin, pool, delivery_mechanism ) assert ( patron, @@ -1462,7 +1464,6 @@ def test_fulfill_returns_fulfillmentinfo_if_returned_by_get_fulfillment_link( # If get_fulfillment_link returns a FulfillmentInfo, it is returned # immediately and the rest of fulfill() does not run. - fulfillment = FulfillmentInfo( overdrive_api_fixture.collection, None, None, None, None, None, None, None ) @@ -1471,12 +1472,15 @@ class MockAPI(OverdriveAPI): def get_fulfillment_link(*args, **kwargs): return fulfillment - # Since most of the data is not provided, if fulfill() tried - # to actually run to completion, it would crash. 
+ def internal_format( + self, delivery_mechanism: LicensePoolDeliveryMechanism + ) -> str: + return "format" + edition, pool = db.edition(with_license_pool=True) api = MockAPI(db.session, overdrive_api_fixture.collection) - result = api.fulfill(None, None, pool, None) - assert fulfillment == result + result = api.fulfill(MagicMock(), MagicMock(), pool, MagicMock()) + assert result is fulfillment def test_fulfill_raises_exception_and_updates_formats_for_outdated_format( self, overdrive_api_fixture: OverdriveAPIFixture @@ -1543,7 +1547,7 @@ def test_fulfill_raises_exception_and_updates_formats_for_outdated_format( db.patron(), "pin", pool, - "ebook-epub-adobe", + pool.delivery_mechanisms[0], ) # The delivery mechanisms have been updated. @@ -2238,11 +2242,19 @@ def test_no_drm_fulfillment(self, overdrive_api_fixture: OverdriveAPIFixture): ) ) + # Mock delivery mechanism + delivery_mechanism = create_autospec(LicensePoolDeliveryMechanism) + delivery_mechanism.delivery_mechanism = create_autospec(DeliveryMechanism) + delivery_mechanism.delivery_mechanism.drm_scheme = DeliveryMechanism.NO_DRM + delivery_mechanism.delivery_mechanism.content_type = ( + Representation.EPUB_MEDIA_TYPE + ) + fulfill = od_api.fulfill( - patron, "pin", work.active_license_pool(), "ebook-epub-open" + patron, "pin", work.active_license_pool(), delivery_mechanism ) - assert fulfill.content_link_redirect == True + assert fulfill.content_link_redirect is True assert fulfill.content_link == "https://example.org/epub-redirect" diff --git a/tests/api/test_routes.py b/tests/api/test_routes.py index 1d38fe1eb4..3d5197d6af 100644 --- a/tests/api/test_routes.py +++ b/tests/api/test_routes.py @@ -174,21 +174,12 @@ def test_fulfill(self, fixture: RouteTestFixture): # open-access titles. url = "/works//fulfill" fixture.assert_request_calls( - url, fixture.controller.fulfill, "", None, None # type: ignore[union-attr] + url, fixture.controller.fulfill, "", None # type: ignore[union-attr] ) url = "/works//fulfill/" fixture.assert_request_calls( - url, fixture.controller.fulfill, "", "", None # type: ignore[union-attr] - ) - - url = "/works//fulfill//" - fixture.assert_request_calls( - url, - fixture.controller.fulfill, # type: ignore[union-attr] - "", - "", - "", + url, fixture.controller.fulfill, "", "" # type: ignore[union-attr] ) def test_revoke_loan_or_hold(self, fixture: RouteTestFixture): From e864ae5eabf9d5fa5e7134610d3b7e0161a6fded Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 6 Oct 2023 11:39:36 -0300 Subject: [PATCH 092/262] Type hint the XML parser classes. 
(#1439) --- api/axis.py | 354 ++++++++++++++++++----------- api/bibliotheca.py | 187 ++++++++------- api/millenium_patron.py | 3 +- core/util/xmlparser.py | 119 ++++++++-- pyproject.toml | 1 + tests/api/test_axis.py | 121 ++++++---- tests/api/test_bibliotheca.py | 33 +-- tests/core/util/test_xml_parser.py | 73 ++++-- 8 files changed, 572 insertions(+), 319 deletions(-) diff --git a/api/axis.py b/api/axis.py index 7ef7c2f0fc..d2e8ca74b0 100644 --- a/api/axis.py +++ b/api/axis.py @@ -1,5 +1,6 @@ from __future__ import annotations +import datetime import html import json import logging @@ -7,13 +8,27 @@ import socket import ssl import urllib +from abc import ABC, abstractmethod from datetime import timedelta -from typing import Union +from typing import ( + Any, + Dict, + Generic, + List, + Literal, + Mapping, + Tuple, + Type, + TypeVar, + Union, + cast, +) from urllib.parse import urlparse import certifi from flask_babel import lazy_gettext as _ from lxml import etree +from lxml.etree import _Element from pydantic import validator from api.admin.validator import Validator @@ -50,6 +65,7 @@ ) from core.model import ( Classification, + Collection, Contributor, DataSource, DeliveryMechanism, @@ -70,9 +86,10 @@ from core.util.datetime_helpers import datetime_utc, strptime_utc, utc_now from core.util.flask_util import Response from core.util.http import HTTP, RequestNetworkException +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail from core.util.string_helpers import base64 -from core.util.xmlparser import XMLParser +from core.util.xmlparser import XMLProcessor class Axis360APIConstants: @@ -373,7 +390,7 @@ def checkin(self, patron, pin, licensepool): patron_id = patron.authorization_identifier response = self._checkin(title_id, patron_id) try: - return CheckinResponseParser(licensepool.collection).process_all( + return CheckinResponseParser(licensepool.collection).process_first( response.content ) except etree.XMLSyntaxError as e: @@ -400,9 +417,12 @@ def checkout( title_id, patron_id, self.internal_format(delivery_mechanism) ) try: - return CheckoutResponseParser(licensepool.collection).process_all( + loan_info = CheckoutResponseParser(licensepool.collection).process_first( response.content ) + if loan_info is None: + raise CannotLoan() + return loan_info except etree.XMLSyntaxError as e: raise RemoteInitiatedServerError(response.content, self.SERVICE_NAME) @@ -460,7 +480,7 @@ def place_hold(self, patron, pin, licensepool, hold_notification_email): titleId=title_id, patronId=patron_id, email=hold_notification_email ) response = self.request(url, params=params) - hold_info = HoldResponseParser(licensepool.collection).process_all( + hold_info = HoldResponseParser(licensepool.collection).process_first( response.content ) if not hold_info.identifier: @@ -479,7 +499,7 @@ def release_hold(self, patron, pin, licensepool): params = dict(titleId=title_id, patronId=patron_id) response = self.request(url, params=params) try: - HoldReleaseResponseParser(licensepool.collection).process_all( + HoldReleaseResponseParser(licensepool.collection).process_first( response.content ) except NotOnHold: @@ -788,14 +808,17 @@ def process_items(self, identifiers): self.api.update_licensepools_for_identifiers(identifiers) -class Axis360Parser(XMLParser): - NS = {"axis": "http://axis360api.baker-taylor.com/vendorAPI"} +T = TypeVar("T") + +class Axis360Parser(XMLProcessor[T], ABC): SHORT_DATE_FORMAT = "%m/%d/%Y" FULL_DATE_FORMAT_IMPLICIT_UTC = "%m/%d/%Y %I:%M:%S %p" 
FULL_DATE_FORMAT_EXPLICIT_UTC = "%m/%d/%Y %I:%M:%S %p +00:00" - def _pd(self, date): + NAMESPACES = {"axis": "http://axis360api.baker-taylor.com/vendorAPI"} + + def _pd(self, date: Optional[str]) -> Optional[datetime.datetime]: """Stupid function to parse a date.""" if date is None: return date @@ -805,7 +828,13 @@ def _pd(self, date): pass return strptime_utc(date, self.FULL_DATE_FORMAT_EXPLICIT_UTC) - def _xpath1_boolean(self, e, target, ns, default=False): + def _xpath1_boolean( + self, + e: _Element, + target: str, + ns: Optional[Dict[str, str]], + default: bool = False, + ) -> bool: text = self.text_of_optional_subtag(e, target, ns) if text is None: return default @@ -814,12 +843,16 @@ def _xpath1_boolean(self, e, target, ns, default=False): else: return False - def _xpath1_date(self, e, target, ns): + def _xpath1_date( + self, e: _Element, target: str, ns: Optional[Dict[str, str]] + ) -> Optional[datetime.datetime]: value = self.text_of_optional_subtag(e, target, ns) return self._pd(value) -class BibliographicParser(Axis360Parser): +class BibliographicParser( + Axis360Parser[Tuple[Optional[Metadata], Optional[CirculationData]]], LoggerMixin +): DELIVERY_DATA_FOR_AXIS_FORMAT = { "Blio": None, # Legacy format, handled the same way as AxisNow "Acoustik": (None, DeliveryMechanism.FINDAWAY_DRM), # Audiobooks @@ -828,10 +861,8 @@ class BibliographicParser(Axis360Parser): "PDF": (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM), } - log = logging.getLogger("Axis 360 Bibliographic Parser") - @classmethod - def parse_list(cls, l): + def parse_list(cls, l: str) -> List[str]: """Turn strings like this into lists: FICTION / Thrillers; FICTION / Suspense; FICTION / General @@ -839,14 +870,22 @@ def parse_list(cls, l): """ return [x.strip() for x in l.split(";")] - def __init__(self, include_availability=True, include_bibliographic=True): + def __init__( + self, include_availability: bool = True, include_bibliographic: bool = True + ): self.include_availability = include_availability self.include_bibliographic = include_bibliographic - def process_all(self, string): - yield from super().process_all(string, "//axis:title", self.NS) + @property + def xpath_expression(self) -> str: + return "//axis:title" - def extract_availability(self, circulation_data, element, ns): + def extract_availability( + self, + circulation_data: Optional[CirculationData], + element: _Element, + ns: Optional[Dict[str, str]], + ) -> CirculationData: identifier = self.text_of_subtag(element, "axis:titleId", ns) primary_identifier = IdentifierData(Identifier.AXIS_360_ID, identifier) if not circulation_data: @@ -860,13 +899,6 @@ def extract_availability(self, circulation_data, element, ns): available_copies = self.int_of_subtag(availability, "axis:availableCopies", ns) size_of_hold_queue = self.int_of_subtag(availability, "axis:holdsQueueSize", ns) - availability_updated = self.text_of_optional_subtag( - availability, "axis:updateDate", ns - ) - if availability_updated: - # NOTE: We don't actually do anything with this. 
- availability_updated = self._pd(availability_updated) - circulation_data.licenses_owned = total_copies circulation_data.licenses_available = available_copies circulation_data.licenses_reserved = 0 @@ -890,7 +922,12 @@ def extract_availability(self, circulation_data, element, ns): ) @classmethod - def parse_contributor(cls, author, primary_author_found=False, force_role=None): + def parse_contributor( + cls, + author: str, + primary_author_found: bool = False, + force_role: Optional[str] = None, + ) -> ContributorData: """Parse an Axis 360 contributor string. The contributor string looks like "Butler, Octavia" or "Walt @@ -918,17 +955,22 @@ def parse_contributor(cls, author, primary_author_found=False, force_role=None): match = cls.role_abbreviation.search(author) if match: role_type = match.groups()[0] - role = cls.role_abbreviation_to_role.get( + mapped_role = cls.role_abbreviation_to_role.get( role_type, Contributor.UNKNOWN_ROLE ) - if role is cls.generic_author: - role = default_author_role + role = ( + default_author_role + if mapped_role is cls.generic_author + else cast(str, mapped_role) + ) author = author[:-5].strip() if force_role: role = force_role return ContributorData(sort_name=author, roles=[role]) - def extract_bibliographic(self, element, ns): + def extract_bibliographic( + self, element: _Element, ns: Optional[Dict[str, str]] + ) -> Metadata: """Turn bibliographic metadata into a Metadata and a CirculationData objects, and return them as a tuple.""" @@ -948,18 +990,18 @@ def extract_bibliographic(self, element, ns): found_primary_author = False if contributor: for c in self.parse_list(contributor): - contributor = self.parse_contributor(c, found_primary_author) - if Contributor.PRIMARY_AUTHOR_ROLE in contributor.roles: + contributor_data = self.parse_contributor(c, found_primary_author) + if Contributor.PRIMARY_AUTHOR_ROLE in contributor_data.roles: found_primary_author = True - contributors.append(contributor) + contributors.append(contributor_data) narrator = self.text_of_optional_subtag(element, "axis:narrator", ns) if narrator: for n in self.parse_list(narrator): - contributor = self.parse_contributor( + contributor_data = self.parse_contributor( n, force_role=Contributor.NARRATOR_ROLE ) - contributors.append(contributor) + contributors.append(contributor_data) links = [] description = self.text_of_optional_subtag(element, "axis:annotation", ns) @@ -1042,7 +1084,6 @@ def extract_bibliographic(self, element, ns): identifiers.append(IdentifierData(Identifier.ISBN, isbn)) formats = [] - acceptable = False seen_formats = [] # All of the formats we don't support, like Blio, are ebook @@ -1078,10 +1119,8 @@ def extract_bibliographic(self, element, ns): "Unrecognized Axis format name for %s: %s" % (identifier, informal_name) ) - elif self.DELIVERY_DATA_FOR_AXIS_FORMAT.get(informal_name): - content_type, drm_scheme = self.DELIVERY_DATA_FOR_AXIS_FORMAT[ - informal_name - ] + elif delivery_data := self.DELIVERY_DATA_FOR_AXIS_FORMAT.get(informal_name): + content_type, drm_scheme = delivery_data formats.append( FormatData(content_type=content_type, drm_scheme=drm_scheme) ) @@ -1133,7 +1172,9 @@ def extract_bibliographic(self, element, ns): metadata.circulation = circulationdata return metadata - def process_one(self, element, ns): + def process_one( + self, element: _Element, ns: Optional[Dict[str, str]] + ) -> Tuple[Optional[Metadata], Optional[CirculationData]]: if self.include_bibliographic: bibliographic = self.extract_bibliographic(element, ns) else: @@ -1153,13 +1194,13 @@ 
def process_one(self, element, ns): return bibliographic, availability -class ResponseParser(Axis360Parser): +class ResponseParser: id_type = Identifier.AXIS_360_ID SERVICE_NAME = "Axis 360" # Map Axis 360 error codes to our circulation exceptions. - code_to_exception = { + code_to_exception: Mapping[int | Tuple[int, str], Type[IntegrationException]] = { 315: InvalidInputException, # Bad password 316: InvalidInputException, # DRM account already exists 1000: PatronAuthorizationFailedException, @@ -1214,7 +1255,46 @@ class ResponseParser(Axis360Parser): 5004: LibraryInvalidInputException, # Missing TransactionID } - def __init__(self, collection): + @classmethod + def _raise_exception_on_error( + cls, + code: str | int, + message: str, + custom_error_classes: Optional[ + Mapping[int | Tuple[int, str], Type[IntegrationException]] + ] = None, + ignore_error_codes=None, + ) -> Tuple[int, str]: + try: + code = int(code) + except ValueError: + # Non-numeric code? Inconceivable! + raise RemoteInitiatedServerError( + "Invalid response code from Axis 360: %s" % code, cls.SERVICE_NAME + ) + + if ignore_error_codes and code in ignore_error_codes: + return code, message + + if custom_error_classes is None: + custom_error_classes = {} + for d in custom_error_classes, cls.code_to_exception: + if (code, message) in d: + raise d[(code, message)] + elif code in d: + # Something went wrong and we know how to turn it into a + # specific exception. + error_class = d[code] + if error_class is RemoteInitiatedServerError: + e = error_class(message, cls.SERVICE_NAME) + else: + e = error_class(message) + raise e + return code, message + + +class XMLResponseParser(ResponseParser, Axis360Parser[T], ABC): + def __init__(self, collection: Collection): """Constructor. :param collection: A Collection, in case parsing this document @@ -1223,7 +1303,13 @@ def __init__(self, collection): self.collection = collection def raise_exception_on_error( - self, e, ns, custom_error_classes={}, ignore_error_codes=None + self, + e: _Element, + ns: Optional[Dict[str, str]], + custom_error_classes: Optional[ + Mapping[int | Tuple[int, str], Type[IntegrationException]] + ] = None, + ignore_error_codes: Optional[List[int]] = None, ): """Raise an error if the given lxml node represents an Axis 360 error condition. @@ -1248,53 +1334,28 @@ def raise_exception_on_error( code.text, message, custom_error_classes, ignore_error_codes ) - @classmethod - def _raise_exception_on_error( - cls, code, message, custom_error_classes={}, ignore_error_codes=None - ): - try: - code = int(code) - except ValueError: - # Non-numeric code? Inconcievable! - raise RemoteInitiatedServerError( - "Invalid response code from Axis 360: %s" % code, cls.SERVICE_NAME - ) - if ignore_error_codes and code in ignore_error_codes: - return code, message - - for d in custom_error_classes, cls.code_to_exception: - if (code, message) in d: - raise d[(code, message)] - elif code in d: - # Something went wrong and we know how to turn it into a - # specific exception. 
- error_class = d[code] - if error_class is RemoteInitiatedServerError: - e = error_class(message, cls.SERVICE_NAME) - else: - e = error_class(message) - raise e - return code, message - - -class CheckinResponseParser(ResponseParser): - def process_all(self, string): - for i in super().process_all(string, "//axis:EarlyCheckinRestResult", self.NS): - return i +class CheckinResponseParser(XMLResponseParser[Literal[True]]): + @property + def xpath_expression(self) -> str: + return "//axis:EarlyCheckinRestResult" - def process_one(self, e, namespaces): + def process_one( + self, e: _Element, namespaces: Optional[Dict[str, str]] + ) -> Literal[True]: """Either raise an appropriate exception, or do nothing.""" self.raise_exception_on_error(e, namespaces, ignore_error_codes=[4058]) return True -class CheckoutResponseParser(ResponseParser): - def process_all(self, string): - for i in super().process_all(string, "//axis:checkoutResult", self.NS): - return i +class CheckoutResponseParser(XMLResponseParser[LoanInfo]): + @property + def xpath_expression(self) -> str: + return "//axis:checkoutResult" - def process_one(self, e, namespaces): + def process_one( + self, e: _Element, namespaces: Optional[Dict[str, str]] + ) -> LoanInfo: """Either turn the given document into a LoanInfo object, or raise an appropriate exception. """ @@ -1322,12 +1383,14 @@ def process_one(self, e, namespaces): return loan -class HoldResponseParser(ResponseParser): - def process_all(self, string): - for i in super().process_all(string, "//axis:addtoholdResult", self.NS): - return i +class HoldResponseParser(XMLResponseParser[HoldInfo]): + @property + def xpath_expression(self) -> str: + return "//axis:addtoholdResult" - def process_one(self, e, namespaces): + def process_one( + self, e: _Element, namespaces: Optional[Dict[str, str]] + ) -> HoldInfo: """Either turn the given document into a HoldInfo object, or raise an appropriate exception. """ @@ -1359,26 +1422,24 @@ def process_one(self, e, namespaces): return hold -class HoldReleaseResponseParser(ResponseParser): - def process_all(self, string): - for i in super().process_all(string, "//axis:removeholdResult", self.NS): - return i - - def post_process(self, i): - r"""Unlike other ResponseParser subclasses, we don't return any type of - \*Info object, so there's no need to do any post-processing. - """ - return i +class HoldReleaseResponseParser(XMLResponseParser[Literal[True]]): + @property + def xpath_expression(self) -> str: + return "//axis:removeholdResult" - def process_one(self, e, namespaces): + def process_one( + self, e: _Element, namespaces: Optional[Dict[str, str]] + ) -> Literal[True]: # There's no data to gather here. Either there was an error # or we were successful. self.raise_exception_on_error(e, namespaces, {3109: NotOnHold}) return True -class AvailabilityResponseParser(ResponseParser): - def __init__(self, api, internal_format=None): +class AvailabilityResponseParser( + XMLResponseParser[Optional[Union[LoanInfo, HoldInfo]]] +): + def __init__(self, api: Axis360API, internal_format: Optional[str] = None) -> None: """Constructor. :param api: An Axis360API instance, in case the parsing of an @@ -1391,16 +1452,19 @@ def __init__(self, api, internal_format=None): """ self.api = api self.internal_format = internal_format + if api.collection is None: + raise ValueError( + "Cannot use an Axis360AvailabilityResponseParser without a Collection." 
+ ) super().__init__(api.collection) - def process_all(self, string): - for info in super().process_all(string, "//axis:title", self.NS): - # Filter out books where nothing in particular is - # happening. - if info: - yield info + @property + def xpath_expression(self) -> str: + return "//axis:title" - def process_one(self, e, ns): + def process_one( + self, e: _Element, ns: Optional[Dict[str, str]] + ) -> Optional[Union[LoanInfo, HoldInfo]]: # Figure out which book we're talking about. axis_identifier = self.text_of_subtag(e, "axis:titleId", ns) availability = self._xpath1(e, "axis:availability", ns) @@ -1410,7 +1474,7 @@ def process_one(self, e, ns): checked_out = self._xpath1_boolean(availability, "axis:isCheckedout", ns) on_hold = self._xpath1_boolean(availability, "axis:isInHoldQueue", ns) - info = None + info: Optional[Union[LoanInfo, HoldInfo]] = None if checked_out: start_date = self._xpath1_date(availability, "axis:checkoutStartDate", ns) end_date = self._xpath1_date(availability, "axis:checkoutEndDate", ns) @@ -1430,6 +1494,7 @@ def process_one(self, e, ns): identifier=axis_identifier, ) + fulfillment: Optional[FulfillmentInfo] if download_url and self.internal_format != self.api.AXISNOW: # The patron wants a direct link to the book, which we can deliver # immediately, without making any more API requests. @@ -1499,7 +1564,7 @@ def process_one(self, e, ns): return info -class JSONResponseParser(ResponseParser): +class JSONResponseParser(Generic[T], ResponseParser, ABC): """Most ResponseParsers parse XML documents; subclasses of JSONResponseParser parse JSON documents. @@ -1508,7 +1573,7 @@ class JSONResponseParser(ResponseParser): """ @classmethod - def _required_key(cls, key, json_obj): + def _required_key(cls, key: str, json_obj: Optional[Mapping[str, Any]]) -> Any: """Raise an exception if the given key is not present in the given object. """ @@ -1524,20 +1589,20 @@ def _required_key(cls, key, json_obj): return json_obj[key] @classmethod - def verify_status_code(cls, parsed): + def verify_status_code(cls, parsed: Optional[Mapping[str, Any]]) -> None: """Assert that the incoming JSON document represents a successful response. """ k = cls._required_key status = k("Status", parsed) - code = k("Code", status) + code: int = k("Code", status) message = status.get("Message") # If the document describes an error condition, raise # an appropriate exception immediately. cls._raise_exception_on_error(code, message) - def parse(self, data, *args, **kwargs): + def parse(self, data: Union[Dict[str, Any], bytes, str], **kwargs: Any) -> T: """Parse a JSON document.""" if isinstance(data, dict): parsed = data # already parsed @@ -1547,36 +1612,45 @@ def parse(self, data, *args, **kwargs): except ValueError as e: # It's not JSON. raise RemoteInitiatedServerError( - "Invalid response from Axis 360 (was expecting JSON): %s" % data, + f"Invalid response from Axis 360 (was expecting JSON): {data!r}", self.SERVICE_NAME, ) # If the response indicates an error condition, don't continue -- # raise an exception immediately. self.verify_status_code(parsed) - return self._parse(parsed, *args, **kwargs) + return self._parse(parsed, **kwargs) - def _parse(self, parsed, *args, **kwargs): + @abstractmethod + def _parse(self, parsed: Dict[str, Any], **kwargs: Any) -> T: """Parse a document we know to represent success on the API level. Called by parse() once the high-level details have been worked out. """ - raise NotImplementedError() + ... 
-class Axis360FulfillmentInfoResponseParser(JSONResponseParser): +class Axis360FulfillmentInfoResponseParser( + JSONResponseParser[ + Tuple[Union[FindawayManifest, "AxisNowManifest"], datetime.datetime] + ] +): """Parse JSON documents into Findaway audiobook manifests or AxisNow manifests.""" - def __init__(self, api): + def __init__(self, api: Axis360API): """Constructor. :param api: An Axis360API instance, in case the parsing of a fulfillment document triggers additional API requests. """ self.api = api - super().__init__(self.api.collection) - def _parse(self, parsed, license_pool): + def _parse( + self, + parsed: Dict[str, Any], + license_pool: Optional[LicensePool] = None, + **kwargs, + ) -> Tuple[Union[FindawayManifest, AxisNowManifest], datetime.datetime]: """Extract all useful information from a parsed FulfillmentInfo response. @@ -1589,9 +1663,13 @@ def _parse(self, parsed, license_pool): :return: A 2-tuple (manifest, expiration_date). `manifest` is either a FindawayManifest (for an audiobook) or an AxisNowManifest (for an ebook). """ + if license_pool is None: + raise TypeError("Must pass in a LicensePool") + expiration_date = self._required_key("ExpirationDate", parsed) expiration_date = self.parse_date(expiration_date) + manifest: Union[FindawayManifest, AxisNowManifest] if "FNDTransactionID" in parsed: manifest = self.parse_findaway(parsed, license_pool) else: @@ -1599,7 +1677,7 @@ def _parse(self, parsed, license_pool): return manifest, expiration_date - def parse_date(self, date): + def parse_date(self, date: str) -> datetime.datetime: if "." in date: # Remove 7(?!) decimal places of precision and # UTC timezone, which are more trouble to parse @@ -1607,14 +1685,16 @@ def parse_date(self, date): date = date[: date.rindex(".")] try: - date = strptime_utc(date, "%Y-%m-%d %H:%M:%S") + date_parsed = strptime_utc(date, "%Y-%m-%d %H:%M:%S") except ValueError: raise RemoteInitiatedServerError( "Could not parse expiration date: %s" % date, self.SERVICE_NAME ) - return date + return date_parsed - def parse_findaway(self, parsed, license_pool): + def parse_findaway( + self, parsed: Dict[str, Any], license_pool: LicensePool + ) -> FindawayManifest: k = self._required_key fulfillmentId = k("FNDContentID", parsed) licenseId = k("FNDLicenseID", parsed) @@ -1623,7 +1703,7 @@ def parse_findaway(self, parsed, license_pool): # Acquire the TOC information metadata_response = self.api.get_audiobook_metadata(fulfillmentId) - parser = AudiobookMetadataParser(self.api.collection) + parser = AudiobookMetadataParser() accountId, spine_items = parser.parse(metadata_response.content) return FindawayManifest( @@ -1636,18 +1716,22 @@ def parse_findaway(self, parsed, license_pool): spine_items=spine_items, ) - def parse_axisnow(self, parsed): + def parse_axisnow(self, parsed: Dict[str, Any]) -> AxisNowManifest: k = self._required_key isbn = k("ISBN", parsed) book_vault_uuid = k("BookVaultUUID", parsed) return AxisNowManifest(book_vault_uuid, isbn) -class AudiobookMetadataParser(JSONResponseParser): +class AudiobookMetadataParser( + JSONResponseParser[Tuple[Optional[str], List[SpineItem]]] +): """Parse the results of Axis 360's audiobook metadata API call.""" @classmethod - def _parse(cls, parsed): + def _parse( + cls, parsed: Dict[str, Any], **kwargs + ) -> Tuple[Optional[str], List[SpineItem]]: spine_items = [] accountId = parsed.get("fndaccountid", None) for item in parsed.get("readingOrder", []): @@ -1674,7 +1758,7 @@ class AxisNowManifest: MEDIA_TYPE = DeliveryMechanism.AXISNOW_DRM - def 
__init__(self, book_vault_uuid, isbn): + def __init__(self, book_vault_uuid: str, isbn: str): """Constructor. :param book_vault_uuid: The UUID of a Book Vault. @@ -1683,7 +1767,7 @@ def __init__(self, book_vault_uuid, isbn): self.book_vault_uuid = book_vault_uuid self.isbn = isbn - def __str__(self): + def __str__(self) -> str: data = dict(isbn=self.isbn, book_vault_uuid=self.book_vault_uuid) return json.dumps(data, sort_keys=True) @@ -1704,7 +1788,7 @@ def do_fetch(self): transaction_id = self.key response = self.api.get_fulfillment_info(transaction_id) parser = Axis360FulfillmentInfoResponseParser(self.api) - manifest, expires = parser.parse(response.content, license_pool) + manifest, expires = parser.parse(response.content, license_pool=license_pool) self._content = str(manifest) self._content_type = manifest.MEDIA_TYPE self._content_expires = expires diff --git a/api/bibliotheca.py b/api/bibliotheca.py index eae9d108a1..8dcdfdb556 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -9,13 +9,14 @@ import re import time import urllib.parse +from abc import ABC from datetime import datetime, timedelta -from io import BytesIO, StringIO -from typing import Dict +from io import BytesIO +from typing import Dict, Generator, List, Tuple, Type, TypeVar, Union import dateutil.parser from flask_babel import lazy_gettext as _ -from lxml import etree +from lxml.etree import _Element from pymarc import parse_xml_to_array from api.circulation import ( @@ -75,7 +76,7 @@ from core.util.datetime_helpers import datetime_utc, strptime_utc, to_utc, utc_now from core.util.http import HTTP from core.util.string_helpers import base64 -from core.util.xmlparser import XMLParser +from core.util.xmlparser import XMLParser, XMLProcessor class BibliothecaSettings(BaseSettings): @@ -293,7 +294,7 @@ def marc_request(self, start, end, offset=1, limit=50): ) response = self.request(url) if response.status_code != 200: - raise ErrorParser().process_all(response.content) + raise ErrorParser().process_first(response.content) yield from parse_xml_to_array(BytesIO(response.content)) def bibliographic_lookup_request(self, identifiers): @@ -324,7 +325,7 @@ def bibliographic_lookup(self, identifiers): identifier_strings.append(i) data = self.bibliographic_lookup_request(identifier_strings) - return [metadata for metadata in self.item_list_parser.parse(data)] + return [metadata for metadata in self.item_list_parser.process_all(data)] def _request_with_timeout(self, method, url, *args, **kwargs): """This will be overridden in MockBibliothecaAPI.""" @@ -441,7 +442,7 @@ def checkout( start_date = None else: # Error condition. - error = ErrorParser().process_all(response.content) + error = ErrorParser().process_first(response.content) if isinstance(error, AlreadyCheckedOut): # It's already checked out. No problem. pass @@ -449,7 +450,7 @@ def checkout( raise error # At this point we know we have a loan. 
- loan_expires = CheckoutResponseParser().process_all(response.content) + loan_expires = CheckoutResponseParser().process_first(response.content) loan = LoanInfo( licensepool.collection, DataSource.BIBLIOTHECA, @@ -544,7 +545,7 @@ def place_hold(self, patron, pin, licensepool, hold_notification_email=None): response_content = response.content.decode("utf-8") if response.status_code in (200, 201): start_date = utc_now() - end_date = HoldResponseParser().process_all(response_content) + end_date = HoldResponseParser().process_first(response_content) return HoldInfo( licensepool.collection, DataSource.BIBLIOTHECA, @@ -557,7 +558,7 @@ def place_hold(self, patron, pin, licensepool, hold_notification_email=None): else: if not response_content: raise CannotHold() - error = ErrorParser().process_all(response_content) + error = ErrorParser().process_first(response_content) if isinstance(error, Exception): raise error else: @@ -643,16 +644,15 @@ def __init__(self, response_code, headers, content): self.content = content -class ItemListParser(XMLParser): +class ItemListParser(XMLProcessor[Metadata]): DATE_FORMAT = "%Y-%m-%d" YEAR_FORMAT = "%Y" - NAMESPACES: Dict[str, str] = {} - unescape_entity_references = html.unescape - def parse(self, xml): - yield from self.process_all(xml, "//Item") + @property + def xpath_expression(self) -> str: + return "//Item" parenthetical = re.compile(r" \([^)]+\)$") @@ -664,8 +664,10 @@ def parse(self, xml): } @classmethod - def contributors_from_string(cls, string, role=Contributor.AUTHOR_ROLE): - contributors = [] + def contributors_from_string( + cls, string: Optional[str], role: str = Contributor.AUTHOR_ROLE + ) -> List[ContributorData]: + contributors: List[ContributorData] = [] if not string: return contributors @@ -682,8 +684,8 @@ def contributors_from_string(cls, string, role=Contributor.AUTHOR_ROLE): return contributors @classmethod - def parse_genre_string(self, s): - genres = [] + def parse_genre_string(self, s: Optional[str]) -> List[SubjectData]: + genres: List[SubjectData] = [] if not s: return genres for i in s.split(","): @@ -705,17 +707,15 @@ def parse_genre_string(self, s): ) return genres - def process_one(self, tag, namespaces): + def process_one( + self, tag: _Element, namespaces: Optional[Dict[str, str]] + ) -> Metadata: """Turn an tag into a Metadata and an encompassed CirculationData objects, and return the Metadata.""" def value(bibliotheca_key): return self.text_of_optional_subtag(tag, bibliotheca_key) - links = dict() - identifiers = dict() - subjects = [] - primary_identifier = IdentifierData(Identifier.BIBLIOTHECA_ID, value("ItemId")) identifiers = [] @@ -868,10 +868,14 @@ def internal_formats(cls, book_format): return medium, [format] -class BibliothecaParser(XMLParser): +T = TypeVar("T") + + +class BibliothecaParser(XMLProcessor[T], ABC): INPUT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S" - def parse_date(self, value): + @classmethod + def parse_date(cls, value): """Parse the string Bibliotheca sends as a date. Usually this is a string in INPUT_TIME_FORMAT, but it might be None. 
@@ -880,7 +884,7 @@ def parse_date(self, value): value = None else: try: - value = strptime_utc(value, self.INPUT_TIME_FORMAT) + value = strptime_utc(value, cls.INPUT_TIME_FORMAT) except ValueError as e: logging.error( 'Unable to parse Bibliotheca date: "%s"', value, exc_info=e @@ -912,7 +916,7 @@ def __str__(self): ) -class ErrorParser(BibliothecaParser): +class ErrorParser(BibliothecaParser[Exception]): """Turn an error document from the Bibliotheca web service into a CheckoutException""" wrong_status = re.compile( @@ -928,23 +932,31 @@ class ErrorParser(BibliothecaParser): "The patron has no eBooks checked out": NotCheckedOut, } - def process_all(self, string): + @property + def xpath_expression(self) -> str: + return "//Error" + + def process_first(self, string: str | bytes) -> Exception: try: - for i in super().process_all(string, "//Error"): - return i + return_val = super().process_first(string) except Exception as e: # The server sent us an error with an incorrect or # nonstandard syntax. return RemoteInitiatedServerError(string, BibliothecaAPI.SERVICE_NAME) - # We were not able to interpret the result as an error. - # The most likely cause is that the Bibliotheca app server is down. - return RemoteInitiatedServerError( - "Unknown error", - BibliothecaAPI.SERVICE_NAME, - ) + if return_val is None: + # We were not able to interpret the result as an error. + # The most likely cause is that the Bibliotheca app server is down. + return RemoteInitiatedServerError( + "Unknown error", + BibliothecaAPI.SERVICE_NAME, + ) + + return return_val - def process_one(self, error_tag, namespaces): + def process_one( + self, error_tag: _Element, namespaces: Optional[Dict[str, str]] + ) -> Exception: message = self.text_of_optional_subtag(error_tag, "Message") if not message: return RemoteInitiatedServerError( @@ -1007,7 +1019,7 @@ def process_one(self, error_tag, namespaces): return BibliothecaException(message) -class PatronCirculationParser(BibliothecaParser): +class PatronCirculationParser(XMLParser): """Parse Bibliotheca's patron circulation status document into a list of LoanInfo and HoldInfo objects. @@ -1015,39 +1027,45 @@ class PatronCirculationParser(BibliothecaParser): id_type = Identifier.BIBLIOTHECA_ID - def __init__(self, collection, *args, **kwargs): - super().__init__(*args, **kwargs) + def __init__(self, collection: Collection) -> None: self.collection = collection - def process_all(self, string): - parser = etree.XMLParser() - # If the data is an HTTP response, it is a bytestring and - # must be converted before it is parsed. 
- if isinstance(string, bytes): - string = string.decode("utf-8") - root = etree.parse(StringIO(string), parser) - sup = super() - loans = sup.process_all(root, "//Checkouts/Item", handler=self.process_one_loan) - holds = sup.process_all(root, "//Holds/Item", handler=self.process_one_hold) - reserves = sup.process_all( - root, "//Reserves/Item", handler=self.process_one_reserve + def process_all( + self, string: bytes | str + ) -> itertools.chain[Union[LoanInfo, HoldInfo]]: + xml = self._load_xml(string) + loans = self._process_all( + xml, "//Checkouts/Item", namespaces={}, handler=self.process_one_loan ) + holds = self._process_all( + xml, "//Holds/Item", namespaces={}, handler=self.process_one_hold + ) + reserves = self._process_all( + xml, "//Reserves/Item", namespaces={}, handler=self.process_one_reserve + ) + return itertools.chain(loans, holds, reserves) - everything = itertools.chain(loans, holds, reserves) - return [x for x in everything if x] - - def process_one_loan(self, tag, namespaces): + def process_one_loan( + self, tag: _Element, namespaces: Dict[str, str] + ) -> Optional[LoanInfo]: return self.process_one(tag, namespaces, LoanInfo) - def process_one_hold(self, tag, namespaces): + def process_one_hold( + self, tag: _Element, namespaces: Dict[str, str] + ) -> Optional[HoldInfo]: return self.process_one(tag, namespaces, HoldInfo) - def process_one_reserve(self, tag, namespaces): + def process_one_reserve( + self, tag: _Element, namespaces: Dict[str, str] + ) -> Optional[HoldInfo]: hold_info = self.process_one(tag, namespaces, HoldInfo) - hold_info.hold_position = 0 + if hold_info is not None: + hold_info.hold_position = 0 return hold_info - def process_one(self, tag, namespaces, source_class): + def process_one( + self, tag: _Element, namespaces: Dict[str, str], source_class: Type[T] + ) -> Optional[T]: if not tag.xpath("ItemId"): # This happens for events associated with books # no longer in our collection. @@ -1077,23 +1095,20 @@ def datevalue(key): return source_class(*a) -class DateResponseParser(BibliothecaParser): +class DateResponseParser(BibliothecaParser[Optional[datetime]], ABC): """Extract a date from a response.""" RESULT_TAG_NAME: Optional[str] = None DATE_TAG_NAME: Optional[str] = None - def process_all(self, string): - parser = etree.XMLParser() - # If the data is an HTTP response, it is a bytestring and - # must be converted before it is parsed. 
- if isinstance(string, bytes): - string = string.decode("utf-8") - root = etree.parse(StringIO(string), parser) - m = root.xpath(f"/{self.RESULT_TAG_NAME}/{self.DATE_TAG_NAME}") - if not m: - return None - due_date = m[0].text + @property + def xpath_expression(self) -> str: + return f"/{self.RESULT_TAG_NAME}/{self.DATE_TAG_NAME}" + + def process_one( + self, tag: _Element, namespaces: Optional[Dict[str, str]] + ) -> Optional[datetime]: + due_date = tag.text if not due_date: return None return strptime_utc(due_date, EventParser.INPUT_TIME_FORMAT) @@ -1103,16 +1118,18 @@ class CheckoutResponseParser(DateResponseParser): """Extract due date from a checkout response.""" - RESULT_TAG_NAME = "CheckoutResult" - DATE_TAG_NAME = "DueDateInUTC" + @property + def xpath_expression(self) -> str: + return f"/CheckoutResult/DueDateInUTC" class HoldResponseParser(DateResponseParser): """Extract availability date from a hold response.""" - RESULT_TAG_NAME = "PlaceHoldResult" - DATE_TAG_NAME = "AvailabilityDateInUTC" + @property + def xpath_expression(self) -> str: + return f"/PlaceHoldResult/AvailabilityDateInUTC" class EventParser(BibliothecaParser): @@ -1133,9 +1150,17 @@ class EventParser(BibliothecaParser): "REMOVED": CirculationEvent.DISTRIBUTOR_LICENSE_REMOVE, } - def process_all(self, string, no_events_error=False): + @property + def xpath_expression(self) -> str: + return "//CloudLibraryEvent" + + def process_all( + self, string: bytes | str, no_events_error=False + ) -> Generator[ + Tuple[str, str, Optional[str], datetime, Optional[datetime], str], None, None + ]: has_events = False - for i in super().process_all(string, "//CloudLibraryEvent"): + for i in super().process_all(string): yield i has_events = True @@ -1154,7 +1179,9 @@ def process_all(self, string, no_events_error=False): BibliothecaAPI.SERVICE_NAME, ) - def process_one(self, tag, namespaces): + def process_one( + self, tag: _Element, namespaces: Optional[Dict[str, str]] + ) -> Tuple[str, str, Optional[str], datetime, Optional[datetime], str]: isbn = self.text_of_subtag(tag, "ISBN") bibliotheca_id = self.text_of_subtag(tag, "ItemId") patron_id = self.text_of_optional_subtag(tag, "PatronId") diff --git a/api/millenium_patron.py b/api/millenium_patron.py index 36d9517f1a..552b9ff3d3 100644 --- a/api/millenium_patron.py +++ b/api/millenium_patron.py @@ -27,7 +27,6 @@ from core.util import MoneyUtility from core.util.datetime_helpers import datetime_utc, utc_now from core.util.http import HTTP -from core.util.xmlparser import XMLParser class NeighborhoodMode(Enum): @@ -167,7 +166,7 @@ class MilleniumPatronLibrarySettings(BasicAuthProviderLibrarySettings): ) -class MilleniumPatronAPI(BasicAuthenticationProvider, XMLParser): +class MilleniumPatronAPI(BasicAuthenticationProvider): @classmethod def label(cls) -> str: return "Millenium" diff --git a/core/util/xmlparser.py b/core/util/xmlparser.py index 2f3f998649..d14fb7c7ba 100644 --- a/core/util/xmlparser.py +++ b/core/util/xmlparser.py @@ -1,13 +1,22 @@ from __future__ import annotations +from abc import ABC, abstractmethod from io import BytesIO -from typing import TYPE_CHECKING, Dict, List, Optional, TypeVar +from typing import ( + TYPE_CHECKING, + Callable, + Dict, + Generator, + Generic, + List, + Optional, + TypeVar, +) from lxml import etree if TYPE_CHECKING: - from lxml.etree import Element - + from lxml.etree import _Element, _ElementTree T = TypeVar("T") @@ -20,17 +29,17 @@ class XMLParser: @classmethod def _xpath( - cls, tag: Element, expression: str, namespaces: 
Optional[Dict[str, str]] = None - ) -> List[Element]: + cls, tag: _Element, expression: str, namespaces: Optional[Dict[str, str]] = None + ) -> List[_Element]: if not namespaces: namespaces = cls.NAMESPACES """Wrapper to do a namespaced XPath expression.""" - return tag.xpath(expression, namespaces=namespaces) + return tag.xpath(expression, namespaces=namespaces) # type: ignore[no-any-return] @classmethod def _xpath1( - cls, tag: Element, expression: str, namespaces: Optional[Dict[str, str]] = None - ) -> Optional[Element]: + cls, tag: _Element, expression: str, namespaces: Optional[Dict[str, str]] = None + ) -> Optional[_Element]: """Wrapper to do a namespaced XPath expression.""" values = cls._xpath(tag, expression, namespaces=namespaces) if not values: @@ -45,7 +54,7 @@ def _cls(self, tag_name: str, class_name: str) -> str: ) def text_of_optional_subtag( - self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None ) -> Optional[str]: tag = self._xpath1(tag, name, namespaces=namespaces) if tag is None or tag.text is None: @@ -54,28 +63,31 @@ def text_of_optional_subtag( return str(tag.text) def text_of_subtag( - self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None ) -> str: return str(tag.xpath(name, namespaces=namespaces)[0].text) def int_of_subtag( - self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None ) -> int: return int(self.text_of_subtag(tag, name, namespaces=namespaces)) def int_of_optional_subtag( - self, tag: Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None ) -> Optional[int]: v = self.text_of_optional_subtag(tag, name, namespaces=namespaces) if not v: return None return int(v) - def process_all(self, xml, xpath, namespaces=None, handler=None, parser=None): - if not parser: - parser = etree.XMLParser(recover=True) - if not handler: - handler = self.process_one + @staticmethod + def _load_xml( + xml: str | bytes | _ElementTree, + ) -> _ElementTree: + """ + Load an XML document from string or bytes and handle the case where + the document has already been parsed. + """ if isinstance(xml, str): xml = xml.encode("utf8") @@ -85,14 +97,77 @@ def process_all(self, xml, xpath, namespaces=None, handler=None, parser=None): # encounters the null character. Remove that character # immediately and XMLParser will handle the rest. xml = xml.replace(b"\x00", b"") - root = etree.parse(BytesIO(xml), parser) - else: - root = xml + parser = etree.XMLParser(recover=True) + return etree.parse(BytesIO(xml), parser) - for i in root.xpath(xpath, namespaces=namespaces): + else: + return xml + + @staticmethod + def _process_all( + xml: _ElementTree, + xpath_expression: str, + namespaces: Dict[str, str], + handler: Callable[[_Element, Dict[str, str]], Optional[T]], + ) -> Generator[T, None, None]: + """ + Process all elements matching the given XPath expression. Calling + the given handler function on each element and yielding the result + if it is not None. 
+ """ + for i in xml.xpath(xpath_expression, namespaces=namespaces): data = handler(i, namespaces) if data is not None: yield data - def process_one(self, tag, namespaces): + +class XMLProcessor(XMLParser, Generic[T], ABC): + """ + A class that simplifies making a class that processes XML documents. + It loads the XML document, runs an XPath expression to find all matching + elements, and calls the process_one function on each element. + """ + + def process_all( + self, + xml: str | bytes | _ElementTree, + ) -> Generator[T, None, None]: + """ + Process all elements matching the given XPath expression. Calling + process_one on each element and yielding the result if it is not None. + """ + root = self._load_xml(xml) + return self._process_all( + root, self.xpath_expression, self.NAMESPACES, self.process_one + ) + + def process_first( + self, + xml: str | bytes | _ElementTree, + ) -> Optional[T]: + """ + Process the first element matching the given XPath expression. Calling + process_one on the element and returning None if no elements match or + if process_one returns None. + """ + for i in self.process_all(xml): + return i return None + + @property + @abstractmethod + def xpath_expression(self) -> str: + """ + The xpath expression to use to find elements to process. + """ + ... + + @abstractmethod + def process_one( + self, tag: _Element, namespaces: Optional[Dict[str, str]] + ) -> Optional[T]: + """ + Process one element and return the result. Return None if the element + should be ignored. + """ + ... diff --git a/pyproject.toml b/pyproject.toml index 95ab13f327..2aa547f033 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,7 @@ module = [ "core.util.authentication_for_opds", "core.util.cache", "core.util.problem_detail", + "core.util.xmlparser", "tests.fixtures.authenticator", "tests.migration.*", ] diff --git a/tests/api/test_axis.py b/tests/api/test_axis.py index 83fa987400..5583df4ba1 100644 --- a/tests/api/test_axis.py +++ b/tests/api/test_axis.py @@ -996,8 +996,12 @@ def test_bibliographic_parser(self, axis360: Axis360Fixture): [bib1, av1], [bib2, av2] = BibliographicParser(False, True).process_all(data) # We didn't ask for availability information, so none was provided. - assert None == av1 - assert None == av2 + assert av1 is None + assert av2 is None + + # But we did get bibliographic information. + assert bib1 is not None + assert bib2 is not None assert "Faith of My Fathers : A Family Memoir" == bib1.title assert "eng" == bib1.language @@ -1116,6 +1120,9 @@ def test_bibliographic_parser_audiobook(self, axis360: Axis360Fixture): data = axis360.sample_data("availability_with_audiobook_fulfillment.xml") [[bib, av]] = BibliographicParser(False, True).process_all(data) + assert av is None + assert bib is not None + assert "Back Spin" == bib.title assert Edition.AUDIO_MEDIUM == bib.medium @@ -1137,6 +1144,8 @@ def test_bibliographic_parser_blio_format(self, axis360: Axis360Fixture): data = data.replace(b"AxisNow", b"No Such Format") [[bib, av]] = BibliographicParser(False, True).process_all(data) + assert av is None + assert bib is not None # A book in Blio format is treated as an AxisNow ebook. assert Edition.BOOK_MEDIUM == bib.medium @@ -1152,6 +1161,8 @@ def test_bibliographic_parser_blio_and_axisnow_format( data = data.replace(b"Acoustik", b"Blio") [[bib, av]] = BibliographicParser(False, True).process_all(data) + assert av is None + assert bib is not None # There is only one FormatData -- 'Blio' and 'AxisNow' mean the same thing. 
assert Edition.BOOK_MEDIUM == bib.medium @@ -1165,6 +1176,8 @@ def test_bibliographic_parser_unsupported_format(self, axis360: Axis360Fixture): data = data.replace(b"AxisNow", b"No Such Format 2") [[bib, av]] = BibliographicParser(False, True).process_all(data) + assert av is None + assert bib is not None # We don't support any of the formats, so no FormatData objects were created. assert [] == bib.circulation.formats @@ -1211,8 +1224,12 @@ def test_availability_parser(self, axis360: Axis360Fixture): [bib1, av1], [bib2, av2] = BibliographicParser(True, False).process_all(data) # We didn't ask for bibliographic information, so none was provided. - assert None == bib1 - assert None == bib2 + assert bib1 is None + assert bib2 is None + + # But we did get availability information. + assert av1 is not None + assert av2 is not None assert "0003642860" == av1.primary_identifier(axis360.db.session).identifier assert 9 == av1.licenses_owned @@ -1243,9 +1260,9 @@ def axis360parsers( class TestRaiseExceptionOnError: def test_internal_server_error(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("internal_server_error.xml") - parser = HoldReleaseResponseParser(None) + parser = HoldReleaseResponseParser(MagicMock()) with pytest.raises(RemoteInitiatedServerError) as excinfo: - parser.process_all(data) + parser.process_first(data) assert "Internal Server Error" in str(excinfo.value) def test_ignore_error_codes(self, axis360parsers: Axis360FixturePlusParsers): @@ -1262,21 +1279,21 @@ def process_one(self, e, namespaces): # Unlike in test_internal_server_error, no exception is # raised, because we told the parser to ignore this particular # error code. - parser = IgnoreISE(None) - assert retval == parser.process_all(data) + parser = IgnoreISE(MagicMock()) + assert retval == parser.process_first(data) def test_internal_server_error2(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("invalid_error_code.xml") - parser = HoldReleaseResponseParser(None) + parser = HoldReleaseResponseParser(MagicMock()) with pytest.raises(RemoteInitiatedServerError) as excinfo: - parser.process_all(data) + parser.process_first(data) assert "Invalid response code from Axis 360: abcd" in str(excinfo.value) def test_missing_error_code(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("missing_error_code.xml") - parser = HoldReleaseResponseParser(None) + parser = HoldReleaseResponseParser(MagicMock()) with pytest.raises(RemoteInitiatedServerError) as excinfo: - parser.process_all(data) + parser.process_first(data) assert "No status code!" 
in str(excinfo.value) @@ -1289,20 +1306,20 @@ def test_parse_checkin_success(self, axis360parsers: Axis360FixturePlusParsers): for filename in ("checkin_success.xml", "checkin_not_checked_out.xml"): data = axis360parsers.sample_data(filename) parser = CheckinResponseParser(axis360parsers.default_collection) - parsed = parser.process_all(data) - assert parsed == True + parsed = parser.process_first(data) + assert parsed is True def test_parse_checkin_failure(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("checkin_failure.xml") parser = CheckinResponseParser(axis360parsers.default_collection) - pytest.raises(NotFoundOnRemote, parser.process_all, data) + pytest.raises(NotFoundOnRemote, parser.process_first, data) class TestCheckoutResponseParser: def test_parse_checkout_success(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("checkout_success.xml") parser = CheckoutResponseParser(axis360parsers.default_collection) - parsed = parser.process_all(data) + parsed = parser.process_first(data) assert isinstance(parsed, LoanInfo) assert axis360parsers.default_collection.id == parsed.collection_id assert DataSource.AXIS_360 == parsed.data_source_name @@ -1316,20 +1333,20 @@ def test_parse_checkout_success(self, axis360parsers: Axis360FixturePlusParsers) def test_parse_already_checked_out(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("already_checked_out.xml") - parser = CheckoutResponseParser(None) - pytest.raises(AlreadyCheckedOut, parser.process_all, data) + parser = CheckoutResponseParser(MagicMock()) + pytest.raises(AlreadyCheckedOut, parser.process_first, data) def test_parse_not_found_on_remote(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("not_found_on_remote.xml") - parser = CheckoutResponseParser(None) - pytest.raises(NotFoundOnRemote, parser.process_all, data) + parser = CheckoutResponseParser(MagicMock()) + pytest.raises(NotFoundOnRemote, parser.process_first, data) class TestHoldResponseParser: def test_parse_hold_success(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("place_hold_success.xml") parser = HoldResponseParser(axis360parsers.default_collection) - parsed = parser.process_all(data) + parsed = parser.process_first(data) assert isinstance(parsed, HoldInfo) assert 1 == parsed.hold_position @@ -1339,20 +1356,20 @@ def test_parse_hold_success(self, axis360parsers: Axis360FixturePlusParsers): def test_parse_already_on_hold(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("already_on_hold.xml") - parser = HoldResponseParser(None) - pytest.raises(AlreadyOnHold, parser.process_all, data) + parser = HoldResponseParser(MagicMock()) + pytest.raises(AlreadyOnHold, parser.process_first, data) class TestHoldReleaseResponseParser: def test_success(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("release_hold_success.xml") - parser = HoldReleaseResponseParser(None) - assert True == parser.process_all(data) + parser = HoldReleaseResponseParser(MagicMock()) + assert True == parser.process_first(data) def test_failure(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("release_hold_failure.xml") - parser = HoldReleaseResponseParser(None) - pytest.raises(NotOnHold, parser.process_all, data) + parser = HoldReleaseResponseParser(MagicMock()) + pytest.raises(NotOnHold, parser.process_first, data) class 
TestAvailabilityResponseParser: @@ -1365,16 +1382,22 @@ def test_parse_loan_and_hold(self, axis360parsers: Axis360FixturePlusParsers): data = axis360parsers.sample_data("availability_with_loan_and_hold.xml") parser = AvailabilityResponseParser(axis360parsers.api) activity = list(parser.process_all(data)) - hold, loan, reserved = sorted(activity, key=lambda x: x.identifier) + hold, loan, reserved = sorted( + activity, key=lambda x: "" if x is None else str(x.identifier) + ) + assert isinstance(hold, HoldInfo) + assert isinstance(loan, LoanInfo) + assert isinstance(reserved, HoldInfo) assert axis360parsers.api.collection is not None assert axis360parsers.api.collection.id == hold.collection_id assert Identifier.AXIS_360_ID == hold.identifier_type assert "0012533119" == hold.identifier assert 1 == hold.hold_position - assert None == hold.end_date + assert hold.end_date is None assert axis360parsers.api.collection.id == loan.collection_id assert "0015176429" == loan.identifier + assert loan.fulfillment_info is not None assert "http://fulfillment/" == loan.fulfillment_info.content_link assert datetime_utc(2015, 8, 12, 17, 40, 27) == loan.end_date @@ -1389,6 +1412,7 @@ def test_parse_loan_no_availability( data = axis360parsers.sample_data("availability_without_fulfillment.xml") parser = AvailabilityResponseParser(axis360parsers.api) [loan] = list(parser.process_all(data)) + assert isinstance(loan, LoanInfo) assert axis360parsers.api.collection is not None assert axis360parsers.api.collection.id == loan.collection_id @@ -1402,6 +1426,7 @@ def test_parse_audiobook_availability( data = axis360parsers.sample_data("availability_with_audiobook_fulfillment.xml") parser = AvailabilityResponseParser(axis360parsers.api) [loan] = list(parser.process_all(data)) + assert isinstance(loan, LoanInfo) fulfillment = loan.fulfillment_info assert isinstance(fulfillment, Axis360FulfillmentInfo) @@ -1422,6 +1447,7 @@ def test_parse_ebook_availability(self, axis360parsers: Axis360FixturePlusParser # First, ask for an ePub. epub_parser = AvailabilityResponseParser(axis360parsers.api, "ePub") [availability] = list(epub_parser.process_all(data)) + assert isinstance(availability, LoanInfo) fulfillment = availability.fulfillment_info # This particular file has a downloadUrl ready to go, so we @@ -1442,6 +1468,7 @@ def test_parse_ebook_availability(self, axis360parsers: Axis360FixturePlusParser axis360parsers.api, axis360parsers.api.AXISNOW ) [availability] = list(axisnow_parser.process_all(data)) + assert isinstance(availability, LoanInfo) fulfillment = availability.fulfillment_info assert isinstance(fulfillment, Axis360FulfillmentInfo) assert "6670197A-D264-447A-86C7-E4CB829C0236" == fulfillment.key @@ -1494,36 +1521,36 @@ def test_verify_status_code(self): def test_parse(self): class Mock(JSONResponseParser): - def _parse(self, parsed, *args, **kwargs): - self.called_with = parsed, args, kwargs + def _parse(self, parsed, **kwargs): + self.called_with = parsed, kwargs return "success" - parser = Mock(object()) + parser = Mock() # Test success. doc = dict(Status=dict(Code=0000)) # The JSON will be parsed and passed in to _parse(); all other - # arguments to parse() will be passed through to _parse(). - result = parser.parse(json.dumps(doc), "value1", arg2="value2") + # keyword arguments to parse() will be passed through to _parse(). 
+ result = parser.parse(json.dumps(doc), arg2="value2") assert "success" == result - assert (doc, ("value1",), dict(arg2="value2")) == parser.called_with + assert (doc, dict(arg2="value2")) == parser.called_with # It also works if the JSON was already parsed. - result = parser.parse(doc, "new_value") - assert (doc, ("new_value",), {}) == parser.called_with + result = parser.parse(doc, foo="bar") + assert (doc, {"foo": "bar"}) == parser.called_with # Non-JSON input causes an error. with pytest.raises(RemoteInitiatedServerError) as excinfo: parser.parse("I'm not JSON") assert ( - "Invalid response from Axis 360 (was expecting JSON): I'm not JSON" + 'Invalid response from Axis 360 (was expecting JSON): "I\'m not JSON"' in str(excinfo.value) ) class TestAxis360FulfillmentInfoResponseParser: - def test__parse_findaway(self, axis360parsers: Axis360FixturePlusParsers): + def test__parse_findaway(self, axis360parsers: Axis360FixturePlusParsers) -> None: # _parse will create a valid FindawayManifest given a # complete document. @@ -1550,7 +1577,7 @@ def get_data(): audiobook_metadata = axis360parsers.sample_data("audiobook_metadata.json") axis360parsers.api.queue_response(200, {}, audiobook_metadata) - manifest, expires = m(data, pool) + manifest, expires = m(data, license_pool=pool) assert isinstance(manifest, FindawayManifest) metadata = manifest.metadata @@ -1589,17 +1616,17 @@ def get_data(): missing_field = get_data() del missing_field[field] with pytest.raises(RemoteInitiatedServerError) as excinfo: - m(missing_field, pool) + m(missing_field, license_pool=pool) assert "Required key %s not present" % field in str(excinfo.value) # Try with a bad expiration date. bad_date = get_data() bad_date["ExpirationDate"] = "not-a-date" with pytest.raises(RemoteInitiatedServerError) as excinfo: - m(bad_date, pool) + m(bad_date, license_pool=pool) assert "Could not parse expiration date: not-a-date" in str(excinfo.value) - def test__parse_axisnow(self, axis360parsers: Axis360FixturePlusParsers): + def test__parse_axisnow(self, axis360parsers: Axis360FixturePlusParsers) -> None: # _parse will create a valid AxisNowManifest given a # complete document. @@ -1620,7 +1647,7 @@ def get_data(): # Since this is an ebook, not an audiobook, there will be no # second request to the API, the way there is in the audiobook # test. - manifest, expires = m(data, pool) + manifest, expires = m(data, license_pool=pool) assert isinstance(manifest, AxisNowManifest) assert { @@ -1632,7 +1659,7 @@ def get_data(): bad_date = get_data() bad_date["ExpirationDate"] = "not-a-date" with pytest.raises(RemoteInitiatedServerError) as excinfo: - m(bad_date, pool) + m(bad_date, license_pool=pool) assert "Could not parse expiration date: not-a-date" in str(excinfo.value) @@ -1648,13 +1675,13 @@ def _extract_spine_item(cls, part): metadata = dict( fndaccountid="An account ID", readingOrder=["Spine item 1", "Spine item 2"] ) - account_id, spine_items = Mock(None)._parse(metadata) + account_id, spine_items = Mock()._parse(metadata) assert "An account ID" == account_id assert ["Spine item 1 (extracted)", "Spine item 2 (extracted)"] == spine_items # No data? Nothing will be parsed. 
- account_id, spine_items = Mock(None)._parse({}) + account_id, spine_items = Mock()._parse({}) assert None == account_id assert [] == spine_items diff --git a/tests/api/test_bibliotheca.py b/tests/api/test_bibliotheca.py index f42d691f25..491b1e1407 100644 --- a/tests/api/test_bibliotheca.py +++ b/tests/api/test_bibliotheca.py @@ -4,7 +4,7 @@ import random from datetime import datetime, timedelta from io import BytesIO, StringIO -from typing import TYPE_CHECKING, Any, ClassVar, Optional, Protocol, runtime_checkable +from typing import TYPE_CHECKING, ClassVar, Optional, Protocol, Type, runtime_checkable from unittest import mock from unittest.mock import MagicMock, create_autospec @@ -30,6 +30,7 @@ AlreadyCheckedOut, AlreadyOnHold, CannotHold, + CirculationException, CurrentlyAvailable, NoAvailableCopies, NoLicenses, @@ -241,7 +242,7 @@ def test_bibliographic_lookup(self, bibliotheca_fixture: BibliothecaAPITestFixtu db = bibliotheca_fixture.db class MockItemListParser: - def parse(self, data): + def process_all(self, data): self.parse_called_with = data yield "item1" yield "item2" @@ -792,12 +793,11 @@ def test_circulation_sweep_discovers_work( class TestBibliothecaParser: def test_parse_date(self, bibliotheca_fixture: BibliothecaAPITestFixture): - parser = BibliothecaParser() - v = parser.parse_date("2016-01-02T12:34:56") - assert datetime_utc(2016, 1, 2, 12, 34, 56) == v + v = BibliothecaParser.parse_date("2016-01-02T12:34:56") + assert v == datetime_utc(2016, 1, 2, 12, 34, 56) - assert None == parser.parse_date(None) - assert None == parser.parse_date("Some weird value") + assert BibliothecaParser.parse_date(None) is None + assert BibliothecaParser.parse_date("Some weird value") is None class TestEventParser: @@ -837,7 +837,7 @@ class TestPatronCirculationParser: def test_parse(self, bibliotheca_fixture: BibliothecaAPITestFixture): data = bibliotheca_fixture.files.sample_data("checkouts.xml") collection = bibliotheca_fixture.collection - loans_and_holds = PatronCirculationParser(collection).process_all(data) + loans_and_holds = list(PatronCirculationParser(collection).process_all(data)) loans = [x for x in loans_and_holds if isinstance(x, LoanInfo)] holds = [x for x in loans_and_holds if isinstance(x, HoldInfo)] assert 2 == len(loans) @@ -876,7 +876,7 @@ def test_parse(self, bibliotheca_fixture: BibliothecaAPITestFixture): class TestCheckoutResponseParser: def test_parse(self, bibliotheca_fixture: BibliothecaAPITestFixture): data = bibliotheca_fixture.files.sample_data("successful_checkout.xml") - due_date = CheckoutResponseParser().process_all(data) + due_date = CheckoutResponseParser().process_first(data) assert datetime_utc(2015, 4, 16, 0, 32, 36) == due_date @@ -974,7 +974,7 @@ def as_problem_detail_document(self, debug=False) -> ProblemDetail: def test_exception( self, incoming_message: str, - error_class: Any, + error_class: Type[CirculationException], error_code: int, problem_detail_title: str, problem_detail_code: int, @@ -982,7 +982,7 @@ def test_exception( document = self.BIBLIOTHECA_ERROR_RESPONSE_BODY_TEMPLATE.format( message=incoming_message ) - error = ErrorParser().process_all(document) + error = ErrorParser().process_first(document) assert isinstance(error, error_class) assert incoming_message == str(error) assert error_code == error.status_code @@ -1039,14 +1039,15 @@ def test_remote_initiated_server_error( incoming_message = api_bibliotheca_files_fixture.files().sample_text( incoming_message_from_file ) - error = ErrorParser().process_all(incoming_message) - problem = 
error.as_problem_detail_document()
-
+        assert incoming_message is not None
+        error = ErrorParser().process_first(incoming_message)
         assert isinstance(error, RemoteInitiatedServerError)
+
         assert BibliothecaAPI.SERVICE_NAME == error.service_name
         assert 502 == error.status_code
         assert error_string == str(error)
+
+        problem = error.as_problem_detail_document()
         assert 502 == problem.status_code
         assert "Integration error communicating with Bibliotheca" == problem.detail
         assert "Third-party service failed." == problem.title
@@ -1886,7 +1887,7 @@ def f(genre_string):

     def test_item_list(self, bibliotheca_fixture: BibliothecaAPITestFixture):
         data = bibliotheca_fixture.files.sample_data("item_metadata_list_mini.xml")
-        data_parsed = list(ItemListParser().parse(data))
+        data_parsed = list(ItemListParser().process_all(data))

         # There should be 2 items in the list.
         assert 2 == len(data_parsed)
@@ -1946,7 +1947,7 @@ def test_multiple_contributor_roles(
         self, bibliotheca_fixture: BibliothecaAPITestFixture
     ):
         data = bibliotheca_fixture.files.sample_data("item_metadata_audio.xml")
-        [parsed_data] = list(ItemListParser().parse(data))
+        [parsed_data] = list(ItemListParser().process_all(data))
         names_and_roles = []
         for c in parsed_data.contributors:
             [role] = c.roles
diff --git a/tests/core/util/test_xml_parser.py b/tests/core/util/test_xml_parser.py
index 4a855b927f..d7aad46706 100644
--- a/tests/core/util/test_xml_parser.py
+++ b/tests/core/util/test_xml_parser.py
@@ -1,52 +1,91 @@
-from core.util.xmlparser import XMLParser
+from __future__ import annotations

+from typing import Dict, Optional

-class MockParser(XMLParser):
+from lxml.etree import _Element
+
+from core.util.xmlparser import XMLProcessor
+
+
+class MockProcessor(XMLProcessor[_Element]):
     """A mock XMLParser that just returns every tag it hears about."""

-    def process_one(self, tag, namespaces):
+    def __init__(self, xpath_expression: str) -> None:
+        self._xpath_expression = xpath_expression
+
+    @property
+    def xpath_expression(self) -> str:
+        return self._xpath_expression
+
+    def process_one(
+        self, tag: _Element, namespaces: Optional[Dict[str, str]]
+    ) -> _Element:
         return tag


-class TestXMLParser:
-    def test_process_all(self):
+class TestXMLProcessor:
+    def test_process_all(self) -> None:
         # Verify that process_all can handle either XML markup
         # or an already-parsed tag object.
         data = "<atag>This is a tag.</atag>"

         # Try it with markup.
-        parser = MockParser()
-        [tag] = parser.process_all(data, "/*")
+        parser = MockProcessor("/*")
+        [tag] = parser.process_all(data)
         assert "atag" == tag.tag
         assert "This is a tag." == tag.text

         # Try it with a tag.
-        [tag2] = parser.process_all(tag, "/*")
+        [tag2] = parser.process_all(tag)
         assert tag == tag2

-    def test_process_all_with_xpath(self):
+    def test_process_all_with_xpath(self) -> None:
         # Verify that process_all processes only tags that
         # match the given XPath expression.
         data = "<parent><a>First</a><b>Second</b><a>Third</a><b>Fourth</b></parent>"

-        parser = MockParser()
+        parser = MockProcessor("/parent/a")

         # Only process the <a> tags beneath the <parent> tag.
-        [tag1, tag3] = parser.process_all(data, "/parent/a")
+        [tag1, tag3] = parser.process_all(data)
         assert "First" == tag1.text
         assert "Third" == tag3.text

-    def test_invalid_characters_are_stripped(self):
+    def test_invalid_characters_are_stripped(self) -> None:
         data = b'<tag>I enjoy invalid characters, such as \x00\x01 and \x1F. But I also like \xe2\x80\x9csmart quotes\xe2\x80\x9d.</tag>'
-        parser = MockParser()
-        [tag] = parser.process_all(data, "/tag")
+        parser = MockProcessor("/tag")
+        [tag] = parser.process_all(data)
         assert (
             "I enjoy invalid characters, such as  and . But I also like “smart quotes”."
             == tag.text
         )

-    def test_invalid_entities_are_stripped(self):
+    def test_invalid_entities_are_stripped(self) -> None:
         data = '<tag>I enjoy invalid entities, such as � and </tag>'
-        parser = MockParser()
-        [tag] = parser.process_all(data, "/tag")
+        parser = MockProcessor("/tag")
+        [tag] = parser.process_all(data)
         assert "I enjoy invalid entities, such as  and " == tag.text
+
+    def test_process_first_result(self) -> None:
+        # Verify that process_all processes only tags that
+        # match the given XPath expression.
+        data = "<parent><a>First</a><b>Second</b><a>Third</a><b>Fourth</b></parent>"
+
+        parser = MockProcessor("/parent/a")
+
+        # Only process the <a> tags beneath the <parent> tag.
+        tag1_first_call = parser.process_first(data)
+        assert tag1_first_call is not None
+        assert "First" == tag1_first_call.text
+
+        # Calling process first again will return the same tag
+        tag1_second_call = parser.process_first(data)
+        assert tag1_second_call is not None
+        assert "First" == tag1_second_call.text
+
+        # But a different tag instance
+        assert tag1_first_call is not tag1_second_call
+
+        # If no tag is found, process_first returns None
+        parser = MockProcessor("/parent/c")
+        assert parser.process_first(data) is None

From cc1b501bd2e697df5b35b986bd4d669e8a34f4d9 Mon Sep 17 00:00:00 2001
From: Jonathan Green
Date: Mon, 9 Oct 2023 07:04:37 -0300
Subject: [PATCH 093/262] Fix flakey quicksight test. (#1440)

---
 api/admin/controller/quicksight.py            | 9 ++++++---
 tests/api/admin/controller/test_quicksight.py | 7 +++++--
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/api/admin/controller/quicksight.py b/api/admin/controller/quicksight.py
index 2e36d23523..509098d6f9 100644
--- a/api/admin/controller/quicksight.py
+++ b/api/admin/controller/quicksight.py
@@ -3,6 +3,7 @@

 import boto3
 import flask
+from sqlalchemy import select

 from api.admin.model.quicksight import (
     QuicksightDashboardNamesResponse,
@@ -71,9 +72,11 @@ def generate_quicksight_url(self, dashboard_name) -> Dict:
                 )
             )

-        libraries = (
-            self._db.query(Library).filter(Library.id.in_(allowed_library_ids)).all()
-        )
+        libraries = self._db.execute(
+            select(Library.name)
+            .where(Library.id.in_(allowed_library_ids))
+            .order_by(Library.name)
+        ).all()

         try:
             delimiter = "|"
diff --git a/tests/api/admin/controller/test_quicksight.py b/tests/api/admin/controller/test_quicksight.py
index 8f007c88b5..5240ae4941 100644
--- a/tests/api/admin/controller/test_quicksight.py
+++ b/tests/api/admin/controller/test_quicksight.py
@@ -74,7 +74,10 @@ def test_generate_quicksight_url(
                     "Dashboard": {"InitialDashboardId": "uuid1"}
                 },
                 SessionTags=[
-                    dict(Key="library_name", Value="|".join([default.name, library1.name]))  # type: ignore[list-item]
+                    dict(
+                        Key="library_name",
+                        Value="|".join([str(library1.name), str(default.name)]),
+                    )
                 ],
             )

@@ -97,7 +100,7 @@ def test_generate_quicksight_url(
                     "Dashboard": {"InitialDashboardId": "uuid2"}
                 },
                 SessionTags=[
-                    dict(Key="library_name", Value="|".join([library1.name]))  # type: ignore[list-item]
+                    dict(Key="library_name", Value="|".join([str(library1.name)]))
                 ],
             )

From 07a91a2c0058ca18cc20006624d595776a45a4e4 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 10 Oct 2023 12:06:22 +0000
Subject: [PATCH 094/262] Bump levenshtein from 0.22.0 to 0.23.0 (#1448)

---
 poetry.lock    | 570
+++++++++++++++++++++---------------------------- pyproject.toml | 2 +- 2 files changed, 242 insertions(+), 330 deletions(-) diff --git a/poetry.lock b/poetry.lock index 41475dcdf1..4d66c99485 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1818,117 +1818,6 @@ files = [ {file = "itsdangerous-2.1.2.tar.gz", hash = "sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a"}, ] -[[package]] -name = "jarowinkler" -version = "1.2.3" -description = "library for fast approximate string matching using Jaro and Jaro-Winkler similarity" -optional = false -python-versions = ">=3.6" -files = [ - {file = "jarowinkler-1.2.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:97175ef8bf47e796280c899c8d72788313e277a30cd5c4a549bbab60ce70e5f5"}, - {file = "jarowinkler-1.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:49796215bd66bb87d2d88da7131b785330b3b2e50cbd7a7be75b4964512f5aa9"}, - {file = "jarowinkler-1.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1097b349e09c6ae2d92520ef0ab79580b6b136f6f1c1d62ad783595011f0f260"}, - {file = "jarowinkler-1.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:786db72036f9b43aa6e4848584580ff8d0a33816f67050cc1d17f283a9446002"}, - {file = "jarowinkler-1.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4feec944743bdcd099b8967d16802c78f1009f3222a241b3d7424795ad301c54"}, - {file = "jarowinkler-1.2.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b29a7adb25bf02f1e007fec412a67a5c3c8de1ba062454de539e623eb638fcaf"}, - {file = "jarowinkler-1.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0dd61e79babfbca37f6f4d2b81bfbc92979e5e22f02d04ba5e762d84901a95bf"}, - {file = "jarowinkler-1.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66aa6d4e961d956da7508d9bf837686e2b957db14a19dbfb0aefe259f9c6a177"}, - {file = "jarowinkler-1.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:db49e56139da097b5d85f323b1ed906a5d9d6d3a4336ce694910852d0a4cd607"}, - {file = "jarowinkler-1.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa55d91bcf097b464df6efa92762434aa3026a9774ab2509895a1948bf64b121"}, - {file = "jarowinkler-1.2.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:c1f3e5d5137419a608a878b76ec277c1618119259134ef94e323d5e7fdc2acfd"}, - {file = "jarowinkler-1.2.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d6648d9c68bcc79f80092fa00e9f897df12b9826f05b7211260b494742ae3e12"}, - {file = "jarowinkler-1.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53b9f9ca5cd56c82500171abc4818ef9f756e77e995ac57046f598fba2642f78"}, - {file = "jarowinkler-1.2.3-cp310-cp310-win32.whl", hash = "sha256:21ae65449c52b14578fd28f51c2efdd976a632979054cf12e714cc86fdc1d1aa"}, - {file = "jarowinkler-1.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:fa1db18ba0a0fe383e9396e2db91d31fcabfc0ff03fa599b5a10edc57416084a"}, - {file = "jarowinkler-1.2.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e68cb387d79871b45d20a670bdd33b0f9edb08ed85aa7a5eb19dafdecfa1c091"}, - {file = "jarowinkler-1.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa12fa8a788804fca8fde0f24c14015f3adf18b2336adb66526e326c15b59c72"}, - {file = "jarowinkler-1.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b8a0e1476e7a0cf316e32acd02733f6dba38a19e57c8aa58dad8cbb69627b54"}, - {file = "jarowinkler-1.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ae3b951ff925a5c1fc7746845d796ce34891313813f6c3bc2d057759c8090c47"}, - {file = "jarowinkler-1.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:813f626b8f2703275e7ad18b842cedc1e6d06e4a334337f96b5a91afcda78ed2"}, - {file = "jarowinkler-1.2.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:288c615134ec2d5d122fb834eb0e134f5ccd0080ce1091e2f8170d861de4c24d"}, - {file = "jarowinkler-1.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4638b6b4569e418365aa12d8175025b93336bb074288ec8b9b259734da9990e"}, - {file = "jarowinkler-1.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc8182ba6561a19eddbbd88106b986b93ae11205919cea36385a260d2146c638"}, - {file = "jarowinkler-1.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7253c25288294474d98e269dd73d7e8d9f503655c77180201788c6f29848bb4e"}, - {file = "jarowinkler-1.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:6e49f8c2258bdab01fef9dd8111811de8ec000a7b6f5a12283f2322ce5f473e3"}, - {file = "jarowinkler-1.2.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a71063e01863f561d86459929ad7c5f6c389922aada4170b67ab7c266e6cf96c"}, - {file = "jarowinkler-1.2.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:51eeb42de858363e93c3407568b3fec1919b99a5ffb6d5c4e3dc494a12d37241"}, - {file = "jarowinkler-1.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:71a41f8d34bb315ea245a9c78e1dc40e58e560ba699ae34932f397271eaa830e"}, - {file = "jarowinkler-1.2.3-cp311-cp311-win32.whl", hash = "sha256:b696f0f80df13e8e86958a9d0eaa9218a6a311b5c566f6a081ef17d7d594713b"}, - {file = "jarowinkler-1.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:d2debc08e15e6c16999c27c1afc4c2493c0d3f140206d24970872f4619ea840a"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e8c6dba59166803347c96f48e1af608f8bbc8efe9d545e1a3f9bfb526e76fd62"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e83b734568aeabf71a89b8f9a7b9630eca71de68e74701d306d56f9e8621c3f"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ea218d666041f41434957816e0a52e8533e7e191c8302ca062ebfa4ec42220d"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bc0dcc31ce493aa70067e1f7ed2cb1528b8bd86bb276f25b6c09fabf746b3df7"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b56248ab6e734b40309b6337b0de5cb37e7f0e71d64c7f5f0d58bcb46c05699d"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:532c89ab12246f36338500b7c7c36b87389e01fca93eec74680423e5e5678677"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:298c708bd8609b0563846cd770891f4fc6492ea1c09ef7ac24a68731f4ede37a"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:18e3af57ac066a617bd688d62b9d0da11da32dca977d9fe5c1726040be26ad2e"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:bbb94c0d894cde960b264f3f797c99cbe316e0280ea1b81e240d6ee4ec19fa0c"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:b39b7cdb985f6c040830f047cd98a0563bbfa909944130223c23667432b39c73"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:cba8a789610e97d29c850370c6c8f68c0481355446a356bfa0b2703d8afb8436"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-win32.whl", hash = "sha256:7f964945c52bb21058718f1e074a14d231bff1dad83c8e8bd1607ed6add4b0fa"}, - {file = "jarowinkler-1.2.3-cp36-cp36m-win_amd64.whl", hash = "sha256:439d66dd82a452535293c2503a0930c2aacc4ebf9542f0ca52b351084e9f3e32"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:07df473a812772794181885fc8e9950b629809297c8a1c00e06d0376cb6f5611"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c0eac3a71193575002e2c374ff7be5ef4005e9370c29dad83e2537f57d09e07e"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5399e513b58496483eeda61ff180676fc6ff9c3b6ef53af3c53be0777e71247"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b59b4e82ade4b9b147646189b500f2085e06c8c7746dd6311e03bc4d4ad126e8"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ed3635427c04c8680807ecf6b71014c145ae760c22243f8ff6dd1a8cc7fa695"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9b2c89b9893c2c0fb1c7369160e2a08258415df5345019dd61c3e15c2ca74b65"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:dd1cd8a99f7f3347d3b30941460531a0ee8b855f199a3b56ac6d49aa98266600"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:da09cbcbb917d99fb341730dbb7892b7a642ef0ca371c7f3a647b4dae6770190"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b24b58bd62de20cc773b0b55352d0a43d6cf2beb9b0a21bbaf5ca1f6f50d3d44"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:46e042d75ee91e1fe678ad0bdb6eea4d6d052f6e6ee35adac8bf5d01942e1f6a"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d2cf546b18f9d49d25f33dd564c06fbe29c0e3090d062bad84ba04e77fc7d168"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-win32.whl", hash = "sha256:18da76d3a6d7a0898f36525a1ce8303fcb5413d1bcbc30c3f3634344aeecf397"}, - {file = "jarowinkler-1.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:b73bba435e9cd7618130907d753c708c84baddec5ee6e2637f9630f02496b189"}, - {file = "jarowinkler-1.2.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f8dd58576c81e8115ca29dc757feb413fd689d194789670a533384997306385e"}, - {file = "jarowinkler-1.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eaa520f9b4e6e955269a117d7481332b06aff3fb04981fd218294793ba4ae5fc"}, - {file = "jarowinkler-1.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5719f55bbc84ff08e8ef8d6a87ee936dfa2d29554ae2fc2888214a336c660cbd"}, - {file = "jarowinkler-1.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f968112e0b8be55b259e041be1f9f294931c8790f014c5c04f7c1ffe7928b78"}, - {file = "jarowinkler-1.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7dcfe9a47ec5e1c544add253660475fe44b771b0cc1b5d959ca9bdad8f778e65"}, - {file = "jarowinkler-1.2.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d8da4660934bbf3958e6bd0165ab088d6e65ef6cee0c52d82e86d424ca1be96f"}, - {file = "jarowinkler-1.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40905895ddbce8cbdc5f079299371630e771db3c0e7820b2d262c4bb6a8bea0"}, - {file = 
"jarowinkler-1.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:967a10aed9fca73b826ab41d859ac6a35021ac39efeea5991070964db10a9b13"}, - {file = "jarowinkler-1.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:91ccb6b51cff6158a7f699e0912ab243b7f0026d63919a7696214303e709a21e"}, - {file = "jarowinkler-1.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:68a77b3f262fa90ec30563a50835c760f7417a2cf55138a77606f2def1a4d8b3"}, - {file = "jarowinkler-1.2.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:aea994d5673e9c3b49d548b58f961448bd8a2ba40d3244c1809c891ed29daa02"}, - {file = "jarowinkler-1.2.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:e2d2c6341b021b146db418c77ee71d4318013074761681aba42c1d332a723f7d"}, - {file = "jarowinkler-1.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3af4e4aab7c6ba14f75bd74a21ee00befed67cd2221e626c5741545b4a57c60c"}, - {file = "jarowinkler-1.2.3-cp38-cp38-win32.whl", hash = "sha256:b959d3fcf4ffe865ee518328d77d137ea7b6ad0c8f1f8b96b7a08cd97d3a9c87"}, - {file = "jarowinkler-1.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:8e85bb480eee04681d7f99ce95e86ec8d9182204737a3d141f5a97216d164d6c"}, - {file = "jarowinkler-1.2.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:93cb99fc11b44db61631eea23294f6ae66e944d27129b2856e52f66f11eb8082"}, - {file = "jarowinkler-1.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5aa5645ed7b77ebfa18f9cf7276dfe532d00d64c551fdbdf086c1583a40a5079"}, - {file = "jarowinkler-1.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fbcb4aafadf3ac758de12deb3c90c4e4b6497a104d00ecc8cb6585757af3ab90"}, - {file = "jarowinkler-1.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a2297958acccf63da521f1e7d1c17e3f074db6bf6d4d9eb8c888e638fff2feb"}, - {file = "jarowinkler-1.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2daa79de5856d34ee6a813d9b049d55aad7014a92ce1d90fb3e487338ec362ef"}, - {file = "jarowinkler-1.2.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f2afe56a6cb3e84cc77af6e4a1e8eb6f4f6211a8dd0468237aeee27e16501752"}, - {file = "jarowinkler-1.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b77e53d9a1a8aa84f6c3817790d0fa336a42f726277d9e5a0cf2420337349ee"}, - {file = "jarowinkler-1.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c834dd86fc4f372c0cd6ec7a33432e49e644de7b5d37f520b96500cab7e9d992"}, - {file = "jarowinkler-1.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9bd54d4635bc9d01510fff1545b4ec1e26bddafde0aff6af1af4e46b80407e9e"}, - {file = "jarowinkler-1.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:e3edd98b7fa078b06b1bd0e12d7e244c875e7030ad242eb31719f2f87e343862"}, - {file = "jarowinkler-1.2.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28654c3cfd1f917900a44650cee3a6827210c1f1783ef5aca3399ee31ee2cd17"}, - {file = "jarowinkler-1.2.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e36a5af0db07010e3cc70000edebc2cdb92c39beb2d10d721604a7a52c48100d"}, - {file = "jarowinkler-1.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e954870ba9e8ad3ffdda976a71379b9cc8474195caa3009d89dda350cf5d0fe"}, - {file = "jarowinkler-1.2.3-cp39-cp39-win32.whl", hash = "sha256:21869871774ea4a34222538c33704234ee8e1b4c1a82fe95471215994575e631"}, - {file = "jarowinkler-1.2.3-cp39-cp39-win_amd64.whl", hash = 
"sha256:447c9b1323e7b16ff21da9121164b54c4a806f352f716b2a6e1f937acabc6e73"}, - {file = "jarowinkler-1.2.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a03972d2878e6954852ffce67a843de8a30c515eaa257313b609151e16036bab"}, - {file = "jarowinkler-1.2.3-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef39035486ce07745a0fee9dd80bd9a0b692811111da4ef9aedbc0ddd23ff9cd"}, - {file = "jarowinkler-1.2.3-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4b9111a2092eaaaabd7dd33aa8703d734075a2f75ec87976eab0a2b60273ac"}, - {file = "jarowinkler-1.2.3-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e480e39ed2420a881ac445f6fea8064c36f535970deb4ee94677afe06985b917"}, - {file = "jarowinkler-1.2.3-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:50bac973e0aec697d73bf6b601e027e6079779fb9f6b0905eaefb055536bec39"}, - {file = "jarowinkler-1.2.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fa2d76d3572229ad282dd7ed0005387e9085bdfd954a7636a6f920530e3b670d"}, - {file = "jarowinkler-1.2.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f91e8ee2b81c44d8f4aec164e84a976fcabe754fe107efae3eae2e9fb433ffd"}, - {file = "jarowinkler-1.2.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91d6fe7fd12c5d3bb82b644500df13ee0f7ae949f067e6d967be896aa340732"}, - {file = "jarowinkler-1.2.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5586164f7063fd9d1704ba136041f5811d847e994dabb973ce4741f8d512a586"}, - {file = "jarowinkler-1.2.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:8493cb25ae8627272537f40b6fdfb376824e38d1e8e7e48196e49494bbdc78f3"}, - {file = "jarowinkler-1.2.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1b243a43ef1740bdec3101243347ceb59f698f28df0c514935f4cf856af22795"}, - {file = "jarowinkler-1.2.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7986c585540262e2abe3badda0e4982291f6513bd3cd313447b0faf77fae454f"}, - {file = "jarowinkler-1.2.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ea3f8e772debaf85ecf9b0aa07f9fd8de3bfaf52595edaa86c979309658afdc"}, - {file = "jarowinkler-1.2.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3f03c8178b94380e103c9368e84b88bfca437e59e484dc71d8b059d43c6e8dc"}, - {file = "jarowinkler-1.2.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4ffe4a84dba6a7cc9411f5185677a7fa86087d3a036281f837eec7a1afd93a34"}, - {file = "jarowinkler-1.2.3.tar.gz", hash = "sha256:af28ea284cfbd1b21b29ff94b759f20e94e4f7c06f424b0b4702e701c2a21668"}, -] - [[package]] name = "jinja2" version = "3.1.2" @@ -2022,123 +1911,123 @@ deprecated = "*" [[package]] name = "levenshtein" -version = "0.22.0" +version = "0.23.0" description = "Python extension for computing string edit distances and similarities." 
optional = false python-versions = ">=3.7" files = [ - {file = "Levenshtein-0.22.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7b5e165e4b36eea0df530a29a8b05c88d6bca01c652b0128f603be1f117e6ea1"}, - {file = "Levenshtein-0.22.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e4f7ecd6669c94c28fdfb6be1561d2615a699823494140c382d9c58fece3d75b"}, - {file = "Levenshtein-0.22.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5138c2a8a62f5219c7d29ae077d2272c4e58626480b3748f48772e87a3e7fe9b"}, - {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8fbc6377cc56d9f9b40785ed73b706b09f45c2117fb91a24230ad090d2bd5d8f"}, - {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0a3a2b64965f79cd5db75b3207ad637175727fb188acee96a2c25989cb79eddc"}, - {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8cef3132c6bd74e37706330206a87f7c165a2a5a67048bad986877fd83e13a44"}, - {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61af529827edb59610aaccf053508228e7205a07abbf9108fe25957c66c879b3"}, - {file = "Levenshtein-0.22.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acc4c9587d94053cbd314eb3d3372aa7c42282fced037c7ae597be8400b22e74"}, - {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:161815d2496221a361539122413d61b054e8881646a06129cc7328f65bffad8b"}, - {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8b52823b4908cc7f4b3202242d6d632a3b021c01301906e08069071e939136be"}, - {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:76e216fcad971a5770a18a7cd97a4b0838974bdd54f073ebd9c3425a2efb7410"}, - {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0a11365daa4db76b538c0f48a63b1ae1cbc37e178bc81e3af818bf848bd345f7"}, - {file = "Levenshtein-0.22.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0d2c8208bc55e81f6192872c4bdb556fcbbd911a1107417c11ac9283648a356f"}, - {file = "Levenshtein-0.22.0-cp310-cp310-win32.whl", hash = "sha256:e49a4d8b9bbeceaf2618409ce0ba6cd83535b2ce8cf9144d5cb913728f17fffc"}, - {file = "Levenshtein-0.22.0-cp310-cp310-win_amd64.whl", hash = "sha256:0a78fc02072e04b12711a1f01ed703cbffa852e1ff92edf9bf05d43e6044028b"}, - {file = "Levenshtein-0.22.0-cp310-cp310-win_arm64.whl", hash = "sha256:8c9ea26ab65d4c35220801c73d59e181081db63b854de78b5645295c19880786"}, - {file = "Levenshtein-0.22.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:658b4085252d044390bf3e26eb52d0f8c4cc1bff7250711458d83ed3043b2a97"}, - {file = "Levenshtein-0.22.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:500aee88248bbe8bb6e33f60fff7d8fa2e0fa40c36589fe5382f5678770c0f90"}, - {file = "Levenshtein-0.22.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f61afd1b9c741d4c19d37473c045a581fc155f3c8f357f98c7c8caf306f3ad21"}, - {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5951b855d5111d27d6b330b5c31c882df030b86769899ba1c6a9bb819d15acd"}, - {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:14702c06fbe59f78025b3a0c825b91ede14d55b96a049d34796f9b3771456e83"}, - {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:541e9feeb33fdcb8414c9b0f8bc2a6d11af4b746abf14899f8f0cad80b85ca03"}, - {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d40b14d9c95c77407c2ce9063e28f420f502609efbcf48f2ae240137c1b0297a"}, - {file = "Levenshtein-0.22.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18749dfc6778821d8aeecc0b993906a49749a256bc762fa6067493f22a7ddf8e"}, - {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:10216260b155e8ebd19c82c3864a2e5bead2020eb46936bfb69a26efc73053ac"}, - {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1167e7f58588b991a1c058631ad12e7e3882644e3842ebc2ec55fff9615caf8b"}, - {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f5d95b4a8b91e267b3e061e6838bc7beee4394da161e9d8cf5ead5412a3841"}, - {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:842544ce1cfb7a0edcb0b21cf78f2b271a9e1ba911e9b6e2e4fa753eaf67150e"}, - {file = "Levenshtein-0.22.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:610909d494f23f8d24775499796f25ad650315c4abb59260c2ebb82ff9e3323d"}, - {file = "Levenshtein-0.22.0-cp311-cp311-win32.whl", hash = "sha256:203cf2034ad636eaf2b4b2bd44dfe5822abe556b732ccb98394d5d0a26d2b045"}, - {file = "Levenshtein-0.22.0-cp311-cp311-win_amd64.whl", hash = "sha256:282c97890466a02174bd7713395fa69764d415c7816d8624386e74c3a1c358d6"}, - {file = "Levenshtein-0.22.0-cp311-cp311-win_arm64.whl", hash = "sha256:caf45bd4aadca4c08127c903fd02f5564438966c6ce1e6f30595332ff844e360"}, - {file = "Levenshtein-0.22.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:abb60261fa00029681d89b2e72342209f9253d8cab5c040dc4eb8db4eb9c998d"}, - {file = "Levenshtein-0.22.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:1ab939d05d506f9fc80603fa64a5c19a398eb927b1f188f97f60d9382e2a0efe"}, - {file = "Levenshtein-0.22.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:91f1945090b6a22527272f86354f1e352d94535e50855b29982c4a710d39ae2a"}, - {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d662ae4325af7865443674ddd0fe95df15cc962f75b27c93b5fe7286174d537"}, - {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:16143f27641fc212e9745490948a0fbd42b8593058771737f0b020be1bacda33"}, - {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52d459efb8d9ad8a13317df38eb198f1b9ad4b669c8e00f0c3acf46e3468e8aa"}, - {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f29d764a31dba249cb8f44f9dfa3e4c3180f7fc73c457f5ec5a092f00a555b7"}, - {file = "Levenshtein-0.22.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:264b79ab26b03a1b099e40e4050451d4ae17cca514e0f06d8f2fc55a4cdd1ab5"}, - {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d29b09ea4cc45716b47d1ac0c34c0e952bf1de14e0d0578ea8bcde12e00d14eb"}, - {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:91cdfdebcf67210c89811c538bcf08dde237fde4aa35787ecf65621ae948a317"}, - {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:f839a1c2d0ba092ad1f571c817a1554a3a5fffad9ae8bd238307c7df4f95a042"}, - {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = 
"sha256:e20dff9e11be7691b34dc49c5b58ed73b435d6720cfd1b37ee906f9696fc5a00"}, - {file = "Levenshtein-0.22.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:38b1e26f04c8fb73882e7409026b249e7c4ffccc97992a013a3102295eabf193"}, - {file = "Levenshtein-0.22.0-cp312-cp312-win32.whl", hash = "sha256:8604794aba363638e00f10b11f3c704524cd5d32624f83601ba05d362f51da2a"}, - {file = "Levenshtein-0.22.0-cp312-cp312-win_amd64.whl", hash = "sha256:008f665f7c885fa9f6537dd35b82dc25cfbd03f3cda48a045d3fa189ce972e26"}, - {file = "Levenshtein-0.22.0-cp312-cp312-win_arm64.whl", hash = "sha256:8958b21fae93c9210e56b9c53520ca3aad2d60f0ff44dc33605c40e562d23411"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dc53b1f9e4f43fa28faad39c2622f285e6b250455cfed2b38daf2337c69be63a"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:271c9c4485beda8d23561cc7b046e1049a5beee09bb16f8dec6b756f572c59ed"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ef72b265c445bf4dd0187b4a49565fcd184183419918330ed4ea7b66031c041d"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:549f9eb3073c03d8626c5a94e2768fb3de92af7ef93a97c5aa639427158660b4"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bbc77c2d59a394075c1023fa1555082602aadd923a8c90b5121f8a543c8870"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f56203d5e3805ea947cad41b70d3f88d07cd21a634653ef87e9c74e813b579ae"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:4b27cfd9396e6c720cb13b3bdd35545afca92816d13649c1e8b99586f5d81754"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:98bcfa889c7a97155fb2675506fb242710596cfcd6bcadf37e58cfe421968f47"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c1c52c58e8ff50c8d58c6fe2753a6f6d6bf56394dd86549879fd6cac032d8f7d"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:eaa3d2174132ff6bfa488c320f9e309018f003b819f942203bd508fe16d2b1a6"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:14fc8ed8ad16f619d0975df70e63bd5a5c7b88aabc4037b8395346ff7bdb250c"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-win32.whl", hash = "sha256:8f4fbc8b7ae61fe596f3a2256ea4122f61d7a58fe77d45a7e85a594e4e03c8c2"}, - {file = "Levenshtein-0.22.0-cp37-cp37m-win_amd64.whl", hash = "sha256:606d5f868e2579d067f153a0c57563f144e45173aa1e21d5c5fbf2ce5a65615e"}, - {file = "Levenshtein-0.22.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e52c339fcca07749469c1d48157eeeda6671594577ad080f0ccdd8583e9d9a8c"}, - {file = "Levenshtein-0.22.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c65695e1650fea3e42db16ae75e8b3dd04109e0b59eed9f1e206f7d5d15d2dc9"}, - {file = "Levenshtein-0.22.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:157d4138dbbdbc3756f6cc166860c447f2268ef07ee80f520583cb940fd37545"}, - {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13832c213c72ed6fc59e2bcb69f6e540a3f812e169ef51c07509d299c4b233a2"}, - {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:370be3bafde8f69051a12305af862e18280fbf72f7b819b3fabbec13e7d0cac9"}, - {file = 
"Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7821a1776bd4aee6f1075b6f48612eae21abc04dc2e91f5858bce85ebc44bd83"}, - {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e80b870a85f487cb151d38532d52d6a4303f05f257915c1a20c06f24809e1c"}, - {file = "Levenshtein-0.22.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e0da18c2ca3b76dfc26ea6cf1c15d2526f3c0e9609d0a790210940ae58a79be"}, - {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:0fcf720cfbcc46dbdc1322a3f95bd2680844b127edb8eba3ab168b5cb41d4443"}, - {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:003635abca2da5c96788588e24aa97a8b7c45e9accdf8e82ae9975daac0b798b"}, - {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:61d861a17f7e69b191a29a3029d9048cf2d6c73ba0d47bd57e1f36f5eb8e79cd"}, - {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:2eb4d6b953586ceb5ffa13873302037cbd678f3cb3eb28e7dfa6bd7d9237366e"}, - {file = "Levenshtein-0.22.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb08576209cc180a48ec47ba8444bbab16d1a98a21e557b2b5420916c0366b9f"}, - {file = "Levenshtein-0.22.0-cp38-cp38-win32.whl", hash = "sha256:bc86bd217386768d323662f8c864e0dee90a573f222675b6834e972330f4ec39"}, - {file = "Levenshtein-0.22.0-cp38-cp38-win_amd64.whl", hash = "sha256:ac3cf596cd7fc16f79c4013c4a4b7fb1ec73caf0169c332d99322039bc3811bf"}, - {file = "Levenshtein-0.22.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:bad0649573449a03f549a56783b37f87f514c478134d6416b1d5160bf1fcfd93"}, - {file = "Levenshtein-0.22.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8db9c2682a0fb0a83c0eb54baef63d78808b62885d3288abfa16cb9f29e062b6"}, - {file = "Levenshtein-0.22.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a3bc743a8ce662355513aad10d81ca2f865b600a4bc63adc4ca9575885042f63"}, - {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3faf5497ac7699b0b34b06dff5277334e410cdab43d4c86e8ca34fc1df92a781"}, - {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd79d847422bdaffe5dc46d018c56b22f5bcdf59bcbc2855ded1517669a241df"}, - {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98a27eb7a655a956b219374d2232e16b908fde5a750f839da36e2b9ecbfa35e0"}, - {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbbbd989c9511461e0742e53d52aaa3e8edff8858f825d29364785d88df080a5"}, - {file = "Levenshtein-0.22.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a50b25a72be543dfe99ce5383c5d6a44235bda1dcef2b1eb69dfbde8f75149c"}, - {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:83869023f190d21c7246f02700b8ff72b443e5ad0baa9decf1bc146451b5d9b0"}, - {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:1d7e491c9bfb86aa6dfedeee88edd2e1a760c6c085490a6fa28c99328a95d0ff"}, - {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:b977a803fd69e37ea4a3253e450aac84aadd092e245036bf935e9ce69eb9bd09"}, - {file = "Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7b5e2bdae788d237f0e11ba7c06762f1fdb181a8d200961bcc2cd631c63c158a"}, - {file = 
"Levenshtein-0.22.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:306b9ea4acce215415ee8d11b9a1fd1f5020ea7a8fbf995e4f9789fc8ec947d8"}, - {file = "Levenshtein-0.22.0-cp39-cp39-win32.whl", hash = "sha256:4c1568e82a4d14ea0a9db11cd58618a55164e721f4f8c445b5ab70d2b304f1f5"}, - {file = "Levenshtein-0.22.0-cp39-cp39-win_amd64.whl", hash = "sha256:981b27f8ad1dab9871807fc4a50e4db52c9b3c39706aadc053415152e70c15ff"}, - {file = "Levenshtein-0.22.0-cp39-cp39-win_arm64.whl", hash = "sha256:f71840265862ad507873593749219c2e130830784ccbb71253e093e9d29ac63d"}, - {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:be6cc97ad71185e714d52997cf85bc8432fabc60b46ed8e6b30717ca5f9dacc8"}, - {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48c056cdfb269ffc3f4383471a1a35217120fb15995785bf277bf16561626f59"}, - {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:389f1e4dee92f2d91297dfa4595a409bd688a3009bcc93523ab66d78cc7548b2"}, - {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:26230f8ff50e72e82f3100d2f1153b3890fda9670bf8969755df7713484093ac"}, - {file = "Levenshtein-0.22.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:01b36cae9c5ddae8f178814e603a388968bc23059343b1b61fc396d72a51321f"}, - {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7bd018087add386d766b6926635168b1f83f440b8ce1bba8c497fac3a1995328"}, - {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5368c332632c2534060b8b63c9076a15370e4c35fbc2f22f45162713277aa239"}, - {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54670a6b626c5c2b96c5e9faaa8599c6e9a933a701441cfd82c01d1785b4dca5"}, - {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb2ac06a597e29a37d2ee9a2a91467b4790ff47cf67d724883fe2342d74e3100"}, - {file = "Levenshtein-0.22.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:49cea609250ec61e2b320afe9288c8a9ee91aa3978e249362af53ed9066f944e"}, - {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:692f28b632c3726ea55878f736b996457a1d2887b42a33474ee4c219b505798b"}, - {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7934381e902258b4a5f8e5cb56d45bd5da051763b7c8fb3acdaba1fdb91a197a"}, - {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2db7bab8d9865c51be9bf5006bc712cd30b31f2fcf09009470099ef07f21485"}, - {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a9015d600e4e0ad2339bc44c905019957f45228acfc8c441922d9550b106969"}, - {file = "Levenshtein-0.22.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:99c69647d56c90a3ea0d2c4bb252eb77d4724e0774f5102f098e7794125fc0cf"}, - {file = "Levenshtein-0.22.0.tar.gz", hash = "sha256:86d285d770551cb648d4fcfe5243449a479e694e56b65272dc6cbda879012051"}, + {file = "Levenshtein-0.23.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2d3f2b8e67915268c49f0faa29a29a8c26811a4b46bd96dd043bc8557428065d"}, + {file = "Levenshtein-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:10b980dcc865f8fe04723e448fac4e9a32cbd21fb41ab548725a2d30d9a22429"}, + {file = "Levenshtein-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f8c8c48217b2733ae5bd8ef14e0ad730a30d113c84dc2cfc441435ef900732b"}, + {file = "Levenshtein-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:854a0962d6f5852b891b6b5789467d1e72b69722df1bc0dd85cbf70efeddc83f"}, + {file = "Levenshtein-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5abc4ee22340625ec401d6f11136afa387d377b7aa5dad475618ffce1f0d2e2f"}, + {file = "Levenshtein-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:20f79946481052bbbee5284c755aa0a5feb10a344d530e014a50cb9544745dd3"}, + {file = "Levenshtein-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a6084fc909a218843bb55723fde64a8a58bac7e9086854c37134269b3f946aeb"}, + {file = "Levenshtein-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0acaae1c20c8ed37915b0cde14b5c77d5a3ba08e05f9ce4f55e16843de9c7bb8"}, + {file = "Levenshtein-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:54a51036b02222912a029a6efa2ce1ee2be49c88e0bb32995e0999feba183913"}, + {file = "Levenshtein-0.23.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:68ec2ef442621027f290cb5cef80962889d86fff3e405e5d21c7f9634d096bbf"}, + {file = "Levenshtein-0.23.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:d8ba18720bafa4a65f07baba8c3228e98a6f8da7455de4ec58ae06de4ecdaea0"}, + {file = "Levenshtein-0.23.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:af1b70cac87c5627cd2227823318fa39c64fbfed686c8c3c2f713f72bc25813b"}, + {file = "Levenshtein-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe2810c42cc5bca15eeb4a2eb192b1f74ceef6005876b1a166ecbde1defbd22d"}, + {file = "Levenshtein-0.23.0-cp310-cp310-win32.whl", hash = "sha256:89a0829637221ff0fd6ce63dfbe59e22b25eeba914d50e191519b9d9b8ccf3e9"}, + {file = "Levenshtein-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:b8bc81d59205558326ac75c97e236fd72b8bcdf63fcdbfb7387bd63da242b209"}, + {file = "Levenshtein-0.23.0-cp310-cp310-win_arm64.whl", hash = "sha256:151046d1c70bdf01ede01f46467c11151ceb9c86fefaf400978b990110d0a55e"}, + {file = "Levenshtein-0.23.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7e992de09832ee11b35910c05c1581e8a9ab8ea9737c2f582c7eb540e2cdde69"}, + {file = "Levenshtein-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a5e3461d29b3188518464bd3121fc64635ff884ae544147b5d326ce13c50d36"}, + {file = "Levenshtein-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1772c4491f6ef6504e591c0dd60e1e418b2015074c3d56ee93af6b1a019906ee"}, + {file = "Levenshtein-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e125c92cd0ac3b53c4c80fcf2890d89a1d19ff4979dc804031773bc90223859f"}, + {file = "Levenshtein-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0d2f608c5ce7b9a0a0af3c910f43ea7eb060296655aa127b10e4af7be5559303"}, + {file = "Levenshtein-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe5c3b7d96a838d9d86bb4ec57495749965e598a3ea2c5b877a61aa09478bab7"}, + {file = "Levenshtein-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:249eaa351b5355b3e3ca7e3a8e2a0bca7bff4491c89a0b0fa3b9d0614cf3efeb"}, + {file = 
"Levenshtein-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0033a243510e829ead1ae62720389c9f17d422a98c0525da593d239a9ff434e5"}, + {file = "Levenshtein-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f956ad16cab9267c0e7d382a37b4baca6bf3bf1637a76fa95fdbf9dd3ea774d7"}, + {file = "Levenshtein-0.23.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:3789e4aeaeb830d944e1f502f9aa9024e9cd36b68d6eba6892df7972b884abd7"}, + {file = "Levenshtein-0.23.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:f91335f056b9a548070cb87b3e6cf017a18b27d34a83f222bdf46a5360615f11"}, + {file = "Levenshtein-0.23.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3497eda857e70863a090673a82442877914c57b5f04673c782642e69caf25c0c"}, + {file = "Levenshtein-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5e17ea59115179c269c6daea52415faaf54c6340d4ad91d9012750845a445a13"}, + {file = "Levenshtein-0.23.0-cp311-cp311-win32.whl", hash = "sha256:da2063cee1fbecc09e1692e7c4de7624fd4c47a54ee7588b7ea20540f8f8d779"}, + {file = "Levenshtein-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:4d3b9c9e2852eca20de6bd8ca7f47d817a056993fd4927a4d50728b62315376b"}, + {file = "Levenshtein-0.23.0-cp311-cp311-win_arm64.whl", hash = "sha256:ef2e3e93ae612ac87c3a28f08e8544b707d67e99f9624e420762a7c275bb13c5"}, + {file = "Levenshtein-0.23.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:85220b27a47df4a5106ef13d43b6181d73da77d3f78646ec7251a0c5eb08ac40"}, + {file = "Levenshtein-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6bb77b3ade7f256ca5882450aaf129be79b11e074505b56c5997af5058a8f834"}, + {file = "Levenshtein-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b487f08c32530ee608e8aab0c4075048262a7f5a6e113bac495b05154ae427"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f91d0a5d3696e373cae08c80ec99a4ff041e562e55648ebe582725cba555190"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fddda71ae372cd835ffd64990f0d0b160409e881bf8722b6c5dc15dc4239d7db"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7664bcf9a12e62c672a926c4579f74689507beaa24378ad7664f0603b0dafd20"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d6d07539502610ee8d6437a77840feedefa47044ab0f35cd3bc37adfc63753bd"}, + {file = "Levenshtein-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:830a74b6a045a13e1b1d28af62af9878aeae8e7386f14888c84084d577b92771"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:f29cbd0c172a8fc1d51eaacd163bdc11596aded5a90db617e6b778c2258c7006"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:df0704fd6a30a7c27c03655ae6dc77345c1655634fe59654e74bb06a3c7c1357"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:0ab52358f54ee48ad7656a773a0c72ef89bb9ba5acc6b380cfffd619fb223a23"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:f0a86394c9440e23a29f48f2bbc460de7b19950f46ec2bea3be8c2090839bb29"}, + {file = "Levenshtein-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a689e6e0514f48a434e7ee44cc1eb29c34b21c51c57accb304eac97fba87bf48"}, + {file = "Levenshtein-0.23.0-cp312-cp312-win32.whl", hash = 
"sha256:2d3229c1336498c2b72842dd4c850dff1040588a5468abe5104444a372c1a573"}, + {file = "Levenshtein-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:5b9b6a8509415bc214d33f5828d7c700c80292ea25f9d9e8cba95ad5a74b3cdf"}, + {file = "Levenshtein-0.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:5a61606bad3afb9fcec0a2a21871319c3f7da933658d2e0e6e55ab4a34814f48"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:078bb87ea32a28825900f5d29ba2946dc9cf73094dfed4ba5d70f042f2435609"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26b468455f29fb255b62c22522026985cb3181a02e570c8b37659fedb1bc0170"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3dc62b2f74e4050f0a1261a34e11fd9e7c6d80a45679c0e02ac452b16fda7b34"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8b65b0b4e8b88e8326cdbfd3ec119953a0b10b514947f4bd03a4ed0fc58f6471"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bccaf7f16b9da5edb608705edc3c38401e83ea0ff04c6375f25c6fc15e88f9b3"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b35f752d04c0828fb1877d9bee5d1786b2574ec3b1cba0533008aa1ff203712"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:2c32f86bb54b9744c95c27b5398f108158cc6a87c5dbb3ad5a344634bf9b07d3"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fa8b65f483cdd3114d41736e0e9c3841e7ee6ac5861bae3d26e21e19faa229ff"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:9fdf67c10a5403b1668d1b6ade7744d20790367b10866d27394e64716992c3e4"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:eb6dfba3264b38a3e95cac8e64f318ad4c27e2232f6c566a69b3b113115c06ef"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8541f1b7516290f6ccc3faac9aea681183c5d0b1f8078b957ae41dfbd5b93b58"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-win32.whl", hash = "sha256:f35b138bb698b29467627318af9258ec677e021e0816ae0da9b84f9164ed7518"}, + {file = "Levenshtein-0.23.0-cp37-cp37m-win_amd64.whl", hash = "sha256:936320113eadd3d71d9ce371d9027b1c56299001b48ed197a0db4140e1d13bbd"}, + {file = "Levenshtein-0.23.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:da64e19e1ec0c1e8a1cd77c4802a0d656f8a6e0ab7a1479d435a9d2575e473f8"}, + {file = "Levenshtein-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e729781b6134a6e3b380a2d8eae0843a230fc3716bdc8bba4cde2b0ce260982b"}, + {file = "Levenshtein-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:97d0841a2682a3c302f70537e8316077e56795062c6f629714f5d0771f7a5838"}, + {file = "Levenshtein-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:727a679d19b18a0b4532abf87f9788070bcd94b78ff07135abe41c716bccbb7d"}, + {file = "Levenshtein-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48c8388a321e55c1feeef543b49fc969be6a5cf6bcf4dcb5dced82f5fea6793c"}, + {file = "Levenshtein-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:58f8b8f5d4348e470e8c0d4e9f7c23a8f7cfc3cbd8024cc5a1fc68cc81f7d6cb"}, + {file = "Levenshtein-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:549170257f052289df93a13526877cb397d351b0c8a3e4c9ae3936aeafd8ad17"}, + {file = "Levenshtein-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5d32f3b28065e430d54781e1f3b31198b6bfc21e6d565f0c06218e7618884551"}, + {file = "Levenshtein-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ecc8c12e710212c4d959fda3a52377ae6a30fa204822f2e63fd430e018be3d6f"}, + {file = "Levenshtein-0.23.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:88b47fbabbd9cee8be5d6c26ac4d599dd66146628b9ca23d9f4f209c4e3e143e"}, + {file = "Levenshtein-0.23.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:5106bce4e94bc1ae137b50d1e5f49b726997be879baf66eafc6ee365adec3db5"}, + {file = "Levenshtein-0.23.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:d36634491e06234672492715bc6ff7be61aeaf44822cb366dbbe9d924f2614cc"}, + {file = "Levenshtein-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a591c94f7047d105c29630e7606a2b007f96cf98651fb93e9f820272b0361e02"}, + {file = "Levenshtein-0.23.0-cp38-cp38-win32.whl", hash = "sha256:9fce199af18d459c8f19747501d1e852d86550162e7ccdc2c193b44e55d9bbfb"}, + {file = "Levenshtein-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:b4303024ffea56fd164a68f80f23df9e9158620593b7515c73c885285ec6a558"}, + {file = "Levenshtein-0.23.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:73aed4856e672ab12769472cf7aece04b4a6813eb917390d22e58002576136e0"}, + {file = "Levenshtein-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4e93dbfdf08360b4261a2385340d26ac491a1bf9bd17bf22a59636705d2d6479"}, + {file = "Levenshtein-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b847f716fc314cf83d128fedc2c16ffdff5431a439db412465c4b0ac1762478e"}, + {file = "Levenshtein-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0d567beb47cd403394bf241df8cfc14499279d0f3a6675f89b667249841aab1"}, + {file = "Levenshtein-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e13857d870048ff58ce95c8eb32e10285918ee74e1c9bf1825af08dd49b0bc6"}, + {file = "Levenshtein-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c4250f507bb1b7501f7187af8345e200cbc1a58ceb3730bf4e3fdc371fe732c0"}, + {file = "Levenshtein-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fb90de8a279ce83797bcafbbfe6d641362c3c96148c17d8c8612dddb02744c5"}, + {file = "Levenshtein-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:039dc7323fd28de44d6c13a334a34ab1ddee598762cb2dae3223ca1f083577f9"}, + {file = "Levenshtein-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5739f513cb02039f970054eabeccc62696ed2a1afff6e17f75d5492a3ed8d74"}, + {file = "Levenshtein-0.23.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2a3801a0463791440b4350b734e4ec0dbc140b675a3ce9ef936feed06b23c58d"}, + {file = "Levenshtein-0.23.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:606ba30bbdf06fc51b0a763760e113dea9085011a2399cf4b1f72316836e4d03"}, + {file = "Levenshtein-0.23.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:14c5f90859e512004cc25b50b79c7ae6f068ebe69a7213a9018c83bd88c1305b"}, + {file = "Levenshtein-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c8a75233798e334fd53305656ffcf0601f60e9ff461af759677006c07c060939"}, + {file = "Levenshtein-0.23.0-cp39-cp39-win32.whl", hash = "sha256:9a271d50643cf927bfc002d397b4f715abdbc6ca46a5a93d1d66a033eabaa5f3"}, + {file = 
"Levenshtein-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:684118d9e070e00df91bc4bd276e0559df7bb2319659699dafda16b5a0229553"}, + {file = "Levenshtein-0.23.0-cp39-cp39-win_arm64.whl", hash = "sha256:98412a7bdc49c7fbb493be3c3e7fd2f874eff29ed636b8c0eca325a1e3e74264"}, + {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:760c964ff0be8dea5f7eda20314cf66238fdd0fec63f1ce9c474736bb2904924"}, + {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:de42400ea86e3e8be3dc7f9b3b9ed51da7fd06dc2f3a426d7effd7fbf35de848"}, + {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b2080ee52aeac03854a0c6e73d4214d5be2120bdd5f16def4394f9fbc5666e04"}, + {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb00ecae116e62801613788d8dc3938df26f582efce5a3d3320e9692575e7c4d"}, + {file = "Levenshtein-0.23.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f351694f65d4df48ee2578d977d37a0560bd3e8535e85dfe59df6abeed12bd6e"}, + {file = "Levenshtein-0.23.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:34859c5ff7261f25daea810b5439ad80624cbb9021381df2c390c20eb75b79c6"}, + {file = "Levenshtein-0.23.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ece1d077d9006cff329bb95eb9704f407933ff4484e5d008a384d268b993439"}, + {file = "Levenshtein-0.23.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35ce82403730dd2a3b397abb2535786af06835fcf3dc40dc8ea67ed589bbd010"}, + {file = "Levenshtein-0.23.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a88aa3b5f49aeca08080b6c3fa7e1095d939eafb13f42dbe8f1b27ff405fd43"}, + {file = "Levenshtein-0.23.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:748fbba6d9c04fc39b956b44ccde8eb14f34e21ab68a0f9965aae3fa5c8fdb5e"}, + {file = "Levenshtein-0.23.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:60440d583986e344119a15cea9e12099f3a07bdddc1c98ec2dda69e96429fb25"}, + {file = "Levenshtein-0.23.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b048a83b07fc869648460f2af1255e265326d75965157a165dde2d9ba64fa73"}, + {file = "Levenshtein-0.23.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4be0e5e742f6a299acf7aa8d2e5cfca946bcff224383fd451d894e79499f0a46"}, + {file = "Levenshtein-0.23.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7a626637c1d967e3e504ced353f89c2a9f6c8b4b4dbf348fdd3e1daa947a23c"}, + {file = "Levenshtein-0.23.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:88d8a13cf310cfc893e3734f8e7e42ef20c52780506e9bdb96e76a8b75e3ba20"}, + {file = "Levenshtein-0.23.0.tar.gz", hash = "sha256:de7ccc31a471ea5bfafabe804c12a63e18b4511afc1014f23c3cc7be8c70d3bd"}, ] [package.dependencies] -rapidfuzz = ">=2.3.0,<4.0.0" +rapidfuzz = ">=3.1.0,<4.0.0" [[package]] name = "lxml" @@ -3560,118 +3449,121 @@ files = [ [[package]] name = "rapidfuzz" -version = "2.10.2" +version = "3.4.0" description = "rapid fuzzy string matching" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "rapidfuzz-2.10.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b867f0b75751e898ed4dc411c379bdf9bac0bc45d913eaa08482793e7a2ec2a9"}, - {file = 
"rapidfuzz-2.10.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3451111affbdf5ee06dee92bebca71ba5b642f38ae843860eb9c38233c0b71e8"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:553300dbdbcc29262326c910337988b7ca89dc880e1ab804ca826218a15a6a6c"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd45c3d943ed0fd90ec5569bc0297e5f44fbafed0791dfdfdfc342d779a95671"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:71ec4151b8e46d3e51a92aa3a65ebda8a58ab6ad28493fe701da3b1137276e72"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f601754b6fefe335162d77b2fd112c6a60efb378fa0d64c74ff716112c3b748"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0284e5c4122e009f1217c008d98f201ca59f6ea31cbbdbe53345d25f60101ab8"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ada91d41f9064a79e21536871ec648db3c80ebc91e889f8f5e5aa3b69e4ebe40"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:362b2b0c19c22b4e70c92a3c05828350d1bd3a011bf1c62375e2a31890dcb1bc"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:35494d7b8dcd77704c28468b4b4f895d78f76ae28e1de3bfe5bf8957c6e8bd92"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:decb14fb5d52b454c702376a80faa7b36c0575356090669121a692789f55db1e"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:882bb4030400ae2ea0357273360d3c20150781b950e9b1df15dce3d776bbd100"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75a699e8f9b04980139617cd4be4c58ff06f49947e3e3237648b05b2727e40ce"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-win32.whl", hash = "sha256:d48411624c7a976ba8a657f04a7722fc3e0782398f57b9919221ecac279811ee"}, - {file = "rapidfuzz-2.10.2-cp310-cp310-win_amd64.whl", hash = "sha256:cbe7f2eacbbe49dd8853b8d3c1ec823deb4fa575aea8136d202dee8a411e2026"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ceba10342b141273dca5d9ad005e4a0ed8397bb76fb6136f9c6e3a2bc19fcc5"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:acc5ced3f1b2268d47ada404e7db89cef6a084ab3f11efdcc3ed19ca99b281fb"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfc5b924f886b095eb09f7c3d8ae54caefb12d36f07664a38f563f4f6534b0ef"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f2277377e4ece4e6d5ce3706496c91c0198d601639559936139a75476d88f6"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:254da78cc451bade8a13c339ce8c0497b8d11a026bcf115648ca1bc0fb93452c"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d2845c0ca8562cf3f0442ea8ea47ff809ca3070a88375547c2661975b987df4"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2db80fa00e3c5e3bb7b3964c97d3cfaf28e31086205a8ebd802e3cb5b0aa5a82"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03e383fb0f7528cbdbdec457821c17608503f44d181d3be097123089a76672dd"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:69e629f2af5adecbc5c6ed1a706ce7c73c00ff9b8ed1e1c16f4ce62f34d5daef"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d3162b6f62d216689cd099e17ef0dd40914d048ae34d2cb8d94a2038b3327759"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ecdcd84e8b59c8fcc8b34088f5b3d39ed72db7c7cb63bead93d67bd3e71af96b"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:3be968fa4df10c36c3bfb837e74abba1ab1bf6e495f838127bc935d0a156c05a"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7bb2f0cdcdfa4753a020ade11550abab270ddfa6b57c71cff69b18c658054679"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-win32.whl", hash = "sha256:d1444145017adbcdad6607dba6644e2006e1d76d2ebbff0b3128a6cadce52350"}, - {file = "rapidfuzz-2.10.2-cp311-cp311-win_amd64.whl", hash = "sha256:d7e5f8d4f449a75e374e0a99c8e5bc5ce12074050326f5c92087386eb343bd2e"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6a74967e9ff9d19318df3416e830ac0168803b01818f0529a4da8b78ab19560c"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8da4f23998eb265bf4a6773a9efcae1e3f5e994c2478406e8fdb353c87ad3ec"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3e50b213ad85ec51fe8a0752d4d66808a44de44548b9f24028b449f68d3ff73"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8df0f8dfa9b0a9b3eeffadff8c468219a2e6f1794dd8a722260825f78c233ed"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e322db1cfdc1efe269ad6e85e02bbaaea52bb3b52433420b302d39701cc222f7"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3e7d465d8bdfe5b4c80d7f5d4a30f90fc51e1de06d57ff05f4594a6dab819e4e"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:48be65a68e54ac987a7d1ce4e3c1ffa1ac1dcf8b6d18e3390876294334f5ea01"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:43dd54c4f6eef741387700bc7e3a2ded3873c81704c66aa458e6147b6005ccf1"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:218aaaa85ec3a6b0fd43eb45b29958f9fc62dc912f200da08fae74a69c7e04ca"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:3ad0d8b49fa341106fce6a8edaea288eba0de8b952d00a3ec6dfe67b10a4a993"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ee73d74a1dda83a3c4f045dbf5a5d7169d07c982359ac98428570be4fd807e96"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-win32.whl", hash = "sha256:1197d0fbcbf7f536159b3a0b7cb3fce88e253fb381fc974223daa9d2eb7ecebe"}, - {file = "rapidfuzz-2.10.2-cp36-cp36m-win_amd64.whl", hash = "sha256:89cb358e29bb9361fe2e714195937e5e4a843a2f3b2ebeab804c1ec2f88aab0d"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2c7a7d4aad3281d60922fc085122b7530e734183de6d5079a76c012e70c11311"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a043c60afa1f04bafefad4f41a1e7cf9e87246e0252ddb651330397412a924b"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b599fc8b83080f359afbd08d440f48fcad8f3b569e3c2de988ec45f9bd97b64"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:67ada4dcbb0cdd636505797f1fdf4047f88dac55ae1868d7424480977cf76321"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11e4a3fc374dfd76526eedc66b26431bc1f0810a2c2035bc8d7a6ef0aebe5bbe"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bc9913a456069c76970ca31780b0aa0bec355f57a20013214b8a2528ff7cb07"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b04a7815031531b9b6657c8aa160cdeb4f97d47eaf7e673e4b7c81effb200d64"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:582381a0251a7b5df8701b07a77198019b507ab5195b2fad3e7da65b4026a954"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:878540c4dc26a20b84932d9bfb81a54f1218b5f21fb44cdf075edf15915ce3d6"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:e9cd17d3be0efc4de37c71513e6d2dcdcdfd618c26a62d04f49f9fbd0fc3f70c"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87f6f157cfece06ca394d8bb49388e042a2abed926cf2123e7f5863ca5ee45a1"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-win32.whl", hash = "sha256:a99664b37d3fd8b82cd80e62ea416921326a78acec12b2d8a3bed0105a2b94e4"}, - {file = "rapidfuzz-2.10.2-cp37-cp37m-win_amd64.whl", hash = "sha256:68e8e86b8a9a5b200a53a9a574240cb6edf5a393c7f82e94127e589021da95f4"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b64c6c5e55519effe85d5231c04b2e5a6a54bcca9cbc0b1f3b930a8f73615d8e"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ea52eaef7d7b218206bcc9f35053981451369b9ccd10a0b270d27c88593571a5"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:42e953165c2e56f5601b47685ff20ec69ca2e90d7072406358d1a7031b1d3e4e"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2693a81790fcddd203495ff49c7bb2e0d6c237a2de251caed2e863acf26c5d62"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c180ba789b842943223b0e4d62546c8f95427316384d1839b70ef7a308961d3"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f81120f7a502ec3623ed0ba63be1d03e0cdb7f931310d48389756bd7e5d38ce7"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de162a764b7958fcef59f5a17dc3b4a531c17b978735f83d0d4cb56c9a6734a1"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c04633c70e3bb9342c009dbd644f5888912a894042fa651e3bfbf53fa2a523b"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b57b05cf61c0112fb1b67d7a8d7470cee97a9c87745be2938e34f739374ae95"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3b2056568b63aeadf57e628d38c9d7db72d01fd22b4524559a719382e18259bf"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:537876a7d9895d21c22b76041cea791a299aa7a009335b9c42cf3ea4b0332ebc"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8b77b687f594ba7d82866adb24a40a92fd21eb330fb694ba580b2bff6161b049"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:a3f1d9e4570115fbec9544f5c703ddf29daa92c2303f7113307d229726df7feb"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-win32.whl", hash = 
"sha256:23b421b58c0982a15851ae142be2746fac606654359f3826c632fd504a51b0f5"}, - {file = "rapidfuzz-2.10.2-cp38-cp38-win_amd64.whl", hash = "sha256:269c992257e4bf59dacab15b7ab5968cfafa6cde96b5d89776b03771a404c9e9"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:17343390c6623232ed8bf63659a89c3b4195edf6b90a1eaf7a46995f2ce8f1e3"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a582e59e90a025e4e38013b17a434fa37b5eae8992925bd1c64c901827a7d124"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9b4ffa2bca2338c5fbdb241b50e761dd7bfcfaa3377168e32111b95292aad3b3"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:066c75b50cb1b3ce5edf35ed1bd7b650ad0378933246e45793d6c8f2b718c637"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d985e0149f2b2cdf4debc22d3ea742916ad2ed3e218fd45b4cd2061c95bf99dd"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7cc19629576d390ff656df13fe93f7443bbdc819455aad2177946cbe84140cf"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e49b8b2e76b602e6bb001709505381b95dc233372676d90a675ab94d1f086013"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c395984b607835eac0c97d68aa03778dc075bf5cb02d6dc12495368514794a9a"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:39007c02478452cba66f6117876d0823487a16fed0ad6d25136f5ad9d2bacafd"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:2d783c753763437c4cc7c36b433ca458bc5aae69da76f3c0e8890b8cb6ac79d3"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:86c0703064fa7ba127a881985de8dc8462b461dbe9aff365782fed51c11ae20a"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:c3e4e9384bbd1e8e2dd62149bc547720f26483df5fbaf272f995200ca4603975"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ef5857b30e4f49ac165ba7c57ed377451a2dadadb9fc772e10c574c435c8e634"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-win32.whl", hash = "sha256:3506222bff145beebe4e0062ac52f3c51259a4d05f37a8abb9012cc752b034cc"}, - {file = "rapidfuzz-2.10.2-cp39-cp39-win_amd64.whl", hash = "sha256:b63746d773ed7374b46b3a60fd7b4d51936c21ded6749013cb9fc212bce3bdfc"}, - {file = "rapidfuzz-2.10.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f7777f8f58dcbe228378be285b96da5dff78ac47a243fd3a51ae8cbfd343e469"}, - {file = "rapidfuzz-2.10.2-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57a3c4e2f6dd92a099b563e079e528b039eadd5086389596f5b3db92d88110cb"}, - {file = "rapidfuzz-2.10.2-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5c7a16bd803d12fdcf508379f8271986ca321e44db958674dd0f2b9cdba0136a"}, - {file = "rapidfuzz-2.10.2-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e31b61c03daa6b9d2a8cf840dc64bda930b7bc8c636eec69bb5f825f2bcbf11f"}, - {file = "rapidfuzz-2.10.2-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:9b0d572d817782c794c392e5a3d1e0741372e5f17d6476f43c09176b02542a15"}, - {file = "rapidfuzz-2.10.2-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3314db75f3634e4b557c4775d460e834531610050b5ddb9750f7235b8ff49875"}, - {file = 
"rapidfuzz-2.10.2-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da90762b6ef7b182b86cc2a6d225b38aa3d882fc16ebd1a4e7a3ac24828bd41"}, - {file = "rapidfuzz-2.10.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d048dfc22133036d5156cec0532b4dcc859555451f6df4e9f081872aab4bf96e"}, - {file = "rapidfuzz-2.10.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c3c2055f835c58ac73e621e649ed43a0b4047c3bed6973c65ee704e4fb4880d"}, - {file = "rapidfuzz-2.10.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:c8ff3803b3daa4351ecbb9ee5e9eaa1d2f2f00e739a50c338de075bb04aff8c5"}, - {file = "rapidfuzz-2.10.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3e534fd1b7424165c40380216117e689a74a9c25448be51914338b7088c26048"}, - {file = "rapidfuzz-2.10.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:866a5d95ed945345a37d560ec1bc6ecca670cf6349dda9f4775d3f8d41088c9c"}, - {file = "rapidfuzz-2.10.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:454e0c6b50bcc65e9f549b149cd6e4cb906af7f327992900ed98e6734cf4e370"}, - {file = "rapidfuzz-2.10.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a4e0ab3655a8aa696b041955356a38105843726858b18668ce23eff87715a64"}, - {file = "rapidfuzz-2.10.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:669808bcac656759c011861cf2e5cea61895c7fad57a23f7e020023b7e4ceed6"}, - {file = "rapidfuzz-2.10.2.tar.gz", hash = "sha256:26a80cfe249a3100f94737c0207626b0b6f3ac4e77077248599f6dbe7860a9d0"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1438e68fe8869fe6819a313140e98641b34bfc89234b82486d8fd02044a067e8"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59f851c7a54a9652b9598553547e0940244bfce7c9b672bac728efa0b9028d03"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6286510910fcd649471a7f5b77fcc971e673729e7c84216dbf321bead580d5a1"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87409e12f9a82aa33a5b845c49dd8d5d4264f2f171f0a69ddc638e100fcc50de"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1d81d380ceabc8297880525c9d8b9e93fead38d3d2254e558c36c18aaf2553f"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a716efcfc92659d8695291f07da4fa60f42a131dc4ceab583931452dd5662e92"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:83387fb81c4c0234b199110655779762dd5982cdf9de4f7c321110713193133e"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55efb3231bb954f3597313ebdf104289b8d139d5429ad517051855f84e12b94e"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51d47d52c890cbdb2d8b2085d747e557f15efd9c990cb6ae624c8f6948c4aa3a"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3db79070888d0dcd4f6a20fd30b8184dd975d6b0f7818acff5d7e07eba19b71f"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:46efc5e4675e2bd5118427513f86eaf3689e1482ebd309ad4532bcefae78179d"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:d15c364c5aa8f032dadf5b82fa02b7a4bd9688a961a27961cd5b985203f58037"}, + {file = 
"rapidfuzz-3.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f1e91460baa42f5408f3c062913456a24b2fc1a181959b58a9c06b5eef700ca6"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c7f4f6dac25c120de8845a65a97090658c8a976827ac22b6b86e2a16a60bb820"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:124578029d926b2be32d60b748be95ee0de6cb2753eb49d6d1d6146269b428b9"}, + {file = "rapidfuzz-3.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:3af0384132e79fe6f6370d49347649382e04f689277525903bef84d30f3992fd"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:66ff93b81b382269dc7c2d46c839ce72e2d2331ad46a06321770bc94016fe236"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:da2764604a31fd1e3f1cacf226b43a871cc9f28844a3196c2a6b1ba52ae12922"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8eb33895353bfcc33ccf4b4bae837c0afb4eaf20a0361aa6f0800cef12505e91"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ed3da08830c08c8bcd49414cc06b704a760d3067804775facc0df725b52085a4"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b38c7021f6114cfacba5717192fb3e1e50053261d49a774e645021a2f77e20a3"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5ea97886d2ec7b2b9a8172812a76e1d243f2ce705c2f24baf46f9ef5d3951"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b9a7ab061c1b75b274fc2ebd1d29cfa2e510c36e2f4cd9518a6d56d589003c8"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:23b07685c21c93cdf6d68b49eccacfe975651b8d99ea8a02687400c60315e5bc"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c2a564f748497b6a5e08a1dc0ac06655f65377cf072c4f0e2c73818acc655d36"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ef30b5f2720f0acbcfba0e0661a4cc118621c47cf69b5fe92531dfed1e369e1c"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:ab981f9091ae8bd32bca9289fa1019b4ec656543489e7e13e64882d57d989282"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a80f9aa4245a49e0677896d1b51b2b3bc36472aff7cec31c4a96f789135f03fe"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0d8c6cb80b5d2edf88bf6a88ac6827a353c974405c2d7e3025ed9527a5dbe1a6"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-win32.whl", hash = "sha256:c0150d521199277b5ad8bd3b060a5f3c1dbdf11df0533b4d79f458ef11d07e8c"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:bd50bc90167601963e2a90b820fb862d239ecb096a991bf3ce33ffaa1d6eedee"}, + {file = "rapidfuzz-3.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:bd10d68baabb63a3bb36b683f98fc481fcc62230e493e4b31e316bd5b299ef68"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7f497f850d46c5e08f3340343842a28ede5d3997e5d1cadbd265793cf47417e5"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7d6a9f04ea1277add8943d4e144e59215009f54f2668124ff26dee18a875343"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b6fe2aff0d9b35191701714e05afe08f79eaea376a3a6ca802b72d9e5b48b545"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b81b8bc29114ca861fed23da548a837832b85495b0c1b2600e6060e3cf4d50aa"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:805dc2aa3ac295dcbf2df8c1e420e8a73b1f632d6820a5a1c8506d22c11e0f27"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1276c7f50cd90a48b00084feb25256135c9ace6c599295dd5932949ec30c0e70"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0b9197656a6d71483959bf7d216e7fb7a6b80ca507433bcb3015fb92abc266f8"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3456f4df5b8800315fd161045c996479016c112228e4da370d09ed80c24853e5"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:734046d557550589edb83d5ad1468a1341d1092f1c64f26fd0b1fc50f9efdce1"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:37d5f0fbad6c092c89840eea2c4c845564d40849785de74c5e6ff48b47b0ecf6"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:bfe14711b9a7b744e242a482c6cabb696517a1a9946fc1e88d353cd3eb384788"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:1a733c10b1fcc47f837c23ab4a255cc4021a88939ff81baa64d6738231cba33d"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:929e6b71e5b36caee2ee11c209e75a0fcbd716a1b76ae6162b89ee9b591b63b1"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-win32.whl", hash = "sha256:c56073ba1d1b25585359ad9769163cb2f3183e7a03c03b914a0667fcbd95dc5c"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:bf58ba21df06fc8aeef3056fd137eca0a593c2f5c82923a4524d251dc5f3df5d"}, + {file = "rapidfuzz-3.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:f3effbe9c677658b3149da0d2778a740a6b7d8190c1407fd0c0770a4e223cfe0"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ed0d5761b44d9dd87278d5c32903bb55632346e4d84ea67ba2e4a84afc3b7d45"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bafbd3e2e9e0b5f740f66155cc7e1e23eee1e1f2c44eff12daf14f90af0e8ab"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2543fd8d0fb3b1ac065bf94ee54c0ea33343c62481d8e54b6117a88c92c9b721"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93ceb62ade1a0e62696487274002157a58bb751fc82cd25016fc5523ba558ca5"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:76f4162ce5fe08609455d318936ed4aa709f40784be61fb4e200a378137b0230"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f723197f2dbce508a7030dcf6d3fc940117aa54fc876021bf6f6feeaf3825ba1"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cfdc74afd93ac71270b5be5c25cb864b733b9ae32b07495705a6ac294ac4c390"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:273c7c7f5b405f2f54d41e805883572d57e1f0a56861f93ca5a6733672088acb"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:712dd91d429afaddbf7e86662155f2ad9bc8135fca5803a01035a3c1d76c5977"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:9814905414696080d8448d6e6df788a0148954ab34d7cd8d75bcb85ba30e0b25"}, + {file = 
"rapidfuzz-3.4.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:01013ee67fb15608c8c5961af3bc2b1f242cff94c19f53237c9b3f0edb8e0a2d"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:8f5d2adc48c181486125d42230e80479a1e0568942e883d1ebdeb76cd3f83470"}, + {file = "rapidfuzz-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:c92d847c997c384670e3b4cf6727cb73a4d7a7ba6457310e2083cf06d56013c4"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d0bda173b0ec1fa546f123088c0d42c9096304771b4c0555d4e08a66a246b3f6"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bbb05b1203f683b341f44ebe8fe38afed6e56f606094f9840d6406e4a7bf0eab"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f0075ff8990437923da42202b60cf04b5c122ee2856f0cf2344fb890cadecf57"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f295842c282fe7fe93bfe7a20e78f33f43418f47fb601f2f0a05df8a8282b43"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ebee7313719dfe652debb74bdd4024e8cf381a59adc6d065520ff927f3445f4"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f71454249ddd29d8ba5415ed7307e7b7493fc7e9018f1ff496127b8b9a8df94b"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:52c6b7a178f0e800488fa1aede17b00f6397cab0b79d48531504b0d89e45315f"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d38596c804a9f2bd49360c15e1f4afbf016f181fe37fc4f1a4ddd247d3e91e5"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8756461e7ee79723b8f762fc6db226e65eb453bf9fa64b14fc0274d4aaaf9e21"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e14799297f194a4480f373e45142ef16d5dc68a42084c0e2018e0bdba56a8fef"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f813fb663d90038c1171d30ea1b6b275e09fced32f1d12b972c6045d9d4233f2"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:0df66e07e42e2831fae84dea481f7803bec7cfa53c31d770e86ac47bb18dcd57"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b05c7d4b4ddb617e977d648689013e50e5688140ee03538d3760a3a11d4fa8a2"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-win32.whl", hash = "sha256:74b9a1c1fc139d325fb0b89ccc85527d27096a76f6ed690ee3378143cc38e91d"}, + {file = "rapidfuzz-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5fe3ef7daecd79f852936528e37528fd88818bc000991e0fea23b9ac5b79e875"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:61f16bb0f3026853500e7968261831a2e1a35d56947752bb6cf6953afd70b9de"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d188e8fb5a9709931c6a48cc62c4ac9b9d163969333711e426d9dbd134c1489b"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c006aa481d1b91c2600920ce16e42d208a4b6f318d393aef4dd2172d568f2641"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02afbe7ed12e9191082ed7bda43398baced1d9d805302b7b010d397de3ae973f"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01d64710060bc3241c08ac1f1a9012c7184f3f4c3d6e2eebb16c6093a03f6a67"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", 
hash = "sha256:d3198f70b97127e52a4f96bb2f7de447f89baa338ff398eb126930c8e3137ad1"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:50ad7bac98a0f00492687eddda73d2c0bdf71c78b52fddaa5901634ae323d3ce"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc3efc06db79e818f4a6783a4e001b3c8b2c61bd05c0d5c4d333adaf64ed1b34"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:75d1365387ec8ef2128fd7e2f7436aa1a04a1953bc6d7068835bb769cd07c146"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a0750278693525b5ce58d3b313e432dfa5d90f00d06ae54fa8cde87f2a397eb0"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:2e49151572b842d290dcee2cc6f9ce7a7b40b77cc20d0f6d6b54e7afb7bafa5c"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:8b38d7677b2f20b137bb7aaf0dcd3d8ac2a2cde65f09f5621bf3f57d9a1e5d6e"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d904ac97f2e370f91e8170802669c8ad68641bf84d742968416b53c5960410c6"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-win32.whl", hash = "sha256:53bbef345644eac1c2d7cc21ade4fe9554fa289f60eb2c576f7fdc454dbc0641"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:233bf022938c38060a93863ec548e624d69a56d7384634d8bea435b915b88e52"}, + {file = "rapidfuzz-3.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:63933792146f3d333680d415cecc237e6275b42ad948d0a798f9a81325517666"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e182ea5c809e7ed36ebfbcef4bb1808e213d27b33c036007a33bcbb7ba498356"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e1142c8d35fa6f3af8150d02ff8edcbea3723c851d889e8b2172e0d1b99f3f7"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b8258846e56b03230fa733d29bb4f9fb1f4790ac97d1ebe9faa3ff9d2850999"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:950d1dfd2927cd45c9bb2927933926718f0a17792841e651d42f4d1cb04a5c1d"}, + {file = "rapidfuzz-3.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:dd54dd0355225dc3c1d55e233d510adcccee9bb25d656b4cf1136114b92e7bf3"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f5921780e7995e9ac3cea41fa57b623159d7295788618d3f2946d61328c25c25"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc4b1b69a64d337c40fa07a721dae1b1550d90f17973fb348055f6440d597e26"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f5c8b901b6d3be63591c68e2612f76ad85af27193d0a88d4d87bb047aeafcb3"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c67f5ced39aff6277dd772b239ef8aa8fc810200a3b42f69ddbb085ea0e18232"}, + {file = "rapidfuzz-3.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4fd94acab871afbc845400814134a83512a711e824dc2c9a9776d6123464a221"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:437508ec1ea6e71a77126715ac6208cb9c3e74272536ebfa79be9dd008cfb85f"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7215f7c5de912b364d5cf7c4c66915ccf4acf71aafbb8da62ad346569196e15"}, + {file = 
"rapidfuzz-3.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:698488002eb7be2f737e48679ed0cd310b76291f26d8ec792db8345d13eb6573"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e77873126eb07e7461f0b675263e6c5d42c8a952e88e4a44eeff96f237b2b024"}, + {file = "rapidfuzz-3.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:28d03cd33817f6e0bea9b618b460f85ff9c9c3fedc6c19cfa0992f719a0d1801"}, + {file = "rapidfuzz-3.4.0.tar.gz", hash = "sha256:a74112e2126b428c77db5e96f7ce34e91e750552147305b2d361122cbede2955"}, ] -[package.dependencies] -jarowinkler = ">=1.2.2,<2.0.0" - [package.extras] full = ["numpy"] @@ -4023,6 +3915,7 @@ files = [ {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, + {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, @@ -4032,26 +3925,35 @@ files = [ {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, + {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, {file = 
"SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, + {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, + {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, + {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = 
"sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, + {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = "sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, @@ -4523,6 +4425,16 @@ files = [ {file = "wrapt-1.14.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8ad85f7f4e20964db4daadcab70b47ab05c7c1cf2a7c1e51087bfaa83831854c"}, {file = "wrapt-1.14.1-cp310-cp310-win32.whl", hash = "sha256:a9a52172be0b5aae932bef82a79ec0a0ce87288c7d132946d645eba03f0ad8a8"}, {file = "wrapt-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:6d323e1554b3d22cfc03cd3243b5bb815a51f5249fdcbb86fda4bf62bab9e164"}, + {file = "wrapt-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ecee4132c6cd2ce5308e21672015ddfed1ff975ad0ac8d27168ea82e71413f55"}, + {file = "wrapt-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2020f391008ef874c6d9e208b24f28e31bcb85ccff4f335f15a3251d222b92d9"}, + {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2feecf86e1f7a86517cab34ae6c2f081fd2d0dac860cb0c0ded96d799d20b335"}, + {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:240b1686f38ae665d1b15475966fe0472f78e71b1b4903c143a842659c8e4cb9"}, + {file = "wrapt-1.14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9008dad07d71f68487c91e96579c8567c98ca4c3881b9b113bc7b33e9fd78b8"}, + {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6447e9f3ba72f8e2b985a1da758767698efa72723d5b59accefd716e9e8272bf"}, + {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:acae32e13a4153809db37405f5eba5bac5fbe2e2ba61ab227926a22901051c0a"}, + {file = "wrapt-1.14.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:49ef582b7a1152ae2766557f0550a9fcbf7bbd76f43fbdc94dd3bf07cc7168be"}, + {file = "wrapt-1.14.1-cp311-cp311-win32.whl", hash = "sha256:358fe87cc899c6bb0ddc185bf3dbfa4ba646f05b1b0b9b5a27c2cb92c2cea204"}, + {file = "wrapt-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:26046cd03936ae745a502abf44dac702a5e6880b2b01c29aea8ddf3353b68224"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:43ca3bbbe97af00f49efb06e352eae40434ca9d915906f77def219b88e85d907"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:6b1a564e6cb69922c7fe3a678b9f9a3c54e72b469875aa8018f18b4d1dd1adf3"}, {file = "wrapt-1.14.1-cp35-cp35m-manylinux2010_i686.whl", hash = 
"sha256:00b6d4ea20a906c0ca56d84f93065b398ab74b927a7a3dbd470f6fc503f95dc3"}, @@ -4611,4 +4523,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "2dcbabab70b53c06157b3366b30fc07f8ca677dac8e7c262d82d9617fdfc46b2" +content-hash = "2ee6411718bb1a3c35657a7e0f80c29de793f528d0328fc297e4b6367a35c29a" diff --git a/pyproject.toml b/pyproject.toml index 2aa547f033..59984fb9f8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -187,7 +187,7 @@ html-sanitizer = "^2.1.0" isbnlib = "^3.10.14" itsdangerous = "^2.1.2" jwcrypto = "^1.4.2" -levenshtein = "^0.22" +levenshtein = "^0.23" lxml = "^4.9.3" money = "1.3.0" multipledispatch = "^1.0" From 8a8790f8b6f1e021a1cbc85f0bd601a147cf01f1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Oct 2023 12:07:00 +0000 Subject: [PATCH 095/262] Bump pytest-timeout from 2.1.0 to 2.2.0 (#1447) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4d66c99485..f530576f50 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3298,13 +3298,13 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale [[package]] name = "pytest-timeout" -version = "2.1.0" +version = "2.2.0" description = "pytest plugin to abort hanging tests" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pytest-timeout-2.1.0.tar.gz", hash = "sha256:c07ca07404c612f8abbe22294b23c368e2e5104b521c1790195561f37e1ac3d9"}, - {file = "pytest_timeout-2.1.0-py3-none-any.whl", hash = "sha256:f6f50101443ce70ad325ceb4473c4255e9d74e3c7cd0ef827309dfa4c0d975c6"}, + {file = "pytest-timeout-2.2.0.tar.gz", hash = "sha256:3b0b95dabf3cb50bac9ef5ca912fa0cfc286526af17afc806824df20c2f72c90"}, + {file = "pytest_timeout-2.2.0-py3-none-any.whl", hash = "sha256:bde531e096466f49398a59f2dde76fa78429a09a12411466f88a07213e220de2"}, ] [package.dependencies] From f05e4fc3f0adbffdfc83fe5d96df23b83502af17 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Tue, 10 Oct 2023 20:11:04 +0530 Subject: [PATCH 096/262] PP-536 String coercion for fcm messages (#1449) * String coercion for fcm messages Changed the tests to ensure we do not send bad data again * Reverted the test changes for simplicity, the changes were not achieving anything more * Removed redundant fxture --- core/util/notifications.py | 2 +- pyproject.toml | 1 + tests/core/util/test_notifications.py | 53 +++++++++++++++++++++++++-- 3 files changed, 51 insertions(+), 5 deletions(-) diff --git a/core/util/notifications.py b/core/util/notifications.py index f9e191c615..9a287693a8 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -76,7 +76,7 @@ def send_loan_expiry_message( type=identifier.type, identifier=identifier.identifier, library=library_short_name, - days_to_expiry=days_to_expiry, + days_to_expiry=str(days_to_expiry), ) if loan.patron.external_identifier: data["external_identifier"] = loan.patron.external_identifier diff --git a/pyproject.toml b/pyproject.toml index 59984fb9f8..3ed2fb9e8b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,6 +133,7 @@ module = [ "flask_babel", "flask_pydantic_spec.*", "fuzzywuzzy", + "google.auth", "greenlet", "html_sanitizer", "isbnlib", diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index 54b08efd4c..73fb52cc3c 100644 --- 
a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -1,6 +1,11 @@ +import re +from typing import Generator from unittest import mock +import firebase_admin import pytest +from google.auth import credentials +from requests_mock import Mocker from core.config import Configuration from core.model import create, get_one_or_create @@ -12,9 +17,29 @@ from tests.fixtures.database import DatabaseTransactionFixture +# Mock credential classes pulled directly from the fcm test repository +# https://github.com/firebase/firebase-admin-python/blob/master/tests/testutils.py +class MockGoogleCredential(credentials.Credentials): + """A mock Google authentication credential.""" + + def refresh(self, request): + self.token = "mock-token" + + +class MockCredential(firebase_admin.credentials.Base): + """A mock Firebase credential implementation.""" + + def __init__(self): + self._g_credential = MockGoogleCredential() + + def get_credential(self): + return self._g_credential + + class PushNotificationsFixture: - def __init__(self, db: DatabaseTransactionFixture) -> None: + def __init__(self, db: DatabaseTransactionFixture, app: firebase_admin.App) -> None: self.db = db + self.app = app PushNotifications.TESTING_MODE = True setting = ConfigurationSetting.sitewide( self.db.session, Configuration.BASE_URL_KEY @@ -23,8 +48,14 @@ def __init__(self, db: DatabaseTransactionFixture) -> None: @pytest.fixture(scope="function") -def push_notf_fixture(db: DatabaseTransactionFixture) -> PushNotificationsFixture: - return PushNotificationsFixture(db) +def push_notf_fixture( + db: DatabaseTransactionFixture, +) -> Generator[PushNotificationsFixture, None, None]: + app = firebase_admin.initialize_app( + MockCredential(), options=dict(projectId="mock-app-1"), name="testapp" + ) + yield PushNotificationsFixture(db, app) + firebase_admin.delete_app(app) class TestPushNotifications: @@ -43,6 +74,20 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur work: Work = db.work(with_license_pool=True) loan, _ = work.active_license_pool().loan_to(patron) # type: ignore + # Test the data structuring down to the "send" method + # If bad data is detected, the fcm "send" method will error out + # If not, we are good + with mock.patch( + "core.util.notifications.PushNotifications.fcm_app" + ) as mock_fcm, Mocker() as mocker: + mocker.post( + re.compile("https://fcm.googleapis.com"), json=dict(name="mid-mock") + ) + mock_fcm.return_value = push_notf_fixture.app + assert PushNotifications.send_loan_expiry_message( + loan, 1, [device_token] + ) == ["mid-mock"] + with mock.patch( "core.util.notifications.PushNotifications.fcm_app" ) as mock_fcm, mock.patch("core.util.notifications.messaging") as messaging: @@ -67,7 +112,7 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur identifier=work.presentation_edition.primary_identifier.identifier, type=work.presentation_edition.primary_identifier.type, library=loan.library.short_name, - days_to_expiry=1, + days_to_expiry="1", ), }, ] From 9bb8b6fb0c7bc3e19976809d6d6fb37a3f17a1c9 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 11 Oct 2023 11:03:30 +0530 Subject: [PATCH 097/262] PP-309 Switched the marc exporter to upload files based on size (#1445) * Switched the marc exporter to upload files based on size rather than number of works --- core/marc.py | 19 ++++++++--------- tests/core/test_marc.py | 47 +++++++++++++++++++++++++++++++++++++++-- 
tests/fixtures/s3.py | 6 +++++- 3 files changed, 59 insertions(+), 13 deletions(-) diff --git a/core/marc.py b/core/marc.py index b58e8a5689..928c65e698 100644 --- a/core/marc.py +++ b/core/marc.py @@ -515,6 +515,9 @@ class MARCExporter: INCLUDE_SUMMARY = "include_summary" INCLUDE_SIMPLIFIED_GENRES = "include_simplified_genres" + # The minimum size each piece of a multipart upload should be + MINIMUM_UPLOAD_BATCH_SIZE_BYTES = 5 * 1024 * 1024 # 5MB + LIBRARY_SETTINGS = [ { "key": UPDATE_FREQUENCY, @@ -655,7 +658,6 @@ def records( force_refresh=False, search_engine=None, query_batch_size=500, - upload_batch_size=7500, ): """ Create and export a MARC file for the books in a lane. @@ -666,10 +668,6 @@ def records( :param start_time: Only include records that were created or modified after this time. :param force_refresh: Create new records even when cached records are available. :param query_batch_size: Number of works to retrieve with a single Opensearch query. - :param upload_batch_size: Number of records to mirror at a time. This is different - from query_batch_size because S3 enforces a minimum size of 5MB for all parts - of a multipart upload except the last, but 5MB of records would be too many - works for a single query. """ # We store the content, if it's not empty. If it's empty, we create a CachedMARCFile @@ -694,7 +692,6 @@ def records( content_type=Representation.MARC_MEDIA_TYPE, ) as upload: this_batch = BytesIO() - this_batch_size = 0 while pagination is not None: # Retrieve one 'page' of works from the search index. works = lane.works( @@ -710,14 +707,16 @@ def records( work, annotator, force_refresh, self.integration ) if record: - this_batch.write(record.as_marc()) - this_batch_size += pagination.this_page_size - if this_batch_size >= upload_batch_size: + record_bytes = record.as_marc() + this_batch.write(record_bytes) + if ( + this_batch.getbuffer().nbytes + >= self.MINIMUM_UPLOAD_BATCH_SIZE_BYTES + ): # We've reached or exceeded the upload threshold. # Upload one part of the multipart document. 
self._upload_batch(this_batch, upload) this_batch = BytesIO() - this_batch_size = 0 pagination = pagination.next_page # Upload the final part of the multi-document, if diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py index 27355241e1..cdb5fe9bce 100644 --- a/tests/core/test_marc.py +++ b/tests/core/test_marc.py @@ -2,6 +2,7 @@ import datetime from typing import TYPE_CHECKING +from unittest.mock import MagicMock from urllib.parse import quote import pytest @@ -644,7 +645,6 @@ def test_records_lane( annotator, storage_service, query_batch_size=1, - upload_batch_size=1, search_engine=search_engine, ) @@ -704,7 +704,6 @@ def test_records_start_time( storage_service, start_time=start_time, query_batch_size=2, - upload_batch_size=2, search_engine=search_engine, ) [cache] = db.session.query(CachedMARCFile).all() @@ -751,6 +750,50 @@ def test_records_empty_search( assert cache.start_time is None assert marc_exporter_fixture.now < cache.end_time + def test_records_minimum_size( + self, + db: DatabaseTransactionFixture, + s3_service_fixture: S3ServiceFixture, + marc_exporter_fixture: MarcExporterFixture, + ): + lane = db.lane(genres=["Mystery"]) + storage_service = s3_service_fixture.mock_service() + exporter = marc_exporter_fixture.exporter + annotator = marc_exporter_fixture.annotator + search_engine = marc_exporter_fixture.search_engine + + # Make sure we page exactly how many times we need to + works = [ + db.work(genre="Mystery", with_open_access_download=True) for _ in range(4) + ] + search_engine.mock_query_works(works) + + exporter.MINIMUM_UPLOAD_BATCH_SIZE_BYTES = 100 + # Mock the "records" generated, and force the response to be of certain sizes + created_record_mock = MagicMock() + created_record_mock.as_marc = MagicMock( + side_effect=[b"1" * 600, b"2" * 20, b"3" * 500, b"4" * 10] + ) + exporter.create_record = lambda *args: created_record_mock + + exporter.records( + lane, + annotator, + storage_service, + search_engine=search_engine, + query_batch_size=1, + ) + + assert storage_service.mocked_multipart_upload is not None + # Even though there are 4 parts, we upload in 3 batches due to minimum size limitations + # The "4"th part gets uploaded due it being the tail piece + assert len(storage_service.mocked_multipart_upload.content_parts) == 3 + assert storage_service.mocked_multipart_upload.content_parts == [ + b"1" * 600, + b"2" * 20 + b"3" * 500, + b"4" * 10, + ] + class TestMARCExporterFacets: def test_modify_search_filter(self): diff --git a/tests/fixtures/s3.py b/tests/fixtures/s3.py index 2a2f194cd5..60c10e5687 100644 --- a/tests/fixtures/s3.py +++ b/tests/fixtures/s3.py @@ -49,6 +49,7 @@ def __init__( self.bucket = bucket self.media_type = media_type self.content = b"" + self.content_parts: List[bytes] = [] self._complete = False self._url = url self._exception = None @@ -70,6 +71,7 @@ def __exit__( return False def upload_part(self, content: bytes) -> None: + self.content_parts.append(content) self.content += content @@ -83,6 +85,7 @@ def __init__( ) -> None: super().__init__(client, region, bucket, url_template) self.uploads: List[MockS3ServiceUpload] = [] + self.mocked_multipart_upload: Optional[MockMultipartS3ContextManager] = None def store_stream( self, @@ -96,9 +99,10 @@ def store_stream( def multipart( self, key: str, content_type: Optional[str] = None ) -> MultipartS3ContextManager: - return MockMultipartS3ContextManager( + self.mocked_multipart_upload = MockMultipartS3ContextManager( self, self.bucket, key, self.generate_url(key), content_type ) + return 
self.mocked_multipart_upload class S3ServiceProtocol(Protocol): From b50d16f20ddc911cb8c0c6dceb10e9911f2c519d Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 11 Oct 2023 13:45:37 +0530 Subject: [PATCH 098/262] PP-520 Force content encoding during OPDS serialization (#1444) * Force content encoding during OPDS serialization By forcing the encoding we ensure that unicode characters (which exist) are encoded correctly --- core/feed/opds.py | 2 +- core/feed/serializer/base.py | 4 ++-- core/feed/serializer/opds.py | 6 +++--- core/feed/serializer/opds2.py | 6 +++--- tests/api/feed/equivalence/test_feed_equivalence.py | 2 +- 5 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core/feed/opds.py b/core/feed/opds.py index 84fe5246dc..e8bad3c3d1 100644 --- a/core/feed/opds.py +++ b/core/feed/opds.py @@ -44,7 +44,7 @@ def __init__( self._feed = FeedData() self.log = logging.getLogger(self.__class__.__name__) - def serialize(self, mime_types: Optional[MIMEAccept] = None) -> bytes: + def serialize(self, mime_types: Optional[MIMEAccept] = None) -> str: serializer = get_serializer(mime_types) return serializer.serialize_feed(self._feed) diff --git a/core/feed/serializer/base.py b/core/feed/serializer/base.py index 9141db8cea..5f07345781 100644 --- a/core/feed/serializer/base.py +++ b/core/feed/serializer/base.py @@ -10,13 +10,13 @@ class SerializerInterface(ABC, Generic[T]): @classmethod @abstractmethod - def to_string(cls, data: T) -> bytes: + def to_string(cls, data: T) -> str: ... @abstractmethod def serialize_feed( self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None - ) -> bytes: + ) -> str: ... @abstractmethod diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index a803ee4439..006cee4630 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -73,7 +73,7 @@ def _attr_name( def serialize_feed( self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None - ) -> bytes: + ) -> str: # First we do metadata serialized = self.E.feed() @@ -380,8 +380,8 @@ def _serialize_data_entry(self, entry: DataEntry) -> etree._Element: return element @classmethod - def to_string(cls, element: etree._Element) -> bytes: - return cast(bytes, etree.tostring(element)) + def to_string(cls, element: etree._Element) -> str: + return cast(str, etree.tostring(element, encoding="unicode")) def content_type(self) -> str: return OPDSFeed.ACQUISITION_FEED_TYPE diff --git a/core/feed/serializer/opds2.py b/core/feed/serializer/opds2.py index 74597da7e1..3b98532077 100644 --- a/core/feed/serializer/opds2.py +++ b/core/feed/serializer/opds2.py @@ -38,7 +38,7 @@ def __init__(self) -> None: def serialize_feed( self, feed: FeedData, precomposed_entries: Optional[List[Any]] = None - ) -> bytes: + ) -> str: serialized: Dict[str, Any] = {"publications": []} serialized["metadata"] = self._serialize_metadata(feed) @@ -209,5 +209,5 @@ def content_type(self) -> str: return "application/opds+json" @classmethod - def to_string(cls, data: Dict[str, Any]) -> bytes: - return json.dumps(data, indent=2).encode() + def to_string(cls, data: Dict[str, Any]) -> str: + return json.dumps(data, indent=2) diff --git a/tests/api/feed/equivalence/test_feed_equivalence.py b/tests/api/feed/equivalence/test_feed_equivalence.py index 2140b3dd5e..53e9db82e1 100644 --- a/tests/api/feed/equivalence/test_feed_equivalence.py +++ b/tests/api/feed/equivalence/test_feed_equivalence.py @@ -170,7 +170,7 @@ def 
test_groups_feed( search_engine=search_index, ) - assert_equal_xmls(str(old_feed), new_feed.serialize().decode()) + assert_equal_xmls(str(old_feed), new_feed.serialize()) def test_search_feed( self, From c986da1bad613e806a8c71200ef7bc09916c12dd Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 11 Oct 2023 14:43:35 +0530 Subject: [PATCH 099/262] PP-414 Deleted all deprecated Acquisition feed and annotator classes :axe: (#1438) * Deleted all deprecated Acquisition feed and annotator classes This includes any of the caching scripts that used the feed directly * Removed unused OPDSFeed methods --- api/admin/controller/dashboard.py | 4 +- api/admin/controller/feed.py | 6 +- api/admin/controller/work_editor.py | 2 - api/admin/opds.py | 133 - api/authenticator.py | 2 +- api/controller.py | 69 +- api/opds.py | 1654 --------- api/opds2.py | 68 - api/routes.py | 18 - bin/opds_entry_coverage | 12 - bin/repair/opds_entries | 12 - core/bin/opds_entry_coverage | 8 - core/coverage.py | 25 +- core/feed/acquisition.py | 3 +- core/feed/annotator/circulation.py | 2 +- core/feed/navigation.py | 3 +- core/feed/opds.py | 21 +- core/model/__init__.py | 11 +- core/model/collection.py | 56 +- core/model/coverage.py | 1 - core/model/identifier.py | 54 - core/model/work.py | 16 - core/monitor.py | 15 - core/opds.py | 2117 ----------- core/opds2.py | 353 -- core/scripts.py | 3 - core/util/opds_writer.py | 52 +- docker/services/cron/cron.d/circulation | 1 - tests/api/admin/controller/test_dashboard.py | 6 +- .../controller/test_discovery_services.py | 2 +- .../api/admin/controller/test_work_editor.py | 22 - tests/api/admin/test_opds.py | 217 -- .../feed/equivalence/test_feed_equivalence.py | 317 -- tests/api/feed/test_annotators.py | 337 +- tests/api/feed/test_library_annotator.py | 6 +- tests/api/feed/test_opds2_serializer.py | 2 + tests/api/feed/test_opds_acquisition_feed.py | 11 +- tests/api/test_authenticator.py | 2 +- tests/api/test_controller_opdsfeed.py | 2 +- tests/api/test_opds.py | 2359 ------------ tests/api/test_opds2.py | 154 +- tests/api/test_scripts.py | 6 +- tests/core/models/test_collection.py | 45 +- tests/core/models/test_identifier.py | 88 +- tests/core/models/test_work.py | 38 +- tests/core/test_coverage.py | 23 - tests/core/test_monitor.py | 18 - tests/core/test_opds.py | 3238 ----------------- tests/core/test_opds2.py | 310 -- tests/core/test_scripts.py | 4 - tests/core/util/test_opds_writer.py | 7 - tests/fixtures/database.py | 2 - 52 files changed, 329 insertions(+), 11608 deletions(-) delete mode 100644 api/admin/opds.py delete mode 100644 api/opds.py delete mode 100755 bin/opds_entry_coverage delete mode 100755 bin/repair/opds_entries delete mode 100755 core/bin/opds_entry_coverage delete mode 100644 core/opds.py delete mode 100644 core/opds2.py delete mode 100644 tests/api/admin/test_opds.py delete mode 100644 tests/api/feed/equivalence/test_feed_equivalence.py delete mode 100644 tests/api/test_opds.py delete mode 100644 tests/core/test_opds.py delete mode 100644 tests/core/test_opds2.py diff --git a/api/admin/controller/dashboard.py b/api/admin/controller/dashboard.py index 05e3d8518b..fd53cb6e66 100644 --- a/api/admin/controller/dashboard.py +++ b/api/admin/controller/dashboard.py @@ -8,9 +8,9 @@ from sqlalchemy.orm import Session from api.admin.model.dashboard_statistics import StatisticsResponse -from api.admin.opds import AdminAnnotator from api.controller import CirculationManagerController from api.local_analytics_exporter import 
LocalAnalyticsExporter +from core.feed.annotator.admin import AdminAnnotator from core.model import ( Admin, CirculationEvent, @@ -51,8 +51,6 @@ def circulation_events(self): "book": { "title": result.license_pool.work.title, "url": annotator.permalink_for( - result.license_pool.work, - result.license_pool, result.license_pool.identifier, ), }, diff --git a/api/admin/controller/feed.py b/api/admin/controller/feed.py index 0cb766532f..ea91c10c38 100644 --- a/api/admin/controller/feed.py +++ b/api/admin/controller/feed.py @@ -4,11 +4,11 @@ from flask import url_for from api.admin.controller.base import AdminPermissionsControllerMixin -from api.admin.opds import AdminAnnotator, AdminFeed from api.controller import CirculationManagerController from core.app_server import load_pagination_from_request from core.classifier import genres -from core.util.flask_util import OPDSFeedResponse +from core.feed.admin import AdminFeed +from core.feed.annotator.admin import AdminAnnotator from core.util.problem_detail import ProblemDetail @@ -28,7 +28,7 @@ def suppressed(self): annotator=annotator, pagination=pagination, ) - return OPDSFeedResponse(opds_feed, max_age=0) + return opds_feed.as_response(max_age=0) def genres(self): data = dict({"Fiction": dict({}), "Nonfiction": dict({})}) diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index 32357ec410..bc79a1bfe4 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -339,7 +339,6 @@ def edit(self, identifier_type, identifier): # problem the user is trying to fix. policy = PresentationCalculationPolicy( classify=True, - regenerate_opds_entries=True, regenerate_marc_record=True, update_search_index=True, calculate_quality=changed_rating, @@ -622,7 +621,6 @@ def edit_classifications(self, identifier_type, identifier): # Update presentation policy = PresentationCalculationPolicy( classify=True, - regenerate_opds_entries=True, regenerate_marc_record=True, update_search_index=True, ) diff --git a/api/admin/opds.py b/api/admin/opds.py deleted file mode 100644 index b7d921cc56..0000000000 --- a/api/admin/opds.py +++ /dev/null @@ -1,133 +0,0 @@ -from sqlalchemy import and_ - -from api.opds import LibraryAnnotator -from core.lane import Pagination -from core.model import DataSource, LicensePool -from core.opds import AcquisitionFeed, VerboseAnnotator - - -class AdminAnnotator(LibraryAnnotator): - def __init__(self, circulation, library, test_mode=False): - super().__init__(circulation, None, library, test_mode=test_mode) - self.opds_cache_field = None - - def annotate_work_entry( - self, work, active_license_pool, edition, identifier, feed, entry - ): - super().annotate_work_entry( - work, active_license_pool, edition, identifier, feed, entry - ) - VerboseAnnotator.add_ratings(work, entry) - - # Find staff rating and add a tag for it. 
- for measurement in identifier.measurements: - if ( - measurement.data_source.name == DataSource.LIBRARY_STAFF - and measurement.is_most_recent - ): - entry.append( - self.rating_tag(measurement.quantity_measured, measurement.value) - ) - - if active_license_pool and active_license_pool.suppressed: - feed.add_link_to_entry( - entry, - rel="http://librarysimplified.org/terms/rel/restore", - href=self.url_for( - "unsuppress", - identifier_type=identifier.type, - identifier=identifier.identifier, - _external=True, - ), - ) - else: - feed.add_link_to_entry( - entry, - rel="http://librarysimplified.org/terms/rel/hide", - href=self.url_for( - "suppress", - identifier_type=identifier.type, - identifier=identifier.identifier, - _external=True, - ), - ) - - feed.add_link_to_entry( - entry, - rel="edit", - href=self.url_for( - "edit", - identifier_type=identifier.type, - identifier=identifier.identifier, - _external=True, - ), - ) - - def suppressed_url(self, pagination): - kwargs = dict(list(pagination.items())) - return self.url_for("suppressed", _external=True, **kwargs) - - def annotate_feed(self, feed): - # Add a 'search' link. - search_url = self.url_for("lane_search", languages=None, _external=True) - search_link = dict( - rel="search", type="application/opensearchdescription+xml", href=search_url - ) - feed.add_link_to_feed(feed.feed, **search_link) - - -class AdminFeed(AcquisitionFeed): - @classmethod - def suppressed(cls, _db, title, url, annotator, pagination=None): - pagination = pagination or Pagination.default() - - q = ( - _db.query(LicensePool) - .filter( - and_( - LicensePool.suppressed == True, - LicensePool.superceded == False, - ) - ) - .order_by(LicensePool.id) - ) - pools = pagination.modify_database_query(_db, q).all() - - works = [pool.work for pool in pools] - feed = cls(_db, title, url, works, annotator) - - # Render a 'start' link - top_level_title = annotator.top_level_title() - start_uri = annotator.groups_url(None) - AdminFeed.add_link_to_feed( - feed.feed, href=start_uri, rel="start", title=top_level_title - ) - - # Render an 'up' link, same as the 'start' link to indicate top-level feed - AdminFeed.add_link_to_feed( - feed.feed, href=start_uri, rel="up", title=top_level_title - ) - - if len(works) > 0: - # There are works in this list. Add a 'next' link. 
- AdminFeed.add_link_to_feed( - feed.feed, - rel="next", - href=annotator.suppressed_url(pagination.next_page), - ) - - if pagination.offset > 0: - AdminFeed.add_link_to_feed( - feed.feed, - rel="first", - href=annotator.suppressed_url(pagination.first_page), - ) - - previous_page = pagination.previous_page - if previous_page: - AdminFeed.add_link_to_feed( - feed.feed, rel="previous", href=annotator.suppressed_url(previous_page) - ) - - annotator.annotate_feed(feed) - return str(feed) diff --git a/api/authenticator.py b/api/authenticator.py index 6d8b358a5b..865d36d3f5 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -29,11 +29,11 @@ from core.model import ConfigurationSetting, Library, Patron, PatronProfileStorage from core.model.announcements import Announcement from core.model.integration import IntegrationLibraryConfiguration -from core.opds import OPDSFeed from core.user_profile import ProfileController from core.util.authentication_for_opds import AuthenticationForOPDSDocument from core.util.http import RemoteIntegrationException from core.util.log import LoggerMixin, elapsed_time_logging +from core.util.opds_writer import OPDSFeed from core.util.problem_detail import ProblemDetail, ProblemError if sys.version_info >= (3, 11): diff --git a/api/controller.py b/api/controller.py index e19aa1aed7..1908fc549a 100644 --- a/api/controller.py +++ b/api/controller.py @@ -21,7 +21,6 @@ from sqlalchemy import select from sqlalchemy.orm import eagerload from sqlalchemy.orm.exc import NoResultFound -from werkzeug.datastructures import MIMEAccept from api.annotations import AnnotationParser, AnnotationWriter from api.authentication.access_token import AccessTokenProvider @@ -50,7 +49,6 @@ from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse from api.odl import ODLAPI from api.odl2 import ODL2API -from api.opds2 import OPDS2NavigationsAnnotator from api.problem_details import * from api.saml.controller import SAMLController from core.analytics import Analytics @@ -69,6 +67,7 @@ LibraryAnnotator, ) from core.feed.navigation import NavigationFeed +from core.feed.opds import NavigationFacets from core.lane import ( BaseFacets, Facets, @@ -107,8 +106,6 @@ InvalidTokenTypeError, ) from core.model.discovery_service_registration import DiscoveryServiceRegistration -from core.opds import NavigationFacets -from core.opds2 import AcquisitonFeedOPDS2 from core.opensearch import OpenSearchDocument from core.query.playtime_entries import PlaytimeEntries from core.service.container import Services @@ -176,7 +173,6 @@ class CirculationManager: # API Controllers index_controller: IndexController opds_feeds: OPDSFeedController - opds2_feeds: OPDS2FeedController marc_records: MARCRecordController loans: LoanController annotations: AnnotationController @@ -439,7 +435,6 @@ def setup_one_time_controllers(self): """ self.index_controller = IndexController(self) self.opds_feeds = OPDSFeedController(self) - self.opds2_feeds = OPDS2FeedController(self) self.marc_records = MARCRecordController(self) self.loans = LoanController(self) self.annotations = AnnotationController(self) @@ -1275,68 +1270,6 @@ class FeedRequestParameters: problem: ProblemDetail | None = None -class OPDS2FeedController(CirculationManagerController): - """All OPDS2 type feeds are served through this controller""" - - def _parse_feed_request(self): - """Parse the request to get frequently used request parameters for the feeds""" - library = getattr(flask.request, "library", None) - pagination = 
load_pagination_from_request(SortKeyPagination) - if isinstance(pagination, ProblemDetail): - return FeedRequestParameters(problem=pagination) - - try: - facets = load_facets_from_request() - if isinstance(facets, ProblemDetail): - return FeedRequestParameters(problem=facets) - except AttributeError: - # No facets/library present, so NoneType - facets = None - - return FeedRequestParameters( - library=library, facets=facets, pagination=pagination - ) - - def publications(self): - """OPDS2 publications feed""" - params: FeedRequestParameters = self._parse_feed_request() - if params.problem: - return params.problem - lane = self.load_lane(None) - annotator = self.manager.annotator(lane, params.facets) - max_age = flask.request.args.get("max_age") - feed = OPDSAcquisitionFeed.page( - self._db, - lane.display_name, - flask.request.url, - lane, - annotator, - params.facets, - params.pagination, - self.search_engine, - ) - return feed.as_response( - mime_types=MIMEAccept([("application/opds+json", 1)]), # Force the type - max_age=int(max_age) if max_age is not None else None, - ) - - def navigation(self): - """OPDS2 navigation links""" - params: FeedRequestParameters = self._parse_feed_request() - annotator = OPDS2NavigationsAnnotator( - flask.request.url, - params.facets, - params.pagination, - params.library, - title="OPDS2 Navigation", - ) - feed = AcquisitonFeedOPDS2.navigation(self._db, annotator) - - return Response( - str(feed), status=200, headers={"Content-Type": annotator.OPDS2_TYPE} - ) - - class MARCRecordController(CirculationManagerController): DOWNLOAD_TEMPLATE = """ diff --git a/api/opds.py b/api/opds.py deleted file mode 100644 index 912ce848f0..0000000000 --- a/api/opds.py +++ /dev/null @@ -1,1654 +0,0 @@ -from __future__ import annotations - -import copy -import logging -import urllib.error -import urllib.parse -import urllib.request -from collections import defaultdict -from typing import Any, List - -from flask import url_for - -from api.adobe_vendor_id import AuthdataUtility -from api.annotations import AnnotationWriter -from api.circulation import BaseCirculationAPI, FulfillmentInfo -from api.config import CannotLoadConfiguration, Configuration -from api.lanes import DynamicLane -from api.novelist import NoveListAPI -from api.problem_details import NOT_FOUND_ON_REMOTE -from core.analytics import Analytics -from core.classifier import Classifier -from core.entrypoint import EverythingEntryPoint -from core.external_search import WorkSearchResult -from core.lane import Lane, WorkList -from core.lcp.credential import LCPCredentialFactory, LCPHashedPassphrase -from core.lcp.exceptions import LCPError -from core.model import ( - CirculationEvent, - Collection, - DeliveryMechanism, - Edition, - Hold, - Library, - LicensePool, - LicensePoolDeliveryMechanism, - Loan, - Patron, - Session, -) -from core.model.configuration import ExternalIntegration -from core.model.constants import EditionConstants, LinkRelations, MediaTypes -from core.model.formats import FormatPriorities -from core.model.integration import IntegrationConfiguration -from core.opds import AcquisitionFeed, Annotator, UnfulfillableWork -from core.util.datetime_helpers import from_timestamp -from core.util.flask_util import OPDSEntryResponse -from core.util.opds_writer import OPDSFeed -from core.util.problem_detail import ProblemDetail - - -class CirculationManagerAnnotator(Annotator): - hidden_content_types: list[str] - - def __init__( - self, - lane, - active_loans_by_work={}, - active_holds_by_work={}, - 
active_fulfillments_by_work={}, - hidden_content_types=[], - test_mode=False, - ): - if lane: - logger_name = "Circulation Manager Annotator for %s" % lane.display_name - else: - logger_name = "Circulation Manager Annotator" - self.log = logging.getLogger(logger_name) - self.lane = lane - self.active_loans_by_work = active_loans_by_work - self.active_holds_by_work = active_holds_by_work - self.active_fulfillments_by_work = active_fulfillments_by_work - self.hidden_content_types = hidden_content_types - self.test_mode = test_mode - - def is_work_entry_solo(self, work): - """Return a boolean value indicating whether the work's OPDS catalog entry is served by itself, - rather than as a part of the feed. - - :param work: Work object - :type work: core.model.work.Work - - :return: Boolean value indicating whether the work's OPDS catalog entry is served by itself, - rather than as a part of the feed - :rtype: bool - """ - return any( - work in x - for x in ( - self.active_loans_by_work, - self.active_holds_by_work, - self.active_fulfillments_by_work, - ) - ) - - def _lane_identifier(self, lane): - if isinstance(lane, Lane): - return lane.id - return None - - def top_level_title(self): - return "" - - def default_lane_url(self): - return self.feed_url(None) - - def lane_url(self, lane): - return self.feed_url(lane) - - def url_for(self, *args, **kwargs): - if self.test_mode: - new_kwargs = {} - for k, v in list(kwargs.items()): - if not k.startswith("_"): - new_kwargs[k] = v - return self.test_url_for(False, *args, **new_kwargs) - else: - return url_for(*args, **kwargs) - - def test_url_for(self, cdn=False, *args, **kwargs): - # Generate a plausible-looking URL that doesn't depend on Flask - # being set up. - if cdn: - host = "cdn" - else: - host = "host" - url = ("http://%s/" % host) + "/".join(args) - connector = "?" - for k, v in sorted(kwargs.items()): - if v is None: - v = "" - v = urllib.parse.quote(str(v)) - k = urllib.parse.quote(str(k)) - url += connector + f"{k}={v}" - connector = "&" - return url - - def facet_url(self, facets): - return self.feed_url(self.lane, facets=facets, default_route=self.facet_view) - - def feed_url( - self, - lane, - facets=None, - pagination=None, - default_route="feed", - extra_kwargs=None, - ): - if isinstance(lane, WorkList) and hasattr(lane, "url_arguments"): - route, kwargs = lane.url_arguments - else: - route = default_route - lane_identifier = self._lane_identifier(lane) - kwargs = dict(lane_identifier=lane_identifier) - if facets != None: - kwargs.update(dict(list(facets.items()))) - if pagination != None: - kwargs.update(dict(list(pagination.items()))) - if extra_kwargs: - kwargs.update(extra_kwargs) - return self.url_for(route, _external=True, **kwargs) - - def navigation_url(self, lane): - return self.url_for( - "navigation_feed", - lane_identifier=self._lane_identifier(lane), - library_short_name=lane.library.short_name, - _external=True, - ) - - def active_licensepool_for(self, work, library=None): - loan = self.active_loans_by_work.get(work) or self.active_holds_by_work.get( - work - ) - if loan: - # The active license pool is the one associated with - # the loan/hold. - return loan.license_pool - else: - # There is no active loan. Use the default logic for - # determining the active license pool. 
- return super().active_licensepool_for(work, library=library) - - @staticmethod - def _prioritized_formats_for_pool( - licensepool: LicensePool, - ) -> tuple[list[str], list[str]]: - collection: Collection = licensepool.collection - config: IntegrationConfiguration = collection.integration_configuration - - # Consult the configuration information for the integration configuration - # that underlies the license pool's collection. The configuration - # information _might_ contain a set of prioritized DRM schemes and - # content types. - prioritized_drm_schemes: list[str] = ( - config.settings_dict.get(FormatPriorities.PRIORITIZED_DRM_SCHEMES_KEY) or [] - ) - - content_setting: List[str] = ( - config.settings_dict.get(FormatPriorities.PRIORITIZED_CONTENT_TYPES_KEY) - or [] - ) - return prioritized_drm_schemes, content_setting - - @staticmethod - def _deprioritized_lcp_content( - licensepool: LicensePool, - ) -> bool: - collection: Collection = licensepool.collection - config: IntegrationConfiguration = collection.integration_configuration - - # Consult the configuration information for the integration configuration - # that underlies the license pool's collection. The configuration - # information _might_ contain a flag that indicates whether to deprioritize - # LCP content. By default, if no configuration value is specified, then - # the priority of LCP content will be left completely unchanged. - - _prioritize: bool = config.settings_dict.get( - FormatPriorities.DEPRIORITIZE_LCP_NON_EPUBS_KEY, False - ) - return _prioritize - - def visible_delivery_mechanisms( - self, licensepool: LicensePool | None - ) -> list[LicensePoolDeliveryMechanism]: - if not licensepool: - return [] - - ( - prioritized_drm_schemes, - prioritized_content_types, - ) = CirculationManagerAnnotator._prioritized_formats_for_pool(licensepool) - - return FormatPriorities( - prioritized_drm_schemes=prioritized_drm_schemes, - prioritized_content_types=prioritized_content_types, - hidden_content_types=self.hidden_content_types, - deprioritize_lcp_non_epubs=CirculationManagerAnnotator._deprioritized_lcp_content( - licensepool - ), - ).prioritize_for_pool(licensepool) - - def annotate_work_entry( - self, - work, - active_license_pool, - edition, - identifier, - feed, - entry, - updated=None, - ): - # If OpenSearch included a more accurate last_update_time, - # use it instead of Work.last_update_time - updated = work.last_update_time - if isinstance(work, WorkSearchResult): - # Opensearch puts this field in a list, but we've set it up - # so there will be at most one value. - last_updates = getattr(work._hit, "last_update", []) - if last_updates: - # last_update is seconds-since epoch; convert to UTC datetime. - updated = from_timestamp(last_updates[0]) - - # There's a chance that work.last_updated has been - # modified but the change hasn't made it to the search - # engine yet. Even then, we stick with the search - # engine value, because a sorted list is more - # important to the import process than an up-to-date - # 'last update' value. - - super().annotate_work_entry( - work, active_license_pool, edition, identifier, feed, entry, updated - ) - active_loan = self.active_loans_by_work.get(work) - active_hold = self.active_holds_by_work.get(work) - active_fulfillment = self.active_fulfillments_by_work.get(work) - - # Now we need to generate a tag for every delivery mechanism - # that has well-defined media types. 
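On the last_update handling in the removed annotate_work_entry above: the search document stores last_update as seconds since the epoch, so it has to be converted to an aware UTC datetime before it can be used as the entry's updated value. The project's from_timestamp helper is used for that; a plain-stdlib equivalent is sketched below (an assumption about its behaviour, not its actual implementation).

from datetime import datetime, timezone

def from_timestamp_sketch(ts: float) -> datetime:
    # Interpret seconds-since-epoch as an aware UTC datetime.
    return datetime.fromtimestamp(ts, tz=timezone.utc)

assert from_timestamp_sketch(0) == datetime(1970, 1, 1, tzinfo=timezone.utc)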
- link_tags = self.acquisition_links( - active_license_pool, - active_loan, - active_hold, - active_fulfillment, - feed, - identifier, - ) - for tag in link_tags: - entry.append(tag) - - def acquisition_links( - self, - active_license_pool, - active_loan, - active_hold, - active_fulfillment, - feed, - identifier, - can_hold=True, - can_revoke_hold=True, - set_mechanism_at_borrow=False, - direct_fulfillment_delivery_mechanisms=[], - add_open_access_links=True, - ): - """Generate a number of tags that enumerate all acquisition - methods. - - :param direct_fulfillment_delivery_mechanisms: A way to - fulfill each LicensePoolDeliveryMechanism in this list will be - presented as a link with - rel="http://opds-spec.org/acquisition/open-access", indicating - that it can be downloaded with no intermediate steps such as - authentication. - """ - can_borrow = False - can_fulfill = False - can_revoke = False - - if active_loan: - can_fulfill = True - can_revoke = True - elif active_hold: - # We display the borrow link even if the patron can't - # borrow the book right this minute. - can_borrow = True - - can_revoke = can_revoke_hold - elif active_fulfillment: - can_fulfill = True - can_revoke = True - else: - # The patron has no existing relationship with this - # work. Give them the opportunity to check out the work - # or put it on hold. - can_borrow = True - - # If there is something to be revoked for this book, - # add a link to revoke it. - revoke_links = [] - if can_revoke: - revoke_links.append( - self.revoke_link(active_license_pool, active_loan, active_hold) - ) - - # Add next-step information for every useful delivery - # mechanism. - borrow_links = [] - if can_borrow: - # Borrowing a book gives you an OPDS entry that gives you - # fulfillment links for every visible delivery mechanism. - visible_mechanisms = self.visible_delivery_mechanisms(active_license_pool) - if set_mechanism_at_borrow and active_license_pool: - # The ebook distributor requires that the delivery - # mechanism be set at the point of checkout. This means - # a separate borrow link for each mechanism. - for mechanism in visible_mechanisms: - borrow_links.append( - self.borrow_link( - active_license_pool, mechanism, [mechanism], active_hold - ) - ) - elif active_license_pool: - # The ebook distributor does not require that the - # delivery mechanism be set at the point of - # checkout. This means a single borrow link with - # indirectAcquisition tags for every visible delivery - # mechanism. If a delivery mechanism must be set, it - # will be set at the point of fulfillment. - borrow_links.append( - self.borrow_link( - active_license_pool, None, visible_mechanisms, active_hold - ) - ) - - # Generate the licensing tags that tell you whether the book - # is available. - for link in borrow_links: - if link is not None: - for t in feed.license_tags( - active_license_pool, active_loan, active_hold - ): - link.append(t) - - # Add links for fulfilling an active loan. - fulfill_links = [] - if can_fulfill: - if active_fulfillment: - # We're making an entry for a specific fulfill link. - type = active_fulfillment.content_type - url = active_fulfillment.content_link - rel = OPDSFeed.ACQUISITION_REL - link_tag = AcquisitionFeed.acquisition_link( - rel=rel, href=url, types=[type], active_loan=active_loan - ) - fulfill_links.append(link_tag) - - elif active_loan and active_loan.fulfillment: - # The delivery mechanism for this loan has been - # set. 
There is one link for the delivery mechanism - # that was locked in, and links for any streaming - # delivery mechanisms. - # - # Since the delivery mechanism has already been locked in, - # we choose not to use visible_delivery_mechanisms -- - # they already chose it and they're stuck with it. - for lpdm in active_license_pool.delivery_mechanisms: - if ( - lpdm is active_loan.fulfillment - or lpdm.delivery_mechanism.is_streaming - ): - fulfill_links.append( - self.fulfill_link( - active_license_pool, - active_loan, - lpdm.delivery_mechanism, - ) - ) - else: - # The delivery mechanism for this loan has not been - # set. There is one fulfill link for every visible - # delivery mechanism. - for lpdm in self.visible_delivery_mechanisms(active_license_pool): - fulfill_links.append( - self.fulfill_link( - active_license_pool, active_loan, lpdm.delivery_mechanism - ) - ) - - open_access_links = [] - for lpdm in direct_fulfillment_delivery_mechanisms: - # These links use the OPDS 'open-access' link relation not - # because they are open access in the licensing sense, but - # because they are ways to download the book "without any - # requirement, which includes payment and registration." - # - # To avoid confusion, we explicitly add a dc:rights - # statement to each link explaining what the rights are to - # this title. - direct_fulfill = self.fulfill_link( - active_license_pool, - active_loan, - lpdm.delivery_mechanism, - rel=OPDSFeed.OPEN_ACCESS_REL, - ) - direct_fulfill.attrib.update(self.rights_attributes(lpdm)) - open_access_links.append(direct_fulfill) - - # If this is an open-access book, add an open-access link for - # every delivery mechanism with an associated resource. - # But only if this library allows it, generally this is if - # a library has no patron authentication attached to it - if ( - add_open_access_links - and active_license_pool - and active_license_pool.open_access - ): - for lpdm in active_license_pool.delivery_mechanisms: - if lpdm.resource: - open_access_links.append( - self.open_access_link(active_license_pool, lpdm) - ) - - return [ - x - for x in borrow_links + fulfill_links + open_access_links + revoke_links - if x is not None - ] - - def revoke_link(self, active_license_pool, active_loan, active_hold): - return None - - def borrow_link( - self, - active_license_pool, - borrow_mechanism, - fulfillment_mechanisms, - active_hold=None, - ): - return None - - def fulfill_link( - self, - license_pool, - active_loan, - delivery_mechanism, - rel=OPDSFeed.ACQUISITION_REL, - ): - return None - - def open_access_link(self, pool, lpdm): - _db = Session.object_session(lpdm) - kw = dict(rel=OPDSFeed.OPEN_ACCESS_REL, type="") - - # Start off assuming that the URL associated with the - # LicensePoolDeliveryMechanism's Resource is the URL we should - # send for download purposes. This will be the case unless we - # previously mirrored that URL somewhere else. - href = lpdm.resource.url - - rep = lpdm.resource.representation - if rep: - if rep.media_type: - kw["type"] = rep.media_type - href = rep.public_url - kw["href"] = href - link_tag = AcquisitionFeed.link(**kw) - link_tag.attrib.update(self.rights_attributes(lpdm)) - always_available = OPDSFeed.makeelement( - "{%s}availability" % OPDSFeed.OPDS_NS, status="available" - ) - link_tag.append(always_available) - return link_tag - - def rights_attributes(self, lpdm): - """Create a dictionary of tag attributes that explain the - rights status of a LicensePoolDeliveryMechanism. - - If nothing is known, the dictionary will be empty. 
- """ - if not lpdm or not lpdm.rights_status or not lpdm.rights_status.uri: - return {} - rights_attr = "{%s}rights" % OPDSFeed.DCTERMS_NS - return {rights_attr: lpdm.rights_status.uri} - - @classmethod - def _single_entry_response( - cls, _db, work, annotator, url, feed_class=AcquisitionFeed, **response_kwargs - ): - """Helper method to create an OPDSEntryResponse for a single OPDS entry. - - :param _db: A database connection. - :param work: A Work - :param annotator: An Annotator - :param url: The URL of the feed to be served. Used only if there's - a problem with the Work. - :param feed_class: A replacement for AcquisitionFeed, for use in tests. - :param response_kwargs: A set of extra keyword arguments to - be passed into the OPDSEntryResponse constructor. - - :return: An OPDSEntryResponse if everything goes well; otherwise an OPDSFeedResponse - containing an error message. - """ - if not work: - return feed_class( - _db, title="Unknown work", url=url, works=[], annotator=annotator - ).as_error_response() - - # This method is generally used for reporting the results of - # authenticated transactions such as borrowing and hold - # placement. - # - # This means the document contains up-to-date information - # specific to the authenticated client. The client should - # cache this document for a while, but no one else should - # cache it. - response_kwargs.setdefault("max_age", 30 * 60) - response_kwargs.setdefault("private", True) - return feed_class.single_entry(_db, work, annotator, **response_kwargs) - - -class LibraryAnnotator(CirculationManagerAnnotator): - def __init__( - self, - circulation, - lane, - library, - patron=None, - active_loans_by_work={}, - active_holds_by_work={}, - active_fulfillments_by_work={}, - facet_view="feed", - test_mode=False, - top_level_title="All Books", - library_identifies_patrons=True, - facets=None, - ): - """Constructor. - - :param library_identifies_patrons: A boolean indicating - whether or not this library can distinguish between its - patrons. A library might not authenticate patrons at - all, or it might distinguish patrons from non-patrons in a - way that does not allow it to keep track of individuals. - - If this is false, links that imply the library can - distinguish between patrons will not be included. Depending - on the configured collections, some extra links may be - added, for direct acquisition of titles that would normally - require a loan. 
- """ - super().__init__( - lane, - active_loans_by_work=active_loans_by_work, - active_holds_by_work=active_holds_by_work, - active_fulfillments_by_work=active_fulfillments_by_work, - hidden_content_types=library.settings.hidden_content_types, - test_mode=test_mode, - ) - self.circulation = circulation - self.library: Library = library - self.patron = patron - self.lanes_by_work = defaultdict(list) - self.facet_view = facet_view - self._adobe_id_tags = {} - self._top_level_title = top_level_title - self.identifies_patrons = library_identifies_patrons - self.facets = facets or None - - def top_level_title(self): - return self._top_level_title - - def permalink_for(self, work, license_pool, identifier): - url = self.url_for( - "permalink", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=self.library.short_name, - _external=True, - ) - return url, OPDSFeed.ENTRY_TYPE - - def groups_url(self, lane, facets=None): - lane_identifier = self._lane_identifier(lane) - if facets: - kwargs = dict(list(facets.items())) - else: - kwargs = {} - - return self.url_for( - "acquisition_groups", - lane_identifier=lane_identifier, - library_short_name=self.library.short_name, - _external=True, - **kwargs, - ) - - def default_lane_url(self, facets=None): - return self.groups_url(None, facets=facets) - - def feed_url(self, lane, facets=None, pagination=None, default_route="feed"): - extra_kwargs = dict() - if self.library: - extra_kwargs["library_short_name"] = self.library.short_name - return super().feed_url(lane, facets, pagination, default_route, extra_kwargs) - - def search_url(self, lane, query, pagination, facets=None): - lane_identifier = self._lane_identifier(lane) - kwargs = dict(q=query) - if facets: - kwargs.update(dict(list(facets.items()))) - if pagination: - kwargs.update(dict(list(pagination.items()))) - return self.url_for( - "lane_search", - lane_identifier=lane_identifier, - library_short_name=self.library.short_name, - _external=True, - **kwargs, - ) - - def group_uri(self, work, license_pool, identifier): - if not work in self.lanes_by_work: - return None, "" - - lanes = self.lanes_by_work[work] - if not lanes: - # I don't think this should ever happen? - lane_name = None - url = self.url_for( - "acquisition_groups", - lane_identifier=None, - library_short_name=self.library.short_name, - _external=True, - ) - title = "All Books" - return url, title - - lane = lanes[0] - self.lanes_by_work[work] = lanes[1:] - lane_name = "" - show_feed = False - - if isinstance(lane, dict): - show_feed = lane.get("link_to_list_feed", show_feed) - title = lane.get("label", lane_name) - lane = lane["lane"] - - if isinstance(lane, str): - return lane, lane_name - - if hasattr(lane, "display_name") and not title: - title = lane.display_name - - if show_feed: - return self.feed_url(lane, self.facets), title - - return self.lane_url(lane, self.facets), title - - def lane_url(self, lane, facets=None): - # If the lane has sublanes, the URL identifying the group will - # take the user to another set of groups for the - # sublanes. Otherwise it will take the user to a list of the - # books in the lane by author. - - if lane and isinstance(lane, Lane) and lane.sublanes: - url = self.groups_url(lane, facets=facets) - elif lane and (isinstance(lane, Lane) or isinstance(lane, DynamicLane)): - url = self.feed_url(lane, facets) - else: - # This lane isn't part of our lane hierarchy. It's probably - # a WorkList created to represent the top-level. Use the top-level - # url for it. 
- url = self.default_lane_url(facets=facets) - return url - - def annotate_work_entry( - self, work, active_license_pool, edition, identifier, feed, entry - ): - super().annotate_work_entry( - work, active_license_pool, edition, identifier, feed, entry - ) - - # Add a link to each author tag. - self.add_author_links(work, feed, entry) - - # And a series, if there is one. - if work.series: - self.add_series_link(work, feed, entry) - - if NoveListAPI.is_configured(self.library): - # If NoveList Select is configured, there might be - # recommendations, too. - feed.add_link_to_entry( - entry, - rel="recommendations", - type=OPDSFeed.ACQUISITION_FEED_TYPE, - title="Recommended Works", - href=self.url_for( - "recommendations", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=self.library.short_name, - _external=True, - ), - ) - - # Add a link for related books if available. - if self.related_books_available(work, self.library): - feed.add_link_to_entry( - entry, - rel="related", - type=OPDSFeed.ACQUISITION_FEED_TYPE, - title="Recommended Works", - href=self.url_for( - "related_books", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=self.library.short_name, - _external=True, - ), - ) - - # Add a link to get a patron's annotations for this book. - if self.identifies_patrons: - feed.add_link_to_entry( - entry, - rel="http://www.w3.org/ns/oa#annotationService", - type=AnnotationWriter.CONTENT_TYPE, - href=self.url_for( - "annotations_for_work", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=self.library.short_name, - _external=True, - ), - ) - - if Analytics.is_configured(self.library): - feed.add_link_to_entry( - entry, - rel="http://librarysimplified.org/terms/rel/analytics/open-book", - href=self.url_for( - "track_analytics_event", - identifier_type=identifier.type, - identifier=identifier.identifier, - event_type=CirculationEvent.OPEN_BOOK, - library_short_name=self.library.short_name, - _external=True, - ), - ) - - @classmethod - def related_books_available(cls, work, library): - """:return: bool asserting whether related books might exist for a particular Work""" - contributions = work.sort_author and work.sort_author != Edition.UNKNOWN_AUTHOR - - return contributions or work.series or NoveListAPI.is_configured(library) - - def language_and_audience_key_from_work(self, work): - language_key = work.language - - audiences = None - if work.audience == Classifier.AUDIENCE_CHILDREN: - audiences = [Classifier.AUDIENCE_CHILDREN] - elif work.audience == Classifier.AUDIENCE_YOUNG_ADULT: - audiences = Classifier.AUDIENCES_JUVENILE - elif work.audience == Classifier.AUDIENCE_ALL_AGES: - audiences = [Classifier.AUDIENCE_CHILDREN, Classifier.AUDIENCE_ALL_AGES] - elif work.audience in Classifier.AUDIENCES_ADULT: - audiences = list(Classifier.AUDIENCES_NO_RESEARCH) - elif work.audience == Classifier.AUDIENCE_RESEARCH: - audiences = list(Classifier.AUDIENCES) - else: - audiences = [] - - audience_key = None - if audiences: - audience_strings = [urllib.parse.quote_plus(a) for a in sorted(audiences)] - audience_key = ",".join(audience_strings) - - return language_key, audience_key - - def add_author_links(self, work, feed, entry): - """Find all the tags and add a link - to each one that points to the author's other works. 
- """ - author_tag = "{%s}author" % OPDSFeed.ATOM_NS - author_entries = entry.findall(author_tag) - - languages, audiences = self.language_and_audience_key_from_work(work) - for author_entry in author_entries: - name_tag = "{%s}name" % OPDSFeed.ATOM_NS - - # A database ID would be better than a name, but the - # tag was created as part of the work's cached - # OPDS entry, and as a rule we don't put database IDs into - # the cached OPDS entry. - # - # So we take the content of the tag, use it in - # the link, and -- only if the user decides to fetch this feed - # -- we do a little extra work to turn this name back into - # one or more contributors. - # - # TODO: If we reliably had VIAF IDs for our contributors, - # we could stick them in the tags and get the - # best of both worlds. - contributor_name = author_entry.find(name_tag).text - if not contributor_name: - continue - - feed.add_link_to_entry( - author_entry, - rel="contributor", - type=OPDSFeed.ACQUISITION_FEED_TYPE, - title=contributor_name, - href=self.url_for( - "contributor", - contributor_name=contributor_name, - languages=languages, - audiences=audiences, - library_short_name=self.library.short_name, - _external=True, - ), - ) - - def add_series_link(self, work, feed, entry): - series_tag = OPDSFeed.schema_("Series") - series_entry = entry.find(series_tag) - - if series_entry is None: - # There is no tag, and thus nothing to annotate. - # This probably indicates an out-of-date OPDS entry. - work_id = work.id - work_title = work.title - self.log.error( - 'add_series_link() called on work %s ("%s"), which has no tag in its OPDS entry.', - work_id, - work_title, - ) - return - - series_name = work.series - languages, audiences = self.language_and_audience_key_from_work(work) - href = self.url_for( - "series", - series_name=series_name, - languages=languages, - audiences=audiences, - library_short_name=self.library.short_name, - _external=True, - ) - feed.add_link_to_entry( - series_entry, - rel="series", - type=OPDSFeed.ACQUISITION_FEED_TYPE, - title=series_name, - href=href, - ) - - def annotate_feed(self, feed, lane): - if self.patron: - # A patron is authenticated. - self.add_patron(feed) - else: - # No patron is authenticated. Show them how to - # authenticate (or that authentication is not supported). - self.add_authentication_document_link(feed) - - # Add a 'search' link if the lane is searchable. - if lane and lane.search_target: - search_facet_kwargs = {} - if self.facets != None: - if self.facets.entrypoint_is_default: - # The currently selected entry point is a default. - # Rather than using it, we want the 'default' behavior - # for search, which is to search everything. - search_facets = self.facets.navigate( - entrypoint=EverythingEntryPoint - ) - else: - search_facets = self.facets - search_facet_kwargs.update(dict(list(search_facets.items()))) - - lane_identifier = self._lane_identifier(lane) - search_url = self.url_for( - "lane_search", - lane_identifier=lane_identifier, - library_short_name=self.library.short_name, - _external=True, - **search_facet_kwargs, - ) - search_link = dict( - rel="search", - type="application/opensearchdescription+xml", - href=search_url, - ) - feed.add_link_to_feed(feed.feed, **search_link) - - if self.identifies_patrons: - # Since this library authenticates patrons it can offer - # a bookshelf and an annotation service. 
- shelf_link = dict( - rel="http://opds-spec.org/shelf", - type=OPDSFeed.ACQUISITION_FEED_TYPE, - href=self.url_for( - "active_loans", - library_short_name=self.library.short_name, - _external=True, - ), - ) - feed.add_link_to_feed(feed.feed, **shelf_link) - - annotations_link = dict( - rel="http://www.w3.org/ns/oa#annotationService", - type=AnnotationWriter.CONTENT_TYPE, - href=self.url_for( - "annotations", - library_short_name=self.library.short_name, - _external=True, - ), - ) - feed.add_link_to_feed(feed.feed, **annotations_link) - - if lane and lane.uses_customlists: - name = None - if hasattr(lane, "customlists") and len(lane.customlists) == 1: - name = lane.customlists[0].name - else: - _db = Session.object_session(self.library) - customlist = lane.get_customlists(_db) - if customlist: - name = customlist[0].name - - if name: - crawlable_url = self.url_for( - "crawlable_list_feed", - list_name=name, - library_short_name=self.library.short_name, - _external=True, - ) - crawlable_link = dict( - rel="http://opds-spec.org/crawlable", - type=OPDSFeed.ACQUISITION_FEED_TYPE, - href=crawlable_url, - ) - feed.add_link_to_feed(feed.feed, **crawlable_link) - - self.add_configuration_links(feed) - - def add_configuration_links(self, feed): - _db = Session.object_session(self.library) - - def _add_link(l): - if isinstance(feed, OPDSFeed): - feed.add_link_to_feed(feed.feed, **l) - else: - # This is an ElementTree object. - link = OPDSFeed.link(**l) - feed.append(link) - - library = self.library - if library.settings.terms_of_service: - _add_link( - dict( - rel="terms-of-service", - href=library.settings.terms_of_service, - type="text/html", - ) - ) - - if library.settings.privacy_policy: - _add_link( - dict( - rel="privacy-policy", - href=library.settings.privacy_policy, - type="text/html", - ) - ) - - if library.settings.copyright: - _add_link( - dict( - rel="copyright", - href=library.settings.copyright, - type="text/html", - ) - ) - - if library.settings.about: - _add_link( - dict( - rel="about", - href=library.settings.about, - type="text/html", - ) - ) - - if library.settings.license: - _add_link( - dict( - rel="license", - href=library.settings.license, - type="text/html", - ) - ) - - navigation_urls = self.library.settings.web_header_links - navigation_labels = self.library.settings.web_header_labels - for url, label in zip(navigation_urls, navigation_labels): - d = dict( - href=url, - title=label, - type="text/html", - rel="related", - role="navigation", - ) - _add_link(d) - - for type, value in Configuration.help_uris(self.library): - d = dict(href=value, rel="help") - if type: - d["type"] = type - _add_link(d) - - def acquisition_links( - self, - active_license_pool, - active_loan, - active_hold, - active_fulfillment, - feed, - identifier, - direct_fulfillment_delivery_mechanisms=None, - mock_api=None, - ): - """Generate one or more tags that can be used to borrow, - reserve, or fulfill a book, depending on the state of the book - and the current patron. - - :param active_license_pool: The LicensePool for which we're trying to - generate tags. - :param active_loan: A Loan object representing the current patron's - existing loan for this title, if any. - :param active_hold: A Hold object representing the current patron's - existing hold on this title, if any. - :param active_fulfillment: A LicensePoolDeliveryMechanism object - representing the mechanism, if any, which the patron has chosen - to fulfill this work. - :param feed: The OPDSFeed that will eventually contain these - tags. 
- :param identifier: The Identifier of the title for which we're - trying to generate tags. - :param direct_fulfillment_delivery_mechanisms: A list of - LicensePoolDeliveryMechanisms for the given LicensePool - that should have fulfillment-type tags generated for - them, even if this method wouldn't normally think that - makes sense. - :param mock_api: A mock object to stand in for the API to the - vendor who provided this LicensePool. If this is not provided, a - live API for that vendor will be used. - """ - direct_fulfillment_delivery_mechanisms = ( - direct_fulfillment_delivery_mechanisms or [] - ) - api = mock_api - if not api and self.circulation and active_license_pool: - api = self.circulation.api_for_license_pool(active_license_pool) - if api: - set_mechanism_at_borrow = ( - api.SET_DELIVERY_MECHANISM_AT == BaseCirculationAPI.BORROW_STEP - ) - if active_license_pool and not self.identifies_patrons and not active_loan: - for lpdm in active_license_pool.delivery_mechanisms: - if api.can_fulfill_without_loan(None, active_license_pool, lpdm): - # This title can be fulfilled without an - # active loan, so we're going to add an acquisition - # link that goes directly to the fulfillment step - # without the 'borrow' step. - direct_fulfillment_delivery_mechanisms.append(lpdm) - else: - # This is most likely an open-access book. Just put one - # borrow link and figure out the rest later. - set_mechanism_at_borrow = False - - return super().acquisition_links( - active_license_pool, - active_loan, - active_hold, - active_fulfillment, - feed, - identifier, - can_hold=self.library.settings.allow_holds, - can_revoke_hold=( - active_hold - and ( - not self.circulation - or self.circulation.can_revoke_hold( - active_license_pool, active_hold - ) - ) - ), - set_mechanism_at_borrow=set_mechanism_at_borrow, - direct_fulfillment_delivery_mechanisms=direct_fulfillment_delivery_mechanisms, - add_open_access_links=(not self.identifies_patrons), - ) - - def revoke_link(self, active_license_pool, active_loan, active_hold): - if not self.identifies_patrons: - return - url = self.url_for( - "revoke_loan_or_hold", - license_pool_id=active_license_pool.id, - library_short_name=self.library.short_name, - _external=True, - ) - kw = dict(href=url, rel=OPDSFeed.REVOKE_LOAN_REL) - revoke_link_tag = OPDSFeed.makeelement("link", **kw) - return revoke_link_tag - - def borrow_link( - self, - active_license_pool, - borrow_mechanism, - fulfillment_mechanisms, - active_hold=None, - ): - if not self.identifies_patrons: - return - identifier = active_license_pool.identifier - if borrow_mechanism: - # Following this link will both borrow the book and set - # its delivery mechanism. - mechanism_id = borrow_mechanism.delivery_mechanism.id - else: - # Following this link will borrow the book but not set - # its delivery mechanism. - mechanism_id = None - borrow_url = self.url_for( - "borrow", - identifier_type=identifier.type, - identifier=identifier.identifier, - mechanism_id=mechanism_id, - library_short_name=self.library.short_name, - _external=True, - ) - rel = OPDSFeed.BORROW_REL - borrow_link = AcquisitionFeed.link( - rel=rel, href=borrow_url, type=OPDSFeed.ENTRY_TYPE - ) - - indirect_acquisitions = [] - for lpdm in fulfillment_mechanisms: - # We have information about one or more delivery - # mechanisms that will be available at the point of - # fulfillment. To the extent possible, put information - # about these mechanisms into the tag as - # tags. - - # These are the formats mentioned in the indirect - # acquisition. 
- format_types = AcquisitionFeed.format_types(lpdm.delivery_mechanism) - - # If we can borrow this book, add this delivery mechanism - # to the borrow link as an . - if format_types: - indirect_acquisition = AcquisitionFeed.indirect_acquisition( - format_types - ) - indirect_acquisitions.append(indirect_acquisition) - - if not indirect_acquisitions: - # If there's no way to actually get the book, cancel the creation - # of an OPDS entry altogether. - raise UnfulfillableWork() - - borrow_link.extend(indirect_acquisitions) - return borrow_link - - def fulfill_link( - self, - license_pool, - active_loan, - delivery_mechanism, - rel=OPDSFeed.ACQUISITION_REL, - ): - """Create a new fulfillment link. - - This link may include tags from the OPDS Extensions for DRM. - """ - if not self.identifies_patrons and rel != OPDSFeed.OPEN_ACCESS_REL: - return - if isinstance(delivery_mechanism, LicensePoolDeliveryMechanism): - logging.warning( - "LicensePoolDeliveryMechanism passed into fulfill_link instead of DeliveryMechanism!" - ) - delivery_mechanism = delivery_mechanism.delivery_mechanism - format_types = AcquisitionFeed.format_types(delivery_mechanism) - if not format_types: - return None - - fulfill_url = self.url_for( - "fulfill", - license_pool_id=license_pool.id, - mechanism_id=delivery_mechanism.id, - library_short_name=self.library.short_name, - _external=True, - ) - - link_tag = AcquisitionFeed.acquisition_link( - rel=rel, href=fulfill_url, types=format_types, active_loan=active_loan - ) - - children = AcquisitionFeed.license_tags(license_pool, active_loan, None) - link_tag.extend(children) - - drm_tags = self.drm_extension_tags( - license_pool, active_loan, delivery_mechanism - ) - link_tag.extend(drm_tags) - return link_tag - - def open_access_link(self, pool, lpdm): - link_tag = super().open_access_link(pool, lpdm) - fulfill_url = self.url_for( - "fulfill", - license_pool_id=pool.id, - mechanism_id=lpdm.delivery_mechanism.id, - library_short_name=self.library.short_name, - _external=True, - ) - link_tag.attrib.update(dict(href=fulfill_url)) - return link_tag - - def drm_extension_tags(self, license_pool, active_loan, delivery_mechanism): - """Construct OPDS Extensions for DRM tags that explain how to - register a device with the DRM server that manages this loan. - :param delivery_mechanism: A DeliveryMechanism - """ - if not active_loan or not delivery_mechanism or not self.identifies_patrons: - return [] - - if delivery_mechanism.drm_scheme == DeliveryMechanism.ADOBE_DRM: - # Get an identifier for the patron that will be registered - # with the DRM server. - patron = active_loan.patron - - # Generate a tag that can feed into the - # Vendor ID service. - return self.adobe_id_tags(patron) - - if delivery_mechanism.drm_scheme == DeliveryMechanism.LCP_DRM: - # Generate a tag that can be used for the loan - # in the mobile apps. - - return self.lcp_key_retrieval_tags(active_loan) - - return [] - - def adobe_id_tags(self, patron_identifier): - """Construct tags using the DRM Extensions for OPDS standard that - explain how to get an Adobe ID for this patron, and how to - manage their list of device IDs. - :param delivery_mechanism: A DeliveryMechanism - :return: If Adobe Vendor ID delegation is configured, a list - containing a tag. If not, an empty list. - """ - # CirculationManagerAnnotators are created per request. - # Within the context of a single request, we can cache the - # tags that explain how the patron can get an Adobe ID, and - # reuse them across tags. 
This saves a little time, - # makes tests more reliable, and stops us from providing a - # different Short Client Token for every tag. - if isinstance(patron_identifier, Patron): - cache_key = patron_identifier.id - else: - cache_key = patron_identifier - cached = self._adobe_id_tags.get(cache_key) - if cached is None: - cached = [] - authdata = None - try: - authdata = AuthdataUtility.from_config(self.library) - except CannotLoadConfiguration as e: - logging.error( - "Cannot load Short Client Token configuration; outgoing OPDS entries will not have DRM autodiscovery support", - exc_info=e, - ) - return [] - if authdata: - vendor_id, token = authdata.short_client_token_for_patron( - patron_identifier - ) - drm_licensor = OPDSFeed.makeelement("{%s}licensor" % OPDSFeed.DRM_NS) - vendor_attr = "{%s}vendor" % OPDSFeed.DRM_NS - drm_licensor.attrib[vendor_attr] = vendor_id - patron_key = OPDSFeed.makeelement("{%s}clientToken" % OPDSFeed.DRM_NS) - patron_key.text = token - drm_licensor.append(patron_key) - cached = [drm_licensor] - - self._adobe_id_tags[cache_key] = cached - else: - cached = copy.deepcopy(cached) - return cached - - def lcp_key_retrieval_tags(self, active_loan): - # In the case of LCP we have to include a patron's hashed passphrase - # inside the acquisition link so client applications can use it to open the LCP license - # without having to ask the user to enter their password - # https://readium.org/lcp-specs/notes/lcp-key-retrieval.html#including-a-hashed-passphrase-in-an-opds-1-catalog - - db = Session.object_session(active_loan) - lcp_credential_factory = LCPCredentialFactory() - - response = [] - - try: - hashed_passphrase: LCPHashedPassphrase = ( - lcp_credential_factory.get_hashed_passphrase(db, active_loan.patron) - ) - hashed_passphrase_element = OPDSFeed.makeelement( - "{%s}hashed_passphrase" % OPDSFeed.LCP_NS - ) - hashed_passphrase_element.text = hashed_passphrase.hashed - response.append(hashed_passphrase_element) - except LCPError: - # The patron's passphrase wasn't generated yet and not present in the database. - pass - - return response - - def add_patron(self, feed_obj): - if not self.identifies_patrons: - return - patron_details = {} - if self.patron.username: - patron_details[ - "{%s}username" % OPDSFeed.SIMPLIFIED_NS - ] = self.patron.username - if self.patron.authorization_identifier: - patron_details[ - "{%s}authorizationIdentifier" % OPDSFeed.SIMPLIFIED_NS - ] = self.patron.authorization_identifier - - patron_tag = OPDSFeed.makeelement( - "{%s}patron" % OPDSFeed.SIMPLIFIED_NS, patron_details - ) - feed_obj.feed.append(patron_tag) - - def add_authentication_document_link(self, feed_obj): - """Create a tag that points to the circulation - manager's Authentication for OPDS document - for the current library. - """ - # Even if self.identifies_patrons is false, we include this link, - # because this document is the one that explains there is no - # patron authentication at this library. 
- feed_obj.add_link_to_feed( - feed_obj.feed, - rel="http://opds-spec.org/auth/document", - href=self.url_for( - "authentication_document", - library_short_name=self.library.short_name, - _external=True, - ), - ) - - def active_licensepool_for(self, work): - """Get an active licensepool, always within the scope of the library""" - return super().active_licensepool_for(work, library=self.library) - - -class LibraryLoanAndHoldAnnotator(LibraryAnnotator): - @classmethod - def active_loans_for(cls, circulation, patron, test_mode=False, **response_kwargs): - db = Session.object_session(patron) - active_loans_by_work = {} - for loan in patron.loans: - work = loan.work - if work: - active_loans_by_work[work] = loan - - # There might be multiple holds for the same work so we gather all of them and choose the best one. - all_holds_by_work = {} - for hold in patron.holds: - work = hold.work - if not work: - continue - - if work not in all_holds_by_work: - all_holds_by_work[work] = [] - - all_holds_by_work[work].append(hold) - - active_holds_by_work = {} - for work, list_of_holds in all_holds_by_work.items(): - active_holds_by_work[work] = cls.choose_best_hold_for_work(list_of_holds) - - annotator = cls( - circulation, - None, - patron.library, - patron, - active_loans_by_work, - active_holds_by_work, - test_mode=test_mode, - ) - url = annotator.url_for( - "active_loans", library_short_name=patron.library.short_name, _external=True - ) - works = patron.works_on_loan_or_on_hold() - - feed_obj = AcquisitionFeed(db, "Active loans and holds", url, works, annotator) - annotator.annotate_feed(feed_obj, None) - response = feed_obj.as_response(max_age=0, private=True) - last_modified = patron.last_loan_activity_sync - if last_modified: - response.last_modified = last_modified - return response - - @staticmethod - def choose_best_hold_for_work(list_of_holds): - # We don't want holds that are connected to license pools without any licenses owned. Also, we want hold that - # would result in the least wait time for the patron. - - best = list_of_holds[0] - - for hold in list_of_holds: - # We don't want holds with LPs with 0 licenses owned. - if hold.license_pool.licenses_owned == 0: - continue - - # Our current hold's LP owns some licenses but maybe the best one wasn't changed yet. - if best.license_pool.licenses_owned == 0: - best = hold - continue - - # Since these numbers are updated by different processes there might be situation where we don't have - # all data filled out. - hold_position = ( - hold.position or hold.license_pool.patrons_in_hold_queue or 0 - ) - best_position = ( - best.position or best.license_pool.patrons_in_hold_queue or 0 - ) - - # Both the best hold and current hold own some licenses, try to figure out which one is better. - if ( - hold_position / hold.license_pool.licenses_owned - < best_position / best.license_pool.licenses_owned - ): - best = hold - - return best - - @classmethod - def single_item_feed( - cls, - circulation: Any, - item: LicensePool | Loan, - fulfillment: FulfillmentInfo | None = None, - test_mode=False, - feed_class=AcquisitionFeed, - **response_kwargs, - ) -> OPDSEntryResponse | ProblemDetail: - """Construct a response containing a single OPDS entry representing an active loan - or hold. - - :param circulation: A CirculationAPI - :param item: A Loan, Hold, or LicensePool if first two are missing. - :param fulfillment: A FulfillmentInfo representing the format in which an active loan - should be fulfilled. 
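The removed choose_best_hold_for_work above encodes a small heuristic: skip holds whose license pool owns no licenses, and among the rest prefer the hold with the smallest position-to-licenses-owned ratio, falling back to patrons_in_hold_queue (or 0) when position is unset. A stripped-down sketch of that comparison, using a hypothetical HoldStub instead of the real Hold and LicensePool models:

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class HoldStub:  # hypothetical stand-in for the Hold/LicensePool models
    position: Optional[int]
    licenses_owned: int
    patrons_in_hold_queue: int = 0

def choose_best_hold(holds: List[HoldStub]) -> HoldStub:
    best = holds[0]
    for hold in holds:
        if hold.licenses_owned == 0:
            continue  # a pool with no owned licenses can never be fulfilled
        if best.licenses_owned == 0:
            best = hold
            continue
        hold_position = hold.position or hold.patrons_in_hold_queue or 0
        best_position = best.position or best.patrons_in_hold_queue or 0
        # Prefer the hold with the shortest expected wait per owned license.
        if hold_position / hold.licenses_owned < best_position / best.licenses_owned:
            best = hold
    return best

# Position 2 of 4 licenses (ratio 0.5) beats position 3 of 5 licenses (ratio 0.6).
assert choose_best_hold(
    [HoldStub(position=3, licenses_owned=5), HoldStub(position=2, licenses_owned=4)]
).position == 2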
- :param test_mode: Passed along to the constructor for this annotator class. - :param feed_class: A drop-in replacement for AcquisitionFeed, for use in tests. - :param response_kwargs: Extra keyword arguments to be passed into the OPDSEntryResponse - constructor. - - :return: An OPDSEntryResponse or ProblemDetail - """ - if not item: - raise ValueError("Argument 'item' must be non-empty") - - if isinstance(item, LicensePool): - license_pool = item - library = circulation.library - elif isinstance(item, (Loan, Hold)): - license_pool = item.license_pool - library = item.library - else: - raise ValueError( - "Argument 'item' must be an instance of {}, {}, or {} classes".format( - Loan, Hold, LicensePool - ) - ) - - log = logging.getLogger(cls.__name__) - - # Sometimes the pool or work may be None - # In those cases we have to protect against the exceptions - try: - work = license_pool.work or license_pool.presentation_edition.work - except AttributeError as ex: - log.error(f"Error retrieving a Work Object {ex}") - log.error( - f"Error Data: {license_pool} | {license_pool and license_pool.presentation_edition}" - ) - return NOT_FOUND_ON_REMOTE - - if not work: - return NOT_FOUND_ON_REMOTE - - _db = Session.object_session(item) - active_loans_by_work: Any = {} - active_holds_by_work: Any = {} - active_fulfillments_by_work = {} - item_dictionary = None - - if isinstance(item, Loan): - item_dictionary = active_loans_by_work - elif isinstance(item, Hold): - item_dictionary = active_holds_by_work - - if item_dictionary is not None: - item_dictionary[work] = item - - if fulfillment: - active_fulfillments_by_work[work] = fulfillment - - annotator = cls( - circulation, - None, - library, - active_loans_by_work=active_loans_by_work, - active_holds_by_work=active_holds_by_work, - active_fulfillments_by_work=active_fulfillments_by_work, - test_mode=test_mode, - ) - identifier = license_pool.identifier - url = annotator.url_for( - "loan_or_hold_detail", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=library.short_name, - _external=True, - ) - return annotator._single_entry_response( - _db, work, annotator, url, feed_class, **response_kwargs - ) - - def drm_device_registration_feed_tags(self, patron): - """Return tags that provide information on DRM device deregistration - independent of any particular loan. These tags will go under - the tag. - - This allows us to deregister an Adobe ID, in preparation for - logout, even if there is no active loan that requires one. - """ - tags = copy.deepcopy(self.adobe_id_tags(patron)) - attr = "{%s}scheme" % OPDSFeed.DRM_NS - for tag in tags: - tag.attrib[attr] = "http://librarysimplified.org/terms/drm/scheme/ACS" - return tags - - @property - def user_profile_management_protocol_link(self): - """Create a tag that points to the circulation - manager's User Profile Management Protocol endpoint - for the current patron. - """ - link = OPDSFeed.makeelement("link") - link.attrib["rel"] = "http://librarysimplified.org/terms/rel/user-profile" - link.attrib["href"] = self.url_for( - "patron_profile", library_short_name=self.library.short_name, _external=True - ) - return link - - def annotate_feed(self, feed, lane): - """Annotate the feed with top-level DRM device registration tags - and a link to the User Profile Management Protocol endpoint. 
- """ - super().annotate_feed(feed, lane) - if self.patron: - tags = self.drm_device_registration_feed_tags(self.patron) - tags.append(self.user_profile_management_protocol_link) - for tag in tags: - feed.feed.append(tag) - - def annotate_work_entry( - self, work, active_license_pool, edition, identifier, feed, entry - ): - super().annotate_work_entry( - work, active_license_pool, edition, identifier, feed, entry - ) - # Only OPDS for Distributors should get the time tracking link - # And only if there is an active loan for the work - if ( - edition.medium == EditionConstants.AUDIO_MEDIUM - and active_license_pool - and active_license_pool.collection.protocol - == ExternalIntegration.OPDS_FOR_DISTRIBUTORS - and work in self.active_loans_by_work - ): - feed.add_link_to_entry( - entry, - rel=LinkRelations.TIME_TRACKING, - type=MediaTypes.APPLICATION_JSON_MEDIA_TYPE, - href=self.url_for( - "track_playtime_events", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=self.library.short_name, - collection_id=active_license_pool.collection.id, - _external=True, - ), - ) diff --git a/api/opds2.py b/api/opds2.py index f52f39efc1..123b53f1f6 100644 --- a/api/opds2.py +++ b/api/opds2.py @@ -2,18 +2,12 @@ from typing import TYPE_CHECKING -from flask import url_for from uritemplate import URITemplate from api.circulation import CirculationFulfillmentPostProcessor, FulfillmentInfo from api.circulation_exceptions import CannotFulfill -from core.lane import Facets from core.model import ConfigurationSetting, DataSource, ExternalIntegration -from core.model.edition import Edition -from core.model.identifier import Identifier from core.model.licensing import LicensePoolDeliveryMechanism -from core.model.resource import Hyperlink -from core.opds2 import OPDS2Annotator from core.problem_details import INVALID_CREDENTIALS from core.util.http import HTTP from core.util.log import LoggerMixin @@ -23,68 +17,6 @@ from core.model import LicensePool, Patron -class OPDS2PublicationsAnnotator(OPDS2Annotator): - """API level implementation for the publications feed OPDS2 annotator""" - - def loan_link(self, edition: Edition) -> dict: - identifier: Identifier = edition.primary_identifier - return { - "href": url_for( - "borrow", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=self.library.short_name, - ), - "rel": Hyperlink.BORROW, - } - - def self_link(self, edition: Edition) -> dict: - identifier: Identifier = edition.primary_identifier - return { - "href": url_for( - "permalink", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=self.library.short_name, - ), - "rel": "self", - } - - @classmethod - def facet_url(cls, facets: Facets) -> str: - name = facets.library.short_name if facets.library else None - return url_for( - "opds2_publications", - _external=True, - library_short_name=name, - **dict(facets.items()), - ) - - -class OPDS2NavigationsAnnotator(OPDS2Annotator): - """API level implementation for the navigation feed OPDS2 annotator""" - - def navigation_collection(self) -> list[dict]: - """The OPDS2 navigation collection, currently only serves the publications link""" - return [ - { - "href": url_for( - "opds2_publications", library_short_name=self.library.short_name - ), - "title": "OPDS2 Publications Feed", - "type": self.OPDS2_TYPE, - } - ] - - def feed_metadata(self): - return {"title": self.title} - - def feed_links(self): - return [ - {"href": self.url, "rel": "self", "type": 
self.OPDS2_TYPE}, - ] - - class TokenAuthenticationFulfillmentProcessor( CirculationFulfillmentPostProcessor, LoggerMixin ): diff --git a/api/routes.py b/api/routes.py index 375df01cd8..9838031d43 100644 --- a/api/routes.py +++ b/api/routes.py @@ -307,24 +307,6 @@ def crawlable_collection_feed(collection_name): return app.manager.opds_feeds.crawlable_collection_feed(collection_name) -@library_route("/opds2/publications") -@has_library -@allows_patron_web -@returns_problem_detail -@compressible -def opds2_publications(): - return app.manager.opds2_feeds.publications() - - -@library_route("/opds2/navigation") -@has_library -@allows_patron_web -@returns_problem_detail -@compressible -def opds2_navigation(): - return app.manager.opds2_feeds.navigation() - - @library_route("/marc") @has_library @returns_problem_detail diff --git a/bin/opds_entry_coverage b/bin/opds_entry_coverage deleted file mode 100755 index 6126b34900..0000000000 --- a/bin/opds_entry_coverage +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python -"""Make sure all presentation-ready works have up-to-date OPDS entries.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) -from core.coverage import OPDSEntryWorkCoverageProvider -from core.scripts import RunWorkCoverageProviderScript - -RunWorkCoverageProviderScript(OPDSEntryWorkCoverageProvider).run() diff --git a/bin/repair/opds_entries b/bin/repair/opds_entries deleted file mode 100755 index c095e9ad5c..0000000000 --- a/bin/repair/opds_entries +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python -"""Ensure that all presentation-ready works have an up-to-date OPDS feed.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..", "..") -sys.path.append(os.path.abspath(package_dir)) -from core.monitor import OPDSEntryCacheMonitor -from core.scripts import RunMonitorScript - -RunMonitorScript(OPDSEntryCacheMonitor).run() diff --git a/core/bin/opds_entry_coverage b/core/bin/opds_entry_coverage deleted file mode 100755 index a57c961e6a..0000000000 --- a/core/bin/opds_entry_coverage +++ /dev/null @@ -1,8 +0,0 @@ -#!/usr/bin/env python -"""Make sure all presentation-ready works have up-to-date OPDS entries.""" -import startup # noqa: autoflake - -from core.coverage import OPDSEntryWorkCoverageProvider -from core.scripts import RunWorkCoverageProviderScript - -RunWorkCoverageProviderScript(OPDSEntryWorkCoverageProvider).run() diff --git a/core/coverage.py b/core/coverage.py index 38df3a7f3b..459e6dddf0 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -1547,24 +1547,6 @@ class WorkPresentationProvider(PresentationReadyWorkCoverageProvider): DEFAULT_BATCH_SIZE = 100 -class OPDSEntryWorkCoverageProvider(WorkPresentationProvider): - """Make sure all presentation-ready works have an up-to-date OPDS - entry. - - This is different from the OPDSEntryCacheMonitor, which sweeps - over all presentation-ready works, even ones which are already - covered. - """ - - SERVICE_NAME = "OPDS Entry Work Coverage Provider" - OPERATION = WorkCoverageRecord.GENERATE_OPDS_OPERATION - DEFAULT_BATCH_SIZE = 1000 - - def process_item(self, work): - work.calculate_opds_entries() - return work - - class MARCRecordWorkCoverageProvider(WorkPresentationProvider): """Make sure all presentation-ready works have an up-to-date MARC record. 
@@ -1606,12 +1588,7 @@ class WorkPresentationEditionCoverageProvider(WorkPresentationProvider): # operation (COVER_OPERATION), but it's a little complicated because # that's not a WorkCoverageRecord operation. choose_cover=True, - # We do this even though it's redundant with - # OPDSEntryWorkCoverageProvider. If you change a - # Work's presentation edition but don't update its OPDS entry, - # it effectively didn't happen. - regenerate_opds_entries=True, - # Same logic for the search index. This will flag the Work as + # This will flag the Work as # needing a search index update, and SearchIndexCoverageProvider # will take care of it. update_search_index=True, diff --git a/core/feed/acquisition.py b/core/feed/acquisition.py index 56ab4ed8f3..090fb14c55 100644 --- a/core/feed/acquisition.py +++ b/core/feed/acquisition.py @@ -26,7 +26,7 @@ LibraryAnnotator, ) from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator -from core.feed.opds import BaseOPDSFeed +from core.feed.opds import BaseOPDSFeed, UnfulfillableWork from core.feed.types import FeedData, Link, WorkEntry from core.feed.util import strftime from core.lane import Facets, FacetsWithEntryPoint, Lane, Pagination, SearchFacets @@ -36,7 +36,6 @@ from core.model.licensing import LicensePool from core.model.patron import Hold, Loan, Patron from core.model.work import Work -from core.opds import UnfulfillableWork from core.problem_details import INVALID_INPUT from core.util.datetime_helpers import utc_now from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse diff --git a/core/feed/annotator/circulation.py b/core/feed/annotator/circulation.py index 995d597914..d04465f1c9 100644 --- a/core/feed/annotator/circulation.py +++ b/core/feed/annotator/circulation.py @@ -24,6 +24,7 @@ from core.entrypoint import EverythingEntryPoint from core.external_search import WorkSearchResult from core.feed.annotator.base import Annotator +from core.feed.opds import UnfulfillableWork from core.feed.types import ( Acquisition, FeedData, @@ -50,7 +51,6 @@ ) from core.model.patron import Hold, Loan, Patron from core.model.work import Work -from core.opds import UnfulfillableWork from core.util.datetime_helpers import from_timestamp from core.util.opds_writer import OPDSFeed diff --git a/core/feed/navigation.py b/core/feed/navigation.py index 70abc7e218..ae2111a9e8 100644 --- a/core/feed/navigation.py +++ b/core/feed/navigation.py @@ -7,11 +7,10 @@ from werkzeug.datastructures import MIMEAccept from core.feed.annotator.circulation import CirculationManagerAnnotator -from core.feed.opds import BaseOPDSFeed +from core.feed.opds import BaseOPDSFeed, NavigationFacets from core.feed.types import DataEntry, DataEntryTypes, Link from core.feed.util import strftime from core.lane import Facets, Pagination, WorkList -from core.opds import NavigationFacets from core.util.datetime_helpers import utc_now from core.util.flask_util import OPDSFeedResponse from core.util.opds_writer import OPDSFeed diff --git a/core/feed/opds.py b/core/feed/opds.py index e8bad3c3d1..3943bce58a 100644 --- a/core/feed/opds.py +++ b/core/feed/opds.py @@ -10,6 +10,8 @@ from core.feed.serializer.opds import OPDS1Serializer from core.feed.serializer.opds2 import OPDS2Serializer from core.feed.types import FeedData, WorkEntry +from core.lane import FeaturedFacets +from core.model.cachedfeed import CachedFeed from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse from core.util.opds_writer import OPDSMessage @@ -44,10 +46,6 @@ def __init__( self._feed = 
FeedData()
         self.log = logging.getLogger(self.__class__.__name__)
 
-    def serialize(self, mime_types: Optional[MIMEAccept] = None) -> str:
-        serializer = get_serializer(mime_types)
-        return serializer.serialize_feed(self._feed)
-
     def add_link(self, href: str, rel: Optional[str] = None, **kwargs: Any) -> None:
         self._feed.add_link(href, rel=rel, **kwargs)
 
@@ -94,3 +92,18 @@ def entry_as_response(
         # Only OPDS2 has the same content type for feed and entry
         response.content_type = serializer.content_type()
         return response
+
+
+class UnfulfillableWork(Exception):
+    """Raise this exception when it turns out a Work currently cannot be
+    fulfilled through any means, *and* this is a problem sufficient to
+    cancel the creation of an <entry> for the Work.
+
+    For commercial works, this might be because the collection
+    contains no licenses. For open-access works, it might be because
+    none of the delivery mechanisms could be mirrored.
+    """
+
+
+class NavigationFacets(FeaturedFacets):
+    CACHED_FEED_TYPE = CachedFeed.NAVIGATION_TYPE
diff --git a/core/model/__init__.py b/core/model/__init__.py
index 3c1bf65684..8627923774 100644
--- a/core/model/__init__.py
+++ b/core/model/__init__.py
@@ -210,7 +210,6 @@ def __init__(
         choose_summary=True,
         calculate_quality=True,
         choose_cover=True,
-        regenerate_opds_entries=False,
         regenerate_marc_record=False,
         update_search_index=False,
         verbose=True,
@@ -232,8 +231,6 @@ def __init__(
             quality of the Work?
         :param choose_cover: Should we reconsider which of the available
             cover images is the best?
-        :param regenerate_opds_entries: Should we recreate the OPDS entries
-            for this Work?
         :param regenerate_marc_record: Should we regenerate the MARC record
             for this Work?
         :param update_search_index: Should we reindex this Work's
@@ -268,14 +265,9 @@ def __init__(
         self.calculate_quality = calculate_quality
         self.choose_cover = choose_cover
 
-        # We will regenerate OPDS entries if any of the metadata
-        # changes, but if regenerate_opds_entries is True we will
-        # _always_ do so. This is so we can regenerate _all_ the OPDS
-        # entries if the OPDS presentation algorithm changes.
-        # The same is true for the MARC records, except that they will
+        # Regenerate MARC records, except that they will
         # never be generated unless a MARC organization code is set
         # in a sitewide configuration setting.
-        self.regenerate_opds_entries = regenerate_opds_entries
         self.regenerate_marc_record = regenerate_marc_record
 
         # Similarly for update_search_index.
@@ -293,7 +285,6 @@ def recalculate_everything(cls):
         everything, even when it doesn't seem necessary.
""" return PresentationCalculationPolicy( - regenerate_opds_entries=True, regenerate_marc_record=True, update_search_index=True, ) diff --git a/core/model/collection.py b/core/model/collection.py index dec8041d3f..801f3095f7 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -15,14 +15,7 @@ exists, func, ) -from sqlalchemy.orm import ( - Mapped, - backref, - contains_eager, - joinedload, - mapper, - relationship, -) +from sqlalchemy.orm import Mapped, backref, joinedload, mapper, relationship from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ @@ -31,7 +24,7 @@ from core.model import Base, create, get_one, get_one_or_create from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.constants import EditionConstants -from core.model.coverage import CoverageRecord, WorkCoverageRecord +from core.model.coverage import CoverageRecord from core.model.datasource import DataSource from core.model.edition import Edition from core.model.hassessioncache import HasSessionCache @@ -753,51 +746,6 @@ def unresolved_catalog(self, _db, data_source_name, operation): return query - def licensepools_with_works_updated_since(self, _db, timestamp): - """Finds all LicensePools in a collection's catalog whose Works' OPDS - entries have been updated since the timestamp. Used by the - metadata wrangler. - - :param _db: A database connection, - :param timestamp: A datetime.timestamp object - - :return: a Query that yields LicensePools. The Work and - Identifier associated with each LicensePool have been - pre-loaded, giving the caller all the information - necessary to create full OPDS entries for the works. - """ - opds_operation = WorkCoverageRecord.GENERATE_OPDS_OPERATION - qu = ( - _db.query(LicensePool) - .join( - LicensePool.work, - ) - .join( - LicensePool.identifier, - ) - .join( - Work.coverage_records, - ) - .join( - CollectionIdentifier, - Identifier.id == CollectionIdentifier.identifier_id, - ) - ) - qu = qu.filter( - WorkCoverageRecord.operation == opds_operation, - CollectionIdentifier.collection_id == self.id, - ) - qu = qu.options( - contains_eager(LicensePool.work), - contains_eager(LicensePool.identifier), - ) - - if timestamp: - qu = qu.filter(WorkCoverageRecord.timestamp > timestamp) - - qu = qu.order_by(WorkCoverageRecord.timestamp) - return qu - def isbns_updated_since(self, _db, timestamp): """Finds all ISBNs in a collection's catalog that have been updated since the timestamp but don't have a Work to show for it. Used in diff --git a/core/model/coverage.py b/core/model/coverage.py index ef36fe784e..e1204fe9ea 100644 --- a/core/model/coverage.py +++ b/core/model/coverage.py @@ -614,7 +614,6 @@ class WorkCoverageRecord(Base, BaseCoverageRecord): CLASSIFY_OPERATION = "classify" SUMMARY_OPERATION = "summary" QUALITY_OPERATION = "quality" - GENERATE_OPDS_OPERATION = "generate-opds" GENERATE_MARC_OPERATION = "generate-marc" UPDATE_SEARCH_INDEX_OPERATION = "update-search-index" diff --git a/core/model/identifier.py b/core/model/identifier.py index d922d381a4..35245e3733 100644 --- a/core/model/identifier.py +++ b/core/model/identifier.py @@ -1078,60 +1078,6 @@ def missing_coverage_from( return qu - def opds_entry(self): - """Create an OPDS entry using only resources directly - associated with this Identifier. - This makes it possible to create an OPDS entry even when there - is no Edition. 
- Currently the only things in this OPDS entry will be description, - cover image, and popularity. - NOTE: The timestamp doesn't take into consideration when the - description was added. Rather than fixing this it's probably - better to get rid of this hack and create real Works where we - would be using this method. - """ - id = self.urn - cover_image = None - description = None - most_recent_update = None - timestamps = [] - for link in self.links: - resource = link.resource - if link.rel == LinkRelations.IMAGE: - if not cover_image or ( - not cover_image.representation.thumbnails - and resource.representation.thumbnails - ): - cover_image = resource - if cover_image.representation: - # This is technically redundant because - # minimal_opds_entry will redo this work, - # but just to be safe. - mirrored_at = cover_image.representation.mirrored_at - if mirrored_at: - timestamps.append(mirrored_at) - elif link.rel == LinkRelations.DESCRIPTION: - if not description or resource.quality > description.quality: - description = resource - - if self.coverage_records: - timestamps.extend( - [c.timestamp for c in self.coverage_records if c.timestamp] - ) - if timestamps: - most_recent_update = max(timestamps) - - quality = Measurement.overall_quality(self.measurements) - from core.opds import AcquisitionFeed - - return AcquisitionFeed.minimal_opds_entry( - identifier=self, - cover=cover_image, - description=description, - quality=quality, - most_recent_update=most_recent_update, - ) - def __eq__(self, other): """Equality implementation for total_ordering.""" # We don't want an Identifier to be == an IdentifierData diff --git a/core/model/work.py b/core/model/work.py index f9db4ccd27..c226c90265 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -1039,9 +1039,6 @@ def calculate_presentation( # change it. self.last_update_time = utc_now() - if changed or policy.regenerate_opds_entries: - self.calculate_opds_entries() - if changed or policy.regenerate_marc_record: self.calculate_marc_record() @@ -1172,19 +1169,6 @@ def _ensure(s): l = [_ensure(s) for s in l] return "\n".join(l) - def calculate_opds_entries(self, verbose=True): - from core.opds import AcquisitionFeed, Annotator, VerboseAnnotator - - _db = Session.object_session(self) - simple = AcquisitionFeed.single_entry(_db, self, Annotator, force_create=True) - if verbose is True: - verbose = AcquisitionFeed.single_entry( - _db, self, VerboseAnnotator, force_create=True - ) - WorkCoverageRecord.add_for( - self, operation=WorkCoverageRecord.GENERATE_OPDS_OPERATION - ) - def calculate_marc_record(self): from core.marc import Annotator, MARCExporter diff --git a/core/monitor.py b/core/monitor.py index 79e64c7c63..00f9f41382 100644 --- a/core/monitor.py +++ b/core/monitor.py @@ -689,21 +689,6 @@ def scope_to_collection(self, qu: Query, collection: Collection) -> Query: # SweepMonitors that do something specific. -class OPDSEntryCacheMonitor(PresentationReadyWorkSweepMonitor): - """A Monitor that recalculates the OPDS entries for every - presentation-ready Work. - - This is different from the OPDSEntryWorkCoverageProvider, - which only processes works that are missing a WorkCoverageRecord - with the 'generate-opds' operation. - """ - - SERVICE_NAME = "ODPS Entry Cache Monitor" - - def process_item(self, work): - work.calculate_opds_entries() - - class PermanentWorkIDRefreshMonitor(EditionSweepMonitor): """A monitor that calculates or recalculates the permanent work ID for every edition. 
diff --git a/core/opds.py b/core/opds.py
deleted file mode 100644
index 58d5543e5c..0000000000
--- a/core/opds.py
+++ /dev/null
@@ -1,2117 +0,0 @@
-from __future__ import annotations
-
-import datetime
-import logging
-from collections import defaultdict
-from typing import TYPE_CHECKING, Optional
-from urllib.parse import quote
-
-from lxml import etree
-from sqlalchemy.orm import joinedload
-from sqlalchemy.orm.session import Session
-
-from core.classifier import Classifier
-from core.entrypoint import EntryPoint
-from core.external_search import ExternalSearchIndex, QueryParseException
-from core.facets import FacetConstants
-from core.lane import (
-    Facets,
-    FacetsWithEntryPoint,
-    FeaturedFacets,
-    Lane,
-    Pagination,
-    SearchFacets,
-)
-from core.model import (
-    CachedFeed,
-    Contributor,
-    DataSource,
-    Edition,
-    Hyperlink,
-    Identifier,
-    Measurement,
-    PresentationCalculationPolicy,
-    Subject,
-    Work,
-)
-from core.problem_details import INVALID_INPUT
-from core.util.datetime_helpers import utc_now
-from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse
-from core.util.opds_writer import AtomFeed, OPDSFeed, OPDSMessage
-
-# Import related models when doing type checking
-if TYPE_CHECKING:
-    from core.model import Library, LicensePool  # noqa: autoflake
-
-
-class UnfulfillableWork(Exception):
-    """Raise this exception when it turns out a Work currently cannot be
-    fulfilled through any means, *and* this is a problem sufficient to
-    cancel the creation of an <entry> for the Work.
-
-    For commercial works, this might be because the collection
-    contains no licenses. For open-access works, it might be because
-    none of the delivery mechanisms could be mirrored.
-    """
-
-
-class Annotator:
-    """The Annotator knows how to present an OPDS feed in a specific
-    application context.
-    """
-
-    opds_cache_field = Work.simple_opds_entry.name
-
-    def is_work_entry_solo(self, work: Work) -> bool:
-        """Return a boolean value indicating whether the work's OPDS catalog entry is served by itself,
-        rather than as a part of the feed.
-
-        :param work: Work object
-        :return: Boolean value indicating whether the work's OPDS catalog entry is served by itself,
-            rather than as a part of the feed
-        """
-        return False
-
-    def annotate_work_entry(
-        self, work, active_license_pool, edition, identifier, feed, entry, updated=None
-    ):
-        """Make any custom modifications necessary to integrate this
-        OPDS entry into the application's workflow.
-
-        :work: The Work whose OPDS entry is being annotated.
-        :active_license_pool: Of all the LicensePools associated with this
-            Work, the client has expressed interest in this one.
-        :edition: The Edition to use when associating bibliographic
-            metadata with this entry. You will probably not need to use
-            this, because bibliographic metadata was associated with
-            the entry when it was created.
-        :identifier: Of all the Identifiers associated with this
-            Work, the client has expressed interest in this one.
-        :param feed: An OPDSFeed -- the feed in which this entry will be
-            situated.
-        :param entry: An lxml Element object, the entry that will be added
-            to the feed.
-        """
-
-        # First, try really hard to find an Identifier that we can
-        # use to make the <id> tag.
-        if not identifier:
-            if active_license_pool:
-                identifier = active_license_pool.identifier
-            elif edition:
-                identifier = edition.primary_identifier
-
-        if identifier:
-            entry.append(AtomFeed.id(identifier.urn))
-
-        # Add a permalink if one is available.
- permalink_uri, permalink_type = self.permalink_for( - work, active_license_pool, identifier - ) - if permalink_uri: - OPDSFeed.add_link_to_entry( - entry, rel="alternate", href=permalink_uri, type=permalink_type - ) - - if self.is_work_entry_solo(work): - OPDSFeed.add_link_to_entry( - entry, rel="self", href=permalink_uri, type=permalink_type - ) - - if active_license_pool: - data_source = active_license_pool.data_source.name - if data_source != DataSource.INTERNAL_PROCESSING: - # INTERNAL_PROCESSING indicates a dummy LicensePool - # created as a stand-in, e.g. by the metadata wrangler. - # This component is not actually distributing the book, - # so it should not have a bibframe:distribution tag. - provider_name_attr = "{%s}ProviderName" % AtomFeed.BIBFRAME_NS - kwargs = {provider_name_attr: data_source} - data_source_tag = AtomFeed.makeelement( - "{%s}distribution" % AtomFeed.BIBFRAME_NS, **kwargs - ) - entry.extend([data_source_tag]) - - # We use Atom 'published' for the date the book first became - # available to people using this application. - avail = active_license_pool.availability_time - if avail: - now = utc_now() - today = datetime.date.today() - if isinstance(avail, datetime.datetime): - avail = avail.date() - if avail <= today: # Avoid obviously wrong values. - availability_tag = AtomFeed.makeelement("published") - # TODO: convert to local timezone. - availability_tag.text = AtomFeed._strftime(avail) - entry.extend([availability_tag]) - - # If this OPDS entry is being used as part of a grouped feed - # (which is up to the Annotator subclass), we need to add a - # group link. - group_uri, group_title = self.group_uri(work, active_license_pool, identifier) - if group_uri: - OPDSFeed.add_link_to_entry( - entry, rel=OPDSFeed.GROUP_REL, href=group_uri, title=str(group_title) - ) - - if not updated and work.last_update_time: - # NOTE: This is a default that works in most cases. When - # ordering Opensearch results by last update time, - # `work` is a WorkSearchResult object containing a more - # reliable value that you can use if you want. - updated = work.last_update_time - if updated: - entry.extend([AtomFeed.updated(AtomFeed._strftime(updated))]) - - @classmethod - def annotate_feed(cls, feed, lane, list=None): - """Make any custom modifications necessary to integrate this - OPDS feed into the application's workflow. - """ - - @classmethod - def group_uri(cls, work, license_pool, identifier): - """The URI to be associated with this Work when making it part of - a grouped feed. - - By default, this does nothing. See circulation/LibraryAnnotator - for a subclass that does something. 
- - :return: A 2-tuple (URI, title) - """ - return None, "" - - @classmethod - def rating_tag(cls, type_uri, value): - """Generate a schema:Rating tag for the given type and value.""" - rating_tag = AtomFeed.makeelement(AtomFeed.schema_("Rating")) - value_key = AtomFeed.schema_("ratingValue") - rating_tag.set(value_key, "%.4f" % value) - if type_uri: - type_key = AtomFeed.schema_("additionalType") - rating_tag.set(type_key, type_uri) - return rating_tag - - @classmethod - def samples(cls, edition: Edition) -> list[Hyperlink]: - if not edition: - return [] - _db = Session.object_session(edition) - links = ( - _db.query(Hyperlink) - .filter( - Hyperlink.rel == Hyperlink.SAMPLE, - Hyperlink.identifier_id == edition.primary_identifier_id, - ) - .options(joinedload(Hyperlink.resource)) - .all() - ) - return links - - @classmethod - def cover_links(cls, work): - """Return all links to be used as cover links for this work. - - In a distribution application, each work will have only one - link. In a content server-type application, each work may have - a large number of links. - - :return: A 2-tuple (thumbnail_links, full_links) - """ - thumbnails = [] - full = [] - if work: - _db = Session.object_session(work) - if work.cover_thumbnail_url: - thumbnails = [work.cover_thumbnail_url] - - if work.cover_full_url: - full = [work.cover_full_url] - return thumbnails, full - - @classmethod - def categories(cls, work): - """Return all relevant classifications of this work. - - :return: A dictionary mapping 'scheme' URLs to dictionaries of - attribute-value pairs. - - Notable attributes: 'term', 'label', 'http://schema.org/ratingValue' - """ - if not work: - return {} - - categories = {} - - fiction_term = None - if work.fiction == True: - fiction_term = "Fiction" - elif work.fiction == False: - fiction_term = "Nonfiction" - if fiction_term: - fiction_scheme = Subject.SIMPLIFIED_FICTION_STATUS - categories[fiction_scheme] = [ - dict(term=fiction_scheme + fiction_term, label=fiction_term) - ] - - simplified_genres = [] - for wg in work.work_genres: - simplified_genres.append(wg.genre.name) - - if simplified_genres: - categories[Subject.SIMPLIFIED_GENRE] = [ - dict(term=Subject.SIMPLIFIED_GENRE + quote(x), label=x) - for x in simplified_genres - ] - - # Add the appeals as a category of schema - # http://librarysimplified.org/terms/appeal - schema_url = AtomFeed.SIMPLIFIED_NS + "appeals/" - appeals = [] - categories[schema_url] = appeals - for name, value in ( - (Work.CHARACTER_APPEAL, work.appeal_character), - (Work.LANGUAGE_APPEAL, work.appeal_language), - (Work.SETTING_APPEAL, work.appeal_setting), - (Work.STORY_APPEAL, work.appeal_story), - ): - if value: - appeal = dict(term=schema_url + name, label=name) - weight_field = AtomFeed.schema_("ratingValue") - appeal[weight_field] = value - appeals.append(appeal) - - # Add the audience as a category of schema - # http://schema.org/audience - if work.audience: - audience_uri = AtomFeed.SCHEMA_NS + "audience" - categories[audience_uri] = [dict(term=work.audience, label=work.audience)] - - # Any book can have a target age, but the target age - # is only relevant for childrens' and YA books. 
-        audiences_with_target_age = (
-            Classifier.AUDIENCE_CHILDREN,
-            Classifier.AUDIENCE_YOUNG_ADULT,
-        )
-        if work.target_age and work.audience in audiences_with_target_age:
-            uri = Subject.uri_lookup[Subject.AGE_RANGE]
-            target_age = work.target_age_string
-            if target_age:
-                categories[uri] = [dict(term=target_age, label=target_age)]
-
-        return categories
-
-    @classmethod
-    def authors(cls, work, edition):
-        """Create one or more <author> and <contributor> tags for the given
-        Work.
-
-        :param work: The Work under consideration.
-        :param edition: The Edition to use as a reference
-            for bibliographic information, including the list of
-            Contributions.
-        """
-        authors = list()
-        state = defaultdict(set)
-        for contribution in edition.contributions:
-            tag = cls.contributor_tag(contribution, state)
-            if tag is None:
-                # contributor_tag decided that this contribution doesn't
-                # need a tag.
-                continue
-            authors.append(tag)
-
-        if authors:
-            return authors
-
-        # We have no author information, so we add empty <author> tag
-        # to avoid the implication (per RFC 4287 4.2.1) that this book
-        # was written by whoever wrote the OPDS feed.
-        return [AtomFeed.author(AtomFeed.name(""))]
-
-    @classmethod
-    def contributor_tag(cls, contribution, state):
-        """Build an <author> or <contributor> tag for a Contribution.
-
-        :param contribution: A Contribution.
-        :param state: A defaultdict of sets, which may be used to keep
-            track of what happened during previous calls to
-            contributor_tag for a given Work.
-        :return: A Tag, or None if creating a Tag for this Contribution
-            would be redundant or of low value.
-
-        """
-        contributor = contribution.contributor
-        role = contribution.role
-
-        if role in Contributor.AUTHOR_ROLES:
-            tag_f = AtomFeed.author
-            marc_role = None
-        else:
-            tag_f = AtomFeed.contributor
-            marc_role = Contributor.MARC_ROLE_CODES.get(role)
-            if not marc_role:
-                # This contribution is not one that we publish as
-                # a <contributor> tag. Skip it.
-                return None
-
-        name = contributor.display_name or contributor.sort_name
-        name_key = name.lower()
-        if name_key in state[marc_role]:
-            # We've already credited this person with this
-            # MARC role. Returning a tag would be redundant.
-            return None
-
-        # Okay, we're creating a tag.
-        properties = dict()
-        if marc_role:
-            properties["{%s}role" % AtomFeed.OPF_NS] = marc_role
-        tag = tag_f(AtomFeed.name(name), **properties)
-
-        # Record the fact that we credited this person with this role,
-        # so that we don't do it again on a subsequent call.
- state[marc_role].add(name_key) - - return tag - - @classmethod - def series(cls, series_name, series_position): - """Generate a schema:Series tag for the given name and position.""" - if not series_name: - return None - series_details = dict() - series_details["name"] = series_name - if series_position != None: - series_details[AtomFeed.schema_("position")] = str(series_position) - series_tag = AtomFeed.makeelement(AtomFeed.schema_("Series"), **series_details) - return series_tag - - @classmethod - def content(cls, work): - """Return an HTML summary of this work.""" - summary = "" - if work: - if work.summary_text != None: - summary = work.summary_text - elif work.summary and work.summary.content: - work.summary_text = work.summary.content - summary = work.summary_text - return summary - - @classmethod - def lane_id(cls, lane): - return cls.featured_feed_url(lane) - - @classmethod - def work_id(cls, work): - return work.presentation_edition.primary_identifier.urn - - @classmethod - def permalink_for(cls, work, license_pool, identifier): - """Generate a permanent link a client can follow for information about - this entry, and only this entry. - - Note that permalink is distinct from the Atom , - which is always the identifier's URN. - - :return: A 2-tuple (URL, media type). If a single value is - returned, the media type will be presumed to be that of an - OPDS entry. - """ - # In the absence of any specific controllers, there is no - # permalink. This method must be defined in a subclass. - return None, None - - @classmethod - def lane_url(cls, lane, facets=None): - raise NotImplementedError() - - @classmethod - def feed_url(cls, lane, facets=None, pagination=None): - raise NotImplementedError() - - @classmethod - def groups_url(cls, lane, facets=None): - raise NotImplementedError() - - @classmethod - def search_url(cls, lane, query, pagination, facets=None): - raise NotImplementedError() - - @classmethod - def default_lane_url(cls): - raise NotImplementedError() - - @classmethod - def featured_feed_url(cls, lane, order=None, facets=None): - raise NotImplementedError() - - @classmethod - def facet_url(cls, facets, facet=None): - return None - - @classmethod - def navigation_url(cls, lane): - raise NotImplementedError() - - @classmethod - def active_licensepool_for( - cls, work: Work, library: Library | None = None - ) -> LicensePool | None: - """Which license pool would be/has been used to issue a license for - this work? - """ - if not work: - return None - - return work.active_license_pool(library=library) - - def sort_works_for_groups_feed(self, works, **kwargs): - return works - - -class VerboseAnnotator(Annotator): - """The default Annotator for machine-to-machine integration. - - This Annotator describes all categories and authors for the book - in great detail. - """ - - opds_cache_field = Work.verbose_opds_entry.name - - def annotate_work_entry( - self, work, active_license_pool, edition, identifier, feed, entry - ): - super().annotate_work_entry( - work, active_license_pool, edition, identifier, feed, entry - ) - self.add_ratings(work, entry) - - @classmethod - def add_ratings(cls, work, entry): - """Add a quality rating to the work.""" - for type_uri, value in [ - (Measurement.QUALITY, work.quality), - (None, work.rating), - (Measurement.POPULARITY, work.popularity), - ]: - if value: - entry.append(cls.rating_tag(type_uri, value)) - - @classmethod - def categories(cls, work, policy=None): - """Send out _all_ categories for the work. 
- - (So long as the category type has a URI associated with it in - Subject.uri_lookup.) - - :param policy: A PresentationCalculationPolicy to - use when deciding how deep to go when finding equivalent - identifiers for the work. - """ - policy = policy or PresentationCalculationPolicy( - equivalent_identifier_cutoff=100 - ) - _db = Session.object_session(work) - by_scheme_and_term = dict() - identifier_ids = work.all_identifier_ids(policy=policy) - classifications = Identifier.classifications_for_identifier_ids( - _db, identifier_ids - ) - for c in classifications: - subject = c.subject - if subject.type in Subject.uri_lookup: - scheme = Subject.uri_lookup[subject.type] - term = subject.identifier - weight_field = AtomFeed.schema_("ratingValue") - key = (scheme, term) - if not key in by_scheme_and_term: - value = dict(term=subject.identifier) - if subject.name: - value["label"] = subject.name - value[weight_field] = 0 - by_scheme_and_term[key] = value - by_scheme_and_term[key][weight_field] += c.weight - - # Collapse by_scheme_and_term to by_scheme - by_scheme = defaultdict(list) - for (scheme, term), value in list(by_scheme_and_term.items()): - by_scheme[scheme].append(value) - by_scheme.update(super().categories(work)) - return by_scheme - - @classmethod - def authors(cls, work, edition): - """Create a detailed tag for each author.""" - return [cls.detailed_author(author) for author in edition.author_contributors] - - @classmethod - def detailed_author(cls, contributor): - """Turn a Contributor into a detailed tag.""" - children = [] - children.append(AtomFeed.name(contributor.display_name or "")) - sort_name = AtomFeed.makeelement("{%s}sort_name" % AtomFeed.SIMPLIFIED_NS) - sort_name.text = contributor.sort_name - - children.append(sort_name) - - if contributor.family_name: - family_name = AtomFeed.makeelement(AtomFeed.schema_("family_name")) - family_name.text = contributor.family_name - children.append(family_name) - - if contributor.wikipedia_name: - wikipedia_name = AtomFeed.makeelement( - "{%s}wikipedia_name" % AtomFeed.SIMPLIFIED_NS - ) - wikipedia_name.text = contributor.wikipedia_name - children.append(wikipedia_name) - - if contributor.viaf: - viaf_tag = AtomFeed.makeelement(AtomFeed.schema_("sameas")) - viaf_tag.text = "http://viaf.org/viaf/%s" % contributor.viaf - children.append(viaf_tag) - - if contributor.lc: - lc_tag = AtomFeed.makeelement(AtomFeed.schema_("sameas")) - lc_tag.text = "http://id.loc.gov/authorities/names/%s" % contributor.lc - children.append(lc_tag) - - return AtomFeed.author(*children) - - -class AcquisitionFeed(OPDSFeed): - FACET_REL = "http://opds-spec.org/facet" - - @classmethod - def groups( - cls, - _db, - title, - url, - worklist, - annotator, - pagination=None, - facets=None, - max_age=None, - search_engine=None, - search_debug=False, - **response_kwargs, - ): - """The acquisition feed for 'featured' items from a given lane's - sublanes, organized into per-lane groups. - - NOTE: If the lane has no sublanes, a grouped feed will - probably be unsatisfying. Call page() instead with an - appropriate Facets object. - - :param pagination: A Pagination object. No single child of this lane - will contain more than `pagination.size` items. - :param facets: A GroupsFacet object. - - :param response_kwargs: Extra keyword arguments to pass into - the OPDSFeedResponse constructor. - - :return: An OPDSFeedResponse containing the feed. 
- """ - annotator = cls._make_annotator(annotator) - facets = facets or FeaturedFacets.default(worklist.get_library(_db)) - - def refresh(): - return cls._generate_groups( - _db=_db, - title=title, - url=url, - worklist=worklist, - annotator=annotator, - pagination=pagination, - facets=facets, - search_engine=search_engine, - search_debug=search_debug, - ) - - return CachedFeed.fetch( - _db=_db, - worklist=worklist, - pagination=pagination, - facets=facets, - refresher_method=refresh, - max_age=max_age, - **response_kwargs, - ) - - @classmethod - def _generate_groups( - cls, - _db, - title, - url, - worklist, - annotator, - pagination, - facets, - search_engine, - search_debug, - ): - """Internal method called by groups() when a grouped feed - must be regenerated. - """ - - # Try to get a set of (Work, WorkList) 2-tuples - # to make a normal grouped feed. - works_and_lanes = [ - x - for x in worklist.groups( - _db=_db, - pagination=pagination, - facets=facets, - search_engine=search_engine, - debug=search_debug, - ) - ] - # Make a typical grouped feed. - all_works = [] - for work, sublane in works_and_lanes: - if sublane == worklist: - # We are looking at the groups feed for (e.g.) - # "Science Fiction", and we're seeing a book - # that is featured within "Science Fiction" itself - # rather than one of the sublanes. - # - # We want to assign this work to a group called "All - # Science Fiction" and point its 'group URI' to - # the linear feed of the "Science Fiction" lane - # (as opposed to the groups feed, which is where we - # are now). - v = dict( - lane=worklist, - label=worklist.display_name_for_all, - link_to_list_feed=True, - ) - else: - # We are looking at the groups feed for (e.g.) - # "Science Fiction", and we're seeing a book - # that is featured within one of its sublanes, - # such as "Space Opera". - # - # We want to assign this work to a group derived - # from the sublane. - v = dict(lane=sublane) - - annotator.lanes_by_work[work].append(v) - all_works.append(work) - - all_works = annotator.sort_works_for_groups_feed(all_works) - feed = AcquisitionFeed(_db, title, url, all_works, annotator) - - # Regardless of whether or not the entries in feed can be - # grouped together, we want to apply certain feed-level - # annotations. - - # A grouped feed may link to alternate entry points into - # the data. - entrypoints = facets.selectable_entrypoints(worklist) - if entrypoints: - - def make_link(ep): - return annotator.groups_url( - worklist, facets=facets.navigate(entrypoint=ep) - ) - - cls.add_entrypoint_links(feed, make_link, entrypoints, facets.entrypoint) - - # A grouped feed may have breadcrumb links. - feed.add_breadcrumb_links(worklist, facets.entrypoint) - - # Miscellaneous. - annotator.annotate_feed(feed, worklist) - - return feed - - @classmethod - def page( - cls, - _db, - title, - url, - worklist, - annotator, - facets=None, - pagination=None, - max_age=None, - search_engine: Optional[ExternalSearchIndex] = None, - search_debug=False, - **response_kwargs, - ): - """Create a feed representing one page of works from a given lane. - - :param response_kwargs: Extra keyword arguments to pass into - the OPDSFeedResponse constructor. - - :return: An OPDSFeedResponse containing the feed. 
- """ - library = worklist.get_library(_db) - facets = facets or Facets.default(library) - pagination = pagination or Pagination.default() - annotator = cls._make_annotator(annotator) - - def refresh(): - return cls._generate_page( - _db, - title, - url, - worklist, - annotator, - facets, - pagination, - search_engine, - search_debug, - ) - - response_kwargs.setdefault("max_age", max_age) - return CachedFeed.fetch( - _db, - worklist=worklist, - pagination=pagination, - facets=facets, - refresher_method=refresh, - **response_kwargs, - ) - - @classmethod - def _generate_page( - cls, - _db, - title, - url, - lane, - annotator, - facets, - pagination, - search_engine, - search_debug, - ): - """Internal method called by page() when a cached feed - must be regenerated. - """ - works = lane.works( - _db, - pagination=pagination, - facets=facets, - search_engine=search_engine, - debug=search_debug, - ) - - if not isinstance(works, list): - # It's possible that works() returned a database query or - # other generator-like object, but at this point we want - # an actual list of Work objects. - works = [x for x in works] - - if not pagination.page_has_loaded: - # Depending on how the works were obtained, - # Pagination.page_loaded may or may not have been called - # yet. - pagination.page_loaded(works) - feed = cls(_db, title, url, works, annotator) - - entrypoints = facets.selectable_entrypoints(lane) - if entrypoints: - # A paginated feed may have multiple entry points into the - # same dataset. - def make_link(ep): - return annotator.feed_url(lane, facets=facets.navigate(entrypoint=ep)) - - cls.add_entrypoint_links(feed, make_link, entrypoints, facets.entrypoint) - - # Add URLs to change faceted views of the collection. - for args in cls.facet_links(annotator, facets): - OPDSFeed.add_link_to_feed(feed=feed.feed, **args) - - if len(works) > 0 and pagination.has_next_page: - # There are works in this list. Add a 'next' link. - OPDSFeed.add_link_to_feed( - feed=feed.feed, - rel="next", - href=annotator.feed_url(lane, facets, pagination.next_page), - ) - - if pagination.offset > 0: - OPDSFeed.add_link_to_feed( - feed=feed.feed, - rel="first", - href=annotator.feed_url(lane, facets, pagination.first_page), - ) - - previous_page = pagination.previous_page - if previous_page: - OPDSFeed.add_link_to_feed( - feed=feed.feed, - rel="previous", - href=annotator.feed_url(lane, facets, previous_page), - ) - - if isinstance(facets, FacetsWithEntryPoint): - feed.add_breadcrumb_links(lane, facets.entrypoint) - - annotator.annotate_feed(feed, lane) - return feed - - @classmethod - def from_query(cls, query, _db, feed_name, url, pagination, url_fn, annotator): - """Build a feed representing one page of a given list. Currently used for - creating an OPDS feed for a custom list and not cached. - - TODO: This is used by the circulation manager admin interface. - Investigate changing the code that uses this to use the search - index -- this is inefficient and creates an alternate code path - that may harbor bugs. - - TODO: This cannot currently return OPDSFeedResponse because the - admin interface modifies the feed after it's generated. 
- - """ - page_of_works = pagination.modify_database_query(_db, query) - pagination.total_size = int(query.count()) - - feed = cls(_db, feed_name, url, page_of_works, annotator) - - if pagination.total_size > 0 and pagination.has_next_page: - OPDSFeed.add_link_to_feed( - feed=feed.feed, rel="next", href=url_fn(pagination.next_page.offset) - ) - if pagination.offset > 0: - OPDSFeed.add_link_to_feed( - feed=feed.feed, rel="first", href=url_fn(pagination.first_page.offset) - ) - if pagination.previous_page: - OPDSFeed.add_link_to_feed( - feed=feed.feed, - rel="previous", - href=url_fn(pagination.previous_page.offset), - ) - - return feed - - def as_response(self, **kwargs): - """Convert this feed into an OPDSFEedResponse.""" - return OPDSFeedResponse(self, **kwargs) - - def as_error_response(self, **kwargs): - """Convert this feed into an OPDSFEedResponse that should be treated - by intermediaries as an error -- that is, treated as private - and not cached. - """ - kwargs["max_age"] = 0 - kwargs["private"] = True - return self.as_response(**kwargs) - - @classmethod - def _make_annotator(cls, annotator): - """Helper method to make sure there's some kind of Annotator.""" - if not annotator: - annotator = Annotator - if callable(annotator): - annotator = annotator() - return annotator - - @classmethod - def facet_link(cls, href, title, facet_group_name, is_active): - """Build a set of attributes for a facet link. - - :param href: Destination of the link. - :param title: Human-readable description of the facet. - :param facet_group_name: The facet group to which the facet belongs, - e.g. "Sort By". - :param is_active: True if this is the client's currently - selected facet. - - :return: A dictionary of attributes, suitable for passing as - keyword arguments into OPDSFeed.add_link_to_feed. - """ - args = dict(href=href, title=title) - args["rel"] = cls.FACET_REL - args["{%s}facetGroup" % AtomFeed.OPDS_NS] = facet_group_name - if is_active: - args["{%s}activeFacet" % AtomFeed.OPDS_NS] = "true" - return args - - @classmethod - def add_entrypoint_links( - cls, feed, url_generator, entrypoints, selected_entrypoint, group_name="Formats" - ): - """Add links to a feed forming an OPDS facet group for a set of - EntryPoints. - - :param feed: A lxml Tag object. - :param url_generator: A callable that returns the entry point - URL when passed an EntryPoint. - :param entrypoints: A list of all EntryPoints in the facet group. - :param selected_entrypoint: The current EntryPoint, if selected. - """ - if len(entrypoints) == 1 and selected_entrypoint in (None, entrypoints[0]): - # There is only one entry point. Unless the currently - # selected entry point is somehow different, there's no - # need to put any links at all here -- a facet group with - # one one facet might as well not be there. - return - - is_default = True - for entrypoint in entrypoints: - link = cls._entrypoint_link( - url_generator, entrypoint, selected_entrypoint, is_default, group_name - ) - if link is not None: - cls.add_link_to_feed(feed.feed, **link) - is_default = False - - @classmethod - def _entrypoint_link( - cls, url_generator, entrypoint, selected_entrypoint, is_default, group_name - ): - """Create arguments for add_link_to_feed for a link that navigates - between EntryPoints. - """ - display_title = EntryPoint.DISPLAY_TITLES.get(entrypoint) - if not display_title: - # Shouldn't happen. 
- return - - url = url_generator(entrypoint) - is_selected = entrypoint is selected_entrypoint - link = cls.facet_link(url, display_title, group_name, is_selected) - - # Unlike a normal facet group, every link in this facet - # group has an additional attribute marking it as an entry - # point. - # - # In OPDS 2 this can become an additional rel value, - # removing the need for a custom attribute. - link[ - "{%s}facetGroupType" % AtomFeed.SIMPLIFIED_NS - ] = FacetConstants.ENTRY_POINT_REL - return link - - def add_breadcrumb_links(self, lane, entrypoint=None): - """Add information necessary to find your current place in the - site's navigation. - - A link with rel="start" points to the start of the site - - A section describes the current entry point. - - A section contains a sequence of - breadcrumb links. - """ - # Add the top-level link with rel='start' - xml = self.feed - annotator = self.annotator - top_level_title = annotator.top_level_title() or "Collection Home" - self.add_link_to_feed( - feed=xml, - rel="start", - href=annotator.default_lane_url(), - title=top_level_title, - ) - - # Add a link to the direct parent with rel="up". - # - # TODO: the 'direct parent' may be the same lane but without - # the entry point specified. Fixing this would also be a good - # opportunity to refactor the code for figuring out parent and - # parent_title. - parent = None - if isinstance(lane, Lane): - parent = lane.parent - if parent and parent.display_name: - parent_title = parent.display_name - else: - parent_title = top_level_title - - if parent: - up_uri = annotator.lane_url(parent) - self.add_link_to_feed(feed=xml, href=up_uri, rel="up", title=parent_title) - self.add_breadcrumbs(lane, entrypoint=entrypoint) - - # Annotate the feed with a simplified:entryPoint for the - # current EntryPoint. - self.show_current_entrypoint(entrypoint) - - @classmethod - def search( - cls, - _db, - title, - url, - lane, - search_engine, - query, - pagination=None, - facets=None, - annotator=None, - **response_kwargs, - ): - """Run a search against the given search engine and return - the results as a Flask Response. - - :param _db: A database connection - :param title: The title of the resulting OPDS feed. - :param url: The URL from which the feed will be served. - :param search_engine: An ExternalSearchIndex. - :param query: The search query - :param pagination: A Pagination - :param facets: A Facets - :param annotator: An Annotator - :param response_kwargs: Keyword arguments to pass into the OPDSFeedResponse - constructor. - :return: An ODPSFeedResponse - """ - facets = facets or SearchFacets() - pagination = pagination or Pagination.default() - try: - results = lane.search( - _db, query, search_engine, pagination=pagination, facets=facets - ) - except QueryParseException as e: - return INVALID_INPUT.detailed(e.detail) - - opds_feed = AcquisitionFeed(_db, title, url, results, annotator=annotator) - AcquisitionFeed.add_link_to_feed( - feed=opds_feed.feed, - rel="start", - href=annotator.default_lane_url(), - title=annotator.top_level_title(), - ) - - # A feed of search results may link to alternate entry points - # into those results. - entrypoints = facets.selectable_entrypoints(lane) - if entrypoints: - - def make_link(ep): - return annotator.search_url( - lane, query, pagination=None, facets=facets.navigate(entrypoint=ep) - ) - - cls.add_entrypoint_links( - opds_feed, make_link, entrypoints, facets.entrypoint - ) - - if len(results) > 0: - # There are works in this list. Add a 'next' link. 
- next_url = annotator.search_url(lane, query, pagination.next_page, facets) - AcquisitionFeed.add_link_to_feed( - feed=opds_feed.feed, rel="next", href=next_url - ) - - if pagination.offset > 0: - first_url = annotator.search_url(lane, query, pagination.first_page, facets) - AcquisitionFeed.add_link_to_feed( - feed=opds_feed.feed, rel="first", href=first_url - ) - - previous_page = pagination.previous_page - if previous_page: - previous_url = annotator.search_url(lane, query, previous_page, facets) - AcquisitionFeed.add_link_to_feed( - feed=opds_feed.feed, rel="previous", href=previous_url - ) - - # Add "up" link. - AcquisitionFeed.add_link_to_feed( - feed=opds_feed.feed, - rel="up", - href=annotator.lane_url(lane), - title=str(lane.display_name), - ) - - # We do not add breadcrumbs to this feed since you're not - # technically searching the this lane; you are searching the - # library's entire collection, using _some_ of the constraints - # imposed by this lane (notably language and audience). - - annotator.annotate_feed(opds_feed, lane) - return OPDSFeedResponse(response=str(opds_feed), **response_kwargs) - - @classmethod - def single_entry( - cls, - _db, - work, - annotator, - force_create=False, - raw=False, - use_cache=True, - **response_kwargs, - ): - """Create a single-entry OPDS document for one specific work. - - :param _db: A database connection. - :param work: A Work - :param work: An Annotator - :param force_create: Create the OPDS entry from scratch even - if there's already a cached one. - :param raw: If this is False (the default), a Flask Response will be returned, - ready to be sent over the network. Otherwise an object representing - the underlying OPDS entry will be returned. - :param use_cache: Boolean value determining whether the OPDS cache shall be used. - :param response_kwargs: These keyword arguments will be passed into the Response - constructor, if it is invoked. - :return: A Response, if `raw` is false. Otherwise, an OPDSMessage - or an etree._Element -- whatever was returned by - OPDSFeed.create_entry. - """ - - feed = cls(_db, "", "", [], annotator=annotator) - if not isinstance(work, Edition) and not work.presentation_edition: - return None - entry = feed.create_entry( - work, - even_if_no_license_pool=True, - force_create=force_create, - use_cache=use_cache, - ) - - # Since this tag is going to be the root of an XML - # document it's essential that it include an up-to-date nsmap, - # even if it was generated from an old cached tag that - # had an older nsmap. - if isinstance(entry, etree._Element) and not "drm" in entry.nsmap: - # This workaround (creating a brand new tag) is necessary - # because the nsmap attribute is immutable. See - # https://bugs.launchpad.net/lxml/+bug/555602 - nsmap = entry.nsmap - nsmap["drm"] = AtomFeed.DRM_NS - new_root = etree.Element(entry.tag, nsmap=nsmap) - new_root[:] = entry[:] - entry = new_root - if raw or entry is None: - return entry - if isinstance(entry, OPDSMessage): - entry = str(entry) - # This is probably an error message; don't cache it - # even if it would otherwise be cached. - response_kwargs["max_age"] = 0 - response_kwargs["private"] = True - elif isinstance(entry, etree._Element): - entry = etree.tostring(entry, encoding="unicode") - - # It's common for a single OPDS entry to be returned as the - # result of an unsafe operation, so we will default to setting - # the response as private and uncacheable. 
- response_kwargs.setdefault("max_age", 0) - response_kwargs.setdefault("private", True) - - return OPDSEntryResponse(response=entry, **response_kwargs) - - @classmethod - def error_message(cls, identifier, error_status, error_message): - """Turn an error result into an OPDSMessage suitable for - adding to a feed. - """ - return OPDSMessage(identifier.urn, error_status, error_message) - - @classmethod - def facet_links(cls, annotator, facets): - """Create links for this feed's navigational facet groups. - - This does not create links for the entry point facet group, - because those links should only be present in certain - circumstances, and this method doesn't know if those - circumstances apply. You need to decide whether to call - add_entrypoint_links in addition to calling this method. - """ - for group, value, new_facets, selected in facets.facet_groups: - url = annotator.facet_url(new_facets) - if not url: - continue - group_title = Facets.GROUP_DISPLAY_TITLES.get(group) - facet_title = Facets.FACET_DISPLAY_TITLES.get(value) - if not facet_title: - display_lambda = Facets.FACET_DISPLAY_TITLES_DYNAMIC.get(group) - facet_title = display_lambda(new_facets) if display_lambda else None - if not (group_title and facet_title): - # This facet group or facet, is not recognized by the - # system. It may be left over from an earlier version, - # or just weird junk data. - continue - yield cls.facet_link(url, str(facet_title), str(group_title), selected) - - def __init__(self, _db, title, url, works, annotator=None, precomposed_entries=[]): - """Turn a list of works, messages, and precomposed entries - into a feed. - """ - if not annotator: - annotator = Annotator - if callable(annotator): - annotator = annotator() - self.annotator = annotator - - super().__init__(title, url) - - for work in works: - self.add_entry(work) - - # Add the precomposed entries and the messages. - for entry in precomposed_entries: - if isinstance(entry, OPDSMessage): - entry = entry.tag - self.feed.append(entry) - - def add_entry(self, work): - """Attempt to create an OPDS . If successful, append it to - the feed. - """ - entry = self.create_entry(work) - - if entry is not None: - if isinstance(entry, OPDSMessage): - entry = entry.tag - self.feed.append(entry) - return entry - - def create_entry( - self, - work: Work | Edition | None, - even_if_no_license_pool=False, - force_create=False, - use_cache=True, - ) -> etree.Element | OPDSMessage: - """Turn a work into an entry for an acquisition feed.""" - identifier = None - if isinstance(work, Edition): - active_edition = work - identifier = active_edition.primary_identifier - active_license_pool = None - work = None - else: - active_license_pool = self.annotator.active_licensepool_for(work) - if not work: - # We have a license pool but no work. Most likely we don't have - # metadata for this work yet. - return None - - if active_license_pool: - identifier = active_license_pool.identifier - active_edition = active_license_pool.presentation_edition - elif work.presentation_edition: - active_edition = work.presentation_edition - identifier = active_edition.primary_identifier - - # There's no reason to present a book that has no active license pool. 
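# A toy sketch of the (identifier, edition) resolution order that create_entry()
# applies below: prefer the active LicensePool's identifier and presentation
# edition, fall back to the Work's own presentation edition, and give up when
# neither exists.  SimpleNamespace objects stand in for the real ORM models.
from types import SimpleNamespace


def resolve_entry_source(work, active_license_pool):
    if active_license_pool is not None:
        return active_license_pool.identifier, active_license_pool.presentation_edition
    if work is not None and work.presentation_edition is not None:
        edition = work.presentation_edition
        return edition.primary_identifier, edition
    return None, None  # nothing to present; the caller logs a warning instead


edition = SimpleNamespace(primary_identifier="urn:isbn:9780000000000", title="Example")
work = SimpleNamespace(presentation_edition=edition)
assert resolve_entry_source(work, None) == ("urn:isbn:9780000000000", edition)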
- if not identifier: - logging.warning("%r HAS NO IDENTIFIER", work) - return None - - if not active_license_pool and not even_if_no_license_pool: - logging.warning("NO ACTIVE LICENSE POOL FOR %r", work) - return self.error_message( - identifier, - 403, - "I've heard about this work but have no active licenses for it.", - ) - - if not active_edition: - logging.warning("NO ACTIVE EDITION FOR %r", active_license_pool) - return self.error_message( - identifier, - 403, - "I've heard about this work but have no metadata for it.", - ) - - try: - return self._create_entry( - work, - active_license_pool, - active_edition, - identifier, - force_create, - use_cache, - ) - except UnfulfillableWork as e: - logging.info( - "Work %r is not fulfillable, refusing to create an .", - work, - ) - return self.error_message( - identifier, - 403, - "I know about this work but can offer no way of fulfilling it.", - ) - except Exception as e: - logging.error("Exception generating OPDS entry for %r", work, exc_info=e) - return None - - def _create_entry( - self, - work, - active_license_pool, - edition, - identifier, - force_create=False, - use_cache=True, - ): - """Build a complete OPDS entry for the given Work. - - The OPDS entry will contain bibliographic information about - the Work, as well as information derived from a specific - LicensePool and Identifier associated with the Work. - - :param work: The Work whose OPDS entry the client is interested in. - :active_license_pool: Of all the LicensePools associated with this - Work, the client has expressed interest in this one. - :param edition: The edition to use as the presentation edition - when creating the entry. If this is not present, the work's - existing presentation edition will be used. - :identifier: Of all the Identifiers associated with this - Work, the client has expressed interest in this one. - :param force_create: Create this entry even if there's already - a cached one. - :param use_cache: If true, a newly created entry will be cached - in the appropriate storage field of Work -- either - simple_opds_entry or verbose_opds_entry. (NOTE: this has some - overlap with force_create which is difficult to explain.) - :return: An lxml Element object - """ - xml = None - field = self.annotator.opds_cache_field - - if field and work and not force_create and use_cache: - xml = getattr(work, field) - - if xml: - xml = etree.fromstring(xml) - else: - xml = self._make_entry_xml(work, edition) - data = etree.tounicode(xml) - if field and use_cache: - setattr(work, field, data) - - # Now add the stuff specific to the selected Identifier - # and LicensePool. - self.annotator.annotate_work_entry( - work, active_license_pool, edition, identifier, self, xml - ) - - return xml - - def _make_entry_xml(self, work, edition): - """Create a new (incomplete) OPDS entry for the given work. - - It will be completed later, in an application-specific way, - in annotate_work_entry(). - - :param work: The Work that needs an OPDS entry. - :param edition: The edition to use as the presentation edition - when creating the entry. 
- """ - if not work: - return None - - if not edition: - edition = work.presentation_edition - - # Find the .epub link - epub_href = None - p = None - - links = [] - cover_quality = 0 - qualities = [] - if work: - qualities.append(("Work quality", work.quality)) - full_url = None - - thumbnail_urls, full_urls = self.annotator.cover_links(work) - for rel, urls in ( - (Hyperlink.IMAGE, full_urls), - (Hyperlink.THUMBNAIL_IMAGE, thumbnail_urls), - ): - for url in urls: - # TODO: This is suboptimal. We know the media types - # associated with these URLs when they are - # Representations, but we don't have a way to connect - # the cover_full_url with the corresponding - # Representation, and performance considerations make - # it impractical to follow the object reference every - # time. - image_type = "image/png" - if url.endswith(".jpeg") or url.endswith(".jpg"): - image_type = "image/jpeg" - elif url.endswith(".gif"): - image_type = "image/gif" - links.append(AtomFeed.link(rel=rel, href=url, type=image_type)) - - sample_links = self.annotator.samples(edition) - for link in sample_links: - links.append( - AtomFeed.link( - rel=Hyperlink.CLIENT_SAMPLE, - href=link.resource.url, - type=link.resource.representation.media_type, - ) - ) - - content = self.annotator.content(work) - if isinstance(content, bytes): - content = content.decode("utf8") - - content_type = "html" - kw = {} - if edition.medium: - additional_type = Edition.medium_to_additional_type.get(edition.medium) - if not additional_type: - logging.warning("No additionalType for medium %s", edition.medium) - additional_type_field = AtomFeed.schema_("additionalType") - kw[additional_type_field] = additional_type - - entry = AtomFeed.entry(AtomFeed.title(edition.title or OPDSFeed.NO_TITLE), **kw) - if edition.subtitle: - subtitle_tag = AtomFeed.makeelement(AtomFeed.schema_("alternativeHeadline")) - subtitle_tag.text = edition.subtitle - entry.append(subtitle_tag) - - author_tags = self.annotator.authors(work, edition) - entry.extend(author_tags) - - if edition.series: - entry.extend( - [self.annotator.series(edition.series, edition.series_position)] - ) - - if content: - entry.extend([AtomFeed.summary(content, type=content_type)]) - - permanent_work_id_tag = AtomFeed.makeelement( - "{%s}pwid" % AtomFeed.SIMPLIFIED_NS - ) - permanent_work_id_tag.text = edition.permanent_work_id - entry.append(permanent_work_id_tag) - - entry.extend(links) - - categories_by_scheme = self.annotator.categories(work) - category_tags = [] - for scheme, categories in list(categories_by_scheme.items()): - for category in categories: - if isinstance(category, (bytes, str)): - category = dict(term=category) - category = dict( - list(map(str, (k, v))) for k, v in list(category.items()) - ) - category_tag = AtomFeed.category(scheme=scheme, **category) - category_tags.append(category_tag) - entry.extend(category_tags) - - # print(" ID %s TITLE %s AUTHORS %s" % (tag, work.title, work.authors)) - language = edition.language_code - if language: - language_tag = AtomFeed.makeelement("{%s}language" % AtomFeed.DCTERMS_NS) - language_tag.text = language - entry.append(language_tag) - - if edition.publisher: - publisher_tag = AtomFeed.makeelement("{%s}publisher" % AtomFeed.DCTERMS_NS) - publisher_tag.text = edition.publisher - entry.extend([publisher_tag]) - - if edition.imprint: - imprint_tag = AtomFeed.makeelement( - "{%s}publisherImprint" % AtomFeed.BIB_SCHEMA_NS - ) - imprint_tag.text = edition.imprint - entry.extend([imprint_tag]) - - # Entry.issued is the date the ebook 
came out, as distinct - # from Entry.published (which may refer to the print edition - # or some original edition way back when). - # - # For Dublin Core 'issued' we use Entry.issued if we have it - # and Entry.published if not. In general this means we use - # issued date for Gutenberg and published date for other - # sources. - # - # For the date the book was added to our collection we use - # atom:published. - # - # Note: feedparser conflates dc:issued and atom:published, so - # it can't be used to extract this information. However, these - # tags are consistent with the OPDS spec. - issued = edition.issued or edition.published - if isinstance(issued, datetime.datetime) or isinstance(issued, datetime.date): - now = utc_now() - today = datetime.date.today() - issued_already = False - if isinstance(issued, datetime.datetime): - issued_already = issued <= now - elif isinstance(issued, datetime.date): - issued_already = issued <= today - if issued_already: - issued_tag = AtomFeed.makeelement("{%s}issued" % AtomFeed.DCTERMS_NS) - # Use datetime.isoformat instead of datetime.strftime because - # strftime only works on dates after 1890, and we have works - # that were issued much earlier than that. - # TODO: convert to local timezone, not that it matters much. - issued_tag.text = issued.isoformat().split("T")[0] - entry.extend([issued_tag]) - - return entry - - CURRENT_ENTRYPOINT_ATTRIBUTE = "{%s}entryPoint" % AtomFeed.SIMPLIFIED_NS - - def show_current_entrypoint(self, entrypoint): - """Annotate this given feed with a simplified:entryPoint - attribute pointing to the current entrypoint's TYPE_URI. - - This gives clients an overall picture of the type of works in - the feed, and a way to distinguish between one EntryPoint - and another. - - :param entrypoint: An EntryPoint. - """ - if not entrypoint: - return - - if not entrypoint.URI: - return - self.feed.attrib[self.CURRENT_ENTRYPOINT_ATTRIBUTE] = entrypoint.URI - - def add_breadcrumbs(self, lane, include_lane=False, entrypoint=None): - """Add list of ancestor links in a breadcrumbs element. - - :param lane: Add breadcrumbs from up to this lane. - :param include_lane: Include `lane` itself in the breadcrumbs. - :param entrypoint: The currently selected entrypoint, if any. - - TODO: The switchover from "no entry point" to "entry point" needs - its own breadcrumb link. - """ - if entrypoint is None: - entrypoint_query = "" - else: - entrypoint_query = "?entrypoint=" + entrypoint.INTERNAL_NAME - - # Breadcrumbs for lanes may be end up being cut off by a - # patron-type-specific root lane. If so, that lane -- not the - # site root -- should become the first breadcrumb. - site_root_lane = None - usable_parentage = [] - if lane is not None: - for ancestor in [lane] + list(lane.parentage): - if isinstance(ancestor, Lane) and ancestor.root_for_patron_type: - # Root lane for a specific patron type. The root is - # treated specially, so it should not be added to - # usable_parentage. Any lanes between this lane and the - # library root should not be included at all. - site_root_lane = ancestor - break - - if ancestor != lane or include_lane: - # A lane may appear in its own breadcrumbs - # only if include_lane is True. - usable_parentage.append(ancestor) - - annotator = self.annotator - if lane == site_root_lane or ( - site_root_lane is None - and annotator.lane_url(lane) == annotator.default_lane_url() - ): - # There are no extra breadcrumbs: either we are at the - # site root, or we are at a lane that is the root for a - # specific patron type. 
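# A self-contained sketch of the breadcrumb trail assembled below: a root link
# (the site root, or a patron-type root lane when one is found in the
# parentage), an optional entry-point link, then one link per usable ancestor
# lane.  Lane here is a tiny illustrative class, not the real ORM model, and
# the URLs are invented for the example.
class Lane:
    def __init__(self, name, parent=None, root_for_patron_type=False):
        self.name = name
        self.parent = parent
        self.root_for_patron_type = root_for_patron_type

    @property
    def url(self):
        return f"http://example.test/lane/{self.name}"


def breadcrumbs(lane, site_root_url="http://example.test/", entrypoint=None):
    # Walk upward from the lane, stopping early at a patron-type root lane.
    chain, root = [], None
    ancestor = lane.parent
    while ancestor is not None:
        if ancestor.root_for_patron_type:
            root = (ancestor.name, ancestor.url)
            break
        chain.append((ancestor.name, ancestor.url))
        ancestor = ancestor.parent
    crumbs = [root or ("Collection Home", site_root_url)]
    if entrypoint:
        crumbs.append((entrypoint, crumbs[0][1] + f"?entrypoint={entrypoint}"))
    crumbs.extend(reversed(chain))
    return crumbs


fiction = Lane("fiction", parent=Lane("adult", root_for_patron_type=True))
# breadcrumbs(fiction, entrypoint="Book") ->
# [("adult", ".../lane/adult"), ("Book", ".../lane/adult?entrypoint=Book")]
# The lane itself is omitted, matching the default include_lane=False above.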
- return - - # Start work on a simplified:breadcrumbs tag. - breadcrumbs = AtomFeed.makeelement("{%s}breadcrumbs" % AtomFeed.SIMPLIFIED_NS) - - # Add root link. This is either the link to the site root - # or to the root lane for some patron type. - if site_root_lane is None: - root_url = annotator.default_lane_url() - root_title = annotator.top_level_title() - else: - root_url = annotator.lane_url(site_root_lane) - root_title = site_root_lane.display_name - root_link = AtomFeed.link(title=root_title, href=root_url) - breadcrumbs.append(root_link) - - # Add entrypoint selection link - if entrypoint: - breadcrumbs.append( - AtomFeed.link( - title=entrypoint.INTERNAL_NAME, href=root_url + entrypoint_query - ) - ) - - # Add links for all usable lanes between `lane` and `site_root_lane` - # (possibly including `lane` itself). - for ancestor in reversed(usable_parentage): - lane_url = annotator.lane_url(ancestor) - if lane_url == root_url: - # Root lane for the entire site. - break - - breadcrumbs.append( - AtomFeed.link( - title=ancestor.display_name, href=lane_url + entrypoint_query - ) - ) - - # Append the breadcrumbs to the feed. - self.feed.append(breadcrumbs) - - @classmethod - def minimal_opds_entry( - cls, identifier, cover, description, quality, most_recent_update=None - ): - elements = [] - representations = [] - if cover: - cover_representation = cover.representation - representations.append(cover.representation) - cover_link = AtomFeed.makeelement( - "link", - href=cover_representation.public_url, - type=cover_representation.media_type, - rel=Hyperlink.IMAGE, - ) - elements.append(cover_link) - if cover_representation.thumbnails: - thumbnail = cover_representation.thumbnails[0] - representations.append(thumbnail) - thumbnail_link = AtomFeed.makeelement( - "link", - href=thumbnail.public_url, - type=thumbnail.media_type, - rel=Hyperlink.THUMBNAIL_IMAGE, - ) - elements.append(thumbnail_link) - if description: - content = description.representation.content - if isinstance(content, bytes): - content = content.decode("utf8") - description_e = AtomFeed.summary(content, type="html") - elements.append(description_e) - representations.append(description.representation) - - if quality: - elements.append(Annotator.rating_tag(Measurement.QUALITY, quality)) - - # The update date is the most recent date any of these - # resources were mirrored/fetched. 
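# A small sketch of the "most recent update" computation that follows: take
# whichever of mirrored_at / fetched_at each representation has, optionally add
# an externally supplied date, and keep the maximum.  Plain dicts stand in for
# Representation objects and the dates are invented.
import datetime


def most_recent_update(representations, fallback=None):
    candidates = [
        r.get("mirrored_at") or r.get("fetched_at")
        for r in representations
        if r.get("mirrored_at") or r.get("fetched_at")
    ]
    if fallback:
        candidates.append(fallback)
    return max(candidates) if candidates else None


reps = [
    {"mirrored_at": datetime.datetime(2023, 8, 1)},
    {"fetched_at": datetime.datetime(2023, 8, 15)},
    {},  # never mirrored or fetched; contributes nothing
]
assert most_recent_update(reps) == datetime.datetime(2023, 8, 15)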
- potential_update_dates = [ - r.mirrored_at or r.fetched_at - for r in representations - if r.mirrored_at or r.fetched_at - ] - if most_recent_update: - potential_update_dates.append(most_recent_update) - - if potential_update_dates: - update_date = max(potential_update_dates) - elements.append(AtomFeed.updated(AtomFeed._strftime(update_date))) - entry = AtomFeed.entry( - AtomFeed.id(identifier.urn), AtomFeed.title(OPDSFeed.NO_TITLE), *elements - ) - return entry - - @classmethod - def link(cls, rel, href, type): - return AtomFeed.makeelement("link", type=type, rel=rel, href=href) - - @classmethod - def acquisition_link(cls, rel, href, types, active_loan=None): - if types: - initial_type = types[0] - indirect_types = types[1:] - else: - initial_type = None - indirect_types = [] - link = cls.link(rel, href, initial_type) - indirect = cls.indirect_acquisition(indirect_types) - - if indirect is not None: - link.append(indirect) - return link - - @classmethod - def indirect_acquisition(cls, indirect_types): - top_level_parent = None - parent = None - for t in indirect_types: - indirect_link = AtomFeed.makeelement( - "{%s}indirectAcquisition" % AtomFeed.OPDS_NS, type=t - ) - if parent is not None: - parent.extend([indirect_link]) - parent = indirect_link - if top_level_parent is None: - top_level_parent = indirect_link - return top_level_parent - - @classmethod - def license_tags(cls, license_pool, loan, hold): - # Generate a list of licensing tags. These should be inserted - # into a tag. - tags = [] - availability_tag_name = None - suppress_since = False - status = None - since = None - until = None - - if not license_pool: - return - default_loan_period = default_reservation_period = None - collection = license_pool.collection - if (loan or hold) and not license_pool.open_access: - if loan: - obj = loan - elif hold: - obj = hold - default_loan_period = datetime.timedelta( - collection.default_loan_period(obj.library) - ) - if loan: - status = "available" - since = loan.start - if not loan.license_pool.unlimited_access: - until = loan.until(default_loan_period) - elif hold: - if not license_pool.open_access: - default_reservation_period = datetime.timedelta( - collection.default_reservation_period - ) - until = hold.until(default_loan_period, default_reservation_period) - if hold.position == 0: - status = "ready" - since = None - else: - status = "reserved" - since = hold.start - elif ( - license_pool.open_access - or license_pool.unlimited_access - or (license_pool.licenses_available > 0 and license_pool.licenses_owned > 0) - ): - status = "available" - else: - status = "unavailable" - - kw = dict(status=status) - if since: - kw["since"] = AtomFeed._strftime(since) - if until: - kw["until"] = AtomFeed._strftime(until) - tag_name = "{%s}availability" % AtomFeed.OPDS_NS - availability_tag = AtomFeed.makeelement(tag_name, **kw) - tags.append(availability_tag) - - # Open-access pools do not need to display or . - if license_pool.open_access or license_pool.unlimited_access: - return tags - - holds_kw = dict() - total = license_pool.patrons_in_hold_queue or 0 - - if hold: - if hold.position is None: - # This shouldn't happen, but if it does, assume we're last - # in the list. - position = total - else: - position = hold.position - - if position > 0: - holds_kw["position"] = str(position) - if position > total: - # The patron's hold position appears larger than the total - # number of holds. 
This happens frequently because the - # number of holds and a given patron's hold position are - # updated by different processes. Don't propagate this - # appearance to the client. - total = position - elif position == 0 and total == 0: - # The book is reserved for this patron but they're not - # counted as having it on hold. This is the only case - # where we know that the total number of holds is - # *greater* than the hold position. - total = 1 - holds_kw["total"] = str(total) - - holds = AtomFeed.makeelement("{%s}holds" % AtomFeed.OPDS_NS, **holds_kw) - tags.append(holds) - - copies_kw = dict( - total=str(license_pool.licenses_owned or 0), - available=str(license_pool.licenses_available or 0), - ) - copies = AtomFeed.makeelement("{%s}copies" % AtomFeed.OPDS_NS, **copies_kw) - tags.append(copies) - - return tags - - @classmethod - def format_types(cls, delivery_mechanism): - """Generate a set of types suitable for passing into - acquisition_link(). - """ - types = [] - # If this is a streaming book, you have to get an OPDS entry, then - # get a direct link to the streaming reader from that. - if delivery_mechanism.is_streaming: - types.append(OPDSFeed.ENTRY_TYPE) - - # If this is a DRM-encrypted book, you have to get through the DRM - # to get the goodies inside. - drm = delivery_mechanism.drm_scheme_media_type - if drm: - types.append(drm) - - # Finally, you get the goodies. - media = delivery_mechanism.content_type_media_type - if media: - types.append(media) - - return types - - -class LookupAcquisitionFeed(AcquisitionFeed): - """Used when the user has requested a lookup of a specific identifier, - which may be different from the identifier used by the Work's - default LicensePool. - """ - - def create_entry(self, work): - """Turn an Identifier and a Work into an entry for an acquisition - feed. - """ - identifier, work = work - - # Unless the client is asking for something impossible - # (e.g. the Identifier is not really associated with the - # Work), we should be able to use the cached OPDS entry for - # the Work. - if identifier.licensed_through: - active_licensepool = identifier.licensed_through[0] - else: - # Use the default active LicensePool for the Work. - active_licensepool = self.annotator.active_licensepool_for(work) - - error_status = error_message = None - if not active_licensepool: - error_status = 404 - error_message = "Identifier not found in collection" - elif identifier.work != work: - error_status = 500 - error_message = ( - 'I tried to generate an OPDS entry for the identifier "%s" using a Work not associated with that identifier.' - % identifier.urn - ) - - if error_status: - return self.error_message(identifier, error_status, error_message) - - if active_licensepool: - edition = active_licensepool.presentation_edition - else: - edition = work.presentation_edition - try: - return self._create_entry(work, active_licensepool, edition, identifier) - except UnfulfillableWork as e: - logging.info( - "Work %r is not fulfillable, refusing to create an .", work - ) - return self.error_message( - identifier, - 403, - "I know about this work but can offer no way of fulfilling it.", - ) - - -class NavigationFacets(FeaturedFacets): - CACHED_FEED_TYPE = CachedFeed.NAVIGATION_TYPE - - -class NavigationFeed(OPDSFeed): - @classmethod - def navigation( - cls, - _db, - title, - url, - worklist, - annotator, - facets=None, - max_age=None, - **response_kwargs, - ): - """The navigation feed with links to a given lane's sublanes. 
- - :param response_kwargs: Extra keyword arguments to pass into - the OPDSFeedResponse constructor. - - :return: A Response - """ - - annotator = AcquisitionFeed._make_annotator(annotator) - facets = facets or NavigationFacets.default(worklist) - - def refresh(): - return cls._generate_navigation(_db, title, url, worklist, annotator) - - response_kwargs.setdefault("mimetype", OPDSFeed.NAVIGATION_FEED_TYPE) - return CachedFeed.fetch( - _db, - worklist=worklist, - pagination=None, - facets=facets, - refresher_method=refresh, - max_age=max_age, - **response_kwargs, - ) - - @classmethod - def _generate_navigation(cls, _db, title, url, worklist, annotator): - feed = NavigationFeed(title, url) - - if not worklist.children: - # We can't generate links to children, since this Worklist - # has no children, so we'll generate a link to the - # Worklist's page-type feed instead. - title = "All " + worklist.display_name - page_url = annotator.feed_url(worklist) - feed.add_entry(page_url, title, cls.ACQUISITION_FEED_TYPE) - - for child in worklist.visible_children: - title = child.display_name - if child.children: - child_url = annotator.navigation_url(child) - feed.add_entry(child_url, title, cls.NAVIGATION_FEED_TYPE) - else: - child_url = annotator.feed_url(child) - feed.add_entry(child_url, title, cls.ACQUISITION_FEED_TYPE) - - annotator.annotate_feed(feed, worklist) - return feed - - def add_entry(self, url, title, type=OPDSFeed.NAVIGATION_FEED_TYPE): - """Create an OPDS navigation entry for a URL.""" - entry = AtomFeed.entry(AtomFeed.title(title)) - entry.extend([AtomFeed.id(url)]) - entry.extend([AtomFeed.link(rel="subsection", href=url, type=type)]) - self.feed.append(entry) - - -# Mock annotators for use in unit tests. - - -class MockAnnotator(Annotator): - def __init__(self): - self.lanes_by_work = defaultdict(list) - - @classmethod - def lane_url(cls, lane): - if lane and lane.has_visible_children: - return cls.groups_url(lane) - elif lane: - return cls.feed_url(lane) - else: - return "" - - @classmethod - def feed_url(cls, lane, facets=None, pagination=None): - if isinstance(lane, Lane): - base = "http://%s/" % lane.url_name - else: - base = "http://%s/" % lane.display_name - sep = "?" - if facets: - base += sep + facets.query_string - sep = "&" - if pagination: - base += sep + pagination.query_string - return base - - @classmethod - def search_url(cls, lane, query, pagination, facets=None): - if isinstance(lane, Lane): - base = "http://%s/" % lane.url_name - else: - base = "http://%s/" % lane.display_name - sep = "?" - if pagination: - base += sep + pagination.query_string - sep = "&" - if facets: - facet_query_string = facets.query_string - if facet_query_string: - base += sep + facet_query_string - return base - - @classmethod - def groups_url(cls, lane, facets=None): - if lane and isinstance(lane, Lane): - identifier = lane.id - else: - identifier = "" - if facets: - facet_string = "?" 
+ facets.query_string - else: - facet_string = "" - - return f"http://groups/{identifier}{facet_string}" - - @classmethod - def default_lane_url(cls): - return cls.groups_url(None) - - @classmethod - def facet_url(cls, facets): - return "http://facet/" + "&".join( - [f"{k}={v}" for k, v in sorted(facets.items())] - ) - - @classmethod - def navigation_url(cls, lane): - if lane and isinstance(lane, Lane): - identifier = lane.id - else: - identifier = "" - return "http://navigation/%s" % identifier - - @classmethod - def top_level_title(cls): - return "Test Top Level Title" - - -class MockAnnotatorWithGroup(MockAnnotator): - def group_uri(self, work, license_pool, identifier): - lanes = self.lanes_by_work.get(work, None) - - if lanes: - lane_dic = lanes.pop(0) - lane_name = lane_dic["lane"].display_name - else: - lane_name = str(work.id) - return ("http://group/%s" % lane_name, "Group Title for %s!" % lane_name) - - def group_uri_for_lane(self, lane): - if lane: - return ( - "http://groups/%s" % lane.display_name, - "Groups of %s" % lane.display_name, - ) - else: - return "http://groups/", "Top-level groups" - - def top_level_title(self): - return "Test Top Level Title" - - -class MockUnfulfillableAnnotator(MockAnnotator): - """Raise an UnfulfillableWork exception when asked to annotate an entry.""" - - def annotate_work_entry(self, *args, **kwargs): - raise UnfulfillableWork() diff --git a/core/opds2.py b/core/opds2.py deleted file mode 100644 index 2cc0bceda7..0000000000 --- a/core/opds2.py +++ /dev/null @@ -1,353 +0,0 @@ -import json -from collections import defaultdict -from typing import Any, Dict, List, Optional -from urllib.parse import urlencode - -from core.external_search import ExternalSearchIndex -from core.lane import Facets, Pagination, WorkList -from core.model.cachedfeed import CachedFeed -from core.model.classification import Genre, Subject -from core.model.contributor import Contribution, Contributor -from core.model.edition import Edition -from core.model.licensing import LicensePool -from core.model.resource import Hyperlink -from core.model.work import Work -from core.opds import AcquisitionFeed -from core.opds_import import OPDSXMLParser - - -class OPDS2Feed: - pass - - -class OPDS2Annotator: - """Annotate a feed following the OPDS2 spec""" - - OPDS2_TYPE = "application/opds+json" - - def __init__(self, url, facets, pagination, library, title="OPDS2 Feed") -> None: - self.url = url - self.facets: Facets = facets - self.library = library - self.title = title - self.pagination = pagination or Pagination() - - def metadata_for_work(self, work: Work) -> Optional[Dict[str, Any]]: - """Create the metadata json for a work item - using the schema https://drafts.opds.io/schema/publication.schema.json""" - - # TODO: What happens when there is no presentation edition? 
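# A hand-written sketch of the publication object that metadata_for_work()
# assembles below, following the OPDS 2 publication schema: a "metadata"
# section plus "links" and "images" collections.  Every value here is invented
# for illustration; the real method derives them from the Edition, Work and
# LicensePool.
import json

publication = {
    "metadata": {
        "@type": "http://schema.org/EBook",
        "title": "Example Title",
        "identifier": "urn:isbn:9780000000000",
        "language": "en",
        "author": {"name": "Example Author"},
        "publisher": {"name": "Example Publisher"},
        "modified": "2023-08-23T00:00:00+00:00",
        "subject": [
            {
                "scheme": "http://librarysimplified.org/terms/genres/Simplified/",
                "name": "Fiction",
                "sortAs": "Fiction",
            }
        ],
    },
    "links": [
        {
            "href": "http://example.test/works/1/borrow",
            "rel": "http://opds-spec.org/acquisition/borrow",
            "type": "application/opds-publication+json",
        }
    ],
    "images": [
        {
            "href": "http://example.test/covers/1.png",
            "rel": "http://opds-spec.org/image",
            "type": "image/png",
        }
    ],
}

print(json.dumps(publication, indent=2))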
- edition: Edition = work.presentation_edition - if not edition: - return None - - pool = self._pool_for_library(edition) - result: Dict[str, Any] = {} - result["@type"] = Edition.medium_to_additional_type.get(str(edition.medium)) - result["title"] = edition.title - result["subtitle"] = edition.subtitle - result["identifier"] = edition.primary_identifier.urn - result["sortAs"] = edition.sort_title - result.update(self._contributors(edition)) - result["language"] = edition.language_code - - subjects = [] - genre: Genre - for genre in work.genres: - subjects.append( - { - "scheme": Subject.SIMPLIFIED_GENRE, - "name": genre.name, - "sortAs": genre.name, - } - ) - if subjects: - result["subject"] = subjects - - # TODO: numberOfPages. we don't store this - # TODO: duration. we don't store this - # TODO: abridged. we don't store this - if edition.publisher: - result["publisher"] = {"name": edition.publisher} - if edition.imprint: - result["imprint"] = {"name": edition.imprint} - if work.last_update_time: - result["modified"] = work.last_update_time.isoformat() - if pool and pool.availability_time: - result["published"] = pool.availability_time.date().isoformat() - result["description"] = work.summary_text - - belongs_to = {} - if work.series: - belongs_to["series"] = { - "name": work.series, - "position": work.series_position - if work.series_position is not None - else 1, - } - - if belongs_to: - result["belongsTo"] = belongs_to - - # TODO: Collection, what does this stand for? - - links = self._work_metadata_links(edition) - image_links = self.resource_links( - edition, Hyperlink.IMAGE, Hyperlink.THUMBNAIL_IMAGE, Hyperlink.ILLUSTRATION - ) - - return dict(metadata=result, links=links, images=image_links) - - def _work_metadata_links(self, edition: Edition): - """Create links for works in the publication""" - samples = self.resource_links(edition, Hyperlink.SAMPLE) - open_access = self.resource_links(edition, Hyperlink.OPEN_ACCESS_DOWNLOAD) - loan_link = self.loan_link(edition) - self_link = self.self_link(edition) - links = [] - if open_access: - links.extend(open_access) - if samples: - links.extend(samples) - if loan_link: - links.append(loan_link) - if self_link: - links.append(self_link) - return links - - def resource_links(self, edition: Edition, *rels) -> List[Dict]: - """Create a link entry based on a stored Resource""" - link: Hyperlink - samples = [] - for link in edition.primary_identifier.links: - if link.rel in rels: - samples.append( - { - "href": link.resource.url, - "rel": link.rel, - "type": link.resource.representation.media_type, - } - ) - return samples - - def loan_link(self, edition: Edition) -> Optional[Dict]: - """Create a Loan link for an edition, needs access to the API layer - Must be implemented in the API layer""" - return None - - def self_link(self, edition: Edition) -> Optional[Dict]: - """Create a Self link for an edition, needs access to the API layer - Must be implemented in the API layer""" - return None - - def _pool_for_library(self, edition: Edition) -> Optional[LicensePool]: - """Fetch the licensepool of an edition that is part of the library we're annotating with""" - collection_ids = [c.id for c in self.library.all_collections] - for pool in edition.license_pools: - if pool.collection_id in collection_ids: - return pool - return None - - def _contributors(self, edition: Edition) -> Dict: - """Create the contributor type entries""" - authors = {} - contribution: Contribution - key_mapping = { - Contributor.PRIMARY_AUTHOR_ROLE: "author", - 
Contributor.TRANSLATOR_ROLE: "translator", - Contributor.EDITOR_ROLE: "editor", - Contributor.ILLUSTRATOR_ROLE: "illustrator", - Contributor.ARTIST_ROLE: "artist", - Contributor.COLORIST_ROLE: "colorist", - Contributor.INKER_ROLE: "inker", - Contributor.PENCILER_ROLE: "pencilor", - Contributor.LETTERER_ROLE: "letterer", - Contributor.NARRATOR_ROLE: "narrator", - Contributor.CONTRIBUTOR_ROLE: "contributor", - } - for contribution in edition.contributions: - if contribution.role in key_mapping: - contributor = contribution.contributor - meta = {"name": contributor.display_name} - if contributor.aliases and len(contributor.aliases) > 0: - meta["additionalName"] = contributor.aliases[0] - - # TODO: Marketplace adds links for the author based search - # should we do the same? - authors[key_mapping[contribution.role]] = meta - return authors - - def feed_links(self): - """Create links for a publication feed""" - links = [ - {"href": self.url, "rel": "self", "type": self.OPDS2_TYPE}, - ] - # If another page is present, then add the next link - if self.pagination.has_next_page: - next_query_string = urlencode( - { - **dict(self.pagination.next_page.items()), - **dict(self.facets.items()), - }, - doseq=True, - ) - next_url = self.url.split("?", 1)[0] + "?" + next_query_string - links.append({"href": next_url, "rel": "next", "type": self.OPDS2_TYPE}) - - return links - - def feed_metadata(self): - """Create the metadata for a publication feed""" - return { - "title": self.title, - "itemsPerPage": self.pagination.size, - } - - @classmethod - def facet_url(cls, facets): - """Should be overwritten in the OPDS2PublicationsAnnottator""" - return None - - -class FeedTypes: - """The types of feeds supported for OPDS2""" - - PUBLICATIONS = "publications" - NAVIGATION = "navigation" - - -class AcquisitonFeedOPDS2(OPDS2Feed): - """Creates different kinds of OPDS2 feeds - Currently supports publications and navigation""" - - @classmethod - def publications( - cls, - _db, - worklist: WorkList, - facets: Facets, - pagination: Pagination, - search_engine: ExternalSearchIndex, - annotator: OPDS2Annotator, - max_age: Optional[int] = None, - ): - """The publication feed, cached""" - - # do some caching magic - # then do the publication - def refresh(): - return cls._generate_publications( - _db, worklist, facets, pagination, search_engine, annotator - ) - - return CachedFeed.fetch( - _db, - worklist=worklist, - facets=facets, - pagination=pagination, - refresher_method=refresh, - max_age=max_age, - ) - - @classmethod - def _generate_publications( - cls, - _db, - worklist: WorkList, - facets: Facets, - pagination: Pagination, - search_engine: ExternalSearchIndex, - annotator: OPDS2Annotator, - ): - publications = [] - - for work in worklist.works( - _db, facets=facets, search_engine=search_engine, pagination=pagination - ): - publications.append(work) - - return cls( - _db, - publications, - annotator, - facets, - ) - - @classmethod - def navigation(cls, _db, annotator: OPDS2Annotator): - """The navigation feed""" - return cls(_db, [], annotator, None, feed_type=FeedTypes.NAVIGATION) - - def __init__( - self, - _db, - works: List[Work], - annotator: OPDS2Annotator, - facets: Optional[Facets], - feed_type=FeedTypes.PUBLICATIONS, - ): - self._db = _db - self.works = works - self.annotator = annotator - self.facets = facets - self.feed_type = feed_type - - def json(self): - """The a json feed based on the FeedType""" - if self.feed_type == FeedTypes.PUBLICATIONS: - return self.publications_json() - elif self.feed_type 
== FeedTypes.NAVIGATION: - return self.navigation_json() - - def navigation_json(self): - return { - "metadata": self.annotator.feed_metadata(), - "links": self.annotator.feed_links(), - "navigation": self.annotator.navigation_collection(), - } - - def publications_json(self): - result = {} - - entries = [] - for work in self.works: - entry = self.annotator.metadata_for_work(work) - if entry: - entries.append(entry) - - result["publications"] = entries - result["links"] = self.annotator.feed_links() - result["facets"] = self._facet_links() - result["metadata"] = self.annotator.feed_metadata() - return result - - def _facet_links(self) -> List: - """Reuse the AcquisitionFeed.facet_links method to create the available facets meta""" - links: Dict = AcquisitionFeed.facet_links(self.annotator, self.facets) - facet_meta = [] - group_meta = defaultdict(list) - - # The AcquisitionFeed adds the OPDS namespace to the keys, so must read them as such - ns = OPDSXMLParser.NAMESPACES["opds"] - - for link in links: - meta = { - "href": link["href"], - "title": link["title"], - "type": "application/opds+json", - } - - # If this is the active facet, set the rel to "self" - if link.get(f"{{{ns}}}activeFacet") == "true": - meta["rel"] = "self" - - # Grouped by the facet group name - group_meta[link[f"{{{ns}}}facetGroup"]].append(meta) - - for name, facet_links in group_meta.items(): - facet_meta.append({"metadata": {"title": name}, "links": facet_links}) - return facet_meta - - def __str__(self): - """Make the serialized OPDS2 feed""" - return json.dumps(self.json()) diff --git a/core/scripts.py b/core/scripts.py index 7e55eee156..eaa0cdd3c5 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -1562,7 +1562,6 @@ def do_run(self): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_opds_entries=True, regenerate_marc_record=True, update_search_index=True, verbose=True, @@ -1732,7 +1731,6 @@ class WorkClassificationScript(WorkPresentationScript): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_opds_entries=False, regenerate_marc_record=False, update_search_index=False, ) @@ -1883,7 +1881,6 @@ class WorkOPDSScript(WorkPresentationScript): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_opds_entries=True, regenerate_marc_record=True, update_search_index=True, ) diff --git a/core/util/opds_writer.py b/core/util/opds_writer.py index c5b36ace4f..34feed5b47 100644 --- a/core/util/opds_writer.py +++ b/core/util/opds_writer.py @@ -82,14 +82,6 @@ def _strftime(cls, date): return date.strftime(fmt) - @classmethod - def add_link_to_feed(cls, feed, children=None, **kwargs): - link = cls.E.link(**kwargs) - feed.append(link) - if children: - for i in children: - link.append(i) - @classmethod def add_link_to_entry(cls, entry, children=None, **kwargs): if "title" in kwargs: @@ -101,12 +93,8 @@ def add_link_to_entry(cls, entry, children=None, **kwargs): link.append(i) @classmethod - def author(cls, *args, **kwargs): - return cls.E.author(*args, **kwargs) - - @classmethod - def contributor(cls, *args, **kwargs): - return cls.E.contributor(*args, **kwargs) + def link(cls, *args, **kwargs): + return cls.E.link(*args, **kwargs) @classmethod def category(cls, *args, **kwargs): @@ -116,42 +104,6 @@ def category(cls, *args, **kwargs): def entry(cls, *args, **kwargs): return cls.E.entry(*args, **kwargs) - @classmethod - def id(cls, *args, **kwargs): - return cls.E.id(*args, **kwargs) - - @classmethod - def link(cls, *args, **kwargs): - return 
cls.E.link(*args, **kwargs) - - @classmethod - def makeelement(cls, *args, **kwargs): - return cls.E._makeelement(*args, **kwargs) - - @classmethod - def name(cls, *args, **kwargs): - return cls.E.name(*args, **kwargs) - - @classmethod - def schema_(cls, field_name): - return f"{{{cls.SCHEMA_NS}}}{field_name}" - - @classmethod - def summary(cls, *args, **kwargs): - return cls.E.summary(*args, **kwargs) - - @classmethod - def title(cls, *args, **kwargs): - return cls.E.title(*args, **kwargs) - - @classmethod - def update(cls, *args, **kwargs): - return cls.E.update(*args, **kwargs) - - @classmethod - def updated(cls, *args, **kwargs): - return cls.E.updated(*args, **kwargs) - def __init__(self, title, url, **kwargs): """Constructor. diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index 4a06baa594..4cfdafc931 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -34,7 +34,6 @@ HOME=/var/www/circulation 30 22 * * * root core/bin/run work_classify_unchecked_subjects >> /var/log/cron.log 2>&1 # If any works have out-of-date OPDS entries or MARC records, rebuild them, -40 22 * * * root core/bin/run opds_entry_coverage >> /var/log/cron.log 2>&1 40 23 * * * root core/bin/run marc_record_coverage >> /var/log/cron.log 2>&1 # Remove miscellaneous expired things from the database diff --git a/tests/api/admin/controller/test_dashboard.py b/tests/api/admin/controller/test_dashboard.py index 8891d5d05e..7a7260fc89 100644 --- a/tests/api/admin/controller/test_dashboard.py +++ b/tests/api/admin/controller/test_dashboard.py @@ -5,7 +5,7 @@ import pytest -from api.admin.opds import AdminAnnotator +from core.feed.annotator.admin import AdminAnnotator from core.model import CirculationEvent, Genre, WorkGenre, get_one_or_create from core.util.datetime_helpers import utc_now from tests.fixtures.api_admin import AdminControllerFixture @@ -63,7 +63,7 @@ def test_circulation_events(self, dashboard_fixture: DashboardFixture): url = AdminAnnotator( dashboard_fixture.manager.d_circulation, # type: ignore dashboard_fixture.ctrl.db.default_library(), - ).permalink_for(dashboard_fixture.english_1, lp, lp.identifier) + ).permalink_for(lp.identifier) events = response["circulation_events"] assert types[::-1] == [event["type"] for event in events] @@ -80,7 +80,7 @@ def test_circulation_events(self, dashboard_fixture: DashboardFixture): url = AdminAnnotator( dashboard_fixture.manager.d_circulation, # type: ignore dashboard_fixture.ctrl.db.default_library(), - ).permalink_for(dashboard_fixture.english_1, lp, lp.identifier) + ).permalink_for(lp.identifier) assert 2 == len(response["circulation_events"]) diff --git a/tests/api/admin/controller/test_discovery_services.py b/tests/api/admin/controller/test_discovery_services.py index 2abf538c39..23e3b6e2c8 100644 --- a/tests/api/admin/controller/test_discovery_services.py +++ b/tests/api/admin/controller/test_discovery_services.py @@ -318,7 +318,7 @@ def test_discovery_service_delete( service = get_one( settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, + IntegrationConfiguration, id=discovery_service.id, ) assert None == service diff --git a/tests/api/admin/controller/test_work_editor.py b/tests/api/admin/controller/test_work_editor.py index 10ea9f0372..fbe7b8c626 100644 --- a/tests/api/admin/controller/test_work_editor.py +++ b/tests/api/admin/controller/test_work_editor.py @@ -288,11 +288,8 @@ def staff_edition_count(): ) assert 200 == response.status_code assert "New 
title" == work_fixture.english_1.title - assert "New title" in work_fixture.english_1.simple_opds_entry assert "New subtitle" == work_fixture.english_1.subtitle - assert "New subtitle" in work_fixture.english_1.simple_opds_entry assert "New Author" == work_fixture.english_1.author - assert "New Author" in work_fixture.english_1.simple_opds_entry [author, narrator] = sorted( work_fixture.english_1.presentation_edition.contributions, key=lambda x: x.contributor.display_name, @@ -304,9 +301,7 @@ def staff_edition_count(): assert "Narrator, New" == narrator.contributor.sort_name assert "Narrator" == narrator.role assert "New series" == work_fixture.english_1.series - assert "New series" in work_fixture.english_1.simple_opds_entry assert 144 == work_fixture.english_1.series_position - assert "144" in work_fixture.english_1.simple_opds_entry assert "Audio" == work_fixture.english_1.presentation_edition.medium assert "fre" == work_fixture.english_1.presentation_edition.language assert "New Publisher" == work_fixture.english_1.publisher @@ -317,10 +312,6 @@ def staff_edition_count(): ) assert 0.25 == work_fixture.english_1.quality assert "
<p>New summary</p>
" == work_fixture.english_1.summary_text - assert ( - "<p>New summary</p>" - in work_fixture.english_1.simple_opds_entry - ) assert 1 == staff_edition_count() with work_fixture.request_context_with_library_and_admin("/"): @@ -351,7 +342,6 @@ def staff_edition_count(): ) assert 200 == response.status_code assert "abcd" == work_fixture.english_1.summary_text - assert "New summary" not in work_fixture.english_1.simple_opds_entry [author, narrator, author2] = sorted( work_fixture.english_1.presentation_edition.contributions, key=lambda x: x.contributor.display_name, @@ -395,11 +385,6 @@ def staff_edition_count(): assert None == work_fixture.english_1.series assert None == work_fixture.english_1.series_position assert "" == work_fixture.english_1.summary_text - assert "New subtitle" not in work_fixture.english_1.simple_opds_entry - assert "Narrator" not in work_fixture.english_1.simple_opds_entry - assert "New series" not in work_fixture.english_1.simple_opds_entry - assert "144" not in work_fixture.english_1.simple_opds_entry - assert "abcd" not in work_fixture.english_1.simple_opds_entry assert 1 == staff_edition_count() with work_fixture.request_context_with_library_and_admin("/"): @@ -421,13 +406,6 @@ def staff_edition_count(): assert "Final series" == work_fixture.english_1.series assert 169 == work_fixture.english_1.series_position assert "
<p>Final summary</p>
" == work_fixture.english_1.summary_text - assert "Final subtitle" in work_fixture.english_1.simple_opds_entry - assert "Final series" in work_fixture.english_1.simple_opds_entry - assert "169" in work_fixture.english_1.simple_opds_entry - assert ( - "<p>Final summary</p>" - in work_fixture.english_1.simple_opds_entry - ) assert 1 == staff_edition_count() # Make sure a non-librarian of this library can't edit. diff --git a/tests/api/admin/test_opds.py b/tests/api/admin/test_opds.py deleted file mode 100644 index 9d0b31f4d7..0000000000 --- a/tests/api/admin/test_opds.py +++ /dev/null @@ -1,217 +0,0 @@ -import feedparser - -from api.admin.opds import AdminAnnotator, AdminFeed -from api.opds import AcquisitionFeed -from core.lane import Pagination -from core.model import DataSource, Measurement -from tests.fixtures.database import DatabaseTransactionFixture - - -class TestOPDS: - def links(self, entry, rel=None): - if "feed" in entry: - entry = entry["feed"] - links = sorted(entry["links"], key=lambda x: (x["rel"], x.get("title"))) - r = [] - for l in links: - if ( - not rel - or l["rel"] == rel - or (isinstance(rel, list) and l["rel"] in rel) - ): - r.append(l) - return r - - def test_feed_includes_staff_rating(self, db: DatabaseTransactionFixture): - work = db.work(with_open_access_download=True) - lp = work.license_pools[0] - staff_data_source = DataSource.lookup(db.session, DataSource.LIBRARY_STAFF) - lp.identifier.add_measurement( - staff_data_source, Measurement.RATING, 3, weight=1000 - ) - - feed = AcquisitionFeed( - db.session, - "test", - "url", - [work], - AdminAnnotator(None, db.default_library(), test_mode=True), - ) - [entry] = feedparser.parse(str(feed))["entries"] - rating = entry["schema_rating"] - assert 3 == float(rating["schema:ratingvalue"]) - assert Measurement.RATING == rating["additionaltype"] - - def test_feed_includes_refresh_link(self, db: DatabaseTransactionFixture): - work = db.work(with_open_access_download=True) - lp = work.license_pools[0] - lp.suppressed = False - db.session.commit() - - # If the metadata wrangler isn't configured, the link is left out. 
- feed = AcquisitionFeed( - db.session, - "test", - "url", - [work], - AdminAnnotator(None, db.default_library(), test_mode=True), - ) - [entry] = feedparser.parse(str(feed))["entries"] - assert [] == [ - x - for x in entry["links"] - if x["rel"] == "http://librarysimplified.org/terms/rel/refresh" - ] - - def test_feed_includes_suppress_link(self, db: DatabaseTransactionFixture): - work = db.work(with_open_access_download=True) - lp = work.license_pools[0] - lp.suppressed = False - db.session.commit() - - feed = AcquisitionFeed( - db.session, - "test", - "url", - [work], - AdminAnnotator(None, db.default_library(), test_mode=True), - ) - [entry] = feedparser.parse(str(feed))["entries"] - [suppress_link] = [ - x - for x in entry["links"] - if x["rel"] == "http://librarysimplified.org/terms/rel/hide" - ] - assert lp.identifier.identifier in suppress_link["href"] - unsuppress_links = [ - x - for x in entry["links"] - if x["rel"] == "http://librarysimplified.org/terms/rel/restore" - ] - assert 0 == len(unsuppress_links) - - lp.suppressed = True - db.session.commit() - - feed = AcquisitionFeed( - db.session, - "test", - "url", - [work], - AdminAnnotator(None, db.default_library(), test_mode=True), - ) - [entry] = feedparser.parse(str(feed))["entries"] - [unsuppress_link] = [ - x - for x in entry["links"] - if x["rel"] == "http://librarysimplified.org/terms/rel/restore" - ] - assert lp.identifier.identifier in unsuppress_link["href"] - suppress_links = [ - x - for x in entry["links"] - if x["rel"] == "http://librarysimplified.org/terms/rel/hide" - ] - assert 0 == len(suppress_links) - - def test_feed_includes_edit_link(self, db: DatabaseTransactionFixture): - work = db.work(with_open_access_download=True) - lp = work.license_pools[0] - - feed = AcquisitionFeed( - db.session, - "test", - "url", - [work], - AdminAnnotator(None, db.default_library(), test_mode=True), - ) - [entry] = feedparser.parse(str(feed))["entries"] - [edit_link] = [x for x in entry["links"] if x["rel"] == "edit"] - assert lp.identifier.identifier in edit_link["href"] - - def test_suppressed_feed(self, db: DatabaseTransactionFixture): - # Test the ability to show a paginated feed of suppressed works. - - work1 = db.work(with_open_access_download=True) - work1.license_pools[0].suppressed = True - - work2 = db.work(with_open_access_download=True) - work2.license_pools[0].suppressed = True - - # This work won't be included in the feed since its - # suppressed pool is superceded. - work3 = db.work(with_open_access_download=True) - work3.license_pools[0].suppressed = True - work3.license_pools[0].superceded = True - - pagination = Pagination(size=1) - annotator = MockAnnotator(db.default_library()) - titles = [work1.title, work2.title] - - def make_page(pagination): - return AdminFeed.suppressed( - _db=db.session, - title="Hidden works", - url=db.fresh_url(), - annotator=annotator, - pagination=pagination, - ) - - first_page = make_page(pagination) - parsed = feedparser.parse(str(first_page)) - assert 1 == len(parsed["entries"]) - assert parsed["entries"][0].title in titles - titles.remove(parsed["entries"][0].title) - [remaining_title] = titles - - # Make sure the links are in place. 
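# A minimal, self-contained example of the inspection pattern used by the
# links() helper and the assertions below: feedparser flattens every <link>
# into a dict with "rel"/"href" keys, so the tests simply filter on "rel".
# The Atom document here is a tiny hand-written stand-in for a generated feed.
import feedparser

ATOM = """<?xml version="1.0"?>
<feed xmlns="http://www.w3.org/2005/Atom">
  <title>Hidden works</title>
  <link rel="start" href="http://example.test/groups/" title="All Books"/>
  <link rel="next" href="http://example.test/suppressed?after=1"/>
</feed>"""

parsed = feedparser.parse(ATOM)
by_rel = {}
for link in parsed["feed"]["links"]:
    by_rel.setdefault(link["rel"], []).append(link["href"])

assert by_rel["next"] == ["http://example.test/suppressed?after=1"]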
- [start] = self.links(parsed, "start") - assert annotator.groups_url(None) == start["href"] - assert annotator.top_level_title() == start["title"] - - [up] = self.links(parsed, "up") - assert annotator.groups_url(None) == up["href"] - assert annotator.top_level_title() == up["title"] - - [next_link] = self.links(parsed, "next") - assert annotator.suppressed_url(pagination.next_page) == next_link["href"] - - # This was the first page, so no previous link. - assert [] == self.links(parsed, "previous") - - # Now get the second page and make sure it has a 'previous' link. - second_page = make_page(pagination.next_page) - parsed = feedparser.parse(str(second_page)) - [previous] = self.links(parsed, "previous") - assert annotator.suppressed_url(pagination) == previous["href"] - assert 1 == len(parsed["entries"]) - assert remaining_title == parsed["entries"][0]["title"] - - # The third page is empty. - third_page = make_page(pagination.next_page.next_page) - parsed = feedparser.parse(str(third_page)) - [previous] = self.links(parsed, "previous") - assert annotator.suppressed_url(pagination.next_page) == previous["href"] - assert 0 == len(parsed["entries"]) - - -class MockAnnotator(AdminAnnotator): - def __init__(self, library): - super().__init__(None, library, test_mode=True) - - def groups_url(self, lane): - if lane: - name = lane.name - else: - name = "" - return "http://groups/%s" % name - - def suppressed_url(self, pagination): - base = "http://suppressed/" - sep = "?" - if pagination: - base += sep + pagination.query_string - return base - - def annotate_feed(self, feed): - super().annotate_feed(feed) diff --git a/tests/api/feed/equivalence/test_feed_equivalence.py b/tests/api/feed/equivalence/test_feed_equivalence.py deleted file mode 100644 index 53e9db82e1..0000000000 --- a/tests/api/feed/equivalence/test_feed_equivalence.py +++ /dev/null @@ -1,317 +0,0 @@ -from __future__ import annotations - -from lxml import etree - -from api.admin.opds import AdminAnnotator as OldAdminAnnotator -from api.admin.opds import AdminFeed as OldAdminFeed -from api.app import app -from api.opds import LibraryAnnotator as OldLibraryAnnotator -from api.opds import LibraryLoanAndHoldAnnotator as OldLibraryLoanAndHoldAnnotator -from core.feed.acquisition import OPDSAcquisitionFeed -from core.feed.admin import AdminFeed -from core.feed.annotator.admin import AdminAnnotator -from core.feed.annotator.circulation import LibraryAnnotator -from core.feed.navigation import NavigationFeed -from core.lane import Facets, Pagination -from core.model.work import Work -from core.opds import AcquisitionFeed -from core.opds import NavigationFeed as OldNavigationFeed -from tests.api.feed.test_library_annotator import ( # noqa - LibraryAnnotatorFixture, - annotator_fixture, - patch_url_for, -) -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.search import ExternalSearchFixture -from tests.mocks.search import ExternalSearchIndexFake - - -def format_tags(tags1, tags2): - result = "" - result += "TAG1\n" - for tag in tags1: - result += f"{tag[1:]}\n" - result += "TAG2\n" - for tag in tags2: - result += f"{tag[1:]}\n" - return result - - -def assert_equal_xmls(xml1: str | etree._Element, xml2: str | etree._Element): - if isinstance(xml1, str) or isinstance(xml1, bytes): - parsed1 = etree.fromstring(xml1) - else: - parsed1 = xml1 - - if isinstance(xml2, str) or isinstance(xml2, bytes): - parsed2 = etree.fromstring(xml2) - else: - parsed2 = xml2 - - # Pull out comparable information - tags1 = 
[(tag, tag.tag, tag.text, tag.attrib) for tag in parsed1[1:]] - tags2 = [(tag, tag.tag, tag.text, tag.attrib) for tag in parsed2[1:]] - # Sort the tags on the information so it's easy to compare sequentially - tags1.sort(key=lambda x: (x[1], x[2] or "", x[3].values())) - tags2.sort(key=lambda x: (x[1], x[2] or "", x[3].values())) - - assert len(tags1) == len(tags2), format_tags(tags1, tags2) - - # Assert every tag is equal - for ix, tag1 in enumerate(tags1): - tag2 = tags2[ix] - # Comparable information should be equivalent - if tag1[1:] == tag2[1:]: - assert_equal_xmls(tag1[0], tag2[0]) - break - else: - assert False, format_tags([tag1], tags2) - - -class TestFeedEquivalence: - def test_page_feed( - self, - annotator_fixture: LibraryAnnotatorFixture, - external_search_fixture: ExternalSearchFixture, - ): - db = annotator_fixture.db - lane = annotator_fixture.lane - library = db.default_library() - - work1 = db.work(with_license_pool=True) - work2 = db.work(with_open_access_download=True) - - search_index = ExternalSearchIndexFake(db.session) - search_index.mock_query_works_multi([work1, work2]) - - with app.test_request_context("/"): - new_annotator = LibraryAnnotator(None, lane, library) - new_feed = OPDSAcquisitionFeed.page( - db.session, - lane.display_name, - "http://test-url/", - lane, - new_annotator, - Facets.default(library), - Pagination.default(), - search_index, - ) - - old_annotator = OldLibraryAnnotator(None, lane, library) - old_feed = AcquisitionFeed.page( - db.session, - lane.display_name, - "http://test-url/", - lane, - old_annotator, - Facets.default(library), - Pagination.default(), - search_engine=search_index, - ) - - assert_equal_xmls(str(old_feed), new_feed.serialize()) - - def test_page_feed_with_loan_annotator( - self, annotator_fixture: LibraryAnnotatorFixture - ): - db = annotator_fixture.db - library = db.default_library() - work1 = db.work(with_license_pool=True) - patron = db.patron() - work1.active_license_pool(library).loan_to(patron) - - with app.test_request_context("/"): - new_feed = OPDSAcquisitionFeed.active_loans_for(None, patron).as_response() - old_feed = OldLibraryLoanAndHoldAnnotator.active_loans_for(None, patron) - - assert_equal_xmls(str(old_feed), str(new_feed)) - - def test_groups_feed( - self, - annotator_fixture: LibraryAnnotatorFixture, - external_search_fixture: ExternalSearchFixture, - ): - db = annotator_fixture.db - lane = annotator_fixture.lane - de_lane = db.lane(parent=lane, languages=["de"]) - library = db.default_library() - - work1 = db.work(with_license_pool=True) - work2 = db.work(with_open_access_download=True, language="de") - - search_index = ExternalSearchIndexFake(db.session) - search_index.mock_query_works_multi([work1, work2], [work1, work2]) - - patron = db.patron() - work1.active_license_pool(library).loan_to(patron) - - with app.test_request_context("/"): - new_annotator = LibraryAnnotator(None, lane, library) - new_feed = OPDSAcquisitionFeed.groups( - db.session, - "Groups", - "http://groups/", - lane, - new_annotator, - Pagination.default(), - Facets.default(library), - search_index, - ) - - old_annotator = OldLibraryAnnotator(None, lane, library) - old_feed = AcquisitionFeed.groups( - db.session, - "Groups", - "http://groups/", - lane, - old_annotator, - pagination=Pagination.default(), - facets=Facets.default(library), - search_engine=search_index, - ) - - assert_equal_xmls(str(old_feed), new_feed.serialize()) - - def test_search_feed( - self, - annotator_fixture: LibraryAnnotatorFixture, - external_search_fixture: 
ExternalSearchFixture, - ): - db = annotator_fixture.db - lane = annotator_fixture.lane - de_lane = db.lane(parent=lane, languages=["de"]) - library = db.default_library() - - work1 = db.work(with_license_pool=True) - work2 = db.work(with_open_access_download=True, language="de") - - search_index = ExternalSearchIndexFake(db.session) - search_index.mock_query_works_multi([work1, work2]) - - patron = db.patron() - work1.active_license_pool(library).loan_to(patron) - - with app.test_request_context("/"): - new_annotator = LibraryAnnotator(None, lane, library) - new_feed = OPDSAcquisitionFeed.search( # type: ignore[union-attr] - db.session, - "Search", - "http://search/", - lane, - search_index, - "query", - new_annotator, - Pagination.default(), - Facets.default(library), - ).as_response() - - old_annotator = OldLibraryAnnotator(None, lane, library) - old_feed = AcquisitionFeed.search( - db.session, - "Search", - "http://search/", - lane, - search_index, - "query", - Pagination.default(), - Facets.default(library), - old_annotator, - ) - - assert_equal_xmls(str(old_feed), str(new_feed)) - - def test_from_query_feed( - self, - annotator_fixture: LibraryAnnotatorFixture, - external_search_fixture: ExternalSearchFixture, - ): - db = annotator_fixture.db - lane = annotator_fixture.lane - de_lane = db.lane(parent=lane, languages=["de"]) - library = db.default_library() - - work1 = db.work(with_license_pool=True) - work2 = db.work(with_open_access_download=True, language="de") - - search_index = ExternalSearchIndexFake(db.session) - search_index.mock_query_works_multi([work1, work2]) - - patron = db.patron() - work1.active_license_pool(library).loan_to(patron) - - def url_fn(page): - return f"http://pagination?page={page}" - - query = db.session.query(Work) - - with app.test_request_context("/"): - new_annotator = LibraryAnnotator(None, lane, library) - new_feed = OPDSAcquisitionFeed.from_query( - query, - db.session, - "Search", - "http://search/", - Pagination(), - url_fn, - new_annotator, - ) - - old_annotator = OldLibraryAnnotator(None, lane, library) - old_feed = AcquisitionFeed.from_query( - query, - db.session, - "Search", - "http://search/", - Pagination(), - url_fn, - old_annotator, - ) - - assert_equal_xmls(str(old_feed), new_feed.serialize()) - - -class TestAdminAnnotator: - def test_suppressed( - self, - annotator_fixture: LibraryAnnotatorFixture, - external_search_fixture: ExternalSearchFixture, - ): - db = annotator_fixture.db - library = db.default_library() - - work1 = db.work(with_open_access_download=True) - pool = work1.active_license_pool() - pool.suppressed = True - - with app.test_request_context("/"): - new_annotator = AdminAnnotator(None, library) - new_feed = AdminFeed.suppressed( - db.session, "", "http://verbose", new_annotator - ) - - old_annotator = OldAdminAnnotator(None, library) - old_feed = OldAdminFeed.suppressed( - db.session, "", "http://verbose", old_annotator - ) - - assert_equal_xmls(str(old_feed), new_feed.serialize()) - - -class TestNavigationFeed: - def test_feed(self, db: DatabaseTransactionFixture): - lane = db.lane() - child1 = db.lane(parent=lane) - child2 = db.lane(parent=lane) - - with app.test_request_context("/"): - new_annotator = LibraryAnnotator(None, lane, db.default_library()) - new_feed = NavigationFeed.navigation( - db.session, "Navigation", "http://navigation", lane, new_annotator - ) - - old_annotator = OldLibraryAnnotator(None, lane, db.default_library()) - old_feed = OldNavigationFeed.navigation( - db.session, "Navigation", 
"http://navigation", lane, old_annotator - ) - - assert_equal_xmls(str(old_feed), str(new_feed.as_response())) diff --git a/tests/api/feed/test_annotators.py b/tests/api/feed/test_annotators.py index 8b9dc57a3b..d3c4f5929f 100644 --- a/tests/api/feed/test_annotators.py +++ b/tests/api/feed/test_annotators.py @@ -1,22 +1,31 @@ from datetime import timedelta +import feedparser +import pytest + from core.classifier import Classifier +from core.external_search import WorkSearchResult from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.base import Annotator from core.feed.annotator.circulation import CirculationManagerAnnotator from core.feed.annotator.verbose import VerboseAnnotator from core.feed.types import FeedEntryType, Link, WorkEntry from core.feed.util import strftime +from core.lane import WorkList from core.model import tuple_to_numericrange from core.model.classification import Subject +from core.model.constants import MediaTypes from core.model.contributor import Contributor from core.model.datasource import DataSource from core.model.edition import Edition +from core.model.formats import FormatPriorities +from core.model.integration import IntegrationConfiguration +from core.model.licensing import DeliveryMechanism, LicensePool, RightsStatus from core.model.measurement import Measurement from core.model.resource import Hyperlink, Resource from core.model.work import Work -from core.util.datetime_helpers import utc_now -from tests.core.test_opds import TestAnnotatorsFixture, annotators_fixture # noqa +from core.util.datetime_helpers import datetime_utc, utc_now +from tests.api.feed.fixtures import PatchedUrlFor, patch_url_for # noqa from tests.fixtures.database import ( # noqa DatabaseTransactionFixture, DBStatementCounter, @@ -24,12 +33,8 @@ class TestAnnotators: - def test_all_subjects(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + def test_all_subjects(self, db: DatabaseTransactionFixture): + session = db.session work = db.work(genre="Fiction", with_open_access_download=True) edition = work.presentation_edition @@ -130,19 +135,14 @@ def test_content(self, db: DatabaseTransactionFixture): assert Annotator.content(None) == "" - def test_appeals(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + def test_appeals(self, db: DatabaseTransactionFixture): + session = db.session work = db.work(with_open_access_download=True) work.appeal_language = 0.1 work.appeal_character = 0.2 work.appeal_story = 0.3 work.appeal_setting = 0.4 - work.calculate_opds_entries(verbose=True) category_tags = VerboseAnnotator.categories(work) appeal_tags = category_tags[Work.APPEALS_URI] @@ -155,8 +155,7 @@ def test_appeals(self, annotators_fixture: TestAnnotatorsFixture): actual = [(x["term"], x["label"], x["ratingValue"]) for x in appeal_tags] assert set(expect) == set(actual) - def test_authors(self, annotators_fixture: TestAnnotatorsFixture): - db = annotators_fixture.db + def test_authors(self, db: DatabaseTransactionFixture): edition = db.edition() [c_orig] = list(edition.contributors) @@ -176,12 +175,8 @@ def test_authors(self, annotators_fixture: TestAnnotatorsFixture): c_orig.sort_name, } - def test_detailed_author(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, 
- ) + def test_detailed_author(self, db: DatabaseTransactionFixture): + session = db.session c, ignore = db.contributor("Familyname, Givenname") c.display_name = "Givenname Familyname" @@ -204,14 +199,8 @@ def test_detailed_author(self, annotators_fixture: TestAnnotatorsFixture): [same_tag] = VerboseAnnotator.authors(work.presentation_edition)["authors"] assert same_tag.dict() == author.dict() - def test_duplicate_author_names_are_ignored( - self, annotators_fixture: TestAnnotatorsFixture - ): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + def test_duplicate_author_names_are_ignored(self, db: DatabaseTransactionFixture): + session = db.session # Ignores duplicate author names work = db.work(with_license_pool=True) @@ -224,13 +213,9 @@ def test_duplicate_author_names_are_ignored( assert 1 == len(Annotator.authors(edition)["authors"]) def test_all_annotators_mention_every_relevant_author( - self, annotators_fixture: TestAnnotatorsFixture + self, db: DatabaseTransactionFixture ): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + session = db.session work = db.work(authors=[], with_license_pool=True) edition = work.presentation_edition @@ -268,18 +253,13 @@ def test_all_annotators_mention_every_relevant_author( assert 0 == len(tags["contributors"]) assert [None, None] == [x.role for x in (tags["authors"])] - def test_ratings(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + def test_ratings(self, db: DatabaseTransactionFixture): + session = db.session work = db.work(with_license_pool=True, with_open_access_download=True) work.quality = 1.0 / 3 work.popularity = 0.25 work.rating = 0.6 - work.calculate_opds_entries(verbose=True) entry = OPDSAcquisitionFeed._create_entry( work, work.active_license_pool(), @@ -303,16 +283,11 @@ def test_ratings(self, annotators_fixture: TestAnnotatorsFixture): ] assert set(expected) == set(ratings) - def test_subtitle(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + def test_subtitle(self, db: DatabaseTransactionFixture): + session = db.session work = db.work(with_license_pool=True, with_open_access_download=True) work.presentation_edition.subtitle = "Return of the Jedi" - work.calculate_opds_entries() feed = OPDSAcquisitionFeed( db.fresh_str(), @@ -328,7 +303,6 @@ def test_subtitle(self, annotators_fixture: TestAnnotatorsFixture): # If there's no subtitle, the subtitle tag isn't included. 
work.presentation_edition.subtitle = None - work.calculate_opds_entries() feed = OPDSAcquisitionFeed( db.fresh_str(), db.fresh_url(), @@ -340,17 +314,12 @@ def test_subtitle(self, annotators_fixture: TestAnnotatorsFixture): assert computed is not None assert computed.subtitle == None - def test_series(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + def test_series(self, db: DatabaseTransactionFixture): + session = db.session work = db.work(with_license_pool=True, with_open_access_download=True) work.presentation_edition.series = "Harry Otter and the Lifetime of Despair" work.presentation_edition.series_position = 4 - work.calculate_opds_entries() feed = OPDSAcquisitionFeed( db.fresh_str(), @@ -369,7 +338,6 @@ def test_series(self, annotators_fixture: TestAnnotatorsFixture): # The series position can be 0, for a prequel for example. work.presentation_edition.series_position = 0 - work.calculate_opds_entries() feed = OPDSAcquisitionFeed( db.fresh_str(), @@ -387,7 +355,6 @@ def test_series(self, annotators_fixture: TestAnnotatorsFixture): # If there's no series title, the series tag isn't included. work.presentation_edition.series = None - work.calculate_opds_entries() feed = OPDSAcquisitionFeed( db.fresh_str(), db.fresh_url(), @@ -401,12 +368,8 @@ def test_series(self, annotators_fixture: TestAnnotatorsFixture): # No series name assert Annotator.series(None, "") == None - def test_samples(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) + def test_samples(self, db: DatabaseTransactionFixture): + session = db.session work = db.work(with_license_pool=True) edition = work.presentation_edition @@ -488,3 +451,243 @@ def test_annotate_work_entry(self, db: DatabaseTransactionFixture): # Missing values assert data.language == None assert data.updated == FeedEntryType(text=strftime(now)) + + +class CirculationManagerAnnotatorFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + self.work = db.work(with_open_access_download=True) + self.lane = db.lane(display_name="Fantasy") + self.annotator = CirculationManagerAnnotator( + self.lane, + ) + + +@pytest.fixture(scope="function") +def circulation_fixture( + db: DatabaseTransactionFixture, patch_url_for: PatchedUrlFor +) -> CirculationManagerAnnotatorFixture: + return CirculationManagerAnnotatorFixture(db) + + +class TestCirculationManagerAnnotator: + def test_open_access_link( + self, circulation_fixture: CirculationManagerAnnotatorFixture + ): + # The resource URL associated with a LicensePoolDeliveryMechanism + # becomes the `href` of an open-access `link` tag. + pool = circulation_fixture.work.license_pools[0] + [lpdm] = pool.delivery_mechanisms + + # Temporarily disconnect the Resource's Representation so we + # can verify that this works even if there is no + # Representation. + representation = lpdm.resource.representation + lpdm.resource.representation = None + lpdm.resource.url = "http://foo.com/thefile.epub" + link_tag = circulation_fixture.annotator.open_access_link(pool, lpdm) + assert lpdm.resource.url == link_tag.href + + # The dcterms:rights attribute may provide a more detailed + # explanation of the book's copyright status. + assert lpdm.rights_status.uri == link_tag.rights + + # If the Resource has a Representation, the public URL is used + # instead of the original Resource URL. 
+ lpdm.resource.representation = representation + link_tag = circulation_fixture.annotator.open_access_link(pool, lpdm) + assert representation.public_url == link_tag.href + + # If there is no Representation, the Resource's original URL is used. + lpdm.resource.representation = None + link_tag = circulation_fixture.annotator.open_access_link(pool, lpdm) + assert lpdm.resource.url == link_tag.href + + def test_default_lane_url( + self, circulation_fixture: CirculationManagerAnnotatorFixture + ): + default_lane_url = circulation_fixture.annotator.default_lane_url() + assert "feed" in default_lane_url + assert str(circulation_fixture.lane.id) not in default_lane_url + + def test_feed_url(self, circulation_fixture: CirculationManagerAnnotatorFixture): + feed_url_fantasy = circulation_fixture.annotator.feed_url( + circulation_fixture.lane + ) + assert "feed" in feed_url_fantasy + assert str(circulation_fixture.lane.id) in feed_url_fantasy + assert ( + str(circulation_fixture.db.default_library().name) not in feed_url_fantasy + ) + + def test_navigation_url( + self, circulation_fixture: CirculationManagerAnnotatorFixture + ): + navigation_url_fantasy = circulation_fixture.annotator.navigation_url( + circulation_fixture.lane + ) + assert "navigation" in navigation_url_fantasy + assert str(circulation_fixture.lane.id) in navigation_url_fantasy + + def test_visible_delivery_mechanisms( + self, circulation_fixture: CirculationManagerAnnotatorFixture + ): + # By default, all delivery mechanisms are visible + [pool] = circulation_fixture.work.license_pools + [epub] = list(circulation_fixture.annotator.visible_delivery_mechanisms(pool)) + assert "application/epub+zip" == epub.delivery_mechanism.content_type + + # Create an annotator that hides PDFs. + no_pdf = CirculationManagerAnnotator( + circulation_fixture.lane, + hidden_content_types=["application/pdf"], + ) + + # This has no effect on the EPUB. + [epub2] = list(no_pdf.visible_delivery_mechanisms(pool)) + assert epub == epub2 + + # Create an annotator that hides EPUBs. + no_epub = CirculationManagerAnnotator( + circulation_fixture.lane, + hidden_content_types=["application/epub+zip"], + ) + + # The EPUB is hidden, and this license pool has no delivery + # mechanisms. + assert [] == list(no_epub.visible_delivery_mechanisms(pool)) + + def test_visible_delivery_mechanisms_configured_0( + self, circulation_fixture: CirculationManagerAnnotatorFixture + ): + """Test that configuration options do affect OPDS feeds. 
+ Exhaustive testing of different configuration values isn't necessary + here: See the tests for FormatProperties to see the actual semantics + of the configuration values.""" + edition = circulation_fixture.db.edition() + pool: LicensePool = circulation_fixture.db.licensepool(edition) + + pool.set_delivery_mechanism( + MediaTypes.EPUB_MEDIA_TYPE, + DeliveryMechanism.NO_DRM, + RightsStatus.UNKNOWN, + None, + ) + pool.set_delivery_mechanism( + MediaTypes.EPUB_MEDIA_TYPE, + DeliveryMechanism.LCP_DRM, + RightsStatus.UNKNOWN, + None, + ) + pool.set_delivery_mechanism( + MediaTypes.PDF_MEDIA_TYPE, + DeliveryMechanism.LCP_DRM, + RightsStatus.UNKNOWN, + None, + ) + + config: IntegrationConfiguration = pool.collection.integration_configuration + DatabaseTransactionFixture.set_settings( + config, + **{ + FormatPriorities.PRIORITIZED_DRM_SCHEMES_KEY: [ + f"{DeliveryMechanism.LCP_DRM}", + ], + FormatPriorities.PRIORITIZED_CONTENT_TYPES_KEY: [ + f"{MediaTypes.PDF_MEDIA_TYPE}" + ], + }, + ) + circulation_fixture.db.session.commit() + + annotator = CirculationManagerAnnotator( + circulation_fixture.lane, + hidden_content_types=[], + ) + + # DRM-free types appear first. + # Then our LCP'd PDF. + # Then our LCP'd EPUB. + # Then our Adobe DRM'd EPUB. + results = annotator.visible_delivery_mechanisms(pool) + assert results[0].delivery_mechanism.content_type == MediaTypes.EPUB_MEDIA_TYPE + assert results[0].delivery_mechanism.drm_scheme == None + assert results[1].delivery_mechanism.content_type == MediaTypes.PDF_MEDIA_TYPE + assert results[1].delivery_mechanism.drm_scheme == DeliveryMechanism.LCP_DRM + assert results[2].delivery_mechanism.content_type == MediaTypes.EPUB_MEDIA_TYPE + assert results[2].delivery_mechanism.drm_scheme == DeliveryMechanism.LCP_DRM + assert results[3].delivery_mechanism.content_type == MediaTypes.EPUB_MEDIA_TYPE + assert results[3].delivery_mechanism.drm_scheme == DeliveryMechanism.ADOBE_DRM + assert len(results) == 4 + + def test_rights_attributes( + self, circulation_fixture: CirculationManagerAnnotatorFixture + ): + m = circulation_fixture.annotator.rights_attributes + + # Given a LicensePoolDeliveryMechanism with a RightsStatus, + # rights_attributes creates a dictionary mapping the dcterms:rights + # attribute to the URI associated with the RightsStatus. + lp = circulation_fixture.db.licensepool(None) + [lpdm] = lp.delivery_mechanisms + assert {"rights": lpdm.rights_status.uri} == m(lpdm) + + # If any link in the chain is broken, rights_attributes returns + # an empty dictionary. + old_uri = lpdm.rights_status.uri + lpdm.rights_status.uri = None + assert {} == m(lpdm) + lpdm.rights_status.uri = old_uri + + lpdm.rights_status = None + assert {} == m(lpdm) + + assert {} == m(None) + + def test_work_entry_includes_updated( + self, circulation_fixture: CirculationManagerAnnotatorFixture + ): + # By default, the 'updated' date is the value of + # Work.last_update_time. + work = circulation_fixture.db.work(with_open_access_download=True) + # This date is later, but we don't check it. 
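# ---------------------------------------------------------------------------
# Illustrative sketch (not part of this patch, and not the repository's
# FormatPriorities implementation): a minimal ordering function showing the
# kind of prioritisation asserted in test_visible_delivery_mechanisms_configured_0
# above, assuming DRM-free mechanisms always sort first and that the priority
# lists are consulted in order. All names below are hypothetical.
from typing import List, Optional, Tuple


def order_mechanisms(
    mechanisms: List[Tuple[str, Optional[str]]],
    prioritized_drm_schemes: List[str],
    prioritized_content_types: List[str],
) -> List[Tuple[str, Optional[str]]]:
    """Order (content_type, drm_scheme) pairs: DRM-free first, then by the
    position of the DRM scheme and the content type in the priority lists."""

    def rank(item: Tuple[str, Optional[str]]) -> Tuple[int, int]:
        content_type, drm_scheme = item
        if drm_scheme is None:
            drm_rank = -1  # DRM-free always wins
        elif drm_scheme in prioritized_drm_schemes:
            drm_rank = prioritized_drm_schemes.index(drm_scheme)
        else:
            drm_rank = len(prioritized_drm_schemes)
        if content_type in prioritized_content_types:
            type_rank = prioritized_content_types.index(content_type)
        else:
            type_rank = len(prioritized_content_types)
        return (drm_rank, type_rank)

    return sorted(mechanisms, key=rank)


# With LCP prioritised as a DRM scheme and PDF prioritised as a content type,
# a DRM-free EPUB sorts first, then the LCP PDF, then the LCP EPUB, then the
# Adobe EPUB -- the same order the test asserts.
# ---------------------------------------------------------------------------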
+ work.license_pools[0].availability_time = datetime_utc(2019, 1, 1) + work.last_update_time = datetime_utc(2018, 2, 4) + + def entry_for(work): + worklist = WorkList() + worklist.initialize(None) + annotator = CirculationManagerAnnotator(worklist) + feed = ( + OPDSAcquisitionFeed("test", "url", [work], annotator).as_response().data + ) + [entry] = feedparser.parse(str(feed)).entries + return entry + + entry = entry_for(work) + assert "2018-02-04" in entry.get("updated") + + # If the work passed in is a WorkSearchResult that indicates + # the search index found a later 'update time', then the later + # time is used. This value isn't always present -- it's only + # calculated when the list is being _ordered_ by 'update time'. + # Otherwise it's too slow to bother. + class MockHit: + def __init__(self, last_update): + # Store the time the way we get it from Opensearch -- + # as a single-element list containing seconds since epoch. + self.last_update = [ + (last_update - datetime_utc(1970, 1, 1)).total_seconds() + ] + + hit = MockHit(datetime_utc(2018, 2, 5)) + result = WorkSearchResult(work, hit) + entry = entry_for(result) + assert "2018-02-05" in entry.get("updated") + + # Any 'update time' provided by Opensearch is used even if + # it's clearly earlier than Work.last_update_time. + hit = MockHit(datetime_utc(2017, 1, 1)) + result._hit = hit + entry = entry_for(result) + assert "2017-01-01" in entry.get("updated") diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py index 694fabd407..9afa8c8937 100644 --- a/tests/api/feed/test_library_annotator.py +++ b/tests/api/feed/test_library_annotator.py @@ -23,6 +23,7 @@ from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.circulation import LibraryAnnotator from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator +from core.feed.opds import UnfulfillableWork from core.feed.types import FeedData, WorkEntry from core.feed.util import strftime from core.lane import Facets, FacetsWithEntryPoint, Pagination @@ -39,7 +40,6 @@ RightsStatus, Work, ) -from core.opds import UnfulfillableWork from core.opds_import import OPDSXMLParser from core.util.datetime_helpers import utc_now from core.util.flask_util import OPDSFeedResponse @@ -799,7 +799,7 @@ def test_work_entry_includes_contributor_links( # When there are two authors, they each get a contributor link. 
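# ---------------------------------------------------------------------------
# Illustrative sketch (not part of this patch): the 'updated' handling that
# test_work_entry_includes_updated above exercises, assuming only what the
# test shows -- Opensearch reports last_update as a one-element list of
# seconds since the Unix epoch, and when the search index supplies such a
# value it is preferred over Work.last_update_time, even if it is earlier.
# Helper names here are hypothetical, not the application's real API.
from datetime import datetime, timezone
from typing import List, Optional


def hit_last_update(epoch_seconds: List[float]) -> datetime:
    # Opensearch-style value: a single-element list of epoch seconds.
    return datetime.fromtimestamp(epoch_seconds[0], tz=timezone.utc)


def choose_updated(
    work_last_update: datetime, hit_value: Optional[List[float]]
) -> datetime:
    # Prefer the search index's update time whenever one was calculated.
    return hit_last_update(hit_value) if hit_value else work_last_update
# ---------------------------------------------------------------------------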
work.presentation_edition.add_contributor("Oprah", Contributor.AUTHOR_ROLE) work.calculate_presentation( - PresentationCalculationPolicy(regenerate_opds_entries=True), + PresentationCalculationPolicy(), ) [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries contributor_links = [ @@ -818,7 +818,7 @@ def test_work_entry_includes_contributor_links( annotator_fixture.db.session.delete(work.presentation_edition.contributions[1]) annotator_fixture.db.session.commit() work.calculate_presentation( - PresentationCalculationPolicy(regenerate_opds_entries=True), + PresentationCalculationPolicy(), ) [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries assert [] == [l.link for l in entry.computed.authors if l.link] diff --git a/tests/api/feed/test_opds2_serializer.py b/tests/api/feed/test_opds2_serializer.py index f18dbcd958..b240a58fa7 100644 --- a/tests/api/feed/test_opds2_serializer.py +++ b/tests/api/feed/test_opds2_serializer.py @@ -148,6 +148,7 @@ def test__serialize_acquisition_link(self): acquisition = Acquisition( href="http://acquisition", rel="acquisition", + type="html", availability_status="available", availability_since="2022-02-02", availability_until="2222-02-02", @@ -166,6 +167,7 @@ def test__serialize_acquisition_link(self): assert result["href"] == acquisition.href assert result["rel"] == acquisition.rel + assert result["type"] == acquisition.type assert result["properties"] == dict( availability={ "since": "2022-02-02", diff --git a/tests/api/feed/test_opds_acquisition_feed.py b/tests/api/feed/test_opds_acquisition_feed.py index a4b794679e..8e2089b217 100644 --- a/tests/api/feed/test_opds_acquisition_feed.py +++ b/tests/api/feed/test_opds_acquisition_feed.py @@ -26,12 +26,11 @@ from core.feed.annotator.loan_and_hold import LibraryLoanAndHoldAnnotator from core.feed.annotator.verbose import VerboseAnnotator from core.feed.navigation import NavigationFeed -from core.feed.opds import BaseOPDSFeed +from core.feed.opds import BaseOPDSFeed, UnfulfillableWork from core.feed.types import FeedData, Link, WorkEntry, WorkEntryData from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList from core.model import DeliveryMechanism, Representation from core.model.constants import LinkRelations -from core.opds import MockUnfulfillableAnnotator from core.util.datetime_helpers import utc_now from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse from core.util.opds_writer import OPDSFeed, OPDSMessage @@ -39,6 +38,11 @@ from tests.fixtures.database import DatabaseTransactionFixture +class MockUnfulfillableAnnotator(Annotator): + def annotate_work_entry(self, *args, **kwargs): + raise UnfulfillableWork() + + class TestOPDSFeedProtocol: def test_entry_as_response(self, db: DatabaseTransactionFixture): work = db.work() @@ -490,7 +494,6 @@ def test_single_entry(self, db: DatabaseTransactionFixture): # If the edition was issued before 1980, no datetime formatting error # is raised. 
- work.simple_opds_entry = work.verbose_opds_entry = None five_hundred_years = datetime.timedelta(days=(500 * 365)) work.presentation_edition.issued = utc_now() - five_hundred_years @@ -560,7 +563,7 @@ def test_unfilfullable_work(self, db: DatabaseTransactionFixture): [pool] = work.license_pools response = OPDSAcquisitionFeed.single_entry( work, - MockUnfulfillableAnnotator(), # type: ignore[arg-type] + MockUnfulfillableAnnotator(), ) assert isinstance(response, OPDSMessage) expect = OPDSAcquisitionFeed.error_message( diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index 52930fdb5f..3a8af5028d 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -57,11 +57,11 @@ IntegrationLibraryConfiguration, ) from core.model.library import LibraryLogo -from core.opds import OPDSFeed from core.user_profile import ProfileController from core.util.authentication_for_opds import AuthenticationForOPDSDocument from core.util.datetime_helpers import utc_now from core.util.http import IntegrationException, RemoteIntegrationException +from core.util.opds_writer import OPDSFeed from core.util.problem_detail import ProblemDetail from tests.fixtures.announcements import AnnouncementFixture from tests.fixtures.library import LibraryFixture diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/test_controller_opdsfeed.py index 585b2a4315..345eadcb82 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/test_controller_opdsfeed.py @@ -15,9 +15,9 @@ from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.circulation import LibraryAnnotator from core.feed.navigation import NavigationFeed +from core.feed.opds import NavigationFacets from core.lane import Facets, FeaturedFacets, Pagination, SearchFacets, WorkList from core.model import CachedFeed, Edition -from core.opds import NavigationFacets from core.util.flask_util import Response from tests.fixtures.api_controller import CirculationControllerFixture, WorkSpec from tests.fixtures.library import LibraryFixture diff --git a/tests/api/test_opds.py b/tests/api/test_opds.py deleted file mode 100644 index 3decbf4040..0000000000 --- a/tests/api/test_opds.py +++ /dev/null @@ -1,2359 +0,0 @@ -import datetime -import re -from collections import defaultdict -from typing import Any, List -from unittest.mock import MagicMock, create_autospec - -import dateutil -import feedparser -import pytest -from freezegun import freeze_time -from lxml import etree - -from api.adobe_vendor_id import AuthdataUtility -from api.app import app -from api.circulation import BaseCirculationAPI, CirculationAPI, FulfillmentInfo -from api.lanes import ContributorLane -from api.novelist import NoveListAPI -from api.opds import ( - CirculationManagerAnnotator, - LibraryAnnotator, - LibraryLoanAndHoldAnnotator, -) -from api.problem_details import NOT_FOUND_ON_REMOTE -from core.analytics import Analytics -from core.classifier import ( # type: ignore[attr-defined] - Classifier, - Fantasy, - Urban_Fantasy, -) -from core.entrypoint import AudiobooksEntryPoint, EverythingEntryPoint -from core.external_search import WorkSearchResult -from core.lane import FacetsWithEntryPoint, WorkList -from core.lcp.credential import LCPCredentialFactory, LCPHashedPassphrase -from core.model import ( - CirculationEvent, - Contributor, - DataSource, - DeliveryMechanism, - ExternalIntegration, - Hyperlink, - MediaTypes, - PresentationCalculationPolicy, - Representation, - RightsStatus, - Work, - get_one, -) -from 
core.model.constants import EditionConstants, LinkRelations -from core.model.formats import FormatPriorities -from core.model.integration import IntegrationConfiguration -from core.model.licensing import LicensePool -from core.model.patron import Loan -from core.opds import AcquisitionFeed, MockAnnotator, UnfulfillableWork -from core.opds_import import OPDSXMLParser -from core.util.datetime_helpers import datetime_utc, utc_now -from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse -from core.util.opds_writer import AtomFeed, OPDSFeed -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.library import LibraryFixture -from tests.fixtures.search import ExternalSearchFixtureFake -from tests.fixtures.vendor_id import VendorIDFixture - -_strftime = AtomFeed._strftime - - -class CirculationManagerAnnotatorFixture: - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - self.work = db.work(with_open_access_download=True) - self.lane = db.lane(display_name="Fantasy") - self.annotator = CirculationManagerAnnotator( - self.lane, - test_mode=True, - ) - - -@pytest.fixture(scope="function") -def circulation_fixture( - db: DatabaseTransactionFixture, -) -> CirculationManagerAnnotatorFixture: - return CirculationManagerAnnotatorFixture(db) - - -class TestCirculationManagerAnnotator: - def test_open_access_link( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - # The resource URL associated with a LicensePoolDeliveryMechanism - # becomes the `href` of an open-access `link` tag. - pool = circulation_fixture.work.license_pools[0] - [lpdm] = pool.delivery_mechanisms - - # Temporarily disconnect the Resource's Representation so we - # can verify that this works even if there is no - # Representation. - representation = lpdm.resource.representation - lpdm.resource.representation = None - lpdm.resource.url = "http://foo.com/thefile.epub" - link_tag = circulation_fixture.annotator.open_access_link(pool, lpdm) - assert lpdm.resource.url == link_tag.get("href") - - # The dcterms:rights attribute may provide a more detailed - # explanation of the book's copyright status. - rights = link_tag.attrib["{http://purl.org/dc/terms/}rights"] - assert lpdm.rights_status.uri == rights - - # If the Resource has a Representation, the public URL is used - # instead of the original Resource URL. - lpdm.resource.representation = representation - link_tag = circulation_fixture.annotator.open_access_link(pool, lpdm) - assert representation.public_url == link_tag.get("href") - - # If there is no Representation, the Resource's original URL is used. 
- lpdm.resource.representation = None - link_tag = circulation_fixture.annotator.open_access_link(pool, lpdm) - assert lpdm.resource.url == link_tag.get("href") - - def test_default_lane_url( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - default_lane_url = circulation_fixture.annotator.default_lane_url() - assert "feed" in default_lane_url - assert str(circulation_fixture.lane.id) not in default_lane_url - - def test_feed_url(self, circulation_fixture: CirculationManagerAnnotatorFixture): - feed_url_fantasy = circulation_fixture.annotator.feed_url( - circulation_fixture.lane, dict(), dict() - ) - assert "feed" in feed_url_fantasy - assert str(circulation_fixture.lane.id) in feed_url_fantasy - assert circulation_fixture.db.default_library().name not in feed_url_fantasy - - def test_navigation_url( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - navigation_url_fantasy = circulation_fixture.annotator.navigation_url( - circulation_fixture.lane - ) - assert "navigation" in navigation_url_fantasy - assert str(circulation_fixture.lane.id) in navigation_url_fantasy - - def test_visible_delivery_mechanisms( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - # By default, all delivery mechanisms are visible - [pool] = circulation_fixture.work.license_pools - [epub] = list(circulation_fixture.annotator.visible_delivery_mechanisms(pool)) - assert "application/epub+zip" == epub.delivery_mechanism.content_type - - # Create an annotator that hides PDFs. - no_pdf = CirculationManagerAnnotator( - circulation_fixture.lane, - hidden_content_types=["application/pdf"], - test_mode=True, - ) - - # This has no effect on the EPUB. - [epub2] = list(no_pdf.visible_delivery_mechanisms(pool)) - assert epub == epub2 - - # Create an annotator that hides EPUBs. - no_epub = CirculationManagerAnnotator( - circulation_fixture.lane, - hidden_content_types=["application/epub+zip"], - test_mode=True, - ) - - # The EPUB is hidden, and this license pool has no delivery - # mechanisms. - assert [] == list(no_epub.visible_delivery_mechanisms(pool)) - - def test_visible_delivery_mechanisms_configured_0( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - """Test that configuration options do affect OPDS feeds. - Exhaustive testing of different configuration values isn't necessary - here: See the tests for FormatProperties to see the actual semantics - of the configuration values.""" - edition = circulation_fixture.db.edition() - pool: LicensePool = circulation_fixture.db.licensepool(edition) - - pool.set_delivery_mechanism( - MediaTypes.EPUB_MEDIA_TYPE, - DeliveryMechanism.NO_DRM, - RightsStatus.UNKNOWN, - None, - ) - pool.set_delivery_mechanism( - MediaTypes.EPUB_MEDIA_TYPE, - DeliveryMechanism.LCP_DRM, - RightsStatus.UNKNOWN, - None, - ) - pool.set_delivery_mechanism( - MediaTypes.PDF_MEDIA_TYPE, - DeliveryMechanism.LCP_DRM, - RightsStatus.UNKNOWN, - None, - ) - - config: IntegrationConfiguration = pool.collection.integration_configuration - DatabaseTransactionFixture.set_settings( - config, - **{ - FormatPriorities.PRIORITIZED_DRM_SCHEMES_KEY: [ - f"{DeliveryMechanism.LCP_DRM}", - ], - FormatPriorities.PRIORITIZED_CONTENT_TYPES_KEY: [ - f"{MediaTypes.PDF_MEDIA_TYPE}" - ], - }, - ) - circulation_fixture.db.session.commit() - - annotator = CirculationManagerAnnotator( - circulation_fixture.lane, - hidden_content_types=[], - test_mode=True, - ) - - # DRM-free types appear first. - # Then our LCP'd PDF. - # Then our LCP'd EPUB. 
- # Then our Adobe DRM'd EPUB. - results = annotator.visible_delivery_mechanisms(pool) - assert results[0].delivery_mechanism.content_type == MediaTypes.EPUB_MEDIA_TYPE - assert results[0].delivery_mechanism.drm_scheme == None - assert results[1].delivery_mechanism.content_type == MediaTypes.PDF_MEDIA_TYPE - assert results[1].delivery_mechanism.drm_scheme == DeliveryMechanism.LCP_DRM - assert results[2].delivery_mechanism.content_type == MediaTypes.EPUB_MEDIA_TYPE - assert results[2].delivery_mechanism.drm_scheme == DeliveryMechanism.LCP_DRM - assert results[3].delivery_mechanism.content_type == MediaTypes.EPUB_MEDIA_TYPE - assert results[3].delivery_mechanism.drm_scheme == DeliveryMechanism.ADOBE_DRM - assert len(results) == 4 - - def test_rights_attributes( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - m = circulation_fixture.annotator.rights_attributes - - # Given a LicensePoolDeliveryMechanism with a RightsStatus, - # rights_attributes creates a dictionary mapping the dcterms:rights - # attribute to the URI associated with the RightsStatus. - lp = circulation_fixture.db.licensepool(None) - [lpdm] = lp.delivery_mechanisms - assert {"{http://purl.org/dc/terms/}rights": lpdm.rights_status.uri} == m(lpdm) - - # If any link in the chain is broken, rights_attributes returns - # an empty dictionary. - old_uri = lpdm.rights_status.uri - lpdm.rights_status.uri = None - assert {} == m(lpdm) - lpdm.rights_status.uri = old_uri - - lpdm.rights_status = None - assert {} == m(lpdm) - - assert {} == m(None) - - def test_work_entry_includes_updated( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - # By default, the 'updated' date is the value of - # Work.last_update_time. - work = circulation_fixture.db.work(with_open_access_download=True) - # This date is later, but we don't check it. - work.license_pools[0].availability_time = datetime_utc(2019, 1, 1) - work.last_update_time = datetime_utc(2018, 2, 4) - - def entry_for(work): - worklist = WorkList() - worklist.initialize(None) - annotator = CirculationManagerAnnotator(worklist, test_mode=True) - feed = AcquisitionFeed( - circulation_fixture.db.session, "test", "url", [work], annotator - ) - feed = feedparser.parse(str(feed)) - [entry] = feed.entries - return entry - - entry = entry_for(work) - assert "2018-02-04" in entry.get("updated") - - # If the work passed in is a WorkSearchResult that indicates - # the search index found a later 'update time', then the later - # time is used. This value isn't always present -- it's only - # calculated when the list is being _ordered_ by 'update time'. - # Otherwise it's too slow to bother. - class MockHit: - def __init__(self, last_update): - # Store the time the way we get it from Opensearch -- - # as a single-element list containing seconds since epoch. - self.last_update = [ - (last_update - datetime_utc(1970, 1, 1)).total_seconds() - ] - - hit = MockHit(datetime_utc(2018, 2, 5)) - result = WorkSearchResult(work, hit) - entry = entry_for(result) - assert "2018-02-05" in entry.get("updated") - - # Any 'update time' provided by Opensearch is used even if - # it's clearly earlier than Work.last_update_time. - hit = MockHit(datetime_utc(2017, 1, 1)) - result._hit = hit - entry = entry_for(result) - assert "2017-01-01" in entry.get("updated") - - def test__single_entry_response( - self, circulation_fixture: CirculationManagerAnnotatorFixture - ): - # Test the helper method that makes OPDSEntryResponse objects. 
- - m = CirculationManagerAnnotator._single_entry_response - - # Test the case where we accept the defaults. - work = circulation_fixture.db.work() - url = circulation_fixture.db.fresh_url() - annotator = MockAnnotator() - response = m(circulation_fixture.db.session, work, annotator, url) - assert isinstance(response, OPDSEntryResponse) - assert "%s" % work.title in response.get_data(as_text=True) - - # By default, the representation is private but can be cached - # by the recipient. - assert True == response.private - assert 30 * 60 == response.max_age - - # Test the case where we override the defaults. - response = m( - circulation_fixture.db.session, - work, - annotator, - url, - max_age=12, - private=False, - ) - assert False == response.private - assert 12 == response.max_age - - # Test the case where the Work we thought we were providing is missing. - work = None - response = m(circulation_fixture.db.session, work, annotator, url) - - # Instead of an entry based on the Work, we get an empty feed. - assert isinstance(response, OPDSFeedResponse) - response_data = response.get_data(as_text=True) - assert "Unknown work" in response_data - assert "" not in response_data - - # Since it's an error message, the representation is private - # and not to be cached. - assert 0 == response.max_age - assert True == response.private - - -class LibraryAnnotatorFixture: - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - self.work = db.work(with_open_access_download=True) - parent = db.lane(display_name="Fiction", languages=["eng"], fiction=True) - self.lane = db.lane(display_name="Fantasy", languages=["eng"]) - self.lane.add_genre(Fantasy.name) - self.lane.parent = parent - self.annotator = LibraryAnnotator( - None, - self.lane, - db.default_library(), - test_mode=True, - top_level_title="Test Top Level Title", - ) - - # Initialize library with Adobe Vendor ID details - db.default_library().library_registry_short_name = "FAKE" - db.default_library().library_registry_shared_secret = "s3cr3t5" - - # A ContributorLane to test code that handles it differently. - self.contributor, ignore = db.contributor("Someone") - self.contributor_lane = ContributorLane( - db.default_library(), self.contributor, languages=["eng"], audiences=None - ) - - -@pytest.fixture(scope="function") -def annotator_fixture(db: DatabaseTransactionFixture) -> LibraryAnnotatorFixture: - return LibraryAnnotatorFixture(db) - - -class TestLibraryAnnotator: - def test_add_configuration_links( - self, - annotator_fixture: LibraryAnnotatorFixture, - library_fixture: LibraryFixture, - ): - mock_feed: List[Any] = [] - - # Set up configuration settings for links. - library = annotator_fixture.db.default_library() - settings = library_fixture.settings(library) - settings.terms_of_service = "http://terms/" # type: ignore[assignment] - settings.privacy_policy = "http://privacy/" # type: ignore[assignment] - settings.copyright = "http://copyright/" # type: ignore[assignment] - settings.about = "http://about/" # type: ignore[assignment] - settings.license = "http://license/" # type: ignore[assignment] - settings.help_email = "help@me" # type: ignore[assignment] - settings.help_web = "http://help/" # type: ignore[assignment] - - # Set up settings for navigation links. 
- settings.web_header_links = ["http://example.com/1", "http://example.com/2"] - settings.web_header_labels = ["one", "two"] - - annotator_fixture.annotator.add_configuration_links(mock_feed) - - assert 9 == len(mock_feed) - - mock_feed_links = sorted(mock_feed, key=lambda x: x.attrib["rel"]) - expected_links = [ - (link.attrib["href"], link.attrib.get("type")) - for link in mock_feed_links - if link.attrib["rel"] != "related" - ] - - # They are the links we'd expect. - assert [ - ("http://about/", "text/html"), - ("http://copyright/", "text/html"), - ("mailto:help@me", None), - ("http://help/", "text/html"), - ("http://license/", "text/html"), - ("http://privacy/", "text/html"), - ("http://terms/", "text/html"), - ] == expected_links - - # There are two navigation links. - navigation_links = [x for x in mock_feed_links if x.attrib["rel"] == "related"] - assert {"navigation"} == {x.attrib["role"] for x in navigation_links} - assert {"http://example.com/1", "http://example.com/2"} == { - x.attrib["href"] for x in navigation_links - } - assert {"one", "two"} == {x.attrib["title"] for x in navigation_links} - - def test_top_level_title(self, annotator_fixture: LibraryAnnotatorFixture): - assert "Test Top Level Title" == annotator_fixture.annotator.top_level_title() - - def test_group_uri_with_flattened_lane( - self, annotator_fixture: LibraryAnnotatorFixture - ): - spanish_lane = annotator_fixture.db.lane( - display_name="Spanish", languages=["spa"] - ) - flat_spanish_lane = dict( - {"lane": spanish_lane, "label": "All Spanish", "link_to_list_feed": True} - ) - spanish_work = annotator_fixture.db.work( - title="Spanish Book", with_license_pool=True, language="spa" - ) - lp = spanish_work.license_pools[0] - annotator_fixture.annotator.lanes_by_work[spanish_work].append( - flat_spanish_lane - ) - - feed_url = annotator_fixture.annotator.feed_url(spanish_lane) - group_uri = annotator_fixture.annotator.group_uri( - spanish_work, lp, lp.identifier - ) - assert (feed_url, "All Spanish") == group_uri - - def test_lane_url(self, annotator_fixture: LibraryAnnotatorFixture): - fantasy_lane_with_sublanes = annotator_fixture.db.lane( - display_name="Fantasy with sublanes", languages=["eng"] - ) - fantasy_lane_with_sublanes.add_genre(Fantasy.name) - - urban_fantasy_lane = annotator_fixture.db.lane(display_name="Urban Fantasy") - urban_fantasy_lane.add_genre(Urban_Fantasy.name) - fantasy_lane_with_sublanes.sublanes.append(urban_fantasy_lane) - - fantasy_lane_without_sublanes = annotator_fixture.db.lane( - display_name="Fantasy without sublanes", languages=["eng"] - ) - fantasy_lane_without_sublanes.add_genre(Fantasy.name) - - default_lane_url = annotator_fixture.annotator.lane_url(None) - assert default_lane_url == annotator_fixture.annotator.default_lane_url() - - facets = dict(entrypoint="Book") - default_lane_url = annotator_fixture.annotator.lane_url(None, facets=facets) - assert default_lane_url == annotator_fixture.annotator.default_lane_url( - facets=facets - ) - - groups_url = annotator_fixture.annotator.lane_url(fantasy_lane_with_sublanes) - assert groups_url == annotator_fixture.annotator.groups_url( - fantasy_lane_with_sublanes - ) - - groups_url = annotator_fixture.annotator.lane_url( - fantasy_lane_with_sublanes, facets=facets - ) - assert groups_url == annotator_fixture.annotator.groups_url( - fantasy_lane_with_sublanes, facets=facets - ) - - feed_url = annotator_fixture.annotator.lane_url(fantasy_lane_without_sublanes) - assert feed_url == annotator_fixture.annotator.feed_url( - 
fantasy_lane_without_sublanes - ) - - feed_url = annotator_fixture.annotator.lane_url( - fantasy_lane_without_sublanes, facets=facets - ) - assert feed_url == annotator_fixture.annotator.feed_url( - fantasy_lane_without_sublanes, facets=facets - ) - - def test_fulfill_link_issues_only_open_access_links_when_library_does_not_identify_patrons( - self, annotator_fixture: LibraryAnnotatorFixture - ): - # This library doesn't identify patrons. - annotator_fixture.annotator.identifies_patrons = False - - # Because of this, normal fulfillment links are not generated. - [pool] = annotator_fixture.work.license_pools - [lpdm] = pool.delivery_mechanisms - assert None == annotator_fixture.annotator.fulfill_link(pool, None, lpdm) - - # However, fulfillment links _can_ be generated with the - # 'open-access' link relation. - link = annotator_fixture.annotator.fulfill_link( - pool, None, lpdm, OPDSFeed.OPEN_ACCESS_REL - ) - assert OPDSFeed.OPEN_ACCESS_REL == link.attrib["rel"] - - # We freeze the test time here, because this test checks that the client token - # in the feed matches a generated client token. The client token contains an - # expiry date based on the current time, so this test can be flaky in a slow - # integration environment unless we make sure the clock does not change as this - # test is being performed. - @freeze_time("1867-07-01") - def test_fulfill_link_includes_device_registration_tags( - self, - annotator_fixture: LibraryAnnotatorFixture, - vendor_id_fixture: VendorIDFixture, - ): - """Verify that when Adobe Vendor ID delegation is included, the - fulfill link for an Adobe delivery mechanism includes instructions - on how to get a Vendor ID. - """ - vendor_id_fixture.initialize_adobe(annotator_fixture.db.default_library()) - [pool] = annotator_fixture.work.license_pools - identifier = pool.identifier - patron = annotator_fixture.db.patron() - old_credentials = list(patron.credentials) - - loan, ignore = pool.loan_to(patron, start=utc_now()) - adobe_delivery_mechanism, ignore = DeliveryMechanism.lookup( - annotator_fixture.db.session, "text/html", DeliveryMechanism.ADOBE_DRM - ) - other_delivery_mechanism, ignore = DeliveryMechanism.lookup( - annotator_fixture.db.session, "text/html", DeliveryMechanism.OVERDRIVE_DRM - ) - - # The fulfill link for non-Adobe DRM does not - # include the drm:licensor tag. - link = annotator_fixture.annotator.fulfill_link( - pool, loan, other_delivery_mechanism - ) - for child in link: - assert child.tag != "{http://librarysimplified.org/terms/drm}licensor" - - # No new Credential has been associated with the patron. - assert old_credentials == patron.credentials - - # The fulfill link for Adobe DRM includes information - # on how to get an Adobe ID in the drm:licensor tag. - link = annotator_fixture.annotator.fulfill_link( - pool, loan, adobe_delivery_mechanism - ) - licensor = link[-1] - assert "{http://librarysimplified.org/terms/drm}licensor" == licensor.tag - - # An Adobe ID-specific identifier has been created for the patron. - [adobe_id_identifier] = [ - x for x in patron.credentials if x not in old_credentials - ] - assert ( - AuthdataUtility.ADOBE_ACCOUNT_ID_PATRON_IDENTIFIER - == adobe_id_identifier.type - ) - assert DataSource.INTERNAL_PROCESSING == adobe_id_identifier.data_source.name - assert None == adobe_id_identifier.expires - - # The drm:licensor tag is the one we get by calling - # adobe_id_tags() on that identifier. 
- [expect] = annotator_fixture.annotator.adobe_id_tags( - adobe_id_identifier.credential - ) - assert etree.tostring(expect, method="c14n2") == etree.tostring( - licensor, method="c14n2" - ) - - def test_no_adobe_id_tags_when_vendor_id_not_configured( - self, annotator_fixture: LibraryAnnotatorFixture - ): - """When vendor ID delegation is not configured, adobe_id_tags() - returns an empty list. - """ - assert [] == annotator_fixture.annotator.adobe_id_tags("patron identifier") - - def test_adobe_id_tags_when_vendor_id_configured( - self, - annotator_fixture: LibraryAnnotatorFixture, - vendor_id_fixture: VendorIDFixture, - ): - """When vendor ID delegation is configured, adobe_id_tags() - returns a list containing a single tag. The tag contains - the information necessary to get an Adobe ID and a link to the local - DRM Device Management Protocol endpoint. - """ - library = annotator_fixture.db.default_library() - vendor_id_fixture.initialize_adobe(library) - patron_identifier = "patron identifier" - [element] = annotator_fixture.annotator.adobe_id_tags(patron_identifier) - assert "{http://librarysimplified.org/terms/drm}licensor" == element.tag - - key = "{http://librarysimplified.org/terms/drm}vendor" - assert vendor_id_fixture.TEST_VENDOR_ID == element.attrib[key] - - [token] = element - - assert "{http://librarysimplified.org/terms/drm}clientToken" == token.tag - # token.text is a token which we can decode, since we know - # the secret. - token = token.text - authdata = AuthdataUtility.from_config(library) - assert authdata is not None - decoded = authdata.decode_short_client_token(token) - expected_url = library.settings.website - assert (expected_url, patron_identifier) == decoded - - # If we call adobe_id_tags again we'll get a distinct tag - # object that renders to the same XML. - [same_tag] = annotator_fixture.annotator.adobe_id_tags(patron_identifier) - assert same_tag is not element - assert etree.tostring(element, method="c14n2") == etree.tostring( - same_tag, method="c14n2" - ) - - # If the Adobe Vendor ID configuration is present but - # incomplete, adobe_id_tags does nothing. - - # Delete one setting from the registration to check this. - vendor_id_fixture.registration.short_name = None - assert [] == annotator_fixture.annotator.adobe_id_tags("new identifier") - - def test_lcp_acquisition_link_contains_hashed_passphrase( - self, annotator_fixture: LibraryAnnotatorFixture - ): - [pool] = annotator_fixture.work.license_pools - identifier = pool.identifier - patron = annotator_fixture.db.patron() - - hashed_password = LCPHashedPassphrase("hashed password") - - # Setup LCP credentials - lcp_credential_factory = LCPCredentialFactory() - lcp_credential_factory.set_hashed_passphrase( - annotator_fixture.db.session, patron, hashed_password - ) - - loan, ignore = pool.loan_to(patron, start=utc_now()) - lcp_delivery_mechanism, ignore = DeliveryMechanism.lookup( - annotator_fixture.db.session, "text/html", DeliveryMechanism.LCP_DRM - ) - other_delivery_mechanism, ignore = DeliveryMechanism.lookup( - annotator_fixture.db.session, "text/html", DeliveryMechanism.OVERDRIVE_DRM - ) - - # The fulfill link for non-LCP DRM does not include the hashed_passphrase tag. 
- link = annotator_fixture.annotator.fulfill_link( - pool, loan, other_delivery_mechanism - ) - for child in link: - assert child.tag != "{%s}hashed_passphrase" % OPDSFeed.LCP_NS - - # The fulfill link for lcp DRM includes hashed_passphrase - link = annotator_fixture.annotator.fulfill_link( - pool, loan, lcp_delivery_mechanism - ) - hashed_passphrase = link[-1] - assert hashed_passphrase.tag == "{%s}hashed_passphrase" % OPDSFeed.LCP_NS - assert hashed_passphrase.text == hashed_password.hashed - - def test_default_lane_url(self, annotator_fixture: LibraryAnnotatorFixture): - default_lane_url = annotator_fixture.annotator.default_lane_url() - assert "groups" in default_lane_url - assert str(annotator_fixture.lane.id) not in default_lane_url - - facets = dict(entrypoint="Book") - default_lane_url = annotator_fixture.annotator.default_lane_url(facets=facets) - assert "entrypoint=Book" in default_lane_url - - def test_groups_url(self, annotator_fixture: LibraryAnnotatorFixture): - groups_url_no_lane = annotator_fixture.annotator.groups_url(None) - assert "groups" in groups_url_no_lane - assert str(annotator_fixture.lane.id) not in groups_url_no_lane - - groups_url_fantasy = annotator_fixture.annotator.groups_url( - annotator_fixture.lane - ) - assert "groups" in groups_url_fantasy - assert str(annotator_fixture.lane.id) in groups_url_fantasy - - facets = dict(arg="value") - groups_url_facets = annotator_fixture.annotator.groups_url(None, facets=facets) - assert "arg=value" in groups_url_facets - - def test_feed_url(self, annotator_fixture: LibraryAnnotatorFixture): - # A regular Lane. - feed_url_fantasy = annotator_fixture.annotator.feed_url( - annotator_fixture.lane, dict(facet="value"), dict() - ) - assert "feed" in feed_url_fantasy - assert "facet=value" in feed_url_fantasy - assert str(annotator_fixture.lane.id) in feed_url_fantasy - assert annotator_fixture.db.default_library().name in feed_url_fantasy - - # A QueryGeneratedLane. - annotator_fixture.annotator.lane = annotator_fixture.contributor_lane - feed_url_contributor = annotator_fixture.annotator.feed_url( - annotator_fixture.contributor_lane, dict(), dict() - ) - assert annotator_fixture.contributor_lane.ROUTE in feed_url_contributor - assert ( - annotator_fixture.contributor_lane.contributor_key in feed_url_contributor - ) - assert annotator_fixture.db.default_library().name in feed_url_contributor - - def test_search_url(self, annotator_fixture: LibraryAnnotatorFixture): - search_url = annotator_fixture.annotator.search_url( - annotator_fixture.lane, "query", dict(), dict(facet="value") - ) - assert "search" in search_url - assert "query" in search_url - assert "facet=value" in search_url - assert str(annotator_fixture.lane.id) in search_url - - def test_facet_url(self, annotator_fixture: LibraryAnnotatorFixture): - # A regular Lane. - facets = dict(collection="main") - facet_url = annotator_fixture.annotator.facet_url(facets) - assert "collection=main" in facet_url - assert str(annotator_fixture.lane.id) in facet_url - - # A QueryGeneratedLane. 
- annotator_fixture.annotator.lane = annotator_fixture.contributor_lane - - facet_url_contributor = annotator_fixture.annotator.facet_url(facets) - assert "collection=main" in facet_url_contributor - assert annotator_fixture.contributor_lane.ROUTE in facet_url_contributor - assert ( - annotator_fixture.contributor_lane.contributor_key in facet_url_contributor - ) - - def test_alternate_link_is_permalink( - self, annotator_fixture: LibraryAnnotatorFixture - ): - work = annotator_fixture.db.work(with_open_access_download=True) - works = annotator_fixture.db.session.query(Work) - annotator = LibraryAnnotator( - None, - annotator_fixture.lane, - annotator_fixture.db.default_library(), - test_mode=True, - ) - pool = annotator.active_licensepool_for(work) - - feed = self.get_parsed_feed(annotator_fixture, [work]) - [entry] = feed["entries"] - assert entry["id"] == pool.identifier.urn - - [(alternate, type)] = [ - (x["href"], x["type"]) for x in entry["links"] if x["rel"] == "alternate" - ] - permalink, permalink_type = annotator_fixture.annotator.permalink_for( - work, pool, pool.identifier - ) - assert alternate == permalink - assert OPDSFeed.ENTRY_TYPE == type - assert permalink_type == type - - # Make sure we are using the 'permalink' controller -- we were using - # 'work' and that was wrong. - assert "/host/permalink" in permalink - - def test_annotate_work_entry(self, annotator_fixture: LibraryAnnotatorFixture): - lane = annotator_fixture.db.lane() - - # Create a Work. - work = annotator_fixture.db.work(with_license_pool=True) - [pool] = work.license_pools - identifier = pool.identifier - edition = pool.presentation_edition - - # Try building an entry for this Work with and without - # patron authentication turned on -- each setting is valid - # but will result in different links being available. - linksets = [] - for auth in (True, False): - annotator = LibraryAnnotator( - None, - lane, - annotator_fixture.db.default_library(), - test_mode=True, - library_identifies_patrons=auth, - ) - feed = AcquisitionFeed( - annotator_fixture.db.session, "test", "url", [], annotator - ) - entry = feed._make_entry_xml(work, edition) - annotator.annotate_work_entry(work, pool, edition, identifier, feed, entry) - parsed = feedparser.parse(etree.tostring(entry)) - [entry_parsed] = parsed["entries"] - linksets.append({x["rel"] for x in entry_parsed["links"]}) - - with_auth, no_auth = linksets - - # Some links are present no matter what. - for expect in ["alternate", "related"]: - assert expect in with_auth - assert expect in no_auth - - # A library with patron authentication offers some additional - # links -- one to borrow the book and one to annotate the - # book. - for expect in [ - "http://www.w3.org/ns/oa#annotationservice", - "http://opds-spec.org/acquisition/borrow", - ]: - assert expect in with_auth - assert expect not in no_auth - - # We can also build an entry for a work with no license pool, - # but it will have no borrow link. 
- work = annotator_fixture.db.work(with_license_pool=False) - edition = work.presentation_edition - identifier = edition.primary_identifier - - annotator = LibraryAnnotator( - None, - lane, - annotator_fixture.db.default_library(), - test_mode=True, - library_identifies_patrons=True, - ) - feed = AcquisitionFeed( - annotator_fixture.db.session, "test", "url", [], annotator - ) - entry = feed._make_entry_xml(work, edition) - annotator.annotate_work_entry(work, None, edition, identifier, feed, entry) - parsed = feedparser.parse(etree.tostring(entry)) - [entry_parsed] = parsed["entries"] - links = {x["rel"] for x in entry_parsed["links"]} - - # These links are still present. - for expect in [ - "alternate", - "related", - "http://www.w3.org/ns/oa#annotationservice", - ]: - assert expect in links - - # But the borrow link is gone. - assert "http://opds-spec.org/acquisition/borrow" not in links - - # There are no links to create analytics events for this title, - # because the library has no analytics configured. - open_book_rel = "http://librarysimplified.org/terms/rel/analytics/open-book" - assert open_book_rel not in links - - # If analytics are configured, a link is added to - # create an 'open_book' analytics event for this title. - Analytics.GLOBAL_ENABLED = True - entry = feed._make_entry_xml(work, edition) - annotator.annotate_work_entry(work, None, edition, identifier, feed, entry) - parsed = feedparser.parse(etree.tostring(entry)) - [entry_parsed] = parsed["entries"] - [analytics_link] = [ - x["href"] for x in entry_parsed["links"] if x["rel"] == open_book_rel - ] - expect = annotator.url_for( - "track_analytics_event", - identifier_type=identifier.type, - identifier=identifier.identifier, - event_type=CirculationEvent.OPEN_BOOK, - library_short_name=annotator_fixture.db.default_library().short_name, - _external=True, - ) - assert expect == analytics_link - - # Test sample link with media types - link, _ = edition.primary_identifier.add_link( - Hyperlink.SAMPLE, - "http://example.org/sample", - edition.data_source, - media_type="application/epub+zip", - ) - feed = AcquisitionFeed( - annotator_fixture.db.session, "test", "url", [], annotator - ) - entry = feed._make_entry_xml(work, edition) - annotator.annotate_work_entry(work, None, edition, identifier, feed, entry) - parsed = feedparser.parse(etree.tostring(entry)) - [entry_parsed] = parsed["entries"] - [feed_link] = [ - l for l in entry_parsed["links"] if l.rel == Hyperlink.CLIENT_SAMPLE - ] - assert feed_link["href"] == link.resource.url - assert feed_link["type"] == link.resource.representation.media_type - - def test_annotate_feed(self, annotator_fixture: LibraryAnnotatorFixture): - lane = annotator_fixture.db.lane() - linksets = [] - for auth in (True, False): - annotator = LibraryAnnotator( - None, - lane, - annotator_fixture.db.default_library(), - test_mode=True, - library_identifies_patrons=auth, - ) - feed = AcquisitionFeed( - annotator_fixture.db.session, "test", "url", [], annotator - ) - annotator.annotate_feed(feed, lane) - parsed = feedparser.parse(str(feed)) - linksets.append([x["rel"] for x in parsed["feed"]["links"]]) - - with_auth, without_auth = linksets - - # There's always a self link, a search link, and an auth - # document link. - for rel in ("self", "search", "http://opds-spec.org/auth/document"): - assert rel in with_auth - assert rel in without_auth - - # But there's only a bookshelf link and an annotation link - # when patron authentication is enabled. 
- for rel in ( - "http://opds-spec.org/shelf", - "http://www.w3.org/ns/oa#annotationservice", - ): - assert rel in with_auth - assert rel not in without_auth - - def get_parsed_feed( - self, annotator_fixture: LibraryAnnotatorFixture, works, lane=None, **kwargs - ): - if not lane: - lane = annotator_fixture.db.lane(display_name="Main Lane") - feed = AcquisitionFeed( - annotator_fixture.db.session, - "test", - "url", - works, - LibraryAnnotator( - None, - lane, - annotator_fixture.db.default_library(), - test_mode=True, - **kwargs, - ), - ) - return feedparser.parse(str(feed)) - - def assert_link_on_entry( - self, entry, link_type=None, rels=None, partials_by_rel=None - ): - """Asserts that a link with a certain 'rel' value exists on a - given feed or entry, as well as its link 'type' value and parts - of its 'href' value. - """ - - def get_link_by_rel(rel): - try: - [link] = [x for x in entry["links"] if x["rel"] == rel] - except ValueError as e: - raise AssertionError - if link_type: - assert link_type == link.type - return link - - if rels: - [get_link_by_rel(rel) for rel in rels] - - partials_by_rel = partials_by_rel or dict() - for rel, uri_partials in list(partials_by_rel.items()): - link = get_link_by_rel(rel) - if not isinstance(uri_partials, list): - uri_partials = [uri_partials] - for part in uri_partials: - assert part in link.href - - def test_work_entry_includes_open_access_or_borrow_link( - self, annotator_fixture: LibraryAnnotatorFixture - ): - open_access_work = annotator_fixture.db.work(with_open_access_download=True) - licensed_work = annotator_fixture.db.work(with_license_pool=True) - licensed_work.license_pools[0].open_access = False - - feed = self.get_parsed_feed( - annotator_fixture, [open_access_work, licensed_work] - ) - [open_access_entry, licensed_entry] = feed.entries - - self.assert_link_on_entry(open_access_entry, rels=[OPDSFeed.BORROW_REL]) - self.assert_link_on_entry(licensed_entry, rels=[OPDSFeed.BORROW_REL]) - - def test_language_and_audience_key_from_work( - self, annotator_fixture: LibraryAnnotatorFixture - ): - work = annotator_fixture.db.work( - language="eng", audience=Classifier.AUDIENCE_CHILDREN - ) - result = annotator_fixture.annotator.language_and_audience_key_from_work(work) - assert ("eng", "Children") == result - - work = annotator_fixture.db.work( - language="fre", audience=Classifier.AUDIENCE_YOUNG_ADULT - ) - result = annotator_fixture.annotator.language_and_audience_key_from_work(work) - assert ("fre", "All+Ages,Children,Young+Adult") == result - - work = annotator_fixture.db.work( - language="spa", audience=Classifier.AUDIENCE_ADULT - ) - result = annotator_fixture.annotator.language_and_audience_key_from_work(work) - assert ("spa", "Adult,Adults+Only,All+Ages,Children,Young+Adult") == result - - work = annotator_fixture.db.work(audience=Classifier.AUDIENCE_ADULTS_ONLY) - result = annotator_fixture.annotator.language_and_audience_key_from_work(work) - assert ("eng", "Adult,Adults+Only,All+Ages,Children,Young+Adult") == result - - work = annotator_fixture.db.work(audience=Classifier.AUDIENCE_RESEARCH) - result = annotator_fixture.annotator.language_and_audience_key_from_work(work) - assert ( - "eng", - "Adult,Adults+Only,All+Ages,Children,Research,Young+Adult", - ) == result - - work = annotator_fixture.db.work(audience=Classifier.AUDIENCE_ALL_AGES) - result = annotator_fixture.annotator.language_and_audience_key_from_work(work) - assert ("eng", "All+Ages,Children") == result - - def test_work_entry_includes_contributor_links( - self, - 
annotator_fixture: LibraryAnnotatorFixture, - external_search_fake_fixture: ExternalSearchFixtureFake, - ): - """ContributorLane links are added to works with contributors""" - work = annotator_fixture.db.work(with_open_access_download=True) - contributor1 = work.presentation_edition.author_contributors[0] - feed = self.get_parsed_feed(annotator_fixture, [work]) - [entry] = feed.entries - - expected_rel_and_partial = dict(contributor="/contributor") - self.assert_link_on_entry( - entry, - link_type=OPDSFeed.ACQUISITION_FEED_TYPE, - partials_by_rel=expected_rel_and_partial, - ) - - # When there are two authors, they each get a contributor link. - work.presentation_edition.add_contributor("Oprah", Contributor.AUTHOR_ROLE) - work.calculate_presentation( - PresentationCalculationPolicy(regenerate_opds_entries=True), - external_search_fake_fixture.external_search, - ) - [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries - contributor_links = [l for l in entry.links if l.rel == "contributor"] - assert 2 == len(contributor_links) - contributor_links.sort(key=lambda l: l.href) - for l in contributor_links: - assert l.type == OPDSFeed.ACQUISITION_FEED_TYPE - assert "/contributor" in l.href - assert contributor1.sort_name in contributor_links[0].href - assert "Oprah" in contributor_links[1].href - - # When there's no author, there's no contributor link. - annotator_fixture.db.session.delete(work.presentation_edition.contributions[0]) - annotator_fixture.db.session.delete(work.presentation_edition.contributions[1]) - annotator_fixture.db.session.commit() - work.calculate_presentation( - PresentationCalculationPolicy(regenerate_opds_entries=True), - external_search_fake_fixture.external_search, - ) - [entry] = self.get_parsed_feed(annotator_fixture, [work]).entries - assert [] == [l for l in entry.links if l.rel == "contributor"] - - def test_work_entry_includes_series_link( - self, annotator_fixture: LibraryAnnotatorFixture - ): - """A series lane link is added to the work entry when its in a series""" - work = annotator_fixture.db.work( - with_open_access_download=True, series="Serious Cereals Series" - ) - feed = self.get_parsed_feed(annotator_fixture, [work]) - [entry] = feed.entries - expected_rel_and_partial = dict(series="/series") - self.assert_link_on_entry( - entry, - link_type=OPDSFeed.ACQUISITION_FEED_TYPE, - partials_by_rel=expected_rel_and_partial, - ) - - # When there's no series, there's no series link. - work = annotator_fixture.db.work(with_open_access_download=True) - feed = self.get_parsed_feed(annotator_fixture, [work]) - [entry] = feed.entries - assert [] == [l for l in entry.links if l.rel == "series"] - - def test_work_entry_includes_recommendations_link( - self, annotator_fixture: LibraryAnnotatorFixture - ): - work = annotator_fixture.db.work(with_open_access_download=True) - - # If NoveList Select isn't configured, there's no recommendations link. - feed = self.get_parsed_feed(annotator_fixture, [work]) - [entry] = feed.entries - assert [] == [l for l in entry.links if l.rel == "recommendations"] - - # There's a recommendation link when configuration is found, though! 
- NoveListAPI.IS_CONFIGURED = None - annotator_fixture.db.external_integration( - ExternalIntegration.NOVELIST, - goal=ExternalIntegration.METADATA_GOAL, - username="library", - password="sure", - libraries=[annotator_fixture.db.default_library()], - ) - - feed = self.get_parsed_feed(annotator_fixture, [work]) - [entry] = feed.entries - expected_rel_and_partial = dict(recommendations="/recommendations") - self.assert_link_on_entry( - entry, - link_type=OPDSFeed.ACQUISITION_FEED_TYPE, - partials_by_rel=expected_rel_and_partial, - ) - - def test_work_entry_includes_annotations_link( - self, annotator_fixture: LibraryAnnotatorFixture - ): - work = annotator_fixture.db.work(with_open_access_download=True) - identifier_str = work.license_pools[0].identifier.identifier - uri_parts = ["/annotations", identifier_str] - annotation_rel = "http://www.w3.org/ns/oa#annotationservice" - rel_with_partials = {annotation_rel: uri_parts} - - feed = self.get_parsed_feed(annotator_fixture, [work]) - [entry] = feed.entries - self.assert_link_on_entry(entry, partials_by_rel=rel_with_partials) - - # If the library does not authenticate patrons, no link to the - # annotation service is provided. - feed = self.get_parsed_feed( - annotator_fixture, [work], library_identifies_patrons=False - ) - [entry] = feed.entries - assert annotation_rel not in [x["rel"] for x in entry["links"]] - - def test_active_loan_feed( - self, - annotator_fixture: LibraryAnnotatorFixture, - vendor_id_fixture: VendorIDFixture, - ): - vendor_id_fixture.initialize_adobe(annotator_fixture.db.default_library()) - patron = annotator_fixture.db.patron() - patron.last_loan_activity_sync = utc_now() - cls = LibraryLoanAndHoldAnnotator - - response = cls.active_loans_for(None, patron, test_mode=True) - - # The feed is private and should not be cached. - assert isinstance(response, OPDSFeedResponse) - assert 0 == response.max_age - assert True == response.private - - # Instead, the Last-Modified header is set to the last time - # we successfully brought the patron's bookshelf in sync with - # the vendor APIs. - # - # (The timestamps aren't exactly the same because - # last_loan_activity_sync is tracked at the millisecond level - # and Last-Modified is tracked at the second level.) - - assert response.last_modified is not None - assert ( - patron.last_loan_activity_sync - response.last_modified - ).total_seconds() < 1 - - # No entries in the feed... - raw = str(response) - feed = feedparser.parse(raw) - assert 0 == len(feed["entries"]) - - # ... but we have a link to the User Profile Management - # Protocol endpoint... - links = feed["feed"]["links"] - [upmp_link] = [ - x - for x in links - if x["rel"] == "http://librarysimplified.org/terms/rel/user-profile" - ] - annotator = cls( - None, None, library=patron.library, patron=patron, test_mode=True - ) - expect_url = annotator.url_for( - "patron_profile", - library_short_name=patron.library.short_name, - _external=True, - ) - assert expect_url == upmp_link["href"] - - # ... and we have DRM licensing information. - tree = etree.fromstring(response.get_data(as_text=True)) - parser = OPDSXMLParser() - licensor = parser._xpath1(tree, "//atom:feed/drm:licensor") - assert licensor is not None - - adobe_patron_identifier = AuthdataUtility._adobe_patron_identifier(patron) - - # The DRM licensing information includes the Adobe vendor ID - # and the patron's patron identifier for Adobe purposes. 
-        assert (
-            vendor_id_fixture.TEST_VENDOR_ID
-            == licensor.attrib["{http://librarysimplified.org/terms/drm}vendor"]
-        )
-        [client_token] = licensor
-        assert vendor_id_fixture.registration.short_name is not None
-        expected = vendor_id_fixture.registration.short_name.upper()
-        assert client_token.text.startswith(expected)
-        assert adobe_patron_identifier in client_token.text
-
-        # Unlike other places this tag shows up, we use the
-        # 'scheme' attribute to explicitly state that this
-        # <drm:licensor> tag is talking about an ACS licensing
-        # scheme. Since we're in a <feed> and not a <link> to a
-        # specific book, that context would otherwise be lost.
-        assert (
-            "http://librarysimplified.org/terms/drm/scheme/ACS"
-            == licensor.attrib["{http://librarysimplified.org/terms/drm}scheme"]
-        )
-
-        # Since we're taking a round trip to and from OPDS, which only
-        # represents times with second precision, generate the current
-        # time with second precision to make later comparisons
-        # possible.
-        now = utc_now().replace(microsecond=0)
-        tomorrow = now + datetime.timedelta(days=1)
-
-        # A loan of an open-access book is open-ended.
-        work1 = annotator_fixture.db.work(
-            language="eng", with_open_access_download=True
-        )
-        loan1 = work1.license_pools[0].loan_to(patron, start=now)
-
-        # A loan of some other kind of book has an end point.
-        work2 = annotator_fixture.db.work(language="eng", with_license_pool=True)
-        loan2 = work2.license_pools[0].loan_to(patron, start=now, end=tomorrow)
-        unused = annotator_fixture.db.work(
-            language="eng", with_open_access_download=True
-        )
-
-        # Get the feed.
-        feed_obj = LibraryLoanAndHoldAnnotator.active_loans_for(
-            None, patron, test_mode=True
-        )
-        raw = str(feed_obj)
-        feed = feedparser.parse(raw)
-
-        # The only entries in the feed are the works currently out on loan
-        # to this patron.
-        assert 2 == len(feed["entries"])
-        e1, e2 = sorted(feed["entries"], key=lambda x: x["title"])
-        assert work1.title == e1["title"]
-        assert work2.title == e2["title"]
-
-        # Make sure that the start and end dates from the loan are present
-        # in an <opds:availability> child of the acquisition link.
-        tree = etree.fromstring(raw)
-        parser = OPDSXMLParser()
-        acquisitions = parser._xpath(
-            tree, "//atom:entry/atom:link[@rel='http://opds-spec.org/acquisition']"
-        )
-        assert 2 == len(acquisitions)
-
-        availabilities = []
-        for acquisition in acquisitions:
-            availability = parser._xpath1(acquisition, "opds:availability")
-            assert availability is not None
-            availabilities.append(availability)
-
-        # One of these availability tags has 'since' but not 'until'.
-        # The other one has both.
-        [no_until] = [x for x in availabilities if "until" not in x.attrib]
-        assert now == dateutil.parser.parse(no_until.attrib["since"])
-
-        [has_until] = [x for x in availabilities if "until" in x.attrib]
-        assert now == dateutil.parser.parse(has_until.attrib["since"])
-        assert tomorrow == dateutil.parser.parse(has_until.attrib["until"])
-
-    def test_loan_feed_includes_patron(
-        self, annotator_fixture: LibraryAnnotatorFixture
-    ):
-        patron = annotator_fixture.db.patron()
-
-        patron.username = "bellhooks"
-        patron.authorization_identifier = "987654321"
-        feed_obj = LibraryLoanAndHoldAnnotator.active_loans_for(
-            None, patron, test_mode=True
-        )
-        raw = str(feed_obj)
-        feed_details = feedparser.parse(raw)["feed"]
-
-        assert "simplified:authorizationIdentifier" in raw
-        assert "simplified:username" in raw
-        assert (
-            patron.username == feed_details["simplified_patron"]["simplified:username"]
-        )
-        assert (
-            "987654321"
-            == feed_details["simplified_patron"]["simplified:authorizationidentifier"]
-        )
-
-    def test_loans_feed_includes_annotations_link(
-        self, annotator_fixture: LibraryAnnotatorFixture
-    ):
-        patron = annotator_fixture.db.patron()
-        feed_obj = LibraryLoanAndHoldAnnotator.active_loans_for(
-            None, patron, test_mode=True
-        )
-        raw = str(feed_obj)
-        feed = feedparser.parse(raw)["feed"]
-        links = feed["links"]
-
-        [annotations_link] = [
-            x
-            for x in links
-            if x["rel"].lower() == "http://www.w3.org/ns/oa#annotationService".lower()
-        ]
-        assert "/annotations" in annotations_link["href"]
-
-    def test_active_loan_feed_ignores_inconsistent_local_data(
-        self, annotator_fixture: LibraryAnnotatorFixture
-    ):
-        patron = annotator_fixture.db.patron()
-
-        work1 = annotator_fixture.db.work(language="eng", with_license_pool=True)
-        loan, ignore = work1.license_pools[0].loan_to(patron)
-        work2 = annotator_fixture.db.work(language="eng", with_license_pool=True)
-        hold, ignore = work2.license_pools[0].on_hold_to(patron)
-
-        # Uh-oh, our local loan data is bad.
-        loan.license_pool.identifier = None
-
-        # Our local hold data is also bad.
-        hold.license_pool = None
-
-        # We can still get a feed...
-        feed_obj = LibraryLoanAndHoldAnnotator.active_loans_for(
-            None, patron, test_mode=True
-        )
-
-        # ...but it's empty.
-        assert "<entry>" not in str(feed_obj)
-
-    def test_acquisition_feed_includes_license_information(
-        self, annotator_fixture: LibraryAnnotatorFixture
-    ):
-        work = annotator_fixture.db.work(with_open_access_download=True)
-        pool = work.license_pools[0]
-
-        # These numbers are impossible, but it doesn't matter for
-        # purposes of this test.
-        pool.open_access = False
-        pool.licenses_owned = 100
-        pool.licenses_available = 50
-        pool.patrons_in_hold_queue = 25
-
-        feed = AcquisitionFeed(
-            annotator_fixture.db.session,
-            "title",
-            "url",
-            [work],
-            annotator_fixture.annotator,
-        )
-        u = str(feed)
-        holds_re = re.compile(r'<opds:holds\W+total="25"\W*/>', re.S)
-        assert holds_re.search(u) is not None
-
-        copies_re = re.compile('<opds:copies[^>]+available="50"', re.S)
-        assert copies_re.search(u) is not None
-
-        copies_re = re.compile('<opds:copies[^>]+total="100"', re.S)
-        assert copies_re.search(u) is not None
-
-    def test_loans_feed_includes_fulfill_links(
-        self,
-        annotator_fixture: LibraryAnnotatorFixture,
-        library_fixture: LibraryFixture,
-    ):
-        patron = annotator_fixture.db.patron()
-
-        work = annotator_fixture.db.work(
-            with_license_pool=True, with_open_access_download=False
-        )
-        pool = work.license_pools[0]
-        pool.open_access = False
-        mech1 = pool.delivery_mechanisms[0]
-        mech2 = pool.set_delivery_mechanism(
-            Representation.PDF_MEDIA_TYPE,
-            DeliveryMechanism.ADOBE_DRM,
-            RightsStatus.IN_COPYRIGHT,
-            None,
-        )
-        streaming_mech = pool.set_delivery_mechanism(
-            DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE,
-            DeliveryMechanism.OVERDRIVE_DRM,
-            RightsStatus.IN_COPYRIGHT,
-            None,
-        )
-
-        now = utc_now()
-        loan, ignore = pool.loan_to(patron, start=now)
-
-        feed_obj = LibraryLoanAndHoldAnnotator.active_loans_for(
-            None, patron, test_mode=True
-        )
-        raw = str(feed_obj)
-
-        entries = feedparser.parse(raw)["entries"]
-        assert 1 == len(entries)
-
-        links = entries[0]["links"]
-
-        # Before we fulfill the loan, there are fulfill links for all three mechanisms.
-        fulfill_links = [
-            link for link in links if link["rel"] == "http://opds-spec.org/acquisition"
-        ]
-        assert 3 == len(fulfill_links)
-
-        assert {
-            mech1.delivery_mechanism.drm_scheme_media_type,
-            mech2.delivery_mechanism.drm_scheme_media_type,
-            OPDSFeed.ENTRY_TYPE,
-        } == {link["type"] for link in fulfill_links}
-
-        # If one of the content types is hidden, the corresponding
-        # delivery mechanism does not have a link.
-        library = annotator_fixture.db.default_library()
-        settings = library_fixture.settings(library)
-        settings.hidden_content_types = [mech1.delivery_mechanism.content_type]
-        LibraryLoanAndHoldAnnotator.active_loans_for(None, patron, test_mode=True)
-        assert {
-            mech2.delivery_mechanism.drm_scheme_media_type,
-            OPDSFeed.ENTRY_TYPE,
-        } == {link["type"] for link in fulfill_links}
-        settings.hidden_content_types = []
-
-        # When the loan is fulfilled, there are only fulfill links for that mechanism
-        # and the streaming mechanism.
- loan.fulfillment = mech1 - - feed_obj = LibraryLoanAndHoldAnnotator.active_loans_for( - None, patron, test_mode=True - ) - raw = str(feed_obj) - - entries = feedparser.parse(raw)["entries"] - assert 1 == len(entries) - - links = entries[0]["links"] - - fulfill_links = [ - link for link in links if link["rel"] == "http://opds-spec.org/acquisition" - ] - assert 2 == len(fulfill_links) - - assert { - mech1.delivery_mechanism.drm_scheme_media_type, - OPDSFeed.ENTRY_TYPE, - } == {link["type"] for link in fulfill_links} - - def test_incomplete_catalog_entry_contains_an_alternate_link_to_the_complete_entry( - self, annotator_fixture: LibraryAnnotatorFixture - ): - circulation = create_autospec(spec=CirculationAPI) - circulation.library = annotator_fixture.db.default_library() - work = annotator_fixture.db.work( - with_license_pool=True, with_open_access_download=False - ) - pool = work.license_pools[0] - - feed_obj = LibraryLoanAndHoldAnnotator.single_item_feed( - circulation, pool, test_mode=True - ) - raw = str(feed_obj) - - entries = feedparser.parse(raw)["entries"] - assert 1 == len(entries) - - links = entries[0]["links"] - - # We want to make sure that an incomplete catalog entry contains an alternate link to the complete entry. - alternate_links = [ - link - for link in links - if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "alternate" - ] - assert 1 == len(alternate_links) - - def test_complete_catalog_entry_with_fulfillment_link_contains_self_link( - self, annotator_fixture: LibraryAnnotatorFixture - ): - patron = annotator_fixture.db.patron() - circulation = create_autospec(spec=CirculationAPI) - circulation.library = annotator_fixture.db.default_library() - work = annotator_fixture.db.work( - with_license_pool=True, with_open_access_download=False - ) - pool = work.license_pools[0] - loan, _ = pool.loan_to(patron) - - feed_obj = LibraryLoanAndHoldAnnotator.single_item_feed( - circulation, loan, test_mode=True - ) - raw = str(feed_obj) - - entries = feedparser.parse(raw)["entries"] - assert 1 == len(entries) - - links = entries[0]["links"] - - # We want to make sure that a complete catalog entry contains an alternate link - # because it's required by some clients (for example, an Android version of SimplyE). - alternate_links = [ - link - for link in links - if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "alternate" - ] - assert 1 == len(alternate_links) - - # We want to make sure that the complete catalog entry contains a self link. - self_links = [ - link - for link in links - if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "self" - ] - assert 1 == len(self_links) - - # We want to make sure that alternate and self links are the same. 
- assert alternate_links[0]["href"] == self_links[0]["href"] - - def test_complete_catalog_entry_with_fulfillment_info_contains_self_link( - self, annotator_fixture: LibraryAnnotatorFixture - ): - patron = annotator_fixture.db.patron() - circulation = create_autospec(spec=CirculationAPI) - circulation.library = annotator_fixture.db.default_library() - work = annotator_fixture.db.work( - with_license_pool=True, with_open_access_download=False - ) - pool = work.license_pools[0] - loan, _ = pool.loan_to(patron) - fulfillment = FulfillmentInfo( - pool.collection, - pool.data_source.name, - pool.identifier.type, - pool.identifier.identifier, - "http://link", - Representation.EPUB_MEDIA_TYPE, - None, - None, - ) - - feed_obj = LibraryLoanAndHoldAnnotator.single_item_feed( - circulation, loan, fulfillment, test_mode=True - ) - raw = str(feed_obj) - - entries = feedparser.parse(raw)["entries"] - assert 1 == len(entries) - - links = entries[0]["links"] - - # We want to make sure that a complete catalog entry contains an alternate link - # because it's required by some clients (for example, an Android version of SimplyE). - alternate_links = [ - link - for link in links - if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "alternate" - ] - assert 1 == len(alternate_links) - - # We want to make sure that the complete catalog entry contains a self link. - self_links = [ - link - for link in links - if link["type"] == OPDSFeed.ENTRY_TYPE and link["rel"] == "self" - ] - assert 1 == len(self_links) - - # We want to make sure that alternate and self links are the same. - assert alternate_links[0]["href"] == self_links[0]["href"] - - def test_fulfill_feed(self, annotator_fixture: LibraryAnnotatorFixture): - patron = annotator_fixture.db.patron() - - work = annotator_fixture.db.work( - with_license_pool=True, with_open_access_download=False - ) - pool = work.license_pools[0] - pool.open_access = False - streaming_mech = pool.set_delivery_mechanism( - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, - DeliveryMechanism.OVERDRIVE_DRM, - RightsStatus.IN_COPYRIGHT, - None, - ) - - now = utc_now() - loan, ignore = pool.loan_to(patron, start=now) - fulfillment = FulfillmentInfo( - pool.collection, - pool.data_source.name, - pool.identifier.type, - pool.identifier.identifier, - "http://streaming_link", - Representation.TEXT_HTML_MEDIA_TYPE + DeliveryMechanism.STREAMING_PROFILE, - None, - None, - ) - - response = LibraryLoanAndHoldAnnotator.single_item_feed( - None, loan, fulfillment, test_mode=True - ) - assert isinstance(response, OPDSEntryResponse) - raw = response.get_data(as_text=True) - - entries = feedparser.parse(raw)["entries"] - assert 1 == len(entries) - - links = entries[0]["links"] - - # The feed for a single fulfillment only includes one fulfill link. - fulfill_links = [ - link for link in links if link["rel"] == "http://opds-spec.org/acquisition" - ] - assert 1 == len(fulfill_links) - - assert ( - Representation.TEXT_HTML_MEDIA_TYPE + DeliveryMechanism.STREAMING_PROFILE - == fulfill_links[0]["type"] - ) - assert "http://streaming_link" == fulfill_links[0]["href"] - - def test_drm_device_registration_feed_tags( - self, - annotator_fixture: LibraryAnnotatorFixture, - vendor_id_fixture: VendorIDFixture, - ): - """Check that drm_device_registration_feed_tags returns - a generic drm:licensor tag, except with the drm:scheme attribute - set. 
- """ - vendor_id_fixture.initialize_adobe(annotator_fixture.db.default_library()) - annotator = LibraryLoanAndHoldAnnotator( - None, None, annotator_fixture.db.default_library(), test_mode=True - ) - patron = annotator_fixture.db.patron() - [feed_tag] = annotator.drm_device_registration_feed_tags(patron) - [generic_tag] = annotator.adobe_id_tags(patron) - - # The feed-level tag has the drm:scheme attribute set. - key = "{http://librarysimplified.org/terms/drm}scheme" - assert ( - "http://librarysimplified.org/terms/drm/scheme/ACS" == feed_tag.attrib[key] - ) - - # If we remove that attribute, the feed-level tag is the same as the - # generic tag. - del feed_tag.attrib[key] - assert etree.tostring(feed_tag, method="c14n2") == etree.tostring( - generic_tag, method="c14n2" - ) - - def test_borrow_link_raises_unfulfillable_work( - self, annotator_fixture: LibraryAnnotatorFixture - ): - edition, pool = annotator_fixture.db.edition(with_license_pool=True) - kindle_mechanism = pool.set_delivery_mechanism( - DeliveryMechanism.KINDLE_CONTENT_TYPE, - DeliveryMechanism.KINDLE_DRM, - RightsStatus.IN_COPYRIGHT, - None, - ) - epub_mechanism = pool.set_delivery_mechanism( - Representation.EPUB_MEDIA_TYPE, - DeliveryMechanism.ADOBE_DRM, - RightsStatus.IN_COPYRIGHT, - None, - ) - data_source_name = pool.data_source.name - identifier = pool.identifier - - annotator = LibraryLoanAndHoldAnnotator( - None, None, annotator_fixture.db.default_library(), test_mode=True - ) - - # If there's no way to fulfill the book, borrow_link raises - # UnfulfillableWork. - pytest.raises(UnfulfillableWork, annotator.borrow_link, pool, None, []) - - pytest.raises( - UnfulfillableWork, annotator.borrow_link, pool, None, [kindle_mechanism] - ) - - # If there's a fulfillable mechanism, everything's fine. - link = annotator.borrow_link(pool, None, [epub_mechanism]) - assert link != None - - link = annotator.borrow_link(pool, None, [epub_mechanism, kindle_mechanism]) - assert link != None - - def test_feed_includes_lane_links(self, annotator_fixture: LibraryAnnotatorFixture): - def annotated_links(lane, annotator): - # Create an AcquisitionFeed is using the given Annotator. - # extract its links and return a dictionary that maps link - # relations to URLs. - feed = AcquisitionFeed( - annotator_fixture.db.session, "test", "url", [], annotator - ) - annotator.annotate_feed(feed, lane) - raw = str(feed) - parsed = feedparser.parse(raw)["feed"] - links = parsed["links"] - - d = defaultdict(list) - for link in links: - d[link["rel"].lower()].append(link["href"]) - return d - - # When an EntryPoint is explicitly selected, it shows up in the - # link to the search controller. - facets = FacetsWithEntryPoint(entrypoint=AudiobooksEntryPoint) - lane = annotator_fixture.db.lane() - annotator = LibraryAnnotator( - None, - lane, - annotator_fixture.db.default_library(), - test_mode=True, - facets=facets, - ) - [url] = annotated_links(lane, annotator)["search"] - assert "/lane_search" in url - assert "entrypoint=%s" % AudiobooksEntryPoint.INTERNAL_NAME in url - assert str(lane.id) in url - - # When the selected EntryPoint is a default, it's not used -- - # instead, we search everything. - annotator.facets.entrypoint_is_default = True - links = annotated_links(lane, annotator) - [url] = links["search"] - assert "entrypoint=%s" % EverythingEntryPoint.INTERNAL_NAME in url - - # This lane isn't based on a custom list, so there's no crawlable link. 
- assert [] == links["http://opds-spec.org/crawlable"] - - # It's also not crawlable if it's based on multiple lists. - list1, ignore = annotator_fixture.db.customlist() - list2, ignore = annotator_fixture.db.customlist() - lane.customlists = [list1, list2] - links = annotated_links(lane, annotator) - assert [] == links["http://opds-spec.org/crawlable"] - - # A lane based on a single list gets a crawlable link. - lane.customlists = [list1] - links = annotated_links(lane, annotator) - [crawlable] = links["http://opds-spec.org/crawlable"] - assert "/crawlable_list_feed" in crawlable - assert str(list1.name) in crawlable - - def test_acquisition_links( - self, - annotator_fixture: LibraryAnnotatorFixture, - library_fixture: LibraryFixture, - ): - annotator = LibraryLoanAndHoldAnnotator( - None, None, annotator_fixture.db.default_library(), test_mode=True - ) - feed = AcquisitionFeed( - annotator_fixture.db.session, "test", "url", [], annotator - ) - - patron = annotator_fixture.db.patron() - - now = utc_now() - tomorrow = now + datetime.timedelta(days=1) - - # Loan of an open-access book. - work1 = annotator_fixture.db.work(with_open_access_download=True) - loan1, ignore = work1.license_pools[0].loan_to(patron, start=now) - - # Loan of a licensed book. - work2 = annotator_fixture.db.work(with_license_pool=True) - loan2, ignore = work2.license_pools[0].loan_to(patron, start=now, end=tomorrow) - - # Hold on a licensed book. - work3 = annotator_fixture.db.work(with_license_pool=True) - hold, ignore = work3.license_pools[0].on_hold_to( - patron, start=now, end=tomorrow - ) - - # Book with no loans or holds yet. - work4 = annotator_fixture.db.work(with_license_pool=True) - - # Loan of a licensed book without a loan end. - work5 = annotator_fixture.db.work(with_license_pool=True) - loan5, ignore = work5.license_pools[0].loan_to(patron, start=now) - - # Ensure the state variable - assert annotator.identifies_patrons == True - - loan1_links = annotator.acquisition_links( - loan1.license_pool, loan1, None, None, feed, loan1.license_pool.identifier - ) - # Fulfill, and revoke. 
- [revoke, fulfill] = sorted(loan1_links, key=lambda x: x.attrib.get("rel")) - assert "revoke_loan_or_hold" in revoke.attrib.get("href") - assert "http://librarysimplified.org/terms/rel/revoke" == revoke.attrib.get( - "rel" - ) - assert "fulfill" in fulfill.attrib.get("href") - assert "http://opds-spec.org/acquisition" == fulfill.attrib.get("rel") - - # Allow direct open-access downloads - # This will also filter out loan revoke links - annotator.identifies_patrons = False - loan1_links = annotator.acquisition_links( - loan1.license_pool, loan1, None, None, feed, loan1.license_pool.identifier - ) - assert len(loan1_links) == 1 - assert {"http://opds-spec.org/acquisition/open-access"} == { - link.attrib.get("rel") for link in loan1_links - } - - # Work 2 has no open access links - loan2_links = annotator.acquisition_links( - loan2.license_pool, loan2, None, None, feed, loan2.license_pool.identifier - ) - assert len(loan2_links) == 0 - - # Revert the annotator state - annotator.identifies_patrons = True - - opds_parser = OPDSXMLParser() - - availability = opds_parser._xpath1(fulfill, "opds:availability") - assert availability is not None - assert _strftime(loan1.start) == availability.attrib.get("since") - assert loan1.end == availability.attrib.get("until") - assert None == loan1.end - - loan2_links = annotator.acquisition_links( - loan2.license_pool, loan2, None, None, feed, loan2.license_pool.identifier - ) - # Fulfill and revoke. - [revoke, fulfill] = sorted(loan2_links, key=lambda x: x.attrib.get("rel")) - assert "revoke_loan_or_hold" in revoke.attrib.get("href") - assert "http://librarysimplified.org/terms/rel/revoke" == revoke.attrib.get( - "rel" - ) - assert "fulfill" in fulfill.attrib.get("href") - assert "http://opds-spec.org/acquisition" == fulfill.attrib.get("rel") - - availability = opds_parser._xpath1(fulfill, "opds:availability") - assert _strftime(loan2.start) == availability.attrib.get("since") - assert _strftime(loan2.end) == availability.attrib.get("until") - - # If a book is ready to be fulfilled, but the library has - # hidden all of its available content types, the fulfill link does - # not show up -- only the revoke link. - library = annotator_fixture.db.default_library() - settings = library_fixture.settings(library) - available_types = [ - lpdm.delivery_mechanism.content_type - for lpdm in loan2.license_pool.delivery_mechanisms - ] - settings.hidden_content_types = available_types - - # The list of hidden content types is stored in the Annotator - # constructor, so this particular test needs a fresh Annotator. - annotator_with_hidden_types = LibraryLoanAndHoldAnnotator( - None, None, annotator_fixture.db.default_library(), test_mode=True - ) - loan2_links = annotator_with_hidden_types.acquisition_links( - loan2.license_pool, loan2, None, None, feed, loan2.license_pool.identifier - ) - [revoke] = loan2_links - assert "http://librarysimplified.org/terms/rel/revoke" == revoke.attrib.get( - "rel" - ) - # Un-hide the content types so the test can continue. - settings.hidden_content_types = [] - - hold_links = annotator.acquisition_links( - hold.license_pool, None, hold, None, feed, hold.license_pool.identifier - ) - # Borrow and revoke. 
- [revoke, borrow] = sorted(hold_links, key=lambda x: x.attrib.get("rel")) - assert "revoke_loan_or_hold" in revoke.attrib.get("href") - assert "http://librarysimplified.org/terms/rel/revoke" == revoke.attrib.get( - "rel" - ) - assert "borrow" in borrow.attrib.get("href") - assert "http://opds-spec.org/acquisition/borrow" == borrow.attrib.get("rel") - - work4_links = annotator.acquisition_links( - work4.license_pools[0], - None, - None, - None, - feed, - work4.license_pools[0].identifier, - ) - # Borrow only. - [borrow] = work4_links - assert "borrow" in borrow.attrib.get("href") - assert "http://opds-spec.org/acquisition/borrow" == borrow.attrib.get("rel") - - loan5_links = annotator.acquisition_links( - loan5.license_pool, loan5, None, None, feed, loan5.license_pool.identifier - ) - # Fulfill and revoke. - [revoke, fulfill] = sorted(loan5_links, key=lambda x: x.attrib.get("rel")) - assert "revoke_loan_or_hold" in revoke.attrib.get("href") - assert "http://librarysimplified.org/terms/rel/revoke" == revoke.attrib.get( - "rel" - ) - assert "fulfill" in fulfill.attrib.get("href") - assert "http://opds-spec.org/acquisition" == fulfill.attrib.get("rel") - - availability = opds_parser._xpath1(fulfill, "opds:availability") - assert _strftime(loan5.start) == availability.attrib.get("since") - # TODO: This currently fails, it should be uncommented when the CM 21 day loan bug is fixed - # assert loan5.end == availability.attrib.get("until") - assert None == loan5.end - - # If patron authentication is turned off for the library, then - # only open-access links are displayed. - annotator.identifies_patrons = False - - [open_access] = annotator.acquisition_links( - loan1.license_pool, loan1, None, None, feed, loan1.license_pool.identifier - ) - assert "http://opds-spec.org/acquisition/open-access" == open_access.attrib.get( - "rel" - ) - - # This may include links with the open-access relation for - # non-open-access works that are available without - # authentication. To get such link, you pass in a list of - # LicensePoolDeliveryMechanisms as - # `direct_fufillment_delivery_mechanisms`. - [lp4] = work4.license_pools - [lpdm4] = lp4.delivery_mechanisms - lpdm4.set_rights_status(RightsStatus.IN_COPYRIGHT) - [not_open_access] = annotator.acquisition_links( - lp4, - None, - None, - None, - feed, - lp4.identifier, - direct_fulfillment_delivery_mechanisms=[lpdm4], - ) - - # The link relation is OPDS 'open-access', which just means the - # book can be downloaded with no hassle. - assert ( - "http://opds-spec.org/acquisition/open-access" - == not_open_access.attrib.get("rel") - ) - - # The dcterms:rights attribute provides a more detailed - # explanation of the book's copyright status -- note that it's - # not "open access" in the typical sense. - rights = not_open_access.attrib["{http://purl.org/dc/terms/}rights"] - assert RightsStatus.IN_COPYRIGHT == rights - - # Hold links are absent even when there are active holds in the - # database -- there is no way to distinguish one patron from - # another so the concept of a 'hold' is meaningless. 
- hold_links = annotator.acquisition_links( - hold.license_pool, None, hold, None, feed, hold.license_pool.identifier - ) - assert [] == hold_links - - def test_acquisition_links_multiple_links( - self, - annotator_fixture: LibraryAnnotatorFixture, - library_fixture: LibraryFixture, - ): - annotator = LibraryLoanAndHoldAnnotator( - None, None, annotator_fixture.db.default_library(), test_mode=True - ) - feed = AcquisitionFeed( - annotator_fixture.db.session, "test", "url", [], annotator - ) - - # This book has two delivery mechanisms - work = annotator_fixture.db.work(with_license_pool=True) - [pool] = work.license_pools - [mech1] = pool.delivery_mechanisms - mech2 = pool.set_delivery_mechanism( - Representation.PDF_MEDIA_TYPE, - DeliveryMechanism.NO_DRM, - RightsStatus.IN_COPYRIGHT, - None, - ) - - # The vendor API for LicensePools of this type requires that a - # delivery mechanism be chosen at the point of borrowing. - class MockAPI: - SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.BORROW_STEP - - # This means that two different acquisition links will be - # generated -- one for each delivery mechanism. - links = annotator.acquisition_links( - pool, None, None, None, feed, pool.identifier, mock_api=MockAPI() - ) - assert 2 == len(links) - - mech1_param = "mechanism_id=%s" % mech1.delivery_mechanism.id - mech2_param = "mechanism_id=%s" % mech2.delivery_mechanism.id - - # Instead of sorting, which may be wrong if the id is greater than 10 - # due to how double digits are sorted, extract the links associated - # with the expected delivery mechanism. - if mech1_param in links[0].attrib["href"]: - [mech1_link, mech2_link] = links - else: - [mech2_link, mech1_link] = links - - indirects = [] - for link in [mech1_link, mech2_link]: - # Both links should have the same subtags. - [availability, copies, holds, indirect] = sorted(link, key=lambda x: x.tag) - assert availability.tag.endswith("availability") - assert copies.tag.endswith("copies") - assert holds.tag.endswith("holds") - assert indirect.tag.endswith("indirectAcquisition") - indirects.append(indirect) - - # The target of the top-level link is different. - assert mech1_param in mech1_link.attrib["href"] - assert mech2_param in mech2_link.attrib["href"] - - # So is the media type seen in the indirectAcquisition subtag. - [mech1_indirect, mech2_indirect] = indirects - - # The first delivery mechanism (created when the Work was created) - # uses Adobe DRM, so that shows up as the first indirect acquisition - # type. - assert mech1.delivery_mechanism.drm_scheme == mech1_indirect.attrib["type"] - - # The second delivery mechanism doesn't use DRM, so the content - # type shows up as the first (and only) indirect acquisition type. - assert mech2.delivery_mechanism.content_type == mech2_indirect.attrib["type"] - - # If we configure the library to hide one of the content types, - # we end up with only one link -- the one for the delivery - # mechanism that's not hidden. 
- library = annotator_fixture.db.default_library() - settings = library_fixture.settings(library) - settings.hidden_content_types = [mech1.delivery_mechanism.content_type] - annotator = LibraryLoanAndHoldAnnotator( - None, None, annotator_fixture.db.default_library(), test_mode=True - ) - [link] = annotator.acquisition_links( - pool, None, None, None, feed, pool.identifier, mock_api=MockAPI() - ) - [availability, copies, holds, indirect] = sorted(link, key=lambda x: x.tag) - assert mech2.delivery_mechanism.content_type == indirect.attrib["type"] - - -class TestLibraryLoanAndHoldAnnotator: - def test_single_item_feed(self, db: DatabaseTransactionFixture): - # Test the generation of single-item OPDS feeds for loans (with and - # without fulfillment) and holds. - class MockAnnotator(LibraryLoanAndHoldAnnotator): - def url_for(self, controller, **kwargs): - self.url_for_called_with = (controller, kwargs) - return "a URL" - - def _single_entry_response(self, *args, **kwargs): - self._single_entry_response_called_with = (args, kwargs) - # Return the annotator itself so we can look at it. - return self - - def test_annotator(item, fulfillment=None): - # Call MockAnnotator.single_item_feed with certain arguments - # and make some general assertions about the return value. - circulation = object() - test_mode = object() - feed_class = object() - result = MockAnnotator.single_item_feed( - circulation, item, fulfillment, test_mode, feed_class, extra_arg="value" - ) - - # The final result is a MockAnnotator object. This isn't - # normal; it's because - # MockAnnotator._single_entry_response returns the - # MockAnnotator it creates, for us to examine. - assert isinstance(result, MockAnnotator) - - # Let's examine the MockAnnotator itself. - assert circulation == result.circulation - assert db.default_library() == result.library - assert test_mode == result.test_mode - - # Now let's see what we did with it after calling its - # constructor. - - # First, we generated a URL to the "loan_or_hold_detail" - # controller for the license pool's identifier. - url_call = result.url_for_called_with - controller_name, kwargs = url_call - assert "loan_or_hold_detail" == controller_name - assert db.default_library().short_name == kwargs.pop("library_short_name") - assert pool.identifier.type == kwargs.pop("identifier_type") - assert pool.identifier.identifier == kwargs.pop("identifier") - assert True == kwargs.pop("_external") - assert {} == kwargs - - # The return value of that was the string "a URL". We then - # passed that into _single_entry_response, along with - # `item` and a number of arguments that we made up. - response_call = result._single_entry_response_called_with - (_db, _work, annotator, url, _feed_class), kwargs = response_call - assert db.session == _db - assert work == _work - assert result == annotator - assert "a URL" == url - assert feed_class == _feed_class - - # The only keyword argument is an extra argument propagated from - # the single_item_feed call. - assert "value" == kwargs.pop("extra_arg") - - # Return the MockAnnotator for further examination. - return result - - # Now we're going to call test_annotator a couple times in - # different situations. - work = db.work(with_license_pool=True) - [pool] = work.license_pools - patron = db.patron() - loan, ignore = pool.loan_to(patron) - - # First, let's ask for a single-item feed for a loan. 
- annotator = test_annotator(loan) - - # Everything tested by test_annotator happened, but _also_, - # when the annotator was created, the Loan was stored in - # active_loans_by_work. - assert {work: loan} == annotator.active_loans_by_work - - # Since we passed in a loan rather than a hold, - # active_holds_by_work is empty. - assert {} == annotator.active_holds_by_work - - # Since we didn't pass in a fulfillment for the loan, - # active_fulfillments_by_work is empty. - assert {} == annotator.active_fulfillments_by_work - - # Now try it again, but give the loan a fulfillment. - fulfillment = object() - annotator = test_annotator(loan, fulfillment) - assert {work: loan} == annotator.active_loans_by_work - assert {work: fulfillment} == annotator.active_fulfillments_by_work - - # Finally, try it with a hold. - hold, ignore = pool.on_hold_to(patron) - annotator = test_annotator(hold) - assert {work: hold} == annotator.active_holds_by_work - assert {} == annotator.active_loans_by_work - assert {} == annotator.active_fulfillments_by_work - - def test_single_item_feed_without_work(self, db: DatabaseTransactionFixture): - """If a licensepool has no work or edition the single_item_feed mustn't raise an exception""" - mock = MagicMock() - # A loan without a pool - loan = Loan() - loan.patron = db.patron() - assert ( - LibraryLoanAndHoldAnnotator.single_item_feed(mock, loan) - == NOT_FOUND_ON_REMOTE - ) - - work = db.work(with_license_pool=True) - pool = get_one(db.session, LicensePool, work_id=work.id) - assert isinstance(pool, LicensePool) - # Pool with no work, and the presentation edition has no work either - pool.work_id = None - work.presentation_edition_id = None - db.session.commit() - assert ( - LibraryLoanAndHoldAnnotator.single_item_feed(mock, pool) - == NOT_FOUND_ON_REMOTE - ) - - # pool with no work and no presentation edition - pool.presentation_edition_id = None - db.session.commit() - assert ( - LibraryLoanAndHoldAnnotator.single_item_feed(mock, pool) - == NOT_FOUND_ON_REMOTE - ) - - def test_choose_best_hold_for_work(self, db: DatabaseTransactionFixture): - # First create two license pools for the same work so we could create two holds for the same work. - patron = db.patron() - - coll_1 = db.collection(name="Collection 1") - coll_2 = db.collection(name="Collection 2") - - work = db.work() - - pool_1 = db.licensepool( - edition=work.presentation_edition, open_access=False, collection=coll_1 - ) - pool_2 = db.licensepool( - edition=work.presentation_edition, open_access=False, collection=coll_2 - ) - - hold_1, _ = pool_1.on_hold_to(patron) - hold_2, _ = pool_2.on_hold_to(patron) - - # When there is no licenses_owned/available on one license pool the LibraryLoanAndHoldAnnotator should choose - # hold associated with the other license pool. - pool_1.licenses_owned = 0 - pool_1.licenses_available = 0 - - assert hold_2 == LibraryLoanAndHoldAnnotator.choose_best_hold_for_work( - [hold_1, hold_2] - ) - - # Now we have different number of licenses owned across two LPs and the same hold position. - # Hold associated with LP with more owned licenses will be chosen as best. 
- pool_1.licenses_owned = 2 - - pool_2.licenses_owned = 3 - pool_2.licenses_available = 0 - - hold_1.position = 7 - hold_2.position = 7 - - assert hold_2 == LibraryLoanAndHoldAnnotator.choose_best_hold_for_work( - [hold_1, hold_2] - ) - - def test_annotate_work_entry(self, db: DatabaseTransactionFixture): - library = db.default_library() - patron = db.patron() - identifier = db.identifier() - lane = WorkList() - lane.initialize( - library, - ) - annotator = LibraryLoanAndHoldAnnotator(None, lane, library, patron) - feed = AcquisitionFeed(db.session, "title", "url", [], annotator=annotator) - - # Annotate time tracking - opds_for_distributors = db.collection( - protocol=ExternalIntegration.OPDS_FOR_DISTRIBUTORS - ) - work = db.work(with_license_pool=True, collection=opds_for_distributors) - edition = work.presentation_edition - edition.medium = EditionConstants.AUDIO_MEDIUM - loan, _ = work.active_license_pool().loan_to(patron) - annotator.active_loans_by_work = {work: loan} - - entry = feed._make_entry_xml(work, edition) - with app.test_request_context("/") as request: - request.library = library # type: ignore [attr-defined] - annotator.annotate_work_entry( - work, work.active_license_pool(), edition, identifier, feed, entry - ) - - time_tracking_links = entry.findall( - f"link[@rel='{LinkRelations.TIME_TRACKING}']" - ) - assert len(time_tracking_links) == 1 - assert ( - time_tracking_links[0].get("type") - == MediaTypes.APPLICATION_JSON_MEDIA_TYPE - ) - assert time_tracking_links[0].get("href") == annotator.url_for( - "track_playtime_events", - identifier_type=identifier.type, - identifier=identifier.identifier, - library_short_name=annotator.library.short_name, - collection_id=opds_for_distributors.id, - _external=True, - ) - - # No active loan means no tracking link - annotator.active_loans_by_work = {} - entry = feed._make_entry_xml(work, edition) - annotator.annotate_work_entry( - work, work.active_license_pool(), edition, identifier, feed, entry - ) - - time_tracking_links = entry.findall( - f"link[@rel='{LinkRelations.TIME_TRACKING}']" - ) - assert len(time_tracking_links) == 0 - - # Add the loan back in - annotator.active_loans_by_work = {work: loan} - - # Book mediums don't get time tracking - edition.medium = EditionConstants.BOOK_MEDIUM - entry = feed._make_entry_xml(work, edition) - annotator.annotate_work_entry( - work, work.active_license_pool(), edition, identifier, feed, entry - ) - - time_tracking_links = entry.findall( - f"link[@rel='{LinkRelations.TIME_TRACKING}']" - ) - assert len(time_tracking_links) == 0 - - # Non OPDS for distributor works do not get links either - work = db.work(with_license_pool=True) - edition = work.presentation_edition - edition.medium = EditionConstants.AUDIO_MEDIUM - - entry = feed._make_entry_xml(work, edition) - annotator.annotate_work_entry( - work, work.active_license_pool(), edition, identifier, feed, entry - ) - - time_tracking_links = entry.findall( - f"link[@rel='{LinkRelations.TIME_TRACKING}']" - ) - assert len(time_tracking_links) == 0 diff --git a/tests/api/test_opds2.py b/tests/api/test_opds2.py index 5a57845b4f..74cd8868ee 100644 --- a/tests/api/test_opds2.py +++ b/tests/api/test_opds2.py @@ -1,176 +1,26 @@ import io -import json from unittest.mock import MagicMock, patch -from urllib.parse import parse_qs, quote, urlparse import pytest from requests import Response from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory from werkzeug import Response as wkResponse -from api.app import app from api.circulation import 
FulfillmentInfo from api.circulation_exceptions import CannotFulfill from api.controller import CirculationManager -from api.opds2 import ( - OPDS2NavigationsAnnotator, - OPDS2PublicationsAnnotator, - TokenAuthenticationFulfillmentProcessor, -) -from core.lane import Facets, Pagination +from api.opds2 import TokenAuthenticationFulfillmentProcessor from core.model.collection import Collection from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.datasource import DataSource from core.model.patron import Loan -from core.model.resource import Hyperlink from core.opds2_import import OPDS2Importer, RWPMManifestParser from core.problem_details import INVALID_CREDENTIALS -from tests.fixtures.api_controller import ( - CirculationControllerFixture, - ControllerFixture, -) +from tests.fixtures.api_controller import ControllerFixture from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.opds2_files import OPDS2FilesFixture -class OPDS2FeedControllerFixture: - def __init__(self, circulation_fixture: CirculationControllerFixture): - self.db = circulation_fixture.db - self.circulation_fixture = circulation_fixture - self.annotator = OPDS2PublicationsAnnotator( - "https://example.org/opds2", - Facets.default(self.db.default_library()), - Pagination.default(), - self.db.default_library(), - ) - self.controller = self.circulation_fixture.manager.opds2_feeds - - -@pytest.fixture(scope="function") -def opds2_feed_controller( - circulation_fixture: CirculationControllerFixture, -) -> OPDS2FeedControllerFixture: - return OPDS2FeedControllerFixture(circulation_fixture) - - -class TestOPDS2FeedController: - def test_publications_feed(self, opds2_feed_controller: OPDS2FeedControllerFixture): - circ = opds2_feed_controller.circulation_fixture - with circ.request_context_with_library("/"): - response = opds2_feed_controller.controller.publications() - assert response.status_code == 200 - feed = json.loads(response.data) - assert "metadata" in feed - assert "links" in feed - assert "publications" in feed - - -class OPDS2PublicationAnnotatorFixture: - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - self.annotator = OPDS2PublicationsAnnotator( - "https://example.org/opds2", - Facets.default(db.default_library()), - Pagination.default(), - db.default_library(), - ) - - -@pytest.fixture(scope="function") -def opds2_publication_annotator( - db: DatabaseTransactionFixture, -) -> OPDS2PublicationAnnotatorFixture: - return OPDS2PublicationAnnotatorFixture(db) - - -class TestOPDS2PublicationAnnotator: - def test_loan_link( - self, opds2_publication_annotator: OPDS2PublicationAnnotatorFixture - ): - work = opds2_publication_annotator.db.work() - idn = work.presentation_edition.primary_identifier - with app.test_request_context("/"): - link = opds2_publication_annotator.annotator.loan_link( - work.presentation_edition - ) - assert Hyperlink.BORROW == link["rel"] - assert ( - quote( - f"/{opds2_publication_annotator.db.default_library().short_name}/works/{idn.type}/{idn.identifier}/borrow" - ) - == link["href"] - ) - - def test_self_link( - self, opds2_publication_annotator: OPDS2PublicationAnnotatorFixture - ): - work = opds2_publication_annotator.db.work() - idn = work.presentation_edition.primary_identifier - with app.test_request_context("/"): - link = opds2_publication_annotator.annotator.self_link( - work.presentation_edition - ) - assert link["rel"] == "self" - assert ( - quote( - 
f"/{opds2_publication_annotator.db.default_library().short_name}/works/{idn.type}/{idn.identifier}" - ) - == link["href"] - ) - - def test_facet_url( - self, opds2_publication_annotator: OPDS2PublicationAnnotatorFixture - ): - db = opds2_publication_annotator.db - facet = Facets( - db.default_library(), Facets.COLLECTION_FEATURED, None, None, None, None - ) - with app.test_request_context("/"): - link = opds2_publication_annotator.annotator.facet_url(facet) - parsed = urlparse(link) - assert parsed.hostname == "localhost" - assert parsed.path == f"/{db.default_library().short_name}/opds2/publications" - assert parse_qs(parsed.query) == dict( - order=["author"], - available=["all"], - collection=["featured"], - distributor=["All"], - collectionName=["All"], - ) - - -class OPDS2NavigationAnnotatorFixture: - def __init__(self, db: DatabaseTransactionFixture): - self.db = db - self.annotator = OPDS2NavigationsAnnotator( - "/", - Facets.default(db.default_library()), - Pagination.default(), - db.default_library(), - title="Navigation", - ) - - -@pytest.fixture(scope="function") -def opds2_navigation_annotator( - db: DatabaseTransactionFixture, -) -> OPDS2NavigationAnnotatorFixture: - return OPDS2NavigationAnnotatorFixture(db) - - -class TestOPDS2NavigationAnnotator: - def test_navigation( - self, opds2_navigation_annotator: OPDS2NavigationAnnotatorFixture - ): - with app.test_request_context("/"): - navigation = opds2_navigation_annotator.annotator.navigation_collection() - assert len(navigation) == 1 - assert ( - navigation[0]["href"] - == f"/{opds2_navigation_annotator.db.default_library().short_name}/opds2/publications" - ) - - class TestTokenAuthenticationFulfillmentProcessor: @patch("api.opds2.HTTP") def test_fulfill(self, mock_http, db: DatabaseTransactionFixture): diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index 883907683f..225cade1ac 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -29,9 +29,9 @@ SessionManager, create, ) -from core.opds import AcquisitionFeed from core.util.datetime_helpers import datetime_utc, utc_now from core.util.flask_util import OPDSFeedResponse, Response +from core.util.opds_writer import OPDSFeed from scripts import ( AdobeAccountIDResetScript, CacheFacetListsPerLane, @@ -377,7 +377,7 @@ def page(cls, **kwargs): # we get a Flask Response containing an OPDS feed. response = script.do_generate(lane, facets, pagination) assert isinstance(response, OPDSFeedResponse) - assert AcquisitionFeed.ACQUISITION_FEED_TYPE == response.content_type + assert OPDSFeed.ACQUISITION_FEED_TYPE == response.content_type assert response.get_data(as_text=True).startswith(" 0) - - if isinstance(work_or_edition, Work): - # It also removes the link from the cached OPDS entries. - for url in [full_url, thumbnail_url]: - assert url not in work.simple_opds_entry - assert url not in work.verbose_opds_entry - return True def reset_cover(): @@ -908,9 +898,6 @@ def reset_cover(): work.calculate_presentation(search_index_client=index) assert full_url == work.cover_full_url assert thumbnail_url == work.cover_thumbnail_url - for url in [full_url, thumbnail_url]: - assert url in work.simple_opds_entry - assert url in work.verbose_opds_entry # Suppressing the cover removes the cover from the work. 
index = external_search_fake_fixture.external_search @@ -1647,27 +1634,6 @@ def test_for_unchecked_subjects(self, db: DatabaseTransactionFixture): classification2.subject.checked = True assert [] == qu.all() - def test_calculate_opds_entries(self, db: DatabaseTransactionFixture): - """Verify that calculate_opds_entries sets both simple and verbose - entries. - """ - work = db.work() - - work.calculate_opds_entries(verbose=False) - simple_entry = work.simple_opds_entry - assert simple_entry.startswith(" timestamp may be different. - assert len(simple_entry) == len(work.simple_opds_entry) - - # The verbose OPDS entry is longer than the simple one. - assert work.verbose_opds_entry.startswith(" len(simple_entry) - def test_calculate_marc_record(self, db: DatabaseTransactionFixture): work = db.work(with_license_pool=True) diff --git a/tests/core/test_coverage.py b/tests/core/test_coverage.py index 0a7b095ed8..3054d08852 100644 --- a/tests/core/test_coverage.py +++ b/tests/core/test_coverage.py @@ -9,7 +9,6 @@ CoverageProviderProgress, IdentifierCoverageProvider, MARCRecordWorkCoverageProvider, - OPDSEntryWorkCoverageProvider, PresentationReadyWorkCoverageProvider, WorkClassificationCoverageProvider, WorkPresentationEditionCoverageProvider, @@ -2195,7 +2194,6 @@ def test_process_item(self, db: DatabaseTransactionFixture): policy.choose_edition, policy.set_edition_metadata, policy.choose_cover, - policy.regenerate_opds_entries, policy.update_search_index, ] ) @@ -2218,7 +2216,6 @@ def test_process_item(self, db: DatabaseTransactionFixture): policy.choose_edition, policy.set_edition_metadata, policy.choose_cover, - policy.regenerate_opds_entries, policy.update_search_index, policy.classify, policy.choose_summary, @@ -2227,26 +2224,6 @@ def test_process_item(self, db: DatabaseTransactionFixture): ) -class TestOPDSEntryWorkCoverageProvider: - def test_run(self, db: DatabaseTransactionFixture): - provider = OPDSEntryWorkCoverageProvider(db.session) - work = db.work() - work.simple_opds_entry = "old junk" - work.verbose_opds_entry = "old long junk" - - # The work is not presentation-ready, so nothing happens. - provider.run() - assert "old junk" == work.simple_opds_entry - assert "old long junk" == work.verbose_opds_entry - - # The work is presentation-ready, so its OPDS entries are - # regenerated. - work.presentation_ready = True - provider.run() - assert work.simple_opds_entry.startswith(" tag indicates a role of 'author', so there's no - # need for an explicitly specified role property. - assert "author" == author.tag - [name] = author - assert "name" == name.tag - assert "King, Steven" == name.text - assert {} == author.attrib - - # The tag includes an explicitly specified role - # property to explain the nature of the contribution. 
- assert "contributor" == contributor.tag - [name] = contributor - assert "name" == name.tag - assert "Frakes, Jonathan" == name.text - role_attrib = "{%s}role" % AtomFeed.OPF_NS - assert ( - Contributor.MARC_ROLE_CODES[Contributor.NARRATOR_ROLE] - == contributor.attrib[role_attrib] - ) - - def test_annotate_work_entry_adds_tags(self, db: DatabaseTransactionFixture): - work = db.work(with_license_pool=True, with_open_access_download=True) - work.last_update_time = datetime_utc(2018, 2, 5, 7, 39, 49, 580651) - [pool] = work.license_pools - pool.availability_time = datetime_utc(2015, 1, 1) - - entry: list = [] - # This will create four extra tags which could not be - # generated in the cached entry because they depend on the - # active LicensePool or identifier: the Atom ID, the distributor, - # the date published and the date updated. - annotator = Annotator() - annotator.annotate_work_entry(work, pool, None, None, None, entry) - [id, distributor, published, updated] = entry - - id_tag = etree.tounicode(id) - assert "id" in id_tag - assert pool.identifier.urn in id_tag - - assert 'ProviderName="Gutenberg"' in etree.tounicode(distributor) - - published_tag = etree.tounicode(published) - assert "published" in published_tag - assert "2015-01-01" in published_tag - - updated_tag = etree.tounicode(updated) - assert "updated" in updated_tag - assert "2018-02-05" in updated_tag - - entry = [] - # We can pass in a specific update time to override the one - # found in work.last_update_time. - annotator.annotate_work_entry( - work, - pool, - None, - None, - None, - entry, - updated=datetime_utc(2017, 1, 2, 3, 39, 49, 580651), - ) - [id, distributor, published, updated] = entry - assert "updated" in etree.tounicode(updated) - assert "2017-01-02" in etree.tounicode(updated) - - -class TestAnnotatorsFixture: - db: DatabaseTransactionFixture - session: Session - - -@pytest.fixture -def annotators_fixture( - db, -) -> TestAnnotatorsFixture: - fix = TestAnnotatorsFixture() - fix.db = db - fix.session = db.session - return fix - - -class TestAnnotators: - def test_all_subjects(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) - - work = db.work(genre="Fiction", with_open_access_download=True) - edition = work.presentation_edition - identifier = edition.primary_identifier - source1 = DataSource.lookup(session, DataSource.GUTENBERG) - source2 = DataSource.lookup(session, DataSource.OCLC) - - subjects = [ - (source1, Subject.FAST, "fast1", "name1", 1), - (source1, Subject.LCSH, "lcsh1", "name2", 1), - (source2, Subject.LCSH, "lcsh1", "name2", 1), - (source1, Subject.LCSH, "lcsh2", "name3", 3), - ( - source1, - Subject.DDC, - "300", - "Social sciences, sociology & anthropology", - 1, - ), - ] - - for source, subject_type, subject, name, weight in subjects: - identifier.classify(source, subject_type, subject, name, weight=weight) - - # Mock Work.all_identifier_ids (called by VerboseAnnotator.categories) - # so we can track the value that was passed in for `cutoff`. - def mock_all_identifier_ids(policy=None): - work.called_with_policy = policy - # Do the actual work so that categories() gets the - # correct information. 
- return work.original_all_identifier_ids(policy) - - work.original_all_identifier_ids = work.all_identifier_ids - work.all_identifier_ids = mock_all_identifier_ids - category_tags = VerboseAnnotator.categories(work) - - # When we are generating subjects as part of an OPDS feed, by - # default we set a cutoff of 100 equivalent identifiers. This - # gives us reasonable worst-case performance at the cost of - # not showing every single random subject under which an - # extremely popular book is filed. - assert 100 == work.called_with_policy.equivalent_identifier_cutoff - - ddc_uri = Subject.uri_lookup[Subject.DDC] - rating_value = "{http://schema.org/}ratingValue" - assert [ - { - "term": "300", - rating_value: 1, - "label": "Social sciences, sociology & anthropology", - } - ] == category_tags[ddc_uri] - - fast_uri = Subject.uri_lookup[Subject.FAST] - assert [{"term": "fast1", "label": "name1", rating_value: 1}] == category_tags[ - fast_uri - ] - - lcsh_uri = Subject.uri_lookup[Subject.LCSH] - assert [ - {"term": "lcsh1", "label": "name2", rating_value: 2}, - {"term": "lcsh2", "label": "name3", rating_value: 3}, - ] == sorted(category_tags[lcsh_uri], key=lambda x: x[rating_value]) - - genre_uri = Subject.uri_lookup[Subject.SIMPLIFIED_GENRE] - assert [ - dict(label="Fiction", term=Subject.SIMPLIFIED_GENRE + "Fiction") - ] == category_tags[genre_uri] - - def test_appeals(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) - - work = db.work(with_open_access_download=True) - work.appeal_language = 0.1 - work.appeal_character = 0.2 - work.appeal_story = 0.3 - work.appeal_setting = 0.4 - work.calculate_opds_entries(verbose=True) - - category_tags = VerboseAnnotator.categories(work) - appeal_tags = category_tags[Work.APPEALS_URI] - expect = [ - (Work.APPEALS_URI + Work.LANGUAGE_APPEAL, Work.LANGUAGE_APPEAL, 0.1), - (Work.APPEALS_URI + Work.CHARACTER_APPEAL, Work.CHARACTER_APPEAL, 0.2), - (Work.APPEALS_URI + Work.STORY_APPEAL, Work.STORY_APPEAL, 0.3), - (Work.APPEALS_URI + Work.SETTING_APPEAL, Work.SETTING_APPEAL, 0.4), - ] - actual = [ - (x["term"], x["label"], x["{http://schema.org/}ratingValue"]) - for x in appeal_tags - ] - assert set(expect) == set(actual) - - def test_detailed_author(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) - - c, ignore = db.contributor("Familyname, Givenname") - c.display_name = "Givenname Familyname" - c.family_name = "Familyname" - c.wikipedia_name = "Givenname Familyname (Author)" - c.viaf = "100" - c.lc = "n100" - - author_tag = VerboseAnnotator.detailed_author(c) - - tag_string = etree.tounicode(author_tag) - assert "Givenname FamilynameFamilyname, GivennameGivenname Familyname (Author)http://viaf.org/viaf/100http://id.loc.gov/authorities/names/n100 tags and one - # tag, for the illustrator. 
- assert ["author", "author", "contributor"] == [x.tag for x in tags] - assert [None, None, illustrator_code] == [ - x.attrib.get(role_attrib) for x in tags - ] - - def test_ratings(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) - - work = db.work(with_license_pool=True, with_open_access_download=True) - work.quality = 1.0 / 3 - work.popularity = 0.25 - work.rating = 0.6 - work.calculate_opds_entries(verbose=True) - feed = AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [work], - VerboseAnnotator, - ) - tag = feed.create_entry(work, None) - assert isinstance(tag, etree._Element) - - nsmap = dict(schema="http://schema.org/") - ratings = [ - ( - rating.get("{http://schema.org/}ratingValue"), - rating.get("{http://schema.org/}additionalType"), - ) - for rating in tag.xpath("schema:Rating", namespaces=nsmap) - ] - expected = [ - ("0.3333", Measurement.QUALITY), - ("0.2500", Measurement.POPULARITY), - ("0.6000", None), - ] - assert set(expected) == set(ratings) - - def test_subtitle(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) - - work = db.work(with_license_pool=True, with_open_access_download=True) - work.presentation_edition.subtitle = "Return of the Jedi" - work.calculate_opds_entries() - - raw_feed = str( - AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [work], - Annotator, - ) - ) - assert "schema:alternativeHeadline" in raw_feed - assert work.presentation_edition.subtitle in raw_feed - - feed = feedparser.parse(str(raw_feed)) - alternative_headline = feed["entries"][0]["schema_alternativeheadline"] - assert work.presentation_edition.subtitle == alternative_headline - - # If there's no subtitle, the subtitle tag isn't included. - work.presentation_edition.subtitle = None - work.calculate_opds_entries() - raw_feed = str( - AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [work], - Annotator, - ) - ) - - assert "schema:alternativeHeadline" not in raw_feed - assert "Return of the Jedi" not in raw_feed - [entry] = feedparser.parse(str(raw_feed))["entries"] - assert "schema_alternativeheadline" not in list(entry.items()) - - def test_series(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) - - work = db.work(with_license_pool=True, with_open_access_download=True) - work.presentation_edition.series = "Harry Otter and the Lifetime of Despair" - work.presentation_edition.series_position = 4 - work.calculate_opds_entries() - - raw_feed = str( - AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [work], - Annotator, - ) - ) - assert "schema:Series" in raw_feed - assert work.presentation_edition.series in raw_feed - - feed = feedparser.parse(str(raw_feed)) - schema_entry = feed["entries"][0]["schema_series"] - assert work.presentation_edition.series == schema_entry["name"] - assert ( - str(work.presentation_edition.series_position) - == schema_entry["schema:position"] - ) - - # The series position can be 0, for a prequel for example. 
- work.presentation_edition.series_position = 0 - work.calculate_opds_entries() - - raw_feed = str( - AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [work], - Annotator, - ) - ) - assert "schema:Series" in raw_feed - assert work.presentation_edition.series in raw_feed - - feed = feedparser.parse(str(raw_feed)) - schema_entry = feed["entries"][0]["schema_series"] - assert work.presentation_edition.series == schema_entry["name"] - assert ( - str(work.presentation_edition.series_position) - == schema_entry["schema:position"] - ) - - # If there's no series title, the series tag isn't included. - work.presentation_edition.series = None - work.calculate_opds_entries() - raw_feed = str( - AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [work], - Annotator, - ) - ) - - assert "schema:Series" not in raw_feed - assert "Lifetime of Despair" not in raw_feed - [entry] = feedparser.parse(str(raw_feed))["entries"] - assert "schema_series" not in list(entry.items()) - - def test_samples(self, annotators_fixture: TestAnnotatorsFixture): - data, db, session = ( - annotators_fixture, - annotators_fixture.db, - annotators_fixture.session, - ) - - work = db.work(with_license_pool=True) - edition = work.presentation_edition - - resource = Resource(url="sampleurl") - session.add(resource) - session.commit() - - sample_link = Hyperlink( - rel=Hyperlink.SAMPLE, - resource_id=resource.id, - identifier_id=edition.primary_identifier_id, - data_source_id=2, - ) - session.add(sample_link) - session.commit() - - with DBStatementCounter(db.database.connection) as counter: - links = Annotator.samples(edition) - count = counter.count - - assert len(links) == 1 - assert links[0].id == sample_link.id - assert links[0].resource.url == "sampleurl" - # accessing resource should not be another query - assert counter.count == count - - -class TestOPDSFixture: - db: DatabaseTransactionFixture - fiction: Lane - fantasy: Lane - romance: Lane - contemporary_romance: Lane - conf: WorkList - history: Lane - ya: Lane - - def _fake_hit(self, work: Work): - return Hit({"_source": dict(work_id=work.id)}) - - -@pytest.fixture -def opds_fixture(db: DatabaseTransactionFixture) -> TestOPDSFixture: - data = TestOPDSFixture() - data.db = db - data.fiction = db.lane("Fiction") - data.fiction.fiction = True - data.fiction.audiences = [Classifier.AUDIENCE_ADULT] - - data.fantasy = db.lane("Fantasy", parent=data.fiction, genres="Fantasy") - data.history = db.lane("History", genres="History") - data.ya = db.lane("Young Adult") - data.ya.audiences = [Classifier.AUDIENCE_YOUNG_ADULT] - data.romance = db.lane("Romance", genres="Romance") - data.romance.fiction = True - data.contemporary_romance = db.lane( - "Contemporary Romance", parent=data.romance, genres="Contemporary Romance" - ) - - data.conf = WorkList() - data.conf.initialize( - db.default_library(), - children=[data.fiction, data.fantasy, data.history, data.ya, data.romance], - ) - return data - - -class TestOPDS: - @staticmethod - def _links(entry, rel=None): - if "feed" in entry: - entry = entry["feed"] - links = sorted(entry["links"], key=lambda x: (x["rel"], x.get("title"))) - r = [] - for l in links: - if ( - not rel - or l["rel"] == rel - or (isinstance(rel, list) and l["rel"] in rel) - ): - r.append(l) - return r - - @staticmethod - def _assert_xml_equal(a, b): - # Compare xml is the same, we use etree to canonicalize the xml - # then compare the canonical versions - assert etree.tostring(a, method="c14n2") == etree.tostring( - etree.fromstring(b), 
method="c14n2" - ) - - def test_acquisition_link(self, opds_fixture: TestOPDSFixture): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - m = AcquisitionFeed.acquisition_link - rel = AcquisitionFeed.BORROW_REL - href = db.fresh_url() - - # A doubly-indirect acquisition link. - a = m(rel, href, ["text/html", "text/plain", "application/pdf"]) - self._assert_xml_equal( - a, - '' % href, - ) - - # A direct acquisition link. - b = m(rel, href, ["application/epub"]) - self._assert_xml_equal( - b, - '' - % href, - ) - - # A direct acquisition link to a document with embedded access restriction rules. - c = m( - rel, - href, - [ - "application/audiobook+json;profile=http://www.feedbooks.com/audiobooks/access-restriction" - ], - ) - self._assert_xml_equal( - c, - '' - % href, - ) - - def test_group_uri(self, opds_fixture: TestOPDSFixture): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True, authors="Alice") - [lp] = work.license_pools - - annotator = MockAnnotatorWithGroup() - feed = AcquisitionFeed( - session, "test", "http://the-url.com/", [work], annotator - ) - u = str(feed) - parsed = feedparser.parse(u) - [group_link] = parsed.entries[0]["links"] - expect_uri, expect_title = annotator.group_uri(work, lp, lp.identifier) - assert OPDSFeed.GROUP_REL == group_link["rel"] - assert expect_uri == group_link["href"] - assert expect_title == group_link["title"] - - def test_acquisition_feed(self, opds_fixture: TestOPDSFixture): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True, authors="Alice") - feed = AcquisitionFeed(session, "test", "http://the-url.com/", [work]) - u = str(feed) - assert '' in u - parsed = feedparser.parse(u) - [with_author] = parsed["entries"] - assert "Alice" == with_author["authors"][0]["name"] - - def test_acquisition_feed_includes_license_source( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - feed = AcquisitionFeed(session, "test", "http://the-url.com/", [work]) - gutenberg = DataSource.lookup(session, DataSource.GUTENBERG) - - # The tag containing the license - # source should show up once and only once. (At one point a - # bug caused it to be added to the generated OPDS twice.) - expect = '' % ( - gutenberg.name - ) - assert 1 == str(feed).count(expect) - - # If the LicensePool is a stand-in produced for internal - # processing purposes, it does not represent an actual license for - # the book, and the tag is not - # included. 
- internal = DataSource.lookup(session, DataSource.INTERNAL_PROCESSING) - work.license_pools[0].data_source = internal - feed = AcquisitionFeed(session, "test", "http://the-url.com/", [work]) - assert "" in u - - def test_acquisition_feed_includes_permanent_work_id( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - feed = AcquisitionFeed(session, "test", "http://the-url.com/", [work]) - u = str(feed) - parsed = feedparser.parse(u) - entry = parsed["entries"][0] - assert work.presentation_edition.permanent_work_id == entry["simplified_pwid"] - - def test_lane_feed_contains_facet_links( - self, - opds_fixture: TestOPDSFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - lane = db.lane() - facets = Facets.default(db.default_library()) - - migration = end_to_end_search_fixture.external_search_index.start_migration() - assert migration is not None - migration.finish() - - cached_feed = AcquisitionFeed.page( - session, - "title", - "http://the-url.com/", - lane, - MockAnnotator, - facets=facets, - search_engine=end_to_end_search_fixture.external_search_index, - ) - - u = str(cached_feed) - parsed = feedparser.parse(u) - by_title = parsed["feed"] - - [self_link] = self._links(by_title, "self") - assert "http://the-url.com/" == self_link["href"] - facet_links = self._links(by_title, AcquisitionFeed.FACET_REL) - - library = db.default_library() - - def link_for_facets(facets): - return [x for x in facet_links if facets.query_string in x["href"]] - - facets = Facets(library, None, None, None, None, None) - for i1, i2, new_facets, selected in facets.facet_groups: - links = link_for_facets(new_facets) - if selected: - # This facet set is already selected, so it should - # show up three times--once for every facet group. - assert 4 == len(links) - else: - # This facet set is not selected, so it should have one - # transition link. - assert 1 == len(links) - - # As we'll see below, the feed parser parses facetGroup as - # facetgroup and activeFacet as activefacet. As we see here, - # that's not a problem with the generator code. - assert "opds:facetgroup" not in u - assert "opds:facetGroup" in u - assert "opds:activefacet" not in u - assert "opds:activeFacet" in u - - def test_acquisition_feed_includes_available_and_issued_tag( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - today = datetime.date.today() - today_s = today.strftime("%Y-%m-%d") - the_past = today - datetime.timedelta(days=2) - the_past_s = the_past.strftime("%Y-%m-%d") - the_distant_past = today - datetime.timedelta(days=100) - the_distant_past_s = the_distant_past.strftime(AtomFeed.TIME_FORMAT_NAIVE) - the_future = today + datetime.timedelta(days=2) - - # This work has both issued and published. issued will be used - # for the dc:issued tag. - work1 = db.work(with_open_access_download=True) - work1.presentation_edition.issued = today - work1.presentation_edition.published = the_past - work1.license_pools[0].availability_time = the_distant_past - - # This work only has published. published will be used for the - # dc:issued tag. 
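[Editor's note: a self-contained sketch of the dcterms:issued rule that the four works in this test exercise. The helper name and the sample dates below are illustrative only; the real behaviour lives in the feed-generation code being changed in this patch.]

import datetime

def dcterms_issued(issued, published, today=None):
    # Prefer the explicit issued date, fall back to the published date.
    today = today or datetime.date.today()
    date = issued or published
    # Omit the tag when no date is known or the date lies in the future.
    if date is None or date > today:
        return None
    return date.strftime("%Y-%m-%d")

today = datetime.date(2023, 8, 23)
past = today - datetime.timedelta(days=2)
future = today + datetime.timedelta(days=2)
assert dcterms_issued(today, past, today) == "2023-08-23"   # issued wins over published
assert dcterms_issued(None, past, today) == "2023-08-21"    # published used as fallback
assert dcterms_issued(None, None, today) is None            # nothing to report
assert dcterms_issued(future, future, today) is None        # future dates are ignored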
- work2 = db.work(with_open_access_download=True) - work2.presentation_edition.published = the_past - work2.license_pools[0].availability_time = the_distant_past - - # This work has neither published nor issued. There will be no - # dc:issued tag. - work3 = db.work(with_open_access_download=True) - work3.license_pools[0].availability_time = None - - # This work is issued in the future. Since this makes no - # sense, there will be no dc:issued tag. - work4 = db.work(with_open_access_download=True) - work4.presentation_edition.issued = the_future - work4.presentation_edition.published = the_future - work4.license_pools[0].availability_time = None - - for w in work1, work2, work3, work4: - w.calculate_opds_entries(verbose=False) - - session.commit() - works = session.query(Work) - with_times = AcquisitionFeed(session, "test", "url", works, MockAnnotator) - u = str(with_times) - assert "dcterms:issued" in u - - with_times = etree.parse(StringIO(u)) - entries = OPDSXMLParser._xpath(with_times, "/atom:feed/atom:entry") - parsed = [] - for entry in entries: - title_element = OPDSXMLParser._xpath1(entry, "atom:title") - assert title_element is not None - title = title_element.text - issued = OPDSXMLParser._xpath1(entry, "dcterms:issued") - if issued is not None: - issued = issued.text - published = OPDSXMLParser._xpath1(entry, "atom:published") - if published is not None: - published = published.text - parsed.append( - dict( - title=title, - issued=issued, - published=published, - ) - ) - e1, e2, e3, e4 = sorted(parsed, key=lambda x: x["title"]) - assert today_s == e1["issued"] - assert the_distant_past_s == e1["published"] - - assert the_past_s == e2["issued"] - assert the_distant_past_s == e2["published"] - - assert None == e3["issued"] - assert None == e3["published"] - - assert None == e4["issued"] - assert None == e4["published"] - - def test_acquisition_feed_includes_sample_links( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - edition = work.presentation_edition - - representation, _ = db.representation( - "sampleurl", media_type="application/epub+zip" - ) - resource = Resource(url="sampleurl", representation_id=representation.id) - link = Hyperlink( - rel=Hyperlink.SAMPLE, - identifier_id=edition.primary_identifier_id, - data_source_id=2, - ) - link.resource = resource - - work1 = db.work(with_open_access_download=True) - edition1 = work1.presentation_edition - - link1 = Hyperlink( - rel=Hyperlink.SAMPLE, - identifier_id=edition1.primary_identifier_id, - data_source_id=2, - ) - resource1 = Resource(url="sampleurl1", representation_id=representation.id) - link1.resource = resource1 - - # unrelated work/link should not show up - work2 = db.work(with_open_access_download=True) - edition2 = work2.presentation_edition - - link2 = Hyperlink( - rel=Hyperlink.SAMPLE, - identifier_id=edition2.primary_identifier_id, - data_source_id=2, - ) - link2.resource = Resource( - url="notsampleurl", representation_id=representation.id - ) - - session.add_all([resource, link, link1, link2]) - - # clear cache before links were added - work.simple_opds_entry = None - work1.simple_opds_entry = None - work2.simple_opds_entry = None - - session.commit() - - feed = AcquisitionFeed(session, "TestFeed", "http://some-url", [work, work1]) - - atom_links = OPDSXMLParser._xpath( - etree.parse(StringIO(str(feed))), - f"atom:entry/atom:link[@rel='{Hyperlink.CLIENT_SAMPLE}']", - ) - - 
assert len(atom_links) == 2 - assert atom_links[0].attrib["href"] == resource.url - assert atom_links[1].attrib["href"] == resource1.url - - def test_acquisition_feed_includes_publisher_and_imprint_tag( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - work.presentation_edition.publisher = "The Publisher" - work.presentation_edition.imprint = "The Imprint" - work2 = db.work(with_open_access_download=True) - work2.presentation_edition.publisher = None - - session.commit() - for w in work, work2: - w.calculate_opds_entries(verbose=False) - - works = session.query(Work) - feed_with_publisher = AcquisitionFeed( - session, "test", "url", works, MockAnnotator - ) - with_publisher = feedparser.parse(str(feed_with_publisher)) - entries = sorted(with_publisher["entries"], key=lambda x: x["title"]) - assert "The Publisher" == entries[0]["dcterms_publisher"] - assert "The Imprint" == entries[0]["bib_publisherimprint"] - assert "publisher" not in entries[1] - - def test_acquisition_feed_includes_audience_as_category( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - work.audience = "Young Adult" - work2 = db.work(with_open_access_download=True) - work2.audience = "Children" - work2.target_age = NumericRange(7, 9, "[]") - work3 = db.work(with_open_access_download=True) - work3.audience = None - work4 = db.work(with_open_access_download=True) - work4.audience = "Adult" - work4.target_age = NumericRange(18) - - session.commit() - - for w in work, work2, work3, work4: - w.calculate_opds_entries(verbose=False) - - works = session.query(Work) - feed_with_audience = AcquisitionFeed(session, "test", "url", works) - with_audience = feedparser.parse(str(feed_with_audience)) - ya, children, no_audience, adult = sorted( - with_audience["entries"], key=lambda x: int(x["title"]) - ) - scheme = "http://schema.org/audience" - assert [("Young Adult", "Young Adult")] == [ - (x["term"], x["label"]) for x in ya["tags"] if x["scheme"] == scheme - ] - - assert [("Children", "Children")] == [ - (x["term"], x["label"]) for x in children["tags"] if x["scheme"] == scheme - ] - - age_scheme = Subject.uri_lookup[Subject.AGE_RANGE] - assert [("7-9", "7-9")] == [ - (x["term"], x["label"]) - for x in children["tags"] - if x["scheme"] == age_scheme - ] - - assert [] == [ - (x["term"], x["label"]) - for x in no_audience["tags"] - if x["scheme"] == scheme - ] - - # Even though the 'Adult' book has a target age, the target - # age is not shown, because target age is only a relevant - # concept for children's and YA books. 
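[Editor's note: a minimal, standalone sketch of the audience/target-age rule asserted above and below: an audience category is emitted whenever an audience is set, but a target-age category only appears for Children and Young Adult titles. The scheme constants and helper name are placeholders, not the project's API.]

AUDIENCE_SCHEME = "http://schema.org/audience"
AGE_RANGE_SCHEME = "scheme:age-range"  # placeholder; the real URI comes from Subject.AGE_RANGE

def audience_categories(audience, target_age=None):
    tags = []
    if audience:
        tags.append({"scheme": AUDIENCE_SCHEME, "term": audience, "label": audience})
    # Target age is only meaningful for children's and young adult titles.
    if audience in ("Children", "Young Adult") and target_age:
        low, high = target_age
        term = f"{low}-{high}" if high is not None and high != low else str(low)
        tags.append({"scheme": AGE_RANGE_SCHEME, "term": term, "label": term})
    return tags

assert len(audience_categories("Children", (7, 9))) == 2   # audience plus age range
assert len(audience_categories("Adult", (18, None))) == 1  # age range suppressed for adults
assert audience_categories(None) == []                     # no audience, no categories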
- assert [] == [ - (x["term"], x["label"]) for x in adult["tags"] if x["scheme"] == age_scheme - ] - - def test_acquisition_feed_includes_category_tags_for_appeals( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - work.appeal_language = 0.1 - work.appeal_character = 0.2 - work.appeal_story = 0.3 - work.appeal_setting = 0.4 - - work2 = db.work(with_open_access_download=True) - - for w in work, work2: - w.calculate_opds_entries(verbose=False) - - session.commit() - works = session.query(Work) - acq_feed = AcquisitionFeed(session, "test", "url", works) - feed = feedparser.parse(str(acq_feed)) - entries = sorted(feed["entries"], key=lambda x: int(x["title"])) - - tags = entries[0]["tags"] - matches = [ - (x["term"], x["label"]) for x in tags if x["scheme"] == Work.APPEALS_URI - ] - assert [ - (Work.APPEALS_URI + "Character", "Character"), - (Work.APPEALS_URI + "Language", "Language"), - (Work.APPEALS_URI + "Setting", "Setting"), - (Work.APPEALS_URI + "Story", "Story"), - ] == sorted(matches) - - tags = entries[1]["tags"] - matches = [ - (x["term"], x["label"]) for x in tags if x["scheme"] == Work.APPEALS_URI - ] - assert [] == matches - - def test_acquisition_feed_includes_category_tags_for_fiction_status( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - work.fiction = False - - work2 = db.work(with_open_access_download=True) - work2.fiction = True - - for w in work, work2: - w.calculate_opds_entries(verbose=False) - - session.commit() - works = session.query(Work) - acq_feed = AcquisitionFeed(session, "test", "url", works) - feed = feedparser.parse(str(acq_feed)) - entries = sorted(feed["entries"], key=lambda x: int(x["title"])) - - scheme = "http://librarysimplified.org/terms/fiction/" - - assert [(scheme + "Nonfiction", "Nonfiction")] == [ - (x["term"], x["label"]) for x in entries[0]["tags"] if x["scheme"] == scheme - ] - assert [(scheme + "Fiction", "Fiction")] == [ - (x["term"], x["label"]) for x in entries[1]["tags"] if x["scheme"] == scheme - ] - - def test_acquisition_feed_includes_category_tags_for_genres( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(with_open_access_download=True) - g1, ignore = Genre.lookup(session, "Science Fiction") - g2, ignore = Genre.lookup(session, "Romance") - work.genres = [g1, g2] - - work.calculate_opds_entries(verbose=False) - - session.commit() - works = session.query(Work) - acq_feed = AcquisitionFeed(session, "test", "url", works) - feed = feedparser.parse(str(acq_feed)) - entries = sorted(feed["entries"], key=lambda x: int(x["title"])) - - scheme = Subject.SIMPLIFIED_GENRE - assert [ - (scheme + "Romance", "Romance"), - (scheme + "Science%20Fiction", "Science Fiction"), - ] == sorted( - (x["term"], x["label"]) for x in entries[0]["tags"] if x["scheme"] == scheme - ) - - def test_acquisition_feed_omits_works_with_no_active_license_pool( - self, opds_fixture: TestOPDSFixture - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(title="open access", with_open_access_download=True) - no_license_pool = db.work(title="no license pool", with_license_pool=False) - no_download = db.work(title="no download", 
with_license_pool=True) - no_download.license_pools[0].open_access = True - not_open_access = db.work("not open access", with_license_pool=True) - not_open_access.license_pools[0].open_access = False - session.commit() - - # We get a feed with two entries--the open-access book and - # the non-open-access book--and two error messages--the book with - # no license pool and the book but with no download. - works = session.query(Work) - by_title_feed = AcquisitionFeed(session, "test", "url", works) - by_title_raw = str(by_title_feed) - by_title = feedparser.parse(by_title_raw) - - # We have two entries... - assert 2 == len(by_title["entries"]) - assert ["not open access", "open access"] == sorted( - x["title"] for x in by_title["entries"] - ) - - # ...and two messages. - assert 2 == by_title_raw.count( - "I've heard about this work but have no active licenses for it." - ) - - def test_acquisition_feed_includes_image_links(self, opds_fixture: TestOPDSFixture): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work = db.work(genre=Fantasy, with_open_access_download=True) - work.presentation_edition.cover_thumbnail_url = "http://thumbnail/b" - work.presentation_edition.cover_full_url = "http://full/a" - work.calculate_opds_entries(verbose=False) - - feed = feedparser.parse(str(work.simple_opds_entry)) - links = sorted( - x["href"] for x in feed["entries"][0]["links"] if "image" in x["rel"] - ) - assert ["http://full/a", "http://thumbnail/b"] == links - - def test_messages(self, opds_fixture: TestOPDSFixture): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - """Test the ability to include OPDSMessage objects for a given URN in - lieu of a proper ODPS entry. - """ - messages = [ - OPDSMessage("urn:foo", 400, _("msg1")), - OPDSMessage("urn:bar", 500, _("msg2")), - ] - feed = str( - AcquisitionFeed( - session, "test", "http://the-url.com/", [], precomposed_entries=messages - ) - ) - for m in messages: - assert m.urn in feed - assert str(m.status_code) in feed - assert str(m.message) in feed - - def test_precomposed_entries(self, opds_fixture: TestOPDSFixture): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - # Test the ability to include precomposed OPDS entries - # in a feed. - - entry = AcquisitionFeed.E.entry() - entry.text = "foo" - feed = str( - AcquisitionFeed( - session, - "test", - "http://the-url.com/", - works=[], - precomposed_entries=[entry], - ) - ) - assert "foo" in feed - - def test_page_feed( - self, - opds_fixture: TestOPDSFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - # Test the ability to create a paginated feed of works for a given - # lane. 
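[Editor's note: the paginated-feed tests below rely on a simple link rule: the first page has a "next" link but no "previous" link, and later pages gain a "previous" link. A self-contained sketch of that rule, with an illustrative URL shape rather than the application's real URLs:]

def pagination_links(offset, size, total):
    # The first page has no "previous" link; the last page has no "next" link.
    links = []
    if offset > 0:
        links.append({"rel": "previous", "href": f"?after={max(offset - size, 0)}&size={size}"})
    if offset + size < total:
        links.append({"rel": "next", "href": f"?after={offset + size}&size={size}"})
    return links

assert [l["rel"] for l in pagination_links(0, 1, 2)] == ["next"]      # first page
assert [l["rel"] for l in pagination_links(1, 1, 2)] == ["previous"]  # second (last) page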
- lane = data.contemporary_romance - work1 = db.work(genre=Contemporary_Romance, with_open_access_download=True) - work2 = db.work(genre=Contemporary_Romance, with_open_access_download=True) - - search_engine = end_to_end_search_fixture.external_search_index - docs = search_engine.start_migration() - assert docs is not None - docs.add_documents( - search_engine.create_search_documents_from_works([work1, work2]) - ) - docs.finish() - - facets = Facets.default(db.default_library()) - pagination = Pagination(size=1) - - def make_page(pagination): - return AcquisitionFeed.page( - session, - "test", - db.fresh_url(), - lane, - MockAnnotator, - pagination=pagination, - search_engine=search_engine, - ) - - cached_works = str(make_page(pagination)) - parsed = feedparser.parse(cached_works) - assert work1.title == parsed["entries"][0]["title"] - - # Make sure the links are in place. - [up_link] = self._links(parsed, "up") - assert MockAnnotator.groups_url(lane.parent) == up_link["href"] - assert lane.parent.display_name == up_link["title"] - - [start] = self._links(parsed, "start") - assert MockAnnotator.groups_url(None) == start["href"] - assert MockAnnotator.top_level_title() == start["title"] - - [next_link] = self._links(parsed, "next") - assert ( - MockAnnotator.feed_url(lane, facets, pagination.next_page) - == next_link["href"] - ) - - # This was the first page, so no previous link. - assert [] == self._links(parsed, "previous") - - # Now get the second page and make sure it has a 'previous' link. - cached_works = str(make_page(pagination.next_page)) - parsed = feedparser.parse(cached_works) - [previous] = self._links(parsed, "previous") - assert MockAnnotator.feed_url(lane, facets, pagination) == previous["href"] - assert work2.title == parsed["entries"][0]["title"] - - # The feed has breadcrumb links - parentage = list(lane.parentage) - root = ET.fromstring(cached_works) - breadcrumbs = root.find("{%s}breadcrumbs" % AtomFeed.SIMPLIFIED_NS) - links = breadcrumbs - - # There's one breadcrumb link for each parent Lane, plus one for - # the top-level. - assert isinstance(links, ET.Element) - assert len(parentage) + 1 == len(links) - assert MockAnnotator.top_level_title() == links[0].get("title") - assert MockAnnotator.default_lane_url() == links[0].get("href") - for i, lane in enumerate(parentage): - assert lane.display_name == links[i + 1].get("title") - assert MockAnnotator.lane_url(lane) == links[i + 1].get("href") - - def test_page_feed_for_worklist( - self, - opds_fixture: TestOPDSFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - # Test the ability to create a paginated feed of works for a - # WorkList instead of a Lane. 
- lane = data.conf - work1 = db.work(genre=Contemporary_Romance, with_open_access_download=True) - work2 = db.work(genre=Contemporary_Romance, with_open_access_download=True) - - search_engine = end_to_end_search_fixture.external_search_index - docs = search_engine.start_migration() - assert docs is not None - docs.add_documents( - search_engine.create_search_documents_from_works([work1, work2]) - ) - docs.finish() - - facets = Facets.default(db.default_library()) - pagination = Pagination(size=1) - - def make_page(pagination): - return AcquisitionFeed.page( - session, - "test", - db.fresh_url(), - lane, - MockAnnotator, - pagination=pagination, - search_engine=search_engine, - ) - - cached_works = make_page(pagination) - parsed = feedparser.parse(str(cached_works)) - assert work1.title == parsed["entries"][0]["title"] - - # Make sure the links are in place. - # This is the top-level, so no up link. - assert [] == self._links(parsed, "up") - - [start] = self._links(parsed, "start") - assert MockAnnotator.groups_url(None) == start["href"] - assert MockAnnotator.top_level_title() == start["title"] - - [next_link] = self._links(parsed, "next") - assert ( - MockAnnotator.feed_url(lane, facets, pagination.next_page) - == next_link["href"] - ) - - # This was the first page, so no previous link. - assert [] == self._links(parsed, "previous") - - # Now get the second page and make sure it has a 'previous' link. - cached_works = str(make_page(pagination.next_page)) - parsed = feedparser.parse(cached_works) - [previous] = self._links(parsed, "previous") - assert MockAnnotator.feed_url(lane, facets, pagination) == previous["href"] - assert work2.title == parsed["entries"][0]["title"] - - # The feed has no parents, so no breadcrumbs. - root = ET.fromstring(cached_works) - breadcrumbs = root.find("{%s}breadcrumbs" % AtomFeed.SIMPLIFIED_NS) - assert None == breadcrumbs - - def test_from_query(self, opds_fixture: TestOPDSFixture): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - """Test creating a feed for a custom list from a query.""" - - display_name = "custom_list" - staff_data_source = DataSource.lookup(session, DataSource.LIBRARY_STAFF) - list, ignore = create( - session, - CustomList, - name=db.fresh_str(), - library=db.default_library(), - data_source=staff_data_source, - ) - work = db.work(with_license_pool=True) - work2 = db.work(with_license_pool=True) - list.add_entry(work) - list.add_entry(work2) - - # get all the entries from a custom list - query = ( - session.query(Work) - .join(Work.custom_list_entries) - .filter(CustomListEntry.list_id == list.id) - ) - - pagination = Pagination(size=1) - worklist = WorkList() - worklist.initialize( - db.default_library(), customlists=[list], display_name=display_name - ) - - def url_for_custom_list(library, list): - def url_fn(after): - base = "http://%s/" % display_name - if after: - base += "?after=%s&size=1" % after - return base - - return url_fn - - url_fn = url_for_custom_list(db.default_library(), list) - - def from_query(pagination): - return AcquisitionFeed.from_query( - query, - session, - list.name, - "url", - pagination, - url_fn, - MockAnnotator, - ) - - works = from_query(pagination) - parsed = feedparser.parse(str(works)) - assert 1 == len(parsed["entries"]) - assert list.name == parsed["feed"].title - - [next_link] = self._links(parsed, "next") - assert ( - MockAnnotator.feed_url(worklist, pagination=pagination.next_page) - == next_link["href"] - ) - - # This was the first page, so no previous 
link. - assert [] == self._links(parsed, "previous") - - # Now get the second page and make sure it has a 'previous' link. - works = from_query(pagination.next_page) - parsed = feedparser.parse(str(works)) - [previous_link] = self._links(parsed, "previous") - assert ( - MockAnnotator.feed_url(worklist, pagination=pagination.previous_page) - == previous_link["href"] - ) - assert 1 == len(parsed["entries"]) - assert [] == self._links(parsed, "next") - - def test_groups_feed( - self, - opds_fixture: TestOPDSFixture, - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - # Test the ability to create a grouped feed of recommended works for - # a given lane. - - # Every time it's invoked, the mock search index is going to - # return everything in its index. That's fine -- we're only - # concerned with _how_ it's invoked -- how many times and in - # what context. - # - # So it's sufficient to create a single work, and the details - # of the work don't matter. It just needs to have a LicensePool - # so it'll show up in the OPDS feed. - work = db.work(title="An epic tome", with_open_access_download=True) - search_engine = MagicMock(spec=ExternalSearchIndex) - # We expect 1 hit per lane - search_engine.query_works_multi.return_value = [ - [data._fake_hit(work)], - [data._fake_hit(work)], - [data._fake_hit(work)], - ] - - # The lane setup does matter a lot -- that's what controls - # how many times the search functionality is invoked. - epic_fantasy = db.lane( - "Epic Fantasy", parent=data.fantasy, genres=["Epic Fantasy"] - ) - urban_fantasy = db.lane( - "Urban Fantasy", parent=data.fantasy, genres=["Urban Fantasy"] - ) - - annotator = MockAnnotatorWithGroup() - private = object() - cached_groups = AcquisitionFeed.groups( - session, - "test", - db.fresh_url(), - data.fantasy, - annotator, - max_age=0, - search_engine=search_engine, - search_debug=True, - private=private, - ) - - # The result is an OPDSFeedResponse object. The 'private' - # argument, unused by groups(), was passed along into the - # constructor. - assert isinstance(cached_groups, OPDSFeedResponse) - assert private == cached_groups.private - # One query per lane available - assert len(search_engine.query_works_multi.call_args[0][0]) == 3 - - parsed = feedparser.parse(cached_groups.data) - - # There are three entries in three lanes. - e1, e2, e3 = parsed["entries"] - - # Each entry has one and only one link. - [l1], [l2], [l3] = (x["links"] for x in parsed["entries"]) - - # Those links are 'collection' links that classify the - # works under their subgenres. - assert all([l["rel"] == "collection" for l in (l1, l2)]) - - assert l1["href"] == "http://group/Epic Fantasy" - assert l1["title"] == "Group Title for Epic Fantasy!" - assert l2["href"] == "http://group/Urban Fantasy" - assert l2["title"] == "Group Title for Urban Fantasy!" - assert l3["href"] == "http://group/Fantasy" - assert l3["title"] == "Group Title for Fantasy!" - - # The feed itself has an 'up' link which points to the - # groups for Fiction, and a 'start' link which points to - # the top-level groups feed. 
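[Editor's note: the breadcrumb assertions in these grouped-feed tests expect one link for the top-level catalogue plus one per ancestor lane, oldest ancestor first. A standalone sketch of that structure; the Lane class and URLs here are stand-ins, not the application's models:]

from dataclasses import dataclass
from typing import List, Optional

@dataclass
class Lane:
    display_name: str
    parent: Optional["Lane"] = None

def breadcrumbs(lane: Lane, top_title: str = "All Books", top_url: str = "/groups/") -> List[dict]:
    # Collect the ancestors of the lane, nearest parent first.
    ancestors = []
    node = lane.parent
    while node is not None:
        ancestors.append(node)
        node = node.parent
    # The first crumb is always the top level, then the ancestors oldest first.
    crumbs = [{"title": top_title, "href": top_url}]
    for ancestor in reversed(ancestors):
        crumbs.append({"title": ancestor.display_name, "href": f"/groups/{ancestor.display_name}"})
    return crumbs

fiction = Lane("Fiction")
fantasy = Lane("Fantasy", parent=fiction)
assert [c["title"] for c in breadcrumbs(fantasy)] == ["All Books", "Fiction"]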
- [up_link] = self._links(parsed["feed"], "up") - assert "http://groups/%s" % data.fiction.id == up_link["href"] - assert "Fiction" == up_link["title"] - - [start_link] = self._links(parsed["feed"], "start") - assert "http://groups/" == start_link["href"] - assert annotator.top_level_title() == start_link["title"] - - # The feed has breadcrumb links - ancestors = list(data.fantasy.parentage) - root = ET.fromstring(cached_groups.data) - breadcrumbs = root.find("{%s}breadcrumbs" % AtomFeed.SIMPLIFIED_NS) - links = breadcrumbs - assert isinstance(links, ET.Element) - assert len(ancestors) + 1 == len(links) - assert annotator.top_level_title() == links[0].get("title") - assert annotator.default_lane_url() == links[0].get("href") - for i, lane in enumerate(reversed(ancestors)): - assert lane.display_name == links[i + 1].get("title") - assert annotator.lane_url(lane) == links[i + 1].get("href") - - def test_empty_groups_feed( - self, - opds_fixture: TestOPDSFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - search_engine = end_to_end_search_fixture.external_search_index - docs = search_engine.start_migration() - assert docs is not None - docs.finish() - - # Test the case where a grouped feed turns up nothing. - - # A Lane, and a Work not in the Lane. - test_lane = db.lane("Test Lane", genres=["Mystery"]) - - # Mock Annotator. - class Mock(MockAnnotator): - def annotate_feed(self, feed, worklist): - self.called = True - - annotator = Mock() - - # Build a grouped feed for the lane. - feed = AcquisitionFeed.groups( - session, - "test", - db.fresh_url(), - test_lane, - annotator, - max_age=0, - search_engine=search_engine, - ) - - # A grouped feed was cached for the lane, but there were no - # relevant works found,. - cached = get_one(session, CachedFeed, lane=test_lane) - assert isinstance(cached, CachedFeed) - assert CachedFeed.GROUPS_TYPE == cached.type - - # So the feed contains no entries. - parsed = feedparser.parse(str(feed)) - assert [] == parsed["entries"] - - # but our mock Annotator got a chance to modify the feed in place. - assert True == annotator.called - - def test_search_feed( - self, - opds_fixture: TestOPDSFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - # Test the ability to create a paginated feed of works for a given - # search query. - fantasy_lane = data.fantasy - work1 = db.work(genre=Epic_Fantasy, with_open_access_download=True) - work2 = db.work(genre=Epic_Fantasy, with_open_access_download=True) - - pagination = Pagination(size=1) - search_client = ExternalSearchIndexFake(session) - search_client.mock_query_works([work1, work2]) - facets = SearchFacets(order="author", min_score=10) - - private = object() - - def make_page(pagination): - return AcquisitionFeed.search( - session, - "test", - db.fresh_url(), - fantasy_lane, - search_client, - "fantasy", - pagination=pagination, - facets=facets, - annotator=MockAnnotator, - private=private, - ) - - response = make_page(pagination) - assert isinstance(response, OPDSFeedResponse) - assert OPDSFeed.DEFAULT_MAX_AGE == response.max_age - assert OPDSFeed.ACQUISITION_FEED_TYPE == response.content_type - assert private == response.private - - parsed = feedparser.parse(response.data) - assert work1.title == parsed["entries"][0]["title"] - - # Make sure the links are in place. 
- [start] = self._links(parsed, "start") - assert MockAnnotator.groups_url(None) == start["href"] - assert MockAnnotator.top_level_title() == start["title"] - - [next_link] = self._links(parsed, "next") - expect = MockAnnotator.search_url( - fantasy_lane, "test", pagination.next_page, facets=facets - ) - assert expect == next_link["href"] - - # This is tested elsewhere, but let's make sure - # SearchFacets-specific fields like order and min_score are - # propagated to the next-page URL. - assert all(x in expect for x in ("order=author", "min_score=10")) - - # This was the first page, so no previous link. - assert [] == self._links(parsed, "previous") - - # Make sure there's an "up" link to the lane that was searched - [up_link] = self._links(parsed, "up") - uplink_url = MockAnnotator.lane_url(fantasy_lane) - assert uplink_url == up_link["href"] - assert fantasy_lane.display_name == up_link["title"] - - # Now get the second page and make sure it has a 'previous' link. - feed = str(make_page(pagination.next_page)) - parsed = feedparser.parse(feed) - [previous] = self._links(parsed, "previous") - expect = MockAnnotator.search_url( - fantasy_lane, "test", pagination, facets=facets - ) - assert expect == previous["href"] - assert all(x in expect for x in ("order=author", "min_score=10")) - - assert work2.title == parsed["entries"][0]["title"] - - # The feed has no breadcrumb links, since we're not - # searching the lane -- just using some aspects of the lane - # to guide the search. - root = ET.fromstring(feed) - breadcrumbs = root.find("{%s}breadcrumbs" % AtomFeed.SIMPLIFIED_NS) - assert None == breadcrumbs - - def test_cache( - self, - opds_fixture: TestOPDSFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - data, db, session = ( - opds_fixture, - opds_fixture.db, - opds_fixture.db.session, - ) - - work1 = db.work( - title="The Original Title", - genre=Epic_Fantasy, - with_open_access_download=True, - ) - fantasy_lane = data.fantasy - - search_engine = end_to_end_search_fixture.external_search_index - docs = search_engine.start_migration() - assert docs is not None - errors = docs.add_documents( - search_engine.create_search_documents_from_works([work1]) - ) - assert errors == [] - docs.finish() - - def make_page(): - return AcquisitionFeed.page( - session, - "test", - db.fresh_url(), - fantasy_lane, - MockAnnotator, - pagination=Pagination.default(), - search_engine=search_engine, - ) - - response1 = make_page() - assert work1.title in str(response1) - cached = get_one(session, CachedFeed, lane=fantasy_lane) - assert isinstance(cached, CachedFeed) - old_timestamp = cached.timestamp - - work2 = db.work( - title="A Brand New Title", - genre=Epic_Fantasy, - with_open_access_download=True, - ) - recv = search_engine.start_updating_search_documents() - recv.add_documents(search_engine.create_search_documents_from_works([work2])) - recv.finish() - - # The new work does not show up in the feed because - # we get the old cached version. - response2 = make_page() - assert work2.title not in str(response2) - assert cached.timestamp == old_timestamp - - # Change the WorkList's MAX_CACHE_AGE to disable caching, and - # we get a brand new page with the new work. 
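[Editor's note: the caching test below depends on one rule: a cached feed is reused while it is younger than the worklist's maximum cache age, and a max age of zero forces regeneration. A self-contained, in-memory sketch of that idea only; the real code persists CachedFeed rows with database timestamps:]

import time

class FeedCache:
    def __init__(self):
        self._store = {}  # key -> (timestamp, payload)

    def get_or_generate(self, key, max_age, generate):
        entry = self._store.get(key)
        if entry is not None and max_age > 0 and time.time() - entry[0] < max_age:
            return entry[1]              # a fresh cached copy is reused as-is
        payload = generate()             # otherwise the feed is rebuilt and re-cached
        self._store[key] = (time.time(), payload)
        return payload

cache = FeedCache()
assert cache.get_or_generate("fantasy", 600, lambda: "feed v1") == "feed v1"
assert cache.get_or_generate("fantasy", 600, lambda: "feed v2") == "feed v1"  # cached copy wins
assert cache.get_or_generate("fantasy", 0, lambda: "feed v2") == "feed v2"    # max_age=0 disables caching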
- fantasy_lane.MAX_CACHE_AGE = 0 - response3 = make_page() - assert isinstance(cached.timestamp, datetime.datetime) - assert isinstance(old_timestamp, datetime.datetime) - assert cached.timestamp > old_timestamp - assert work2.title in str(response3) - - -class TestAcquisitionFeed: - def test_page( - self, - db, - external_search_fake_fixture: ExternalSearchFixtureFake, - ): - session = db.session - client = external_search_fake_fixture.search.search_multi_client() - - # The search client is supposed to return a set of result sets. - fake_work = MagicMock() - fake_work.work_id = 23 - client.execute = Mock(return_value=[[fake_work]]) - # The code calls "add" on the search client, which is supposed to return a new - # search client with the old search client embedded into it. We don't do that - # here as we're completely faking the search results anyway. - client.add = Mock(return_value=client) - - # Verify that AcquisitionFeed.page() returns an appropriate OPDSFeedResponse - - wl = WorkList() - wl.initialize(db.default_library()) - private = object() - response = AcquisitionFeed.page( - session, - "feed title", - "url", - wl, - MockAnnotator, - max_age=10, - private=private, - search_engine=external_search_fake_fixture.external_search, - ) - - # The result is an OPDSFeedResponse. The 'private' argument, - # unused by page(), was passed along into the constructor. - assert isinstance(response, OPDSFeedResponse) - assert 10 == response.max_age - assert private == response.private - - assert "feed title" in str(response) - - def test_as_response(self, db: DatabaseTransactionFixture): - session = db.session - - # Verify the ability to convert an AcquisitionFeed object to an - # OPDSFeedResponse containing the feed. - feed = AcquisitionFeed(session, "feed title", "http://url/", [], MockAnnotator) - - # Some other piece of code set expectations for how this feed should - # be cached. - response = feed.as_response(max_age=101, private=False) - assert 200 == response.status_code - - # We get an OPDSFeedResponse containing the feed in its - # entity-body. - assert isinstance(response, OPDSFeedResponse) - assert "feed title" in str(response) - - # The caching expectations are respected. - assert 101 == response.max_age - assert False == response.private - - def test_as_error_response(self, db: DatabaseTransactionFixture): - session = db.session - - # Verify the ability to convert an AcquisitionFeed object to an - # OPDSFeedResponse that is to be treated as an error message. - feed = AcquisitionFeed(session, "feed title", "http://url/", [], MockAnnotator) - - # Some other piece of code set expectations for how this feed should - # be cached. - kwargs = dict(max_age=101, private=False) - - # But we know that something has gone wrong and the feed is - # being served as an error message. - response = feed.as_error_response(**kwargs) - assert isinstance(response, OPDSFeedResponse) - - # The content of the feed is unchanged. - assert 200 == response.status_code - assert "feed title" in str(response) - - # But the max_age and private settings have been overridden. - assert 0 == response.max_age - assert True == response.private - - def test_add_entrypoint_links(self): - """Verify that add_entrypoint_links calls _entrypoint_link - on every EntryPoint passed in. 
- """ - - class Mock: - attrs = dict(href="the response") - - def __init__(self): - self.calls = [] - - def __call__(self, *args): - self.calls.append(args) - return self.attrs - - mock = Mock() - old_entrypoint_link = AcquisitionFeed._entrypoint_link - AcquisitionFeed._entrypoint_link = mock - - xml = etree.fromstring("") - feed = OPDSFeed("title", "url") - feed.feed = xml - entrypoints = [AudiobooksEntryPoint, EbooksEntryPoint] - url_generator = object() - AcquisitionFeed.add_entrypoint_links( - feed, url_generator, entrypoints, EbooksEntryPoint, "Some entry points" - ) - - # Two different calls were made to the mock method. - c1, c2 = mock.calls - - # The first entry point is not selected. - assert c1 == ( - url_generator, - AudiobooksEntryPoint, - EbooksEntryPoint, - True, - "Some entry points", - ) - # The second one is selected. - assert c2 == ( - url_generator, - EbooksEntryPoint, - EbooksEntryPoint, - False, - "Some entry points", - ) - - # Two identical tags were added to the tag, one - # for each call to the mock method. - l1, l2 = list(xml.iterchildren()) - for l in l1, l2: - assert "link" == l.tag - assert mock.attrs == l.attrib - AcquisitionFeed._entrypoint_link = old_entrypoint_link - - # If there is only one facet in the facet group, no links are - # added. - xml = etree.fromstring("") - feed.feed = xml - mock.calls = [] - entrypoints = [EbooksEntryPoint] - AcquisitionFeed.add_entrypoint_links( - feed, url_generator, entrypoints, EbooksEntryPoint, "Some entry points" - ) - assert [] == mock.calls - - def test_entrypoint_link(self): - """Test the _entrypoint_link method's ability to create - attributes for tags. - """ - m = AcquisitionFeed._entrypoint_link - - def g(entrypoint): - """A mock URL generator.""" - return "%s" % (entrypoint.INTERNAL_NAME) - - # If the entry point is not registered, None is returned. - assert None == m(g, object(), object(), True, "group") - - # Now make a real set of link attributes. - l = m(g, AudiobooksEntryPoint, AudiobooksEntryPoint, False, "Grupe") - - # The link is identified as belonging to an entry point-type - # facet group. - assert l["rel"] == AcquisitionFeed.FACET_REL - assert ( - l["{http://librarysimplified.org/terms/}facetGroupType"] - == FacetConstants.ENTRY_POINT_REL - ) - assert "Grupe" == l["{http://opds-spec.org/2010/catalog}facetGroup"] - - # This facet is the active one in the group. - assert "true" == l["{http://opds-spec.org/2010/catalog}activeFacet"] - - # The URL generator was invoked to create the href. - assert l["href"] == g(AudiobooksEntryPoint) - - # The facet title identifies it as a way to look at audiobooks. - assert EntryPoint.DISPLAY_TITLES[AudiobooksEntryPoint] == l["title"] - - # Now try some variants. - - # Here, the entry point is the default one. - l = m(g, AudiobooksEntryPoint, AudiobooksEntryPoint, True, "Grupe") - - # This may affect the URL generated for the facet link. - assert l["href"] == g(AudiobooksEntryPoint) - - # Here, the entry point for which we're generating the link is - # not the selected one -- EbooksEntryPoint is. - l = m(g, AudiobooksEntryPoint, EbooksEntryPoint, True, "Grupe") - - # This means the 'activeFacet' attribute is not present. 
- assert "{http://opds-spec.org/2010/catalog}activeFacet" not in l - - def test_license_tags_no_loan_or_hold(self, db: DatabaseTransactionFixture): - edition, pool = db.edition(with_license_pool=True) - availability, holds, copies = AcquisitionFeed.license_tags(pool, None, None) - assert dict(status="available") == availability.attrib - assert dict(total="0") == holds.attrib - assert dict(total="1", available="1") == copies.attrib - - def test_license_tags_hold_position(self, db: DatabaseTransactionFixture): - # When a book is placed on hold, it typically takes a while - # for the LicensePool to be updated with the new number of - # holds. This test verifies the normal and exceptional - # behavior used to generate the opds:holds tag in different - # scenarios. - edition, pool = db.edition(with_license_pool=True) - patron = db.patron() - - # If the patron's hold position is less than the total number - # of holds+reserves, that total is used as opds:total. - pool.patrons_in_hold_queue = 3 - hold, is_new = pool.on_hold_to(patron, position=1) - - availability, holds, copies = AcquisitionFeed.license_tags(pool, None, hold) - assert "1" == holds.attrib["position"] - assert "3" == holds.attrib["total"] - - # If the patron's hold position is missing, we assume they - # are last in the list. - hold.position = None - availability, holds, copies = AcquisitionFeed.license_tags(pool, None, hold) - assert "3" == holds.attrib["position"] - assert "3" == holds.attrib["total"] - - # If the patron's current hold position is greater than the - # total recorded number of holds+reserves, their position will - # be used as the value of opds:total. - hold.position = 5 - availability, holds, copies = AcquisitionFeed.license_tags(pool, None, hold) - assert "5" == holds.attrib["position"] - assert "5" == holds.attrib["total"] - - # A patron earlier in the holds queue may see a different - # total number of holds, but that's fine -- it doesn't matter - # very much to that person the precise number of people behind - # them in the queue. - hold.position = 4 - availability, holds, copies = AcquisitionFeed.license_tags(pool, None, hold) - assert "4" == holds.attrib["position"] - assert "4" == holds.attrib["total"] - - # If the patron's hold position is zero (because the book is - # reserved to them), we do not represent them as having a hold - # position (so no opds:position), but they still count towards - # opds:total in the case where the LicensePool's information - # is out of date. 
- hold.position = 0 - pool.patrons_in_hold_queue = 0 - availability, holds, copies = AcquisitionFeed.license_tags(pool, None, hold) - assert "position" not in holds.attrib - assert "1" == holds.attrib["total"] - - def test_license_tags_show_unlimited_access_books( - self, db: DatabaseTransactionFixture - ): - # Arrange - edition, pool = db.edition(with_license_pool=True) - pool.open_access = False - pool.unlimited_access = True - - # Act - tags = AcquisitionFeed.license_tags(pool, None, None) - - # Assert - assert 1 == len(tags) - - [tag] = tags - - assert ("status" in tag.attrib) == True - assert "available" == tag.attrib["status"] - assert ("holds" in tag.attrib) == False - assert ("copies" in tag.attrib) == False - - def test_unlimited_access_pool_loan(self, db: DatabaseTransactionFixture): - patron = db.patron() - feed = AcquisitionFeed(db.session, "title", "url", [], annotator=None) - work = db.work(unlimited_access=True, with_license_pool=True) - pool = work.active_license_pool() - loan, _ = pool.loan_to(patron) - tags: List[ET.Element] = feed.license_tags(pool, loan, None) - - [tag] = tags - assert "since" in tag.attrib - assert "until" not in tag.attrib - - def test_single_entry(self, db: DatabaseTransactionFixture): - session = db.session - - # Here's a Work with two LicensePools. - work = db.work(with_open_access_download=True) - original_pool = work.license_pools[0] - edition, new_pool = db.edition( - with_license_pool=True, with_open_access_download=True - ) - work.license_pools.append(new_pool) - - # The presentation edition of the Work is associated with - # the first LicensePool added to it. - assert work.presentation_edition == original_pool.presentation_edition - - # This is the edition used when we create an tag for - # this Work. - private = object() - entry = AcquisitionFeed.single_entry( - session, work, MockAnnotator, private=private - ) - assert isinstance(entry, OPDSEntryResponse) - - # We provided a value for private, which was used. We didn't - # provide value for max_age, and zero was used instead of the - # ten-minute default typical for OPDS feeds. - assert 0 == entry.max_age - assert entry.private == private - - entry_data = str(entry) - assert original_pool.presentation_edition.title in entry_data - assert new_pool.presentation_edition.title not in entry_data - - # If the edition was issued before 1980, no datetime formatting error - # is raised. - work.simple_opds_entry = work.verbose_opds_entry = None - five_hundred_years = datetime.timedelta(days=(500 * 365)) - work.presentation_edition.issued = utc_now() - five_hundred_years - - entry = AcquisitionFeed.single_entry(session, work, MockAnnotator) - - expected = str(work.presentation_edition.issued.date()) - assert expected in str(entry) - - def test_single_entry_is_opds_message(self, db: DatabaseTransactionFixture): - session = db.session - - # When single_entry has to deal with an 'OPDS entry' that - # turns out to be an error message, caching rules are - # overridden to treat the 'entry' as a private error message. - work = db.work() - - # We plan on caching the OPDS entry as a public, long-lived - # document. - is_public = dict(max_age=200, private=False) - - # But something goes wrong in create_entry() and we get an - # error instead. - class MockAcquisitionFeed(AcquisitionFeed): - def create_entry(*args, **kwargs): - return OPDSMessage("urn", 500, "oops") - - response = MockAcquisitionFeed.single_entry( - session, work, object(), **is_public - ) - - # We got an OPDS entry containing the message. 
- assert isinstance(response, OPDSEntryResponse) - assert 200 == response.status_code - assert "500" in str(response) - assert "oops" in str(response) - - # Our caching preferences were overridden. - assert True == response.private - assert 0 == response.max_age - - def test_entry_cache_adds_missing_drm_namespace( - self, db: DatabaseTransactionFixture - ): - session = db.session - - work = db.work(with_open_access_download=True) - - # This work's OPDS entry was created with a namespace map - # that did not include the drm: namespace. - work.simple_opds_entry = "bar" - - # But now the annotator is set up to insert a tag with that - # namespace. - class AddDRMTagAnnotator(MockAnnotator): - @classmethod - def annotate_work_entry( - cls, work, license_pool, edition, identifier, feed, entry - ): - drm_link = OPDSFeed.makeelement("{%s}licensor" % OPDSFeed.DRM_NS) - entry.extend([drm_link]) - - # The entry is retrieved from cache and the appropriate - # namespace inserted. - entry = AcquisitionFeed.single_entry(session, work, AddDRMTagAnnotator) - assert ( - 'bar' - == str(entry) - ) - - def test_error_when_work_has_no_identifier(self, db: DatabaseTransactionFixture): - session = db.session - - # We cannot create an OPDS entry for a Work that cannot be associated - # with an Identifier. - work = db.work(title="Hello, World!", with_license_pool=True) - work.license_pools[0].identifier = None - work.presentation_edition.primary_identifier = None - entry = AcquisitionFeed.single_entry(session, work, MockAnnotator) - assert entry == None - - def test_error_when_work_has_no_licensepool(self, db: DatabaseTransactionFixture): - session = db.session - - work = db.work() - feed = AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [], - annotator=Annotator, - ) - entry = feed.create_entry(work) - expect = AcquisitionFeed.error_message( - work.presentation_edition.primary_identifier, - 403, - "I've heard about this work but have no active licenses for it.", - ) - assert expect == entry - - def test_error_when_work_has_no_presentation_edition( - self, db: DatabaseTransactionFixture - ): - session = db.session - - """We cannot create an OPDS entry (or even an error message) for a - Work that is disconnected from any Identifiers. - """ - work = db.work(title="Hello, World!", with_license_pool=True) - work.license_pools[0].presentation_edition = None - work.presentation_edition = None - feed = AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [], - annotator=Annotator, - ) - entry = feed.create_entry(work) - assert None == entry - - def test_cache_usage(self, db: DatabaseTransactionFixture): - session = db.session - - work = db.work(with_open_access_download=True) - feed = AcquisitionFeed( - session, - db.fresh_str(), - db.fresh_url(), - [], - annotator=Annotator, - ) - - # Set the Work's cached OPDS entry to something that's clearly wrong. - tiny_entry = "cached entry" - work.simple_opds_entry = tiny_entry - - # If we pass in use_cache=True, the cached value is used as a basis - # for the annotated entry. - entry = feed.create_entry(work, use_cache=True) - assert tiny_entry == work.simple_opds_entry - - # We know what the final value looks like -- it's the cached entry - # run through `Annotator.annotate_work_entry`. 
- [pool] = work.license_pools - xml = etree.fromstring(work.simple_opds_entry) - annotator = Annotator() - annotator.annotate_work_entry( - work, pool, pool.presentation_edition, pool.identifier, feed, xml - ) - assert etree.tounicode(xml) == etree.tounicode(entry) - - # If we pass in use_cache=False, a new OPDS entry is created - # from scratch, but the cache is not updated. - entry = feed.create_entry(work, use_cache=False) - assert etree.tounicode(entry) != tiny_entry - assert tiny_entry == work.simple_opds_entry - - # If we pass in force_create, a new OPDS entry is created - # and the cache is updated. - entry = feed.create_entry(work, force_create=True) - entry_string = etree.tounicode(entry) - assert entry_string != tiny_entry - assert work.simple_opds_entry != tiny_entry - - # Again, we got entry_string by running the (new) cached value - # through `Annotator.annotate_work_entry`. - full_entry = etree.fromstring(work.simple_opds_entry) - annotator.annotate_work_entry( - work, pool, pool.presentation_edition, pool.identifier, feed, full_entry - ) - assert entry_string == etree.tounicode(full_entry) - - def test_exception_during_entry_creation_is_not_reraised( - self, db: DatabaseTransactionFixture - ): - session = db.session - - # This feed will raise an exception whenever it's asked - # to create an entry. - class DoomedFeed(AcquisitionFeed): - def _create_entry(self, *args, **kwargs): - raise Exception("I'm doomed!") - - feed = DoomedFeed( - session, - db.fresh_str(), - db.fresh_url(), - [], - annotator=Annotator, - ) - work = db.work(with_open_access_download=True) - - # But calling create_entry() doesn't raise an exception, it - # just returns None. - entry = feed.create_entry(work) - assert entry == None - - def test_unfilfullable_work(self, db: DatabaseTransactionFixture): - session = db.session - - work = db.work(with_open_access_download=True) - [pool] = work.license_pools - response = AcquisitionFeed.single_entry( - session, - work, - MockUnfulfillableAnnotator, - ) - assert isinstance(response, Response) - expect = AcquisitionFeed.error_message( - pool.identifier, - 403, - "I know about this work but can offer no way of fulfilling it.", - ) - # The status code equivalent inside the OPDS message has not affected - # the status code of the Response itself. 
- assert 200 == response.status_code - assert str(expect) == str(response) - - def test_format_types(self, db: DatabaseTransactionFixture): - session = db.session - - m = AcquisitionFeed.format_types - - epub_no_drm, ignore = DeliveryMechanism.lookup( - session, Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM - ) - assert [Representation.EPUB_MEDIA_TYPE] == m(epub_no_drm) - - epub_adobe_drm, ignore = DeliveryMechanism.lookup( - session, Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM - ) - assert [DeliveryMechanism.ADOBE_DRM, Representation.EPUB_MEDIA_TYPE] == m( - epub_adobe_drm - ) - - overdrive_streaming_text, ignore = DeliveryMechanism.lookup( - session, - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, - DeliveryMechanism.OVERDRIVE_DRM, - ) - assert [ - OPDSFeed.ENTRY_TYPE, - Representation.TEXT_HTML_MEDIA_TYPE + DeliveryMechanism.STREAMING_PROFILE, - ] == m(overdrive_streaming_text) - - audiobook_drm, ignore = DeliveryMechanism.lookup( - session, - Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE, - DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_DRM, - ) - - assert [ - Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE - + DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_PROFILE - ] == m(audiobook_drm) - - # Test a case where there is a DRM scheme but no underlying - # content type. - findaway_manifest, ignore = DeliveryMechanism.lookup( - session, DeliveryMechanism.FINDAWAY_DRM, None - ) - assert [DeliveryMechanism.FINDAWAY_DRM] == AcquisitionFeed.format_types( - findaway_manifest - ) - - def test_add_breadcrumbs(self, db: DatabaseTransactionFixture): - session = db.session - _db = session - - def getElementChildren(feed): - f = feed.feed[0] - children = f - return children - - class MockFeed(AcquisitionFeed): - def __init__(self): - super().__init__(_db, "", "", [], annotator=MockAnnotator()) - self.feed = [] - - lane = db.lane(display_name="lane") - sublane = db.lane(parent=lane, display_name="sublane") - subsublane = db.lane(parent=sublane, display_name="subsublane") - subsubsublane = db.lane(parent=subsublane, display_name="subsubsublane") - - top_level = object() - ep = AudiobooksEntryPoint - - def assert_breadcrumbs(expect_breadcrumbs_for, lane, **add_breadcrumbs_kwargs): - # Create breadcrumbs leading up to `lane` and verify that - # there is a breadcrumb for everything in - # `expect_breadcrumbs_for` -- Lanes, EntryPoints, and the - # top-level lane. Verify that the titles and URLs of the - # breadcrumbs match what we expect. - # - # For easier reading, all assertions in this test are - # written as calls to this function. - feed = MockFeed() - annotator = MockAnnotator() - - feed.add_breadcrumbs(lane, **add_breadcrumbs_kwargs) - - if not expect_breadcrumbs_for: - # We are expecting no breadcrumbs at all; - # nothing should have been added to the feed. - assert [] == feed.feed - return - - # At this point we expect at least one breadcrumb. - crumbs = getElementChildren(feed) - - entrypoint_selected = False - entrypoint_query = "?entrypoint=" - - # First, compare the titles of the breadcrumbs to what was - # passed in. This makes test writing much easier. - def title(x): - if x is top_level: - return annotator.top_level_title() - elif x is ep: - return x.INTERNAL_NAME - else: - return x.display_name - - expect_titles = [title(x) for x in expect_breadcrumbs_for] - actual_titles = [x.attrib.get("title") for x in crumbs] - assert expect_titles == actual_titles - - # Now, compare the URLs of the breadcrumbs. 
This is - # trickier, mainly because the URLs change once an - # entrypoint is selected. - previous_breadcrumb_url = None - - for i, crumb in enumerate(crumbs): - expect = expect_breadcrumbs_for[i] - actual_url = crumb.attrib.get("href") - - if expect is top_level: - # Breadcrumb for the library root. - expect_url = annotator.default_lane_url() - elif expect is ep: - # Breadcrumb for the entrypoint selection. - - # Beyond this point all URLs must propagate the - # selected entrypoint. - entrypoint_selected = True - entrypoint_query += expect.INTERNAL_NAME - - # The URL for this breadcrumb is the URL for the - # previous breadcrumb with the addition of the - # entrypoint selection query. - expect_url = previous_breadcrumb_url + entrypoint_query - else: - # Breadcrumb for a lane. - - # The breadcrumb URL is determined by the - # Annotator. - lane_url = annotator.lane_url(expect) - if entrypoint_selected: - # All breadcrumbs after the entrypoint selection - # must propagate the entrypoint. - expect_url = lane_url + entrypoint_query - else: - expect_url = lane_url - - logging.debug( - "%s: expect=%s actual=%s", expect_titles[i], expect_url, actual_url - ) - assert expect_url == actual_url - - # Keep track of the URL just used, in case the next - # breadcrumb is the same URL but with an entrypoint - # selection appended. - previous_breadcrumb_url = actual_url - - # That was a complicated method, but now our assertions - # are very easy to write and understand. - - # At the top level, there are no breadcrumbs whatsoever. - assert_breadcrumbs([], None) - - # It doesn't matter if an entrypoint is selected. - assert_breadcrumbs([], None, entrypoint=ep) - - # A lane with no entrypoint -- note that the breadcrumbs stop - # _before_ the lane in question. - assert_breadcrumbs([top_level], lane) - - # If you pass include_lane=True into add_breadcrumbs, the lane - # itself is included. - assert_breadcrumbs([top_level, lane], lane, include_lane=True) - - # A lane with an entrypoint selected - assert_breadcrumbs([top_level, ep], lane, entrypoint=ep) - assert_breadcrumbs( - [top_level, ep, lane], lane, entrypoint=ep, include_lane=True - ) - - # One lane level down. - assert_breadcrumbs([top_level, lane], sublane) - assert_breadcrumbs([top_level, ep, lane], sublane, entrypoint=ep) - assert_breadcrumbs( - [top_level, ep, lane, sublane], sublane, entrypoint=ep, include_lane=True - ) - - # Two lane levels down. - assert_breadcrumbs([top_level, lane, sublane], subsublane) - assert_breadcrumbs([top_level, ep, lane, sublane], subsublane, entrypoint=ep) - - # Three lane levels down. - assert_breadcrumbs( - [top_level, lane, sublane, subsublane], - subsubsublane, - ) - - assert_breadcrumbs( - [top_level, ep, lane, sublane, subsublane], subsubsublane, entrypoint=ep - ) - - # Make the sublane a root lane for a certain patron type, and - # the breadcrumbs will be start at that lane -- we won't see - # the sublane's parent or the library root. - sublane.root_for_patron_type = ["ya"] - assert_breadcrumbs([], sublane) - - assert_breadcrumbs([sublane, subsublane], subsubsublane) - - assert_breadcrumbs( - [sublane, subsublane, subsubsublane], subsubsublane, include_lane=True - ) - - # However, if an entrypoint is selected we will see a - # breadcrumb for it between the patron root lane and its - # child. 
- assert_breadcrumbs([sublane, ep, subsublane], subsubsublane, entrypoint=ep) - - assert_breadcrumbs( - [sublane, ep, subsublane, subsubsublane], - subsubsublane, - entrypoint=ep, - include_lane=True, - ) - - def test_add_breadcrumb_links(self, db: DatabaseTransactionFixture): - class MockFeed(AcquisitionFeed): - add_link_calls = [] - add_breadcrumbs_call = None - current_entrypoint = None - - def add_link_to_feed(self, **kwargs): - self.add_link_calls.append(kwargs) - - def add_breadcrumbs(self, lane, entrypoint): - self.add_breadcrumbs_call = (lane, entrypoint) - - def show_current_entrypoint(self, entrypoint): - self.current_entrypoint = entrypoint - - annotator = MockAnnotator - feed = MockFeed(db.session, "title", "url", [], annotator=annotator) - - lane = db.lane() - sublane = db.lane(parent=lane) - ep = AudiobooksEntryPoint - feed.add_breadcrumb_links(sublane, ep) - - # add_link_to_feed was called twice, to create the 'start' and - # 'up' links. - start, up = feed.add_link_calls - assert "start" == start["rel"] - assert annotator.top_level_title() == start["title"] - - assert "up" == up["rel"] - assert lane.display_name == up["title"] - - # The Lane and EntryPoint were passed into add_breadcrumbs. - assert (sublane, ep) == feed.add_breadcrumbs_call - - # The EntryPoint was passed into show_current_entrypoint. - assert ep == feed.current_entrypoint - - def test_show_current_entrypoint(self, db: DatabaseTransactionFixture): - """Calling AcquisitionFeed.show_current_entrypoint annotates - the top-level tag with information about the currently - selected entrypoint, if any. - """ - feed = AcquisitionFeed(db.session, "title", "url", [], annotator=None) - assert feed.CURRENT_ENTRYPOINT_ATTRIBUTE not in feed.feed.attrib - - # No entry point, no annotation. - feed.show_current_entrypoint(None) - - ep = AudiobooksEntryPoint - feed.show_current_entrypoint(ep) - assert ep.URI == feed.feed.attrib[feed.CURRENT_ENTRYPOINT_ATTRIBUTE] - - def test_facet_links_unrecognized_facets(self): - # AcquisitionFeed.facet_links does not produce links for any - # facet groups or facets not known to the current version of - # the system, because it doesn't know what the links should look - # like. - class MockAnnotator: - def facet_url(self, new_facets): - return "url: " + new_facets - - class MockFacets: - @property - def facet_groups(self): - """Yield a facet group+facet 4-tuple that passes the test we're - running (which will be turned into a link), and then a - bunch that don't (which will be ignored). - """ - - # Real facet group, real facet - yield ( - Facets.COLLECTION_FACET_GROUP_NAME, - Facets.COLLECTION_FULL, - "try the featured collection instead", - True, - ) - - # Real facet group, nonexistent facet - yield ( - Facets.COLLECTION_FACET_GROUP_NAME, - "no such facet", - "this facet does not exist", - True, - ) - - # Nonexistent facet group, real facet - yield ( - "no such group", - Facets.COLLECTION_FULL, - "this facet exists but it's in a nonexistent group", - True, - ) - - # Nonexistent facet group, nonexistent facet - yield ( - "no such group", - "no such facet", - "i just don't know", - True, - ) - - class MockFeed(AcquisitionFeed): - links = [] - - @classmethod - def facet_link(cls, url, facet_title, group_title, selected): - # Return the passed-in objects as is. - return (url, facet_title, group_title, selected) - - annotator = MockAnnotator() - facets = MockFacets() - - # The only 4-tuple yielded by facet_groups was passed on to us. 
- # The link was run through MockAnnotator.facet_url(), - # and the human-readable titles were found using lookups. - # - # The other three 4-tuples were ignored since we don't know - # how to generate human-readable titles for them. - [[url, facet, group, selected]] = MockFeed.facet_links(annotator, facets) - assert "url: try the featured collection instead" == url - assert Facets.FACET_DISPLAY_TITLES[Facets.COLLECTION_FULL] == facet - assert Facets.GROUP_DISPLAY_TITLES[Facets.COLLECTION_FACET_GROUP_NAME] == group - assert True == selected - - -class TestLookupAcquisitionFeed: - @staticmethod - def _feed(session: Session, annotator=VerboseAnnotator, **kwargs): - """Helper method to create a LookupAcquisitionFeed.""" - return LookupAcquisitionFeed( - session, - "Feed Title", - "http://whatever.io", - [], - annotator=annotator, - **kwargs, - ) - - @staticmethod - def _entry( - session: Session, identifier, work, annotator=VerboseAnnotator, **kwargs - ): - """Helper method to create an entry.""" - feed = TestLookupAcquisitionFeed._feed(session, annotator, **kwargs) - entry = feed.create_entry((identifier, work)) - if isinstance(entry, OPDSMessage): - return feed, entry - if entry is not None: - entry = etree.tounicode(entry) - return feed, entry - - def test_create_entry_uses_specified_identifier( - self, db: DatabaseTransactionFixture - ): - # Here's a Work with two LicensePools. - work = db.work(with_open_access_download=True) - original_pool = work.license_pools[0] - edition, new_pool = db.edition( - with_license_pool=True, with_open_access_download=True - ) - work.license_pools.append(new_pool) - - # We can generate two different OPDS entries for a single work - # depending on which identifier we look up. - ignore, e1 = self._entry(db.session, original_pool.identifier, work) - assert original_pool.identifier.urn in e1 - assert original_pool.presentation_edition.title in e1 - assert new_pool.identifier.urn not in e1 - assert new_pool.presentation_edition.title not in e1 - - # Passing in the other identifier gives an OPDS entry with the - # same bibliographic data (taken from the original pool's - # presentation edition) but with different identifier - # information. - i = new_pool.identifier - ignore, e2 = self._entry(db.session, i, work) - assert new_pool.identifier.urn in e2 - assert new_pool.presentation_edition.title not in e2 - assert original_pool.presentation_edition.title in e2 - assert original_pool.identifier.urn not in e2 - - def test_error_on_mismatched_identifier(self, db: DatabaseTransactionFixture): - """We get an error if we try to make it look like an Identifier lookup - retrieved a Work that's not actually associated with that Identifier. - """ - work = db.work(with_open_access_download=True) - - # Here's an identifier not associated with any LicensePool or - # Work. - identifier = db.identifier() - - # It doesn't make sense to make an OPDS feed out of that - # Identifier and a totally random Work. - expect_error = 'I tried to generate an OPDS entry for the identifier "%s" using a Work not associated with that identifier.' - feed, entry = self._entry(db.session, identifier, work) - assert entry == OPDSMessage(identifier.urn, 500, expect_error % identifier.urn) - - # Even if the Identifier does have a Work, if the Works don't - # match, we get the same error. 
- edition, lp = db.edition(with_license_pool=True) - feed, entry = self._entry(db.session, lp.identifier, work) - assert entry == OPDSMessage( - lp.identifier.urn, 500, expect_error % lp.identifier.urn - ) - - def test_error_when_work_has_no_licensepool(self, db: DatabaseTransactionFixture): - """Under most circumstances, a Work must have at least one - LicensePool for a lookup to succeed. - """ - - # Here's a work with no LicensePools. - work = db.work(title="Hello, World!", with_license_pool=False) - identifier = work.presentation_edition.primary_identifier - feed, entry = self._entry(db.session, identifier, work) - # By default, a work is treated as 'not in the collection' if - # there is no LicensePool for it. - isinstance(entry, OPDSMessage) - assert 404 == entry.status_code - assert "Identifier not found in collection" == entry.message - - def test_unfilfullable_work(self, db: DatabaseTransactionFixture): - work = db.work(with_open_access_download=True) - [pool] = work.license_pools - feed, entry = self._entry( - db.session, pool.identifier, work, MockUnfulfillableAnnotator - ) - expect = AcquisitionFeed.error_message( - pool.identifier, - 403, - "I know about this work but can offer no way of fulfilling it.", - ) - assert expect == entry - - def test_create_entry_uses_cache_for_all_licensepools_for_work( - self, db: DatabaseTransactionFixture - ): - """A Work's cached OPDS entries can be reused by all LicensePools for - that Work, even LicensePools associated with different - identifiers. - """ - - class InstrumentableActiveLicensePool(VerboseAnnotator): - """A mock class that lets us control the output of - active_license_pool. - """ - - ACTIVE = None - - @classmethod - def active_licensepool_for(cls, work): - return cls.ACTIVE - - feed = self._feed(db.session, annotator=InstrumentableActiveLicensePool()) - - # Here are two completely different LicensePools for the same work. - work = db.work(with_license_pool=True) - work.verbose_opds_entry = "Cached" - [pool1] = work.license_pools - - collection2 = db.collection() - edition2 = db.edition() - pool2 = db.licensepool(edition=edition2, collection=collection2) - identifier2 = pool2.identifier - work.license_pools.append(pool2) - - # Regardless of which LicensePool the annotator thinks is - # 'active', passing in (identifier, work) will use the cache. - m = feed.create_entry - annotator = feed.annotator - - annotator.ACTIVE = pool1 - assert "Cached" == m((pool1.identifier, work)).text - - annotator.ACTIVE = pool2 - assert "Cached" == m((pool2.identifier, work)).text - - # If for some reason we pass in an identifier that is not - # associated with the active license pool, we don't get - # anything. - work.license_pools = [pool1] - result = m((identifier2, work)) - assert isinstance(result, OPDSMessage) - assert "using a Work not associated with that identifier." 
in result.message - - -class TestEntrypointLinkInsertionFixture: - db: DatabaseTransactionFixture - mock: Any - no_eps: WorkList - entrypoints: List[MediumEntryPoint] - wl: WorkList - lane: Lane - annotator: Type[MockAnnotator] - old_add_entrypoint_links: Callable - - -@pytest.fixture() -def entrypoint_link_insertion_fixture( - db, -) -> Generator[TestEntrypointLinkInsertionFixture, None, None]: - data = TestEntrypointLinkInsertionFixture() - data.db = db - - # Mock for AcquisitionFeed.add_entrypoint_links - class Mock: - def add_entrypoint_links(self, *args): - self.called_with = args - - data.mock = Mock() - - # A WorkList with no EntryPoints -- should not call the mock method. - data.no_eps = WorkList() - data.no_eps.initialize(library=db.default_library(), display_name="no_eps") - - # A WorkList with two EntryPoints -- may call the mock method - # depending on circumstances. - data.entrypoints = [AudiobooksEntryPoint, EbooksEntryPoint] # type: ignore[list-item] - data.wl = WorkList() - # The WorkList must have at least one child, or we won't generate - # a real groups feed for it. - data.lane = db.lane() - data.wl.initialize( - library=db.default_library(), - display_name="wl", - entrypoints=data.entrypoints, - children=[data.lane], - ) - - def works(_db, **kwargs): - """Mock WorkList.works so we don't need any actual works - to run the test. - """ - return [] - - data.no_eps.works = works # type: ignore[method-assign, assignment] - data.wl.works = works # type: ignore[method-assign, assignment] - - data.annotator = MockAnnotator - data.old_add_entrypoint_links = AcquisitionFeed.add_entrypoint_links - AcquisitionFeed.add_entrypoint_links = data.mock.add_entrypoint_links - yield data - AcquisitionFeed.add_entrypoint_links = data.old_add_entrypoint_links - - -class TestEntrypointLinkInsertion: - """Verify that the three main types of OPDS feeds -- grouped, - paginated, and search results -- will all include links to the same - feed but through a different entry point. - """ - - def test_groups( - self, - entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - data, db, session = ( - entrypoint_link_insertion_fixture, - entrypoint_link_insertion_fixture.db, - entrypoint_link_insertion_fixture.db.session, - ) - end_to_end_search_fixture.external_search_index.start_migration().finish() # type: ignore [union-attr] - - # When AcquisitionFeed.groups() generates a grouped - # feed, it will link to different entry points into the feed, - # assuming the WorkList has different entry points. - def run(wl=None, facets=None): - """Call groups() and see what add_entrypoint_links - was called with. - """ - data.mock.called_with = None - AcquisitionFeed.groups( - session, - "title", - "url", - wl, - data.annotator, - max_age=0, - facets=facets, - search_engine=end_to_end_search_fixture.external_search_index, - ) - return data.mock.called_with - - # This WorkList has no entry points, so the mock method is not - # even called. - assert None == run(data.no_eps) - - # A WorkList with entry points does cause the mock method - # to be called. - facets = FeaturedFacets( - minimum_featured_quality=db.default_library().settings.minimum_featured_quality, - entrypoint=EbooksEntryPoint, - ) - feed, make_link, entrypoints, selected = run(data.wl, facets) - - # add_entrypoint_links was passed both possible entry points - # and the selected entry point. 
- assert data.wl.entrypoints == entrypoints - assert selected == EbooksEntryPoint - - # The make_link function that was passed in calls - # TestAnnotator.groups_url() when passed an EntryPoint. - assert "http://groups/?entrypoint=Book" == make_link(EbooksEntryPoint) - - def test_page( - self, entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture - ): - data, db, session = ( - entrypoint_link_insertion_fixture, - entrypoint_link_insertion_fixture.db, - entrypoint_link_insertion_fixture.db.session, - ) - - # When AcquisitionFeed.page() generates the first page of a paginated - # list, it will link to different entry points into the list, - # assuming the WorkList has different entry points. - - def run(wl=None, facets=None, pagination=None): - """Call page() and see what add_entrypoint_links - was called with. - """ - data.mock.called_with = None - private = object() - AcquisitionFeed.page( - session, - "title", - "url", - wl, - data.annotator, - max_age=0, - facets=facets, - pagination=pagination, - private=private, - ) - - return data.mock.called_with - - # The WorkList has no entry points, so the mock method is not - # even called. - assert None == run(data.no_eps) - - # Let's give the WorkList two possible entry points, and choose one. - facets = Facets.default(db.default_library()).navigate( - entrypoint=EbooksEntryPoint - ) - feed, make_link, entrypoints, selected = run(data.wl, facets) - - # This time, add_entrypoint_links was called, and passed both - # possible entry points and the selected entry point. - assert data.wl.entrypoints == entrypoints - assert selected == EbooksEntryPoint - - # The make_link function that was passed in calls - # TestAnnotator.feed_url() when passed an EntryPoint. The - # Facets object's other facet groups are propagated in this URL. - first_page_url = "http://wl/?available=all&collection=full&collectionName=All&distributor=All&entrypoint=Book&order=author" - assert first_page_url == make_link(EbooksEntryPoint) - - # Pagination information is not propagated through entry point links - # -- you always start at the beginning of the list. - pagination = Pagination(offset=100) - feed, make_link, entrypoints, selected = run(data.wl, facets, pagination) - assert first_page_url == make_link(EbooksEntryPoint) - - def test_search( - self, entrypoint_link_insertion_fixture: TestEntrypointLinkInsertionFixture - ): - data, db, session = ( - entrypoint_link_insertion_fixture, - entrypoint_link_insertion_fixture.db, - entrypoint_link_insertion_fixture.db.session, - ) - - # When AcquisitionFeed.search() generates the first page of - # search results, it will link to related searches for different - # entry points, assuming the WorkList has different entry points. - def run(wl=None, facets=None, pagination=None): - """Call search() and see what add_entrypoint_links - was called with. - """ - data.mock.called_with = None - AcquisitionFeed.search( - session, - "title", - "url", - wl, - None, - None, - annotator=data.annotator, - facets=facets, - pagination=pagination, - ) - return data.mock.called_with - - # Mock search() so it never tries to return anything. - def mock_search(self, *args, **kwargs): - return [] - - data.no_eps.search = mock_search # type: ignore[method-assign, assignment] - data.wl.search = mock_search # type: ignore[method-assign, assignment] - - # This WorkList has no entry points, so the mock method is not - # even called. - assert None == run(data.no_eps) - - # The mock method is called for a WorkList that does have - # entry points. 
- facets = SearchFacets().navigate(entrypoint=EbooksEntryPoint) - assert isinstance(facets, SearchFacets) - feed, make_link, entrypoints, selected = run(data.wl, facets) - - # Since the SearchFacets has more than one entry point, - # the EverythingEntryPoint is prepended to the list of possible - # entry points. - assert [ - EverythingEntryPoint, - AudiobooksEntryPoint, - EbooksEntryPoint, - ] == entrypoints - - # add_entrypoint_links was passed the three possible entry points - # and the selected entry point. - assert selected == EbooksEntryPoint - - # The make_link function that was passed in calls - # TestAnnotator.search_url() when passed an EntryPoint. - first_page_url = "http://wl/?available=all&collection=full&entrypoint=Book&order=relevance&search_type=default" - assert first_page_url == make_link(EbooksEntryPoint) - - # Pagination information is not propagated through entry point links - # -- you always start at the beginning of the list. - pagination = Pagination(offset=100) - feed, make_link, entrypoints, selected = run(data.wl, facets, pagination) - assert first_page_url == make_link(EbooksEntryPoint) - - -class TestNavigationFacets: - def test_feed_type(self): - # If a navigation feed is built via CachedFeed.fetch, it will be - # filed as a navigation feed. - assert CachedFeed.NAVIGATION_TYPE == NavigationFacets.CACHED_FEED_TYPE - - -class TestNavigationFeedFixture: - db: DatabaseTransactionFixture - fiction: Lane - fantasy: Lane - romance: Lane - contemporary_romance: Lane - - -@pytest.fixture() -def navigation_feed_fixture( - db, -) -> TestNavigationFeedFixture: - data = TestNavigationFeedFixture() - data.db = db - data.fiction = db.lane("Fiction") - data.fantasy = db.lane("Fantasy", parent=data.fiction) - data.romance = db.lane("Romance", parent=data.fiction) - data.contemporary_romance = db.lane("Contemporary Romance", parent=data.romance) - return data - - -class TestNavigationFeed: - def test_add_entry(self): - feed = NavigationFeed("title", "http://navigation") - feed.add_entry("http://example.com", "Example", "text/html") - parsed = feedparser.parse(str(feed)) - [entry] = parsed["entries"] - assert "Example" == entry["title"] - [link] = entry["links"] - assert "http://example.com" == link["href"] - assert "text/html" == link["type"] - assert "subsection" == link["rel"] - - def test_navigation_with_sublanes( - self, navigation_feed_fixture: TestNavigationFeedFixture - ): - data, db, session = ( - navigation_feed_fixture, - navigation_feed_fixture.db, - navigation_feed_fixture.db.session, - ) - - private = object() - response = NavigationFeed.navigation( - session, - "Navigation", - "http://navigation", - data.fiction, - MockAnnotator, - max_age=42, - private=private, - ) - - # We got an OPDSFeedResponse back. The values we passed in for - # max_age and private were propagated to the response - # constructor. - assert isinstance(response, OPDSFeedResponse) - assert 42 == response.max_age - assert private == response.private - - # The media type of this response is different than from the - # typical OPDSFeedResponse. 
- assert OPDSFeed.NAVIGATION_FEED_TYPE == response.content_type - - parsed = feedparser.parse(response.data) - - assert "Navigation" == parsed["feed"]["title"] - [self_link] = parsed["feed"]["links"] - assert "http://navigation" == self_link["href"] - assert "self" == self_link["rel"] - assert "http://navigation" == parsed["feed"]["id"] - [fantasy, romance] = sorted(parsed["entries"], key=lambda x: x["title"]) - - assert data.fantasy.display_name == fantasy["title"] - assert "http://%s/" % data.fantasy.id == fantasy["id"] - [fantasy_link] = fantasy["links"] - assert "http://%s/" % data.fantasy.id == fantasy_link["href"] - assert "subsection" == fantasy_link["rel"] - assert NavigationFeed.ACQUISITION_FEED_TYPE == fantasy_link["type"] - - assert data.romance.display_name == romance["title"] - assert "http://navigation/%s" % data.romance.id == romance["id"] - [romance_link] = romance["links"] - assert "http://navigation/%s" % data.romance.id == romance_link["href"] - assert "subsection" == romance_link["rel"] - assert NavigationFeed.NAVIGATION_FEED_TYPE == romance_link["type"] - - # The feed was cached. - cached = get_one(session, CachedFeed) - assert isinstance(cached, CachedFeed) - assert isinstance(cached.content, str) - assert "http://%s/" % data.fantasy.id in cached.content - - def test_navigation_without_sublanes( - self, navigation_feed_fixture: TestNavigationFeedFixture - ): - data, db, session = ( - navigation_feed_fixture, - navigation_feed_fixture.db, - navigation_feed_fixture.db.session, - ) - - feed = NavigationFeed.navigation( - session, "Navigation", "http://navigation", data.fantasy, MockAnnotator - ) - parsed = feedparser.parse(str(feed)) - assert "Navigation" == parsed["feed"]["title"] - [self_link] = parsed["feed"]["links"] - assert "http://navigation" == self_link["href"] - assert "self" == self_link["rel"] - assert "http://navigation" == parsed["feed"]["id"] - [fantasy] = parsed["entries"] - - assert "All " + data.fantasy.display_name == fantasy["title"] - assert "http://%s/" % data.fantasy.id == fantasy["id"] - [fantasy_link] = fantasy["links"] - assert "http://%s/" % data.fantasy.id == fantasy_link["href"] - assert "subsection" == fantasy_link["rel"] - assert NavigationFeed.ACQUISITION_FEED_TYPE == fantasy_link["type"] diff --git a/tests/core/test_opds2.py b/tests/core/test_opds2.py deleted file mode 100644 index 4fa66e9973..0000000000 --- a/tests/core/test_opds2.py +++ /dev/null @@ -1,310 +0,0 @@ -import json -from datetime import datetime -from unittest.mock import Mock - -import pytest - -from api.app import app -from api.opds2 import OPDS2PublicationsAnnotator -from core.classifier import Classifier -from core.external_search import ExternalSearchIndex, SortKeyPagination -from core.lane import Facets, Lane, Pagination, SearchFacets -from core.model import ExternalIntegration -from core.model.classification import Subject -from core.model.datasource import DataSource -from core.model.edition import Edition -from core.model.identifier import Identifier -from core.model.resource import Hyperlink -from core.opds2 import AcquisitonFeedOPDS2, OPDS2Annotator -from core.util.flask_util import OPDSFeedResponse -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.search import EndToEndSearchFixture -from tests.mocks.search import SearchServiceFake - - -class TestOPDS2FeedFixture: - transaction: DatabaseTransactionFixture - search_engine: ExternalSearchIndex - fiction: Lane - search_fixture: EndToEndSearchFixture - - -@pytest.fixture -def 
opds2_feed_fixture( - db: DatabaseTransactionFixture, end_to_end_search_fixture: EndToEndSearchFixture -) -> TestOPDS2FeedFixture: - data = TestOPDS2FeedFixture() - data.transaction = db - data.search_fixture = end_to_end_search_fixture - data.search_engine = data.search_fixture.external_search_index - data.fiction = db.lane("Fiction") - data.fiction.fiction = True - data.fiction.audiences = [Classifier.AUDIENCE_ADULT] - return data - - -class TestOPDS2Feed: - def test_publications_feed(self, opds2_feed_fixture: TestOPDS2FeedFixture): - data, transaction, session = ( - opds2_feed_fixture, - opds2_feed_fixture.transaction, - opds2_feed_fixture.transaction.session, - ) - - work = transaction.work( - with_open_access_download=True, authors="Author Name", fiction=True - ) - - docs = data.search_engine.start_migration() - assert docs is not None - docs.add_documents( - data.search_engine.create_search_documents_from_works([work]) - ) - docs.finish() - - result = AcquisitonFeedOPDS2.publications( - session, - data.fiction, - SearchFacets(library=transaction.default_library()), - Pagination.default(), - data.search_engine, - OPDS2Annotator( - "/", SearchFacets(), Pagination.default(), transaction.default_library() - ), - ) - - assert type(result) == OPDSFeedResponse - - def test_publications_feed_json(self, opds2_feed_fixture: TestOPDS2FeedFixture): - data, transaction, session = ( - opds2_feed_fixture, - opds2_feed_fixture.transaction, - opds2_feed_fixture.transaction.session, - ) - works = [ - transaction.work( - with_open_access_download=True, - title="title1", - authors="Author Name1", - fiction=True, - ), - transaction.work( - with_open_access_download=True, - title="title2", - authors="Author Name2", - fiction=True, - ), - transaction.work( - with_open_access_download=True, - title="title3", - authors="Author Name3", - fiction=True, - ), - transaction.work( - with_open_access_download=True, - title="title4", - authors="Author Name4", - fiction=True, - ), - ] - - docs = data.search_engine.start_migration() - assert docs is not None - docs.add_documents(data.search_engine.create_search_documents_from_works(works)) - docs.finish() - - annotator = OPDS2Annotator( - "/", - Facets.default(transaction.default_library()), - Pagination.default(), - transaction.default_library(), - ) - result: OPDSFeedResponse = AcquisitonFeedOPDS2.publications( - session, - data.fiction, - SearchFacets(library=transaction.default_library()), - Pagination.default(), - data.search_engine, - annotator, - ) - result_dict = json.loads(result.data) - assert len(result_dict["publications"]) == len(works) - - def test_acquisition_facet_links(self, opds2_feed_fixture: TestOPDS2FeedFixture): - transaction, session = ( - opds2_feed_fixture.transaction, - opds2_feed_fixture.transaction.session, - ) - - with app.test_request_context("/"): - transaction.default_collection().data_source = DataSource.AMAZON - facets = Facets.default( - transaction.default_library(), distributor=DataSource.AMAZON - ) - publication = AcquisitonFeedOPDS2( - session, - [], - OPDS2PublicationsAnnotator( - "/", facets, Pagination.default(), transaction.default_library() - ), - facets, - ).publications_json() - - assert "facets" in publication - publication_facets = publication["facets"] - assert len(publication_facets) == 5 - list(map(lambda x: x["metadata"]["title"], publication_facets)).sort() == [ - "available", - "collection", - "collectionName", - "distributor", - "order", - ] - - -class TestOPDS2AnnotatorFixture: - transaction: 
DatabaseTransactionFixture - search_engine: ExternalSearchIndex - fiction: Lane - annotator: OPDS2Annotator - search_integration: ExternalIntegration - - -@pytest.fixture -def opds2_annotator_fixture( - db: DatabaseTransactionFixture, -) -> TestOPDS2AnnotatorFixture: - data = TestOPDS2AnnotatorFixture() - data.transaction = db - data.search_integration = db.external_integration( - ExternalIntegration.OPENSEARCH, - goal=ExternalIntegration.SEARCH_GOAL, - url="http://does-not-matter.com", # It doesn't matter what URL we specify, because the search service is fake - settings={ - ExternalSearchIndex.WORKS_INDEX_PREFIX_KEY: "test_index", - ExternalSearchIndex.TEST_SEARCH_TERM_KEY: "test_search_term", - }, - ) - data.search_engine = ExternalSearchIndex( - _db=db.session, custom_client_service=SearchServiceFake() - ) - data.fiction = db.lane("Fiction") - data.fiction.fiction = True - data.fiction.audiences = [Classifier.AUDIENCE_ADULT] - data.annotator = OPDS2Annotator( - "http://example.org/feed", - Facets.default(db.default_library()), - SortKeyPagination("lastitemonpage"), - db.default_library(), - ) - return data - - -class TestOPDS2Annotator: - def test_feed_links(self, opds2_annotator_fixture: TestOPDS2AnnotatorFixture): - # Mock the pagination - m = Mock() - m.meta = Mock() - m.meta.sort = ["Item"] - opds2_annotator_fixture.annotator.pagination.page_loaded([m]) - links = opds2_annotator_fixture.annotator.feed_links() - assert len(links) == 2 - assert links[0] == { - "rel": "self", - "href": "http://example.org/feed", - "type": "application/opds+json", - } - assert "key=%5B%22Item%22%5D" in links[1]["href"] - - def test_image_links(self, opds2_annotator_fixture: TestOPDS2AnnotatorFixture): - data, transaction, session = ( - opds2_annotator_fixture, - opds2_annotator_fixture.transaction, - opds2_annotator_fixture.transaction.session, - ) - - work = transaction.work() - edition = work.presentation_edition - idn: Identifier = edition.primary_identifier - idn.add_link( - Hyperlink.IMAGE, - "https://example.org/image", - edition.data_source, - media_type="image/png", - ) - idn.add_link( - Hyperlink.THUMBNAIL_IMAGE, - "https://example.org/thumb", - edition.data_source, - media_type="image/png", - ) - - docs = data.search_engine.start_updating_search_documents() - docs.add_documents( - data.search_engine.create_search_documents_from_works([work]) - ) - docs.finish() - result = data.annotator.metadata_for_work(work) - assert isinstance(result, dict) - - assert "images" in result - assert len(result["images"]) == 2 - assert result["images"] == [ - dict( - rel=Hyperlink.IMAGE, href="https://example.org/image", type="image/png" - ), - dict( - rel=Hyperlink.THUMBNAIL_IMAGE, - href="https://example.org/thumb", - type="image/png", - ), - ] - - def test_work_metadata(self, opds2_annotator_fixture: TestOPDS2AnnotatorFixture): - data, transaction, session = ( - opds2_annotator_fixture, - opds2_annotator_fixture.transaction, - opds2_annotator_fixture.transaction.session, - ) - - work = transaction.work( - authors="Author Person", genre="Science", with_license_pool=True - ) - edition: Edition = work.presentation_edition - idn: Identifier = edition.primary_identifier - - modified = datetime.now() - work.last_update_time = modified - edition.license_pools[0].availability_time = modified - edition.series = "A series" - edition.series_position = 4 - - docs = data.search_engine.start_updating_search_documents() - docs.add_documents( - data.search_engine.create_search_documents_from_works([work]) - ) - 
docs.finish() - result = data.annotator.metadata_for_work(work) - assert isinstance(result, dict) - - meta = result["metadata"] - assert meta["@type"] == "http://schema.org/EBook" - assert meta["title"] == work.title - assert meta["subtitle"] == work.subtitle - assert meta["identifier"] == idn.urn - assert meta["modified"] == modified.isoformat() - assert meta["published"] == modified.date().isoformat() - assert meta["language"] == "en" - assert meta["sortAs"] == work.sort_title - assert meta["author"] == {"name": "Author Person"} - assert meta["subject"] == [ - {"name": "Science", "sortAs": "Science", "scheme": Subject.SIMPLIFIED_GENRE} - ] - assert meta["belongsTo"] == {"series": {"name": "A series", "position": 4}} - - def test_feed_metadata(self, opds2_annotator_fixture: TestOPDS2AnnotatorFixture): - meta = opds2_annotator_fixture.annotator.feed_metadata() - assert meta == { - "title": "OPDS2 Feed", - "itemsPerPage": Pagination.DEFAULT_SIZE, - } diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index 38154fb1c2..812128b9e8 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -1929,10 +1929,6 @@ def test_explain(self, db: DatabaseTransactionFixture): # printed out. assert "OCLC Linked Data | an operation | success" in output - # WorkCoverageRecords associated with the work were - # printed out. - assert "generate-opds | success" in output - # There is an active LicensePool that is fulfillable and has # copies owned. assert "%s owned" % pool.licenses_owned in output diff --git a/tests/core/util/test_opds_writer.py b/tests/core/util/test_opds_writer.py index f2be7c81f3..f112cd6a65 100644 --- a/tests/core/util/test_opds_writer.py +++ b/tests/core/util/test_opds_writer.py @@ -53,13 +53,6 @@ def test_add_link_to_entry(self): method="c14n2", ) in etree.tostring(entry, method="c14n2") - def test_contributor(self): - kwargs = {"{%s}role" % AtomFeed.OPF_NS: "ctb"} - tag = etree.tounicode(AtomFeed.author(**kwargs)) - assert tag.startswith("') - @pytest.mark.parametrize( "_,obj,formatted", [ diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 54d1c98f7a..a143b67413 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -413,7 +413,6 @@ def work( # This is probably going to be used in an OPDS feed, so # fake that the work is presentation ready. 
work.presentation_ready = True - work.calculate_opds_entries(verbose=False) return work @@ -843,7 +842,6 @@ def slow_work(self, *args, **kwargs): """ work = self.work(*args, **kwargs) work.calculate_presentation_edition() - work.calculate_opds_entries(verbose=False) return work def sample_ecosystem(self): From 05719cacf3dd970972da2992b832643b0f726d3a Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 11 Oct 2023 06:29:04 -0300 Subject: [PATCH 100/262] Fully type hint `api/axis.py` (PP-500) (#1441) --- api/axis.py | 313 +++++++++++++++++++----------- core/util/datetime_helpers.py | 2 +- pyproject.toml | 1 + tests/api/test_axis.py | 76 ++++---- tests/api/test_controller_loan.py | 16 +- 5 files changed, 253 insertions(+), 155 deletions(-) diff --git a/api/axis.py b/api/axis.py index d2e8ca74b0..1c42af4972 100644 --- a/api/axis.py +++ b/api/axis.py @@ -1,9 +1,9 @@ from __future__ import annotations +import base64 import datetime import html import json -import logging import re import socket import ssl @@ -12,11 +12,14 @@ from datetime import timedelta from typing import ( Any, + Callable, Dict, + Generator, Generic, List, Literal, Mapping, + Sequence, Tuple, Type, TypeVar, @@ -30,6 +33,7 @@ from lxml import etree from lxml.etree import _Element from pydantic import validator +from requests import Response as RequestsResponse from api.admin.validator import Validator from api.circulation import ( @@ -62,6 +66,7 @@ Metadata, ReplacementPolicy, SubjectData, + TimestampData, ) from core.model import ( Classification, @@ -88,7 +93,6 @@ from core.util.http import HTTP, RequestNetworkException from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail -from core.util.string_helpers import base64 from core.util.xmlparser import XMLProcessor @@ -139,7 +143,7 @@ class Axis360Settings(BaseSettings): ) @validator("url") - def _validate_url(cls, v): + def _validate_url(cls, v: str) -> str: # Validate if the url provided is valid http or a valid nickname valid_names = list(Axis360APIConstants.SERVER_NICKNAMES.keys()) if not Validator._is_url(v, valid_names): @@ -194,20 +198,22 @@ class Axis360API( } @classmethod - def settings_class(cls): + def settings_class(cls) -> Type[Axis360Settings]: return Axis360Settings @classmethod - def library_settings_class(cls): + def library_settings_class(cls) -> Type[Axis360LibrarySettings]: return Axis360LibrarySettings - def label(self): - return self.NAME + @classmethod + def label(cls) -> str: + return cls.NAME - def description(self): + @classmethod + def description(cls) -> str: return "" - def __init__(self, _db, collection): + def __init__(self, _db: Session, collection: Collection) -> None: if collection.protocol != ExternalIntegration.AXIS_360: raise ValueError( "Collection protocol is %s, but passed into Axis360API!" 
@@ -215,7 +221,7 @@ def __init__(self, _db, collection): ) super().__init__(_db, collection) - self.library_id = collection.external_account_id + self.library_id = collection.external_account_id or "" config = self.configuration() self.username = config.username self.password = config.password @@ -231,32 +237,25 @@ def __init__(self, _db, collection): if not self.library_id or not self.username or not self.password: raise CannotLoadConfiguration("Axis 360 configuration is incomplete.") - # Use utf8 instead of unicode encoding - settings = [self.library_id, self.username, self.password] - self.library_id, self.username, self.password = ( - setting.encode("utf8") for setting in settings - ) - - self.token = None + self.token: Optional[str] = None self.verify_certificate: bool = ( config.verify_certificate if config.verify_certificate is not None else True ) @property - def source(self): - return DataSource.lookup(self._db, DataSource.AXIS_360) + def source(self) -> Optional[DataSource]: + return DataSource.lookup(self._db, DataSource.AXIS_360) # type: ignore[no-any-return] @property - def authorization_headers(self): - authorization = b":".join([self.username, self.password, self.library_id]) - authorization = authorization.decode("utf-8").encode("utf_16_le") - authorization = base64.standard_b64encode(authorization) - return dict(Authorization="Basic " + authorization) - - def external_integration(self, _db): - return self.collection.external_integration + def authorization_headers(self) -> Dict[str, str]: + authorization = ":".join([self.username, self.password, self.library_id]) + authorization_encoded = authorization.encode("utf_16_le") + authorization_b64 = base64.standard_b64encode(authorization_encoded).decode( + "utf-8" + ) + return dict(Authorization="Basic " + authorization_b64) - def _run_self_tests(self, _db): + def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: result = self.run_test("Refreshing bearer token", self.refresh_bearer_token) yield result if not result.success: @@ -264,7 +263,7 @@ def _run_self_tests(self, _db): # the rest of the tests. 
return - def _count_events(): + def _count_events() -> str: now = utc_now() five_minutes_ago = now - timedelta(minutes=5) count = len(list(self.recent_activity(since=five_minutes_ago))) @@ -274,13 +273,16 @@ def _count_events(): "Asking for circulation events for the last five minutes", _count_events ) - for result in self.default_patrons(self.collection): - if isinstance(result, SelfTestResult): - yield result + if self.collection is None: + raise ValueError("Collection is None") + + for library_result in self.default_patrons(self.collection): + if isinstance(library_result, SelfTestResult): + yield library_result continue - library, patron, pin = result + library, patron, pin = library_result - def _count_activity(): + def _count_activity() -> str: result = self.patron_activity(patron, pin) return "Found %d loans/holds" % len(result) @@ -293,7 +295,7 @@ def _count_activity(): for result in super()._run_self_tests(_db): yield result - def refresh_bearer_token(self): + def refresh_bearer_token(self) -> str: url = self.base_url + self.access_token_endpoint headers = self.authorization_headers response = self._make_request( @@ -303,19 +305,21 @@ def refresh_bearer_token(self): def request( self, - url, - method="get", - extra_headers={}, - data=None, - params=None, - exception_on_401=False, - **kwargs, - ): + url: str, + method: str = "get", + extra_headers: Optional[Dict[str, str]] = None, + data: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + exception_on_401: bool = False, + **kwargs: Any, + ) -> RequestsResponse: """Make an HTTP request, acquiring/refreshing a bearer token if necessary. """ if not self.token: self.token = self.refresh_bearer_token() + if not extra_headers: + extra_headers = {} headers = dict(extra_headers) headers["Authorization"] = "Bearer " + self.token headers["Library"] = self.library_id @@ -350,12 +354,17 @@ def request( else: return response - def availability(self, patron_id=None, since=None, title_ids=[]): + def availability( + self, + patron_id: Optional[str] = None, + since: Optional[datetime.datetime] = None, + title_ids: Optional[List[str]] = None, + ) -> RequestsResponse: url = self.base_url + self.availability_endpoint args = dict() if since: - since = since.strftime(self.DATE_FORMAT) - args["updatedDate"] = since + since_str = since.strftime(self.DATE_FORMAT) + args["updatedDate"] = since_str if patron_id: args["patronId"] = patron_id if title_ids: @@ -363,13 +372,13 @@ def availability(self, patron_id=None, since=None, title_ids=[]): response = self.request(url, params=args, timeout=None) return response - def get_fulfillment_info(self, transaction_id): + def get_fulfillment_info(self, transaction_id: str) -> RequestsResponse: """Make a call to the getFulfillmentInfoAPI.""" url = self.base_url + self.fulfillment_endpoint params = dict(TransactionID=transaction_id) return self.request(url, "POST", params=params) - def get_audiobook_metadata(self, findaway_content_id): + def get_audiobook_metadata(self, findaway_content_id: str) -> RequestsResponse: """Make a call to the getaudiobookmetadata endpoint.""" base_url = self.base_url url = base_url + self.audiobook_metadata_endpoint @@ -377,7 +386,7 @@ def get_audiobook_metadata(self, findaway_content_id): response = self.request(url, "POST", params=params) return response - def checkin(self, patron, pin, licensepool): + def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: """Return a book early. 
:param patron: The Patron who wants to return their book. @@ -390,14 +399,28 @@ def checkin(self, patron, pin, licensepool): patron_id = patron.authorization_identifier response = self._checkin(title_id, patron_id) try: - return CheckinResponseParser(licensepool.collection).process_first( + CheckinResponseParser(licensepool.collection).process_first( response.content ) except etree.XMLSyntaxError as e: raise RemoteInitiatedServerError(response.content, self.SERVICE_NAME) - def _checkin(self, title_id, patron_id): + def _checkin( + self, title_id: Optional[str], patron_id: Optional[str] + ) -> RequestsResponse: """Make a request to the EarlyCheckInTitle endpoint.""" + if title_id is None: + self.log.warning( + f"Calling _checkin with title_id None. This is likely a bug. Patron_id: {patron_id}." + ) + title_id = "" + + if patron_id is None: + self.log.warning( + f"Calling _checkin with patron_id None. This is likely a bug. Title_id: {title_id}." + ) + patron_id = "" + url = self.base_url + "EarlyCheckInTitle/v3?itemID={}&patronID={}".format( urllib.parse.quote(title_id), urllib.parse.quote(patron_id), @@ -426,7 +449,9 @@ def checkout( except etree.XMLSyntaxError as e: raise RemoteInitiatedServerError(response.content, self.SERVICE_NAME) - def _checkout(self, title_id, patron_id, internal_format): + def _checkout( + self, title_id: Optional[str], patron_id: Optional[str], internal_format: str + ) -> RequestsResponse: url = self.base_url + "checkout/v2" args = dict(titleId=title_id, patronId=patron_id, format=internal_format) response = self.request(url, data=args, method="POST") @@ -466,7 +491,13 @@ def fulfill( # book checked out. raise NoActiveLoan() - def place_hold(self, patron, pin, licensepool, hold_notification_email): + def place_hold( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + hold_notification_email: Optional[str], + ) -> HoldInfo: if not hold_notification_email: hold_notification_email = self.default_notification_email_address( patron, pin @@ -483,6 +514,8 @@ def place_hold(self, patron, pin, licensepool, hold_notification_email): hold_info = HoldResponseParser(licensepool.collection).process_first( response.content ) + if not hold_info: + raise CannotHold() if not hold_info.identifier: # The Axis 360 API doesn't return the identifier of the # item that was placed on hold, so we have to fill it in @@ -491,7 +524,7 @@ def place_hold(self, patron, pin, licensepool, hold_notification_email): hold_info.identifier = identifier.identifier return hold_info - def release_hold(self, patron, pin, licensepool): + def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: url = self.base_url + "removeHold/v2" identifier = licensepool.identifier title_id = identifier.identifier @@ -506,10 +539,17 @@ def release_hold(self, patron, pin, licensepool): # Fine, it wasn't on hold and now it's still not on hold. pass # If we didn't raise an exception, we're fine. 
- return True + return None - def patron_activity(self, patron, pin, identifier=None, internal_format=None): + def patron_activity( + self, + patron: Patron, + pin: Optional[str], + identifier: Optional[Identifier] = None, + internal_format: Optional[str] = None, + ) -> List[LoanInfo | HoldInfo]: if identifier: + assert identifier.identifier is not None title_ids = [identifier.identifier] else: title_ids = None @@ -522,14 +562,16 @@ def patron_activity(self, patron, pin, identifier=None, internal_format=None): ) ) - def update_availability(self, licensepool): + def update_availability(self, licensepool: LicensePool) -> None: """Update the availability information for a single LicensePool. Part of the CirculationAPI interface. """ self.update_licensepools_for_identifiers([licensepool.identifier]) - def update_licensepools_for_identifiers(self, identifiers): + def update_licensepools_for_identifiers( + self, identifiers: List[Identifier] + ) -> None: """Update availability and bibliographic information for a list of books. @@ -554,7 +596,12 @@ def update_licensepools_for_identifiers(self, identifiers): for removed_identifier in remainder: self._reap(removed_identifier) - def update_book(self, bibliographic, availability, analytics=None): + def update_book( + self, + bibliographic: Metadata, + availability: CirculationData, + analytics: Optional[Analytics] = None, + ) -> Tuple[Edition, bool, LicensePool, bool]: """Create or update a single book based on bibliographic and availability data from the Axis 360 API. @@ -585,17 +632,19 @@ def update_book(self, bibliographic, availability, analytics=None): availability.apply(self._db, self.collection, replace=policy) return edition, new_edition, license_pool, new_license_pool - def _fetch_remote_availability(self, identifiers): + def _fetch_remote_availability( + self, identifiers: List[Identifier] + ) -> Generator[Tuple[Metadata, CirculationData], None, None]: """Retrieve availability information for the specified identifiers. :yield: A stream of (Metadata, CirculationData) 2-tuples. """ identifier_strings = self.create_identifier_strings(identifiers) response = self.availability(title_ids=identifier_strings) - parser = BibliographicParser(self.collection) + parser = BibliographicParser() return parser.process_all(response.content) - def _reap(self, identifier): + def _reap(self, identifier: Identifier) -> None: """Update our local circulation information to reflect the fact that the identified book has been removed from the remote collection. @@ -625,20 +674,25 @@ def _reap(self, identifier): self._db, collection, ReplacementPolicy.from_license_source(self._db) ) - def recent_activity(self, since): + def recent_activity( + self, since: datetime.datetime + ) -> Generator[Tuple[Metadata, CirculationData], None, None]: """Find books that have had recent activity. 
:yield: A sequence of (Metadata, CirculationData) 2-tuples """ availability = self.availability(since=since) content = availability.content - yield from BibliographicParser(self.collection).process_all(content) + yield from BibliographicParser().process_all(content) @classmethod - def create_identifier_strings(cls, identifiers): + def create_identifier_strings( + cls, identifiers: Sequence[Identifier | str] + ) -> List[str]: identifier_strings = [] for i in identifiers: if isinstance(i, Identifier): + assert i.identifier is not None value = i.identifier else: value = i @@ -647,11 +701,19 @@ def create_identifier_strings(cls, identifiers): return identifier_strings @classmethod - def parse_token(cls, token): + def parse_token(cls, token: bytes) -> str: data = json.loads(token) - return data["access_token"] + return data["access_token"] # type: ignore[no-any-return] - def _make_request(self, url, method, headers, data=None, params=None, **kwargs): + def _make_request( + self, + url: str, + method: str, + headers: Mapping[str, str], + data: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + **kwargs: Any, + ) -> RequestsResponse: """Actually make an HTTP request.""" return HTTP.request_with_timeout( method, url, headers=headers, data=data, params=params, **kwargs @@ -670,7 +732,14 @@ class Axis360CirculationMonitor(CollectionMonitor, TimelineMonitor): DEFAULT_START_TIME = datetime_utc(1970, 1, 1) - def __init__(self, _db, collection, api_class=Axis360API): + def __init__( + self, + _db: Session, + collection: Collection, + api_class: Union[ + Axis360API, Callable[[Session, Collection], Axis360API] + ] = Axis360API, + ): super().__init__(_db, collection) if isinstance(api_class, Axis360API): # Use a preexisting Axis360API instance rather than @@ -684,7 +753,12 @@ def __init__(self, _db, collection, api_class=Axis360API): collection, api_class=self.api ) - def catch_up_from(self, start, cutoff, progress): + def catch_up_from( + self, + start: datetime.datetime, + cutoff: Optional[datetime.datetime], + progress: TimestampData, + ) -> None: """Find Axis 360 books that changed recently. :progress: A TimestampData representing the time previously @@ -698,7 +772,9 @@ def catch_up_from(self, start, cutoff, progress): self._db.commit() progress.achievements = "Modified titles: %d." % count - def process_book(self, bibliographic, circulation): + def process_book( + self, bibliographic: Metadata, circulation: CirculationData + ) -> Tuple[Edition, LicensePool]: edition, new_edition, license_pool, new_license_pool = self.api.update_book( bibliographic, circulation ) @@ -728,7 +804,14 @@ class Axis360BibliographicCoverageProvider(BibliographicCoverageProvider): INPUT_IDENTIFIER_TYPES = Identifier.AXIS_360_ID DEFAULT_BATCH_SIZE = 25 - def __init__(self, collection, api_class=Axis360API, **kwargs): + def __init__( + self, + collection: Collection, + api_class: Union[ + Axis360API, Callable[[Session, Collection], Axis360API] + ] = Axis360API, + **kwargs: Any, + ) -> None: """Constructor. 
:param collection: Provide bibliographic coverage to all @@ -747,7 +830,9 @@ def __init__(self, collection, api_class=Axis360API, **kwargs): self.api = api_class(_db, collection) self.parser = BibliographicParser() - def process_batch(self, identifiers): + def process_batch( + self, identifiers: List[Identifier] + ) -> List[CoverageFailure | Identifier]: identifier_strings = self.api.create_identifier_strings(identifiers) response = self.api.availability(title_ids=identifier_strings) seen_identifiers = set() @@ -778,10 +863,10 @@ def process_batch(self, identifiers): batch_results.append(result) return batch_results - def handle_success(self, identifier): - return self.set_presentation_ready(identifier) + def handle_success(self, identifier: Identifier) -> Identifier | CoverageFailure: + return self.set_presentation_ready(identifier) # type: ignore[no-any-return] - def process_item(self, identifier): + def process_item(self, identifier: Identifier) -> Identifier | CoverageFailure: results = self.process_batch([identifier]) return results[0] @@ -795,7 +880,14 @@ class AxisCollectionReaper(IdentifierSweepMonitor): INTERVAL_SECONDS = 3600 * 12 PROTOCOL = ExternalIntegration.AXIS_360 - def __init__(self, _db, collection, api_class=Axis360API): + def __init__( + self, + _db: Session, + collection: Collection, + api_class: Union[ + Axis360API, Callable[[Session, Collection], Axis360API] + ] = Axis360API, + ) -> None: super().__init__(_db, collection) if isinstance(api_class, Axis360API): # Use a preexisting Axis360API instance rather than @@ -804,7 +896,7 @@ def __init__(self, _db, collection, api_class=Axis360API): else: self.api = api_class(_db, collection) - def process_items(self, identifiers): + def process_items(self, identifiers: List[Identifier]) -> None: self.api.update_licensepools_for_identifiers(identifiers) @@ -850,9 +942,7 @@ def _xpath1_date( return self._pd(value) -class BibliographicParser( - Axis360Parser[Tuple[Optional[Metadata], Optional[CirculationData]]], LoggerMixin -): +class BibliographicParser(Axis360Parser[Tuple[Metadata, CirculationData]], LoggerMixin): DELIVERY_DATA_FOR_AXIS_FORMAT = { "Blio": None, # Legacy format, handled the same way as AxisNow "Acoustik": (None, DeliveryMechanism.FINDAWAY_DRM), # Audiobooks @@ -870,12 +960,6 @@ def parse_list(cls, l: str) -> List[str]: """ return [x.strip() for x in l.split(";")] - def __init__( - self, include_availability: bool = True, include_bibliographic: bool = True - ): - self.include_availability = include_availability - self.include_bibliographic = include_bibliographic - @property def xpath_expression(self) -> str: return "//axis:title" @@ -1027,11 +1111,13 @@ def extract_bibliographic( ) ) - publication_date = self.text_of_optional_subtag( + publication_date_str = self.text_of_optional_subtag( element, "axis:publicationDate", ns ) - if publication_date: - publication_date = strptime_utc(publication_date, self.SHORT_DATE_FORMAT) + if publication_date_str: + publication_date = strptime_utc( + publication_date_str, self.SHORT_DATE_FORMAT + ) series = self.text_of_optional_subtag(element, "axis:series", ns) publisher = self.text_of_optional_subtag(element, "axis:publisher", ns) @@ -1174,22 +1260,16 @@ def extract_bibliographic( def process_one( self, element: _Element, ns: Optional[Dict[str, str]] - ) -> Tuple[Optional[Metadata], Optional[CirculationData]]: - if self.include_bibliographic: - bibliographic = self.extract_bibliographic(element, ns) - else: - bibliographic = None + ) -> Tuple[Metadata, CirculationData]: + 
bibliographic = self.extract_bibliographic(element, ns) passed_availability = None if bibliographic and bibliographic.circulation: passed_availability = bibliographic.circulation - if self.include_availability: - availability = self.extract_availability( - circulation_data=passed_availability, element=element, ns=ns - ) - else: - availability = None + availability = self.extract_availability( + circulation_data=passed_availability, element=element, ns=ns + ) return bibliographic, availability @@ -1263,7 +1343,7 @@ def _raise_exception_on_error( custom_error_classes: Optional[ Mapping[int | Tuple[int, str], Type[IntegrationException]] ] = None, - ignore_error_codes=None, + ignore_error_codes: Optional[List[int]] = None, ) -> Tuple[int, str]: try: code = int(code) @@ -1310,7 +1390,7 @@ def raise_exception_on_error( Mapping[int | Tuple[int, str], Type[IntegrationException]] ] = None, ignore_error_codes: Optional[List[int]] = None, - ): + ) -> Tuple[int, str]: """Raise an error if the given lxml node represents an Axis 360 error condition. @@ -1436,9 +1516,7 @@ def process_one( return True -class AvailabilityResponseParser( - XMLResponseParser[Optional[Union[LoanInfo, HoldInfo]]] -): +class AvailabilityResponseParser(XMLResponseParser[Union[LoanInfo, HoldInfo]]): def __init__(self, api: Axis360API, internal_format: Optional[str] = None) -> None: """Constructor. @@ -1649,7 +1727,7 @@ def _parse( self, parsed: Dict[str, Any], license_pool: Optional[LicensePool] = None, - **kwargs, + **kwargs: Any, ) -> Tuple[Union[FindawayManifest, AxisNowManifest], datetime.datetime]: """Extract all useful information from a parsed FulfillmentInfo response. @@ -1730,7 +1808,7 @@ class AudiobookMetadataParser( @classmethod def _parse( - cls, parsed: Dict[str, Any], **kwargs + cls, parsed: Dict[str, Any], **kwargs: Any ) -> Tuple[Optional[str], List[SpineItem]]: spine_items = [] accountId = parsed.get("fndaccountid", None) @@ -1741,7 +1819,7 @@ def _parse( return accountId, spine_items @classmethod - def _extract_spine_item(cls, part): + def _extract_spine_item(cls, part: Dict[str, str | int | float]) -> SpineItem: """Convert an element of the 'readingOrder' list to a SpineItem.""" title = part.get("title") # Incoming duration is measured in seconds. @@ -1772,7 +1850,7 @@ def __str__(self) -> str: return json.dumps(data, sort_keys=True) -class Axis360FulfillmentInfo(APIAwareFulfillmentInfo): +class Axis360FulfillmentInfo(APIAwareFulfillmentInfo, LoggerMixin): """An Axis 360-specific FulfillmentInfo implementation for audiobooks and books served through AxisNow. @@ -1782,10 +1860,15 @@ class Axis360FulfillmentInfo(APIAwareFulfillmentInfo): those requests. """ - def do_fetch(self): + def do_fetch(self) -> None: _db = self.api._db license_pool = self.license_pool(_db) transaction_id = self.key + if not isinstance(self.api, Axis360API): + self.log.error( + f"Called with wrong API type {self.api.__class__.__name__} should be {Axis360API.__name__}" + ) + raise ValueError("Axis360FulfillmentInfo can only be used with Axis360API") response = self.api.get_fulfillment_info(transaction_id) parser = Axis360FulfillmentInfoResponseParser(self.api) manifest, expires = parser.parse(response.content, license_pool=license_pool) @@ -1794,7 +1877,7 @@ def do_fetch(self): self._content_expires = expires -class Axis360AcsFulfillmentInfo(FulfillmentInfo): +class Axis360AcsFulfillmentInfo(FulfillmentInfo, LoggerMixin): """This implements a Axis 360 specific FulfillmentInfo for ACS content served through AxisNow. 
The AxisNow API gives us a link that we can use to get the ACSM file that we serve to the mobile apps. @@ -1826,15 +1909,13 @@ class Axis360AcsFulfillmentInfo(FulfillmentInfo): code path than most of our external HTTP requests. """ - logger = logging.getLogger(__name__) - - def __init__(self, verify: bool, **kwargs): + def __init__(self, verify: bool, **kwargs: Any) -> None: super().__init__(**kwargs) self.verify: bool = verify def problem_detail_document(self, error_details: str) -> ProblemDetail: service_name = urlparse(self.content_link).netloc - self.logger.warning(error_details) + self.log.warning(error_details) return INTEGRATION_ERROR.detailed( _(RequestNetworkException.detail, service=service_name), title=RequestNetworkException.title, diff --git a/core/util/datetime_helpers.py b/core/util/datetime_helpers.py index 6acfdc0a51..137ec1fa49 100644 --- a/core/util/datetime_helpers.py +++ b/core/util/datetime_helpers.py @@ -61,7 +61,7 @@ def to_utc(dt: Optional[datetime.datetime]) -> Optional[datetime.datetime]: return dt.astimezone(pytz.UTC) -def strptime_utc(date_string, format): +def strptime_utc(date_string: str, format: str) -> datetime.datetime: """Parse a string that describes a time but includes no timezone, into a timezone-aware datetime object set to UTC. diff --git a/pyproject.toml b/pyproject.toml index 3ed2fb9e8b..746b02ce20 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,6 +75,7 @@ module = [ "api.admin.form_data", "api.admin.model.dashboard_statistics", "api.adobe_vendor_id", + "api.axis", "api.circulation", "api.discovery.*", "api.integration.*", diff --git a/tests/api/test_axis.py b/tests/api/test_axis.py index 5583df4ba1..0b7b90edd3 100644 --- a/tests/api/test_axis.py +++ b/tests/api/test_axis.py @@ -6,7 +6,7 @@ import ssl import urllib from functools import partial -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from unittest.mock import MagicMock, Mock, PropertyMock import pytest @@ -33,6 +33,7 @@ from api.circulation import FulfillmentInfo, HoldInfo, LoanInfo from api.circulation_exceptions import * from api.web_publication_manifest import FindawayManifest, SpineItem +from core.analytics import Analytics from core.coverage import CoverageFailure from core.metadata_layer import ( CirculationData, @@ -130,12 +131,6 @@ def axis360( class TestAxis360API: - def test_external_integration(self, axis360: Axis360Fixture): - assert ( - axis360.collection.external_integration - == axis360.api.external_integration(object()) - ) - def test__run_self_tests( self, axis360: Axis360Fixture, @@ -181,7 +176,7 @@ def patron_activity(self, patron, pin): patron_activity, pools_without_delivery, refresh_bearer_token, - ] = sorted(api._run_self_tests(axis360.db.session), key=lambda x: x.name) + ] = sorted(api._run_self_tests(axis360.db.session), key=lambda x: str(x.name)) assert "Refreshing bearer token" == refresh_bearer_token.name assert True == refresh_bearer_token.success assert "the new token" == refresh_bearer_token.result @@ -239,7 +234,8 @@ def refresh_bearer_token(self): api = Mock(axis360.db.session, axis360.collection) [failure] = api._run_self_tests(axis360.db.session) assert "Refreshing bearer token" == failure.name - assert False == failure.success + assert failure.success is False + assert failure.exception is not None assert "no way" == failure.exception.args[0] def test_create_identifier_strings(self, axis360: Axis360Fixture): @@ -361,8 +357,7 @@ def test_checkin_success(self, axis360: Axis360Fixture): patron = axis360.db.patron() barcode 
= axis360.db.fresh_str() patron.authorization_identifier = barcode - response = axis360.api.checkin(patron, "pin", pool) - assert response == True + axis360.api.checkin(patron, "pin", pool) # Verify the format of the HTTP request that was made. [request] = axis360.api.requests @@ -501,7 +496,7 @@ def test_patron_activity(self, axis360: Axis360Fixture): assert patron.authorization_identifier == kwargs["params"]["patronId"] # We got three results -- two holds and one loan. - [hold1, loan, hold2] = sorted(results, key=lambda x: x.identifier) + [hold1, loan, hold2] = sorted(results, key=lambda x: str(x.identifier)) assert isinstance(hold1, HoldInfo) assert isinstance(hold2, HoldInfo) assert isinstance(loan, LoanInfo) @@ -691,7 +686,9 @@ def test_update_book(self, axis360: Axis360Fixture): analytics = MockAnalyticsProvider() api = MockAxis360API(axis360.db.session, axis360.collection) e, e_new, lp, lp_new = api.update_book( - axis360.BIBLIOGRAPHIC_DATA, axis360.AVAILABILITY_DATA, analytics=analytics + axis360.BIBLIOGRAPHIC_DATA, + axis360.AVAILABILITY_DATA, + analytics=cast(Analytics, analytics), ) # A new LicensePool and Edition were created. assert True == lp_new @@ -727,7 +724,9 @@ def test_update_book(self, axis360: Axis360Fixture): ) e2, e_new, lp2, lp_new = api.update_book( - axis360.BIBLIOGRAPHIC_DATA, new_circulation, analytics=analytics + axis360.BIBLIOGRAPHIC_DATA, + new_circulation, + analytics=cast(Analytics, analytics), ) # The same LicensePool and Edition are returned -- no new ones @@ -842,14 +841,18 @@ class MockMonitor(Axis360CirculationMonitor): def process_book(self, bibliographic, circulation): self.processed.append((bibliographic, circulation)) - monitor = MockMonitor(axis360.db.session, axis360.collection, api_class=MockAPI) + mock_api = MockAPI(axis360.db.session, axis360.collection) + monitor = MockMonitor( + axis360.db.session, axis360.collection, api_class=mock_api + ) data = axis360.sample_data("single_item.xml") axis360.api.queue_response(200, content=data) progress = TimestampData() - monitor.catch_up_from("start", "cutoff", progress) + start_mock = MagicMock() + monitor.catch_up_from(start_mock, MagicMock(), progress) # The start time was passed into recent_activity. - assert "start" == monitor.api.recent_activity_called_with + assert start_mock == mock_api.recent_activity_called_with # process_book was called on each item returned by recent_activity. assert [(1, "a"), (2, "b")] == monitor.processed @@ -993,11 +996,12 @@ def test_bibliographic_parser(self, axis360: Axis360Fixture): data = axis360.sample_data("tiny_collection.xml") - [bib1, av1], [bib2, av2] = BibliographicParser(False, True).process_all(data) + [bib1, av1], [bib2, av2] = BibliographicParser().process_all(data) - # We didn't ask for availability information, so none was provided. - assert av1 is None - assert av2 is None + # We test for availability information in a separate test. + # Here we just make sure it is present. + assert av1 is not None + assert av2 is not None # But we did get bibliographic information. assert bib1 is not None @@ -1119,8 +1123,8 @@ def test_bibliographic_parser_audiobook(self, axis360: Axis360Fixture): # narrator information here. 
data = axis360.sample_data("availability_with_audiobook_fulfillment.xml") - [[bib, av]] = BibliographicParser(False, True).process_all(data) - assert av is None + [[bib, av]] = BibliographicParser().process_all(data) + assert av is not None assert bib is not None assert "Back Spin" == bib.title @@ -1143,8 +1147,8 @@ def test_bibliographic_parser_blio_format(self, axis360: Axis360Fixture): data = data.replace(b"Acoustik", b"Blio") data = data.replace(b"AxisNow", b"No Such Format") - [[bib, av]] = BibliographicParser(False, True).process_all(data) - assert av is None + [[bib, av]] = BibliographicParser().process_all(data) + assert av is not None assert bib is not None # A book in Blio format is treated as an AxisNow ebook. @@ -1160,8 +1164,8 @@ def test_bibliographic_parser_blio_and_axisnow_format( data = axis360.sample_data("availability_with_audiobook_fulfillment.xml") data = data.replace(b"Acoustik", b"Blio") - [[bib, av]] = BibliographicParser(False, True).process_all(data) - assert av is None + [[bib, av]] = BibliographicParser().process_all(data) + assert av is not None assert bib is not None # There is only one FormatData -- 'Blio' and 'AxisNow' mean the same thing. @@ -1175,8 +1179,8 @@ def test_bibliographic_parser_unsupported_format(self, axis360: Axis360Fixture): data = data.replace(b"Acoustik", b"No Such Format 1") data = data.replace(b"AxisNow", b"No Such Format 2") - [[bib, av]] = BibliographicParser(False, True).process_all(data) - assert av is None + [[bib, av]] = BibliographicParser().process_all(data) + assert av is not None assert bib is not None # We don't support any of the formats, so no FormatData objects were created. @@ -1221,11 +1225,12 @@ def test_availability_parser(self, axis360: Axis360Fixture): data = axis360.sample_data("tiny_collection.xml") - [bib1, av1], [bib2, av2] = BibliographicParser(True, False).process_all(data) + [bib1, av1], [bib2, av2] = BibliographicParser().process_all(data) - # We didn't ask for bibliographic information, so none was provided. - assert bib1 is None - assert bib2 is None + # We already tested the bibliographic information, so we just make sure + # it is present. + assert bib1 is not None + assert bib2 is not None # But we did get availability information. 
assert av1 is not None @@ -1807,10 +1812,11 @@ def test_unicode(self): class Axis360ProviderFixture(Axis360Fixture): def __init__(self, db: DatabaseTransactionFixture, files: AxisFilesFixture): super().__init__(db, files) + mock_api = MockAxis360API(db.session, self.collection) self.provider = Axis360BibliographicCoverageProvider( - self.collection, api_class=MockAxis360API + self.collection, api_class=mock_api ) - self.api = self.provider.api + self.api = mock_api @pytest.fixture(scope="function") diff --git a/tests/api/test_controller_loan.py b/tests/api/test_controller_loan.py index 4af9690604..f76515fd77 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/test_controller_loan.py @@ -10,8 +10,14 @@ from flask import url_for from werkzeug import Response as wkResponse -from api.axis import Axis360FulfillmentInfo -from api.circulation import CirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo +from api.axis import Axis360API, Axis360FulfillmentInfo +from api.circulation import ( + BaseCirculationAPI, + CirculationAPI, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) from api.circulation_exceptions import ( AlreadyOnHold, NoAvailableCopies, @@ -877,7 +883,7 @@ def test_no_drm_fulfill(self, loan_fixture: LoanFixture): lpdm.delivery_mechanism.default_client_can_fulfill = True # Mock out the flow - api = MagicMock() + api = MagicMock(spec=BaseCirculationAPI) api.fulfill.return_value = FulfillmentInfo( loan_fixture.db.default_collection(), DataSource.OVERDRIVE, @@ -913,6 +919,7 @@ def test_no_drm_fulfill(self, loan_fixture: LoanFixture): assert response.location == "https://example.org/redirect_to_epub" # Axis360 variant + api = MagicMock(spec=Axis360API) api.collection = loan_fixture.db.default_collection() api._db = loan_fixture.db.session axis360_ff = Axis360FulfillmentInfo( @@ -933,6 +940,9 @@ def test_no_drm_fulfill(self, loan_fixture: LoanFixture): library=loan_fixture.db.default_library(), headers=dict(Authorization=loan_fixture.valid_auth), ): + controller.circulation.api_for_collection[ + loan_fixture.db.default_collection().id + ] = api response = controller.fulfill(pool.id, lpdm.delivery_mechanism.id) assert isinstance(response, wkResponse) From 3954b1f915eed4f9ea20c7a432da956a13816e9a Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 11 Oct 2023 09:07:18 -0300 Subject: [PATCH 101/262] Use codecov flags (PP-499) (#1443) --- .github/codecov.yml | 12 ++++++++++-- .github/workflows/test-build.yml | 2 ++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/.github/codecov.yml b/.github/codecov.yml index 794b353619..4e25e26fc7 100644 --- a/.github/codecov.yml +++ b/.github/codecov.yml @@ -8,5 +8,13 @@ coverage: comment: # Only comment when coverage changes require_changes: true - # Require all builds to finish before comment is posted - after_n_builds: 8 + +flag_management: + default_rules: # the rules that will be followed for any flag added, generally + carryforward: true + statuses: + - type: project + target: auto + threshold: 0.2% + - type: patch + target: auto diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 426199d9ad..1d985105db 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -60,6 +60,7 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} files: ./coverage.xml + flags: ${{ matrix.module }} test-migrations: name: Migration Tests @@ -107,6 +108,7 @@ jobs: with: token: ${{ secrets.CODECOV_TOKEN }} files: ./coverage.xml + flags: migration docker-image-build: name: Docker build From 
9ecbc942e4b0957e048a5229a6261e40e0d47498 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 11 Oct 2023 19:48:57 +0530 Subject: [PATCH 102/262] The pagination links are not reusable between page and search feeds (#1450) Reverted back to the pre-refactor logic for search pagination --- core/feed/acquisition.py | 14 +++++- tests/api/feed/test_opds_acquisition_feed.py | 48 ++++++++------------ 2 files changed, 31 insertions(+), 31 deletions(-) diff --git a/core/feed/acquisition.py b/core/feed/acquisition.py index 090fb14c55..5fed809b4c 100644 --- a/core/feed/acquisition.py +++ b/core/feed/acquisition.py @@ -815,7 +815,19 @@ def make_link(ep: Type[EntryPoint]) -> str: facets.entrypoint, ) - feed.add_pagination_links(results, lane) + if len(results) > 0: + # There are works in this list. Add a 'next' link. + next_url = annotator.search_url(lane, query, pagination.next_page, facets) + feed.add_link(href=next_url, rel="next") + + if pagination.offset > 0: + first_url = annotator.search_url(lane, query, pagination.first_page, facets) + feed.add_link(rel="first", href=first_url) + + previous_page = pagination.previous_page + if previous_page: + previous_url = annotator.search_url(lane, query, previous_page, facets) + feed.add_link(rel="previous", href=previous_url) # Add "up" link. feed.add_link( diff --git a/tests/api/feed/test_opds_acquisition_feed.py b/tests/api/feed/test_opds_acquisition_feed.py index 8e2089b217..0d4222aa3f 100644 --- a/tests/api/feed/test_opds_acquisition_feed.py +++ b/tests/api/feed/test_opds_acquisition_feed.py @@ -8,6 +8,7 @@ from sqlalchemy.orm import Session from werkzeug.datastructures import MIMEAccept +from api.app import app from core.entrypoint import ( AudiobooksEntryPoint, EbooksEntryPoint, @@ -116,22 +117,6 @@ def feed_url(cls, lane, facets=None, pagination=None): base += sep + pagination.query_string return base - @classmethod - def search_url(cls, lane, query, pagination, facets=None): - if isinstance(lane, Lane): - base = "http://%s/" % lane.url_name - else: - base = "http://%s/" % lane.display_name - sep = "?" - if pagination: - base += sep + pagination.query_string - sep = "&" - if facets: - facet_query_string = facets.query_string - if facet_query_string: - base += sep + facet_query_string - return base - @classmethod def groups_url(cls, lane, facets=None): if lane and isinstance(lane, Lane): @@ -1191,17 +1176,18 @@ def run(wl=None, facets=None, pagination=None): was called with. """ data.mock.called_with = None - OPDSAcquisitionFeed.search( - session, - "title", - "url", - wl, - None, - None, - pagination=pagination, - facets=facets, - annotator=data.annotator(), - ) + with app.test_request_context("/"): + OPDSAcquisitionFeed.search( + session, + "title", + "url", + wl, + None, + None, + pagination=pagination, + facets=facets, + annotator=LibraryAnnotator(None, None, db.default_library()), + ) return data.mock.called_with # Mock search() so it never tries to return anything. @@ -1236,14 +1222,16 @@ def mock_search(self, *args, **kwargs): # The make_link function that was passed in calls # TestAnnotator.search_url() when passed an EntryPoint. 
- first_page_url = "http://wl/?available=all&collection=full&entrypoint=Book&order=relevance&search_type=default" - assert first_page_url == make_link(EbooksEntryPoint) + first_page_url = "http://localhost/default/search/?entrypoint=Book&order=relevance&available=all&collection=full&search_type=default" + with app.test_request_context("/"): + assert first_page_url == make_link(EbooksEntryPoint) # Pagination information is not propagated through entry point links # -- you always start at the beginning of the list. pagination = Pagination(offset=100) feed, make_link, entrypoints, selected = run(data.wl, facets, pagination) - assert first_page_url == make_link(EbooksEntryPoint) + with app.test_request_context("/"): + assert first_page_url == make_link(EbooksEntryPoint) class TestLookupAcquisitionFeed: From c06784bd480537e6d23ccd0c3d6887a0ad57e8c2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 11 Oct 2023 14:14:16 -0300 Subject: [PATCH 103/262] Bump mypy from 1.5.1 to 1.6.0 (#1453) * Bump mypy from 1.5.1 to 1.6.0 Bumps [mypy](https://github.com/python/mypy) from 1.5.1 to 1.6.0. - [Commits](https://github.com/python/mypy/compare/v1.5.1...v1.6.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Remove unneeded ignore. --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jonathan Green --- core/configuration/ignored_identifier.py | 2 +- poetry.lock | 56 ++++++++++++------------ 2 files changed, 29 insertions(+), 29 deletions(-) diff --git a/core/configuration/ignored_identifier.py b/core/configuration/ignored_identifier.py index db7d7bdca8..0b4b56ef63 100644 --- a/core/configuration/ignored_identifier.py +++ b/core/configuration/ignored_identifier.py @@ -57,7 +57,7 @@ def _get_ignored_identifier_types( "ignored_identifier_types", [] ) - return self._ignored_identifier_types # type: ignore[return-value] + return self._ignored_identifier_types def set_ignored_identifier_types( self, diff --git a/poetry.lock b/poetry.lock index f530576f50..7c5ee14d70 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2309,38 +2309,38 @@ files = [ [[package]] name = "mypy" -version = "1.5.1" +version = "1.6.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f33592ddf9655a4894aef22d134de7393e95fcbdc2d15c1ab65828eee5c66c70"}, - {file = "mypy-1.5.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:258b22210a4a258ccd077426c7a181d789d1121aca6db73a83f79372f5569ae0"}, - {file = "mypy-1.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9ec1f695f0c25986e6f7f8778e5ce61659063268836a38c951200c57479cc12"}, - {file = "mypy-1.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:abed92d9c8f08643c7d831300b739562b0a6c9fcb028d211134fc9ab20ccad5d"}, - {file = "mypy-1.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:a156e6390944c265eb56afa67c74c0636f10283429171018446b732f1a05af25"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6ac9c21bfe7bc9f7f1b6fae441746e6a106e48fc9de530dea29e8cd37a2c0cc4"}, - {file = "mypy-1.5.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:51cb1323064b1099e177098cb939eab2da42fea5d818d40113957ec954fc85f4"}, - {file = 
"mypy-1.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:596fae69f2bfcb7305808c75c00f81fe2829b6236eadda536f00610ac5ec2243"}, - {file = "mypy-1.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:32cb59609b0534f0bd67faebb6e022fe534bdb0e2ecab4290d683d248be1b275"}, - {file = "mypy-1.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:159aa9acb16086b79bbb0016145034a1a05360626046a929f84579ce1666b315"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f6b0e77db9ff4fda74de7df13f30016a0a663928d669c9f2c057048ba44f09bb"}, - {file = "mypy-1.5.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26f71b535dfc158a71264e6dc805a9f8d2e60b67215ca0bfa26e2e1aa4d4d373"}, - {file = "mypy-1.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2fc3a600f749b1008cc75e02b6fb3d4db8dbcca2d733030fe7a3b3502902f161"}, - {file = "mypy-1.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:26fb32e4d4afa205b24bf645eddfbb36a1e17e995c5c99d6d00edb24b693406a"}, - {file = "mypy-1.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:82cb6193de9bbb3844bab4c7cf80e6227d5225cc7625b068a06d005d861ad5f1"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4a465ea2ca12804d5b34bb056be3a29dc47aea5973b892d0417c6a10a40b2d65"}, - {file = "mypy-1.5.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9fece120dbb041771a63eb95e4896791386fe287fefb2837258925b8326d6160"}, - {file = "mypy-1.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d28ddc3e3dfeab553e743e532fb95b4e6afad51d4706dd22f28e1e5e664828d2"}, - {file = "mypy-1.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:57b10c56016adce71fba6bc6e9fd45d8083f74361f629390c556738565af8eeb"}, - {file = "mypy-1.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:ff0cedc84184115202475bbb46dd99f8dcb87fe24d5d0ddfc0fe6b8575c88d2f"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8f772942d372c8cbac575be99f9cc9d9fb3bd95c8bc2de6c01411e2c84ebca8a"}, - {file = "mypy-1.5.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5d627124700b92b6bbaa99f27cbe615c8ea7b3402960f6372ea7d65faf376c14"}, - {file = "mypy-1.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:361da43c4f5a96173220eb53340ace68cda81845cd88218f8862dfb0adc8cddb"}, - {file = "mypy-1.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:330857f9507c24de5c5724235e66858f8364a0693894342485e543f5b07c8693"}, - {file = "mypy-1.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:c543214ffdd422623e9fedd0869166c2f16affe4ba37463975043ef7d2ea8770"}, - {file = "mypy-1.5.1-py3-none-any.whl", hash = "sha256:f757063a83970d67c444f6e01d9550a7402322af3557ce7630d3c957386fa8f5"}, - {file = "mypy-1.5.1.tar.gz", hash = "sha256:b031b9601f1060bf1281feab89697324726ba0c0bae9d7cd7ab4b690940f0b92"}, + {file = "mypy-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:091f53ff88cb093dcc33c29eee522c087a438df65eb92acd371161c1f4380ff0"}, + {file = "mypy-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb7ff4007865833c470a601498ba30462b7374342580e2346bf7884557e40531"}, + {file = "mypy-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49499cf1e464f533fc45be54d20a6351a312f96ae7892d8e9f1708140e27ce41"}, + {file = "mypy-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c192445899c69f07874dabda7e931b0cc811ea055bf82c1ababf358b9b2a72c"}, + {file = "mypy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3df87094028e52766b0a59a3e46481bb98b27986ed6ded6a6cc35ecc75bb9182"}, 
+ {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"}, + {file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"}, + {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"}, + {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"}, + {file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"}, + {file = "mypy-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:971104bcb180e4fed0d7bd85504c9036346ab44b7416c75dd93b5c8c6bb7e28f"}, + {file = "mypy-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab98b8f6fdf669711f3abe83a745f67f50e3cbaea3998b90e8608d2b459fd566"}, + {file = "mypy-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a69db3018b87b3e6e9dd28970f983ea6c933800c9edf8c503c3135b3274d5ad"}, + {file = "mypy-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dccd850a2e3863891871c9e16c54c742dba5470f5120ffed8152956e9e0a5e13"}, + {file = "mypy-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8598307150b5722854f035d2e70a1ad9cc3c72d392c34fffd8c66d888c90f17"}, + {file = "mypy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fea451a3125bf0bfe716e5d7ad4b92033c471e4b5b3e154c67525539d14dc15a"}, + {file = "mypy-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e28d7b221898c401494f3b77db3bac78a03ad0a0fff29a950317d87885c655d2"}, + {file = "mypy-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b7a99275a61aa22256bab5839c35fe8a6887781862471df82afb4b445daae6"}, + {file = "mypy-1.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7469545380dddce5719e3656b80bdfbb217cfe8dbb1438532d6abc754b828fed"}, + {file = "mypy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7807a2a61e636af9ca247ba8494031fb060a0a744b9fee7de3a54bed8a753323"}, + {file = "mypy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2dad072e01764823d4b2f06bc7365bb1d4b6c2f38c4d42fade3c8d45b0b4b67"}, + {file = "mypy-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b19006055dde8a5425baa5f3b57a19fa79df621606540493e5e893500148c72f"}, + {file = "mypy-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31eba8a7a71f0071f55227a8057468b8d2eb5bf578c8502c7f01abaec8141b2f"}, + {file = "mypy-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e0db37ac4ebb2fee7702767dfc1b773c7365731c22787cb99f507285014fcaf"}, + {file = "mypy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:c69051274762cccd13498b568ed2430f8d22baa4b179911ad0c1577d336ed849"}, + {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"}, + {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"}, ] [package.dependencies] From 76773135bf433b82472a47b910be26581c3578ae Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 11 Oct 2023 14:35:23 -0300 Subject: [PATCH 104/262] Disable per flag status. 
(#1452) --- .github/codecov.yml | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/.github/codecov.yml b/.github/codecov.yml index 4e25e26fc7..cfae9da039 100644 --- a/.github/codecov.yml +++ b/.github/codecov.yml @@ -12,9 +12,4 @@ comment: flag_management: default_rules: # the rules that will be followed for any flag added, generally carryforward: true - statuses: - - type: project - target: auto - threshold: 0.2% - - type: patch - target: auto + statuses: [] From 900e2f3fe6279d095becc78978a024c4fd9510e4 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 12 Oct 2023 21:46:25 -0300 Subject: [PATCH 105/262] Fix some issues found during testing of push notification scripts (PP-570) (#1451) --- core/scripts.py | 2 +- core/util/notifications.py | 138 +++++++++++++++----------- pyproject.toml | 4 +- tests/core/util/test_notifications.py | 121 ++++++++++++++++++---- 4 files changed, 183 insertions(+), 82 deletions(-) diff --git a/core/scripts.py b/core/scripts.py index eaa0cdd3c5..e7b7cf2ca6 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -2868,7 +2868,7 @@ def process_loan(self, loan: Loan): # the same day if delta.days in self.LOAN_EXPIRATION_DAYS: self.log.info( - f"Patron {patron.external_identifier} has an expiring loan on ({loan.license_pool.identifier.urn})" + f"Patron {patron.authorization_identifier} has an expiring loan on ({loan.license_pool.identifier.urn})" ) PushNotifications.send_loan_expiry_message(loan, delta.days, tokens) diff --git a/core/util/notifications.py b/core/util/notifications.py index 9a287693a8..4be724a745 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -1,24 +1,22 @@ from __future__ import annotations -from typing import TYPE_CHECKING +from typing import Mapping, Optional, cast import firebase_admin from firebase_admin import credentials, messaging +from firebase_admin.exceptions import FirebaseError +from firebase_admin.messaging import UnregisteredError from sqlalchemy.orm import Session from core.config import Configuration from core.model.configuration import ConfigurationSetting from core.model.constants import NotificationConstants from core.model.devicetokens import DeviceToken, DeviceTokenTypes -from core.model.edition import Edition from core.model.identifier import Identifier from core.model.patron import Hold, Loan, Patron from core.model.work import Work from core.util.log import LoggerMixin -if TYPE_CHECKING: - from firebase_admin.messaging import SendResponse - class PushNotifications(LoggerMixin): # Should be set to true while unit testing @@ -37,7 +35,7 @@ def notifiable_tokens(cls, patron: Patron) -> list[DeviceToken]: ] @classmethod - def fcm_app(cls): + def fcm_app(cls) -> firebase_admin.App: if not cls._fcm_app: cls._fcm_app = firebase_admin.initialize_app( credentials.Certificate(Configuration.fcm_credentials()) @@ -50,22 +48,72 @@ def base_url(cls, _db: Session) -> str: cls._base_url = ConfigurationSetting.sitewide( _db, Configuration.BASE_URL_KEY ).value - return cls._base_url + return cast(str, cls._base_url) + + @classmethod + def send_messages( + cls, + tokens: list[DeviceToken], + notification: Optional[messaging.Notification], + data: Mapping[str, str | None], + ) -> list[str]: + responses = [] + + data_typed = {} + + # Make sure our data is all typed as strings for Firebase + for key, value in data.items(): + if value is None: + # Firebase doesn't like null values + cls.logger().warning( + f"Removing {key} from notification data because it is None" + ) + continue + elif not 
isinstance(value, str): + cls.logger().warning(f"Converting {key} from {type(value)} to str") # type: ignore[unreachable] + data_typed[key] = str(value) + else: + data_typed[key] = value + + for token in tokens: + try: + msg = messaging.Message( + token=token.device_token, + notification=notification, + data=data_typed, + ) + resp = messaging.send(msg, dry_run=cls.TESTING_MODE, app=cls.fcm_app()) + cls.logger().info( + f"Sent notification for patron {token.patron.authorization_identifier} " + f"notification ID: {resp}" + ) + responses.append(resp) + except UnregisteredError: + cls.logger().info( + f"Device token {token.device_token} for patron {token.patron.authorization_identifier} " + f"is no longer registered, deleting" + ) + db = Session.object_session(token) + db.delete(token) + except FirebaseError: + cls.logger().exception( + f"Failed to send notification for patron {token.patron.authorization_identifier}" + ) + return responses @classmethod def send_loan_expiry_message( - cls, loan: Loan, days_to_expiry, tokens: list[DeviceToken] - ) -> list[SendResponse]: + cls, loan: Loan, days_to_expiry: int, tokens: list[DeviceToken] + ) -> list[str]: """Send a loan expiry reminder to the mobile Apps, with enough information to identify two things - Which loan is being mentioned, in order to correctly deep link - Which patron and make the loans api request with the right authentication""" - responses = [] _db = Session.object_session(loan) url = cls.base_url(_db) - edition: Edition = loan.license_pool.presentation_edition - identifier: Identifier = loan.license_pool.identifier - library_short_name = loan.library and loan.library.short_name + edition = loan.license_pool.presentation_edition + identifier = loan.license_pool.identifier + library_short_name = loan.library.short_name title = f"Only {days_to_expiry} {'days' if days_to_expiry != 1 else 'day'} left on your loan!" body = f"Your loan on {edition.title} is expiring soon" data = dict( @@ -84,20 +132,12 @@ def send_loan_expiry_message( data["authorization_identifier"] = loan.patron.authorization_identifier cls.logger().info( - f"Patron {loan.patron.authorization_identifier} has {len(tokens)} device tokens." + f"Patron {loan.patron.authorization_identifier} has {len(tokens)} device tokens. " + f"Sending loan expiry notification(s)." + ) + return cls.send_messages( + tokens, messaging.Notification(title=title, body=body), data ) - for token in tokens: - msg = messaging.Message( - token=token.device_token, - notification=messaging.Notification(title=title, body=body), - data=data, - ) - resp = messaging.send(msg, dry_run=cls.TESTING_MODE, app=cls.fcm_app()) - cls.logger().info( - f"Sent loan expiry notification for {loan.patron.authorization_identifier} ID: {resp}" - ) - responses.append(resp) - return responses @classmethod def send_activity_sync_message(cls, patrons: list[Patron]) -> list[str]: @@ -107,7 +147,7 @@ def send_activity_sync_message(cls, patrons: list[Patron]) -> list[str]: if not patrons: return [] - msgs = [] + responses = [] _db = Session.object_session(patrons[0]) url = cls.base_url(_db) for patron in patrons: @@ -123,22 +163,14 @@ def send_activity_sync_message(cls, patrons: list[Patron]) -> list[str]: data["authorization_identifier"] = patron.authorization_identifier cls.logger().info( - f"Must sync patron activity for {patron.authorization_identifier}, has {len(tokens)} device tokens." + f"Must sync patron activity for {patron.authorization_identifier}, has {len(tokens)} device tokens. 
" + f"Sending activity sync notification(s)." ) - for token in tokens: - msg = messaging.Message( - token=token.device_token, - data=data, - ) - msgs.append(msg) - batch: messaging.BatchResponse = messaging.send_all( - msgs, dry_run=cls.TESTING_MODE, app=cls.fcm_app() - ) - cls.logger().info( - f"Activity Sync Notifications: Successes {batch.success_count}, failures {batch.failure_count}." - ) - return [resp.message_id for resp in batch.responses] + resp = cls.send_messages(tokens, None, data) + responses.extend(resp) + + return responses @classmethod def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: @@ -146,14 +178,14 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: if not holds: return [] - msgs = [] + responses = [] _db = Session.object_session(holds[0]) url = cls.base_url(_db) for hold in holds: tokens = cls.notifiable_tokens(hold.patron) cls.logger().info( - f"Notifying patron {hold.patron.authorization_identifier or hold.patron.username} for hold: {hold.work.title}. " - f"Patron has {len(tokens)} device tokens." + f"Notifying patron {hold.patron.authorization_identifier or hold.patron.username} for " + f"hold: {hold.work.title}. Patron has {len(tokens)} device tokens." ) loans_api = f"{url}/{hold.patron.library.short_name}/loans" work: Work = hold.work @@ -172,17 +204,7 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: if hold.patron.authorization_identifier: data["authorization_identifier"] = hold.patron.authorization_identifier - for token in tokens: - msg = messaging.Message( - token=token.device_token, - notification=messaging.Notification(title=title), - data=data, - ) - msgs.append(msg) - batch: messaging.BatchResponse = messaging.send_all( - msgs, dry_run=cls.TESTING_MODE, app=cls.fcm_app() - ) - cls.logger().info( - f"Hold Notifications: Successes {batch.success_count}, failures {batch.failure_count}." 
- ) - return [resp.message_id for resp in batch.responses] + resp = cls.send_messages(tokens, messaging.Notification(title=title), data) + responses.extend(resp) + + return responses diff --git a/pyproject.toml b/pyproject.toml index 746b02ce20..3f7905768c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -96,6 +96,7 @@ module = [ "core.settings.*", "core.util.authentication_for_opds", "core.util.cache", + "core.util.notifications", "core.util.problem_detail", "core.util.xmlparser", "tests.fixtures.authenticator", @@ -129,8 +130,7 @@ module = [ "core._version", "expiringdict", "feedparser", - "firebase_admin", - "firebase_admin.messaging", + "firebase_admin.*", "flask_babel", "flask_pydantic_spec.*", "fuzzywuzzy", diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index 73fb52cc3c..c9cc374778 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -1,14 +1,18 @@ +import logging import re from typing import Generator from unittest import mock +from unittest.mock import MagicMock import firebase_admin import pytest +from firebase_admin.exceptions import FirebaseError +from firebase_admin.messaging import UnregisteredError from google.auth import credentials from requests_mock import Mocker from core.config import Configuration -from core.model import create, get_one_or_create +from core.model import create, get_one, get_one_or_create from core.model.configuration import ConfigurationSetting from core.model.constants import NotificationConstants from core.model.devicetokens import DeviceToken, DeviceTokenTypes @@ -51,11 +55,13 @@ def __init__(self, db: DatabaseTransactionFixture, app: firebase_admin.App) -> N def push_notf_fixture( db: DatabaseTransactionFixture, ) -> Generator[PushNotificationsFixture, None, None]: - app = firebase_admin.initialize_app( - MockCredential(), options=dict(projectId="mock-app-1"), name="testapp" - ) - yield PushNotificationsFixture(db, app) - firebase_admin.delete_app(app) + with mock.patch("core.util.notifications.PushNotifications.fcm_app") as mock_fcm: + app = firebase_admin.initialize_app( + MockCredential(), options=dict(projectId="mock-app-1"), name="testapp" + ) + mock_fcm.return_value = app + yield PushNotificationsFixture(db, app) + firebase_admin.delete_app(app) class TestPushNotifications: @@ -77,20 +83,15 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur # Test the data structuring down to the "send" method # If bad data is detected, the fcm "send" method will error out # If not, we are good - with mock.patch( - "core.util.notifications.PushNotifications.fcm_app" - ) as mock_fcm, Mocker() as mocker: + with Mocker() as mocker: mocker.post( re.compile("https://fcm.googleapis.com"), json=dict(name="mid-mock") ) - mock_fcm.return_value = push_notf_fixture.app assert PushNotifications.send_loan_expiry_message( loan, 1, [device_token] ) == ["mid-mock"] - with mock.patch( - "core.util.notifications.PushNotifications.fcm_app" - ) as mock_fcm, mock.patch("core.util.notifications.messaging") as messaging: + with mock.patch("core.util.notifications.messaging") as messaging: PushNotifications.send_loan_expiry_message(loan, 1, [device_token]) assert messaging.Message.call_count == 1 @@ -119,7 +120,7 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur assert messaging.send.call_count == 1 assert messaging.send.call_args_list[0] == [ (messaging.Message(),), - {"dry_run": True, "app": mock_fcm()}, + {"dry_run": True, 
"app": push_notf_fixture.app}, ] def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): @@ -149,15 +150,14 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): ) tokens.append(t) - with mock.patch( - "core.util.notifications.PushNotifications.fcm_app" - ) as fcm_app, mock.patch("core.util.notifications.messaging") as messaging: + with mock.patch("core.util.notifications.messaging") as messaging: # Notify 2 patrons of 3 total PushNotifications.send_activity_sync_message([patron1, patron2]) assert messaging.Message.call_count == 4 assert messaging.Message.call_args_list == [ mock.call( token=tokens[0].device_token, + notification=None, data=dict( event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, loans_endpoint="http://localhost/default/loans", @@ -167,6 +167,7 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token=tokens[1].device_token, + notification=None, data=dict( event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, loans_endpoint="http://localhost/default/loans", @@ -176,6 +177,7 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token=tokens[2].device_token, + notification=None, data=dict( event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, loans_endpoint="http://localhost/default/loans", @@ -184,6 +186,7 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): ), mock.call( token=tokens[3].device_token, + notification=None, data=dict( event_type=NotificationConstants.ACTIVITY_SYNC_TYPE, loans_endpoint="http://localhost/default/loans", @@ -192,7 +195,7 @@ def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): ), ] - assert messaging.send_all.call_count == 1 + assert messaging.send.call_count == 4 def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): db = push_notf_fixture.db @@ -218,9 +221,7 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): hold1, _ = p1.on_hold_to(patron1, position=0) hold2, _ = p2.on_hold_to(patron2, position=0) - with mock.patch( - "core.util.notifications.PushNotifications.fcm_app" - ) as fcm_app, mock.patch("core.util.notifications.messaging") as messaging: + with mock.patch("core.util.notifications.messaging") as messaging: PushNotifications.send_holds_notifications([hold1, hold2]) loans_api = "http://localhost/default/loans" @@ -274,3 +275,81 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): ), ), ] + + def test_send_messages( + self, + push_notf_fixture: PushNotificationsFixture, + caplog: pytest.LogCaptureFixture, + ): + db = push_notf_fixture.db + patron1 = db.patron() + token = DeviceToken.create( + db.session, DeviceTokenTypes.FCM_IOS, "test-token", patron1 + ) + + caplog.set_level(logging.WARNING) + with mock.patch("core.util.notifications.messaging") as messaging: + PushNotifications.send_messages( + [token], + None, + dict(test_none=None, test_str="test", test_int=1, test_bool=True), # type: ignore[dict-item] + ) + assert messaging.Message.call_count == 1 + assert messaging.Message.call_args.kwargs["data"] == dict( + test_str="test", test_int="1", test_bool="True" + ) + + assert len(caplog.records) == 3 + assert ( + "Removing test_none from notification data because it is None" + in caplog.messages + ) + assert "Converting test_int from to str" in caplog.messages + assert "Converting test_bool from to str" in caplog.messages + + def 
test_send_messages_unregistered_error( + self, + push_notf_fixture: PushNotificationsFixture, + caplog: pytest.LogCaptureFixture, + ): + db = push_notf_fixture.db + patron1 = db.patron() + patron1.authorization_identifier = "auth1" + token = DeviceToken.create( + db.session, DeviceTokenTypes.FCM_IOS, "test-token", patron1 + ) + caplog.set_level(logging.INFO) + # When a token causes an UnregisteredError, it should be deleted + with mock.patch("core.util.notifications.messaging") as messaging: + messaging.send.side_effect = UnregisteredError("test") + PushNotifications.send_messages([token], None, {}) + assert messaging.Message.call_count == 1 + assert messaging.send.call_count == 1 + + assert get_one(db.session, DeviceToken, device_token="test-token") is None + assert ( + "Device token test-token for patron auth1 is no longer registered, deleting" + in caplog.text + ) + + def test_send_messages_firebase_error( + self, + push_notf_fixture: PushNotificationsFixture, + caplog: pytest.LogCaptureFixture, + ): + # When a token causes an FirebaseError, we should log it and move on + mock_token = MagicMock(spec=DeviceToken) + mock_token.patron.authorization_identifier = "12345" + + caplog.set_level(logging.ERROR) + with mock.patch("core.util.notifications.messaging") as messaging: + messaging.send.side_effect = FirebaseError("", "") + PushNotifications.send_messages([mock_token], None, {}) + assert messaging.Message.call_count == 1 + assert messaging.send.call_count == 1 + + # We logged the error + assert "Failed to send notification for patron 12345" in caplog.text + + # And the log contains a traceback + assert "Traceback" in caplog.text From b7e6fc96840e83c3d0ab946c0088710d26a78e64 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 12:15:51 +0000 Subject: [PATCH 106/262] Bump flask-babel from 2.0.0 to 4.0.0 (#1435) --- api/app.py | 10 +++++++++- api/routes.py | 9 +-------- poetry.lock | 36 ++++++++++++++++++------------------ pyproject.toml | 2 +- 4 files changed, 29 insertions(+), 28 deletions(-) diff --git a/api/app.py b/api/app.py index 58e2f68fc6..d4cde207c1 100644 --- a/api/app.py +++ b/api/app.py @@ -3,6 +3,7 @@ import urllib.parse import flask_babel +from flask import request from flask_babel import Babel from flask_pydantic_spec import FlaskPydanticSpec @@ -29,13 +30,20 @@ from core.util.cache import CachedData from scripts import InstanceInitializationScript + +def get_locale(): + """The localization selection function to be used with flask-babel""" + languages = Configuration.localization_languages() + return request.accept_languages.best_match(languages, "en") + + app = PalaceFlask(__name__) app._db = None # type: ignore [assignment] app.config["BABEL_DEFAULT_LOCALE"] = LanguageCodes.three_to_two[ Configuration.localization_languages()[0] ] app.config["BABEL_TRANSLATION_DIRECTORIES"] = "../translations" -babel = Babel(app) +babel = Babel(app, locale_selector=get_locale) # The autodoc spec, can be accessed at "/apidoc/swagger" api_spec = FlaskPydanticSpec( diff --git a/api/routes.py b/api/routes.py index 9838031d43..a1b61d99b8 100644 --- a/api/routes.py +++ b/api/routes.py @@ -6,8 +6,7 @@ from flask_cors.core import get_cors_options, set_cors_headers from flask_pydantic_spec import Response as SpecResponse -from api.app import api_spec, app, babel -from api.config import Configuration +from api.app import api_spec, app from api.model.patron_auth import PatronAuthAccessToken from api.model.time_tracking import 
PlaytimeEntriesPost, PlaytimeEntriesPostResponse from core.app_server import compressible, returns_problem_detail @@ -15,12 +14,6 @@ from core.util.problem_detail import ProblemDetail -@babel.localeselector -def get_locale(): - languages = Configuration.localization_languages() - return request.accept_languages.best_match(languages) - - @app.after_request def print_cache(response): if hasattr(app, "_db") and HasSessionCache.CACHE_ATTRIBUTE in app._db.info: diff --git a/poetry.lock b/poetry.lock index 7c5ee14d70..020649b759 100644 --- a/poetry.lock +++ b/poetry.lock @@ -57,17 +57,20 @@ wrapt = "*" [[package]] name = "babel" -version = "2.10.1" +version = "2.13.0" description = "Internationalization utilities" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "Babel-2.10.1-py3-none-any.whl", hash = "sha256:3f349e85ad3154559ac4930c3918247d319f21910d5ce4b25d439ed8693b98d2"}, - {file = "Babel-2.10.1.tar.gz", hash = "sha256:98aeaca086133efb3e1e2aad0396987490c8425929ddbcfe0550184fdc54cd13"}, + {file = "Babel-2.13.0-py3-none-any.whl", hash = "sha256:fbfcae1575ff78e26c7449136f1abbefc3c13ce542eeb13d43d50d8b047216ec"}, + {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, ] [package.dependencies] -pytz = ">=2015.7" +pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} + +[package.extras] +dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bcrypt" @@ -1157,23 +1160,20 @@ dotenv = ["python-dotenv"] [[package]] name = "flask-babel" -version = "2.0.0" -description = "Adds i18n/l10n support to Flask applications" +version = "4.0.0" +description = "Adds i18n/l10n support for Flask applications." optional = false -python-versions = "*" +python-versions = ">=3.8,<4.0" files = [ - {file = "Flask-Babel-2.0.0.tar.gz", hash = "sha256:f9faf45cdb2e1a32ea2ec14403587d4295108f35017a7821a2b1acb8cfd9257d"}, - {file = "Flask_Babel-2.0.0-py3-none-any.whl", hash = "sha256:e6820a052a8d344e178cdd36dd4bb8aea09b4bda3d5f9fa9f008df2c7f2f5468"}, + {file = "flask_babel-4.0.0-py3-none-any.whl", hash = "sha256:638194cf91f8b301380f36d70e2034c77ee25b98cb5d80a1626820df9a6d4625"}, + {file = "flask_babel-4.0.0.tar.gz", hash = "sha256:dbeab4027a3f4a87678a11686496e98e1492eb793cbdd77ab50f4e9a2602a593"}, ] [package.dependencies] -Babel = ">=2.3" -Flask = "*" -Jinja2 = ">=2.5" -pytz = "*" - -[package.extras] -dev = ["Pallets-Sphinx-Themes", "bumpversion", "ghp-import", "pytest", "pytest-mock", "sphinx"] +Babel = ">=2.12" +Flask = ">=2.0" +Jinja2 = ">=3.1" +pytz = ">=2022.7" [[package]] name = "flask-cors" @@ -4523,4 +4523,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "2ee6411718bb1a3c35657a7e0f80c29de793f528d0328fc297e4b6367a35c29a" +content-hash = "533a08640650bd864abb679d05302c22c3c23af358e378c3901119ad6e5ec7bb" diff --git a/pyproject.toml b/pyproject.toml index 3f7905768c..5277db7f7d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -181,7 +181,7 @@ expiringdict = "1.2.2" feedparser = "6.0.10" firebase-admin = "^6.0.1" Flask = "^2.2.3" -Flask-Babel = "2.0.0" +Flask-Babel = "4.0.0" Flask-Cors = "4.0.0" flask-pydantic-spec = "^0.5.0" fuzzywuzzy = "0.18.0" # fuzzywuzzy is for author name manipulations From 1eaccc6d925bc6f78792a25b3e6df669e7daab32 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 
12:35:04 +0000 Subject: [PATCH 107/262] Bump flask from 2.3.3 to 3.0.0 (#1431) --- poetry.lock | 10 +++++----- pyproject.toml | 4 ++-- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 020649b759..32edda83bd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1137,13 +1137,13 @@ pyjwt = {version = ">=2.5.0", extras = ["crypto"]} [[package]] name = "flask" -version = "2.3.3" +version = "3.0.0" description = "A simple framework for building complex web applications." optional = false python-versions = ">=3.8" files = [ - {file = "flask-2.3.3-py3-none-any.whl", hash = "sha256:f69fcd559dc907ed196ab9df0e48471709175e696d6e698dd4dbe940f96ce66b"}, - {file = "flask-2.3.3.tar.gz", hash = "sha256:09c347a92aa7ff4a8e7f3206795f30d826654baf38b873d0744cd571ca609efc"}, + {file = "flask-3.0.0-py3-none-any.whl", hash = "sha256:21128f47e4e3b9d597a3e8521a329bf56909b690fcc3fa3e477725aa81367638"}, + {file = "flask-3.0.0.tar.gz", hash = "sha256:cfadcdb638b609361d29ec22360d6070a77d7463dcb3ab08d2c2f2f168845f58"}, ] [package.dependencies] @@ -1152,7 +1152,7 @@ click = ">=8.1.3" importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" -Werkzeug = ">=2.3.7" +Werkzeug = ">=3.0.0" [package.extras] async = ["asgiref (>=3.2)"] @@ -4523,4 +4523,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "533a08640650bd864abb679d05302c22c3c23af358e378c3901119ad6e5ec7bb" +content-hash = "654439c38d24a5629cdb02e2f268e89f7d477fc736e61667dbf6da1e1344c516" diff --git a/pyproject.toml b/pyproject.toml index 5277db7f7d..6f57426444 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -180,8 +180,8 @@ dependency-injector = "^4.41" expiringdict = "1.2.2" feedparser = "6.0.10" firebase-admin = "^6.0.1" -Flask = "^2.2.3" -Flask-Babel = "4.0.0" +Flask = "^3.0" +Flask-Babel = "^4.0" Flask-Cors = "4.0.0" flask-pydantic-spec = "^0.5.0" fuzzywuzzy = "0.18.0" # fuzzywuzzy is for author name manipulations From 3239f9ee11b1e0faafee7eaaa2196339d29550bb Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Fri, 13 Oct 2023 18:16:26 +0530 Subject: [PATCH 108/262] Added the LCP ns to the hashed passphrase (#1458) --- core/feed/serializer/opds.py | 1 + tests/api/feed/test_opds_serializer.py | 5 ++++- 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index 006cee4630..97f3ac2e39 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -28,6 +28,7 @@ "licensor": f"{{{OPDSFeed.DRM_NS}}}licensor", "patron": f"{{{OPDSFeed.SIMPLIFIED_NS}}}patron", "series": f"{{{OPDSFeed.SCHEMA_NS}}}series", + "hashed_passphrase": f"{{{OPDSFeed.LCP_NS}}}hashed_passphrase", } ATTRIBUTE_MAPPING = { diff --git a/tests/api/feed/test_opds_serializer.py b/tests/api/feed/test_opds_serializer.py index da0254a6f0..142d406e7a 100644 --- a/tests/api/feed/test_opds_serializer.py +++ b/tests/api/feed/test_opds_serializer.py @@ -116,7 +116,10 @@ def test__serialize_acquistion_link(self): f"{{{OPDSFeed.OPDS_NS}}}availability", lambda child: child.get("status") == "available", ), - ("hashed_passphrase", lambda child: child.text == "passphrase"), + ( + f"{{{OPDSFeed.LCP_NS}}}hashed_passphrase", + lambda child: child.text == "passphrase", + ), ( f"{{{OPDSFeed.DRM_NS}}}licensor", lambda child: child.get(f"{{{OPDSFeed.DRM_NS}}}vendor") == "vendor" 
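Note on the hashed_passphrase change above: the new TAG_MAPPING entry uses Clark notation, so the element is now emitted in the LCP namespace instead of the bare `hashed_passphrase` tag the old test expected. A minimal sketch (not part of the patch) of how such a tag serializes with lxml; the namespace URI below is an assumed placeholder for OPDSFeed.LCP_NS and may not match the real constant:

    # Sketch only: illustrates Clark-notation serialization, assuming lxml.
    from lxml import etree

    LCP_NS = "http://readium.org/lcp-specs/ns"  # assumed value for OPDSFeed.LCP_NS
    tag = f"{{{LCP_NS}}}hashed_passphrase"  # Clark notation, as in TAG_MAPPING

    element = etree.Element(tag, nsmap={"lcp": LCP_NS})
    element.text = "passphrase"

    # -> b'<lcp:hashed_passphrase xmlns:lcp="...">passphrase</lcp:hashed_passphrase>'
    print(etree.tostring(element))

This is why the updated test matches on the fully qualified f"{{{OPDSFeed.LCP_NS}}}hashed_passphrase" tag rather than the plain string.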
From 3966c3936d1508a200015214428fb86c897c14e8 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 13 Oct 2023 09:50:32 -0300 Subject: [PATCH 109/262] Fix logging for monitors. (#1454) --- core/monitor.py | 5 +++++ tests/core/test_monitor.py | 10 ++++++++++ 2 files changed, 15 insertions(+) diff --git a/core/monitor.py b/core/monitor.py index 00f9f41382..349c85c9fe 100644 --- a/core/monitor.py +++ b/core/monitor.py @@ -32,6 +32,7 @@ get_one_or_create, ) from core.model.configuration import ConfigurationSetting +from core.service.container import container_instance from core.util.datetime_helpers import utc_now if TYPE_CHECKING: @@ -114,6 +115,10 @@ def __init__(self, _db, collection=None): if collection: self.collection_id = collection.id + # Make sure that logging is configured. + self.services = container_instance() + self.services.init_resources() + @property def log(self): if not hasattr(self, "_log"): diff --git a/tests/core/test_monitor.py b/tests/core/test_monitor.py index d39d2ceffc..55bf12d010 100644 --- a/tests/core/test_monitor.py +++ b/tests/core/test_monitor.py @@ -1,4 +1,5 @@ import datetime +from unittest.mock import MagicMock import pytest @@ -52,6 +53,7 @@ WorkReaper, WorkSweepMonitor, ) +from core.service import container from core.util.datetime_helpers import datetime_utc, utc_now from tests.core.mock import ( AlwaysSuccessfulCoverageProvider, @@ -269,6 +271,14 @@ def test_same_monitor_different_collections(self, db: DatabaseTransactionFixture assert isinstance(t2.start, datetime.datetime) assert t2.start > t1.start + def test_init_configures_logging(self, db: DatabaseTransactionFixture): + mock_services = MagicMock() + container._container_instance = mock_services + collection = db.collection() + MockMonitor(db.session, collection) + mock_services.init_resources.assert_called_once() + container._container_instance = None + class TestCollectionMonitor: """Test the special features of CollectionMonitor.""" From cc2d3f9232dcd0dbc74393feeeee7a051d3e1716 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 12:55:38 +0000 Subject: [PATCH 110/262] Bump python3-saml from 1.15.0 to 1.16.0 (#1446) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- .../api/saml/metadata/federations/test_loader.py | 16 ++++++++++++---- 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/poetry.lock b/poetry.lock index 32edda83bd..59f70b7fc4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3340,14 +3340,14 @@ cli = ["click (>=5.0)"] [[package]] name = "python3-saml" -version = "1.15.0" +version = "1.16.0" description = "Saml Python Toolkit. 
Add SAML support to your Python software using this library" optional = false python-versions = "*" files = [ - {file = "python3-saml-1.15.0.tar.gz", hash = "sha256:8c68b31739471faffb93dcdfe3bcab375b9d6a0459cab7fa9cb0d7d874ecf0b0"}, - {file = "python3_saml-1.15.0-py2-none-any.whl", hash = "sha256:3a76a17c6a2384313c5cdb450ea8b2e6d098f30836ee3dddbfe8e870903971d2"}, - {file = "python3_saml-1.15.0-py3-none-any.whl", hash = "sha256:cc0458351ddaa08270ebe29ffaf9e1a41dbd285ba43a176cbd70907af5944c66"}, + {file = "python3-saml-1.16.0.tar.gz", hash = "sha256:97c9669aecabc283c6e5fb4eb264f446b6e006f5267d01c9734f9d8bffdac133"}, + {file = "python3_saml-1.16.0-py2-none-any.whl", hash = "sha256:c49097863c278ff669a337a96c46dc1f25d16307b4bb2679d2d1733cc4f5176a"}, + {file = "python3_saml-1.16.0-py3-none-any.whl", hash = "sha256:20b97d11b04f01ee22e98f4a38242e2fea2e28fbc7fbc9bdd57cab5ac7fc2d0d"}, ] [package.dependencies] @@ -4523,4 +4523,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "654439c38d24a5629cdb02e2f268e89f7d477fc736e61667dbf6da1e1344c516" +content-hash = "9ba050560850ff05c9acdb9bafa2383b4017be67b0c30c9ba507cad2fc68f014" diff --git a/pyproject.toml b/pyproject.toml index 6f57426444..cb726c3e9f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -210,7 +210,7 @@ pyparsing = "3.1.1" pyspellchecker = "0.7.2" python = ">=3.8,<4" python-dateutil = "2.8.2" -python3-saml = "~1.15.0" # python-saml is required for SAML authentication +python3-saml = "^1.16" # python-saml is required for SAML authentication pytz = "^2023.3" pyyaml = "^6.0" redmail = "^0.6.0" diff --git a/tests/api/saml/metadata/federations/test_loader.py b/tests/api/saml/metadata/federations/test_loader.py index 007da2d41d..36b43a4ebf 100644 --- a/tests/api/saml/metadata/federations/test_loader.py +++ b/tests/api/saml/metadata/federations/test_loader.py @@ -29,8 +29,11 @@ def test_load_idp_metadata_raises_error_when_xml_is_incorrect(self, urlopen_mock with pytest.raises(SAMLMetadataLoadingError): metadata_loader.load_idp_metadata(url) + @patch("urllib.request.Request") @patch("urllib.request.urlopen") - def test_load_idp_metadata_correctly_loads_one_descriptor(self, urlopen_mock): + def test_load_idp_metadata_correctly_loads_one_descriptor( + self, urlopen_mock, request_mock + ): # Arrange url = "http://md.incommon.org/InCommon/metadata.xml" incorrect_xml = saml_strings.CORRECT_XML_WITH_IDP_1 @@ -43,11 +46,15 @@ def test_load_idp_metadata_correctly_loads_one_descriptor(self, urlopen_mock): xml_metadata = metadata_loader.load_idp_metadata(url) # Assert - urlopen_mock.assert_called_with(url, timeout=None) + request_mock.assert_called_with(url, headers={}) + urlopen_mock.assert_called_with(request_mock(), timeout=None) assert saml_strings.CORRECT_XML_WITH_IDP_1 == xml_metadata + @patch("urllib.request.Request") @patch("urllib.request.urlopen") - def test_load_idp_metadata_correctly_loads_multiple_descriptors(self, urlopen_mock): + def test_load_idp_metadata_correctly_loads_multiple_descriptors( + self, urlopen_mock, request_mock + ): # Arrange url = "http://md.incommon.org/InCommon/metadata.xml" incorrect_xml = saml_strings.CORRECT_XML_WITH_MULTIPLE_IDPS @@ -60,7 +67,8 @@ def test_load_idp_metadata_correctly_loads_multiple_descriptors(self, urlopen_mo xml_metadata = metadata_loader.load_idp_metadata(url) # Assert - urlopen_mock.assert_called_with(url, timeout=None) + request_mock.assert_called_with(url, headers={}) + 
urlopen_mock.assert_called_with(request_mock(), timeout=None) assert saml_strings.CORRECT_XML_WITH_MULTIPLE_IDPS == xml_metadata From 388bf6f2cd2a1b2eb2df0fdcef007f04a9d12055 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 13:19:04 +0000 Subject: [PATCH 111/262] Bump pyfakefs from 5.2.4 to 5.3.0 (#1456) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 59f70b7fc4..d0b3615d4d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2991,13 +2991,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyfakefs" -version = "5.2.4" +version = "5.3.0" description = "pyfakefs implements a fake file system that mocks the Python file system modules." optional = false python-versions = ">=3.7" files = [ - {file = "pyfakefs-5.2.4-py3-none-any.whl", hash = "sha256:8eb95f1dd1c4b8bdce30448fe169875e3a4451c32d3f9c37799157bd4eb7b789"}, - {file = "pyfakefs-5.2.4.tar.gz", hash = "sha256:3e040f3792086086a0dc2191b05fe709438e168aafe2e94fcdbef8e3859208d8"}, + {file = "pyfakefs-5.3.0-py3-none-any.whl", hash = "sha256:33c1f891078c727beec465e75cb314120635e2298456493cc2cc0539e2130cbb"}, + {file = "pyfakefs-5.3.0.tar.gz", hash = "sha256:e3e35f65ce55ee8ecc5e243d55cfdbb5d0aa24938f6e04e19f0fab062f255020"}, ] [[package]] @@ -4523,4 +4523,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "9ba050560850ff05c9acdb9bafa2383b4017be67b0c30c9ba507cad2fc68f014" +content-hash = "e8e70bcd3142c9d91190babad8d5970377bc5730a4a5d4af52600416d4bc055b" diff --git a/pyproject.toml b/pyproject.toml index cb726c3e9f..27edca9345 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -244,7 +244,7 @@ freezegun = "~1.2.2" Jinja2 = "^3.1.2" mypy = "^1.4.1" psycopg2-binary = "~2.9.5" -pyfakefs = "~5.2.0" +pyfakefs = "^5.3" pytest = ">=7.2.0" pytest-alembic = "^0.10.4" pytest-cov = "^4.0.0" From 4fe0fa5716f89b3579828203d3d2abda8e54d809 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 13:43:37 +0000 Subject: [PATCH 112/262] Bump pyinstrument from 4.5.3 to 4.6.0 (#1457) --- poetry.lock | 124 ++++++++++++++++++++++++------------------------- pyproject.toml | 2 +- 2 files changed, 63 insertions(+), 63 deletions(-) diff --git a/poetry.lock b/poetry.lock index d0b3615d4d..2c64911026 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3002,71 +3002,71 @@ files = [ [[package]] name = "pyinstrument" -version = "4.5.3" +version = "4.6.0" description = "Call stack profiler for Python. Shows you why your code is slow!" 
optional = false python-versions = ">=3.7" files = [ - {file = "pyinstrument-4.5.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:94f82899486441f0b31c53c4250cb65a9f20036cacb6fb75315069a7b1e3703b"}, - {file = "pyinstrument-4.5.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e295571bec2bfc1cfbb1ddd66aa5d06c54cf67179c46f0bbdcf709e8130533fd"}, - {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d2b2e9c0e6b6cf444716829a00855796a7f80b5bcabe07ddb29dd5c238e5014"}, - {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3741e001a2b06be9dc435329f14507b571b273aca8b243b8d2cffd786de1b205"}, - {file = "pyinstrument-4.5.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:92d450301957fa328391ab3da13a26249268233ea0fd1542613c148b8a635950"}, - {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4d6bccf4da8c13065c4096e4669ce483d1614698a279419090b9374f0b96328f"}, - {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:612c99a6fbad1bcabae0fe7571f5ede0ecd577d1d4a975d19fcfa281997f7075"}, - {file = "pyinstrument-4.5.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:bb637628274e819faec00532cada45d0da8ae4f4033baa84f9cdce559911a4a4"}, - {file = "pyinstrument-4.5.3-cp310-cp310-win32.whl", hash = "sha256:5490c4ddd0f946de2c503c22e1099b34b241d9f4ac80f27b3dc7e484818b734b"}, - {file = "pyinstrument-4.5.3-cp310-cp310-win_amd64.whl", hash = "sha256:48372e82347281c843f9cd710fc848cb5869634e225d5bffcc627673e7554ac9"}, - {file = "pyinstrument-4.5.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5739550f6a631250aac0b01778882d3e77b3e4ed5c01f4112769ec023cac345d"}, - {file = "pyinstrument-4.5.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7f83b475f90764beb9a44505539f19c005ca31526f35358cde0a02b140c09c4e"}, - {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118440b4c6a925f811d97fba02e99066fca8090710fa51c6873834dd37b39040"}, - {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ce3adea93d6f4ff54893428b49f1b771f9aa7294a79d812a207c7dd9cbe8161"}, - {file = "pyinstrument-4.5.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a5851bf8c253d37b415388a1511239a3486249d87a0436d47317480d1e9557b"}, - {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3bb877bd2bc9bf492257891e585287f65c6374a1511e64f888a1ad112c18103b"}, - {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4b9d7254b729571151070a61c7f6c86d02320d62145b9f664a96258fcc26ad1a"}, - {file = "pyinstrument-4.5.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f294980f636193fdb70f671d17fc98fd4f3624aef1ce061b36be14f53bbe84b4"}, - {file = "pyinstrument-4.5.3-cp311-cp311-win32.whl", hash = "sha256:c04e101c32102091280ac759578d991a3a71a41fe357c651cd78b8bbe9879daf"}, - {file = "pyinstrument-4.5.3-cp311-cp311-win_amd64.whl", hash = "sha256:d4cf26f0f813db178eb36db8fa0ae48cd600b7e3c0447beddd8e7e7cec26e992"}, - {file = "pyinstrument-4.5.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:82d49865f6aef776ab914b9f09c26ad6279397d8fd26a79a3008c1becab4d88c"}, - {file = "pyinstrument-4.5.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:d4efe0cdccdd44514a6ae7c061dd88d221dd77ae7d7bfd2d743c1f51f90fa3e1"}, - {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33ec7d794ad206a2ad905160308cc27ad3a985691e99c31e79cfd8de53b75455"}, - {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:342923b5c7654c73bcd263733b1e9d2b990c2af60d429badcc7cfd5a21bb384b"}, - {file = "pyinstrument-4.5.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01c0d73fd1c7de4b8fca509b7c292709dbe1990527601c7d2307d4f9aca110df"}, - {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:2be28ec4efa59dd9539bd803381c768a2f2453b6de201e102bf02e17a3efd3f2"}, - {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:5ae906be229fa5ce649016206baa5d20f6a49bb7b6c7643d019f8024e2d11d66"}, - {file = "pyinstrument-4.5.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d06ef692650f24feb3817869e6519ac117c3358bfe6474c0ded2cbca53c69a5f"}, - {file = "pyinstrument-4.5.3-cp312-cp312-win32.whl", hash = "sha256:f27742fa4b40c2fde105c24b190fa7d54e76195bc4c8d8a4fc5fa1af663468d3"}, - {file = "pyinstrument-4.5.3-cp312-cp312-win_amd64.whl", hash = "sha256:ba6b864a8234f3faf1a3a52587368975d9aca6944a06a68114eb1153501679b4"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efa3140c8813056c5af939f39d750461bb917a0ba96b76cd0171c033939ae0bc"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70c7542c8edfbaee7d2263b07997e668daf6c73e8386abdd1b1a243e88c29da3"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:df3e9fdea7f4a2a39a4403044c06efd5d00674807b9f8c104d24f5bf1412e33f"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64aefe67e6ad5a8254f36e0cadaa06f873539d34a3e18b883b8fa7278752f541"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d1949f4c4f92ea674415c74a6e5d2105b92175019b03b4808bb61d9a777baffc"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:376924b603278f9df034a8b4a4826ef708abb99acd161b65b66e8b62d596b7c9"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:59caa57aa868098cbe81c842aeac24efef861a9fb1a1f34aa227b6d57b497e57"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-win32.whl", hash = "sha256:2b9da8eb4f947aba804f61cc311f466105161deebbe49b0a651c20cc0bd804b9"}, - {file = "pyinstrument-4.5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9c08df4e0b3615df56affdb0898f89c3a964779b344b11f9edae4b5b7ac6d033"}, - {file = "pyinstrument-4.5.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:944c8a1e8451b9114cff42a0d7d59e482bbf060ccc3ef927d351f8d383f52678"}, - {file = "pyinstrument-4.5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:80f89f8f296005eb1f8616cd602ffbdf9efcc069e145a35f35654270c2b7641f"}, - {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:520c67144da37e93dc03445f8138ef5a9af6f68f89baacb658731d886763f018"}, - {file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad9677beb345b3a3fe9967e90dfbbcf458f73ae8fc522fdbfda5bab75a1e5014"}, - 
{file = "pyinstrument-4.5.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e77f1a708a895f25300f7dc9b4fd5b34218ecc9c7084733d5ebb849e3ff5af99"}, - {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:057feb33230caead5bfe25d488060d07065a1bf7f19f5b2004e661a38dddc9e3"}, - {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:31e4a23672cfb8f9864bebea6246182d9398a9131606dc53bce124955258705f"}, - {file = "pyinstrument-4.5.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:087ff4e3faca326da071bc73214d73b98c9d7ebea53e70fbe1c033bb6c75f847"}, - {file = "pyinstrument-4.5.3-cp38-cp38-win32.whl", hash = "sha256:e7ab85c0090fd21b5c7910ef01da37be25b574db2cbdc7584e4e2371cb1f13b0"}, - {file = "pyinstrument-4.5.3-cp38-cp38-win_amd64.whl", hash = "sha256:2ef3c856d0ab98372e08e444f6a81efc93dc160d867e3aee1bf4702bd779535d"}, - {file = "pyinstrument-4.5.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f168dfa4328c25c0c3444b62cc8445ac7c0dbbb6cdaf79022267571e12d78d3c"}, - {file = "pyinstrument-4.5.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7241f588e770bfe642cd19e2c8b7560a9cf9e0c2998c0a70ee0ea6333d7404b3"}, - {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:093d1119e20fc68a9f991a1de0bc046fb29e996298d0442c928415738b2546ae"}, - {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:afa5fdcd65ae4d2c11871da01576c3c2c19f70135f6b107cb7550a334441b4f8"}, - {file = "pyinstrument-4.5.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5f5d219b5f52b33462179ecf33ad8651672bc9410f6f6dfd3edf2095acae42"}, - {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c9dc5c501ca01c8649a967442d52eedaee63c52fcdc0fd4fb69974bc4d678978"}, - {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:37d25f3aafb4f24080dd4b0966d9a022f660735f8136b7234ec2c7b8ceab14c4"}, - {file = "pyinstrument-4.5.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:33ee99f15ac13d02a0319c2d7671dd2ccc19c615c167a9f5fbba43b50c225102"}, - {file = "pyinstrument-4.5.3-cp39-cp39-win32.whl", hash = "sha256:f467f9308a613fec0be43fa49469ad2f2c99e62e801802e8d59d938acc4acda9"}, - {file = "pyinstrument-4.5.3-cp39-cp39-win_amd64.whl", hash = "sha256:b027951df515c896243145239e91aeb63b19b642d0f4d5ff702a9393dd4736fa"}, - {file = "pyinstrument-4.5.3.tar.gz", hash = "sha256:0885b01a901231d071cb182de33012e9b8cbd958fb048236ee2a6e760c6c6e21"}, + {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:679b5397e3e6c0d6f56df50ba8c683543df4f1f7c1df2e2eb728e275bde2c85b"}, + {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:18479ffa0c922695ba2befab29521b62bfe75debef48d818cea46262cee48a1e"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daba103955d0d0b37b8bc20a4e8cc6477e839ce5984478fcf3f7cee8318e9636"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d93451e9c7650629b0bc12caa7390f81d1a15835c07f7dc170e953d4684ed1e7"}, + {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:01009a7b58a6f11bf5560c23848ea2881acac974b0841fe5d365ef154baabd6f"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:288ea44da6333dacc77b4ba2149dba3dc1e9fbbebd3d5dc51a66c20839d80ef3"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecc106213146dd90659a1483047b3a1c2e174fb190c0e109234e524a4651e377"}, + {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cd8ab30c8dcd1511e9b3b98f601f17f2c5c9df1d28f8298d215c63d68919bdc"}, + {file = "pyinstrument-4.6.0-cp310-cp310-win32.whl", hash = "sha256:40e3656e6ace5a140880bd980a25f6a356c094c36e28ed1bf935d7349a78b1b6"}, + {file = "pyinstrument-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9623fc3fde47ae90ad5014737e37034b4abc3fbfb455b7b56cc095f9037d5af"}, + {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:beaaa3b647b3a4cbd34b71eacaa31e3eb90e1bf53e15ada3ac7e9df09d737239"}, + {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c69ab570609ac93b5f4ab2e5ccbf8add4f69a962b06307eea66ba65b5ad9d38"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5992748a74ec7ff445e4b56b5e316673c34b6cdbd3755111f7c023d8a141f001"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb1ba76c4e912cae159ab9729c7b31bb6d7fe8ed1f0fafce74484a4bb159c240"}, + {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:674868ebc3663b01d7d059a6f5cdeff6f18b49e217617720a5d645a6b55ead03"}, + {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:514a0ced357ff400988f599b0294d05e3b68468f9ab876f204bf12765f7fdb1b"}, + {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ccd1f5b4ad35c734dcf2d08d80b5b37205b4e84aa71fe76f95e43bd30c5eef9"}, + {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:611c6cd33f42f19e46d99eeef3b84a47d33fe34cdb0ce6e3635d2ee5038706a3"}, + {file = "pyinstrument-4.6.0-cp311-cp311-win32.whl", hash = "sha256:d20b5cf79bca1b3d425a7362457621741393b1d5ce2d920583541b947bc8a368"}, + {file = "pyinstrument-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ecd8cf03b04dc1b7f151896228993c6aa0fa897cdd517ea127465bc1c826c5b5"}, + {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3d4bed520c0f689a75bca4951f6b7fbad96851e8461086c98e03eb726f8a412a"}, + {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b74745f1d22133da8d4a38dd0c78c02c00154a5b7683bdd5df56a7c7705a979b"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ab698400e8401597e39c4816efa247f2b98c9b4e59e3ec25d534ae6887bd93"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de1a36a083b324dafe5e2880e5e04267a1983beb027f12c3dc361ddbe3acf9af"}, + {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8adc4f87d4289c1f04f19451b5133b8e307bd9b08c364c48e007ba663fefbf1b"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08fbc75d3615be6259b7af0c173c7bc48acb6e7bd758678d54eb411ba2903052"}, + {file = 
"pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d86fea6ce117bcff642e24208eb573c00d78b4c2934eb9bd5f915751980cc9bd"}, + {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23a3b21373e0c8bf0d00dda79989fcab0bb1d30094f7b210d40d2226fe20e141"}, + {file = "pyinstrument-4.6.0-cp312-cp312-win32.whl", hash = "sha256:a498c82d93621c5cf736e4660142ac0c3bbcb7b059bcbd4278a6364037128656"}, + {file = "pyinstrument-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:9116154446b9999f6524e9db29310aee6476a5a471c276928f2b46b6655a2dcc"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704c6d38abef8fca2e1085756c9574ea180f7ac866aab6943b483152c2828c2a"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbebdc11d4fc6f3123c046d84db88c7f605d53247e3f357314d0c5775d1beaf4"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7a7bae4cce5f8d084153857cedbce29ca8274c9924884d0461a5db48619c5d"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03289b10715e261a5c33b267d0a430d1b408f929922fde0a9fd311835c60351b"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7f83544ff9abfacdf64b39498ca3dcd454956e44aedb5f67626b7212291c9160"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:40640f02fe7865540e8a1e51bf7f9d2403e3364c3b7edfdb9dae5eb5596811da"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f3719464888d7303e1081996bc56ab75ef5cdf7ef69ccbb7b29f48eb37d8f8b9"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:46e16de6bd3b74ef01b6457d862fee751515315edb5e9283205e45299a29ac49"}, + {file = "pyinstrument-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9ded87ae11cb0a95a767c817908833ec0821fe0e81650968b201a031edf4bc15"}, + {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8bf16e459a868d9dbaacff4f0a0acd6ad78ce36f2aceabf21e9fd0c3b6aca0d4"}, + {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb83e445795431c3d867b298c0583ee27717bbc50e5120a4c98575c979ab3ab8"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29072b1be183e173d7b0f12caf29f8717d273afbf34df950f5fa0d98127cd3fb"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09502af2a383c59e5a0d3bebfab7e5845f79122348358e9e52b2b0187db84a44"}, + {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23c982eb9c4d2f8fe553dacb9bdc0991170a0998b94c84f75c2a052e8af4c74"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f7a38ef482f2151393e729c5582191e4ab05f0ed1fa56b16c2377ff3129107af"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e983e16c2fdfb752387133380859c3414e119e41c14f39f5f869f29dcf6e995c"}, + {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d00c87e5cea48a562d67f0436999463b7989cff2e4c196b0e8ba06d515f191a9"}, + {file = "pyinstrument-4.6.0-cp38-cp38-win32.whl", hash = "sha256:a24c95cabf2ca5d79b62dbc8ff17749768b8aafd777841352f59f4ffd6688782"}, + {file = 
"pyinstrument-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3d88b66dbbcdc6e4c57bd8574ad9d096cd23285eee0f4a5cf74f0e0df6aa190"}, + {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2bcfec45cdbb9edf6d5853debac4a792de589e621be07a71dc76acb36e144a3a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e790515a22844bbccaa388c7715b037c45a8d0155c4a6f2990659998a8920501"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93a30e0d93633a28d4adcf7d7e2d158d6331809b95c2c4a155da17ea1e43eaa3"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa554eb8ef1c54849dbf480965b073f39b39b517e466ce241808a00398f9742a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e289898c644cbbb61d931bbcb6505e2a279ad1122612c9098bfb0958ebf5764"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20ce0f1612a019888a6b94fa7f1e7862842f0b5219282e3354d5b35aceb363f6"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4935f3cdb9062fceac65c50de76f07e05cf630bd3a9c663fedc9e88b5efe7d7c"}, + {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dc9c4577ef4b06ae1592c920d0a4f0f0db587a16f530c629ad93e125bc79ebb7"}, + {file = "pyinstrument-4.6.0-cp39-cp39-win32.whl", hash = "sha256:3ec6b04d8cfb34aec48de7fa77aeb919e8e7e19909740ab7a5553339f6f4c53a"}, + {file = "pyinstrument-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a6d2e5c15f989629fac41536ec2ca1fe81359fadf4dadf2ff24fe96b389f6df"}, + {file = "pyinstrument-4.6.0.tar.gz", hash = "sha256:3e509e879c853dbc5fdc1757f0cfdbf8bee899c80f53d504a7df28898f0fa8ed"}, ] [package.extras] @@ -4523,4 +4523,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "e8e70bcd3142c9d91190babad8d5970377bc5730a4a5d4af52600416d4bc055b" +content-hash = "c96cd038c5187aba1670c495d8dc0a03aae508ae60d76362782143168ce6a652" diff --git a/pyproject.toml b/pyproject.toml index 27edca9345..9b17a8bfc0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -201,7 +201,7 @@ palace-webpub-manifest-parser = "^3.1" pillow = "^10.0" pycryptodome = "^3.18" pydantic = {version = "^1.10.9", extras = ["dotenv", "email"]} -pyinstrument = "<4.6" +pyinstrument = "^4.6" PyJWT = "^2.8" PyLD = "2.0.3" pymarc = "5.1.0" From b023b9008b4e8d4ab6e64c1c33c29282c09cba8b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:42:34 +0000 Subject: [PATCH 113/262] Bump aws-xray-sdk from 2.12.0 to 2.12.1 (#1459) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2c64911026..8a48845e51 100644 --- a/poetry.lock +++ b/poetry.lock @@ -42,13 +42,13 @@ tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pyte [[package]] name = "aws-xray-sdk" -version = "2.12.0" +version = "2.12.1" description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." 
optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "aws-xray-sdk-2.12.0.tar.gz", hash = "sha256:295afc237073a80956d7d4f27c31830edcb9a8ccca9ef8aa44990badef15e5b7"}, - {file = "aws_xray_sdk-2.12.0-py2.py3-none-any.whl", hash = "sha256:30886e23cc2daadc1c06a76f25b071205e84848419d1ddf097b62a565e156542"}, + {file = "aws-xray-sdk-2.12.1.tar.gz", hash = "sha256:0bbfdbc773cfef4061062ac940b85e408297a2242f120bcdfee2593209b1e432"}, + {file = "aws_xray_sdk-2.12.1-py2.py3-none-any.whl", hash = "sha256:f6803832dc08d18cc265e2327a69bfa9ee41c121fac195edc9745d04b7a566c3"}, ] [package.dependencies] From 5108fb804be45f4b19f02261d42f42c26fd9a4b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 17:42:57 +0000 Subject: [PATCH 114/262] Bump types-aws-xray-sdk from 2.12.0.2 to 2.12.0.3 (#1460) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8a48845e51..9acdc2fb52 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4108,13 +4108,13 @@ telegram = ["requests"] [[package]] name = "types-aws-xray-sdk" -version = "2.12.0.2" +version = "2.12.0.3" description = "Typing stubs for aws-xray-sdk" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-aws-xray-sdk-2.12.0.2.tar.gz", hash = "sha256:42249602d1d46579318df24111eee6f68e358eec8da88882653723ec1f05af9a"}, - {file = "types_aws_xray_sdk-2.12.0.2-py3-none-any.whl", hash = "sha256:38e7f286e6c4395b79a92fd4ede575eaf87f2e45ab974e68fdc177eac3bd7386"}, + {file = "types-aws-xray-sdk-2.12.0.3.tar.gz", hash = "sha256:f7d56120015563421e824688137db7584b155e246ee2acd0318d26f9cde64f6b"}, + {file = "types_aws_xray_sdk-2.12.0.3-py3-none-any.whl", hash = "sha256:31f83187953bc6a239f5a72814edb924589467c09f386bd8db15a354052414bc"}, ] [[package]] From 1961cd2837806c58ebb3fd3a31a8e834b9468fb4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 13 Oct 2023 14:44:38 -0300 Subject: [PATCH 115/262] Bump pre-commit from 3.4.0 to 3.5.0 (#1461) Bumps [pre-commit](https://github.com/pre-commit/pre-commit) from 3.4.0 to 3.5.0. - [Release notes](https://github.com/pre-commit/pre-commit/releases) - [Changelog](https://github.com/pre-commit/pre-commit/blob/main/CHANGELOG.md) - [Commits](https://github.com/pre-commit/pre-commit/compare/v3.4.0...v3.5.0) --- updated-dependencies: - dependency-name: pre-commit dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9acdc2fb52..a5aae8c889 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2705,13 +2705,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.4.0" +version = "3.5.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.4.0-py2.py3-none-any.whl", hash = "sha256:96d529a951f8b677f730a7212442027e8ba53f9b04d217c4c67dc56c393ad945"}, - {file = "pre_commit-3.4.0.tar.gz", hash = "sha256:6bbd5129a64cad4c0dfaeeb12cd8f7ea7e15b77028d985341478c8af3c759522"}, + {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, + {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, ] [package.dependencies] From 0f7b25ea83075c5eb4456a180eddd03109c7e1a6 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 16 Oct 2023 10:44:15 -0300 Subject: [PATCH 116/262] Type hint enki.py (PP-501) (#1463) * Type hint enki api * Fix tests * Restore hold parsing stub and add some logging. --- api/circulation.py | 7 +- api/enki.py | 263 ++++++++++++++++++++++++++--------------- pyproject.toml | 1 + tests/api/test_enki.py | 30 +++-- 4 files changed, 194 insertions(+), 107 deletions(-) diff --git a/api/circulation.py b/api/circulation.py index b0a3b0409e..a7267be4c9 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -12,6 +12,7 @@ Any, Dict, Generic, + Iterable, List, Literal, Tuple, @@ -705,7 +706,9 @@ def fulfill( ... @abstractmethod - def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: + def patron_activity( + self, patron: Patron, pin: str + ) -> Iterable[LoanInfo | HoldInfo]: """Return a patron's current checkouts and holds.""" ... @@ -1659,7 +1662,7 @@ def __init__( self.api = api self.patron = patron self.pin = pin - self.activity: Optional[List[LoanInfo | HoldInfo]] = None + self.activity: Optional[Iterable[LoanInfo | HoldInfo]] = None self.exception: Optional[Exception] = None self.trace: Tuple[ Type[BaseException], BaseException, TracebackType diff --git a/api/enki.py b/api/enki.py index db586ecad5..b79b389631 100644 --- a/api/enki.py +++ b/api/enki.py @@ -1,12 +1,17 @@ +from __future__ import annotations + import datetime import json import logging import time +from typing import Any, Callable, Generator, Mapping, Tuple, cast from flask_babel import lazy_gettext as _ from pydantic import HttpUrl +from requests import Response as RequestsResponse +from sqlalchemy.orm import Session -from api.circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo +from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo from api.circulation_exceptions import * from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics @@ -25,6 +30,7 @@ Metadata, ReplacementPolicy, SubjectData, + TimestampData, ) from core.model import ( Classification, @@ -34,6 +40,7 @@ Edition, Hyperlink, Identifier, + Library, LicensePool, LicensePoolDeliveryMechanism, Patron, @@ -106,20 +113,22 @@ class EnkiAPI( SERVICE_NAME = "Enki" @classmethod - def settings_class(cls): + def settings_class(cls) -> type[EnkiSettings]: return EnkiSettings @classmethod - def library_settings_class(cls): + def library_settings_class(cls) -> type[EnkiLibrarySettings]: return EnkiLibrarySettings - def label(self): - return self.NAME + @classmethod + def label(cls) -> str: + return cls.NAME - def description(self): - return self.DESCRIPTION + @classmethod + def description(cls) -> str: + return cls.DESCRIPTION # type: ignore[no-any-return] - def __init__(self, _db, collection): + def __init__(self, _db: Session, collection: Collection): self._db = _db if collection.protocol != 
self.ENKI: raise ValueError( @@ -131,22 +140,19 @@ def __init__(self, _db, collection): self.collection_id = collection.id self.base_url = self.configuration().url or self.PRODUCTION_BASE_URL - def external_integration(self, _db): - return self.collection.external_integration - - def enki_library_id(self, library): + def enki_library_id(self, library: Library) -> Optional[str]: """Find the Enki library ID for the given library.""" - if config := self.library_configuration(library.id): - return config.enki_library_id - - @property - def collection(self): - return Collection.by_id(self._db, id=self.collection_id) + if library.id is None: + return None + config = self.library_configuration(library.id) + if config is None: + return None + return config.enki_library_id - def _run_self_tests(self, _db): + def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: now = utc_now() - def count_recent_loans_and_holds(): + def count_recent_loans_and_holds() -> str: """Count recent circulation events that affected loans or holds.""" one_hour_ago = now - datetime.timedelta(hours=1) count = len(list(self.recent_activity(one_hour_ago, now))) @@ -156,7 +162,7 @@ def count_recent_loans_and_holds(): "Counting recent circulation changes.", count_recent_loans_and_holds ) - def count_title_changes(): + def count_title_changes() -> str: """Count changes to title metadata (usually because of new titles). """ @@ -170,6 +176,9 @@ def count_title_changes(): count_title_changes, ) + if self.collection is None: + raise ValueError("Collection is None") + for result in self.default_patrons(self.collection): if isinstance(result, SelfTestResult): yield result @@ -180,7 +189,7 @@ def count_title_changes(): % library.name ) - def count_patron_loans_and_holds(patron, pin): + def count_patron_loans_and_holds(patron: Patron, pin: Optional[str]) -> str: activity = list(self.patron_activity(patron, pin)) return "Total loans and holds: %s" % len(activity) @@ -188,17 +197,16 @@ def count_patron_loans_and_holds(patron, pin): def request( self, - url, - method="get", - extra_headers={}, - data=None, - params=None, - retry_on_timeout=True, - **kwargs, - ): + url: str, + method: str = "get", + extra_headers: Optional[Mapping[str, str]] = None, + data: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + retry_on_timeout: bool = True, + **kwargs: Any, + ) -> RequestsResponse: """Make an HTTP request to the Enki API.""" - headers = dict(extra_headers) - response = None + headers = dict(extra_headers) if extra_headers else {} try: response = self._request( method, url, headers=headers, data=data, params=params, **kwargs @@ -225,7 +233,15 @@ def request( raise RemoteIntegrationException(url, "An unknown error occured") return response - def _request(self, method, url, headers, data, params, **kwargs): + def _request( + self, + url: str, + method: str, + headers: Mapping[str, str], + data: Optional[Mapping[str, Any]] = None, + params: Optional[Mapping[str, Any]] = None, + **kwargs: Any, + ) -> RequestsResponse: """Actually make an HTTP request. MockEnkiAPI overrides this method. @@ -242,7 +258,7 @@ def _request(self, method, url, headers, data, params, **kwargs): ) @classmethod - def _minutes_since(cls, since): + def _minutes_since(cls, since: datetime.datetime) -> int: """How many minutes have elapsed since `since`? 
This is a helper method to create the `minutes` parameter to @@ -251,7 +267,9 @@ def _minutes_since(cls, since): now = utc_now() return int((now - since).total_seconds() / 60) - def recent_activity(self, start, end): + def recent_activity( + self, start: datetime.datetime, end: datetime.datetime + ) -> Generator[CirculationData, None, None]: """Find circulation events from a certain timeframe that affected loans or holds. @@ -259,23 +277,29 @@ def recent_activity(self, start, end): :yield: A sequence of CirculationData objects. """ epoch = from_timestamp(0) - start = int((start - epoch).total_seconds()) - end = int((end - epoch).total_seconds()) + start_int = int((start - epoch).total_seconds()) + end_int = int((end - epoch).total_seconds()) url = self.base_url + self.item_endpoint - args = dict(method="getRecentActivityTime", stime=str(start), etime=str(end)) + args = dict( + method="getRecentActivityTime", stime=str(start_int), etime=str(end_int) + ) response = self.request(url, params=args) data = json.loads(response.content) parser = BibliographicParser() for element in data["result"]["recentactivity"]: identifier = IdentifierData(Identifier.ENKI_ID, element["id"]) - yield parser.extract_circulation( + data = parser.extract_circulation( identifier, element["availability"], None, # The recent activity API does not include format info ) + if data: + yield data - def updated_titles(self, since): + def updated_titles( + self, since: datetime.datetime + ) -> Generator[Metadata, None, None]: """Find recent changes to book metadata. NOTE: getUpdateTitles will return a maximum of 1000 items, so @@ -298,7 +322,7 @@ def updated_titles(self, since): response = self.request(url, params=args) yield from BibliographicParser().process_all(response.content) - def get_item(self, enki_id): + def get_item(self, enki_id: Optional[str]) -> Optional[Metadata]: """Retrieve bibliographic and availability information for a specific title. @@ -327,7 +351,9 @@ def get_item(self, enki_id): return BibliographicParser().extract_bibliographic(book) return None - def get_all_titles(self, strt=0, qty=10): + def get_all_titles( + self, strt: int = 0, qty: int = 10 + ) -> Generator[Metadata, None, None]: """Retrieve a single page of items from the Enki collection. Iterating over the entire collection is very expensive and @@ -340,16 +366,12 @@ def get_all_titles(self, strt=0, qty=10): "requesting : " + str(qty) + " books starting at econtentRecord" + str(strt) ) url = str(self.base_url) + str(self.list_endpoint) - args = dict() - args["method"] = "getAllTitles" - args["id"] = "secontent" - args["strt"] = strt - args["qty"] = qty + args = {"method": "getAllTitles", "id": "secontent", "strt": strt, "qty": qty} response = self.request(url, params=args) yield from BibliographicParser().process_all(response.content) @classmethod - def _epoch_to_struct(cls, epoch_string): + def _epoch_to_struct(cls, epoch_string: str) -> datetime.datetime: # This will turn the time string we get from Enki into a # struct that the Circulation Manager can make use of. time_format = "%Y-%m-%dT%H:%M:%S" @@ -399,19 +421,27 @@ def checkout( ) return loan - def checkin(self, patron, pin, licensepool): + def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: """This api does not support returning books early, so we just implement this as a no-op.""" + ... 
- def loan_request(self, barcode, pin, book_id, enki_library_id): + def loan_request( + self, + barcode: Optional[str], + pin: Optional[str], + book_id: Optional[str], + enki_library_id: Optional[str], + ) -> RequestsResponse: self.log.debug("Sending checkout request for %s" % book_id) url = str(self.base_url) + str(self.user_endpoint) - args = dict() - args["method"] = "getSELink" - args["username"] = barcode - args["password"] = pin - args["lib"] = enki_library_id - args["id"] = book_id + args = { + "method": "getSELink", + "username": barcode, + "password": pin, + "lib": enki_library_id, + "id": book_id, + } response = self.request(url, method="get", params=args) return response @@ -467,15 +497,19 @@ def fulfill( content_expires=expires, ) - def parse_fulfill_result(self, result): + def parse_fulfill_result( + self, result: Mapping[str, Any] + ) -> tuple[str, str, datetime.datetime]: links = result["checkedOutItems"][0]["links"][0] url = links["url"] item_type = links["item_type"] due_date = result["checkedOutItems"][0]["duedate"] expires = self._epoch_to_struct(due_date) - return (url, item_type, expires) + return url, item_type, expires - def patron_activity(self, patron, pin): + def patron_activity( + self, patron: Patron, pin: Optional[str] + ) -> Generator[LoanInfo | HoldInfo, None, None]: enki_library_id = self.enki_library_id(patron.library) response = self.patron_request( patron.authorization_identifier, pin, enki_library_id @@ -496,24 +530,31 @@ def patron_activity(self, patron, pin): yield self.parse_patron_loans(loan) for type, holds in list(result["holds"].items()): for hold in holds: - yield self.parse_patron_holds(hold) + hold_info = self.parse_patron_holds(hold) + if hold_info: + yield hold_info - def patron_request(self, patron, pin, enki_library_id): + def patron_request( + self, patron: Optional[str], pin: Optional[str], enki_library_id: Optional[str] + ) -> RequestsResponse: self.log.debug("Querying Enki for information on patron %s" % patron) url = str(self.base_url) + str(self.user_endpoint) - args = dict() - args["method"] = "getSEPatronData" - args["username"] = patron - args["password"] = pin - args["lib"] = enki_library_id + args = { + "method": "getSEPatronData", + "username": patron, + "password": pin, + "lib": enki_library_id, + } return self.request(url, method="get", params=args) - def parse_patron_loans(self, checkout_data): + def parse_patron_loans(self, checkout_data: Mapping[str, Any]) -> LoanInfo: # We should receive a list of JSON objects enki_id = checkout_data["id"] start_date = self._epoch_to_struct(checkout_data["checkoutdate"]) end_date = self._epoch_to_struct(checkout_data["duedate"]) + if self.collection is None: + raise ValueError("Collection is None") return LoanInfo( self.collection, DataSource.ENKI, @@ -524,16 +565,26 @@ def parse_patron_loans(self, checkout_data): fulfillment_info=None, ) - def parse_patron_holds(self, hold_data): - pass + def parse_patron_holds(self, hold_data: Mapping[str, Any]) -> Optional[HoldInfo]: + self.log.warning( + "Hold information received, but parsing patron holds is not implemented. 
%r", + hold_data, + ) + return None - def place_hold(self, patron, pin, licensepool, notification_email_address): - pass + def place_hold( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + notification_email_address: Optional[str], + ) -> HoldInfo: + raise NotImplementedError() - def release_hold(self, patron, pin, licensepool): - pass + def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: + raise NotImplementedError() - def update_availability(self, licensepool): + def update_availability(self, licensepool: LicensePool) -> None: pass @@ -552,16 +603,19 @@ class BibliographicParser: "Spanish": "spa", } - def process_all(self, json_data): - if isinstance(json_data, (bytes, str)): - json_data = json.loads(json_data) - returned_titles = json_data.get("result", {}).get("titles", []) + def process_all( + self, json_data: bytes | str | Mapping[str, Any] + ) -> Generator[Metadata, None, None]: + data = ( + json.loads(json_data) if isinstance(json_data, (bytes, str)) else json_data + ) + returned_titles = data.get("result", {}).get("titles", []) for book in returned_titles: data = self.extract_bibliographic(book) if data: yield data - def extract_bibliographic(self, element): + def extract_bibliographic(self, element: Mapping[str, str]) -> Metadata: """Extract Metadata and CirculationData from a dictionary of information from Enki. @@ -663,13 +717,18 @@ def extract_bibliographic(self, element): ) circulationdata = self.extract_circulation( primary_identifier, - element.get("availability", {}), + cast(Mapping[str, str], element.get("availability", {})), element.get("formattype", None), ) metadata.circulation = circulationdata return metadata - def extract_circulation(self, primary_identifier, availability, formattype): + def extract_circulation( + self, + primary_identifier: IdentifierData, + availability: Mapping[str, str], + formattype: Optional[str], + ) -> Optional[CirculationData]: """Turn the 'availability' portion of an Enki API response into a CirculationData. """ @@ -717,7 +776,13 @@ class EnkiImport(CollectionMonitor, TimelineMonitor): FIVE_MINUTES = datetime.timedelta(minutes=5) DEFAULT_START_TIME = CollectionMonitor.NEVER - def __init__(self, _db, collection, api_class=EnkiAPI, analytics=None): + def __init__( + self, + _db: Session, + collection: Collection, + api_class: EnkiAPI | Callable[..., EnkiAPI] = EnkiAPI, + analytics: Optional[Analytics] = None, + ): """Constructor.""" super().__init__(_db, collection) self._db = _db @@ -730,10 +795,15 @@ def __init__(self, _db, collection, api_class=EnkiAPI, analytics=None): self.analytics = analytics or Analytics(_db) @property - def collection(self): + def collection(self) -> Collection | None: return Collection.by_id(self._db, id=self.collection_id) - def catch_up_from(self, start, cutoff, progress): + def catch_up_from( + self, + start: Optional[datetime.datetime], + cutoff: Optional[datetime.datetime], + progress: TimestampData, + ) -> None: """Find Enki books that changed recently. :param start: Find all books that changed since this date. 
@@ -757,7 +827,7 @@ def catch_up_from(self, start, cutoff, progress): % (new_titles, circulation_updates) ) - def full_import(self): + def full_import(self) -> int: """Import the entire Enki collection, page by page.""" id_start = 0 batch_size = self.DEFAULT_BATCH_SIZE @@ -775,7 +845,7 @@ def full_import(self): id_start += self.DEFAULT_BATCH_SIZE return total_items - def incremental_import(self, since): + def incremental_import(self, since: datetime.datetime) -> tuple[int, int]: # Take care of new titles and titles with updated metadata. new_titles = 0 for metadata in self.api.updated_titles(since): @@ -788,7 +858,7 @@ def incremental_import(self, since): self._db.commit() return new_titles, circulation_changes - def update_circulation(self, since): + def update_circulation(self, since: datetime.datetime) -> int: """Process circulation events that happened since `since`. :return: The total number of circulation events. @@ -805,7 +875,9 @@ def update_circulation(self, since): circulation_changes += self._update_circulation(start, end) return circulation_changes - def _update_circulation(self, start, end): + def _update_circulation( + self, start: datetime.datetime, end: datetime.datetime + ) -> int: """Process circulation events that happened between `start` and `end`. @@ -831,7 +903,7 @@ def _update_circulation(self, start, end): return circulation_changes - def process_book(self, bibliographic): + def process_book(self, bibliographic: Metadata) -> Tuple[Edition, LicensePool]: """Make the local database reflect the state of the remote Enki collection for the given book. @@ -861,7 +933,12 @@ class EnkiCollectionReaper(IdentifierSweepMonitor): INTERVAL_SECONDS = 3600 * 4 PROTOCOL = "Enki" - def __init__(self, _db, collection, api_class=EnkiAPI): + def __init__( + self, + _db: Session, + collection: Collection, + api_class: EnkiAPI | Callable[..., EnkiAPI] = EnkiAPI, + ): self._db = _db super().__init__(self._db, collection) if callable(api_class): @@ -870,12 +947,12 @@ def __init__(self, _db, collection, api_class=EnkiAPI): api = api_class self.api = api - def process_item(self, identifier): + def process_item(self, identifier: Identifier) -> Optional[CirculationData]: self.log.debug("Seeing if %s needs reaping", identifier.identifier) metadata = self.api.get_item(identifier.identifier) if metadata: # This title is still in the collection. Do nothing. - return + return None # Get this collection's license pool for this identifier. # We'll reap it by setting its licenses_owned to 0. @@ -883,7 +960,7 @@ def process_item(self, identifier): if not pool or pool.licenses_owned == 0: # It's already been reaped. 
- return + return None if pool.presentation_edition: self.log.warn("Removing %r from circulation", pool.presentation_edition) diff --git a/pyproject.toml b/pyproject.toml index 9b17a8bfc0..acb3dc7b98 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -78,6 +78,7 @@ module = [ "api.axis", "api.circulation", "api.discovery.*", + "api.enki", "api.integration.*", "api.lcp.hash", "api.odl", diff --git a/tests/api/test_enki.py b/tests/api/test_enki.py index e29cf94b2d..9cdbb3cbb0 100644 --- a/tests/api/test_enki.py +++ b/tests/api/test_enki.py @@ -2,7 +2,7 @@ import datetime import json -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, cast from unittest.mock import MagicMock import pytest @@ -10,6 +10,7 @@ from api.circulation import FulfillmentInfo, LoanInfo from api.circulation_exceptions import * from api.enki import BibliographicParser, EnkiAPI, EnkiCollectionReaper, EnkiImport +from core.analytics import Analytics from core.metadata_layer import CirculationData, Metadata, TimestampData from core.model import ( Contributor, @@ -40,7 +41,9 @@ def __init__(self, db: DatabaseTransactionFixture, files: EnkiFilesFixture): self.db = db self.files = files self.api = MockEnkiAPI(db.session, db.default_library()) - self.collection = self.api.collection + collection = self.api.collection + assert collection is not None + self.collection = collection @pytest.fixture(scope="function") @@ -66,11 +69,6 @@ def test_constructor(self, enki_test_fixture: EnkiTestFixure): collection.protocol = ExternalIntegration.ENKI EnkiAPI(db.session, collection) - def test_external_integration(self, enki_test_fixture: EnkiTestFixure): - db = enki_test_fixture.db - integration = enki_test_fixture.api.external_integration(db.session) - assert ExternalIntegration.ENKI == integration.protocol - def test_enki_library_id(self, enki_test_fixture: EnkiTestFixure): db = enki_test_fixture.db # The default library has already had this value set on its @@ -126,6 +124,7 @@ def patron_activity(self, patron, pin): # Collection used in the API -- one library with a default # patron and one without. no_default_patron = db.library() + assert api.collection is not None api.collection.libraries.append(no_default_patron) with_default_patron = db.default_library() @@ -137,7 +136,7 @@ def patron_activity(self, patron, pin): default_patron_activity, circulation_changes, collection_changes, - ) = sorted(api._run_self_tests(db.session), key=lambda x: x.name) + ) = sorted(api._run_self_tests(db.session), key=lambda x: str(x.name)) # Verify that each test method was called and returned the # expected SelfTestResult object. @@ -810,7 +809,9 @@ def process_book(self, data): api = MockAPI(pages) # Do the 'import'. - importer = Mock(db.session, enki_test_fixture.collection, api_class=api) + importer = Mock( + db.session, enki_test_fixture.collection, api_class=cast(EnkiAPI, api) + ) importer.full_import() # get_all_titles was called three times, once for the first two @@ -842,8 +843,10 @@ def update_circulation(self, since): self.update_circulation_called_with = since api = MockAPI() - importer = Mock(db.session, enki_test_fixture.collection, api_class=api) - since = object() + importer = Mock( + db.session, enki_test_fixture.collection, api_class=cast(EnkiAPI, api) + ) + since = MagicMock() importer.incremental_import(since) # The 'since' value was passed into both methods. 
@@ -951,7 +954,10 @@ def test__update_circulation(self, enki_test_fixture: EnkiTestFixure): analytics = MockAnalyticsProvider() monitor = EnkiImport( - db.session, enki_test_fixture.collection, api_class=api, analytics=analytics + db.session, + enki_test_fixture.collection, + api_class=api, + analytics=cast(Analytics, analytics), ) end = utc_now() From fe6f5836c48082835777dcb81ac92e4e38769113 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 16 Oct 2023 20:13:13 +0000 Subject: [PATCH 117/262] Bump pillow from 10.0.1 to 10.1.0 (#1465) --- poetry.lock | 110 ++++++++++++++++++++++++++-------------------------- 1 file changed, 55 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index a5aae8c889..4c76f4d173 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2597,65 +2597,65 @@ uritemplate = ">=4.1,<5.0" [[package]] name = "pillow" -version = "10.0.1" +version = "10.1.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.0.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:8f06be50669087250f319b706decf69ca71fdecd829091a37cc89398ca4dc17a"}, - {file = "Pillow-10.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50bd5f1ebafe9362ad622072a1d2f5850ecfa44303531ff14353a4059113b12d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6a90167bcca1216606223a05e2cf991bb25b14695c518bc65639463d7db722d"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11c9102c56ffb9ca87134bd025a43d2aba3f1155f508eff88f694b33a9c6d19"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:186f7e04248103482ea6354af6d5bcedb62941ee08f7f788a1c7707bc720c66f"}, - {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0462b1496505a3462d0f35dc1c4d7b54069747d65d00ef48e736acda2c8cbdff"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d889b53ae2f030f756e61a7bff13684dcd77e9af8b10c6048fb2c559d6ed6eaf"}, - {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:552912dbca585b74d75279a7570dd29fa43b6d93594abb494ebb31ac19ace6bd"}, - {file = "Pillow-10.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:787bb0169d2385a798888e1122c980c6eff26bf941a8ea79747d35d8f9210ca0"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fd2a5403a75b54661182b75ec6132437a181209b901446ee5724b589af8edef1"}, - {file = "Pillow-10.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d7e91b4379f7a76b31c2dda84ab9e20c6220488e50f7822e59dac36b0cd92b1"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e9adb3f22d4c416e7cd79b01375b17159d6990003633ff1d8377e21b7f1b21"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93139acd8109edcdeffd85e3af8ae7d88b258b3a1e13a038f542b79b6d255c54"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:92a23b0431941a33242b1f0ce6c88a952e09feeea9af4e8be48236a68ffe2205"}, - {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cbe68deb8580462ca0d9eb56a81912f59eb4542e1ef8f987405e35a0179f4ea2"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:522ff4ac3aaf839242c6f4e5b406634bfea002469656ae8358644fc6c4856a3b"}, - {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:84efb46e8d881bb06b35d1d541aa87f574b58e87f781cbba8d200daa835b42e1"}, - {file = "Pillow-10.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:898f1d306298ff40dc1b9ca24824f0488f6f039bc0e25cfb549d3195ffa17088"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:bcf1207e2f2385a576832af02702de104be71301c2696d0012b1b93fe34aaa5b"}, - {file = "Pillow-10.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d6c9049c6274c1bb565021367431ad04481ebb54872edecfcd6088d27edd6ed"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28444cb6ad49726127d6b340217f0627abc8732f1194fd5352dec5e6a0105635"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de596695a75496deb3b499c8c4f8e60376e0516e1a774e7bc046f0f48cd620ad"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:2872f2d7846cf39b3dbff64bc1104cc48c76145854256451d33c5faa55c04d1a"}, - {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4ce90f8a24e1c15465048959f1e94309dfef93af272633e8f37361b824532e91"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ee7810cf7c83fa227ba9125de6084e5e8b08c59038a7b2c9045ef4dde61663b4"}, - {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1be1c872b9b5fcc229adeadbeb51422a9633abd847c0ff87dc4ef9bb184ae08"}, - {file = "Pillow-10.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:98533fd7fa764e5f85eebe56c8e4094db912ccbe6fbf3a58778d543cadd0db08"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:764d2c0daf9c4d40ad12fbc0abd5da3af7f8aa11daf87e4fa1b834000f4b6b0a"}, - {file = "Pillow-10.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fcb59711009b0168d6ee0bd8fb5eb259c4ab1717b2f538bbf36bacf207ef7a68"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:697a06bdcedd473b35e50a7e7506b1d8ceb832dc238a336bd6f4f5aa91a4b500"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f665d1e6474af9f9da5e86c2a3a2d2d6204e04d5af9c06b9d42afa6ebde3f21"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:2fa6dd2661838c66f1a5473f3b49ab610c98a128fc08afbe81b91a1f0bf8c51d"}, - {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:3a04359f308ebee571a3127fdb1bd01f88ba6f6fb6d087f8dd2e0d9bff43f2a7"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:723bd25051454cea9990203405fa6b74e043ea76d4968166dfd2569b0210886a"}, - {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:71671503e3015da1b50bd18951e2f9daf5b6ffe36d16f1eb2c45711a301521a7"}, - {file = "Pillow-10.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:44e7e4587392953e5e251190a964675f61e4dae88d1e6edbe9f36d6243547ff3"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:3855447d98cced8670aaa63683808df905e956f00348732448b5a6df67ee5849"}, - {file = "Pillow-10.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed2d9c0704f2dc4fa980b99d565c0c9a543fe5101c25b3d60488b8ba80f0cce1"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5bb289bb835f9fe1a1e9300d011eef4d69661bb9b34d5e196e5e82c4cb09b37"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0d3e54ab1df9df51b914b2233cf779a5a10dfd1ce339d0421748232cea9876"}, - {file = 
"Pillow-10.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:2cc6b86ece42a11f16f55fe8903595eff2b25e0358dec635d0a701ac9586588f"}, - {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ca26ba5767888c84bf5a0c1a32f069e8204ce8c21d00a49c90dabeba00ce0145"}, - {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f0b4b06da13275bc02adfeb82643c4a6385bd08d26f03068c2796f60d125f6f2"}, - {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bc2e3069569ea9dbe88d6b8ea38f439a6aad8f6e7a6283a38edf61ddefb3a9bf"}, - {file = "Pillow-10.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:8b451d6ead6e3500b6ce5c7916a43d8d8d25ad74b9102a629baccc0808c54971"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:32bec7423cdf25c9038fef614a853c9d25c07590e1a870ed471f47fb80b244db"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cf63d2c6928b51d35dfdbda6f2c1fddbe51a6bc4a9d4ee6ea0e11670dd981e"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f6d3d4c905e26354e8f9d82548475c46d8e0889538cb0657aa9c6f0872a37aa4"}, - {file = "Pillow-10.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:847e8d1017c741c735d3cd1883fa7b03ded4f825a6e5fcb9378fd813edee995f"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7f771e7219ff04b79e231d099c0a28ed83aa82af91fd5fa9fdb28f5b8d5addaf"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459307cacdd4138edee3875bbe22a2492519e060660eaf378ba3b405d1c66317"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b059ac2c4c7a97daafa7dc850b43b2d3667def858a4f112d1aa082e5c3d6cf7d"}, - {file = "Pillow-10.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6caf3cd38449ec3cd8a68b375e0c6fe4b6fd04edb6c9766b55ef84a6e8ddf2d"}, - {file = "Pillow-10.0.1.tar.gz", hash = "sha256:d72967b06be9300fed5cfbc8b5bafceec48bf7cdc7dab66b1d2549035287191d"}, + {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, + {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, + {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, + {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, + {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = 
"sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, + {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, + {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, + {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, + {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, + {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, + {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, + {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, + {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, + {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, + {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = 
"sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, + {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, + {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, + {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, + {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, + {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, + {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, + {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, + {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, + {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, ] [package.extras] From 8aff110289b17cb0547875f42cde29049801f54f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 17 Oct 2023 15:04:56 -0300 Subject: [PATCH 118/262] Bump types-pillow from 10.0.0.3 to 10.1.0.0 (#1467) Bumps [types-pillow](https://github.com/python/typeshed) from 
10.0.0.3 to 10.1.0.0. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pillow dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4c76f4d173..26e4fa8fbf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4158,13 +4158,13 @@ referencing = "*" [[package]] name = "types-pillow" -version = "10.0.0.3" +version = "10.1.0.0" description = "Typing stubs for Pillow" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-Pillow-10.0.0.3.tar.gz", hash = "sha256:ae0c877d363da349bbb82c5463c9e78037290cc07d3714cb0ceaf5d2f7f5c825"}, - {file = "types_Pillow-10.0.0.3-py3-none-any.whl", hash = "sha256:54a49f3c6a3f5e95ebeee396d7773dde22ce2515d594f9c0596c0a983558f0d4"}, + {file = "types-Pillow-10.1.0.0.tar.gz", hash = "sha256:0f5e7cf010ed226800cb5821e87781e5d0e81257d948a9459baa74a8c8b7d822"}, + {file = "types_Pillow-10.1.0.0-py3-none-any.whl", hash = "sha256:f97f596b6a39ddfd26da3eb67421062193e10732d2310f33898d36f9694331b5"}, ] [[package]] From 1279e34282238144097c983018c5e533eaa4596b Mon Sep 17 00:00:00 2001 From: dbernstein Date: Tue, 17 Oct 2023 12:00:49 -0700 Subject: [PATCH 119/262] Add PALACE_REPORTING_NAME env variable to time report subject (#1455) * Pulls the PALACE_CM_NAME env variable into the subject of the audio time tracking report email. Resolves: Fulfills step one of https://ebce-lyrasis.atlassian.net/browse/PP-539 --- README.md | 5 +++++ core/config.py | 3 +++ core/jobs/playtime_entries.py | 10 ++++++++-- docker-compose.yml | 1 + tests/core/jobs/test_playtime_entries.py | 7 +++++-- 5 files changed, 22 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 39eb39cb93..a5eb443339 100644 --- a/README.md +++ b/README.md @@ -186,6 +186,11 @@ a storage service, you can set the following environment variables: - `{key}`: The key of the file. - `{region}`: The region of the storage service. +#### Reporting + +- `PALACE_REPORTING_NAME`: (Optional) A name used to identify the CM instance associated with generated reports. +- `SIMPLIFIED_REPORTING_EMAIL`: (Required) Email address of recipient of reports. + #### Logging The application uses the [Python logging](https://docs.python.org/3/library/logging.html) module for logging. Optionally diff --git a/core/config.py b/core/config.py index 00b17048e3..2ae72cfd6f 100644 --- a/core/config.py +++ b/core/config.py @@ -61,6 +61,9 @@ class Configuration(ConfigurationConstants): # Environment variable for temporary reporting email REPORTING_EMAIL_ENVIRONMENT_VARIABLE = "SIMPLIFIED_REPORTING_EMAIL" + # Environment variable for used to distinguish one CM environment from another in reports + REPORTING_NAME_ENVIRONMENT_VARIABLE = "PALACE_REPORTING_NAME" + # ConfigurationSetting key for the base url of the app. 
     BASE_URL_KEY = "base_url"

diff --git a/core/jobs/playtime_entries.py b/core/jobs/playtime_entries.py
index 06313e5bab..6e5917ed84 100644
--- a/core/jobs/playtime_entries.py
+++ b/core/jobs/playtime_entries.py
@@ -127,9 +127,15 @@ def do_run(self):
         formatted_start_date = start.strftime(self.REPORT_DATE_FORMAT)
         formatted_until_date = until.strftime(self.REPORT_DATE_FORMAT)
         report_date_label = f"{formatted_start_date} - {formatted_until_date}"
-        email_subject = (
-            f"Playtime Summaries {formatted_start_date} - {formatted_until_date}"
+
+        reporting_name = os.environ.get(
+            Configuration.REPORTING_NAME_ENVIRONMENT_VARIABLE, ""
         )
+
+        if len(reporting_name) > 0:
+            reporting_name += ": "
+
+        email_subject = f"{reporting_name}Playtime Summaries {formatted_start_date} - {formatted_until_date}"

         attachment_extension = "csv"
         attachment_name = f"playtime-summary-{formatted_start_date}-{formatted_until_date}.{attachment_extension}"
diff --git a/docker-compose.yml b/docker-compose.yml
index a9996f8b7c..2588473739 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -10,6 +10,7 @@ x-cm-env-variables: &cm-env-variables
   PALACE_STORAGE_PUBLIC_ACCESS_BUCKET: "public"
   PALACE_STORAGE_ANALYTICS_BUCKET: "analytics"
   PALACE_STORAGE_URL_TEMPLATE: "http://localhost:9000/{bucket}/{key}"
+  PALACE_REPORTING_NAME: "TEST CM"

 services:
   # example docker compose configuration for testing and development
diff --git a/tests/core/jobs/test_playtime_entries.py b/tests/core/jobs/test_playtime_entries.py
index fabcee2e36..438293e44c 100644
--- a/tests/core/jobs/test_playtime_entries.py
+++ b/tests/core/jobs/test_playtime_entries.py
@@ -247,13 +247,16 @@ def test_do_run(self, db: DatabaseTransactionFixture):
         # collection2 library2
         playtime(db.session, identifier, collection2, library2, date3m(3), 300)

+        reporting_name = "test-cm"
+
         # Horrible unbracketed syntax for python 3.8
         with patch("core.jobs.playtime_entries.csv.writer") as writer, patch(
             "core.jobs.playtime_entries.EmailManager"
         ) as email, patch(
             "core.jobs.playtime_entries.os.environ",
             new={
-                Configuration.REPORTING_EMAIL_ENVIRONMENT_VARIABLE: "reporting@test.email"
+                Configuration.REPORTING_EMAIL_ENVIRONMENT_VARIABLE: "reporting@test.email",
+                Configuration.REPORTING_NAME_ENVIRONMENT_VARIABLE: reporting_name,
             },
         ):
             PlaytimeEntriesEmailReportsScript(db.session).run()
@@ -290,7 +293,7 @@ def test_do_run(self, db: DatabaseTransactionFixture):

             assert email.send_email.call_count == 1
             assert email.send_email.call_args == call(
-                f"Playtime Summaries {cutoff} - {until}",
+                f"{reporting_name}: Playtime Summaries {cutoff} - {until}",
                 receivers=["reporting@test.email"],
                 text="",
                 attachments={

From 480d0f958e42b8999c34e9ea7a0dc3cc76665f53 Mon Sep 17 00:00:00 2001
From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com>
Date: Wed, 18 Oct 2023 10:50:15 +0530
Subject: [PATCH 120/262] PP-483 Added last notified date to loan and hold models (#1464)

* Added last notified date to loan and hold models

This allows us to run the notification scripts multiple times a day without bombarding the same patron multiple times with the same content

* We only update patron_last_notified if there is at least one notification success

* Updated the notification crons to run every 2 hours
---
 ...8f391d_loan_and_hold_notification_times.py | 34 +++++++++++
 core/jobs/holds_notification.py | 11 +++-
 core/model/patron.py | 2 +
 core/scripts.py | 16 +++--
 core/util/notifications.py | 11 +++-
 docker/services/cron/cron.d/circulation | 4 +-
 tests/core/test_holds_notifications.py | 8 ++-
tests/core/test_scripts.py | 40 +++++++++---- tests/core/util/test_notifications.py | 59 +++++++++++++++++++ 9 files changed, 164 insertions(+), 21 deletions(-) create mode 100644 alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py diff --git a/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py b/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py new file mode 100644 index 0000000000..4c2abc3cbb --- /dev/null +++ b/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py @@ -0,0 +1,34 @@ +"""Loan and hold notification times + +Revision ID: 21a65b8f391d +Revises: 5d71a80073d5 +Create Date: 2023-10-16 09:46:58.743018+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "21a65b8f391d" +down_revision = "5d71a80073d5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # ### commands auto generated by Alembic ### + op.add_column( + "holds", sa.Column("patron_last_notified", sa.DateTime(), nullable=True) + ) + op.add_column( + "loans", sa.Column("patron_last_notified", sa.DateTime(), nullable=True) + ) + # ### end Alembic commands ### + + +def downgrade() -> None: + # ### commands auto generated by Alembic ### + op.drop_column("loans", "patron_last_notified") + op.drop_column("holds", "patron_last_notified") + # ### end Alembic commands ### diff --git a/core/jobs/holds_notification.py b/core/jobs/holds_notification.py index 60b5504033..2e4a13ca32 100644 --- a/core/jobs/holds_notification.py +++ b/core/jobs/holds_notification.py @@ -2,11 +2,14 @@ from typing import TYPE_CHECKING +from sqlalchemy import or_ + from core.config import Configuration, ConfigurationConstants from core.model import Base from core.model.configuration import ConfigurationSetting from core.model.patron import Hold from core.monitor import SweepMonitor +from core.util.datetime_helpers import utc_now from core.util.notifications import PushNotifications if TYPE_CHECKING: @@ -38,7 +41,13 @@ def scope_to_collection(self, qu: Query, collection: Collection) -> Query: def item_query(self) -> Query: query = super().item_query() - query = query.filter(Hold.position == 0) + query = query.filter( + Hold.position == 0, + or_( + Hold.patron_last_notified != utc_now().date(), + Hold.patron_last_notified == None, + ), + ) return query def process_items(self, items: list[Hold]) -> None: diff --git a/core/model/patron.py b/core/model/patron.py index 3e5a7ee8a5..4e686b1295 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -552,6 +552,7 @@ class Loan(Base, LoanAndHoldMixin): # Some distributors (e.g. Feedbooks) may have an identifier that can # be used to check the status of a specific Loan. 
external_identifier = Column(Unicode, unique=True, nullable=True) + patron_last_notified = Column(DateTime, nullable=True) __table_args__ = (UniqueConstraint("patron_id", "license_pool_id"),) @@ -583,6 +584,7 @@ class Hold(Base, LoanAndHoldMixin): end = Column(DateTime(timezone=True), index=True) position = Column(Integer, index=True) external_identifier = Column(Unicode, unique=True, nullable=True) + patron_last_notified = Column(DateTime, nullable=True) patron: Mapped[Patron] = relationship( "Patron", back_populates="holds", lazy="joined" diff --git a/core/scripts.py b/core/scripts.py index e7b7cf2ca6..fb35ee56e7 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -11,7 +11,7 @@ from enum import Enum from typing import Generator, Optional, Type -from sqlalchemy import and_, exists, tuple_ +from sqlalchemy import and_, exists, or_, tuple_ from sqlalchemy.orm import Query, Session, defer from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound @@ -2824,7 +2824,16 @@ def do_run(self): ) return - _query = self._db.query(Loan).order_by(Loan.id) + _query = ( + self._db.query(Loan) + .filter( + or_( + Loan.patron_last_notified != utc_now().date(), + Loan.patron_last_notified == None, + ) + ) + .order_by(Loan.id) + ) last_loan_id = None processed_loans = 0 @@ -2863,9 +2872,6 @@ def process_loan(self, loan: Loan): self.log.warning(f"Loan: {loan.id} has no end date, skipping") return delta: datetime.timedelta = loan.end - now - # We assume this script runs ONCE A DAY - # else this will send notifications multiple times for - # the same day if delta.days in self.LOAN_EXPIRATION_DAYS: self.log.info( f"Patron {patron.authorization_identifier} has an expiring loan on ({loan.license_pool.identifier.urn})" diff --git a/core/util/notifications.py b/core/util/notifications.py index 4be724a745..4c11f9af74 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -15,6 +15,7 @@ from core.model.identifier import Identifier from core.model.patron import Hold, Loan, Patron from core.model.work import Work +from core.util.datetime_helpers import utc_now from core.util.log import LoggerMixin @@ -135,9 +136,13 @@ def send_loan_expiry_message( f"Patron {loan.patron.authorization_identifier} has {len(tokens)} device tokens. " f"Sending loan expiry notification(s)." 
) - return cls.send_messages( + responses = cls.send_messages( tokens, messaging.Notification(title=title, body=body), data ) + if len(responses) > 0: + # Atleast one notification succeeded + loan.patron_last_notified = utc_now().date() + return responses @classmethod def send_activity_sync_message(cls, patrons: list[Patron]) -> list[str]: @@ -205,6 +210,10 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: data["authorization_identifier"] = hold.patron.authorization_identifier resp = cls.send_messages(tokens, messaging.Notification(title=title), data) + if len(resp) > 0: + # Atleast one notification succeeded + hold.patron_last_notified = utc_now().date() + responses.extend(resp) return responses diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index 4cfdafc931..b261593dc5 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -111,8 +111,8 @@ HOME=/var/www/circulation # Notifications # -10 12 * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 -15 12 * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 +10 */2 * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 +15 */2 * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 0 1 * * * root core/bin/run patron_activity_sync_notifications >> /var/log/cron.log 2>&1 # Audiobook playtimes diff --git a/tests/core/test_holds_notifications.py b/tests/core/test_holds_notifications.py index a97b350b12..505e66ddc1 100644 --- a/tests/core/test_holds_notifications.py +++ b/tests/core/test_holds_notifications.py @@ -1,3 +1,4 @@ +import datetime from unittest.mock import call, patch import pytest @@ -5,6 +6,7 @@ from core.config import Configuration, ConfigurationConstants from core.jobs.holds_notification import HoldsNotificationMonitor from core.model.configuration import ConfigurationSetting +from core.util.datetime_helpers import utc_now from tests.fixtures.database import DatabaseTransactionFixture @@ -27,12 +29,16 @@ def test_item_query(self, holds_fixture: HoldsNotificationFixture): work2 = db.work(with_license_pool=True) work3 = db.work(with_license_pool=True) work4 = db.work(with_license_pool=True) + work5 = db.work(with_license_pool=True) hold1, _ = work1.active_license_pool().on_hold_to(patron1, position=1) hold2, _ = work2.active_license_pool().on_hold_to(patron1, position=0) hold3, _ = work3.active_license_pool().on_hold_to(patron1, position=0) hold4, _ = work4.active_license_pool().on_hold_to(patron1, position=None) + hold5, _ = work5.active_license_pool().on_hold_to(patron1, position=0) + hold5.patron_last_notified = utc_now().date() + hold2.patron_last_notified = utc_now().date() - datetime.timedelta(days=1) - # Only position 0 holds should be queried for + # Only position 0 holds, that haven't bene notified today, should be queried for assert holds_fixture.monitor.item_query().all() == [hold2, hold3] def test_script_run(self, holds_fixture: HoldsNotificationFixture): diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index 812128b9e8..289140cd33 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -2523,32 +2523,50 @@ def test_loan_notification(self, db: DatabaseTransactionFixture): ) def test_do_run(self, db: DatabaseTransactionFixture): + now = utc_now() self._setup_method(db) loan, _ = self.work.active_license_pool().loan_to( self.patron, - utc_now(), - utc_now() + datetime.timedelta(days=1, hours=1), + now, + now + 
datetime.timedelta(days=1, hours=1), ) work2 = db.work(with_license_pool=True) loan2, _ = work2.active_license_pool().loan_to( self.patron, - utc_now(), - utc_now() + datetime.timedelta(days=2, hours=1), + now, + now + datetime.timedelta(days=2, hours=1), + ) + + work3 = db.work(with_license_pool=True) + p = work3.active_license_pool() + loan3, _ = p.loan_to( + self.patron, + now, + now + datetime.timedelta(days=1, hours=1), + ) + # loan 3 was notified today already, so should get skipped + loan3.patron_last_notified = now.date() + + work4 = db.work(with_license_pool=True) + p = work4.active_license_pool() + loan4, _ = p.loan_to( + self.patron, + now, + now + datetime.timedelta(days=1, hours=1), ) + # loan 4 was notified yesterday, so should NOT get skipped + loan4.patron_last_notified = now.date() - datetime.timedelta(days=1) self.script.process_loan = MagicMock() self.script.BATCH_SIZE = 1 self.script.do_run() - assert self.script.process_loan.call_count == 2 + assert self.script.process_loan.call_count == 3 assert self.script.process_loan.call_args_list == [ - call( - loan, - ), - call( - loan2, - ), + call(loan), + call(loan2), + call(loan4), ] # Sitewide notifications are turned off diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index c9cc374778..07dd2af3f3 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -1,11 +1,13 @@ import logging import re +from datetime import datetime from typing import Generator from unittest import mock from unittest.mock import MagicMock import firebase_admin import pytest +import pytz from firebase_admin.exceptions import FirebaseError from firebase_admin.messaging import UnregisteredError from google.auth import credentials @@ -17,6 +19,7 @@ from core.model.constants import NotificationConstants from core.model.devicetokens import DeviceToken, DeviceTokenTypes from core.model.work import Work +from core.util.datetime_helpers import utc_now from core.util.notifications import PushNotifications from tests.fixtures.database import DatabaseTransactionFixture @@ -90,6 +93,7 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur assert PushNotifications.send_loan_expiry_message( loan, 1, [device_token] ) == ["mid-mock"] + assert loan.patron_last_notified == utc_now().date() with mock.patch("core.util.notifications.messaging") as messaging: PushNotifications.send_loan_expiry_message(loan, 1, [device_token]) @@ -123,6 +127,58 @@ def test_send_loan_notification(self, push_notf_fixture: PushNotificationsFixtur {"dry_run": True, "app": push_notf_fixture.app}, ] + def test_patron_last_notified_updated( + self, push_notf_fixture: PushNotificationsFixture + ): + db = push_notf_fixture.db + patron = db.patron(external_identifier="xyz1") + patron.authorization_identifier = "abc1" + + device_token, _ = get_one_or_create( + db.session, + DeviceToken, + device_token="atoken", + token_type=DeviceTokenTypes.FCM_ANDROID, + patron=patron, + ) + work: Work = db.work(with_license_pool=True) + loan, _ = work.active_license_pool().loan_to(patron) # type: ignore + work2: Work = db.work(with_license_pool=True) + hold, _ = work2.active_license_pool().on_hold_to(patron) # type: ignore + + with mock.patch( + "core.util.notifications.PushNotifications.send_messages" + ) as mock_send, mock.patch("core.util.notifications.utc_now") as mock_now: + # Loan expiry + # No messages sent + mock_send.return_value = [] + responses = PushNotifications.send_loan_expiry_message( + 
loan, 1, [device_token] + ) + assert responses == [] + assert loan.patron_last_notified == None + + # One message sent + mock_now.return_value = datetime(2020, 1, 1, tzinfo=pytz.UTC) + mock_send.return_value = ["mock-mid"] + responses = PushNotifications.send_loan_expiry_message( + loan, 1, [device_token] + ) + assert responses == ["mock-mid"] + # last notified gets updated + assert loan.patron_last_notified == datetime(2020, 1, 1).date() + + # Now hold expiry + mock_send.return_value = [] + responses = PushNotifications.send_holds_notifications([hold]) + assert responses == [] + assert hold.patron_last_notified == None + + mock_send.return_value = ["mock-mid"] + responses = PushNotifications.send_holds_notifications([hold]) + assert responses == ["mock-mid"] + assert hold.patron_last_notified == datetime(2020, 1, 1).date() + def test_send_activity_sync(self, push_notf_fixture: PushNotificationsFixture): db = push_notf_fixture.db # Only patron 1 will get authorization identifiers @@ -224,6 +280,9 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): with mock.patch("core.util.notifications.messaging") as messaging: PushNotifications.send_holds_notifications([hold1, hold2]) + assert ( + hold1.patron_last_notified == hold2.patron_last_notified == utc_now().date() + ) loans_api = "http://localhost/default/loans" assert messaging.Message.call_count == 3 assert messaging.Message.call_args_list == [ From e22dade6c2c099f0cc57f5eccaca3f6dd697e7cc Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 18 Oct 2023 10:51:04 +0530 Subject: [PATCH 121/262] Fixed an error while serializing LCP passphrases (#1466) --- core/feed/serializer/opds2.py | 2 +- tests/api/feed/test_opds2_serializer.py | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/core/feed/serializer/opds2.py b/core/feed/serializer/opds2.py index 3b98532077..fca7c93a6b 100644 --- a/core/feed/serializer/opds2.py +++ b/core/feed/serializer/opds2.py @@ -165,7 +165,7 @@ def _indirect(indirect: IndirectAcquisition) -> Dict[str, Any]: props["indirectAcquisition"].append(_indirect(indirect)) if link.lcp_hashed_passphrase: - props["lcp_hashed_passphrase"] = link.lcp_hashed_passphrase + props["lcp_hashed_passphrase"] = link.lcp_hashed_passphrase.text if link.drm_licensor: props["licensor"] = { diff --git a/tests/api/feed/test_opds2_serializer.py b/tests/api/feed/test_opds2_serializer.py index b240a58fa7..506a21d961 100644 --- a/tests/api/feed/test_opds2_serializer.py +++ b/tests/api/feed/test_opds2_serializer.py @@ -152,6 +152,7 @@ def test__serialize_acquisition_link(self): availability_status="available", availability_since="2022-02-02", availability_until="2222-02-02", + lcp_hashed_passphrase=FeedEntryType(text="LCPPassphrase"), indirect_acquisitions=[ IndirectAcquisition( type="indirect1", @@ -180,6 +181,7 @@ def test__serialize_acquisition_link(self): "child": [{"type": "indirect1-1"}, {"type": "indirect1-2"}], } ], + lcp_hashed_passphrase="LCPPassphrase", ) # Test availability states From f854e56682388ed134518a4b3b1f6d5d4d6aec08 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 12:27:58 +0000 Subject: [PATCH 122/262] Bump urllib3 from 1.26.17 to 1.26.18 (#1468) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 26e4fa8fbf..bdbf2105ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4295,13 +4295,13 @@ files = [ 
 [[package]]
 name = "urllib3"
-version = "1.26.17"
+version = "1.26.18"
 description = "HTTP library with thread-safe connection pooling, file post, and more."
 optional = false
 python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*"
 files = [
-    {file = "urllib3-1.26.17-py2.py3-none-any.whl", hash = "sha256:94a757d178c9be92ef5539b8840d48dc9cf1b2709c9d6b588232a055c524458b"},
-    {file = "urllib3-1.26.17.tar.gz", hash = "sha256:24d6a242c28d29af46c3fae832c36db3bbebcc533dd1bb549172cd739c82df21"},
+    {file = "urllib3-1.26.18-py2.py3-none-any.whl", hash = "sha256:34b97092d7e0a3a8cf7cd10e386f401b3737364026c45e622aa02903dffe0f07"},
+    {file = "urllib3-1.26.18.tar.gz", hash = "sha256:f8ecc1bba5667413457c529ab955bf8c67b45db799d159066261719e328580a0"},
 ]

 [package.extras]

From f79db781066e591682fe8ba73408bc3d3ac60d6f Mon Sep 17 00:00:00 2001
From: Jonathan Green
Date: Wed, 18 Oct 2023 13:25:35 -0300
Subject: [PATCH 123/262] Give OPDS importers an API class (PP-501) (#1442)

Previously OPDS importers were a special case: they had no API class, and
there was a special code path taken for checkouts and fulfillment. Now they
are treated like every other license source, with their own
BaseCirculationAPI.

This is another step along the path to removing external integrations from
license source integrations.
---
 api/admin/controller/integration_settings.py | 2 +-
 api/admin/controller/patron_auth_services.py | 6 +-
 api/admin/controller/settings.py | 11 +-
 api/authentication/base.py | 28 +-
 api/authentication/basic.py | 26 +-
 api/authentication/basic_token.py | 21 +-
 api/authenticator.py | 10 +-
 api/axis.py | 24 +-
 api/bibliotheca.py | 18 +-
 api/circulation.py | 260 +++++++-----------
 api/controller.py | 2 +-
 api/discovery/opds_registration.py | 4 +-
 api/enki.py | 18 +-
 api/firstbook2.py | 8 +-
 api/integration/registry/license_providers.py | 47 +---
 api/integration/registry/patron_auth.py | 4 +-
 api/kansas_patron.py | 8 +-
 api/millenium_patron.py | 4 +-
 api/monitor.py | 2 +-
 api/odilo.py | 29 +-
 api/odl.py | 155 +++++------
 api/odl2.py | 46 ++--
 api/opds_for_distributors.py | 32 +--
 api/overdrive.py | 22 +-
 api/saml/provider.py | 6 +-
 api/simple_authentication.py | 8 +-
 api/sip/__init__.py | 4 +-
 api/sirsidynix_authentication_provider.py | 6 +-
 core/configuration/ignored_identifier.py | 89 ------
 core/connection_config.py | 4 +-
 core/importers.py | 13 -
 core/integration/base.py | 19 +-
 core/integration/settings.py | 8 +-
 core/opds2_import.py | 57 ++--
 core/opds_import.py | 180 +++++++++---
 core/scripts.py | 7 +-
 tests/api/admin/controller/test_collections.py | 7 +-
 tests/api/admin/controller/test_settings.py | 28 +-
 tests/api/mockapi/circulation.py | 12 +-
 tests/api/mockapi/opds_for_distributors.py | 4 +-
 tests/api/test_authenticator.py | 14 +-
 tests/api/test_circulationapi.py | 133 ++-------
 tests/api/test_controller_loan.py | 30 ++
 tests/api/test_controller_odl_notify.py | 6 +-
 tests/api/test_monitor.py | 2 +-
 tests/api/test_odl2.py | 2 +-
 tests/api/test_selftest.py | 7 +-
 tests/core/test_opds2_import.py | 6 +-
 tests/core/test_opds_import.py | 185 ++++++++++++-
 tests/fixtures/api_odl.py | 4 +-
 tests/fixtures/authenticator.py | 4 +-
 tests/fixtures/odl.py | 66 +++--
 tests/migration/test_20230531_0af587ff8595.py | 8 -
 53 files changed, 924 insertions(+), 782 deletions(-)
 delete mode 100644 core/configuration/ignored_identifier.py
 delete mode 100644 core/importers.py

diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py
index 
804ae73220..b491a508fe 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -30,7 +30,7 @@ from core.util.log import LoggerMixin from core.util.problem_detail import ProblemError -T = TypeVar("T", bound=HasIntegrationConfiguration) +T = TypeVar("T", bound=HasIntegrationConfiguration[BaseSettings]) class UpdatedLibrarySettingsTuple(NamedTuple): diff --git a/api/admin/controller/patron_auth_services.py b/api/admin/controller/patron_auth_services.py index ec58c28e5e..ff3ae352f5 100644 --- a/api/admin/controller/patron_auth_services.py +++ b/api/admin/controller/patron_auth_services.py @@ -10,7 +10,7 @@ ) from api.admin.form_data import ProcessFormData from api.admin.problem_details import * -from api.authentication.base import AuthenticationProvider +from api.authentication.base import AuthenticationProviderType from api.authentication.basic import BasicAuthenticationProvider from api.integration.registry.patron_auth import PatronAuthRegistry from core.integration.goals import Goals @@ -25,10 +25,10 @@ class PatronAuthServicesController( - IntegrationSettingsController[AuthenticationProvider], + IntegrationSettingsController[AuthenticationProviderType], AdminPermissionsControllerMixin, ): - def default_registry(self) -> IntegrationRegistry[AuthenticationProvider]: + def default_registry(self) -> IntegrationRegistry[AuthenticationProviderType]: return PatronAuthRegistry() @property diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index 3e84dc5eda..277605c802 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -76,14 +76,21 @@ def _get_integration_protocols( protocols = [] _db = self._db for api in provider_apis: + is_integration = issubclass(api, HasIntegrationConfiguration) protocol = dict() - name = getattr(api, protocol_name_attr) + name = ( + getattr(api, protocol_name_attr) if not is_integration else api.label() + ) protocol["name"] = name label = getattr(api, "NAME", name) protocol["label"] = label - description = getattr(api, "DESCRIPTION", None) + description = ( + getattr(api, "DESCRIPTION", None) + if not is_integration + else api.description() + ) if description != None: protocol["description"] = description diff --git a/api/authentication/base.py b/api/authentication/base.py index 9624ed4a13..913fd8676d 100644 --- a/api/authentication/base.py +++ b/api/authentication/base.py @@ -1,6 +1,7 @@ from __future__ import annotations from abc import ABC, abstractmethod +from typing import TypeVar from money import Money from sqlalchemy.orm import Session @@ -27,9 +28,15 @@ class AuthProviderLibrarySettings(BaseSettings): ... 
+SettingsType = TypeVar("SettingsType", bound=AuthProviderSettings, covariant=True) +LibrarySettingsType = TypeVar( + "LibrarySettingsType", bound=AuthProviderLibrarySettings, covariant=True +) + + class AuthenticationProvider( OPDSAuthenticationFlow, - HasLibraryIntegrationConfiguration, + HasLibraryIntegrationConfiguration[SettingsType, LibrarySettingsType], HasSelfTestsIntegrationConfiguration, LoggerMixin, ABC, @@ -40,8 +47,8 @@ def __init__( self, library_id: int, integration_id: int, - settings: AuthProviderSettings, - library_settings: AuthProviderLibrarySettings, + settings: SettingsType, + library_settings: LibrarySettingsType, analytics: Analytics | None = None, ): self.library_id = library_id @@ -58,16 +65,6 @@ def integration(self, _db: Session) -> IntegrationConfiguration | None: .one_or_none() ) - @classmethod - def settings_class(cls) -> type[AuthProviderSettings]: - return AuthProviderSettings - - @classmethod - def library_settings_class( - cls, - ) -> type[AuthProviderLibrarySettings]: - return AuthProviderLibrarySettings - @property @abstractmethod def identifies_individuals(self): @@ -111,6 +108,11 @@ def get_credential_from_header(self, auth: Authorization) -> str | None: ... +AuthenticationProviderType = AuthenticationProvider[ + AuthProviderSettings, AuthProviderLibrarySettings +] + + class CannotCreateLocalPatron(Exception): """A remote system provided information about a patron, but we could not put it into our database schema. diff --git a/api/authentication/basic.py b/api/authentication/basic.py index da98cd989e..7e9871aa1c 100644 --- a/api/authentication/basic.py +++ b/api/authentication/basic.py @@ -3,7 +3,7 @@ import re from abc import ABC, abstractmethod from enum import Enum -from typing import Any, Dict, Generator, List, Optional, Pattern +from typing import Any, Dict, Generator, List, Optional, Pattern, TypeVar from flask import url_for from pydantic import PositiveInt, validator @@ -259,7 +259,15 @@ def validate_restriction_criteria( return v -class BasicAuthenticationProvider(AuthenticationProvider, ABC): +SettingsType = TypeVar("SettingsType", bound=BasicAuthProviderSettings, covariant=True) +LibrarySettingsType = TypeVar( + "LibrarySettingsType", bound=BasicAuthProviderLibrarySettings, covariant=True +) + + +class BasicAuthenticationProvider( + AuthenticationProvider[SettingsType, LibrarySettingsType], ABC +): """Verify a username/password, obtained through HTTP Basic Auth, with a remote source of truth. 
""" @@ -268,8 +276,8 @@ def __init__( self, library_id: int, integration_id: int, - settings: BasicAuthProviderSettings, - library_settings: BasicAuthProviderLibrarySettings, + settings: SettingsType, + library_settings: LibrarySettingsType, analytics: Analytics | None = None, ): """Create a BasicAuthenticationProvider.""" @@ -337,16 +345,6 @@ def authentication_realm(self) -> str: def flow_type(self) -> str: return "http://opds-spec.org/auth/basic" - @classmethod - def settings_class(cls) -> type[BasicAuthProviderSettings]: - return BasicAuthProviderSettings - - @classmethod - def library_settings_class( - cls, - ) -> type[BasicAuthProviderLibrarySettings]: - return BasicAuthProviderLibrarySettings - @abstractmethod def remote_patron_lookup( self, patron_or_patrondata: PatronData | Patron diff --git a/api/authentication/basic_token.py b/api/authentication/basic_token.py index 628210e685..5e04072313 100644 --- a/api/authentication/basic_token.py +++ b/api/authentication/basic_token.py @@ -1,15 +1,20 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Generator, cast +from typing import TYPE_CHECKING, Generator, Type, cast from flask import url_for from sqlalchemy.orm import Session from werkzeug.datastructures import Authorization from api.authentication.access_token import AccessTokenProvider -from api.authentication.base import AuthenticationProvider +from api.authentication.base import ( + AuthenticationProvider, + AuthProviderLibrarySettings, + AuthProviderSettings, +) from api.authentication.basic import BasicAuthenticationProvider from api.problem_details import PATRON_AUTH_ACCESS_TOKEN_INVALID +from core.integration.base import LibrarySettingsType, SettingsType from core.model import Patron, Session, get_one from core.selftest import SelfTestResult from core.util.problem_detail import ProblemDetail, ProblemError @@ -18,11 +23,21 @@ from core.model import Library -class BasicTokenAuthenticationProvider(AuthenticationProvider): +class BasicTokenAuthenticationProvider( + AuthenticationProvider[AuthProviderSettings, AuthProviderLibrarySettings] +): """Patron Authentication based on a CM generated Access Token It is a companion to the basic authentication, and has no meaning without it. 
""" + @classmethod + def library_settings_class(cls) -> Type[LibrarySettingsType]: + raise NotImplementedError() + + @classmethod + def settings_class(cls) -> Type[SettingsType]: + raise NotImplementedError() + FLOW_TYPE = "http://thepalaceproject.org/authtype/basic-token" def __init__( diff --git a/api/authenticator.py b/api/authenticator.py index 865d36d3f5..4dd42f0079 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -16,7 +16,11 @@ from api.adobe_vendor_id import AuthdataUtility from api.annotations import AnnotationWriter from api.authentication.access_token import AccessTokenProvider -from api.authentication.base import AuthenticationProvider +from api.authentication.base import ( + AuthenticationProvider, + LibrarySettingsType, + SettingsType, +) from api.authentication.basic import BasicAuthenticationProvider from api.authentication.basic_token import BasicTokenAuthenticationProvider from api.config import CannotLoadConfiguration, Configuration @@ -842,7 +846,9 @@ def bearer_token_signing_secret(cls, db): return ConfigurationSetting.sitewide_secret(db, cls.BEARER_TOKEN_SIGNING_SECRET) -class BaseSAMLAuthenticationProvider(AuthenticationProvider, BearerTokenSigner, ABC): +class BaseSAMLAuthenticationProvider( + AuthenticationProvider[SettingsType, LibrarySettingsType], BearerTokenSigner, ABC +): """ Base class for SAML authentication providers """ diff --git a/api/axis.py b/api/axis.py index 1c42af4972..dc3b3e5cbf 100644 --- a/api/axis.py +++ b/api/axis.py @@ -44,6 +44,7 @@ FulfillmentInfo, HoldInfo, LoanInfo, + PatronActivityCirculationAPI, ) from api.circulation_exceptions import * from api.selftest import HasCollectionSelfTests, SelfTestResult @@ -158,16 +159,13 @@ class Axis360LibrarySettings(BaseCirculationLoanSettings): class Axis360API( - BaseCirculationAPI[Axis360Settings, Axis360LibrarySettings], + PatronActivityCirculationAPI[Axis360Settings, Axis360LibrarySettings], HasCollectionSelfTests, CirculationInternalFormatsMixin, Axis360APIConstants, ): - NAME = ExternalIntegration.AXIS_360 - SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.BORROW_STEP - SERVICE_NAME = "Axis 360" DATE_FORMAT = "%m-%d-%Y %H:%M:%S" access_token_endpoint = "accesstoken" @@ -207,7 +205,7 @@ def library_settings_class(cls) -> Type[Axis360LibrarySettings]: @classmethod def label(cls) -> str: - return cls.NAME + return ExternalIntegration.AXIS_360 @classmethod def description(cls) -> str: @@ -222,12 +220,12 @@ def __init__(self, _db: Session, collection: Collection) -> None: super().__init__(_db, collection) self.library_id = collection.external_account_id or "" - config = self.configuration() - self.username = config.username - self.password = config.password + settings = self.settings + self.username = settings.username + self.password = settings.password # Convert the nickname for a server into an actual URL. 
- base_url = config.url or self.PRODUCTION_BASE_URL + base_url = settings.url or self.PRODUCTION_BASE_URL if base_url in self.SERVER_NICKNAMES: base_url = self.SERVER_NICKNAMES[base_url] if not base_url.endswith("/"): @@ -239,7 +237,9 @@ def __init__(self, _db: Session, collection: Collection) -> None: self.token: Optional[str] = None self.verify_certificate: bool = ( - config.verify_certificate if config.verify_certificate is not None else True + settings.verify_certificate + if settings.verify_certificate is not None + else True ) @property @@ -403,7 +403,7 @@ def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: response.content ) except etree.XMLSyntaxError as e: - raise RemoteInitiatedServerError(response.content, self.SERVICE_NAME) + raise RemoteInitiatedServerError(response.content, self.label()) def _checkin( self, title_id: Optional[str], patron_id: Optional[str] @@ -447,7 +447,7 @@ def checkout( raise CannotLoan() return loan_info except etree.XMLSyntaxError as e: - raise RemoteInitiatedServerError(response.content, self.SERVICE_NAME) + raise RemoteInitiatedServerError(response.content, self.label()) def _checkout( self, title_id: Optional[str], patron_id: Optional[str], internal_format: str diff --git a/api/bibliotheca.py b/api/bibliotheca.py index 8dcdfdb556..64e81448bd 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -25,6 +25,7 @@ FulfillmentInfo, HoldInfo, LoanInfo, + PatronActivityCirculationAPI, ) from api.circulation_exceptions import * from api.selftest import HasCollectionSelfTests, SelfTestResult @@ -118,10 +119,9 @@ class BibliothecaLibrarySettings(BaseCirculationLoanSettings): class BibliothecaAPI( - BaseCirculationAPI[BibliothecaSettings, BibliothecaLibrarySettings], + PatronActivityCirculationAPI[BibliothecaSettings, BibliothecaLibrarySettings], HasCollectionSelfTests, ): - NAME = ExternalIntegration.BIBLIOTHECA AUTH_TIME_FORMAT = "%a, %d %b %Y %H:%M:%S GMT" ARGUMENT_TIME_FORMAT = "%Y-%m-%dT%H:%M:%S" AUTHORIZATION_FORMAT = "3MCLAUTH %s:%s" @@ -147,10 +147,12 @@ def settings_class(cls): def library_settings_class(cls): return BibliothecaLibrarySettings - def label(self): - return self.NAME + @classmethod + def label(cls): + return ExternalIntegration.BIBLIOTHECA - def description(self): + @classmethod + def description(cls): return "" def __init__(self, _db, collection): @@ -163,10 +165,10 @@ def __init__(self, _db, collection): super().__init__(_db, collection) self._db = _db - config = self.configuration() + settings = self.settings self.version = self.DEFAULT_VERSION - self.account_id = config.username - self.account_key = config.password + self.account_id = settings.username + self.account_key = settings.password self.library_id = collection.external_account_id self.base_url = self.DEFAULT_BASE_URL diff --git a/api/circulation.py b/api/circulation.py index a7267be4c9..d35da11c2d 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -11,7 +11,6 @@ TYPE_CHECKING, Any, Dict, - Generic, Iterable, List, Literal, @@ -27,9 +26,7 @@ from sqlalchemy.orm import Query from api.circulation_exceptions import * -from api.integration.registry.license_providers import ( - CirculationLicenseProvidersRegistry, -) +from api.integration.registry.license_providers import LicenseProvidersRegistry from api.util.patron import PatronUtility from core.analytics import Analytics from core.config import CannotLoadConfiguration @@ -542,39 +539,6 @@ class BaseCirculationLoanSettings(BaseSettings): ) -SettingsType = TypeVar("SettingsType", 
bound=BaseSettings, covariant=True) -LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) - - -class CirculationConfigurationMixin( - Generic[SettingsType, LibrarySettingsType], HasLibraryIntegrationConfiguration, ABC -): - _db: Session - _integration_configuration_id: Optional[int] - - def integration_configuration(self) -> IntegrationConfiguration: - config = get_one( - self._db, IntegrationConfiguration, id=self._integration_configuration_id - ) - if config is None: - raise ValueError( - f"No Configuration available for {self.__class__.__name__} (id={self._integration_configuration_id})" - ) - return config - - # We have to ignore the return values due to a known bug in mypy - # https://github.com/python/mypy/issues/10003 - def library_configuration(self, library_id: int) -> LibrarySettingsType | None: - libconfig = self.integration_configuration().for_library(library_id=library_id) - if libconfig: - config = self.library_settings_class()(**libconfig.settings_dict) - return config # type: ignore[return-value] - return None - - def configuration(self) -> SettingsType: - return self.settings_class()(**self.integration_configuration().settings_dict) # type: ignore[return-value] - - class CirculationInternalFormatsMixin: """A mixin for CirculationAPIs that have internal formats.""" @@ -608,9 +572,12 @@ def internal_format(self, delivery_mechanism: LicensePoolDeliveryMechanism) -> s return internal_format +SettingsType = TypeVar("SettingsType", bound=BaseSettings, covariant=True) +LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) + + class BaseCirculationAPI( - CirculationConfigurationMixin[SettingsType, LibrarySettingsType], - HasLibraryIntegrationConfiguration, + HasLibraryIntegrationConfiguration[SettingsType, LibrarySettingsType], LoggerMixin, ABC, ): @@ -656,6 +623,30 @@ def default_notification_email_address( library = library_or_patron return library.settings.default_notification_email_address + def integration_configuration(self) -> IntegrationConfiguration: + config = get_one( + self._db, IntegrationConfiguration, id=self._integration_configuration_id + ) + if config is None: + raise ValueError( + f"No Configuration available for {self.__class__.__name__} (id={self._integration_configuration_id})" + ) + return config + + @property + def settings(self) -> SettingsType: + return self.settings_class()(**self.integration_configuration().settings_dict) + + def library_settings(self, library: Library | int) -> LibrarySettingsType | None: + library_id = library.id if isinstance(library, Library) else library + if library_id is None: + return None + libconfig = self.integration_configuration().for_library(library_id=library_id) + if libconfig is None: + return None + config = self.library_settings_class()(**libconfig.settings_dict) + return config + @abstractmethod def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: """Return a book early. @@ -705,13 +696,6 @@ def fulfill( """Get the actual resource file to the patron.""" ... - @abstractmethod - def patron_activity( - self, patron: Patron, pin: str - ) -> Iterable[LoanInfo | HoldInfo]: - """Return a patron's current checkouts and holds.""" - ... - @abstractmethod def place_hold( self, @@ -742,6 +726,22 @@ def update_availability(self, licensepool: LicensePool) -> None: ... 
+class PatronActivityCirculationAPI( + BaseCirculationAPI[SettingsType, LibrarySettingsType], ABC +): + """ + A CirculationAPI that can return a patron's current checkouts and holds, that + were made outside the Palace platform. + """ + + @abstractmethod + def patron_activity( + self, patron: Patron, pin: str + ) -> Iterable[LoanInfo | HoldInfo]: + """Return a patron's current checkouts and holds.""" + ... + + class CirculationFulfillmentPostProcessor(ABC): """Generic interface for a circulation fulfillment post-processor, i.e., a class adding additional logic to the fulfillment process AFTER the circulation item has been fulfilled. @@ -821,7 +821,7 @@ def __init__( self.library_id = library.id self.analytics = analytics self.initialization_exceptions = dict() - self.registry = registry or CirculationLicenseProvidersRegistry() + self.registry = registry or LicenseProvidersRegistry() fulfillment_post_processors_mapping = ( fulfillment_post_processors_map or self.default_fulfillment_post_processors_map @@ -855,7 +855,8 @@ def __init__( self.initialization_exceptions[collection.id] = exception if api: self.api_for_collection[collection.id] = api - self.collection_ids_for_sync.append(collection.id) + if isinstance(api, PatronActivityCirculationAPI): + self.collection_ids_for_sync.append(collection.id) if ( collection.protocol in fulfillment_post_processors_mapping @@ -1049,15 +1050,6 @@ def borrow( now = utc_now() api = self.api_for_license_pool(licensepool) - if licensepool.open_access or (not api and licensepool.unlimited_access): - # We can 'loan' open-access content ourselves just by - # putting a row in the database. - __transaction = self._db.begin_nested() - loan, is_new = licensepool.loan_to(patron, start=now, end=None) - __transaction.commit() - self._collect_checkout_event(patron, licensepool) - return loan, None, is_new - # Okay, it's not an open-access book. This means we need to go # to an external service to get the book. @@ -1084,8 +1076,9 @@ def borrow( loan_info = None hold_info = None - if existing_loan: - # Sync with the API to see if the loan still exists. If + if existing_loan and isinstance(api, PatronActivityCirculationAPI): + # If we are able to sync patrons loans and holds from the + # remote API, we do that to see if the loan still exists. If # it does, we still want to perform a 'checkout' operation # on the API, because that's how loans are renewed, but # certain error conditions (like NoAvailableCopies) mean @@ -1311,6 +1304,12 @@ def enforce_limits(self, patron: Patron, pool: LicensePool) -> None: :raises PatronHoldLimitReached: If `pool` is currently unavailable and the patron is at their hold limit. """ + if pool.open_access or pool.unlimited_access: + # Open-access books and books with unlimited access + # are able to be checked out even if the patron is + # at their loan limit. + return + at_loan_limit = self.patron_at_loan_limit(patron) at_hold_limit = self.patron_at_hold_limit(patron) @@ -1429,10 +1428,14 @@ def fulfill( license_pool=licensepool, on_multiple="interchangeable", ) + api = self.api_for_license_pool(licensepool) + if not api: + raise CannotFulfill() + if not loan and not self.can_fulfill_without_loan( patron, licensepool, delivery_mechanism ): - if sync_on_failure: + if sync_on_failure and isinstance(api, PatronActivityCirculationAPI): # Sync and try again. # TODO: Pass in only the single collection or LicensePool # that needs to be synced. 
@@ -1459,37 +1462,18 @@ def fulfill( ) ) - api = self.api_for_license_pool(licensepool) - - if licensepool.open_access or (not api and licensepool.unlimited_access): - # We ignore the vendor-specific arguments when doing - # open-access fulfillment, because we just don't support - # partial fulfillment of open-access content. - fulfillment = self.fulfill_open_access( - licensepool, - delivery_mechanism.delivery_mechanism, - ) - - fulfillment = self._post_process_fulfillment( - patron, pin, licensepool, delivery_mechanism, fulfillment - ) - - else: - if not api: - raise CannotFulfill() - - fulfillment = api.fulfill( - patron, - pin, - licensepool, - delivery_mechanism=delivery_mechanism, - ) - if not fulfillment or not (fulfillment.content_link or fulfillment.content): - raise NoAcceptableFormat() + fulfillment = api.fulfill( + patron, + pin, + licensepool, + delivery_mechanism=delivery_mechanism, + ) + if not fulfillment or not (fulfillment.content_link or fulfillment.content): + raise NoAcceptableFormat() - fulfillment = self._post_process_fulfillment( - patron, pin, licensepool, delivery_mechanism, fulfillment - ) + fulfillment = self._post_process_fulfillment( + patron, pin, licensepool, delivery_mechanism, fulfillment + ) # Send out an analytics event to record the fact that # a fulfillment was initiated through the circulation @@ -1511,58 +1495,6 @@ def fulfill( return fulfillment - def fulfill_open_access( - self, licensepool: LicensePool, delivery_mechanism: DeliveryMechanism - ) -> FulfillmentInfo: - """Fulfill an open-access LicensePool through the requested - DeliveryMechanism. - - :param licensepool: The title to be fulfilled. - :param delivery_mechanism: A DeliveryMechanism. - """ - if isinstance(delivery_mechanism, LicensePoolDeliveryMechanism): - self.log.warning( - "LicensePoolDeliveryMechanism passed into fulfill_open_access, should be DeliveryMechanism." - ) - delivery_mechanism = delivery_mechanism.delivery_mechanism - fulfillment = None - for lpdm in licensepool.delivery_mechanisms: - if not ( - lpdm.resource - and lpdm.resource.representation - and lpdm.resource.representation.url - ): - # This LicensePoolDeliveryMechanism can't actually - # be used for fulfillment. - continue - if lpdm.delivery_mechanism == delivery_mechanism: - # We found it! This is how the patron wants - # the book to be delivered. - fulfillment = lpdm - break - - if not fulfillment: - # There is just no way to fulfill this loan the way the - # patron wants. - raise FormatNotAvailable() - - rep = fulfillment.resource.representation - if rep: - content_link = rep.public_url - else: - content_link = fulfillment.resource.url - media_type = rep.media_type - return FulfillmentInfo( - licensepool.collection, - licensepool.data_source.name, - identifier_type=licensepool.identifier.type, - identifier=licensepool.identifier.identifier, - content_link=content_link, - content_type=media_type, - content=None, - content_expires=None, - ) - def revoke_loan( self, patron: Patron, pin: str, licensepool: LicensePool ) -> Literal[True]: @@ -1576,13 +1508,17 @@ def revoke_loan( ) if loan is not None: api = self.api_for_license_pool(licensepool) - if not (api is None or licensepool.open_access): - try: - api.checkin(patron, pin, licensepool) - except NotCheckedOut as e: - # The book wasn't checked out in the first - # place. Everything's fine. - pass + if api is None: + self.log.error( + f"Patron: {patron!r} tried to revoke loan for licensepool: {licensepool!r} but no api was found." 
+ ) + raise CannotReturn("No API available.") + try: + api.checkin(patron, pin, licensepool) + except NotCheckedOut as e: + # The book wasn't checked out in the first + # place. Everything's fine. + pass __transaction = self._db.begin_nested() logging.info(f"In revoke_loan(), deleting loan #{loan.id}") @@ -1610,16 +1546,15 @@ def release_hold( license_pool=licensepool, on_multiple="interchangeable", ) - if not licensepool.open_access: - api = self.api_for_license_pool(licensepool) - if api is None: - raise TypeError(f"No api for licensepool: {licensepool}") - try: - api.release_hold(patron, pin, licensepool) - except NotOnHold as e: - # The book wasn't on hold in the first place. Everything's - # fine. - pass + api = self.api_for_license_pool(licensepool) + if api is None: + raise TypeError(f"No api for licensepool: {licensepool}") + try: + api.release_hold(patron, pin, licensepool) + except NotOnHold: + # The book wasn't on hold in the first place. Everything's + # fine. + pass # Any other CannotReleaseHold exception will be propagated # upwards at this point if hold: @@ -1655,7 +1590,7 @@ def patron_activity( class PatronActivityThread(Thread): def __init__( self, - api: BaseCirculationAPI[BaseSettings, BaseSettings], + api: PatronActivityCirculationAPI[BaseSettings, BaseSettings], patron: Patron, pin: str, ) -> None: @@ -1692,8 +1627,9 @@ def run(self) -> None: threads = [] before = time.time() for api in list(self.api_for_collection.values()): - thread = PatronActivityThread(api, patron, pin) - threads.append(thread) + if isinstance(api, PatronActivityCirculationAPI): + thread = PatronActivityThread(api, patron, pin) + threads.append(thread) for thread in threads: thread.start() for thread in threads: diff --git a/api/controller.py b/api/controller.py index 1908fc549a..1339297daf 100644 --- a/api/controller.py +++ b/api/controller.py @@ -2394,7 +2394,7 @@ def notify(self, loan_id): return NO_ACTIVE_LOAN.detailed(_("No loan was found for this identifier.")) collection = loan.license_pool.collection - if collection.protocol not in (ODLAPI.NAME, ODL2API.NAME): + if collection.protocol not in (ODLAPI.label(), ODL2API.label()): return INVALID_LOAN_FOR_ODL_NOTIFICATION api = self.manager.circulation_apis[library.id].api_for_license_pool( diff --git a/api/discovery/opds_registration.py b/api/discovery/opds_registration.py index 49cb6bae08..f5fc22555d 100644 --- a/api/discovery/opds_registration.py +++ b/api/discovery/opds_registration.py @@ -54,7 +54,9 @@ class OpdsRegistrationServiceSettings(BaseSettings): ) -class OpdsRegistrationService(HasIntegrationConfiguration): +class OpdsRegistrationService( + HasIntegrationConfiguration[OpdsRegistrationServiceSettings] +): """A circulation manager's view of a remote service that supports the OPDS Directory Registration Protocol: diff --git a/api/enki.py b/api/enki.py index b79b389631..bf8137dfac 100644 --- a/api/enki.py +++ b/api/enki.py @@ -11,7 +11,13 @@ from requests import Response as RequestsResponse from sqlalchemy.orm import Session -from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo +from api.circulation import ( + BaseCirculationAPI, + FulfillmentInfo, + HoldInfo, + LoanInfo, + PatronActivityCirculationAPI, +) from api.circulation_exceptions import * from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics @@ -87,7 +93,7 @@ class EnkiLibrarySettings(BaseSettings): class EnkiAPI( - BaseCirculationAPI[EnkiSettings, EnkiLibrarySettings], + 
PatronActivityCirculationAPI[EnkiSettings, EnkiLibrarySettings], HasCollectionSelfTests, EnkiConstants, ): @@ -138,16 +144,16 @@ def __init__(self, _db: Session, collection: Collection): super().__init__(_db, collection) self.collection_id = collection.id - self.base_url = self.configuration().url or self.PRODUCTION_BASE_URL + self.base_url = self.settings.url or self.PRODUCTION_BASE_URL def enki_library_id(self, library: Library) -> Optional[str]: """Find the Enki library ID for the given library.""" if library.id is None: return None - config = self.library_configuration(library.id) - if config is None: + settings = self.library_settings(library.id) + if settings is None: return None - return config.enki_library_id + return settings.enki_library_id def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: now = utc_now() diff --git a/api/firstbook2.py b/api/firstbook2.py index 6a88da5b40..f9e2dbd2fd 100644 --- a/api/firstbook2.py +++ b/api/firstbook2.py @@ -59,7 +59,9 @@ class FirstBookAuthSettings(BasicAuthProviderSettings): ) -class FirstBookAuthenticationAPI(BasicAuthenticationProvider): +class FirstBookAuthenticationAPI( + BasicAuthenticationProvider[FirstBookAuthSettings, BasicAuthProviderLibrarySettings] +): @classmethod def label(cls) -> str: return "First Book" @@ -75,6 +77,10 @@ def description(cls) -> str: def settings_class(cls) -> type[FirstBookAuthSettings]: return FirstBookAuthSettings + @classmethod + def library_settings_class(cls) -> type[BasicAuthProviderLibrarySettings]: + return BasicAuthProviderLibrarySettings + @property def login_button_image(self) -> str | None: return "FirstBookLoginButton280.png" diff --git a/api/integration/registry/license_providers.py b/api/integration/registry/license_providers.py index 9338cc0283..fe9ac4e7dc 100644 --- a/api/integration/registry/license_providers.py +++ b/api/integration/registry/license_providers.py @@ -1,29 +1,16 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Union +from typing import TYPE_CHECKING from core.integration.goals import Goals from core.integration.registry import IntegrationRegistry -from core.model.configuration import ExternalIntegration if TYPE_CHECKING: from api.circulation import BaseCirculationAPI # noqa: autoflake from core.integration.settings import BaseSettings # noqa: autoflake - from core.opds_import import BaseOPDSImporter # noqa: autoflake class LicenseProvidersRegistry( - IntegrationRegistry[ - Union["BaseCirculationAPI[BaseSettings, BaseSettings]", "BaseOPDSImporter"] - ] -): - def __init__(self) -> None: - super().__init__(Goals.LICENSE_GOAL) - self.update(CirculationLicenseProvidersRegistry()) - self.update(OpenAccessLicenseProvidersRegistry()) - - -class CirculationLicenseProvidersRegistry( IntegrationRegistry["BaseCirculationAPI[BaseSettings, BaseSettings]"] ): def __init__(self) -> None: @@ -37,22 +24,16 @@ def __init__(self) -> None: from api.odl2 import ODL2API from api.opds_for_distributors import OPDSForDistributorsAPI from api.overdrive import OverdriveAPI - - self.register(OverdriveAPI, canonical=ExternalIntegration.OVERDRIVE) - self.register(OdiloAPI, canonical=ExternalIntegration.ODILO) - self.register(BibliothecaAPI, canonical=ExternalIntegration.BIBLIOTHECA) - self.register(Axis360API, canonical=ExternalIntegration.AXIS_360) - self.register(EnkiAPI, canonical=EnkiAPI.ENKI_EXTERNAL) - self.register(OPDSForDistributorsAPI, canonical=OPDSForDistributorsAPI.NAME) - self.register(ODLAPI, canonical=ODLAPI.NAME) - self.register(ODL2API, 
canonical=ODL2API.NAME) - - -class OpenAccessLicenseProvidersRegistry(IntegrationRegistry["BaseOPDSImporter"]): - def __init__(self) -> None: - super().__init__(Goals.LICENSE_GOAL) - from core.opds2_import import OPDS2Importer - from core.opds_import import OPDSImporter - - self.register(OPDSImporter, canonical=OPDSImporter.NAME) - self.register(OPDS2Importer, canonical=OPDS2Importer.NAME) + from core.opds2_import import OPDS2API + from core.opds_import import OPDSAPI + + self.register(OverdriveAPI, canonical=OverdriveAPI.label()) + self.register(OdiloAPI, canonical=OdiloAPI.label()) + self.register(BibliothecaAPI, canonical=BibliothecaAPI.label()) + self.register(Axis360API, canonical=Axis360API.label()) + self.register(EnkiAPI, canonical=EnkiAPI.label()) + self.register(OPDSForDistributorsAPI, canonical=OPDSForDistributorsAPI.label()) + self.register(ODLAPI, canonical=ODLAPI.label()) + self.register(ODL2API, canonical=ODL2API.label()) + self.register(OPDSAPI, canonical=OPDSAPI.label()) + self.register(OPDS2API, canonical=OPDS2API.label()) diff --git a/api/integration/registry/patron_auth.py b/api/integration/registry/patron_auth.py index 2587055666..80cb1cd5fb 100644 --- a/api/integration/registry/patron_auth.py +++ b/api/integration/registry/patron_auth.py @@ -6,10 +6,10 @@ from core.integration.registry import IntegrationRegistry if TYPE_CHECKING: - from api.authentication.base import AuthenticationProvider # noqa: autoflake + from api.authentication.base import AuthenticationProviderType # noqa: autoflake -class PatronAuthRegistry(IntegrationRegistry["AuthenticationProvider"]): +class PatronAuthRegistry(IntegrationRegistry["AuthenticationProviderType"]): def __init__(self) -> None: super().__init__(Goals.PATRON_AUTH_GOAL) from api.firstbook2 import FirstBookAuthenticationAPI diff --git a/api/kansas_patron.py b/api/kansas_patron.py index 9f6603974c..8d44bb32c3 100644 --- a/api/kansas_patron.py +++ b/api/kansas_patron.py @@ -25,7 +25,9 @@ class KansasAuthSettings(BasicAuthProviderSettings): ) -class KansasAuthenticationAPI(BasicAuthenticationProvider): +class KansasAuthenticationAPI( + BasicAuthenticationProvider[KansasAuthSettings, BasicAuthProviderLibrarySettings] +): @classmethod def label(cls) -> str: return "Kansas" @@ -38,6 +40,10 @@ def description(cls) -> str: def settings_class(cls) -> Type[KansasAuthSettings]: return KansasAuthSettings + @classmethod + def library_settings_class(cls) -> Type[BasicAuthProviderLibrarySettings]: + return BasicAuthProviderLibrarySettings + def __init__( self, library_id: int, diff --git a/api/millenium_patron.py b/api/millenium_patron.py index 552b9ff3d3..a5085a7b80 100644 --- a/api/millenium_patron.py +++ b/api/millenium_patron.py @@ -166,7 +166,9 @@ class MilleniumPatronLibrarySettings(BasicAuthProviderLibrarySettings): ) -class MilleniumPatronAPI(BasicAuthenticationProvider): +class MilleniumPatronAPI( + BasicAuthenticationProvider[MilleniumPatronSettings, MilleniumPatronLibrarySettings] +): @classmethod def label(cls) -> str: return "Millenium" diff --git a/api/monitor.py b/api/monitor.py index 209f41b9b2..cfe3619253 100644 --- a/api/monitor.py +++ b/api/monitor.py @@ -17,7 +17,7 @@ class LoanlikeReaperMonitor(ReaperMonitor): SOURCE_OF_TRUTH_PROTOCOLS = [ - ODLAPI.NAME, + ODLAPI.label(), ExternalIntegration.OPDS_FOR_DISTRIBUTORS, ] diff --git a/api/odilo.py b/api/odilo.py index e92d9c9cce..4e2e985ad1 100644 --- a/api/odilo.py +++ b/api/odilo.py @@ -14,6 +14,7 @@ FulfillmentInfo, HoldInfo, LoanInfo, + PatronActivityCirculationAPI, ) from 
api.circulation_exceptions import * from api.selftest import HasCollectionSelfTests, SelfTestResult @@ -349,7 +350,7 @@ class OdiloLibrarySettings(BaseSettings): class OdiloAPI( - BaseCirculationAPI[OdiloSettings, OdiloLibrarySettings], + PatronActivityCirculationAPI[OdiloSettings, OdiloLibrarySettings], HasCollectionSelfTests, CirculationInternalFormatsMixin, ): @@ -406,11 +407,13 @@ def settings_class(cls): def library_settings_class(cls): return OdiloLibrarySettings - def label(self): - return self.NAME + @classmethod + def label(cls): + return cls.NAME - def description(self): - return self.DESCRIPTION + @classmethod + def description(cls): + return cls.DESCRIPTION def __init__(self, _db, collection): self.odilo_bibliographic_coverage_provider = OdiloBibliographicCoverageProvider( @@ -428,10 +431,10 @@ def __init__(self, _db, collection): self.collection_id = collection.id self.token = None - config = self.configuration() - self.client_key = config.username - self.client_secret = config.password - self.library_api_base_url = config.library_api_base_url + settings = self.settings + self.client_key = settings.username + self.client_secret = settings.password + self.library_api_base_url = settings.library_api_base_url if ( not self.client_key @@ -441,9 +444,13 @@ def __init__(self, _db, collection): raise CannotLoadConfiguration("Odilo configuration is incomplete.") # Use utf8 instead of unicode encoding - settings = [self.client_key, self.client_secret, self.library_api_base_url] + settings_encoded = [ + self.client_key, + self.client_secret, + self.library_api_base_url, + ] self.client_key, self.client_secret, self.library_api_base_url = ( - setting.encode("utf8") for setting in settings + setting.encode("utf8") for setting in settings_encoded ) # Get set up with up-to-date credentials from the API. diff --git a/api/odl.py b/api/odl.py index 62e224e0fe..289ba2532b 100644 --- a/api/odl.py +++ b/api/odl.py @@ -5,7 +5,7 @@ import json import uuid from abc import ABC -from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type +from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, TypeVar import dateutil from flask import url_for @@ -22,12 +22,12 @@ FulfillmentInfo, HoldInfo, LoanInfo, + PatronActivityCirculationAPI, ) from api.circulation_exceptions import * from api.lcp.hash import Hasher, HasherFactory, HashingAlgorithm from core import util from core.analytics import Analytics -from core.importers import BaseImporterSettings from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, @@ -63,6 +63,7 @@ from core.opds_import import ( BaseOPDSImporter, OPDSImporter, + OPDSImporterSettings, OPDSImportMonitor, OPDSXMLParser, ) @@ -74,16 +75,12 @@ class ODLAPIConstants: DEFAULT_PASSPHRASE_HINT = "View the help page for more information." 
DEFAULT_PASSPHRASE_HINT_URL = "https://lyrasis.zendesk.com/" - DEFAULT_ENCRYPTION_ALGORITHM = HashingAlgorithm.SHA256.value -class ODLSettings(BaseImporterSettings): +class ODLSettings(OPDSImporterSettings): external_account_id: Optional[HttpUrl] = FormField( - key=Collection.EXTERNAL_ACCOUNT_ID_KEY, form=ConfigurationFormItem( label=_("ODL feed URL"), - description="", - type=ConfigurationFormItemType.TEXT, required=True, ), ) @@ -91,8 +88,6 @@ class ODLSettings(BaseImporterSettings): username: str = FormField( form=ConfigurationFormItem( label=_("Library's API username"), - description="", - type=ConfigurationFormItemType.TEXT, required=True, ) ) @@ -101,21 +96,10 @@ class ODLSettings(BaseImporterSettings): key=ExternalIntegration.PASSWORD, form=ConfigurationFormItem( label=_("Library's API password"), - description="", - type=ConfigurationFormItemType.TEXT, required=True, ), ) - data_source: str = FormField( - form=ConfigurationFormItem( - label=_("Data source name"), - description="", - type=ConfigurationFormItemType.TEXT, - required=True, - ) - ) - default_reservation_period: Optional[PositiveInt] = FormField( default=Collection.STANDARD_DEFAULT_RESERVATION_PERIOD, form=ConfigurationFormItem( @@ -152,14 +136,14 @@ class ODLSettings(BaseImporterSettings): ), ) - encryption_algorithm: Optional[str] = FormField( - default=ODLAPIConstants.DEFAULT_ENCRYPTION_ALGORITHM, + encryption_algorithm: HashingAlgorithm = FormField( + default=HashingAlgorithm.SHA256, form=ConfigurationFormItem( label=_("Passphrase encryption algorithm"), description=_("Algorithm used for encrypting the passphrase."), type=ConfigurationFormItemType.SELECT, required=False, - options=ConfigurationFormItemType.options_from_enum(HashingAlgorithm), + options={alg: alg.name for alg in HashingAlgorithm}, ), ) @@ -168,9 +152,13 @@ class ODLLibrarySettings(BaseCirculationEbookLoanSettings): pass -class ODLAPI( - BaseCirculationAPI[ODLSettings, ODLLibrarySettings], -): +SettingsType = TypeVar("SettingsType", bound=ODLSettings, covariant=True) +LibrarySettingsType = TypeVar( + "LibrarySettingsType", bound=ODLLibrarySettings, covariant=True +) + + +class BaseODLAPI(PatronActivityCirculationAPI[SettingsType, LibrarySettingsType], ABC): """ODL (Open Distribution to Libraries) is a specification that allows libraries to manage their own loans and holds. It offers a deeper level of control to the library, but it requires the circulation manager to @@ -179,16 +167,8 @@ class ODLAPI( In addition to circulating books to patrons of a library on the current circulation manager, this API can be used to circulate books to patrons of external libraries. - Only one circulation manager per ODL collection should use an ODLAPI - - the others should use a SharedODLAPI and configure it to connect to the main - circulation manager. """ - NAME = ExternalIntegration.ODL - DESCRIPTION = _( - "Import books from a distributor that uses ODL (Open Distribution to Libraries)." 
- ) - SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.FULFILL_STEP # Possible status values in the License Status Document: @@ -220,74 +200,36 @@ class ODLAPI( EXPIRED_STATUS, ] - @classmethod - def settings_class(cls) -> Type[ODLSettings]: - return ODLSettings - - @classmethod - def library_settings_class(cls) -> Type[ODLLibrarySettings]: - return ODLLibrarySettings - - @classmethod - def label(cls) -> str: - return cls.NAME - - @classmethod - def description(cls) -> str: - return cls.DESCRIPTION # type: ignore[no-any-return] - def __init__(self, _db: Session, collection: Collection) -> None: super().__init__(_db, collection) - if collection.protocol != self.NAME: + if collection.protocol != self.label(): raise ValueError( "Collection protocol is %s, but passed into %s!" % (collection.protocol, self.__class__.__name__) ) self.collection_id = collection.id - config = self.configuration() - self.data_source_name = config.data_source + settings = self.settings + self.data_source_name = settings.data_source # Create the data source if it doesn't exist yet. DataSource.lookup(_db, self.data_source_name, autocreate=True) - self.username = config.username - self.password = config.password + self.username = settings.username + self.password = settings.password self.analytics = Analytics(_db) self._hasher_factory = HasherFactory() self._credential_factory = LCPCredentialFactory() self._hasher_instance: Optional[Hasher] = None - def external_integration(self, db: Session) -> ExternalIntegration: - """Return an external integration associated with this object. - - :param db: Database session - :return: External integration associated with this object - """ - return self.collection.external_integration - - @property - def collection(self) -> Collection: - """Return a collection associated with this object. 
- - :param db: Database session - :return: Collection associated with this object - """ - collection = super().collection - if not collection: - raise ValueError(f"Collection not found: {self.collection_id}") - return collection - def _get_hasher(self) -> Hasher: """Returns a Hasher instance :return: Hasher instance """ - config = self.configuration() + settings = self.settings if self._hasher_instance is None: self._hasher_instance = self._hasher_factory.create( - config.encryption_algorithm # type: ignore[arg-type] - if config.encryption_algorithm - else ODLAPIConstants.DEFAULT_ENCRYPTION_ALGORITHM + settings.encryption_algorithm ) return self._hasher_instance @@ -326,6 +268,8 @@ def get_license_status_document(self, loan: Loan) -> Dict[str, Any]: else: id = loan.license.identifier checkout_id = str(uuid.uuid1()) + if self.collection is None: + raise ValueError(f"Collection not found: {self.collection_id}") default_loan_period = self.collection.default_loan_period( loan.patron.library ) @@ -355,7 +299,6 @@ def get_license_status_document(self, loan: Loan) -> Dict[str, Any]: _external=True, ) - config = self.configuration() checkout_url = str(loan.license.checkout_url) url_template = URITemplate(checkout_url) url = url_template.expand( @@ -365,8 +308,8 @@ def get_license_status_document(self, loan: Loan) -> Dict[str, Any]: expires=expires.isoformat(), notification_url=notification_url, passphrase=encoded_pass, - hint=config.passphrase_hint, - hint_url=config.passphrase_hint_url, + hint=self.settings.passphrase_hint, + hint_url=self.settings.passphrase_hint_url, ) response = self._get(url) @@ -383,7 +326,7 @@ def get_license_status_document(self, loan: Loan) -> Dict[str, Any]: ) return status_doc # type: ignore[no-any-return] - def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> bool: # type: ignore[override] + def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: """Return a loan early.""" _db = Session.object_session(patron) @@ -395,7 +338,7 @@ def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> bool: if loan.count() < 1: raise NotCheckedOut() loan_result = loan.one() - return self._checkin(loan_result) + self._checkin(loan_result) def _checkin(self, loan: Loan) -> bool: _db = Session.object_session(loan) @@ -673,6 +616,8 @@ def _update_hold_end_date( self._update_hold_position(holdinfo, pool) assert holdinfo.hold_position is not None + if self.collection is None: + raise ValueError(f"Collection not found: {self.collection_id}") default_loan_period = self.collection.default_loan_period(library) default_reservation_period = self.collection.default_reservation_period @@ -954,11 +899,41 @@ def update_availability(self, licensepool: LicensePool) -> None: pass +class ODLAPI( + BaseODLAPI[ODLSettings, ODLLibrarySettings], +): + """ODL (Open Distribution to Libraries) is a specification that allows + libraries to manage their own loans and holds. It offers a deeper level + of control to the library, but it requires the circulation manager to + keep track of individual copies rather than just license pools, and + manage its own holds queues. + + In addition to circulating books to patrons of a library on the current circulation + manager, this API can be used to circulate books to patrons of external libraries. 
+ """ + + @classmethod + def settings_class(cls) -> Type[ODLSettings]: + return ODLSettings + + @classmethod + def library_settings_class(cls) -> Type[ODLLibrarySettings]: + return ODLLibrarySettings + + @classmethod + def label(cls) -> str: + return ExternalIntegration.ODL + + @classmethod + def description(cls) -> str: + return "Import books from a distributor that uses ODL (Open Distribution to Libraries)." + + class ODLXMLParser(OPDSXMLParser): NAMESPACES = dict(OPDSXMLParser.NAMESPACES, odl="http://opds-spec.org/odl") -class BaseODLImporter(BaseOPDSImporter, ABC): +class BaseODLImporter(BaseOPDSImporter[SettingsType], ABC): FEEDBOOKS_AUDIO = "{}; protection={}".format( MediaTypes.AUDIOBOOK_MANIFEST_MEDIA_TYPE, DeliveryMechanism.FEEDBOOKS_AUDIOBOOK_DRM, @@ -1131,20 +1106,24 @@ def get_license_data( return parsed_license -class ODLImporter(OPDSImporter, BaseODLImporter): +class ODLImporter(OPDSImporter, BaseODLImporter[ODLSettings]): """Import information and formats from an ODL feed. The only change from OPDSImporter is that this importer extracts format information from 'odl:license' tags. """ - NAME = ODLAPI.NAME + NAME = ODLAPI.label() PARSER_CLASS = ODLXMLParser # The media type for a License Info Document, used to get information # about the license. LICENSE_INFO_DOCUMENT_MEDIA_TYPE = "application/vnd.odl.info+json" + @classmethod + def settings_class(cls) -> Type[ODLSettings]: + return ODLSettings + @classmethod def _detail_for_elementtree_entry( cls, @@ -1288,7 +1267,7 @@ class ODLHoldReaper(CollectionMonitor): the holds queues for their pools.""" SERVICE_NAME = "ODL Hold Reaper" - PROTOCOL = ODLAPI.NAME + PROTOCOL = ODLAPI.label() def __init__( self, diff --git a/api/odl2.py b/api/odl2.py index 0c2296eb11..d5654bab64 100644 --- a/api/odl2.py +++ b/api/odl2.py @@ -10,7 +10,7 @@ from webpub_manifest_parser.opds2.registry import OPDS2LinkRelationsRegistry from api.circulation_exceptions import PatronHoldLimitReached, PatronLoanLimitReached -from api.odl import ODLAPI, BaseODLImporter, ODLSettings +from api.odl import BaseODLAPI, BaseODLImporter, ODLLibrarySettings, ODLSettings from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, @@ -19,7 +19,12 @@ from core.metadata_layer import FormatData from core.model import Edition, RightsStatus from core.model.configuration import ExternalIntegration -from core.opds2_import import OPDS2Importer, OPDS2ImportMonitor, RWPMManifestParser +from core.opds2_import import ( + OPDS2Importer, + OPDS2ImporterSettings, + OPDS2ImportMonitor, + RWPMManifestParser, +) from core.util import first_or_default from core.util.datetime_helpers import to_utc @@ -32,8 +37,8 @@ from core.model.patron import Hold, Loan, Patron -class ODL2Settings(ODLSettings): - skipped_license_formats: Optional[List[str]] = FormField( +class ODL2Settings(OPDS2ImporterSettings, ODLSettings): + skipped_license_formats: List[str] = FormField( default=["text/html"], alias="odl2_skipped_license_formats", form=ConfigurationFormItem( @@ -73,18 +78,27 @@ class ODL2Settings(ODLSettings): ) -class ODL2API(ODLAPI): - NAME = ExternalIntegration.ODL2 - +class ODL2API(BaseODLAPI[ODL2Settings, ODLLibrarySettings]): @classmethod def settings_class(cls) -> Type[ODL2Settings]: return ODL2Settings + @classmethod + def library_settings_class(cls) -> Type[ODLLibrarySettings]: + return ODLLibrarySettings + + @classmethod + def label(cls) -> str: + return ExternalIntegration.ODL2 + + @classmethod + def description(cls) -> str: + return "Import books from a distributor 
that uses OPDS2 + ODL (Open Distribution to Libraries)." + def __init__(self, _db: Session, collection: Collection) -> None: super().__init__(_db, collection) - config = self.configuration() - self.loan_limit = config.loan_limit # type: ignore[attr-defined] - self.hold_limit = config.hold_limit # type: ignore[attr-defined] + self.loan_limit = self.settings.loan_limit + self.hold_limit = self.settings.hold_limit def _checkout( self, patron: Patron, licensepool: LicensePool, hold: Optional[Hold] = None @@ -115,17 +129,17 @@ def _place_hold(self, patron: Patron, licensepool: LicensePool) -> HoldInfo: return super()._place_hold(patron, licensepool) -class ODL2Importer(OPDS2Importer, BaseODLImporter): +class ODL2Importer(BaseODLImporter[ODL2Settings], OPDS2Importer): """Import information and formats from an ODL feed. The only change from OPDS2Importer is that this importer extracts FormatData and LicenseData from ODL 2.x's "licenses" arrays. """ - NAME = ODL2API.NAME + NAME = ODL2API.label() @classmethod - def settings_class(cls) -> Type[ODL2Settings]: # type: ignore[override] + def settings_class(cls) -> Type[ODL2Settings]: return ODL2Settings def __init__( @@ -186,9 +200,7 @@ def _extract_publication_metadata( licenses = [] medium = None - skipped_license_formats = self.configuration().skipped_license_formats # type: ignore[attr-defined] - if skipped_license_formats: - skipped_license_formats = set(skipped_license_formats) + skipped_license_formats = set(self.settings.skipped_license_formats) if publication.licenses: for odl_license in publication.licenses: @@ -285,7 +297,7 @@ def _extract_publication_metadata( class ODL2ImportMonitor(OPDS2ImportMonitor): """Import information from an ODL feed.""" - PROTOCOL = ODL2Importer.NAME + PROTOCOL = ODL2API.label() SERVICE_NAME = "ODL 2.x Import Monitor" def __init__( diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 24b7234024..0bf4848ec8 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -7,14 +7,13 @@ import feedparser from flask_babel import lazy_gettext as _ -from api.circulation import BaseCirculationAPI, FulfillmentInfo, LoanInfo +from api.circulation import FulfillmentInfo, LoanInfo, PatronActivityCirculationAPI from api.circulation_exceptions import ( CannotFulfill, DeliveryMechanismError, LibraryAuthorizationFailedException, ) from api.selftest import HasCollectionSelfTests -from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField from core.metadata_layer import FormatData, TimestampData from core.model import ( @@ -30,7 +29,7 @@ Session, get_one, ) -from core.opds_import import BaseOPDSImporterSettings, OPDSImporter, OPDSImportMonitor +from core.opds_import import OPDSImporter, OPDSImporterSettings, OPDSImportMonitor from core.util.datetime_helpers import utc_now from core.util.http import HTTP from core.util.string_helpers import base64 @@ -45,7 +44,7 @@ from core.selftest import SelfTestResult -class OPDSForDistributorsSettings(BaseOPDSImporterSettings): +class OPDSForDistributorsSettings(OPDSImporterSettings): username: str = FormField( form=ConfigurationFormItem( label=_("Library's username or access key"), @@ -66,14 +65,11 @@ class OPDSForDistributorsLibrarySettings(BaseSettings): class OPDSForDistributorsAPI( - BaseCirculationAPI[OPDSForDistributorsSettings, OPDSForDistributorsLibrarySettings], + PatronActivityCirculationAPI[ + OPDSForDistributorsSettings, 
OPDSForDistributorsLibrarySettings + ], HasCollectionSelfTests, - HasLibraryIntegrationConfiguration, ): - NAME = "OPDS for Distributors" - DESCRIPTION = _( - "Import books from a distributor that requires authentication to get the OPDS feed and download books." - ) BEARER_TOKEN_CREDENTIAL_TYPE = "OPDS For Distributors Bearer Token" # In OPDS For Distributors, all items are gated through the @@ -98,20 +94,20 @@ def library_settings_class(cls) -> Type[OPDSForDistributorsLibrarySettings]: @classmethod def description(cls) -> str: - return cls.DESCRIPTION # type: ignore[no-any-return] + return "Import books from a distributor that requires authentication to get the OPDS feed and download books." @classmethod def label(cls) -> str: - return cls.NAME + return "OPDS for Distributors" def __init__(self, _db: Session, collection: Collection): super().__init__(_db, collection) self.external_integration_id = collection.external_integration.id - config = self.configuration() - self.data_source_name = config.data_source - self.username = config.username - self.password = config.password + settings = self.settings + self.data_source_name = settings.data_source + self.username = settings.username + self.password = settings.password self.feed_url = collection.external_account_id self.auth_url: Optional[str] = None @@ -391,10 +387,10 @@ def update_availability(self, licensepool: LicensePool) -> None: class OPDSForDistributorsImporter(OPDSImporter): - NAME = OPDSForDistributorsAPI.NAME + NAME = OPDSForDistributorsAPI.label() @classmethod - def settings_class(cls) -> Type[OPDSForDistributorsSettings]: # type: ignore[override] + def settings_class(cls) -> Type[OPDSForDistributorsSettings]: return OPDSForDistributorsSettings def update_work_for_edition( diff --git a/api/overdrive.py b/api/overdrive.py index 48d0cd45b2..30cf7e1825 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -30,14 +30,15 @@ FulfillmentInfo, HoldInfo, LoanInfo, + PatronActivityCirculationAPI, ) from api.circulation_exceptions import * from api.circulation_exceptions import CannotFulfill from api.selftest import HasCollectionSelfTests, SelfTestResult from core.analytics import Analytics from core.config import CannotLoadConfiguration, Configuration +from core.connection_config import ConnectionSetting from core.coverage import BibliographicCoverageProvider -from core.importers import BaseImporterSettings from core.integration.base import HasChildIntegrationConfiguration from core.integration.settings import ( BaseSettings, @@ -130,7 +131,7 @@ class OverdriveConstants: ILS_NAME_DEFAULT = "default" -class OverdriveSettings(BaseImporterSettings): +class OverdriveSettings(ConnectionSetting): """The basic Overdrive configuration""" external_account_id: Optional[str] = FormField( @@ -203,17 +204,12 @@ class OverdriveChildSettings(BaseSettings): class OverdriveAPI( - BaseCirculationAPI, + PatronActivityCirculationAPI, CirculationInternalFormatsMixin, HasCollectionSelfTests, HasChildIntegrationConfiguration, OverdriveConstants, ): - NAME = ExternalIntegration.OVERDRIVE - DESCRIPTION = _( - "Integrate an Overdrive collection. For an Overdrive Advantage collection, select the consortium's Overdrive collection as the parent." 
- ) - SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.FULFILL_STEP # Create a lookup table between common DeliveryMechanism identifiers @@ -355,11 +351,13 @@ def library_settings_class(cls): def child_settings_class(cls): return OverdriveChildSettings - def label(self): - return self.NAME + @classmethod + def label(cls): + return ExternalIntegration.OVERDRIVE - def description(self): - return self.DESCRIPTION + @classmethod + def description(cls): + return "Integrate an Overdrive collection. For an Overdrive Advantage collection, select the consortium's Overdrive collection as the parent." def __init__(self, _db, collection): super().__init__(_db, collection) diff --git a/api/saml/provider.py b/api/saml/provider.py index 35f9d0e5fc..17271dc136 100644 --- a/api/saml/provider.py +++ b/api/saml/provider.py @@ -29,7 +29,11 @@ ) -class SAMLWebSSOAuthenticationProvider(BaseSAMLAuthenticationProvider): +class SAMLWebSSOAuthenticationProvider( + BaseSAMLAuthenticationProvider[ + SAMLWebSSOAuthSettings, SAMLWebSSOAuthLibrarySettings + ] +): """SAML authentication provider implementing Web Browser SSO profile using the following bindings: - HTTP-Redirect Binding for requests - HTTP-POST Binding for responses diff --git a/api/simple_authentication.py b/api/simple_authentication.py index 51789c6880..1cdc5b0b8c 100644 --- a/api/simple_authentication.py +++ b/api/simple_authentication.py @@ -50,7 +50,9 @@ class SimpleAuthSettings(BasicAuthProviderSettings): ) -class SimpleAuthenticationProvider(BasicAuthenticationProvider): +class SimpleAuthenticationProvider( + BasicAuthenticationProvider[SimpleAuthSettings, BasicAuthProviderLibrarySettings] +): """An authentication provider that authenticates a single patron. This serves only one purpose: to set up a working circulation @@ -73,6 +75,10 @@ def description(cls) -> str: def settings_class(cls) -> Type[SimpleAuthSettings]: return SimpleAuthSettings + @classmethod + def library_settings_class(cls) -> Type[BasicAuthProviderLibrarySettings]: + return BasicAuthProviderLibrarySettings + def __init__( self, library_id: int, diff --git a/api/sip/__init__.py b/api/sip/__init__.py index 9d0cac867a..07788a2014 100644 --- a/api/sip/__init__.py +++ b/api/sip/__init__.py @@ -189,7 +189,9 @@ class SIP2LibrarySettings(BasicAuthProviderLibrarySettings): ) -class SIP2AuthenticationProvider(BasicAuthenticationProvider): +class SIP2AuthenticationProvider( + BasicAuthenticationProvider[SIP2Settings, SIP2LibrarySettings] +): DATE_FORMATS = ["%Y%m%d", "%Y%m%d%Z%H%M%S", "%Y%m%d %H%M%S"] # Map the reasons why SIP2 might report a patron is blocked to the diff --git a/api/sirsidynix_authentication_provider.py b/api/sirsidynix_authentication_provider.py index 66bd117909..d096df38e9 100644 --- a/api/sirsidynix_authentication_provider.py +++ b/api/sirsidynix_authentication_provider.py @@ -90,7 +90,11 @@ class SirsiDynixHorizonAuthLibrarySettings(BasicAuthProviderLibrarySettings): ) -class SirsiDynixHorizonAuthenticationProvider(BasicAuthenticationProvider): +class SirsiDynixHorizonAuthenticationProvider( + BasicAuthenticationProvider[ + SirsiDynixHorizonAuthSettings, SirsiDynixHorizonAuthLibrarySettings + ] +): """SirsiDynix Authentication API implementation. Currently, is only used to authenticate patrons, there is no CRUD implemented for patron profiles. 
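The authentication-provider hunks above all converge on the same pattern: a concrete provider names its settings types in the generic subscript and returns them from classmethod hooks. Below is a minimal sketch of that pattern (not part of the patch), assuming only the classes shown in api/authentication/basic.py; ExampleAuthSettings and ExampleAuthenticationAPI are hypothetical names, and the remaining abstract methods (remote_patron_lookup, remote_authenticate, etc.) are omitted for brevity.

from __future__ import annotations

from api.authentication.basic import (
    BasicAuthenticationProvider,
    BasicAuthProviderLibrarySettings,
    BasicAuthProviderSettings,
)


class ExampleAuthSettings(BasicAuthProviderSettings):
    """Hypothetical provider-specific settings; a real provider declares FormFields here."""


class ExampleAuthenticationAPI(
    BasicAuthenticationProvider[ExampleAuthSettings, BasicAuthProviderLibrarySettings]
):
    # These classmethod hooks are what the admin settings controllers use to
    # describe the protocol and to rebuild typed settings from settings_dict.
    @classmethod
    def label(cls) -> str:
        return "Example"

    @classmethod
    def description(cls) -> str:
        return "A hypothetical provider used only to illustrate the generics."

    @classmethod
    def settings_class(cls) -> type[ExampleAuthSettings]:
        return ExampleAuthSettings

    @classmethod
    def library_settings_class(cls) -> type[BasicAuthProviderLibrarySettings]:
        return BasicAuthProviderLibrarySettings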
diff --git a/core/configuration/ignored_identifier.py b/core/configuration/ignored_identifier.py deleted file mode 100644 index 0b4b56ef63..0000000000 --- a/core/configuration/ignored_identifier.py +++ /dev/null @@ -1,89 +0,0 @@ -from typing import List, Optional, Sequence, Set, Union - -from flask_babel import lazy_gettext as _ - -from core.integration.settings import ( - BaseSettings, - ConfigurationFormItem, - ConfigurationFormItemType, - FormField, -) -from core.model.constants import IdentifierType -from core.model.integration import IntegrationConfiguration - -ALL_IGNORED_IDENTIFIER_TYPES = { - identifier_type.value for identifier_type in IdentifierType -} - - -class IgnoredIdentifierSettings(BaseSettings): - ignored_identifier_types: Optional[List[IdentifierType]] = FormField( - alias="IGNORED_IDENTIFIER_TYPE", - default=[], - form=ConfigurationFormItem( - label=_("List of identifiers that will be skipped"), - description=_( - "Circulation Manager will not be importing publications with identifiers having one of the selected types." - ), - type=ConfigurationFormItemType.MENU, - required=False, - options={ - identifier_type: identifier_type - for identifier_type in ALL_IGNORED_IDENTIFIER_TYPES - }, - format="narrow", - ), - ) - - -class IgnoredIdentifierImporterMixin: - """ - Mixin to track ignored identifiers within importers - The child class must contain an IgnoredIdentifierConfiguration - """ - - def __init__(self, *args, **kargs) -> None: - super().__init__(*args, **kargs) - self._ignored_identifier_types: Optional[Union[Set[str], list]] = None - - def _get_ignored_identifier_types( - self, configuration: IntegrationConfiguration - ) -> Union[Set[str], list]: - """Return a set of ignored identifier types. - :return: Set of ignored identifier types - """ - if self._ignored_identifier_types is None: - self._ignored_identifier_types = configuration.settings_dict.get( - "ignored_identifier_types", [] - ) - - return self._ignored_identifier_types - - def set_ignored_identifier_types( - self, - value: Sequence[Union[str, IdentifierType]], - configuration: IntegrationConfiguration, - ) -> None: - """Update the list of ignored identifier types. 
- - :param value: New list of ignored identifier types - """ - if not isinstance(value, (list, set)): - raise ValueError("Argument 'value' must be either a list of set") - - ignored_identifier_types = [] - - for item in value: - if isinstance(item, str): - ignored_identifier_types.append(item) - elif isinstance(item, IdentifierType): - ignored_identifier_types.append(item.value) - else: - raise ValueError( - "Argument 'value' must contain string or IdentifierType enumeration's items only" - ) - - settings = configuration.settings_dict.copy() - settings["ignored_identifier_types"] = ignored_identifier_types - configuration.settings_dict = settings - self._ignored_identifier_types = None diff --git a/core/connection_config.py b/core/connection_config.py index cf798db2f0..915631debe 100644 --- a/core/connection_config.py +++ b/core/connection_config.py @@ -1,5 +1,3 @@ -from typing import Optional - from flask_babel import lazy_gettext as _ from pydantic import PositiveInt @@ -12,7 +10,7 @@ class ConnectionSetting(BaseSettings): - max_retry_count: Optional[PositiveInt] = FormField( + max_retry_count: PositiveInt = FormField( default=3, alias="connection_max_retry_count", form=ConfigurationFormItem( diff --git a/core/importers.py b/core/importers.py deleted file mode 100644 index a7c4802bbf..0000000000 --- a/core/importers.py +++ /dev/null @@ -1,13 +0,0 @@ -from core.configuration.ignored_identifier import IgnoredIdentifierSettings -from core.connection_config import ConnectionSetting -from core.model.formats import FormatPrioritiesSettings -from core.saml.wayfless import SAMLWAYFlessSetttings - - -class BaseImporterSettings( - ConnectionSetting, - SAMLWAYFlessSetttings, - FormatPrioritiesSettings, - IgnoredIdentifierSettings, -): - pass diff --git a/core/integration/base.py b/core/integration/base.py index 80a2e53742..9be80b3f6c 100644 --- a/core/integration/base.py +++ b/core/integration/base.py @@ -1,14 +1,17 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, Type +from typing import Any, Generic, Type, TypeVar from sqlalchemy.orm import Session from core.integration.settings import BaseSettings +SettingsType = TypeVar("SettingsType", bound=BaseSettings, covariant=True) +LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) -class HasIntegrationConfiguration(ABC): + +class HasIntegrationConfiguration(Generic[SettingsType], ABC): @classmethod @abstractmethod def label(cls) -> str: @@ -23,7 +26,7 @@ def description(cls) -> str: @classmethod @abstractmethod - def settings_class(cls) -> Type[BaseSettings]: + def settings_class(cls) -> Type[SettingsType]: """Get the settings for this integration""" ... @@ -37,15 +40,19 @@ def protocol_details(cls, db: Session) -> dict[str, Any]: return {} -class HasLibraryIntegrationConfiguration(HasIntegrationConfiguration, ABC): +class HasLibraryIntegrationConfiguration( + Generic[SettingsType, LibrarySettingsType], + HasIntegrationConfiguration[SettingsType], + ABC, +): @classmethod @abstractmethod - def library_settings_class(cls) -> Type[BaseSettings]: + def library_settings_class(cls) -> Type[LibrarySettingsType]: """Get the library settings for this integration""" ... 
-class HasChildIntegrationConfiguration(HasIntegrationConfiguration, ABC): +class HasChildIntegrationConfiguration(HasIntegrationConfiguration[SettingsType], ABC): @classmethod @abstractmethod def child_settings_class(cls) -> Type[BaseSettings]: diff --git a/core/integration/settings.py b/core/integration/settings.py index 49aaa2b5a1..ec4eeb8071 100644 --- a/core/integration/settings.py +++ b/core/integration/settings.py @@ -8,9 +8,9 @@ Callable, Dict, List, + Mapping, Optional, Tuple, - Type, Union, ) @@ -154,10 +154,6 @@ class ConfigurationFormItemType(Enum): COLOR = "color-picker" IMAGE = "image" - @classmethod - def options_from_enum(cls, enum_: Type[Enum]) -> Dict[Enum | str, str]: - return {e.value: e.name for e in enum_} - @dataclass(frozen=True) class ConfigurationFormItem: @@ -186,7 +182,7 @@ class ConfigurationFormItem: # When the type is SELECT, LIST, or MENU, the options are used to populate the # field in the admin interface. This can either be a callable that returns a # dictionary of options or a dictionary of options. - options: Callable[[Session], Dict[Enum | str, str]] | Dict[ + options: Callable[[Session], Dict[Enum | str, str]] | Mapping[ Enum | str, str ] | None = None diff --git a/core/opds2_import.py b/core/opds2_import.py index 12a3e3da93..251860c3d9 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -9,6 +9,7 @@ Callable, Dict, Iterable, + List, Literal, Optional, Tuple, @@ -29,7 +30,6 @@ ) from webpub_manifest_parser.utils import encode, first_or_default -from core.configuration.ignored_identifier import IgnoredIdentifierImporterMixin from core.coverage import CoverageFailure from core.integration.settings import ( ConfigurationFormItem, @@ -61,7 +61,9 @@ Subject, ) from core.model.configuration import ConfigurationSetting +from core.model.constants import IdentifierType from core.opds_import import ( + BaseOPDSAPI, BaseOPDSImporter, OPDSImporterLibrarySettings, OPDSImporterSettings, @@ -130,7 +132,7 @@ def parse_manifest( class OPDS2ImporterSettings(OPDSImporterSettings): - custom_accept_header: Optional[str] = FormField( + custom_accept_header: str = FormField( default="{}, {};q=0.9, */*;q=0.1".format( OPDS2MediaTypesRegistry.OPDS_FEED.key, "application/json" ), @@ -144,18 +146,30 @@ class OPDS2ImporterSettings(OPDSImporterSettings): ), ) + ignored_identifier_types: List[str] = FormField( + alias="IGNORED_IDENTIFIER_TYPE", + default=[], + form=ConfigurationFormItem( + label=_("List of identifiers that will be skipped"), + description=_( + "Circulation Manager will not be importing publications with identifiers having one of the selected types." 
+ ), + type=ConfigurationFormItemType.MENU, + required=False, + options={ + identifier_type.value: identifier_type.value + for identifier_type in IdentifierType + }, + format="narrow", + ), + ) + class OPDS2ImporterLibrarySettings(OPDSImporterLibrarySettings): pass -class OPDS2Importer(IgnoredIdentifierImporterMixin, BaseOPDSImporter): - """Imports editions and license pools from an OPDS 2.0 feed.""" - - NAME: str = ExternalIntegration.OPDS2_IMPORT - DESCRIPTION: str = _("Import books from a publicly-accessible OPDS 2.0 feed.") - NEXT_LINK_RELATION: str = "next" - +class OPDS2API(BaseOPDSAPI): @classmethod def settings_class(cls) -> Type[OPDS2ImporterSettings]: return OPDS2ImporterSettings @@ -166,11 +180,23 @@ def library_settings_class(cls) -> Type[OPDS2ImporterLibrarySettings]: @classmethod def label(cls) -> str: - return cls.NAME + return "OPDS 2.0 Import" @classmethod def description(cls) -> str: - return cls.DESCRIPTION + return "Import books from a publicly-accessible OPDS 2.0 feed." + + +class OPDS2Importer(BaseOPDSImporter[OPDS2ImporterSettings]): + """Imports editions and license pools from an OPDS 2.0 feed.""" + + NAME: str = ExternalIntegration.OPDS2_IMPORT + DESCRIPTION: str = _("Import books from a publicly-accessible OPDS 2.0 feed.") + NEXT_LINK_RELATION: str = "next" + + @classmethod + def settings_class(cls) -> Type[OPDS2ImporterSettings]: + return OPDS2ImporterSettings def __init__( self, @@ -196,6 +222,7 @@ def __init__( """ super().__init__(db, collection, data_source_name, http_get) self._parser = parser + self.ignored_identifier_types = self.settings.ignored_identifier_types def assert_importable_content( self, feed: str, feed_url: str, max_get_attempts: int = 5 @@ -205,16 +232,10 @@ def assert_importable_content( def _is_identifier_allowed(self, identifier: Identifier) -> bool: """Check the identifier and return a boolean value indicating whether CM can import it. - NOTE: Currently, this method hard codes allowed identifier types. - The next PR will add an additional configuration setting allowing to override this behaviour - and configure allowed identifier types in the CM Admin UI. - :param identifier: Identifier object :return: Boolean value indicating whether CM can import the identifier """ - return identifier.type not in self._get_ignored_identifier_types( - self.integration_configuration() - ) + return identifier.type not in self.ignored_identifier_types def _extract_subjects(self, subjects: list[core_ast.Subject]) -> list[SubjectData]: """Extract a list of SubjectData objects from the webpub-manifest-parser's subject. 
diff --git a/core/opds_import.py b/core/opds_import.py index ce3eb33561..7253e99f32 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -12,6 +12,7 @@ Callable, Dict, Generator, + Generic, Iterable, List, Literal, @@ -19,6 +20,7 @@ Sequence, Tuple, Type, + TypeVar, overload, ) from urllib.parse import urljoin, urlparse @@ -32,12 +34,13 @@ from pydantic import HttpUrl from sqlalchemy.orm.session import Session -from api.circulation import CirculationConfigurationMixin +from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo +from api.circulation_exceptions import CurrentlyAvailable, FormatNotAvailable, NotOnHold from api.selftest import HasCollectionSelfTests from core.classifier import Classifier from core.config import IntegrationException +from core.connection_config import ConnectionSetting from core.coverage import CoverageFailure -from core.importers import BaseImporterSettings from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -64,14 +67,18 @@ Hyperlink, Identifier, LicensePool, + LicensePoolDeliveryMechanism, Measurement, + Patron, Representation, RightsStatus, Subject, get_one, ) from core.model.configuration import HasExternalIntegration +from core.model.formats import FormatPrioritiesSettings from core.monitor import CollectionMonitor +from core.saml.wayfless import SAMLWAYFlessSetttings from core.selftest import SelfTestResult from core.util.datetime_helpers import datetime_utc, to_utc, utc_now from core.util.http import HTTP, BadResponseException @@ -97,7 +104,11 @@ class OPDSXMLParser(XMLParser): } -class BaseOPDSImporterSettings(BaseSettings): +class OPDSImporterSettings( + ConnectionSetting, + SAMLWAYFlessSetttings, + FormatPrioritiesSettings, +): _NO_DEFAULT_AUDIENCE = "" external_account_id: Optional[HttpUrl] = FormField( @@ -125,12 +136,9 @@ class BaseOPDSImporterSettings(BaseSettings): {audience: audience for audience in sorted(Classifier.AUDIENCES)} ), required=False, - # readOnly=True, ), ) - -class OPDSImporterSettings(BaseImporterSettings, BaseOPDSImporterSettings): username: Optional[str] = FormField( form=ConfigurationFormItem( label=_("Username"), @@ -151,7 +159,7 @@ class OPDSImporterSettings(BaseImporterSettings, BaseOPDSImporterSettings): ) ) - custom_accept_header: Optional[str] = FormField( + custom_accept_header: str = FormField( default=",".join( [ OPDSFeed.ACQUISITION_FEED_TYPE, @@ -190,8 +198,103 @@ class OPDSImporterLibrarySettings(BaseSettings): pass +class BaseOPDSAPI( + BaseCirculationAPI[OPDSImporterSettings, OPDSImporterLibrarySettings], ABC +): + def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: + # All the CM side accounting for this loan is handled by CirculationAPI + # since we don't have any remote API we need to call this method is + # just a no-op. + pass + + def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: + # Since there is no such thing as a hold, there is no such + # thing as releasing a hold. + raise NotOnHold() + + def place_hold( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + notification_email_address: Optional[str], + ) -> HoldInfo: + # Because all OPDS content is assumed to be simultaneously + # available to all patrons, there is no such thing as a hold. + raise CurrentlyAvailable() + + def update_availability(self, licensepool: LicensePool) -> None: + # We already know all the availability information we're going + # to know, so we don't need to do anything. 
+ pass + + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> FulfillmentInfo: + requested_mechanism = delivery_mechanism.delivery_mechanism + fulfillment = None + for lpdm in licensepool.delivery_mechanisms: + if ( + lpdm.resource is None + or lpdm.resource.representation is None + or lpdm.resource.representation.public_url is None + ): + # This LicensePoolDeliveryMechanism can't actually + # be used for fulfillment. + continue + if lpdm.delivery_mechanism == requested_mechanism: + # We found it! This is how the patron wants + # the book to be delivered. + fulfillment = lpdm + break + + if not fulfillment: + # There is just no way to fulfill this loan the way the + # patron wants. + raise FormatNotAvailable() + + rep = fulfillment.resource.representation + content_link = rep.public_url + media_type = rep.media_type + + return FulfillmentInfo( + licensepool.collection, + licensepool.data_source.name, + identifier_type=licensepool.identifier.type, + identifier=licensepool.identifier.identifier, + content_link=content_link, + content_type=media_type, + content=None, + content_expires=None, + ) + + def checkout( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> LoanInfo: + return LoanInfo(licensepool.collection, None, None, None, None, None) + + def can_fulfill_without_loan( + self, + patron: Optional[Patron], + pool: LicensePool, + lpdm: LicensePoolDeliveryMechanism, + ) -> bool: + return True + + +SettingsType = TypeVar("SettingsType", bound=OPDSImporterSettings, covariant=True) + + class BaseOPDSImporter( - CirculationConfigurationMixin[OPDSImporterSettings, OPDSImporterLibrarySettings], + Generic[SettingsType], LoggerMixin, ABC, ): @@ -224,6 +327,14 @@ def __init__( # we don't, e.g. accidentally get our IP banned from # gutenberg.org. self.http_get = http_get or Representation.cautious_http_get + self.settings = self.settings_class().construct( + **collection.integration_configuration.settings_dict + ) + + @classmethod + @abstractmethod + def settings_class(cls) -> Type[SettingsType]: + ... @abstractmethod def extract_feed_data( @@ -461,7 +572,25 @@ def import_from_feed( ) -class OPDSImporter(BaseOPDSImporter): +class OPDSAPI(BaseOPDSAPI): + @classmethod + def settings_class(cls) -> Type[OPDSImporterSettings]: + return OPDSImporterSettings + + @classmethod + def library_settings_class(cls) -> Type[OPDSImporterLibrarySettings]: + return OPDSImporterLibrarySettings + + @classmethod + def description(cls) -> str: + return "Import books from a publicly-accessible OPDS feed." + + @classmethod + def label(cls) -> str: + return "OPDS Import" + + +class OPDSImporter(BaseOPDSImporter[OPDSImporterSettings]): """Imports editions and license pools from an OPDS feed. Creates Edition, LicensePool and Work rows in the database, if those don't already exist. 
@@ -483,18 +612,6 @@ class OPDSImporter(BaseOPDSImporter): def settings_class(cls) -> Type[OPDSImporterSettings]: return OPDSImporterSettings - @classmethod - def library_settings_class(cls) -> Type[OPDSImporterLibrarySettings]: - return OPDSImporterLibrarySettings - - @classmethod - def label(cls) -> str: - return "OPDS Importer" - - @classmethod - def description(cls) -> str: - return cls.DESCRIPTION # type: ignore[no-any-return] - def __init__( self, _db: Session, @@ -1572,7 +1689,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[BaseOPDSImporter], + import_class: Type[BaseOPDSImporter[OPDSImporterSettings]], force_reimport: bool = False, **import_class_kwargs: Any, ) -> None: @@ -1600,21 +1717,12 @@ def __init__( self.force_reimport = force_reimport self.importer = import_class(_db, collection=collection, **import_class_kwargs) - config = self.importer.configuration() - self.username = config.username - self.password = config.password + settings = self.importer.settings + self.username = settings.username + self.password = settings.password - # Not all inherited settings have these - # OPDSforDistributors does not use this setting - settings = self.importer.configuration() - try: - self.custom_accept_header = settings.custom_accept_header - except AttributeError: - self.custom_accept_header = None - try: - self._max_retry_count: int | None = settings.max_retry_count - except AttributeError: - self._max_retry_count = 0 + self.custom_accept_header = settings.custom_accept_header + self._max_retry_count = settings.max_retry_count parsed_url = urlparse(self.feed_url) self._feed_base_url = f"{parsed_url.scheme}://{parsed_url.hostname}{(':' + str(parsed_url.port)) if parsed_url.port else ''}/" diff --git a/core/scripts.py b/core/scripts.py index fb35ee56e7..0c0c56a44b 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -923,7 +923,6 @@ def do_run(self, *args, **kwargs): parsed = self.parse_command_line(self._db, *args, **kwargs) for library in parsed.libraries: api_map = dict(CirculationAPI(self._db, library).registry) - api_map[ExternalIntegration.OPDS_IMPORT] = OPDSImportMonitor self.out.write("Testing %s\n" % library.name) for collection in library.collections: try: @@ -940,11 +939,7 @@ def test_collection(self, collection, api_map, extra_args=None): return self.out.write(" Running self-test for %s.\n" % collection.name) - # Some HasSelfTests classes require extra arguments to their - # constructors. 
- extra_args = extra_args or { - OPDSImportMonitor: [OPDSImporter], - } + extra_args = extra_args or {} extra = extra_args.get(tester, []) constructor_args = [self._db, collection] + list(extra) results_dict, results_list = tester.run_self_tests( diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index 1417bb7226..2444944542 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -18,6 +18,7 @@ PROTOCOL_DOES_NOT_SUPPORT_PARENTS, UNKNOWN_PROTOCOL, ) +from api.integration.registry.license_providers import LicenseProvidersRegistry from api.selftest import HasCollectionSelfTests from core.model import ( Admin, @@ -46,9 +47,9 @@ def test_collections_get_with_no_collections( ) assert response.get("collections") == [] - names = [p.get("name") for p in response.get("protocols")] - assert ExternalIntegration.OVERDRIVE in names - assert ExternalIntegration.OPDS_IMPORT in names + names = {p.get("name") for p in response.get("protocols")} + expected_names = {k for k, v in LicenseProvidersRegistry()} + assert names == expected_names def test_collections_get_collections_with_multiple_collections( self, settings_ctrl_fixture: SettingsControllerFixture diff --git a/tests/api/admin/controller/test_settings.py b/tests/api/admin/controller/test_settings.py index a9823c0fd2..12da7a33dd 100644 --- a/tests/api/admin/controller/test_settings.py +++ b/tests/api/admin/controller/test_settings.py @@ -29,9 +29,6 @@ def test_get_integration_protocols( """Test the _get_integration_protocols helper method.""" class Protocol(HasChildIntegrationConfiguration): - __module__ = "my name" - NAME = "my label" - DESCRIPTION = "my description" SITEWIDE = True LIBRARY_SETTINGS = [6] CARDINALITY = 1 @@ -47,6 +44,14 @@ def child_settings_class(cls): def settings_class(cls): return BaseSettings + @classmethod + def label(cls): + return "my label" + + @classmethod + def description(cls): + return "my description" + [result] = SettingsController( admin_ctrl_fixture.manager )._get_integration_protocols([Protocol]) @@ -58,21 +63,10 @@ def settings_class(cls): child_settings=[{"label": "key", "key": "key", "required": True}], label="my label", cardinality=1, - name="my name", + name="my label", ) assert expect == result - # Remove the CARDINALITY setting - del Protocol.CARDINALITY - - # And look in a different place for the name. 
- [result] = SettingsController( - admin_ctrl_fixture.manager - )._get_integration_protocols([Protocol], protocol_name_attr="NAME") - - assert "my label" == result["name"] - assert "cardinality" not in result - def test_get_integration_info( self, settings_ctrl_fixture: SettingsControllerFixture ): @@ -275,7 +269,9 @@ class P1LibrarySettings(BaseSettings): key: str value: str - class Protocol1(HasLibraryIntegrationConfiguration): + class Protocol1( + HasLibraryIntegrationConfiguration[BaseSettings, P1LibrarySettings] + ): @classmethod def library_settings_class(cls): return P1LibrarySettings diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index 77a5d13e55..8caece5d55 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -5,7 +5,13 @@ from sqlalchemy.orm import Session -from api.circulation import BaseCirculationAPI, CirculationAPI, HoldInfo, LoanInfo +from api.circulation import ( + BaseCirculationAPI, + CirculationAPI, + HoldInfo, + LoanInfo, + PatronActivityCirculationAPI, +) from api.controller import CirculationManager from core.external_search import ExternalSearchIndex from core.integration.settings import BaseSettings @@ -15,7 +21,7 @@ from tests.mocks.search import ExternalSearchIndexFake -class MockBaseCirculationAPI(BaseCirculationAPI, ABC): +class MockPatronActivityCirculationAPI(PatronActivityCirculationAPI, ABC): @classmethod def label(cls) -> str: return "" @@ -33,7 +39,7 @@ def library_settings_class(cls) -> Type[BaseSettings]: return BaseSettings -class MockRemoteAPI(MockBaseCirculationAPI): +class MockRemoteAPI(MockPatronActivityCirculationAPI): def __init__( self, set_delivery_mechanism_at=True, can_revoke_hold_when_reserved=True ): diff --git a/tests/api/mockapi/opds_for_distributors.py b/tests/api/mockapi/opds_for_distributors.py index 8560573387..038bb5de05 100644 --- a/tests/api/mockapi/opds_for_distributors.py +++ b/tests/api/mockapi/opds_for_distributors.py @@ -29,10 +29,10 @@ def mock_collection( ), ) integration = collection.create_external_integration( - protocol=OPDSForDistributorsAPI.NAME + protocol=OPDSForDistributorsAPI.label() ) config = collection.create_integration_configuration( - OPDSForDistributorsAPI.NAME + OPDSForDistributorsAPI.label() ) config.settings_dict = dict( username="a", password="b", data_source="data_source" diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index 3a8af5028d..e76c4a3f4b 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -76,7 +76,11 @@ from tests.fixtures.vendor_id import VendorIDFixture -class MockBasic(BasicAuthenticationProvider): +class MockBasic( + BasicAuthenticationProvider[ + BasicAuthProviderSettings, BasicAuthProviderLibrarySettings + ] +): """A second mock basic authentication provider for use in testing the workflow around Basic Auth. 
""" @@ -101,6 +105,14 @@ def __init__( lookup_patrondata = patrondata self.lookup_patrondata = lookup_patrondata + @classmethod + def settings_class(cls) -> type[BasicAuthProviderSettings]: + return BasicAuthProviderSettings + + @classmethod + def library_settings_class(cls) -> type[BasicAuthProviderLibrarySettings]: + return BasicAuthProviderLibrarySettings + @classmethod def label(cls) -> str: return "Mock" diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index 965bed8b5e..216b92370c 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -31,15 +31,15 @@ Hyperlink, Identifier, Loan, - MediaTypes, Representation, RightsStatus, ) +from core.opds_import import OPDSAPI from core.util.datetime_helpers import utc_now from tests.api.mockapi.bibliotheca import MockBibliothecaAPI from tests.api.mockapi.circulation import ( - MockBaseCirculationAPI, MockCirculationAPI, + MockPatronActivityCirculationAPI, MockRemoteAPI, ) from tests.fixtures.api_bibliotheca_files import BibliothecaFilesFixture @@ -171,30 +171,6 @@ def test_borrow_sends_analytics_event(self, circulation_api: CirculationAPIFixtu loan, hold, is_new = self.borrow(circulation_api) assert 3 == circulation_api.analytics.count - def test_borrowing_of_unlimited_access_book_succeeds( - self, circulation_api: CirculationAPIFixture - ): - """Ensure that unlimited access books that don't belong to collections - having a custom CirculationAPI implementation (e.g., OPDS 1.x, OPDS 2.x collections) - are checked out in the same way as OA and self-hosted books.""" - # Arrange - - # Reset the API map, this book belongs to the "basic" collection, - # i.e. collection without a custom CirculationAPI implementation. - circulation_api.circulation.api_for_license_pool = MagicMock(return_value=None) - - # Mark the book as unlimited access. - circulation_api.pool.unlimited_access = True - - # Act - loan, hold, is_new = self.borrow(circulation_api) - - # Assert - assert True == is_new - assert circulation_api.pool == loan.license_pool - assert circulation_api.patron == loan.patron - assert hold is None - def test_attempt_borrow_with_existing_remote_loan( self, circulation_api: CirculationAPIFixture ): @@ -596,7 +572,7 @@ def test_borrow_calls_enforce_limits(self, circulation_api: CirculationAPIFixtur # is to call enforce_limits() before trying to check out the # book. - mock_api = MagicMock(spec=MockBaseCirculationAPI) + mock_api = MagicMock(spec=MockPatronActivityCirculationAPI) mock_api.checkout.side_effect = NotImplementedError() mock_circulation = CirculationAPI( @@ -771,6 +747,8 @@ def patron_at_hold_limit(self, patron): # patron = circulation_api.db.patron(library=library) pool = MagicMock() + pool.open_access = False + pool.unlimited_access = False circulation.at_loan_limit = False circulation.at_hold_limit = False @@ -904,9 +882,12 @@ def test_borrow_hold_limit_reached( loan, hold, is_new = self.borrow(circulation_api) assert hold != None - def test_fulfill_open_access(self, circulation_api: CirculationAPIFixture): + def test_fulfill_errors(self, circulation_api: CirculationAPIFixture): # Here's an open-access title. circulation_api.pool.open_access = True + circulation_api.circulation.remotes[ + circulation_api.pool.data_source.name + ] = OPDSAPI(circulation_api.db.session, circulation_api.collection) # The patron has the title on loan. 
circulation_api.pool.loan_to(circulation_api.patron) @@ -917,15 +898,7 @@ def test_fulfill_open_access(self, circulation_api: CirculationAPIFixture): assert None == broken_lpdm.resource i_want_an_epub = broken_lpdm.delivery_mechanism - # fulfill_open_access() and fulfill() will both raise - # FormatNotAvailable. - pytest.raises( - FormatNotAvailable, - circulation_api.circulation.fulfill_open_access, - circulation_api.pool, - i_want_an_epub, - ) - + # fulfill() will raise FormatNotAvailable. pytest.raises( FormatNotAvailable, circulation_api.circulation.fulfill, @@ -956,9 +929,12 @@ def test_fulfill_open_access(self, circulation_api: CirculationAPIFixture): assert None == link.resource.representation pytest.raises( FormatNotAvailable, - circulation_api.circulation.fulfill_open_access, + circulation_api.circulation.fulfill, + circulation_api.patron, + "1234", circulation_api.pool, - i_want_an_epub, + broken_lpdm, + sync_on_failure=False, ) # Let's add a Representation to the Resource. @@ -971,16 +947,6 @@ def test_fulfill_open_access(self, circulation_api: CirculationAPIFixture): link.resource.representation = representation # We can finally fulfill a loan. - result = circulation_api.circulation.fulfill_open_access( - circulation_api.pool, broken_lpdm - ) - assert isinstance(result, FulfillmentInfo) - assert result.content_link == link.resource.representation.public_url - assert result.content_type == i_want_an_epub.content_type - - # Now, if we try to call fulfill() with the broken - # LicensePoolDeliveryMechanism we get a result from the - # working DeliveryMechanism with the same format. result = circulation_api.circulation.fulfill( circulation_api.patron, "1234", circulation_api.pool, broken_lpdm ) @@ -988,17 +954,6 @@ def test_fulfill_open_access(self, circulation_api: CirculationAPIFixture): assert result.content_link == link.resource.representation.public_url assert result.content_type == i_want_an_epub.content_type - # We get the right result even if the code calling - # fulfill_open_access() is incorrectly written and passes in - # the broken LicensePoolDeliveryMechanism (as opposed to its - # generic DeliveryMechanism). - result = circulation_api.circulation.fulfill_open_access( - circulation_api.pool, broken_lpdm - ) - assert isinstance(result, FulfillmentInfo) - assert result.content_link == link.resource.representation.public_url - assert result.content_type == i_want_an_epub.content_type - # If we change the working LPDM so that it serves a different # media type than the one we're asking for, we're back to # FormatNotAvailable errors. @@ -1010,62 +965,14 @@ def test_fulfill_open_access(self, circulation_api: CirculationAPIFixture): working_lpdm.delivery_mechanism = irrelevant_delivery_mechanism pytest.raises( FormatNotAvailable, - circulation_api.circulation.fulfill_open_access, - circulation_api.pool, - i_want_an_epub, - ) - - def test_fulfilment_of_unlimited_access_book_succeeds( - self, circulation_api: CirculationAPIFixture - ): - """Ensure that unlimited access books that don't belong to collections - having a custom CirculationAPI implementation (e.g., OPDS 1.x, OPDS 2.x collections) - are fulfilled in the same way as OA and self-hosted books.""" - # Reset the API map, this book belongs to the "basic" collection, - # i.e. collection without a custom CirculationAPI implementation. - circulation_api.circulation.api_for_license_pool = MagicMock(return_value=None) - - # Mark the book as unlimited access. 
- circulation_api.pool.unlimited_access = True - - media_type = MediaTypes.EPUB_MEDIA_TYPE - - # Create a borrow link. - link, _ = circulation_api.pool.identifier.add_link( - Hyperlink.BORROW, - circulation_api.db.fresh_url(), - circulation_api.pool.data_source, - ) - - # Create a license pool delivery mechanism. - circulation_api.pool.set_delivery_mechanism( - media_type, - DeliveryMechanism.ADOBE_DRM, - RightsStatus.IN_COPYRIGHT, - link.resource, - ) - - # Create a representation. - representation, _ = circulation_api.db.representation( - link.resource.url, media_type, "Dummy content", mirrored=True - ) - link.resource.representation = representation - - # Act - circulation_api.pool.loan_to(circulation_api.patron) - - result = circulation_api.circulation.fulfill( + circulation_api.circulation.fulfill, circulation_api.patron, "1234", circulation_api.pool, - circulation_api.pool.delivery_mechanisms[0], + broken_lpdm, + sync_on_failure=False, ) - # The fulfillment looks good. - assert isinstance(result, FulfillmentInfo) - assert result.content_link == link.resource.representation.public_url - assert result.content_type == media_type - def test_fulfill(self, circulation_api: CirculationAPIFixture): circulation_api.pool.loan_to(circulation_api.patron) @@ -1563,7 +1470,7 @@ def test_can_fulfill_without_loan(self, circulation_api: CirculationAPIFixture): the BaseCirculationAPI implementation for that title's colelction. """ - class Mock(MockBaseCirculationAPI): + class Mock(MockPatronActivityCirculationAPI): def can_fulfill_without_loan(self, patron, pool, lpdm): return "yep" @@ -1571,7 +1478,7 @@ def can_fulfill_without_loan(self, patron, pool, lpdm): circulation = CirculationAPI( circulation_api.db.session, circulation_api.db.default_library() ) - mock = MagicMock(spec=MockBaseCirculationAPI) + mock = MagicMock(spec=MockPatronActivityCirculationAPI) mock.can_fulfill_without_loan = MagicMock(return_value="yep") circulation.api_for_collection[pool.collection.id] = mock assert "yep" == circulation.can_fulfill_without_loan(None, pool, MagicMock()) diff --git a/tests/api/test_controller_loan.py b/tests/api/test_controller_loan.py index f76515fd77..a5d556e683 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/test_controller_loan.py @@ -179,6 +179,22 @@ def test_patron_circulation_retrieval(self, loan_fixture: LoanFixture): assert (hold, other_pool) == result def test_borrow_success(self, loan_fixture: LoanFixture): + # Create a loanable LicensePool. + work = loan_fixture.db.work( + with_license_pool=True, with_open_access_download=False + ) + pool = work.license_pools[0] + loan_fixture.manager.d_circulation.queue_checkout( + pool, + LoanInfo( + pool.collection, + pool.data_source.name, + pool.identifier.type, + pool.identifier.identifier, + utc_now(), + utc_now() + datetime.timedelta(seconds=3600), + ), + ) with loan_fixture.request_context_with_library( "/", headers=dict(Authorization=loan_fixture.valid_auth) ): @@ -233,6 +249,20 @@ def test_borrow_success(self, loan_fixture: LoanFixture): assert loan_fixture.mech1.resource.representation.url is not None # Now let's try to fulfill the loan using the first delivery mechanism. 
+ fulfillment = FulfillmentInfo( + loan_fixture.pool.collection, + loan_fixture.pool.data_source, + loan_fixture.pool.identifier.type, + loan_fixture.pool.identifier.identifier, + content_link=fulfillable_mechanism.resource.representation.public_url, + content_type=fulfillable_mechanism.resource.representation.media_type, + content=None, + content_expires=None, + ) + loan_fixture.manager.d_circulation.queue_fulfill( + loan_fixture.pool, fulfillment + ) + assert isinstance(loan_fixture.pool.id, int) response = loan_fixture.manager.loans.fulfill( loan_fixture.pool.id, diff --git a/tests/api/test_controller_odl_notify.py b/tests/api/test_controller_odl_notify.py index eb6f1f8cbe..4f855c8f22 100644 --- a/tests/api/test_controller_odl_notify.py +++ b/tests/api/test_controller_odl_notify.py @@ -61,7 +61,7 @@ def setup(self, available, concurrency, left=None, expires=None): @staticmethod def integration_protocol(): - return ODLAPI.NAME + return ODLAPI.label() @pytest.fixture(scope="function") @@ -76,8 +76,8 @@ class TestODLNotificationController: @pytest.mark.parametrize( "protocol", [ - pytest.param(ODLAPI.NAME, id="ODL 1.x collection"), - pytest.param(ODL2API.NAME, id="ODL 2.x collection"), + pytest.param(ODLAPI.label(), id="ODL 1.x collection"), + pytest.param(ODL2API.label(), id="ODL 2.x collection"), ], ) def test_notify_success( diff --git a/tests/api/test_monitor.py b/tests/api/test_monitor.py index 526c462a53..c933019bfd 100644 --- a/tests/api/test_monitor.py +++ b/tests/api/test_monitor.py @@ -21,7 +21,7 @@ def test_source_of_truth_protocols(self): will be exempt from the reaper. """ for i in ( - ODLAPI.NAME, + ODLAPI.label(), ExternalIntegration.OPDS_FOR_DISTRIBUTORS, ): assert i in LoanlikeReaperMonitor.SOURCE_OF_TRUTH_PROTOCOLS diff --git a/tests/api/test_odl2.py b/tests/api/test_odl2.py index 444e9b5fbe..b5a384bb76 100644 --- a/tests/api/test_odl2.py +++ b/tests/api/test_odl2.py @@ -90,7 +90,7 @@ def test_import( feed = api_odl2_files_fixture.sample_text("feed.json") config = odl2_importer.collection.integration_configuration - odl2_importer.set_ignored_identifier_types([IdentifierConstants.URI], config) + odl2_importer.ignored_identifier_types = [IdentifierConstants.URI] DatabaseTransactionFixture.set_settings( config, odl2_skipped_license_formats=["text/html"] ) diff --git a/tests/api/test_selftest.py b/tests/api/test_selftest.py index b7eab7ae90..18381ac08b 100644 --- a/tests/api/test_selftest.py +++ b/tests/api/test_selftest.py @@ -13,8 +13,7 @@ from api.circulation import CirculationAPI from api.selftest import HasCollectionSelfTests, HasSelfTests, SelfTestResult from core.exceptions import IntegrationException -from core.model import ExternalIntegration, Patron -from core.opds_import import OPDSImportMonitor +from core.model import Patron from core.scripts import RunSelfTestsScript from core.util.problem_detail import ProblemDetail @@ -200,10 +199,6 @@ def test_collection(self, collection, api_map): for k, v in registry: assert api_map[k] == v - # But a couple of things were added to the map that are not in - # CirculationAPI. - assert api_map[ExternalIntegration.OPDS_IMPORT] == OPDSImportMonitor - # If test_collection raises an exception, the exception is recorded, # and we move on. 
class MockScript2(MockScript): diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index fcae09a314..185afaed78 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -402,9 +402,9 @@ def test_opds2_importer_skips_publications_with_unsupported_identifier_types( # Arrange # Update the list of supported identifier types in the collection's configuration settings # and set the identifier type passed as a parameter as the only supported identifier type. - data.importer.set_ignored_identifier_types( - ignore_identifier_type, data.importer.integration_configuration() - ) + data.importer.ignored_identifier_types = [ + t.value for t in ignore_identifier_type + ] content_server_feed = opds2_files_fixture.sample_text("feed.json") diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 2aa3fee55c..2045207b41 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -2,14 +2,15 @@ from functools import partial from io import StringIO from typing import Optional -from unittest.mock import MagicMock, patch +from unittest.mock import MagicMock, PropertyMock, patch import pytest import requests_mock from lxml import etree from psycopg2.extras import NumericRange -from api.circulation import CirculationAPI +from api.circulation import CirculationAPI, FulfillmentInfo, LoanInfo +from api.circulation_exceptions import CurrentlyAvailable, FormatNotAvailable, NotOnHold from api.saml.credential import SAMLCredentialManager from api.saml.metadata.model import ( SAMLAttributeStatement, @@ -30,15 +31,18 @@ ExternalIntegration, Hyperlink, Identifier, + LicensePool, + LicensePoolDeliveryMechanism, Measurement, MediaTypes, Representation, + Resource, RightsStatus, Subject, Work, WorkCoverageRecord, ) -from core.opds_import import OPDSImporter, OPDSImportMonitor, OPDSXMLParser +from core.opds_import import OPDSAPI, OPDSImporter, OPDSImportMonitor, OPDSXMLParser from core.util import first_or_default from core.util.datetime_helpers import datetime_utc from core.util.http import BadResponseException @@ -2293,3 +2297,178 @@ def test_retry(self, opds_importer_fixture: OPDSImporterFixture): status_forcelist=[429, 500, 502, 503, 504], backoff_factor=1.0, ) + + +class OPDSAPIFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + self.session = db.session + self.collection = db.collection(protocol=OPDSAPI.label()) + self.api = OPDSAPI(self.session, self.collection) + + self.mock_patron = MagicMock() + self.mock_pin = MagicMock(spec=str) + self.mock_licensepool = MagicMock(spec=LicensePool) + self.mock_licensepool.collection = self.collection + + +@pytest.fixture +def opds_api_fixture(db: DatabaseTransactionFixture) -> OPDSAPIFixture: + return OPDSAPIFixture(db) + + +class TestOPDSAPI: + def test_checkin(self, opds_api_fixture: OPDSAPIFixture) -> None: + # Make sure we can call checkin() without getting an exception. + # The function is a no-op for this api, so we don't need to + # test anything else. + opds_api_fixture.api.checkin( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + ) + + def test_release_hold(self, opds_api_fixture: OPDSAPIFixture) -> None: + # This api doesn't support holds. So we expect an exception. 
+ with pytest.raises(NotOnHold): + opds_api_fixture.api.release_hold( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + ) + + def test_place_hold(self, opds_api_fixture: OPDSAPIFixture) -> None: + # This api doesn't support holds. So we expect an exception. + with pytest.raises(CurrentlyAvailable): + opds_api_fixture.api.place_hold( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + None, + ) + + def test_update_availability(self, opds_api_fixture: OPDSAPIFixture) -> None: + # This function is a no-op since we already know the availability + # of the license pool for any OPDS content. So we just make sure + # we can call it without getting an exception. + opds_api_fixture.api.update_availability(opds_api_fixture.mock_licensepool) + + def test_checkout(self, opds_api_fixture: OPDSAPIFixture) -> None: + # Make sure checkout returns a LoanInfo object with the correct + # collection id. + mock_collection_property = PropertyMock( + return_value=opds_api_fixture.collection + ) + type(opds_api_fixture.mock_licensepool).collection = mock_collection_property + delivery_mechanism = MagicMock(spec=LicensePoolDeliveryMechanism) + loan = opds_api_fixture.api.checkout( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + delivery_mechanism, + ) + assert isinstance(loan, LoanInfo) + assert mock_collection_property.call_count == 1 + assert loan.collection_id == opds_api_fixture.collection.id + + def test_can_fulfill_without_loan(self, opds_api_fixture: OPDSAPIFixture) -> None: + # This should always return True. + mock_lpdm = MagicMock(spec=LicensePoolDeliveryMechanism) + assert ( + opds_api_fixture.api.can_fulfill_without_loan( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_licensepool, + mock_lpdm, + ) + is True + ) + + def test_fulfill(self, opds_api_fixture: OPDSAPIFixture) -> None: + # We only fulfill if the requested format matches an available format + # for the license pool. + mock_mechanism = MagicMock(spec=DeliveryMechanism) + mock_lpdm = MagicMock(spec=LicensePoolDeliveryMechanism) + mock_lpdm.delivery_mechanism = mock_mechanism + + # This license pool has no available formats. + opds_api_fixture.mock_licensepool.delivery_mechanisms = [] + with pytest.raises(FormatNotAvailable): + opds_api_fixture.api.fulfill( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + mock_lpdm, + ) + + # This license pool has a delivery mechanism, but it's not the one + # we're looking for. + opds_api_fixture.mock_licensepool.delivery_mechanisms = [ + MagicMock(), + MagicMock(), + ] + with pytest.raises(FormatNotAvailable): + opds_api_fixture.api.fulfill( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + mock_lpdm, + ) + + # This license pool has the delivery mechanism we're looking for, but + # it does not have a resource. + mock_lpdm.resource = None + opds_api_fixture.mock_licensepool.delivery_mechanisms = [mock_lpdm] + with pytest.raises(FormatNotAvailable): + opds_api_fixture.api.fulfill( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + mock_lpdm, + ) + + # This license pool has the delivery mechanism we're looking for, and + # it has a resource, but the resource doesn't have a representation. 
+ mock_lpdm.resource = MagicMock(spec=Resource) + mock_lpdm.resource.representation = None + opds_api_fixture.mock_licensepool.delivery_mechanisms = [mock_lpdm] + with pytest.raises(FormatNotAvailable): + opds_api_fixture.api.fulfill( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + mock_lpdm, + ) + + # This license pool has the delivery mechanism we're looking for, and + # it has a resource, the resource has a representation, but the + # representation doesn't have a URL. + mock_lpdm.resource.representation = MagicMock(spec=Representation) + mock_lpdm.resource.representation.public_url = None + opds_api_fixture.mock_licensepool.delivery_mechanisms = [mock_lpdm] + with pytest.raises(FormatNotAvailable): + opds_api_fixture.api.fulfill( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + mock_lpdm, + ) + + # This license pool has everything we need, so we can fulfill. + mock_lpdm.resource.representation.public_url = "http://foo.com/bar.epub" + opds_api_fixture.mock_licensepool.delivery_mechanisms = [ + MagicMock(), + MagicMock(), + mock_lpdm, + ] + fulfillment = opds_api_fixture.api.fulfill( + opds_api_fixture.mock_patron, + opds_api_fixture.mock_pin, + opds_api_fixture.mock_licensepool, + mock_lpdm, + ) + assert isinstance(fulfillment, FulfillmentInfo) + assert fulfillment.content_link == mock_lpdm.resource.representation.public_url + assert fulfillment.content_type == mock_lpdm.resource.representation.media_type + assert fulfillment.content is None + assert fulfillment.content_expires is None + assert fulfillment.collection_id == opds_api_fixture.collection.id diff --git a/tests/fixtures/api_odl.py b/tests/fixtures/api_odl.py index 5aeffb0e76..9bdef0ad8a 100644 --- a/tests/fixtures/api_odl.py +++ b/tests/fixtures/api_odl.py @@ -9,7 +9,7 @@ from jinja2 import Template from api.odl import BaseODLImporter, ODLImporter -from api.odl2 import ODL2Importer +from api.odl2 import ODL2API, ODL2Importer from core.coverage import CoverageFailure from core.model import Edition, LicensePool, Work from tests.fixtures.files import APIFilesFixture @@ -150,7 +150,7 @@ def odl2_importer( library = odl_test_fixture.library() return ODL2Importer( db.session, - collection=odl_test_fixture.collection(library), + collection=odl_test_fixture.collection(library, ODL2API), http_get=odl_mock_get.get, ) diff --git a/tests/fixtures/authenticator.py b/tests/fixtures/authenticator.py index 57323f28a4..8794e98658 100644 --- a/tests/fixtures/authenticator.py +++ b/tests/fixtures/authenticator.py @@ -2,7 +2,7 @@ import pytest -from api.authentication.base import AuthenticationProvider +from api.authentication.base import AuthenticationProviderType from api.integration.registry.patron_auth import PatronAuthRegistry from api.millenium_patron import MilleniumPatronAPI from api.saml.provider import SAMLWebSSOAuthenticationProvider @@ -76,7 +76,7 @@ class AuthProtocolFixture: def __init__(self, registry: PatronAuthRegistry): self.registry = registry - def __call__(self, protocol: Type[AuthenticationProvider]) -> str: + def __call__(self, protocol: Type[AuthenticationProviderType]) -> str: return self.registry.get_protocol(protocol, "") diff --git a/tests/fixtures/odl.py b/tests/fixtures/odl.py index 4e8119387e..420e342ae7 100644 --- a/tests/fixtures/odl.py +++ b/tests/fixtures/odl.py @@ -1,12 +1,12 @@ import json import types -from typing import Any, Callable, Optional, Tuple +from typing import Any, Callable, Optional, Tuple, Type 
import pytest from _pytest.monkeypatch import MonkeyPatch from api.circulation import LoanInfo -from api.odl import ODLAPI +from api.odl import ODLAPI, BaseODLAPI from api.odl2 import ODL2API from core.model import ( Collection, @@ -33,29 +33,41 @@ class MonkeyPatchedODLFixture: def __init__(self, monkeypatch: MonkeyPatch): self.monkeypatch = monkeypatch + @staticmethod + def _queue_response(patched_self, status_code, headers={}, content=None): + patched_self.responses.insert( + 0, MockRequestsResponse(status_code, headers, content) + ) -@pytest.fixture(scope="function") -def monkey_patch_odl(monkeypatch) -> MonkeyPatchedODLFixture: - """A fixture that patches the ODLAPI to make it possible to intercept HTTP requests for testing.""" - - def queue_response(self, status_code, headers={}, content=None): - self.responses.insert(0, MockRequestsResponse(status_code, headers, content)) - - def _get(self, url, headers=None): - self.requests.append([url, headers]) - response = self.responses.pop() + @staticmethod + def _get(patched_self, url, headers=None): + patched_self.requests.append([url, headers]) + response = patched_self.responses.pop() return HTTP._process_response(url, response) - def _url_for(self, *args, **kwargs): + @staticmethod + def _url_for(patched_self, *args, **kwargs): del kwargs["_external"] return "http://{}?{}".format( "/".join(args), "&".join([f"{key}={val}" for key, val in list(kwargs.items())]), ) - monkeypatch.setattr(ODLAPI, "_get", _get) - monkeypatch.setattr(ODLAPI, "_url_for", _url_for) - monkeypatch.setattr(ODLAPI, "queue_response", queue_response, raising=False) + def __call__(self, api: Type[BaseODLAPI]): + # We monkeypatch the ODLAPI class to intercept HTTP requests and responses + # these monkeypatched methods are staticmethods on this class. They take + # a patched_self argument, which is the instance of the ODLAPI class that + # they have been monkeypatched onto. 
+ self.monkeypatch.setattr(api, "_get", self._get) + self.monkeypatch.setattr(api, "_url_for", self._url_for) + self.monkeypatch.setattr( + api, "queue_response", self._queue_response, raising=False + ) + + +@pytest.fixture(scope="function") +def monkey_patch_odl(monkeypatch) -> MonkeyPatchedODLFixture: + """A fixture that patches the ODLAPI to make it possible to intercept HTTP requests for testing.""" return MonkeyPatchedODLFixture(monkeypatch) @@ -71,17 +83,18 @@ def __init__( self.db = db self.files = files self.patched = patched + patched(ODLAPI) def library(self): return self.db.default_library() - def collection(self, library): + def collection(self, library, api_class=ODLAPI): """Create a mock ODL collection to use in tests.""" - integration_protocol = ODLAPI.NAME + integration_protocol = api_class.label() collection, ignore = get_one_or_create( self.db.session, Collection, - name="Test ODL Collection", + name=f"Test {api_class.__name__} Collection", create_method_kwargs=dict( external_account_id="http://odl", ), @@ -264,8 +277,19 @@ def odl_api_test_fixture(odl_test_fixture: ODLTestFixture) -> ODLAPITestFixture: class ODL2TestFixture(ODLTestFixture): """An ODL2 test fixture that mirrors the ODL test fixture except for the API class being used""" - def collection(self, library) -> Collection: - collection = super().collection(library) + def __init__( + self, + db: DatabaseTransactionFixture, + files: APIFilesFixture, + patched: MonkeyPatchedODLFixture, + ): + super().__init__(db, files, patched) + patched(ODL2API) + + def collection( + self, library: Library, api_class: Type[ODL2API] = ODL2API + ) -> Collection: + collection = super().collection(library, api_class) collection.name = "Test ODL2 Collection" collection.integration_configuration.protocol = ExternalIntegration.ODL2 return collection diff --git a/tests/migration/test_20230531_0af587ff8595.py b/tests/migration/test_20230531_0af587ff8595.py index ae9001b396..26451e2bca 100644 --- a/tests/migration/test_20230531_0af587ff8595.py +++ b/tests/migration/test_20230531_0af587ff8595.py @@ -118,12 +118,6 @@ def test_key_rename( "overdrive_client_secret", integration_id, ) - create_config_setting( - connection, "prioritized_drm_schemes", '["P1", "P2"]', integration_id - ) - create_config_setting( - connection, "IGNORED_IDENTIFIER_TYPE", '["Overdrive ID"]', integration_id - ) create_collection( connection, "Test Overdrive", integration_id, "ExternalAccountID" ) @@ -145,6 +139,4 @@ def test_key_rename( "overdrive_client_key": "overdrive_client_key", "overdrive_client_secret": "overdrive_client_secret", "external_account_id": "ExternalAccountID", - "ignored_identifier_types": ["Overdrive ID"], - "prioritized_drm_schemes": ["P1", "P2"], } From 9463ca0248b4a8bb415ad93598f8ee5d892317b9 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 18 Oct 2023 13:26:40 -0300 Subject: [PATCH 124/262] Upgrade to Ubuntu 22.04 and Python 3.10 (PP-506) (#1462) * Upgrade to Ubuntu 22.04 and Python 3.10. * Increase timeout for ARM images. 
--- docker/Dockerfile.baseimage | 6 +++--- docker/ci/test_webapp.sh | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/Dockerfile.baseimage b/docker/Dockerfile.baseimage index 7353686365..2e8423db01 100644 --- a/docker/Dockerfile.baseimage +++ b/docker/Dockerfile.baseimage @@ -11,14 +11,14 @@ # # Main repo for this image is here: # https://github.com/phusion/baseimage-docker -FROM phusion/baseimage:focal-1.2.0 As baseimage +FROM phusion/baseimage:jammy-1.0.1 As baseimage # Make sure base system is up to date RUN apt-get update && \ apt-get upgrade -y --no-install-recommends -o Dpkg::Options::="--force-confold" && \ /bd_build/cleanup.sh -ARG POETRY_VERSION=1.5.1 +ARG POETRY_VERSION=1.6.1 # Install required packages including python, pip, compiliers and libraries needed # to build the python wheels we need and poetry. @@ -61,7 +61,7 @@ COPY --chown=simplified:simplified poetry.lock pyproject.toml /var/www/circulati # to work from which speeds up the final image build. RUN python3 -m venv env && \ SIMPLIFIED_ENVIRONMENT=/var/www/circulation/environment.sh && \ - echo "if [[ -f $SIMPLIFIED_ENVIRONMENT ]]; then source $SIMPLIFIED_ENVIRONMENT; fi" >> env/bin/activate && \ + echo "if [ -f $SIMPLIFIED_ENVIRONMENT ]; then source $SIMPLIFIED_ENVIRONMENT; fi" >> env/bin/activate && \ . env/bin/activate && \ pip install --upgrade pip && \ poetry install --only main,pg --sync && \ diff --git a/docker/ci/test_webapp.sh b/docker/ci/test_webapp.sh index 1fc6c906de..aa5680b1ce 100755 --- a/docker/ci/test_webapp.sh +++ b/docker/ci/test_webapp.sh @@ -17,7 +17,7 @@ check_service_status "$container" /etc/service/nginx check_service_status "$container" /etc/service/uwsgi # Wait for UWSGI to be ready to accept connections. -timeout 120s grep -q 'WSGI app .* ready in [0-9]* seconds' <(docker logs "$container" -f 2>&1) +timeout 240s grep -q 'WSGI app .* ready in [0-9]* seconds' <(docker logs "$container" -f 2>&1) # Make sure the web server is running. 
healthcheck=$(docker exec "$container" curl --write-out "%{http_code}" --silent --output /dev/null http://localhost/healthcheck.html) From cbfaab12e7281066be993183a2769711b2443d1b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 18 Oct 2023 17:12:31 +0000 Subject: [PATCH 125/262] Bump mypy from 1.6.0 to 1.6.1 (#1471) --- poetry.lock | 56 ++++++++++++++++++++++++++--------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index bdbf2105ae..c837f38bfe 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2309,38 +2309,38 @@ files = [ [[package]] name = "mypy" -version = "1.6.0" +version = "1.6.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:091f53ff88cb093dcc33c29eee522c087a438df65eb92acd371161c1f4380ff0"}, - {file = "mypy-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:eb7ff4007865833c470a601498ba30462b7374342580e2346bf7884557e40531"}, - {file = "mypy-1.6.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49499cf1e464f533fc45be54d20a6351a312f96ae7892d8e9f1708140e27ce41"}, - {file = "mypy-1.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:4c192445899c69f07874dabda7e931b0cc811ea055bf82c1ababf358b9b2a72c"}, - {file = "mypy-1.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:3df87094028e52766b0a59a3e46481bb98b27986ed6ded6a6cc35ecc75bb9182"}, - {file = "mypy-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3c8835a07b8442da900db47ccfda76c92c69c3a575872a5b764332c4bacb5a0a"}, - {file = "mypy-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:24f3de8b9e7021cd794ad9dfbf2e9fe3f069ff5e28cb57af6f873ffec1cb0425"}, - {file = "mypy-1.6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:856bad61ebc7d21dbc019b719e98303dc6256cec6dcc9ebb0b214b81d6901bd8"}, - {file = "mypy-1.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89513ddfda06b5c8ebd64f026d20a61ef264e89125dc82633f3c34eeb50e7d60"}, - {file = "mypy-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:9f8464ed410ada641c29f5de3e6716cbdd4f460b31cf755b2af52f2d5ea79ead"}, - {file = "mypy-1.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:971104bcb180e4fed0d7bd85504c9036346ab44b7416c75dd93b5c8c6bb7e28f"}, - {file = "mypy-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab98b8f6fdf669711f3abe83a745f67f50e3cbaea3998b90e8608d2b459fd566"}, - {file = "mypy-1.6.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a69db3018b87b3e6e9dd28970f983ea6c933800c9edf8c503c3135b3274d5ad"}, - {file = "mypy-1.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:dccd850a2e3863891871c9e16c54c742dba5470f5120ffed8152956e9e0a5e13"}, - {file = "mypy-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:f8598307150b5722854f035d2e70a1ad9cc3c72d392c34fffd8c66d888c90f17"}, - {file = "mypy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fea451a3125bf0bfe716e5d7ad4b92033c471e4b5b3e154c67525539d14dc15a"}, - {file = "mypy-1.6.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e28d7b221898c401494f3b77db3bac78a03ad0a0fff29a950317d87885c655d2"}, - {file = "mypy-1.6.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4b7a99275a61aa22256bab5839c35fe8a6887781862471df82afb4b445daae6"}, - {file = "mypy-1.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:7469545380dddce5719e3656b80bdfbb217cfe8dbb1438532d6abc754b828fed"}, - {file = "mypy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:7807a2a61e636af9ca247ba8494031fb060a0a744b9fee7de3a54bed8a753323"}, - {file = "mypy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d2dad072e01764823d4b2f06bc7365bb1d4b6c2f38c4d42fade3c8d45b0b4b67"}, - {file = "mypy-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b19006055dde8a5425baa5f3b57a19fa79df621606540493e5e893500148c72f"}, - {file = "mypy-1.6.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31eba8a7a71f0071f55227a8057468b8d2eb5bf578c8502c7f01abaec8141b2f"}, - {file = "mypy-1.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e0db37ac4ebb2fee7702767dfc1b773c7365731c22787cb99f507285014fcaf"}, - {file = "mypy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:c69051274762cccd13498b568ed2430f8d22baa4b179911ad0c1577d336ed849"}, - {file = "mypy-1.6.0-py3-none-any.whl", hash = "sha256:9e1589ca150a51d9d00bb839bfeca2f7a04f32cd62fad87a847bc0818e15d7dc"}, - {file = "mypy-1.6.0.tar.gz", hash = "sha256:4f3d27537abde1be6d5f2c96c29a454da333a2a271ae7d5bc7110e6d4b7beb3f"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, + {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, + {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, + {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, + {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, + {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, + {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, + {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, + {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, + {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, + {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, + {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, + {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, + {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, + {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, + {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, + {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, + {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, + {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, + {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, + {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, + {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, + {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, ] [package.dependencies] From 54fa9ef53973139c04328b4d35c5d0f716bbd2d5 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 19 Oct 2023 17:45:21 +0530 Subject: [PATCH 126/262] PP-598 Removed the google analytics provider (#1469) * Removed the google analytics provider * Data migration to nuke all google analytics provider data --- ...da_cleanup_google_anaytics_integrations.py | 47 ++++ api/admin/controller/analytics_services.py | 2 - api/google_analytics_provider.py | 156 ------------ .../controller/test_analytics_services.py | 156 ++++-------- tests/api/test_google_analytics_provider.py | 232 ------------------ tests/migration/test_20231019_0739d5558dda.py | 57 +++++ 6 files changed, 147 insertions(+), 503 deletions(-) create mode 100644 alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py delete mode 100644 api/google_analytics_provider.py delete mode 100644 tests/api/test_google_analytics_provider.py create mode 100644 tests/migration/test_20231019_0739d5558dda.py diff --git a/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py b/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py new file mode 100644 index 0000000000..13f071a200 --- /dev/null +++ b/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py @@ -0,0 +1,47 @@ +"""Cleanup google anaytics integrations + +Revision ID: 0739d5558dda +Revises: 21a65b8f391d +Create Date: 2023-10-19 05:23:00.694886+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "0739d5558dda" +down_revision = "21a65b8f391d" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + """Remove all references to google analytics providers from the DB""" + conn = op.get_bind() + rows = conn.execute( + "SELECT id from externalintegrations where goal='analytics' and protocol='api.google_analytics_provider';" + ).all() + analytics_ids = tuple(r[0] for r in rows) + + if len(analytics_ids): + conn.execute( + sa.text( + "DELETE from externalintegrations_libraries where externalintegration_id IN :id_list;" + ), + id_list=analytics_ids, + ) + conn.execute( + sa.text( + "DELETE from configurationsettings where external_integration_id IN :id_list" + ), + id_list=analytics_ids, + ) + conn.execute( + sa.text("DELETE from externalintegrations where id IN :id_list;"), + id_list=analytics_ids, + ) + + +def downgrade() -> None: + pass diff --git a/api/admin/controller/analytics_services.py b/api/admin/controller/analytics_services.py index 6025813506..6a9dec59a9 100644 --- a/api/admin/controller/analytics_services.py +++ b/api/admin/controller/analytics_services.py @@ -3,7 +3,6 @@ from api.admin.controller.settings import SettingsController from api.admin.problem_details import INCOMPLETE_CONFIGURATION, MISSING_ANALYTICS_NAME -from api.google_analytics_provider import GoogleAnalyticsProvider from api.s3_analytics_provider import S3AnalyticsProvider from core.local_analytics_provider import LocalAnalyticsProvider from core.model import ExternalIntegration @@ -15,7 +14,6 @@ class AnalyticsServicesController(SettingsController): def __init__(self, manager): super().__init__(manager) provider_apis = [ - GoogleAnalyticsProvider, LocalAnalyticsProvider, S3AnalyticsProvider, ] diff --git a/api/google_analytics_provider.py b/api/google_analytics_provider.py deleted file mode 100644 index f2586ae0fc..0000000000 --- a/api/google_analytics_provider.py +++ /dev/null @@ -1,156 +0,0 @@ -import re -import unicodedata -import urllib.parse -import uuid - -from flask_babel import lazy_gettext as _ - -from api.config import CannotLoadConfiguration -from core.model import ConfigurationSetting, ExternalIntegration, Session -from core.service.container import Services -from core.util.http import HTTP - - -class GoogleAnalyticsProvider: - NAME = _("Google Analytics") - DESCRIPTION = _("How to Configure a Google Analytics Integration") - INSTRUCTIONS = _( - "

In order to track usage statistics, you can configure the Palace Collection Manager "
-        + "to connect to Google Analytics. "
-        + "Create a Google Analytics account, "
-        + "or sign into your existing one. "
-        + "To capture data from the Palace Collection Manager in your Google Analytics account, "
-        + "you must set up a property in Google Analytics for Palace Collection Manager. In your Google Analytics "
-        + "account, on the administration page for the property, go to Custom Definitions > Custom Dimensions, "
-        + "and add the following dimensions, in this order: "
-        + "1. time, 2. identifier, 3. identifier_type, 4. title, 5. author, 6. fiction, 7. audience, "
-        + "8. target_age, 9. publisher, 10. language, 11. genre, 12. open_access, 13. distributor, "
-        + "14. medium, 15. library. "
-        + "Each dimension should have the scope set to 'Hit' and the 'Active' box checked. "
-        + "Then go to Tracking Info and get the tracking id for the property. Select your "
-        + "library from the dropdown below, and enter the tracking id into the form.

" - ) - - TRACKING_ID = "tracking_id" - DEFAULT_URL = "http://www.google-analytics.com/collect" - - SETTINGS = [ - { - "key": ExternalIntegration.URL, - "label": _("URL"), - "default": DEFAULT_URL, - "required": True, - "format": "url", - }, - ] - - LIBRARY_SETTINGS = [ - {"key": TRACKING_ID, "label": _("Tracking ID"), "required": True}, - ] - - def __init__(self, integration, services: Services, library=None): - _db = Session.object_session(integration) - if not library: - raise CannotLoadConfiguration( - "Google Analytics can't be configured without a library." - ) - url_setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.URL, integration - ) - self.url = url_setting.value or self.DEFAULT_URL - self.tracking_id = ConfigurationSetting.for_library_and_externalintegration( - _db, - self.TRACKING_ID, - library, - integration, - ).value - if not self.tracking_id: - raise CannotLoadConfiguration( - "Missing tracking id for library %s" % library.short_name - ) - - def collect_event(self, library, license_pool, event_type, time, **kwargs): - # Explicitly destroy any neighborhood information -- we don't - # want to send this to third-party sources. - kwargs.pop("neighborhood", None) - - client_id = uuid.uuid4() - fields = { - "v": 1, - "tid": self.tracking_id, - "cid": client_id, - "aip": 1, # anonymize IP - "ds": "Circulation Manager", - "t": "event", - "ec": "circulation", - "ea": event_type, - "cd1": time, - } - - if license_pool: - fields.update( - { - "cd2": license_pool.identifier.identifier, - "cd3": license_pool.identifier.type, - } - ) - - work = license_pool.work - edition = license_pool.presentation_edition - if work and edition: - fields.update( - { - "cd4": edition.title, - "cd5": edition.author, - "cd6": "fiction" if work.fiction else "nonfiction", - "cd7": work.audience, - "cd8": work.target_age_string, - "cd9": edition.publisher, - "cd10": edition.language, - "cd11": work.top_genre(), - "cd12": "true" if license_pool.open_access else "false", - } - ) - - # Backwards compatibility requires that new dimensions be - # added to the end of the list. For the sake of - # consistency, this code that sets values for those new - # dimensions runs after the original implementation. - fields.update({"cd13": license_pool.data_source.name}) - if work and edition: - fields.update({"cd14": edition.medium}) - if library: - fields.update({"cd15": library.short_name}) - - if license_pool and work and edition: - fields.update({"cd16": license_pool.collection.name}) - - # urlencode doesn't like unicode strings so we convert them to utf8 - fields = { - k: unicodedata.normalize("NFKD", str(v)).encode("utf8") - for k, v in list(fields.items()) - } - - params = re.sub(r"=None(&?)", r"=\1", urllib.parse.urlencode(fields)) - self.post(self.url, params) - - def post(self, url, params): - response = HTTP.post_with_timeout(url, params) - - -# The Analytics class looks for the name "Provider". 
-Provider = GoogleAnalyticsProvider diff --git a/tests/api/admin/controller/test_analytics_services.py b/tests/api/admin/controller/test_analytics_services.py index b7ffd79db6..c95f5424f0 100644 --- a/tests/api/admin/controller/test_analytics_services.py +++ b/tests/api/admin/controller/test_analytics_services.py @@ -15,7 +15,7 @@ NO_SUCH_LIBRARY, UNKNOWN_PROTOCOL, ) -from api.google_analytics_provider import GoogleAnalyticsProvider +from api.s3_analytics_provider import S3AnalyticsProvider from core.local_analytics_provider import LocalAnalyticsProvider from core.model import ( AdminRole, @@ -41,7 +41,7 @@ def test_analytics_services_get_with_one_default_service( assert local_analytics.get("protocol") == LocalAnalyticsProvider.__module__ protocols = response.get("protocols") - assert GoogleAnalyticsProvider.NAME in [p.get("label") for p in protocols] + assert S3AnalyticsProvider.NAME in [p.get("label") for p in protocols] assert "settings" in protocols[0] def test_analytics_services_get_with_one_service( @@ -56,48 +56,6 @@ def test_analytics_services_get_with_one_service( settings_ctrl_fixture.ctrl.db.session.delete(local_analytics_default) - ga_service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=GoogleAnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - ga_service.url = settings_ctrl_fixture.ctrl.db.fresh_str() - - with settings_ctrl_fixture.request_context_with_admin("/"): - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - [service] = response.get("analytics_services") - - assert ga_service.id == service.get("id") - assert ga_service.protocol == service.get("protocol") - assert ga_service.url == service.get("settings").get( - ExternalIntegration.URL - ) - - ga_service.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] - ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - GoogleAnalyticsProvider.TRACKING_ID, - settings_ctrl_fixture.ctrl.db.default_library(), - ga_service, - ).value = "trackingid" - with settings_ctrl_fixture.request_context_with_admin("/"): - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - [service] = response.get("analytics_services") - - [library] = service.get("libraries") - assert ( - settings_ctrl_fixture.ctrl.db.default_library().short_name - == library.get("short_name") - ) - assert "trackingid" == library.get(GoogleAnalyticsProvider.TRACKING_ID) - - settings_ctrl_fixture.ctrl.db.session.delete(ga_service) - local_service, ignore = create( settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, @@ -169,20 +127,18 @@ def test_analytics_services_post_errors( ) assert response.uri == MISSING_SERVICE.uri - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=GoogleAnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - name="name", + [local_analytics] = ( + settings_ctrl_fixture.ctrl.db.session.query(ExternalIntegration) + .filter(ExternalIntegration.goal == ExternalIntegration.ANALYTICS_GOAL) + .all() ) with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - assert isinstance(service.name, str) + assert isinstance(local_analytics.name, str) flask.request.form = ImmutableMultiDict( [ - ("name", service.name), - ("protocol", GoogleAnalyticsProvider.__module__), + ("name", local_analytics.name), + ("protocol", 
S3AnalyticsProvider.__module__), ("url", "http://test"), ] ) @@ -194,7 +150,7 @@ def test_analytics_services_post_errors( service, ignore = create( settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, - protocol=GoogleAnalyticsProvider.__module__, + protocol=S3AnalyticsProvider.__module__, goal=ExternalIntegration.ANALYTICS_GOAL, ) @@ -217,7 +173,7 @@ def test_analytics_services_post_errors( [ ("id", str(service.id)), ("name", "analytics name"), - ("protocol", GoogleAnalyticsProvider.__module__), + ("protocol", S3AnalyticsProvider.__module__), ("url", ""), ] ) @@ -230,7 +186,7 @@ def test_analytics_services_post_errors( flask.request.form = ImmutableMultiDict( [ ("id", str(service.id)), - ("protocol", GoogleAnalyticsProvider.__module__), + ("protocol", S3AnalyticsProvider.__module__), ("name", "some other analytics name"), (ExternalIntegration.URL, "http://test"), ("libraries", json.dumps([{"short_name": "not-a-library"}])), @@ -250,7 +206,7 @@ def test_analytics_services_post_errors( flask.request.form = ImmutableMultiDict( [ ("id", str(service.id)), - ("protocol", GoogleAnalyticsProvider.__module__), + ("protocol", S3AnalyticsProvider.__module__), ("name", "some other name"), (ExternalIntegration.URL, ""), ("libraries", json.dumps([{"short_name": library.short_name}])), @@ -286,12 +242,11 @@ def test_analytics_services_post_create( with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("name", "Google analytics name"), - ("protocol", GoogleAnalyticsProvider.__module__), - (ExternalIntegration.URL, "http://test"), + ("name", "S3 analytics name"), + ("protocol", S3AnalyticsProvider.__module__), ( - "libraries", - json.dumps([{"short_name": "L", "tracking_id": "trackingid"}]), + "location_source", + S3AnalyticsProvider.LOCATION_SOURCE_NEIGHBORHOOD, ), ] ) @@ -304,19 +259,15 @@ def test_analytics_services_post_create( settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=GoogleAnalyticsProvider.__module__, + protocol=S3AnalyticsProvider.__module__, ) assert isinstance(service, ExternalIntegration) assert service.id == int(response.get_data()) - assert GoogleAnalyticsProvider.__module__ == service.protocol - assert "http://test" == service.url - assert [library] == service.libraries + assert S3AnalyticsProvider.__module__ == service.protocol assert ( - "trackingid" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - GoogleAnalyticsProvider.TRACKING_ID, - library, + "neighborhood" + == ConfigurationSetting.for_externalintegration( + S3AnalyticsProvider.LOCATION_SOURCE, service, ).value ) @@ -349,34 +300,22 @@ def test_analytics_services_post_create( def test_analytics_services_post_edit( self, settings_ctrl_fixture: SettingsControllerFixture ): - l1 = settings_ctrl_fixture.ctrl.db.library( - name="Library 1", - short_name="L1", - ) - l2 = settings_ctrl_fixture.ctrl.db.library( - name="Library 2", - short_name="L2", - ) - - ga_service, ignore = create( + s3_service, ignore = create( settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, - protocol=GoogleAnalyticsProvider.__module__, + protocol=S3AnalyticsProvider.__module__, goal=ExternalIntegration.ANALYTICS_GOAL, ) - ga_service.url = "oldurl" - ga_service.libraries = [l1] with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(ga_service.id)), + ("id", str(s3_service.id)), ("name", 
"some other analytics name"), - ("protocol", GoogleAnalyticsProvider.__module__), - (ExternalIntegration.URL, "http://test"), + ("protocol", S3AnalyticsProvider.__module__), ( - "libraries", - json.dumps([{"short_name": "L2", "tracking_id": "l2id"}]), + S3AnalyticsProvider.LOCATION_SOURCE, + S3AnalyticsProvider.LOCATION_SOURCE_NEIGHBORHOOD, ), ] ) @@ -385,23 +324,20 @@ def test_analytics_services_post_edit( ) assert response.status_code == 200 - assert ga_service.id == int(response.get_data()) - assert GoogleAnalyticsProvider.__module__ == ga_service.protocol - assert "http://test" == ga_service.url - assert [l2] == ga_service.libraries + assert s3_service.id == int(response.get_data()) + assert s3_service.name == "some other analytics name" + assert S3AnalyticsProvider.__module__ == s3_service.protocol assert ( - "l2id" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - GoogleAnalyticsProvider.TRACKING_ID, - l2, - ga_service, + S3AnalyticsProvider.LOCATION_SOURCE_NEIGHBORHOOD + == ConfigurationSetting.for_externalintegration( + S3AnalyticsProvider.LOCATION_SOURCE, + s3_service, ).value ) def test_check_name_unique(self, settings_ctrl_fixture: SettingsControllerFixture): kwargs = dict( - protocol=GoogleAnalyticsProvider.__module__, + protocol=S3AnalyticsProvider.__module__, goal=ExternalIntegration.ANALYTICS_GOAL, ) existing_service, ignore = create( @@ -435,34 +371,28 @@ def test_check_name_unique(self, settings_ctrl_fixture: SettingsControllerFixtur def test_analytics_service_delete( self, settings_ctrl_fixture: SettingsControllerFixture ): - l1 = settings_ctrl_fixture.ctrl.db.library( - name="Library 1", - short_name="L1", - ) - ga_service, ignore = create( + service, ignore = create( settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, - protocol=GoogleAnalyticsProvider.__module__, + protocol=S3AnalyticsProvider.__module__, goal=ExternalIntegration.ANALYTICS_GOAL, ) - ga_service.url = "oldurl" - ga_service.libraries = [l1] with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) pytest.raises( AdminNotAuthorized, settings_ctrl_fixture.manager.admin_analytics_services_controller.process_delete, - ga_service.id, + service.id, ) settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) response = settings_ctrl_fixture.manager.admin_analytics_services_controller.process_delete( - ga_service.id + service.id ) assert response.status_code == 200 - service = get_one( - settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, id=ga_service.id + service1 = get_one( + settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, id=service.id ) - assert None == service + assert None == service1 diff --git a/tests/api/test_google_analytics_provider.py b/tests/api/test_google_analytics_provider.py deleted file mode 100644 index 26682ceb9e..0000000000 --- a/tests/api/test_google_analytics_provider.py +++ /dev/null @@ -1,232 +0,0 @@ -import unicodedata -import urllib.parse -from unittest.mock import MagicMock - -import pytest -from psycopg2.extras import NumericRange - -from api.config import CannotLoadConfiguration -from api.google_analytics_provider import GoogleAnalyticsProvider -from core.model import ( - CirculationEvent, - ConfigurationSetting, - DataSource, - EditionConstants, - ExternalIntegration, - LicensePool, - create, - get_one_or_create, -) -from core.util.datetime_helpers import utc_now -from tests.fixtures.database import 
DatabaseTransactionFixture - - -class MockGoogleAnalyticsProvider(GoogleAnalyticsProvider): - def post(self, url, params): - self.count = self.count + 1 if hasattr(self, "count") else 1 - self.url = url - self.params = params - - -class TestGoogleAnalyticsProvider: - def test_init(self, db: DatabaseTransactionFixture): - integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="api.google_analytics_provider", - ) - - with pytest.raises(CannotLoadConfiguration) as excinfo: - GoogleAnalyticsProvider(integration, MagicMock()) - assert "Google Analytics can't be configured without a library." in str( - excinfo.value - ) - - with pytest.raises(CannotLoadConfiguration) as excinfo: - GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) - assert ( - "Missing tracking id for library %s" % db.default_library().short_name - in str(excinfo.value) - ) - - ConfigurationSetting.for_library_and_externalintegration( - db.session, - GoogleAnalyticsProvider.TRACKING_ID, - db.default_library(), - integration, - ).value = "faketrackingid" - ga = GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) - assert GoogleAnalyticsProvider.DEFAULT_URL == ga.url - assert "faketrackingid" == ga.tracking_id - - integration.url = db.fresh_str() - ga = GoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) - assert integration.url == ga.url - assert "faketrackingid" == ga.tracking_id - - def test_collect_event_with_work(self, db: DatabaseTransactionFixture): - integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="api.google_analytics_provider", - ) - integration.url = db.fresh_str() - ConfigurationSetting.for_library_and_externalintegration( - db.session, - GoogleAnalyticsProvider.TRACKING_ID, - db.default_library(), - integration, - ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) - - work = db.work( - title="pi\u00F1ata", - authors="chlo\u00E9", - fiction=True, - audience="audience", - language="lang", - with_license_pool=True, - genre="Folklore", - with_open_access_download=True, - ) - work.presentation_edition.publisher = "publisher" - work.target_age = NumericRange(10, 15) - [lp] = work.license_pools - now = utc_now() - ga.collect_event( - db.default_library(), - lp, - CirculationEvent.DISTRIBUTOR_CHECKIN, - now, - neighborhood="Neighborhood will not be sent", - ) - - # Neighborhood information is not being sent -- that's for - # local consumption only. - assert "Neighborhood" not in ga.params - - # Let's take a look at what _is_ being sent. 
- params = urllib.parse.parse_qs(ga.params) - - assert 1 == ga.count - assert integration.url == ga.url - assert "faketrackingid" == params["tid"][0] - assert "event" == params["t"][0] - assert "circulation" == params["ec"][0] - assert CirculationEvent.DISTRIBUTOR_CHECKIN == params["ea"][0] - assert str(now) == params["cd1"][0] - assert lp.identifier.identifier == params["cd2"][0] - assert lp.identifier.type == params["cd3"][0] - assert unicodedata.normalize("NFKD", work.title) == params["cd4"][0] - assert unicodedata.normalize("NFKD", work.author) == params["cd5"][0] - assert "fiction" == params["cd6"][0] - assert "audience" == params["cd7"][0] - assert work.target_age_string == params["cd8"][0] - assert "publisher" == params["cd9"][0] - assert "lang" == params["cd10"][0] - assert "Folklore" == params["cd11"][0] - assert "true" == params["cd12"][0] - assert DataSource.GUTENBERG == params["cd13"][0] - assert EditionConstants.BOOK_MEDIUM == params["cd14"][0] - assert db.default_library().short_name == params["cd15"][0] - assert lp.collection.name == params["cd16"][0] - - def test_collect_event_without_work(self, db: DatabaseTransactionFixture): - integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="api.google_analytics_provider", - ) - integration.url = db.fresh_str() - ConfigurationSetting.for_library_and_externalintegration( - db.session, - GoogleAnalyticsProvider.TRACKING_ID, - db.default_library(), - integration, - ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) - - identifier = db.identifier() - source = DataSource.lookup(db.session, DataSource.GUTENBERG) - pool, is_new = get_one_or_create( - db.session, - LicensePool, - identifier=identifier, - data_source=source, - collection=db.default_collection(), - ) - - now = utc_now() - ga.collect_event( - db.default_library(), pool, CirculationEvent.DISTRIBUTOR_CHECKIN, now - ) - params = urllib.parse.parse_qs(ga.params) - - assert 1 == ga.count - assert integration.url == ga.url - assert "faketrackingid" == params["tid"][0] - assert "event" == params["t"][0] - assert "circulation" == params["ec"][0] - assert CirculationEvent.DISTRIBUTOR_CHECKIN == params["ea"][0] - assert str(now) == params["cd1"][0] - assert pool.identifier.identifier == params["cd2"][0] - assert pool.identifier.type == params["cd3"][0] - assert None == params.get("cd4") - assert None == params.get("cd5") - assert None == params.get("cd6") - assert None == params.get("cd7") - assert None == params.get("cd8") - assert None == params.get("cd9") - assert None == params.get("cd10") - assert None == params.get("cd11") - assert None == params.get("cd12") - assert [source.name] == params.get("cd13") - assert None == params.get("cd14") - assert [db.default_library().short_name] == params.get("cd15") - assert None == params.get("cd16") - - def test_collect_event_without_license_pool(self, db: DatabaseTransactionFixture): - integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="api.google_analytics_provider", - ) - integration.url = db.fresh_str() - ConfigurationSetting.for_library_and_externalintegration( - db.session, - GoogleAnalyticsProvider.TRACKING_ID, - db.default_library(), - integration, - ).value = "faketrackingid" - ga = MockGoogleAnalyticsProvider(integration, MagicMock(), db.default_library()) - - now = utc_now() - ga.collect_event(db.default_library(), None, 
CirculationEvent.NEW_PATRON, now) - params = urllib.parse.parse_qs(ga.params) - - assert 1 == ga.count - assert integration.url == ga.url - assert "faketrackingid" == params["tid"][0] - assert "event" == params["t"][0] - assert "circulation" == params["ec"][0] - assert CirculationEvent.NEW_PATRON == params["ea"][0] - assert str(now) == params["cd1"][0] - assert None == params.get("cd2") - assert None == params.get("cd3") - assert None == params.get("cd4") - assert None == params.get("cd5") - assert None == params.get("cd6") - assert None == params.get("cd7") - assert None == params.get("cd8") - assert None == params.get("cd9") - assert None == params.get("cd10") - assert None == params.get("cd11") - assert None == params.get("cd12") - assert None == params.get("cd13") - assert None == params.get("cd14") - assert [db.default_library().short_name] == params.get("cd15") - assert None == params.get("cd16") diff --git a/tests/migration/test_20231019_0739d5558dda.py b/tests/migration/test_20231019_0739d5558dda.py new file mode 100644 index 0000000000..4bbfbca57d --- /dev/null +++ b/tests/migration/test_20231019_0739d5558dda.py @@ -0,0 +1,57 @@ +from pytest_alembic import MigrationContext +from sqlalchemy.engine import Engine + +from tests.migration.conftest import ( + CreateConfigSetting, + CreateExternalIntegration, + CreateLibrary, +) + +MIGRATION_UID = "0739d5558dda" + + +def test_settings_deletion( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, + create_library: CreateLibrary, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_UID) + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as conn: + lib_id = create_library(conn, "Test") + ext_id = create_external_integration( + conn, + protocol="api.google_analytics_provider", + goal="analytics", + name="Google Analytics Test", + ) + key_id = create_config_setting( + conn, "tracking_id", "trackingid", ext_id, lib_id, associate_library=True + ) + + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as conn: + assert ( + conn.execute( + "SELECT id from externalintegrations where id=%s", ext_id + ).first() + is None + ) + assert ( + conn.execute( + "SELECT id from configurationsettings where external_integration_id=%s", + ext_id, + ).first() + is None + ) + assert ( + conn.execute( + "SELECT externalintegration_id from externalintegrations_libraries where externalintegration_id=%s", + ext_id, + ).first() + is None + ) From 1b835b08bd64390d35d13b25d96e733cd5645029 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 19 Oct 2023 20:52:10 -0300 Subject: [PATCH 127/262] Add some helper methods to load and save config settings (#1472) * Add some helper methods to load and save config settings. * Fix comment. 
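The diff below adds module-level `integration_settings_load` / `integration_settings_update` helpers and matching `settings_load` / `settings_update` (plus library-level) classmethods on `HasIntegrationConfiguration`. A minimal usage sketch, not part of this patch: `integration` is assumed to be an existing `IntegrationConfiguration` row, and `MySettings` is a hypothetical stand-in for whatever a provider returns from `settings_class()`.

from core.integration.base import (
    integration_settings_load,
    integration_settings_update,
)
from core.integration.settings import BaseSettings


class MySettings(BaseSettings):
    # Hypothetical settings model, used only for illustration.
    url: str = "http://example.com"


# Load the stored settings_dict without re-validating it; the values are
# assumed to have been validated when they were originally saved.
settings = integration_settings_load(MySettings, integration)

# Validate new values, merge them over the stored dict, and flag the JSONB
# column as modified so SQLAlchemy persists the change.
integration_settings_update(
    MySettings, integration, {"url": "http://example.org"}, merge=True
)

Implementations get the same behaviour through the new classmethods, e.g. `SomeProvider.settings_load(integration)`, instead of constructing the settings class by hand from `integration.settings_dict`.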
--- .../patron_auth_service_self_tests.py | 6 +- api/authenticator.py | 16 +-- api/circulation.py | 4 +- api/discovery/opds_registration.py | 2 +- api/overdrive.py | 2 +- core/integration/base.py | 117 +++++++++++++++++- core/model/library.py | 7 +- core/opds_import.py | 5 +- .../controller/test_discovery_services.py | 7 +- tests/api/admin/controller/test_library.py | 2 +- .../api/admin/controller/test_patron_auth.py | 8 +- tests/api/test_authenticator.py | 12 +- tests/api/test_axis.py | 16 ++- tests/core/integration/test_base.py | 65 ++++++++++ 14 files changed, 214 insertions(+), 55 deletions(-) create mode 100644 tests/core/integration/test_base.py diff --git a/api/admin/controller/patron_auth_service_self_tests.py b/api/admin/controller/patron_auth_service_self_tests.py index f3608406db..6fae2a3d62 100644 --- a/api/admin/controller/patron_auth_service_self_tests.py +++ b/api/admin/controller/patron_auth_service_self_tests.py @@ -132,10 +132,8 @@ def run_tests(self, integration: IntegrationConfiguration) -> Dict[str, Any]: ) protocol_class = self.get_protocol_class(integration) - settings = protocol_class.settings_class()(**integration.settings_dict) - library_settings = protocol_class.library_settings_class()( - **library_configuration.settings_dict - ) + settings = protocol_class.settings_load(integration) + library_settings = protocol_class.library_settings_load(library_configuration) value, _ = protocol_class.run_self_tests( self.db, diff --git a/api/authenticator.py b/api/authenticator.py index 4dd42f0079..d120358860 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -348,20 +348,8 @@ def register_provider( f"Implementation class {impl_cls} is not an AuthenticationProvider." ) try: - if not isinstance(integration.parent.settings_dict, dict): - raise CannotLoadConfiguration( - f"Settings for {impl_cls.__name__} authentication provider for " - f"library {self.library_short_name} are not a dictionary." - ) - if not isinstance(integration.settings_dict, dict): - raise CannotLoadConfiguration( - f"Library settings for {impl_cls.__name__} authentication provider for " - f"library {self.library_short_name} are not a dictionary." 
- ) - settings = impl_cls.settings_class()(**integration.parent.settings_dict) - library_settings = impl_cls.library_settings_class()( - **integration.settings_dict - ) + settings = impl_cls.settings_load(integration.parent) + library_settings = impl_cls.library_settings_load(integration) provider = impl_cls( self.library_id, # type: ignore[arg-type] integration.parent_id, # type: ignore[arg-type] diff --git a/api/circulation.py b/api/circulation.py index d35da11c2d..512d0928fa 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -635,7 +635,7 @@ def integration_configuration(self) -> IntegrationConfiguration: @property def settings(self) -> SettingsType: - return self.settings_class()(**self.integration_configuration().settings_dict) + return self.settings_load(self.integration_configuration()) def library_settings(self, library: Library | int) -> LibrarySettingsType | None: library_id = library.id if isinstance(library, Library) else library @@ -644,7 +644,7 @@ def library_settings(self, library: Library | int) -> LibrarySettingsType | None libconfig = self.integration_configuration().for_library(library_id=library_id) if libconfig is None: return None - config = self.library_settings_class()(**libconfig.settings_dict) + config = self.library_settings_load(libconfig) return config @abstractmethod diff --git a/api/discovery/opds_registration.py b/api/discovery/opds_registration.py index f5fc22555d..67be3278e1 100644 --- a/api/discovery/opds_registration.py +++ b/api/discovery/opds_registration.py @@ -129,7 +129,7 @@ def for_integration( if integration_obj is None: return None - settings = cls.settings_class().construct(**integration_obj.settings_dict) + settings = cls.settings_load(integration_obj) return cls(integration_obj, settings) @staticmethod diff --git a/api/overdrive.py b/api/overdrive.py index 30cf7e1825..8e8da7b8cc 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -398,7 +398,7 @@ def __init__(self, _db, collection): # from the parent (the main Overdrive account), except for the # library ID, which we already set. parent_integration = collection.parent.integration_configuration - parent_config = self.settings_class()(**parent_integration.settings_dict) + parent_config = self.settings_load(parent_integration) for key in OverdriveConstants.OVERDRIVE_CONFIGURATION_KEYS: parent_value = getattr(parent_config, key, None) setattr(self._configuration, key, parent_value) diff --git a/core/integration/base.py b/core/integration/base.py index 9be80b3f6c..2aea7fa127 100644 --- a/core/integration/base.py +++ b/core/integration/base.py @@ -1,12 +1,75 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, Generic, Type, TypeVar +from typing import TYPE_CHECKING, Any, Dict, Generic, Mapping, Protocol, Type, TypeVar from sqlalchemy.orm import Session +from sqlalchemy.orm.attributes import Mapped, flag_modified from core.integration.settings import BaseSettings +if TYPE_CHECKING: + from core.model import IntegrationConfiguration, IntegrationLibraryConfiguration + + +class IntegrationConfigurationProtocol(Protocol): + settings_dict: Mapped[Dict[str, Any]] + + +T = TypeVar("T", bound=BaseSettings) + + +def integration_settings_load( + settings_cls: Type[T], + integration: IntegrationConfigurationProtocol, +) -> T: + """ + Load the settings object for an integration from the database. + + These settings ARE NOT validated when loaded from the database. 
It is assumed that + the settings have already been validated when they were saved to the database. This + speeds up the loading of the settings from the database. + + :param settings_cls: The settings class that the settings should be loaded into. + :param integration: The integration to load the settings from. This should be a + SQLAlchemy model with a settings_dict JSONB column. + + :return: An instance of the settings class loaded with the settings from the database. + """ + settings_dict = integration.settings_dict + return settings_cls.construct(**settings_dict) + + +def integration_settings_update( + settings_cls: Type[BaseSettings], + integration: IntegrationConfigurationProtocol, + new_settings: BaseSettings | Mapping[str, Any], + merge: bool = False, +) -> None: + """ + Update the settings for an integration in the database. + + The settings are validated before being saved to the database, and SQLAlchemy is + notified that the settings_dict column has been modified. + + :param settings_cls: The settings class to use to validate the settings. + :param integration: The integration to update. This should be a SQLAlchemy model + with a settings_dict JSONB column. + :param new_settings: The new settings to update the integration with. This can either + be a BaseSettings object, or a dictionary of settings. + :param merge: If True, the new settings will be merged with the existing settings. With + the new settings taking precedence. If False, the new settings will replace the existing + settings. + """ + settings_dict = integration.settings_dict if merge else {} + new_settings_dict = ( + new_settings.dict() if isinstance(new_settings, BaseSettings) else new_settings + ) + settings_dict.update(new_settings_dict) + integration.settings_dict = settings_cls(**settings_dict).dict() + flag_modified(integration, "settings_dict") + + SettingsType = TypeVar("SettingsType", bound=BaseSettings, covariant=True) LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) @@ -30,6 +93,31 @@ def settings_class(cls) -> Type[SettingsType]: """Get the settings for this integration""" ... + @classmethod + def settings_load(cls, integration: IntegrationConfiguration) -> SettingsType: + """ + Load the settings object for this integration from the database. + + See the documentation for `integration_settings_load` for more details. + """ + return integration_settings_load(cls.settings_class(), integration) + + @classmethod + def settings_update( + cls, + integration: IntegrationConfiguration, + new_settings: BaseSettings | Mapping[str, Any], + merge: bool = False, + ) -> None: + """ + Update the settings for this integration in the database. + + See the documentation for `integration_settings_update` for more details. + """ + integration_settings_update( + cls.settings_class(), integration, new_settings, merge + ) + @classmethod def protocol_details(cls, db: Session) -> dict[str, Any]: """Add any additional details about this protocol to be @@ -51,6 +139,33 @@ def library_settings_class(cls) -> Type[LibrarySettingsType]: """Get the library settings for this integration""" ... + @classmethod + def library_settings_load( + cls, integration: IntegrationLibraryConfiguration + ) -> LibrarySettingsType: + """ + Load the library settings object for this integration from the database. + + See the documentation for `integration_settings_load` for more details. 
+ """ + return integration_settings_load(cls.library_settings_class(), integration) + + @classmethod + def library_settings_update( + cls, + integration: IntegrationLibraryConfiguration, + new_settings: BaseSettings | Mapping[str, Any], + merge: bool = False, + ) -> None: + """ + Update the settings for this library integration in the database. + + See the documentation for `integration_settings_update` for more details. + """ + integration_settings_update( + cls.library_settings_class(), integration, new_settings, merge + ) + class HasChildIntegrationConfiguration(HasIntegrationConfiguration[SettingsType], ABC): @classmethod diff --git a/core/model/library.py b/core/model/library.py index b29b66d428..e56c9eb9d1 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -29,13 +29,13 @@ ) from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, Query, relationship -from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.orm.session import Session from sqlalchemy.sql.functions import func from core.configuration.library import LibrarySettings from core.entrypoint import EntryPoint from core.facets import FacetConstants +from core.integration.base import integration_settings_load, integration_settings_update from core.model import Base, get_one from core.model.announcements import Announcement from core.model.customlist import customlist_sharedlibrary @@ -300,15 +300,14 @@ def settings(self) -> LibrarySettings: "settings_dict for library %s is not a dict: %r" % (self.short_name, self.settings_dict) ) - settings = LibrarySettings.construct(**self.settings_dict) + settings = integration_settings_load(LibrarySettings, self) self._settings = settings return settings def update_settings(self, new_settings: LibrarySettings) -> None: """Update the settings for this integration""" self._settings = None - self.settings_dict.update(new_settings.dict()) - flag_modified(self, "settings_dict") + integration_settings_update(LibrarySettings, self, new_settings, merge=True) @property def all_collections(self) -> Generator[Collection, None, None]: diff --git a/core/opds_import.py b/core/opds_import.py index 7253e99f32..2cab8fbf96 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -41,6 +41,7 @@ from core.config import IntegrationException from core.connection_config import ConnectionSetting from core.coverage import CoverageFailure +from core.integration.base import integration_settings_load from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -327,8 +328,8 @@ def __init__( # we don't, e.g. accidentally get our IP banned from # gutenberg.org. 
self.http_get = http_get or Representation.cautious_http_get - self.settings = self.settings_class().construct( - **collection.integration_configuration.settings_dict + self.settings = integration_settings_load( + self.settings_class(), collection.integration_configuration ) @classmethod diff --git a/tests/api/admin/controller/test_discovery_services.py b/tests/api/admin/controller/test_discovery_services.py index 23e3b6e2c8..5c5ed4d071 100644 --- a/tests/api/admin/controller/test_discovery_services.py +++ b/tests/api/admin/controller/test_discovery_services.py @@ -200,8 +200,7 @@ def test_discovery_services_post_create( assert service.id == int(response.get_data(as_text=True)) assert self.protocol == service.protocol assert ( - OpdsRegistrationService.settings_class()(**service.settings_dict).url - == "http://registry.url" + OpdsRegistrationService.settings_load(service).url == "http://registry.url" ) def test_discovery_services_post_edit( @@ -232,9 +231,7 @@ def test_discovery_services_post_edit( assert self.protocol == discovery_service.protocol assert ( "http://new_registry_url.com" - == OpdsRegistrationService.settings_class()( - **discovery_service.settings_dict - ).url + == OpdsRegistrationService.settings_load(discovery_service).url ) def test_check_name_unique( diff --git a/tests/api/admin/controller/test_library.py b/tests/api/admin/controller/test_library.py index 736a4135b9..096d1703d4 100644 --- a/tests/api/admin/controller/test_library.py +++ b/tests/api/admin/controller/test_library.py @@ -210,7 +210,7 @@ def test_libraries_get_with_multiple_libraries( FacetConstants.ORDER_TITLE, FacetConstants.ORDER_AUTHOR, ] == settings_dict.get("facets_enabled_order") - assert ["French"] == settings_dict.get("large_collection_languages") + assert ["fre"] == settings_dict.get("large_collection_languages") def test_libraries_post_errors(self, settings_ctrl_fixture): with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): diff --git a/tests/api/admin/controller/test_patron_auth.py b/tests/api/admin/controller/test_patron_auth.py index c7c2844e5f..b1925c04b4 100644 --- a/tests/api/admin/controller/test_patron_auth.py +++ b/tests/api/admin/controller/test_patron_auth.py @@ -499,9 +499,7 @@ def test_patron_auth_services_post_create( assert auth_service is not None assert auth_service.id == int(response.response[0]) # type: ignore[index] assert SimpleAuthenticationProvider.__module__ == auth_service.protocol - settings = SimpleAuthenticationProvider.settings_class()( - **auth_service.settings_dict - ) + settings = SimpleAuthenticationProvider.settings_load(auth_service) assert settings.test_identifier == "user" assert settings.test_password == "pass" [library_config] = auth_service.library_configurations @@ -591,9 +589,7 @@ def test_patron_auth_services_post_edit( assert auth_service.id == int(response.response[0]) # type: ignore[index] assert SimpleAuthenticationProvider.__module__ == auth_service.protocol assert isinstance(auth_service.settings_dict, dict) - settings = SimpleAuthenticationProvider.settings_class()( - **auth_service.settings_dict - ) + settings = SimpleAuthenticationProvider.settings_load(auth_service) assert settings.test_identifier == "user" assert settings.test_password == "pass" [library_config] = auth_service.library_configurations diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index e76c4a3f4b..ca0ef63e27 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -682,9 +682,6 @@ def 
test_configuration_exception_during_from_config_stored( # propagated. # Create an integration destined to raise CannotLoadConfiguration.. library = db.default_library() - misconfigured, _ = create_millenium_auth_integration(library, url="millenium") - - # ... and one destined to raise ImportError. unknown, _ = create_auth_integration_configuration("unknown protocol", library) auth = LibraryAuthenticator.from_config(db.session, db.default_library()) @@ -692,14 +689,9 @@ def test_configuration_exception_during_from_config_stored( # The LibraryAuthenticator exists but has no AuthenticationProviders. assert auth.basic_auth_provider is None - # Both integrations have left their trace in - # initialization_exceptions. - not_configured = auth.initialization_exceptions[(misconfigured.id, library.id)] - assert isinstance(not_configured, CannotLoadConfiguration) - assert "Could not instantiate MilleniumPatronAPI" in str(not_configured) - + # The integration has left its trace in initialization_exceptions. not_found = auth.initialization_exceptions[(unknown.id, library.id)] - assert isinstance(not_configured, CannotLoadConfiguration) + assert isinstance(not_found, CannotLoadConfiguration) assert "Unable to load implementation for external integration" in str( not_found ) diff --git a/tests/api/test_axis.py b/tests/api/test_axis.py index 0b7b90edd3..b713f0ead3 100644 --- a/tests/api/test_axis.py +++ b/tests/api/test_axis.py @@ -21,6 +21,7 @@ Axis360CirculationMonitor, Axis360FulfillmentInfo, Axis360FulfillmentInfoResponseParser, + Axis360Settings, AxisCollectionReaper, AxisNowManifest, BibliographicParser, @@ -35,6 +36,7 @@ from api.web_publication_manifest import FindawayManifest, SpineItem from core.analytics import Analytics from core.coverage import CoverageFailure +from core.integration.base import integration_settings_update from core.metadata_layer import ( CirculationData, ContributorData, @@ -790,16 +792,22 @@ def test_integration_settings_url( self, setting, setting_value, is_valid, expected, axis360: Axis360Fixture ): config = axis360.collection.integration_configuration - settings = config.settings_dict.copy() - settings[setting] = setting_value - config.settings_dict = settings + config.settings_dict[setting] = setting_value if is_valid: + integration_settings_update( + Axis360Settings, config, {setting: setting_value}, merge=True + ) api = MockAxis360API(axis360.db.session, axis360.collection) assert api.base_url == expected else: pytest.raises( - ProblemError, MockAxis360API, axis360.db.session, axis360.collection + ProblemError, + integration_settings_update, + Axis360Settings, + config, + {setting: setting_value}, + merge=True, ) diff --git a/tests/core/integration/test_base.py b/tests/core/integration/test_base.py new file mode 100644 index 0000000000..bd4cb4ec3a --- /dev/null +++ b/tests/core/integration/test_base.py @@ -0,0 +1,65 @@ +from functools import partial +from unittest.mock import MagicMock, Mock, patch + +import pytest + +from core.integration.base import integration_settings_load, integration_settings_update +from core.integration.settings import BaseSettings +from core.model import IntegrationConfiguration + + +class BaseFixture: + def __init__(self, mock_flag_modified: Mock): + self.mock_settings_cls = MagicMock(spec=BaseSettings) + self.mock_integration = MagicMock(spec=IntegrationConfiguration) + self.mock_integration.settings_dict = {"test": "test", "number": 123} + self.mock_flag_modified = mock_flag_modified + + self.load = partial( + integration_settings_load, 
self.mock_settings_cls, self.mock_integration + ) + self.update = partial( + integration_settings_update, self.mock_settings_cls, self.mock_integration + ) + + +@pytest.fixture +def base_fixture(): + with patch("core.integration.base.flag_modified") as mock_flag_modified: + yield BaseFixture(mock_flag_modified=mock_flag_modified) + + +def test_integration_settings_load(base_fixture: BaseFixture) -> None: + return_value: BaseSettings = base_fixture.load() + base_fixture.mock_settings_cls.construct.assert_called_once_with( + test="test", number=123 + ) + assert return_value is base_fixture.mock_settings_cls.construct.return_value + + +def test_integration_settings_update_no_merge(base_fixture: BaseFixture) -> None: + base_fixture.update({"test": "foo"}, merge=False) + base_fixture.mock_settings_cls.assert_called_with(test="foo") + base_fixture.mock_flag_modified.assert_called_once_with( + base_fixture.mock_integration, "settings_dict" + ) + + +def test_integration_settings_update_merge(base_fixture: BaseFixture) -> None: + base_fixture.update({"test": "foo"}, merge=True) + base_fixture.mock_settings_cls.assert_called_with(test="foo", number=123) + base_fixture.mock_flag_modified.assert_called_once_with( + base_fixture.mock_integration, "settings_dict" + ) + + +def test_integration_settings_update_basesettings(base_fixture: BaseFixture) -> None: + mock_base = MagicMock(spec=BaseSettings) + mock_base.dict.return_value = {"test": "foo", "bool": True} + + base_fixture.update(mock_base, merge=True) + mock_base.dict.assert_called_once_with() + base_fixture.mock_settings_cls.assert_called_with(test="foo", number=123, bool=True) + base_fixture.mock_flag_modified.assert_called_once_with( + base_fixture.mock_integration, "settings_dict" + ) From 71ef82aed87a0825d58a45eab969a632fc84137a Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 20 Oct 2023 09:33:34 -0300 Subject: [PATCH 128/262] Remove circulation api post processor (PP-501) (#1470) As a follow up to #1442, this removes the CirculationFulfillmentPostProcessor class. All the logic has been moved into the OPDSAPI and OPDS2API classes. CirculationFulfillmentPostProcessor were only used to work around OPDS importers not having an API class. Now that we have a proper API class there, there is no reason to keep them around. --- api/circulation.py | 155 ++------------------- api/opds2.py | 92 ------------- api/saml/wayfless.py | 130 ------------------ core/opds2_import.py | 86 +++++++++++- core/opds_import.py | 72 +++++++++- core/saml/wayfless.py | 5 + tests/api/saml/test_controller.py | 13 -- tests/api/test_circulationapi.py | 4 +- tests/api/test_opds2.py | 195 --------------------------- tests/core/test_opds2_import.py | 217 +++++++++++++++++++++++++++++- tests/core/test_opds_import.py | 2 +- 11 files changed, 391 insertions(+), 580 deletions(-) delete mode 100644 api/opds2.py delete mode 100644 api/saml/wayfless.py delete mode 100644 tests/api/test_opds2.py diff --git a/api/circulation.py b/api/circulation.py index 512d0928fa..92c8179b52 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -742,40 +742,6 @@ def patron_activity( ... -class CirculationFulfillmentPostProcessor(ABC): - """Generic interface for a circulation fulfillment post-processor, - i.e., a class adding additional logic to the fulfillment process AFTER the circulation item has been fulfilled. - - It takes a FulfillmentInfo object and transforms it according to its internal logic. - """ - - @abstractmethod - def __init__(self, collection: Collection) -> None: - ... 
- - @abstractmethod - def fulfill( - self, - patron: Patron, - pin: str, - licensepool: LicensePool, - delivery_mechanism: Optional[LicensePoolDeliveryMechanism], - fulfillment: FulfillmentInfo, - ) -> FulfillmentInfo: - """Post-process an existing FulfillmentInfo object. - - :param patron: Library's patron - :param pin: The patron's alleged password - :param licensepool: Circulation item's license pool - :param delivery_mechanism: Object containing a delivery mechanism selected by the patron in the UI - (e.g., PDF, EPUB, etc.) - :param fulfillment: Existing FulfillmentInfo describing the circulation item - ready to be downloaded by the patron - :return: Processed FulfillmentInfo object - """ - ... - - class CirculationAPI: """Implement basic circulation logic and abstract away the details between different circulation APIs behind generic operations like @@ -790,49 +756,36 @@ def __init__( registry: Optional[ IntegrationRegistry[BaseCirculationAPI[BaseSettings, BaseSettings]] ] = None, - fulfillment_post_processors_map: Optional[ - Dict[int, Type[CirculationFulfillmentPostProcessor]] - ] = None, ): """Constructor. - :param db: A database session (probably a scoped session, which is - why we can't derive it from `library`). + :param db: A database session (probably a scoped session, which is + why we can't derive it from `library`). - :param library: A Library object representing the library - whose circulation we're concerned with. + :param library: A Library object representing the library + whose circulation we're concerned with. - :param analytics: An Analytics object for tracking - circulation events. + :param analytics: An Analytics object for tracking + circulation events. - :param registry: An IntegrationRegistry mapping Collection protocols to - API classes that should be instantiated to deal with these - protocols. The default registry will work fine unless you're a - unit test. + :param registry: An IntegrationRegistry mapping Collection protocols to + API classes that should be instantiated to deal with these + protocols. The default registry will work fine unless you're a + unit test. - Since instantiating these API classes may result in API - calls, we only instantiate one CirculationAPI per library, - and keep them around as long as possible. - - :param fulfillment_post_processors_map: A dictionary mapping Collection protocols - to fulfillment post-processors. + Since instantiating these API classes may result in API + calls, we only instantiate one CirculationAPI per library, + and keep them around as long as possible. """ self._db = db self.library_id = library.id self.analytics = analytics self.initialization_exceptions = dict() self.registry = registry or LicenseProvidersRegistry() - fulfillment_post_processors_mapping = ( - fulfillment_post_processors_map - or self.default_fulfillment_post_processors_map - ) # Each of the Library's relevant Collections is going to be # associated with an API object. 
self.api_for_collection = {} - self._fulfillment_post_processors_map: Dict[ - int, CirculationFulfillmentPostProcessor - ] = {} # When we get our view of a patron's loans and holds, we need # to include loans whose license pools are in one of the @@ -858,59 +811,18 @@ def __init__( if isinstance(api, PatronActivityCirculationAPI): self.collection_ids_for_sync.append(collection.id) - if ( - collection.protocol in fulfillment_post_processors_mapping - and collection.id - ): - fulfillment_post_processor = fulfillment_post_processors_mapping[ - collection.protocol - ](collection) - self._fulfillment_post_processors_map[ - collection.id - ] = fulfillment_post_processor - @property def library(self) -> Optional[Library]: if self.library_id is None: return None return Library.by_id(self._db, self.library_id) - @property - def default_fulfillment_post_processors_map( - self, - ) -> Dict[str, Type[CirculationFulfillmentPostProcessor]]: - """Return a default mapping of protocols to fulfillment post-processors. - - :return: Mapping of protocols to fulfillment post-processors. - """ - from api.opds2 import TokenAuthenticationFulfillmentProcessor - from api.saml.wayfless import SAMLWAYFlessAcquisitionLinkProcessor - from core.opds2_import import OPDS2Importer - from core.opds_import import OPDSImporter - - return { - OPDSImporter.NAME: SAMLWAYFlessAcquisitionLinkProcessor, - OPDS2Importer.NAME: TokenAuthenticationFulfillmentProcessor, - } - def api_for_license_pool( self, licensepool: LicensePool ) -> Optional[BaseCirculationAPI[BaseSettings, BaseSettings]]: """Find the API to use for the given license pool.""" return self.api_for_collection.get(licensepool.collection.id) - def fulfillment_post_processor_for_license_pool( - self, licensepool: LicensePool - ) -> Optional[CirculationFulfillmentPostProcessor]: - """Return a fulfillment post-processor to use for the given license pool. - - :param licensepool: License pool for which we need to get a fulfillment post-processor - :return: Fulfillment post-processor to use for the given license pool - """ - if not licensepool.collection.id: - return None - return self._fulfillment_post_processors_map.get(licensepool.collection.id) - def can_revoke_hold(self, licensepool: LicensePool, hold: Hold) -> bool: """Some circulation providers allow you to cancel a hold when the book is reserved to you. Others only allow you to cancel @@ -991,43 +903,6 @@ def _collect_checkout_event(self, patron: Patron, licensepool: LicensePool) -> N patron, licensepool, CirculationEvent.CM_CHECKOUT, include_neighborhood=True ) - def _post_process_fulfillment( - self, - patron: Patron, - pin: str, - licensepool: LicensePool, - delivery_mechanism: Optional[LicensePoolDeliveryMechanism], - fulfillment: FulfillmentInfo, - ) -> FulfillmentInfo: - """Post-process an existing FulfillmentInfo object. - - :param patron: Library's patron - :param pin: The patron's alleged password - :param licensepool: Circulation item's license pool - :param delivery_mechanism: Object containing a delivery mechanism selected by the patron in the UI - (e.g., PDF, EPUB, etc.) 
- :param fulfillment: Existing FulfillmentInfo describing the circulation item - ready to be downloaded by the patron - :return: Processed FulfillmentInfo object - """ - processed_fulfillment = fulfillment - fulfillment_post_processor = self.fulfillment_post_processor_for_license_pool( - licensepool - ) - - self.log.debug(f"Fulfillment post-processor: {fulfillment_post_processor}") - - if fulfillment_post_processor: - processed_fulfillment = fulfillment_post_processor.fulfill( - patron, pin, licensepool, delivery_mechanism, fulfillment - ) - - self.log.debug( - f"Fulfillment {fulfillment} has been processed into {processed_fulfillment}" - ) - - return processed_fulfillment - def borrow( self, patron: Patron, @@ -1471,10 +1346,6 @@ def fulfill( if not fulfillment or not (fulfillment.content_link or fulfillment.content): raise NoAcceptableFormat() - fulfillment = self._post_process_fulfillment( - patron, pin, licensepool, delivery_mechanism, fulfillment - ) - # Send out an analytics event to record the fact that # a fulfillment was initiated through the circulation # manager. diff --git a/api/opds2.py b/api/opds2.py deleted file mode 100644 index 123b53f1f6..0000000000 --- a/api/opds2.py +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from uritemplate import URITemplate - -from api.circulation import CirculationFulfillmentPostProcessor, FulfillmentInfo -from api.circulation_exceptions import CannotFulfill -from core.model import ConfigurationSetting, DataSource, ExternalIntegration -from core.model.licensing import LicensePoolDeliveryMechanism -from core.problem_details import INVALID_CREDENTIALS -from core.util.http import HTTP -from core.util.log import LoggerMixin -from core.util.problem_detail import ProblemDetail - -if TYPE_CHECKING: - from core.model import LicensePool, Patron - - -class TokenAuthenticationFulfillmentProcessor( - CirculationFulfillmentPostProcessor, LoggerMixin -): - """In case a feed has a token auth endpoint and the content_link requires an authentication token - Then we must fetch the required authentication token from the token_auth endpoint and - expand the templated url with the received token. - The content link should also be a redirect and not a proxy download""" - - def __init__(self, collection) -> None: - pass - - def fulfill( - self, - patron: Patron, - pin: str, - licensepool: LicensePool, - delivery_mechanism: LicensePoolDeliveryMechanism | None, - fulfillment: FulfillmentInfo, - ) -> FulfillmentInfo: - if not fulfillment.content_link: - return fulfillment - - templated = URITemplate(fulfillment.content_link) - if "authentication_token" not in templated.variable_names: - return fulfillment - - # TODO: This needs to be refactored to use IntegrationConfiguration, - # but it has been temporarily rolled back, since the IntegrationConfiguration - # code caused problems fulfilling TOKEN_AUTH books in production. - # This should be fixed as part of the work PP-313 to fully remove - # ExternalIntegrations from our collections code. 
- token_auth = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, licensepool.collection.external_integration - ) - if not token_auth or token_auth.value is None: - return fulfillment - - token = self.get_authentication_token( - patron, licensepool.data_source, token_auth.value - ) - if isinstance(token, ProblemDetail): - raise CannotFulfill() - - fulfillment.content_link = templated.expand(authentication_token=token) - fulfillment.content_link_redirect = True - return fulfillment - - @classmethod - def get_authentication_token( - cls, patron: Patron, datasource: DataSource, token_auth_url: str - ) -> ProblemDetail | str: - """Get the authentication token for a patron""" - log = cls.logger() - - patron_id = patron.identifier_to_remote_service(datasource) - url = URITemplate(token_auth_url).expand(patron_id=patron_id) - response = HTTP.get_with_timeout(url) - if response.status_code != 200: - log.error( - f"Could not authenticate the patron (authorization identifier: '{patron.authorization_identifier}' " - f"external identifier: '{patron_id}'): {str(response.content)}" - ) - return INVALID_CREDENTIALS - - # The response should be the JWT token, not wrapped in any format like JSON - token = response.text - if not token: - log.error( - f"Could not authenticate the patron({patron_id}): {str(response.content)}" - ) - return INVALID_CREDENTIALS - - return token diff --git a/api/saml/wayfless.py b/api/saml/wayfless.py deleted file mode 100644 index 24967679cd..0000000000 --- a/api/saml/wayfless.py +++ /dev/null @@ -1,130 +0,0 @@ -import logging -import urllib -from typing import Optional - -import sqlalchemy -from sqlalchemy.orm import Session - -from api.circulation import CirculationFulfillmentPostProcessor, FulfillmentInfo -from api.saml.credential import SAMLCredentialManager -from core.exceptions import BaseError -from core.model import Collection, get_one -from core.model.configuration import ExternalIntegration, HasExternalIntegration -from core.saml.wayfless import SAMLWAYFlessConstants - - -class SAMLWAYFlessFulfillmentError(BaseError): - pass - - -class SAMLWAYFlessAcquisitionLinkProcessor( - CirculationFulfillmentPostProcessor, HasExternalIntegration -): - """Interface indicating that the collection implementing it has templated links. - - Example of templated links may be a WAYFless acquisition link. - A WAYFless URL, is specific to an institution with associated users and to a web-based service or resource. - It enables a user from an institution to gain federated SAML access to the service or resource in a way - that bypasses the "Where Are You From?" (WAYF) page or Discovery Service step in - SAML based authentication and access protocols. - """ - - _wayfless_url_template: Optional[str] - - def __init__(self, collection: Collection) -> None: - """Initialize a new instance of WAYFlessAcquisitionLinkProcessor class. 
- - :param collection: Circulation collection - """ - if not isinstance(collection, Collection): - raise ValueError( - f"Argument 'collection' must be an instance {Collection} class" - ) - if not collection.external_integration_id: - raise ValueError( - f"Collection {collection} does not have an external integration" - ) - - external: ExternalIntegration = collection.external_integration - self._wayfless_url_template: Optional[ - str - ] = collection.integration_configuration.settings_dict.get( - SAMLWAYFlessConstants.WAYFLESS_URL_TEMPLATE_KEY - ) - - self._external_integration_id: Optional[ - int - ] = collection.external_integration_id - self._logger: logging.Logger = logging.getLogger(__name__) - self._saml_credential_manager: SAMLCredentialManager = SAMLCredentialManager() - - def external_integration( - self, db: sqlalchemy.orm.session.Session - ) -> ExternalIntegration: - """Return an ExternalIntegration object associated with the collection with a WAYFless url. - - :param db: SQLAlchemy session - :return: ExternalIntegration object - """ - ext = get_one(db, ExternalIntegration, id=self._external_integration_id) - if ext is None: - raise ValueError( - f"External Integration not found: {self._external_integration_id}" - ) - return ext - - def fulfill( - self, patron, pin, licensepool, delivery_mechanism, fulfillment: FulfillmentInfo - ) -> FulfillmentInfo: - self._logger.debug( - f"WAYFless acquisition link template: {self._wayfless_url_template}" - ) - - if self._wayfless_url_template: - db = Session.object_session(patron) - saml_credential = self._saml_credential_manager.lookup_saml_token_by_patron( - db, patron - ) - - self._logger.debug(f"SAML credentials: {saml_credential}") - - if not saml_credential: - raise SAMLWAYFlessFulfillmentError( - f"There are no existing SAML credentials for patron {patron}" - ) - - saml_subject = self._saml_credential_manager.extract_saml_token( - saml_credential - ) - - self._logger.debug(f"SAML subject: {saml_subject}") - - if not saml_subject.idp: - raise SAMLWAYFlessFulfillmentError( - f"SAML subject {saml_subject} does not contain an IdP's entityID" - ) - - acquisition_link = self._wayfless_url_template.replace( - SAMLWAYFlessConstants.IDP_PLACEHOLDER, - urllib.parse.quote(saml_subject.idp, safe=""), - ) - if fulfillment.content_link is None: - self._logger.warning( - f"Fulfillment {fulfillment} has no content link, unable to transform it" - ) - content_link = "" - else: - content_link = fulfillment.content_link - - acquisition_link = acquisition_link.replace( - SAMLWAYFlessConstants.ACQUISITION_LINK_PLACEHOLDER, - urllib.parse.quote(content_link, safe=""), - ) - - self._logger.debug( - f"Old acquisition link {fulfillment.content_link} has been transformed to {acquisition_link}" - ) - - fulfillment.content_link = acquisition_link - - return fulfillment diff --git a/core/opds2_import.py b/core/opds2_import.py index 251860c3d9..3b2947651b 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -20,6 +20,7 @@ import webpub_manifest_parser.opds2.ast as opds2_ast from flask_babel import lazy_gettext as _ from sqlalchemy.orm import Session +from uritemplate import URITemplate from webpub_manifest_parser.core import ManifestParserFactory, ManifestParserResult from webpub_manifest_parser.core.analyzer import NodeFinder from webpub_manifest_parser.core.ast import Link, Manifestlike @@ -30,6 +31,8 @@ ) from webpub_manifest_parser.utils import encode, first_or_default +from api.circulation import FulfillmentInfo +from api.circulation_exceptions 
import CannotFulfill from core.coverage import CoverageFailure from core.integration.settings import ( ConfigurationFormItem, @@ -48,14 +51,17 @@ from core.model import ( Collection, Contributor, + DataSource, DeliveryMechanism, Edition, ExternalIntegration, Hyperlink, Identifier, LicensePool, + LicensePoolDeliveryMechanism, LinkRelations, MediaTypes, + Patron, Representation, RightsStatus, Subject, @@ -69,7 +75,7 @@ OPDSImporterSettings, OPDSImportMonitor, ) -from core.util.http import BadResponseException +from core.util.http import HTTP, BadResponseException from core.util.opds_writer import OPDSFeed if TYPE_CHECKING: @@ -186,6 +192,84 @@ def label(cls) -> str: def description(cls) -> str: return "Import books from a publicly-accessible OPDS 2.0 feed." + def __init__(self, _db: Session, collection: Collection): + super().__init__(_db, collection) + # TODO: This needs to be refactored to use IntegrationConfiguration, + # but it has been temporarily rolled back, since the IntegrationConfiguration + # code caused problems fulfilling TOKEN_AUTH books in production. + # This should be fixed as part of the work PP-313 to fully remove + # ExternalIntegrations from our collections code. + token_auth_configuration = ConfigurationSetting.for_externalintegration( + ExternalIntegration.TOKEN_AUTH, collection.external_integration + ) + self.token_auth_configuration = ( + token_auth_configuration.value if token_auth_configuration else None + ) + + @classmethod + def get_authentication_token( + cls, patron: Patron, datasource: DataSource, token_auth_url: str + ) -> str: + """Get the authentication token for a patron""" + log = cls.logger() + + patron_id = patron.identifier_to_remote_service(datasource) + url = URITemplate(token_auth_url).expand(patron_id=patron_id) + response = HTTP.get_with_timeout(url) + if response.status_code != 200: + log.error( + f"Could not authenticate the patron (authorization identifier: '{patron.authorization_identifier}' " + f"external identifier: '{patron_id}'): {str(response.content)}" + ) + raise CannotFulfill() + + # The response should be the JWT token, not wrapped in any format like JSON + token = response.text + if not token: + log.error( + f"Could not authenticate the patron({patron_id}): {str(response.content)}" + ) + raise CannotFulfill() + + return token + + def fulfill_token_auth( + self, patron: Patron, licensepool: LicensePool, fulfillment: FulfillmentInfo + ) -> FulfillmentInfo: + if not fulfillment.content_link: + self.log.warning( + "No content link found in fulfillment, unable to fulfill via OPDS2 token auth." + ) + return fulfillment + + templated = URITemplate(fulfillment.content_link) + if "authentication_token" not in templated.variable_names: + self.log.warning( + "No authentication_token variable found in content_link, unable to fulfill via OPDS2 token auth." 
+ ) + return fulfillment + + token = self.get_authentication_token( + patron, licensepool.data_source, self.token_auth_configuration + ) + fulfillment.content_link = templated.expand(authentication_token=token) + fulfillment.content_link_redirect = True + return fulfillment + + def fulfill( + self, + patron: Patron, + pin: str, + licensepool: LicensePool, + delivery_mechanism: LicensePoolDeliveryMechanism, + ) -> FulfillmentInfo: + fufillment_info = super().fulfill(patron, pin, licensepool, delivery_mechanism) + if self.token_auth_configuration: + fufillment_info = self.fulfill_token_auth( + patron, licensepool, fufillment_info + ) + return fufillment_info + class OPDS2Importer(BaseOPDSImporter[OPDS2ImporterSettings]): """Imports editions and license pools from an OPDS 2.0 feed.""" diff --git a/core/opds_import.py b/core/opds_import.py index 2cab8fbf96..49dd713fe5 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -2,6 +2,7 @@ import logging import traceback +import urllib from abc import ABC, abstractmethod from collections import defaultdict from datetime import datetime @@ -36,6 +37,7 @@ from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo from api.circulation_exceptions import CurrentlyAvailable, FormatNotAvailable, NotOnHold +from api.saml.credential import SAMLCredentialManager from api.selftest import HasCollectionSelfTests from core.classifier import Classifier from core.config import IntegrationException @@ -79,7 +81,11 @@ from core.model.configuration import HasExternalIntegration from core.model.formats import FormatPrioritiesSettings from core.monitor import CollectionMonitor -from core.saml.wayfless import SAMLWAYFlessSetttings +from core.saml.wayfless import ( + SAMLWAYFlessConstants, + SAMLWAYFlessFulfillmentError, + SAMLWAYFlessSetttings, +) from core.selftest import SelfTestResult from core.util.datetime_helpers import datetime_utc, to_utc, utc_now from core.util.http import HTTP, BadResponseException @@ -202,6 +208,11 @@ class OPDSImporterLibrarySettings(BaseSettings): class BaseOPDSAPI( BaseCirculationAPI[OPDSImporterSettings, OPDSImporterLibrarySettings], ABC ): + def __init__(self, _db: Session, collection: Collection): + super().__init__(_db, collection) + self.saml_wayfless_url_template = self.settings.saml_wayfless_url_template + self.saml_credential_manager = SAMLCredentialManager() + def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: # All the CM side accounting for this loan is handled by CirculationAPI # since we don't have any remote API we need to call this method is @@ -229,6 +240,56 @@ def update_availability(self, licensepool: LicensePool) -> None: # to know, so we don't need to do anything. 
pass + def fulfill_saml_wayfless( + self, template: str, patron: Patron, fulfillment: FulfillmentInfo + ) -> FulfillmentInfo: + self.log.debug(f"WAYFless acquisition link template: {template}") + + db = Session.object_session(patron) + saml_credential = self.saml_credential_manager.lookup_saml_token_by_patron( + db, patron + ) + + self.log.debug(f"SAML credentials: {saml_credential}") + + if not saml_credential: + raise SAMLWAYFlessFulfillmentError( + f"There are no existing SAML credentials for patron {patron}" + ) + + saml_subject = self.saml_credential_manager.extract_saml_token(saml_credential) + + self.log.debug(f"SAML subject: {saml_subject}") + + if not saml_subject.idp: + raise SAMLWAYFlessFulfillmentError( + f"SAML subject {saml_subject} does not contain an IdP's entityID" + ) + + acquisition_link = template.replace( + SAMLWAYFlessConstants.IDP_PLACEHOLDER, + urllib.parse.quote(saml_subject.idp, safe=""), + ) + if fulfillment.content_link is None: + self.log.warning( + f"Fulfillment {fulfillment} has no content link, unable to transform it" + ) + content_link = "" + else: + content_link = fulfillment.content_link + + acquisition_link = acquisition_link.replace( + SAMLWAYFlessConstants.ACQUISITION_LINK_PLACEHOLDER, + urllib.parse.quote(content_link, safe=""), + ) + + self.log.debug( + f"Old acquisition link {fulfillment.content_link} has been transformed to {acquisition_link}" + ) + + fulfillment.content_link = acquisition_link + return fulfillment + def fulfill( self, patron: Patron, @@ -262,7 +323,7 @@ def fulfill( content_link = rep.public_url media_type = rep.media_type - return FulfillmentInfo( + fulfillment_info = FulfillmentInfo( licensepool.collection, licensepool.data_source.name, identifier_type=licensepool.identifier.type, @@ -273,6 +334,13 @@ def fulfill( content_expires=None, ) + if self.saml_wayfless_url_template: + fulfillment_info = self.fulfill_saml_wayfless( + self.saml_wayfless_url_template, patron, fulfillment_info + ) + + return fulfillment_info + def checkout( self, patron: Patron, diff --git a/core/saml/wayfless.py b/core/saml/wayfless.py index 1396f4d946..5a6e655dd3 100644 --- a/core/saml/wayfless.py +++ b/core/saml/wayfless.py @@ -2,6 +2,7 @@ from flask_babel import lazy_gettext as _ +from core.exceptions import BaseError from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -49,3 +50,7 @@ class SAMLWAYFlessSetttings(BaseSettings): required=False, ), ) + + +class SAMLWAYFlessFulfillmentError(BaseError): + pass diff --git a/tests/api/saml/test_controller.py b/tests/api/saml/test_controller.py index f74207c3b4..639457cdc0 100644 --- a/tests/api/saml/test_controller.py +++ b/tests/api/saml/test_controller.py @@ -22,13 +22,11 @@ SAMLUIInfo, ) from api.saml.provider import SAML_INVALID_SUBJECT, SAMLWebSSOAuthenticationProvider -from api.saml.wayfless import SAMLWAYFlessAcquisitionLinkProcessor from core.model import Credential, Library from core.model.integration import IntegrationConfiguration from core.util.problem_detail import ProblemDetail from tests.api.saml import saml_strings from tests.fixtures.api_controller import ControllerFixture -from tests.fixtures.database import DatabaseTransactionFixture SERVICE_PROVIDER = SAMLServiceProviderMetadata( saml_strings.SP_ENTITY_ID, @@ -467,14 +465,3 @@ def test_saml_authentication_callback( controller_fixture.db.session, finish_authentication_result, ) - - -class TestSAMLWAYFlessAcquisitionLinkProcessor: - def test_fulfill_no_wayless_template_url(self, db: DatabaseTransactionFixture): - 
processor = SAMLWAYFlessAcquisitionLinkProcessor(db.default_collection()) - assert processor._wayfless_url_template == None - - mocked_fulfillment = MagicMock() - # As long as there is no template url, no actions should take place - response = processor.fulfill(None, None, None, None, mocked_fulfillment) - assert response == mocked_fulfillment diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index 216b92370c..2150cefa64 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -1,6 +1,7 @@ """Test the CirculationAPI.""" import datetime from datetime import timedelta +from typing import cast from unittest.mock import MagicMock import flask @@ -18,6 +19,7 @@ LoanInfo, ) from api.circulation_exceptions import * +from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.integration.goals import Goals from core.integration.registry import IntegrationRegistry @@ -1086,7 +1088,7 @@ def collect_event(self, library, licensepool, name, neighborhood): lp1 = circulation_api.db.licensepool(edition=None) lp2 = circulation_api.db.licensepool(edition=None) - api = CirculationAPI(circulation_api.db.session, l1, analytics) # type: ignore[arg-type] + api = CirculationAPI(circulation_api.db.session, l1, cast(Analytics, analytics)) def assert_event(inp, outp): # Assert that passing `inp` into the mock _collect_event diff --git a/tests/api/test_opds2.py b/tests/api/test_opds2.py deleted file mode 100644 index 74cd8868ee..0000000000 --- a/tests/api/test_opds2.py +++ /dev/null @@ -1,195 +0,0 @@ -import io -from unittest.mock import MagicMock, patch - -import pytest -from requests import Response -from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory -from werkzeug import Response as wkResponse - -from api.circulation import FulfillmentInfo -from api.circulation_exceptions import CannotFulfill -from api.controller import CirculationManager -from api.opds2 import TokenAuthenticationFulfillmentProcessor -from core.model.collection import Collection -from core.model.configuration import ConfigurationSetting, ExternalIntegration -from core.model.datasource import DataSource -from core.model.patron import Loan -from core.opds2_import import OPDS2Importer, RWPMManifestParser -from core.problem_details import INVALID_CREDENTIALS -from tests.fixtures.api_controller import ControllerFixture -from tests.fixtures.database import DatabaseTransactionFixture -from tests.fixtures.opds2_files import OPDS2FilesFixture - - -class TestTokenAuthenticationFulfillmentProcessor: - @patch("api.opds2.HTTP") - def test_fulfill(self, mock_http, db: DatabaseTransactionFixture): - patron = db.patron() - collection: Collection = db.collection( - protocol=ExternalIntegration.OPDS2_IMPORT - ) - work = db.work(with_license_pool=True, collection=collection) - integration: ExternalIntegration = collection.create_external_integration( - ExternalIntegration.OPDS2_IMPORT - ) - setting: ConfigurationSetting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, integration - ) - setting.value = "http://example.org/token?userName={patron_id}" - - ff_info = FulfillmentInfo( - collection, - "datasource", - "proquest", - "11234", - "http://example.org/11234/fulfill?authToken={authentication_token}", - None, - None, - None, - ) - - resp = Response() - resp.status_code = 200 - resp.raw = io.BytesIO(b"plaintext-auth-token") - mock_http.get_with_timeout.return_value = resp - - processor = 
TokenAuthenticationFulfillmentProcessor(collection) - ff_info = processor.fulfill(patron, "", work.license_pools[0], None, ff_info) - - patron_id = patron.identifier_to_remote_service( - work.license_pools[0].data_source - ) - - assert mock_http.get_with_timeout.call_count == 1 - assert ( - mock_http.get_with_timeout.call_args[0][0] - == f"http://example.org/token?userName={patron_id}" - ) - - assert ( - ff_info.content_link - == "http://example.org/11234/fulfill?authToken=plaintext-auth-token" - ) - assert ff_info.content_link_redirect == True - - # Alternative templating - ff_info.content_link = "http://example.org/11234/fulfill{?authentication_token}" - ff_info = processor.fulfill(patron, "", work.license_pools[0], None, ff_info) - - assert ( - ff_info.content_link - == "http://example.org/11234/fulfill?authentication_token=plaintext-auth-token" - ) - - ## Test error case - # Reset the content link - ff_info.content_link = ( - "http://example.org/11234/fulfill?authToken={authentication_token}" - ) - # non-200 response - resp = Response() - resp.status_code = 400 - mock_http.reset_mock() - mock_http.get_with_timeout.return_value = resp - with pytest.raises(CannotFulfill): - processor.fulfill(patron, "", work.license_pools[0], None, ff_info) - - ## Pass through cases - # No templating in the url - ff_info.content_link = ( - "http://example.org/11234/fulfill?authToken=authentication_token" - ) - ff_info.content_link_redirect = False - ff_info = processor.fulfill(patron, "", work.license_pools[0], None, ff_info) - assert ff_info.content_link_redirect == False - - # No token endpoint config - ff_info.content_link = ( - "http://example.org/11234/fulfill?authToken={authentication_token}" - ) - setting.value = None - ff_info = processor.fulfill(patron, "", work.license_pools[0], None, ff_info) - assert ff_info.content_link_redirect == False - - @patch("api.opds2.HTTP") - def test_get_authentication_token(self, mock_http, db: DatabaseTransactionFixture): - resp = Response() - resp.status_code = 200 - resp.raw = io.BytesIO(b"plaintext-auth-token") - mock_http.get_with_timeout.return_value = resp - patron = db.patron() - datasource = DataSource.lookup(db.session, "test", autocreate=True) - token = TokenAuthenticationFulfillmentProcessor.get_authentication_token( - patron, datasource, "http://example.org/token" - ) - - assert token == "plaintext-auth-token" - assert mock_http.get_with_timeout.call_count == 1 - - @patch("api.opds2.HTTP") - def test_get_authentication_token_errors( - self, mock_http, db: DatabaseTransactionFixture - ): - resp = Response() - resp.status_code = 400 - mock_http.get_with_timeout.return_value = resp - datasource = DataSource.lookup(db.session, "test", autocreate=True) - token = TokenAuthenticationFulfillmentProcessor.get_authentication_token( - db.patron(), datasource, "http://example.org/token" - ) - - assert token == INVALID_CREDENTIALS - - -class TestOPDS2WithTokens: - def test_opds2_with_authentication_tokens( - self, - controller_fixture: ControllerFixture, - opds2_files_fixture: OPDS2FilesFixture, - ): - """Test the end to end workflow from importing the feed to a fulfill""" - collection = controller_fixture.db.collection( - protocol=ExternalIntegration.OPDS2_IMPORT, - data_source_name=DataSource.PROQUEST, - ) - controller_fixture.db.default_library().collections.append(collection) - # Import the test feed first - importer: OPDS2Importer = OPDS2Importer( - controller_fixture.db.session, - collection, - RWPMManifestParser(OPDS2FeedParserFactory()), - ) - with 
opds2_files_fixture.sample_fd("auth_token_feed.json") as fp: - editions, pools, works, failures = importer.import_from_feed(fp.read()) - - work = works[0] - identifier = work.presentation_edition.primary_identifier - - manager = CirculationManager(controller_fixture.db.session, MagicMock()) - patron = controller_fixture.db.patron() - - # Borrow the book from the library - with controller_fixture.request_context_with_library("/") as ctx: - ctx.request.patron = patron - manager.loans.borrow(identifier.type, identifier.identifier) - - loans = controller_fixture.db.session.query(Loan).filter(Loan.patron == patron) - assert loans.count() == 1 - - loan = loans.first() - assert isinstance(loan, Loan) - mechanism_id = loan.license_pool.delivery_mechanisms[0].delivery_mechanism.id - manager.loans.authenticated_patron_from_request = lambda: patron - - # Fulfill (Download) the book, should redirect to an authenticated URL - with controller_fixture.request_context_with_library("/") as ctx, patch.object( - TokenAuthenticationFulfillmentProcessor, "get_authentication_token" - ) as mock_auth: - ctx.request.patron = patron - mock_auth.return_value = "plaintext-token" - assert isinstance(loan.license_pool.id, int) - response = manager.loans.fulfill(loan.license_pool.id, mechanism_id) - - assert isinstance(response, wkResponse) - assert response.status_code == 302 - assert "authToken=plaintext-token" in response.location diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index 185afaed78..567e57e310 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -1,9 +1,14 @@ import datetime -from typing import List, Union +from typing import Generator, List, Union +from unittest.mock import MagicMock, patch import pytest +from _pytest.logging import LogCaptureFixture +from requests import Response from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory +from api.circulation import CirculationAPI, FulfillmentInfo +from api.circulation_exceptions import CannotFulfill from core.model import ( ConfigurationSetting, Contribution, @@ -13,13 +18,16 @@ Edition, EditionConstants, ExternalIntegration, + Library, LicensePool, + LicensePoolDeliveryMechanism, + Loan, MediaTypes, Work, ) from core.model.collection import Collection from core.model.constants import IdentifierType -from core.opds2_import import OPDS2Importer, RWPMManifestParser +from core.opds2_import import OPDS2API, OPDS2Importer, RWPMManifestParser from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.opds2_files import OPDS2FilesFixture @@ -79,6 +87,7 @@ class TestOPDS2ImporterFixture: collection: Collection data_source: DataSource importer: OPDS2Importer + library: Library @pytest.fixture @@ -87,7 +96,9 @@ def opds2_importer_fixture( ) -> TestOPDS2ImporterFixture: data = TestOPDS2ImporterFixture() data.transaction = db - data.collection = db.default_collection() + data.collection = db.collection(protocol=OPDS2API.label()) + data.library = db.default_library() + data.library.collections.append(data.collection) data.data_source = DataSource.lookup( db.session, "OPDS 2.0 Data Source", autocreate=True ) @@ -447,3 +458,203 @@ def test_auth_token_feed( # Did the token endpoint get stored correctly? 
assert setting.value == "http://example.org/auth?userName={patron_id}" + + +class Opds2ApiFixture: + def __init__(self, db: DatabaseTransactionFixture, mock_http: MagicMock): + self.patron = db.patron() + self.collection: Collection = db.collection( + protocol=ExternalIntegration.OPDS2_IMPORT + ) + self.integration = self.collection.create_external_integration( + ExternalIntegration.OPDS2_IMPORT + ) + self.setting = ConfigurationSetting.for_externalintegration( + ExternalIntegration.TOKEN_AUTH, self.integration + ) + self.setting.value = "http://example.org/token?userName={patron_id}" + + self.mock_response = MagicMock(spec=Response) + self.mock_response.status_code = 200 + self.mock_response.text = "plaintext-auth-token" + + self.mock_http = mock_http + self.mock_http.get_with_timeout.return_value = self.mock_response + + self.data_source = DataSource.lookup(db.session, "test", autocreate=True) + + self.pool = MagicMock(spec=LicensePool) + self.mechanism = MagicMock(spec=LicensePoolDeliveryMechanism) + self.pool.delivery_mechanisms = [self.mechanism] + self.pool.data_source = self.data_source + self.mechanism.resource.representation.public_url = ( + "http://example.org/11234/fulfill?authToken={authentication_token}" + ) + + self.api = OPDS2API(db.session, self.collection) + + def fulfill(self) -> FulfillmentInfo: + return self.api.fulfill(self.patron, "", self.pool, self.mechanism) + + +@pytest.fixture +def opds2_api_fixture( + db: DatabaseTransactionFixture, +) -> Generator[Opds2ApiFixture, None, None]: + with patch("core.opds2_import.HTTP") as mock_http: + fixture = Opds2ApiFixture(db, mock_http) + yield fixture + + +class TestOpds2Api: + def test_opds2_with_authentication_tokens( + self, + db: DatabaseTransactionFixture, + opds2_importer_fixture: TestOPDS2ImporterFixture, + opds2_files_fixture: OPDS2FilesFixture, + ): + """Test the end to end workflow from importing the feed to a fulfill""" + content = opds2_files_fixture.sample_text("auth_token_feed.json") + ( + imported_editions, + pools, + works, + failures, + ) = opds2_importer_fixture.importer.import_from_feed(content) + + work = works[0] + + api = CirculationAPI(db.session, db.default_library()) + patron = db.patron() + + # Borrow the book from the library + api.borrow(patron, "pin", work.license_pools[0], MagicMock(), None) + + loans = db.session.query(Loan).filter(Loan.patron == patron) + assert loans.count() == 1 + + loan = loans.first() + assert isinstance(loan, Loan) + + epub_mechanism = None + for mechanism in loan.license_pool.delivery_mechanisms: + if mechanism.delivery_mechanism.content_type == "application/epub+zip": + epub_mechanism = mechanism + break + + assert epub_mechanism is not None + + # Fulfill (Download) the book, should redirect to an authenticated URL + with patch.object(OPDS2API, "get_authentication_token") as mock_auth: + mock_auth.return_value = "plaintext-token" + fulfillment = api.fulfill( + patron, "pin", work.license_pools[0], epub_mechanism + ) + + assert ( + fulfillment.content_link + == "http://example.org//getDrmFreeFile.action?documentId=1543720&mediaType=epub&authToken=plaintext-token" + ) + assert fulfillment.content_type == "application/epub+zip" + assert fulfillment.content is None + assert fulfillment.content_expires is None + assert fulfillment.content_link_redirect is True + + def test_token_fulfill(self, opds2_api_fixture: Opds2ApiFixture): + ff_info = opds2_api_fixture.fulfill() + + patron_id = opds2_api_fixture.patron.identifier_to_remote_service( + opds2_api_fixture.data_source + ) + 
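The assertions just below verify the two URI-template expansions that the new OPDS2API code performs. As a minimal, self-contained sketch (not part of the patch itself; the URLs and token value are copied from these tests, and "12345" is a made-up patron identifier), the expansion with the uritemplate package looks like this:

    # Sketch of the URI-template expansion relied on by
    # OPDS2API.get_authentication_token and fulfill_token_auth.
    # Example URLs and the token value are taken from the tests in this patch;
    # the patron id is illustrative only.
    from uritemplate import URITemplate

    token_endpoint = URITemplate("http://example.org/auth?userName={patron_id}")
    token_url = token_endpoint.expand(patron_id="12345")
    # -> http://example.org/auth?userName=12345

    content_link = URITemplate(
        "http://example.org/11234/fulfill?authToken={authentication_token}"
    )
    # The code above checks for this variable before attempting expansion.
    assert "authentication_token" in content_link.variable_names
    fulfill_url = content_link.expand(authentication_token="plaintext-auth-token")
    # -> http://example.org/11234/fulfill?authToken=plaintext-auth-token

After expansion, fulfill_token_auth sets content_link_redirect = True so the client is redirected to the authenticated URL rather than having the circulation manager proxy the download.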
+ assert opds2_api_fixture.mock_http.get_with_timeout.call_count == 1 + assert ( + opds2_api_fixture.mock_http.get_with_timeout.call_args[0][0] + == f"http://example.org/token?userName={patron_id}" + ) + + assert ( + ff_info.content_link + == "http://example.org/11234/fulfill?authToken=plaintext-auth-token" + ) + assert ff_info.content_link_redirect is True + + def test_token_fulfill_alternate_template(self, opds2_api_fixture: Opds2ApiFixture): + # Alternative templating + opds2_api_fixture.mechanism.resource.representation.public_url = ( + "http://example.org/11234/fulfill{?authentication_token}" + ) + ff_info = opds2_api_fixture.fulfill() + + assert ( + ff_info.content_link + == "http://example.org/11234/fulfill?authentication_token=plaintext-auth-token" + ) + + def test_token_fulfill_400_response(self, opds2_api_fixture: Opds2ApiFixture): + # non-200 response + opds2_api_fixture.mock_response.status_code = 400 + with pytest.raises(CannotFulfill): + opds2_api_fixture.fulfill() + + def test_token_fulfill_no_template(self, opds2_api_fixture: Opds2ApiFixture): + # No templating in the url + opds2_api_fixture.mechanism.resource.representation.public_url = ( + "http://example.org/11234/fulfill" + ) + ff_info = opds2_api_fixture.fulfill() + assert ff_info.content_link_redirect is False + assert ( + ff_info.content_link + == opds2_api_fixture.mechanism.resource.representation.public_url + ) + + def test_token_fulfill_no_content_link( + self, opds2_api_fixture: Opds2ApiFixture, caplog: LogCaptureFixture + ): + # No content_link on the fulfillment info coming into the function + mock = MagicMock(spec=FulfillmentInfo) + mock.content_link = None + response = opds2_api_fixture.api.fulfill_token_auth( + opds2_api_fixture.patron, opds2_api_fixture.pool, mock + ) + assert response is mock + assert ( + "No content link found in fulfillment, unable to fulfill via OPDS2 token auth" + in caplog.text + ) + + def test_token_fulfill_no_endpoint_config(self, opds2_api_fixture: Opds2ApiFixture): + # No token endpoint config + opds2_api_fixture.api.token_auth_configuration = None + mock = MagicMock() + opds2_api_fixture.api.fulfill_token_auth = mock + opds2_api_fixture.fulfill() + # we never call the token auth function + assert mock.call_count == 0 + + def test_get_authentication_token(self, opds2_api_fixture: Opds2ApiFixture): + token = OPDS2API.get_authentication_token( + opds2_api_fixture.patron, opds2_api_fixture.data_source, "" + ) + + assert token == "plaintext-auth-token" + assert opds2_api_fixture.mock_http.get_with_timeout.call_count == 1 + + def test_get_authentication_token_400_response( + self, opds2_api_fixture: Opds2ApiFixture + ): + opds2_api_fixture.mock_response.status_code = 400 + with pytest.raises(CannotFulfill): + OPDS2API.get_authentication_token( + opds2_api_fixture.patron, opds2_api_fixture.data_source, "" + ) + + def test_get_authentication_token_bad_response( + self, opds2_api_fixture: Opds2ApiFixture + ): + opds2_api_fixture.mock_response.text = None + with pytest.raises(CannotFulfill): + OPDS2API.get_authentication_token( + opds2_api_fixture.patron, opds2_api_fixture.data_source, "" + ) diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 2045207b41..e0451bc128 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -18,7 +18,6 @@ SAMLNameIDFormat, SAMLSubject, ) -from api.saml.wayfless import SAMLWAYFlessFulfillmentError from core.config import IntegrationException from core.coverage import CoverageFailure from 
core.metadata_layer import CirculationData, LinkData, Metadata @@ -43,6 +42,7 @@ WorkCoverageRecord, ) from core.opds_import import OPDSAPI, OPDSImporter, OPDSImportMonitor, OPDSXMLParser +from core.saml.wayfless import SAMLWAYFlessFulfillmentError from core.util import first_or_default from core.util.datetime_helpers import datetime_utc from core.util.http import BadResponseException From 813eeb12a9c2d19e98d620eb262d05aedd680d45 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 20 Oct 2023 09:35:36 -0300 Subject: [PATCH 129/262] =?UTF-8?q?Remove=20odilo=20integration=20?= =?UTF-8?q?=F0=9F=94=A5=20(PP-605)=20(#1473)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Remove odilo integration. * Remove crontab entry --- api/integration/registry/license_providers.py | 2 - api/odilo.py | 1225 ----------------- bin/odilo_monitor_recent | 13 - core/model/configuration.py | 3 - core/model/constants.py | 4 - core/model/datasource.py | 1 - docker/services/cron/cron.d/circulation | 4 - tests/api/conftest.py | 1 - tests/api/files/odilo/checkin_ok.json | 8 - .../files/odilo/checkout_acsm_epub_ok.json | 10 - .../api/files/odilo/checkout_acsm_pdf_ok.json | 10 - .../odilo/checkout_ebook_streaming_ok.json | 10 - .../files/odilo/error_checkout_not_found.json | 8 - .../api/files/odilo/error_data_not_found.json | 7 - .../odilo/error_hold_already_in_hold.json | 8 - .../api/files/odilo/error_hold_not_found.json | 8 - .../files/odilo/error_patron_not_found.json | 8 - .../api/files/odilo/fulfill_ok_acsm_epub.acsm | 29 - .../api/files/odilo/fulfill_ok_acsm_pdf.acsm | 29 - tests/api/files/odilo/odilo_availability.json | 11 - .../odilo/odilo_availability_inactive.json | 10 - tests/api/files/odilo/odilo_metadata.json | 36 - .../files/odilo/odilo_metadata_inactive.json | 35 - tests/api/files/odilo/patron_checkouts.json | 22 - tests/api/files/odilo/patron_holds.json | 92 -- tests/api/files/odilo/place_hold_ok.json | 9 - .../api/files/odilo/record_availability.json | 11 - tests/api/files/odilo/records_metadata.json | 289 ---- tests/api/files/odilo/release_hold_ok.json | 8 - tests/api/mockapi/odilo.py | 88 -- tests/api/test_monitor.py | 2 +- tests/api/test_odilo.py | 973 ------------- tests/fixtures/api_odilo_files.py | 16 - 33 files changed, 1 insertion(+), 2989 deletions(-) delete mode 100644 api/odilo.py delete mode 100755 bin/odilo_monitor_recent delete mode 100644 tests/api/files/odilo/checkin_ok.json delete mode 100644 tests/api/files/odilo/checkout_acsm_epub_ok.json delete mode 100644 tests/api/files/odilo/checkout_acsm_pdf_ok.json delete mode 100644 tests/api/files/odilo/checkout_ebook_streaming_ok.json delete mode 100644 tests/api/files/odilo/error_checkout_not_found.json delete mode 100644 tests/api/files/odilo/error_data_not_found.json delete mode 100644 tests/api/files/odilo/error_hold_already_in_hold.json delete mode 100644 tests/api/files/odilo/error_hold_not_found.json delete mode 100644 tests/api/files/odilo/error_patron_not_found.json delete mode 100644 tests/api/files/odilo/fulfill_ok_acsm_epub.acsm delete mode 100644 tests/api/files/odilo/fulfill_ok_acsm_pdf.acsm delete mode 100644 tests/api/files/odilo/odilo_availability.json delete mode 100644 tests/api/files/odilo/odilo_availability_inactive.json delete mode 100644 tests/api/files/odilo/odilo_metadata.json delete mode 100644 tests/api/files/odilo/odilo_metadata_inactive.json delete mode 100644 tests/api/files/odilo/patron_checkouts.json delete mode 100644 tests/api/files/odilo/patron_holds.json 
delete mode 100644 tests/api/files/odilo/place_hold_ok.json delete mode 100644 tests/api/files/odilo/record_availability.json delete mode 100644 tests/api/files/odilo/records_metadata.json delete mode 100644 tests/api/files/odilo/release_hold_ok.json delete mode 100644 tests/api/mockapi/odilo.py delete mode 100644 tests/api/test_odilo.py delete mode 100644 tests/fixtures/api_odilo_files.py diff --git a/api/integration/registry/license_providers.py b/api/integration/registry/license_providers.py index fe9ac4e7dc..134ec494e1 100644 --- a/api/integration/registry/license_providers.py +++ b/api/integration/registry/license_providers.py @@ -19,7 +19,6 @@ def __init__(self) -> None: from api.axis import Axis360API from api.bibliotheca import BibliothecaAPI from api.enki import EnkiAPI - from api.odilo import OdiloAPI from api.odl import ODLAPI from api.odl2 import ODL2API from api.opds_for_distributors import OPDSForDistributorsAPI @@ -28,7 +27,6 @@ def __init__(self) -> None: from core.opds_import import OPDSAPI self.register(OverdriveAPI, canonical=OverdriveAPI.label()) - self.register(OdiloAPI, canonical=OdiloAPI.label()) self.register(BibliothecaAPI, canonical=BibliothecaAPI.label()) self.register(Axis360API, canonical=Axis360API.label()) self.register(EnkiAPI, canonical=EnkiAPI.label()) diff --git a/api/odilo.py b/api/odilo.py deleted file mode 100644 index 4e2e985ad1..0000000000 --- a/api/odilo.py +++ /dev/null @@ -1,1225 +0,0 @@ -import base64 -import datetime -import json -import logging - -import isbnlib -from flask_babel import lazy_gettext as _ -from pydantic import HttpUrl -from sqlalchemy.orm.session import Session - -from api.circulation import ( - BaseCirculationAPI, - CirculationInternalFormatsMixin, - FulfillmentInfo, - HoldInfo, - LoanInfo, - PatronActivityCirculationAPI, -) -from api.circulation_exceptions import * -from api.selftest import HasCollectionSelfTests, SelfTestResult -from core.analytics import Analytics -from core.config import CannotLoadConfiguration -from core.coverage import BibliographicCoverageProvider -from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField -from core.metadata_layer import ( - CirculationData, - ContributorData, - FormatData, - IdentifierData, - LinkData, - Metadata, - ReplacementPolicy, - SubjectData, -) -from core.model import ( - Classification, - Collection, - Contributor, - Credential, - DataSource, - DeliveryMechanism, - Edition, - ExternalIntegration, - Hyperlink, - Identifier, - LicensePool, - LicensePoolDeliveryMechanism, - Patron, - Representation, - Subject, -) -from core.monitor import CollectionMonitor, TimelineMonitor -from core.util.datetime_helpers import from_timestamp, strptime_utc, utc_now -from core.util.http import HTTP, BadResponseException -from core.util.personal_names import sort_name_to_display_name - - -class OdiloRepresentationExtractor: - """Extract useful information from Odilo's JSON representations.""" - - log = logging.getLogger("OdiloRepresentationExtractor") - ACSM = "ACSM" - ACSM_EPUB = "ACSM_EPUB" - ACSM_PDF = "ACSM_PDF" - EBOOK_STREAMING = "EBOOK_STREAMING" - - format_data_for_odilo_format = { - ACSM_PDF: (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM), - ACSM_EPUB: (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM), - EBOOK_STREAMING: ( - Representation.TEXT_HTML_MEDIA_TYPE, - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE, - ), - "MP3": ( - Representation.MP3_MEDIA_TYPE, - DeliveryMechanism.STREAMING_AUDIO_CONTENT_TYPE, - ), - "MP4": ( - 
Representation.MP4_MEDIA_TYPE, - DeliveryMechanism.STREAMING_VIDEO_CONTENT_TYPE, - ), - "WMV": ( - Representation.WMV_MEDIA_TYPE, - DeliveryMechanism.STREAMING_VIDEO_CONTENT_TYPE, - ), - "JPG": (Representation.JPEG_MEDIA_TYPE, DeliveryMechanism.NO_DRM), - "SCORM": (Representation.SCORM_MEDIA_TYPE, DeliveryMechanism.NO_DRM), - } - - odilo_medium_to_simplified_medium = { - ACSM_PDF: Edition.BOOK_MEDIUM, - ACSM_EPUB: Edition.BOOK_MEDIUM, - EBOOK_STREAMING: Edition.BOOK_MEDIUM, - "MP3": Edition.AUDIO_MEDIUM, - "MP4": Edition.VIDEO_MEDIUM, - "WMV": Edition.VIDEO_MEDIUM, - "JPG": Edition.IMAGE_MEDIUM, - "SCORM": Edition.COURSEWARE_MEDIUM, - } - - @classmethod - def record_info_to_circulation(cls, availability): - """Note: The json data passed into this method is from a different file/stream - from the json data that goes into the record_info_to_metadata() method. - """ - - if "recordId" not in availability: - return None - - record_id = availability["recordId"] - primary_identifier = IdentifierData( - Identifier.ODILO_ID, record_id - ) # We own this availability. - - licenses_owned = int(availability["totalCopies"]) - licenses_available = int(availability["availableCopies"]) - - # 'licenses_reserved' is the number of patrons who put the book on hold earlier, - # but who are now at the front of the queue and who could get the book right now if they wanted to. - if "notifiedHolds" in availability: - licenses_reserved = int(availability["notifiedHolds"]) - else: - licenses_reserved = 0 - - # 'patrons_in_hold_queue' contains the number of patrons who are currently waiting for a copy of the book. - if "holdsQueueSize" in availability: - patrons_in_hold_queue = int(availability["holdsQueueSize"]) - else: - patrons_in_hold_queue = 0 - - return CirculationData( - data_source=DataSource.ODILO, - primary_identifier=primary_identifier, - licenses_owned=licenses_owned, - licenses_available=licenses_available, - licenses_reserved=licenses_reserved, - patrons_in_hold_queue=patrons_in_hold_queue, - ) - - @classmethod - def image_link_to_linkdata(cls, link, rel): - if not link: - return None - - return LinkData(rel=rel, href=link, media_type=Representation.JPEG_MEDIA_TYPE) - - @classmethod - def record_info_to_metadata(cls, book, availability): - """Turn Odilo's JSON representation of a book into a Metadata - object. - - Note: The json data passed into this method is from a different file/stream - from the json data that goes into the book_info_to_circulation() method. 
- """ - if "id" not in book: - return None - - odilo_id = book["id"] - primary_identifier = IdentifierData(Identifier.ODILO_ID, odilo_id) - active = book.get("active") - - title = book.get("title") - subtitle = book.get("subtitle") - series = book.get("series").strip() or None - series_position = book.get("seriesPosition").strip() or None - - contributors = [] - sort_author = book.get("author") - if sort_author: - roles = [Contributor.AUTHOR_ROLE] - display_author = sort_name_to_display_name(sort_author) - contributor = ContributorData( - sort_name=sort_author, - display_name=display_author, - roles=roles, - biography=None, - ) - contributors.append(contributor) - - publisher = book.get("publisher") - - # Metadata --> Marc21 260$c - published = book.get("publicationDate") - if not published: - # yyyyMMdd --> record creation date - published = book.get("releaseDate") - - if published: - try: - published = strptime_utc(published, "%Y%m%d") - except ValueError as e: - cls.log.warn( - "Cannot parse publication date from: " - + published - + ", message: " - + str(e) - ) - - # yyyyMMdd --> record last modification date - last_update = book.get("modificationDate") - if last_update: - try: - last_update = strptime_utc(last_update, "%Y%m%d") - except ValueError as e: - cls.log.warn( - "Cannot parse last update date from: " - + last_update - + ", message: " - + str(e) - ) - - language = book.get("language", "spa") - - subjects = [] - trusted_weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT - for subject in book.get("subjects", []): - subjects.append( - SubjectData(type=Subject.TAG, identifier=subject, weight=trusted_weight) - ) - - for subjectBisacCode in book.get("subjectsBisacCodes", []): - subjects.append( - SubjectData( - type=Subject.BISAC, - identifier=subjectBisacCode, - weight=trusted_weight, - ) - ) - - grade_level = book.get("gradeLevel") - if grade_level: - subject = SubjectData( - type=Subject.GRADE_LEVEL, identifier=grade_level, weight=trusted_weight - ) - subjects.append(subject) - - medium = None - file_format = book.get("fileFormat") - formats = [] - for format_received in book.get("formats", []): - if format_received in cls.format_data_for_odilo_format: - medium = cls.set_format(format_received, formats) - elif format_received == cls.ACSM and file_format: - medium = cls.set_format( - format_received + "_" + file_format.upper(), formats - ) - else: - cls.log.warn("Unrecognized format received: " + format_received) - - if not medium: - medium = Edition.BOOK_MEDIUM - - identifiers = [] - isbn = book.get("isbn") - if isbn: - if isbnlib.is_isbn10(isbn): - isbn = isbnlib.to_isbn13(isbn) - identifiers.append(IdentifierData(Identifier.ISBN, isbn, 1)) - - # A cover - links = [] - cover_image_url = book.get("coverImageUrl") - if cover_image_url: - image_data = cls.image_link_to_linkdata( - cover_image_url, Hyperlink.THUMBNAIL_IMAGE - ) - if image_data: - links.append(image_data) - - original_image_url = book.get("originalImageUrl") - if original_image_url: - image_data = cls.image_link_to_linkdata(original_image_url, Hyperlink.IMAGE) - if image_data: - links.append(image_data) - - # Descriptions become links. 
- description = book.get("description") - if description: - links.append( - LinkData( - rel=Hyperlink.DESCRIPTION, - content=description, - media_type="text/html", - ) - ) - - metadata = Metadata( - data_source=DataSource.ODILO, - title=title, - subtitle=subtitle, - language=language, - medium=medium, - series=series, - series_position=series_position, - publisher=publisher, - published=published, - primary_identifier=primary_identifier, - identifiers=identifiers, - subjects=subjects, - contributors=contributors, - links=links, - data_source_last_updated=last_update, - ) - - metadata.circulation = OdiloRepresentationExtractor.record_info_to_circulation( - availability - ) - # 'active' --> means that the book exists but it's no longer in the collection - # (it could be available again in the future) - if metadata.circulation: - if not active: - metadata.circulation.licenses_owned = 0 - metadata.circulation.formats = formats - - return metadata, active - - @classmethod - def set_format(cls, format_received, formats): - content_type, drm_scheme = cls.format_data_for_odilo_format.get(format_received) - formats.append(FormatData(content_type, drm_scheme)) - return cls.odilo_medium_to_simplified_medium.get(format_received) - - -class OdiloSettings(BaseSettings): - library_api_base_url: HttpUrl = FormField( - form=ConfigurationFormItem( - label=_("Library API base URL"), - description=_( - "This might look like https://[library].odilo.us/api/v2." - ), - required=True, - ) - ) - username: str = FormField( - form=ConfigurationFormItem( - label=_("Client Key"), - required=True, - ) - ) - password: str = FormField( - form=ConfigurationFormItem( - label=_("Client Secret"), - required=True, - ) - ) - - -class OdiloLibrarySettings(BaseSettings): - pass - - -class OdiloAPI( - PatronActivityCirculationAPI[OdiloSettings, OdiloLibrarySettings], - HasCollectionSelfTests, - CirculationInternalFormatsMixin, -): - LIBRARY_API_BASE_URL = "library_api_base_url" - - NAME = ExternalIntegration.ODILO - DESCRIPTION = _("Integrate an Odilo library collection.") - - # --- OAuth --- - TOKEN_ENDPOINT = "/token" - - # --- Discovery API --- - ALL_PRODUCTS_ENDPOINT = "/records" - RECORD_METADATA_ENDPOINT = "/records/{recordId}" - RECORD_AVAILABILITY_ENDPOINT = "/records/{recordId}/availability" - - # --- Circulation API --- - CHECKOUT_ENDPOINT = "/records/{recordId}/checkout" - CHECKIN_ENDPOINT = "/checkouts/{checkoutId}/return?patronId={patronId}" - - PLACE_HOLD_ENDPOINT = "/records/{recordId}/hold" - RELEASE_HOLD_ENDPOINT = "/holds/{holdId}/cancel" - - PATRON_CHECKOUTS_ENDPOINT = "/patrons/{patronId}/checkouts" - PATRON_HOLDS_ENDPOINT = "/patrons/{patronId}/holds" - - # --------------------------------------- - - PAGE_SIZE_LIMIT = 200 - - SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.BORROW_STEP - - # maps a 2-tuple (media_type, drm_mechanism) to the internal string used in Odilo API to describe that setup. 
- delivery_mechanism_to_internal_format = { - v: k - for k, v in list( - OdiloRepresentationExtractor.format_data_for_odilo_format.items() - ) - } - - error_to_exception = { - "TitleNotCheckedOut": NoActiveLoan, - "patronNotFound": PatronNotFoundOnRemote, - "ERROR_DATA_NOT_FOUND": NotFoundOnRemote, - "LOAN_ALREADY_RESERVED": AlreadyOnHold, - "CHECKOUT_NOT_FOUND": NotCheckedOut, - } - - @classmethod - def settings_class(cls): - return OdiloSettings - - @classmethod - def library_settings_class(cls): - return OdiloLibrarySettings - - @classmethod - def label(cls): - return cls.NAME - - @classmethod - def description(cls): - return cls.DESCRIPTION - - def __init__(self, _db, collection): - self.odilo_bibliographic_coverage_provider = OdiloBibliographicCoverageProvider( - collection, api_class=self - ) - if collection.protocol != ExternalIntegration.ODILO: - raise ValueError( - "Collection protocol is %s, but passed into OdiloAPI!" - % collection.protocol - ) - super().__init__(_db, collection) - - self._db = _db - self.analytics = Analytics(self._db) - - self.collection_id = collection.id - self.token = None - settings = self.settings - self.client_key = settings.username - self.client_secret = settings.password - self.library_api_base_url = settings.library_api_base_url - - if ( - not self.client_key - or not self.client_secret - or not self.library_api_base_url - ): - raise CannotLoadConfiguration("Odilo configuration is incomplete.") - - # Use utf8 instead of unicode encoding - settings_encoded = [ - self.client_key, - self.client_secret, - self.library_api_base_url, - ] - self.client_key, self.client_secret, self.library_api_base_url = ( - setting.encode("utf8") for setting in settings_encoded - ) - - # Get set up with up-to-date credentials from the API. - self.check_creds() - if not self.token: - raise CannotLoadConfiguration( - "Invalid credentials for %s, cannot intialize API %s" - % (self.client_key, self.library_api_base_url) - ) - - @property - def collection(self): - return Collection.by_id(self._db, id=self.collection_id) - - @property - def source(self): - return DataSource.lookup(self._db, DataSource.ODILO) - - def external_integration(self, _db): - return self.collection.external_integration - - def _run_self_tests(self, _db): - result = self.run_test( - "Obtaining a sitewide access token", self.check_creds, force_refresh=True - ) - yield result - if not result.success: - # We couldn't get a sitewide token, so there is no - # point in continuing. - return - - for result in self.default_patrons(self.collection): - if isinstance(result, SelfTestResult): - yield result - continue - - library, patron, pin = result - task = ( - "Viewing the active loans for the test patron for library %s" - % library.name - ) - yield self.run_test(task, self.get_patron_checkouts, patron, pin) - - def check_creds(self, force_refresh=False): - """If the Bearer Token has expired, update it.""" - if force_refresh: - refresh_on_lookup = lambda x: x - else: - refresh_on_lookup = self.refresh_creds - - credential = self.credential_object(refresh_on_lookup) - if force_refresh: - self.refresh_creds(credential) - self.token = credential.credential - - def credential_object(self, refresh): - """Look up the Credential object that allows us to use - the Odilo API. 
- """ - return Credential.lookup(self._db, DataSource.ODILO, None, None, refresh) - - def refresh_creds(self, credential): - """Fetch a new Bearer Token and update the given Credential object.""" - - response = self.token_post( - self.TOKEN_ENDPOINT, - dict(grant_type="client_credentials"), - allowed_response_codes=[200, 400], - ) - - # If you put in the wrong URL, this is where you'll run into - # problems, so it's useful to give a helpful error message if - # Odilo doesn't provide anything more specific. - generic_error = ( - "%s may not be the right base URL. Response document was: %r" - % (self.library_api_base_url, response.content.decode("utf-8")) - ) - generic_exception = BadResponseException(self.TOKEN_ENDPOINT, generic_error) - - try: - data = response.json() - except ValueError: - raise generic_exception - if response.status_code == 200: - self._update_credential(credential, data) - self.token = credential.credential - return - elif response.status_code == 400: - if data and "errors" in data and len(data["errors"]) > 0: - error = data["errors"][0] - if "description" in error: - message = error["description"] - else: - message = generic_error - raise BadResponseException(self.TOKEN_ENDPOINT, message) - raise generic_exception - - def patron_request( - self, - patron, - pin, - url, - extra_headers={}, - data=None, - exception_on_401=False, - method=None, - ): - """Make an HTTP request on behalf of a patron. - - The results are never cached. - """ - headers = dict(Authorization="Bearer %s" % self.token) - headers["Content-Type"] = "application/json" - headers.update(extra_headers) - - if method and method.lower() in ("get", "post", "put", "delete"): - method = method.lower() - else: - if data: - method = "post" - else: - method = "get" - - url = self._make_absolute_url(url) - response = HTTP.request_with_timeout( - method, url, headers=headers, data=data, timeout=60 - ) - - # TODO: If Odilo doesn't recognize the patron it will send - # 404 in this case. - if response.status_code == 401: - if exception_on_401: - # This is our second try. Give up. - raise Exception("Something's wrong with the patron OAuth Bearer Token!") - else: - # Refresh the token and try again. - self.check_creds(True) - return self.patron_request(patron, pin, url, extra_headers, data, True) - else: - return response - - def _make_absolute_url(self, url): - """Prepend the API base URL onto `url` unless it is already - an absolute HTTP URL. - """ - if not any(url.startswith(protocol) for protocol in ("http://", "https://")): - url = self.library_api_base_url.decode("utf-8") + url - return url - - def get(self, url, extra_headers={}, exception_on_401=False): - """Make an HTTP GET request using the active Bearer Token.""" - if extra_headers is None: - extra_headers = {} - headers = dict(Authorization="Bearer %s" % self.token) - headers.update(extra_headers) - status_code, headers, content = self._do_get( - self.library_api_base_url.decode("utf-8") + url, headers - ) - if status_code == 401: - if exception_on_401: - # This is our second try. Give up. - raise BadResponseException.from_response( - url, - "Something's wrong with the Odilo OAuth Bearer Token!", - (status_code, headers, content), - ) - else: - # Refresh the token and try again. 
- self.check_creds(True) - return self.get(url, extra_headers, True) - else: - return status_code, headers, content - - def token_post(self, url, payload, headers={}, **kwargs): - """Make an HTTP POST request for purposes of getting an OAuth token.""" - s = f"{self.client_key}:{self.client_secret}" - auth = base64.standard_b64encode(s).strip() - headers = dict(headers) - headers["Authorization"] = "Basic %s" % auth - headers["Content-Type"] = "application/x-www-form-urlencoded" - return self._do_post( - self.library_api_base_url + url, payload, headers, **kwargs - ) - - def checkout( - self, - patron: Patron, - pin: str, - licensepool: LicensePool, - delivery_mechanism: LicensePoolDeliveryMechanism, - ) -> LoanInfo: - """Check out a book on behalf of a patron. - - :param patron: a Patron object for the patron who wants - to check out the book. - - :param pin: The patron's alleged password. - - :param licensepool: Identifier of the book to be checked out is - attached to this licensepool. - - :param delivery_mechanism: Represents the patron's desired book format. - - :return: a LoanInfo object. - """ - record_id = licensepool.identifier.identifier - internal_format = self.internal_format(delivery_mechanism) - - # Data just as 'x-www-form-urlencoded', no JSON - - payload = dict( - patronId=patron.authorization_identifier, - format=self.internal_format, - ) - - response = self.patron_request( - patron, - pin, - self.CHECKOUT_ENDPOINT.format(recordId=record_id), - extra_headers={"Content-Type": "application/x-www-form-urlencoded"}, - data=payload, - ) - if response.content: - response_json = response.json() - if response.status_code == 404: - self.raise_exception_on_error( - response_json, default_exception_class=CannotLoan - ) - else: - return self.loan_info_from_odilo_checkout( - licensepool.collection, response_json - ) - - # TODO: we need to improve this at the API and use an error code - elif response.status_code == 400: - raise NoAcceptableFormat( - f"record_id: {record_id}, format: {internal_format}" - ) - - raise CannotLoan( - "patron: %s, record_id: %s, format: %s" - % (patron, record_id, internal_format) - ) - - def loan_info_from_odilo_checkout(self, collection, checkout): - start_date = self.extract_date(checkout, "startTime") - end_date = self.extract_date(checkout, "endTime") - - return LoanInfo( - collection, - DataSource.ODILO, - Identifier.ODILO_ID, - checkout["id"], - start_date, - end_date, - checkout["downloadUrl"], - ) - - def checkin(self, patron, pin, licensepool): - record_id = licensepool.identifier.identifier - loan = self.get_checkout(patron, pin, record_id) - url = self.CHECKIN_ENDPOINT.format( - checkoutId=loan["id"], patronId=patron.authorization_identifier - ) - - response = self.patron_request(patron, pin, url, method="POST") - if response.status_code == 200: - return response - - self.raise_exception_on_error( - response.json(), default_exception_class=CannotReturn - ) - - @classmethod - def extract_date(cls, data, field_name): - if field_name not in data or not data[field_name]: - d = None - else: - # OdiloAPI dates are timestamps in milliseconds - d = from_timestamp(float(data[field_name]) / 1000.0) - return d - - @classmethod - def raise_exception_on_error( - cls, data, default_exception_class=None, ignore_exception_codes=None - ): - if not data or "errors" not in data or len(data["errors"]) <= 0: - return "", "" - - error = data["errors"][0] - error_code = error["id"] - message = ("description" in error and error["description"]) or "" - - if not 
ignore_exception_codes or error_code not in ignore_exception_codes: - if error_code in cls.error_to_exception: - raise cls.error_to_exception[error_code](message) - elif default_exception_class: - raise default_exception_class(message) - - def get_checkout(self, patron, pin, record_id): - patron_checkouts = self.get_patron_checkouts(patron, pin) - for checkout in patron_checkouts: - if checkout["recordId"] == record_id: - return checkout - - raise NotFoundOnRemote( - "Could not find active loan for patron {}, record {}".format( - patron, record_id - ) - ) - - def get_hold(self, patron, pin, record_id): - patron_holds = self.get_patron_holds(patron, pin) - for hold in patron_holds: - if hold["recordId"] == record_id and hold["status"] in ( - "informed", - "waiting", - ): - return hold - - raise NotFoundOnRemote( - "Could not find active hold for patron {}, record {}".format( - patron, record_id - ) - ) - - def fulfill( - self, - patron: Patron, - pin: str, - licensepool: LicensePool, - delivery_mechanism: LicensePoolDeliveryMechanism, - ) -> FulfillmentInfo: - """Get the actual resource file to the patron.""" - internal_format = self.internal_format(delivery_mechanism) - record_id = licensepool.identifier.identifier - content_link, content, content_type = self.get_fulfillment_link( - patron, pin, record_id, internal_format - ) - - if not content_link and not content: - self.log.info( - "Odilo record_id %s was not available as %s" - % (record_id, internal_format) - ) - raise CannotFulfill() - - return FulfillmentInfo( - licensepool.collection, - DataSource.ODILO, - Identifier.ODILO_ID, - record_id, - content_link=content_link, - content=content, - content_type=content_type, - content_expires=None, - ) - - def get_fulfillment_link(self, patron, pin, record_id, format_type): - """Get the link corresponding to an existing checkout.""" - # Retrieve checkout with its download_ulr. 
It is necessary to generate a download token in our API - checkout = self.get_checkout(patron, pin, record_id) - loan_format = checkout["format"] - if ( - format_type - and loan_format - and ( - format_type == loan_format - or ( - loan_format == OdiloRepresentationExtractor.ACSM - and format_type - in ( - OdiloRepresentationExtractor.ACSM_EPUB, - OdiloRepresentationExtractor.ACSM_PDF, - ) - ) - ) - ): - if "downloadUrl" in checkout and checkout["downloadUrl"]: - content_link = checkout["downloadUrl"] - content = None - content_type = ( - OdiloRepresentationExtractor.format_data_for_odilo_format[ - format_type - ] - ) - - # Get also .acsm file - if format_type in ( - OdiloRepresentationExtractor.ACSM_EPUB, - OdiloRepresentationExtractor.ACSM_PDF, - ): - response = self.patron_request(patron, pin, content_link) - if response.status_code == 200: - content = response.content - elif response.status_code == 404 and response.content: - self.raise_exception_on_error(response.json(), CannotFulfill) - - return content_link, content, content_type - - raise CannotFulfill( - "Cannot obtain a download link for patron[%r], record_id[%s], format_type[%s].", - patron, - record_id, - format_type, - ) - - def get_patron_checkouts(self, patron, pin): - data = self.patron_request( - patron, - pin, - self.PATRON_CHECKOUTS_ENDPOINT.format( - patronId=patron.authorization_identifier - ), - ).json() - self.raise_exception_on_error(data) - return data - - def get_patron_holds(self, patron, pin): - data = self.patron_request( - patron, - pin, - self.PATRON_HOLDS_ENDPOINT.format(patronId=patron.authorization_identifier), - ).json() - self.raise_exception_on_error(data) - return data - - def patron_activity(self, patron, pin): - odilo_checkouts = self.get_patron_checkouts(patron, pin) - odilo_holds = self.get_patron_holds(patron, pin) - - loans_info = [] - holds_info = [] - - collection = self.collection - - for checkout in odilo_checkouts: - loan_info = self.loan_info_from_odilo_checkout(collection, checkout) - loans_info.append(loan_info) - - for hold in odilo_holds: - hold_info = self.hold_from_odilo_hold(collection, hold) - holds_info.append(hold_info) - - return loans_info + holds_info - - def hold_from_odilo_hold(self, collection, hold): - start = self.extract_date(hold, "startTime") - # end_date: The estimated date the title will be available for the patron to borrow. - end = self.extract_date(hold, "notifiedTime") - position = hold.get("holdQueuePosition") - - if position is not None: - position = int(position) - - # Patron already notified to borrow the title - if "informed" == hold["status"]: - position = 0 - - return HoldInfo( - collection, - DataSource.ODILO, - Identifier.ODILO_ID, - hold["id"], - start_date=start, - end_date=end, - hold_position=position, - ) - - def place_hold(self, patron, pin, licensepool, notification_email_address): - """Place a book on hold. 
- - :return: A HoldInfo object - """ - - record_id = licensepool.identifier.identifier - - # Data just as 'x-www-form-urlencoded', no JSON - payload = dict(patronId=patron.authorization_identifier) - - response = self.patron_request( - patron, - pin, - self.PLACE_HOLD_ENDPOINT.format(recordId=record_id), - extra_headers={"Content-Type": "application/x-www-form-urlencoded"}, - data=payload, - ) - - data = response.json() - if response.status_code == 200: - return self.hold_from_odilo_hold(licensepool.collection, data) - - self.raise_exception_on_error(data, CannotHold) - - def release_hold(self, patron, pin, licensepool): - """Release a patron's hold on a book.""" - - record_id = licensepool.identifier.identifier - hold = self.get_hold(patron, pin, record_id) - url = self.RELEASE_HOLD_ENDPOINT.format(holdId=hold["id"]) - payload = json.dumps(dict(patronId=patron.authorization_identifier)) - - response = self.patron_request( - patron, pin, url, extra_headers={}, data=payload, method="POST" - ) - if response.status_code == 200: - return True - - self.raise_exception_on_error( - response.json(), - default_exception_class=CannotReleaseHold, - ignore_exception_codes=["HOLD_NOT_FOUND"], - ) - return True - - @staticmethod - def _update_credential(credential, odilo_data): - """Copy Odilo OAuth data into a Credential object.""" - credential.credential = odilo_data["token"] - if odilo_data["expiresIn"] == -1: - # This token never expires. - credential.expires = None - else: - expires_in = odilo_data["expiresIn"] * 0.9 - credential.expires = utc_now() + datetime.timedelta(seconds=expires_in) - - def get_metadata(self, record_id): - identifier = record_id - if isinstance(record_id, Identifier): - identifier = record_id.identifier - - url = self.RECORD_METADATA_ENDPOINT.format(recordId=identifier) - - status_code, headers, content = self.get(url) - if status_code == 200 and content: - return content - else: - msg = ( - "Cannot retrieve metadata for record: " - + record_id - + " response http " - + status_code - ) - if content: - msg += " content: " + content - self.log.warn(msg) - return None - - def get_availability(self, record_id): - url = self.RECORD_AVAILABILITY_ENDPOINT.format(recordId=record_id) - status_code, headers, content = self.get(url) - content = json.loads(content) - - if status_code == 200 and len(content) > 0: - return content - else: - msg = ( - "Cannot retrieve availability for record: " - + record_id - + " response http " - + status_code - ) - if content: - msg += " content: " + content - self.log.warn(msg) - return None - - @staticmethod - def _do_get(url, headers, **kwargs): - # More time please - if "timeout" not in kwargs: - kwargs["timeout"] = 60 - - if "allow_redirects" not in kwargs: - kwargs["allow_redirects"] = True - - response = HTTP.get_with_timeout(url, headers=headers, **kwargs) - return response.status_code, response.headers, response.content - - @staticmethod - def _do_post(url, payload, headers, **kwargs): - # More time please - if "timeout" not in kwargs: - kwargs["timeout"] = 60 - - return HTTP.post_with_timeout(url, payload, headers=headers, **kwargs) - - def update_availability(self, licensepool): - pass - - -class OdiloCirculationMonitor(CollectionMonitor, TimelineMonitor): - """Maintain LicensePools for recently changed Odilo titles""" - - SERVICE_NAME = "Odilo Circulation Monitor" - INTERVAL_SECONDS = 500 - PROTOCOL = ExternalIntegration.ODILO - DEFAULT_START_TIME = CollectionMonitor.NEVER - - def __init__(self, _db, collection, api_class=OdiloAPI): - 
"""Constructor.""" - super().__init__(_db, collection) - self.api = api_class(_db, collection) - - def catch_up_from(self, start, cutoff, progress): - """Find Odilo books that changed recently. - - :progress: A TimestampData representing the time previously - covered by this Monitor. - """ - - self.log.info( - "Starting recently_changed_ids, start: " - + str(start) - + ", cutoff: " - + str(cutoff) - ) - - start_time = utc_now() - updated, new = self.all_ids(start) - finish_time = utc_now() - - time_elapsed = finish_time - start_time - self.log.info("recently_changed_ids finished in: " + str(time_elapsed)) - progress.achievements = "Updated records: %d. New records: %d." % (updated, new) - - def all_ids(self, modification_date=None): - """Get IDs for every book in the system, from modification date if any""" - - retrieved = 0 - parsed = 0 - new = 0 - offset = 0 - limit = self.api.PAGE_SIZE_LIMIT - - if modification_date and isinstance(modification_date, datetime.date): - modification_date = modification_date.strftime( - "%Y-%m-%d" - ) # Format YYYY-MM-DD - - # Retrieve first group of records - url = self.get_url(limit, modification_date, offset) - status_code, headers, content = self.api.get(url) - content = json.loads(content) - - # Retrieve Odilo record in groups - while status_code == 200 and len(content) > 0: - offset += limit - retrieved += len(content) - self.log.info("Retrieved %i records" % retrieved) - - # Process a bunch of records retrieved - for record in content: - record_id = record["id"] - self.log.info( - "Processing record %i/%i: %s" % (parsed, retrieved, record_id) - ) - ( - identifier, - is_new, - ) = self.api.odilo_bibliographic_coverage_provider.process_item( - record_id, record - ) - - if is_new: - new += 1 - - parsed += 1 - - # Persist each bunch of retrieved records - self._db.commit() - - # Retrieve next group of records - url = self.get_url(limit, modification_date, offset) - status_code, headers, content = self.api.get(url) - content = json.loads(content) - - if status_code >= 400: - self.log.error( - "ERROR: Fail while retrieving data from remote source: HTTP " - + status_code - ) - if content: - self.log.error("ERROR response content: " + str(content)) - else: - self.log.info( - "Retrieving all ids finished ok. Retrieved %i records. New records: %i!!" - % (retrieved, new) - ) - return retrieved, new - - def get_url(self, limit, modification_date, offset): - url = "%s?limit=%i&offset=%i" % (self.api.ALL_PRODUCTS_ENDPOINT, limit, offset) - if modification_date: - url = f"{url}&modificationDate={modification_date}" - - return url - - -class OdiloBibliographicCoverageProvider(BibliographicCoverageProvider): - """Fill in bibliographic metadata for Odilo records. - - This will occasionally fill in some availability information for a - single Collection, but we rely on Monitors to keep availability - information up to date for all Collections. - """ - - SERVICE_NAME = "Odilo Bibliographic Coverage Provider" - DATA_SOURCE_NAME = DataSource.ODILO - PROTOCOL = ExternalIntegration.ODILO - INPUT_IDENTIFIER_TYPES = Identifier.ODILO_ID - - def __init__(self, collection, api_class=OdiloAPI, **kwargs): - """Constructor. - - :param collection: Provide bibliographic coverage to all - Odilo books in the given Collection. - :param api_class: Instantiate this class with the given Collection, - rather than instantiating OdiloAPI. 
- """ - super().__init__(collection, **kwargs) - if isinstance(api_class, OdiloAPI): - # Use a previously instantiated OdiloAPI instance - # rather than creating a new one. - self.api = api_class - else: - # A web application should not use this option because it - # will put a non-scoped session in the mix. - _db = Session.object_session(collection) - self.api = api_class(_db, collection) - - self.replacement_policy = ReplacementPolicy( - identifiers=True, - subjects=True, - contributions=True, - links=True, - formats=True, - rights=True, - link_content=True, - # even_if_not_apparently_updated=False, - analytics=Analytics(self._db), - ) - - def process_item(self, record_id, record=None): - if not record: - record = self.api.get_metadata(record_id) - - if not record: - return self.failure(record_id, "Record not found", transient=False) - - # Retrieve availability - availability = self.api.get_availability(record_id) - - metadata, is_active = OdiloRepresentationExtractor.record_info_to_metadata( - record, availability - ) - if not metadata: - e = "Could not extract metadata from Odilo data: %s" % record_id - return self.failure(record_id, e) - - identifier, made_new = metadata.primary_identifier.load(_db=self._db) - - if not identifier: - e = "Could not create identifier for Odilo data: %s" % record_id - return self.failure(identifier, e) - - identifier = self.set_metadata(identifier, metadata) - - # calls work.set_presentation_ready() for us - self.handle_success(identifier) - - return identifier, made_new diff --git a/bin/odilo_monitor_recent b/bin/odilo_monitor_recent deleted file mode 100755 index 42846ae61a..0000000000 --- a/bin/odilo_monitor_recent +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python -"""Updates an Odilo collection.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) - -from api.odilo import OdiloCirculationMonitor -from core.scripts import RunCollectionMonitorScript - -RunCollectionMonitorScript(OdiloCirculationMonitor).run() diff --git a/core/model/configuration.py b/core/model/configuration.py index a47218a8db..c68bca2722 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -75,7 +75,6 @@ class ExternalIntegration(Base): OPDS_IMPORT = "OPDS Import" OPDS2_IMPORT = "OPDS 2.0 Import" OVERDRIVE = DataSourceConstants.OVERDRIVE - ODILO = DataSourceConstants.ODILO BIBLIOTHECA = DataSourceConstants.BIBLIOTHECA AXIS_360 = DataSourceConstants.AXIS_360 OPDS_FOR_DISTRIBUTORS = "OPDS for Distributors" @@ -96,7 +95,6 @@ class ExternalIntegration(Base): LICENSE_PROTOCOLS = [ OPDS_IMPORT, OVERDRIVE, - ODILO, BIBLIOTHECA, AXIS_360, GUTENBERG, @@ -107,7 +105,6 @@ class ExternalIntegration(Base): # licenses come from a specific data source. DATA_SOURCE_FOR_LICENSE_PROTOCOL = { OVERDRIVE: DataSourceConstants.OVERDRIVE, - ODILO: DataSourceConstants.ODILO, BIBLIOTHECA: DataSourceConstants.BIBLIOTHECA, AXIS_360: DataSourceConstants.AXIS_360, ENKI: DataSourceConstants.ENKI, diff --git a/core/model/constants.py b/core/model/constants.py index 4457f45071..b0c5197f51 100644 --- a/core/model/constants.py +++ b/core/model/constants.py @@ -9,7 +9,6 @@ class DataSourceConstants: GUTENBERG = "Gutenberg" OVERDRIVE = "Overdrive" - ODILO = "Odilo" PROJECT_GITENBERG = "Project GITenberg" STANDARD_EBOOKS = "Standard Ebooks" UNGLUE_IT = "unglue.it" @@ -145,7 +144,6 @@ class EditionConstants: class IdentifierConstants: # Common types of identifiers. 
OVERDRIVE_ID = "Overdrive ID" - ODILO_ID = "Odilo ID" BIBLIOTHECA_ID = "Bibliotheca ID" GUTENBERG_ID = "Gutenberg ID" AXIS_360_ID = "Axis 360 ID" @@ -173,7 +171,6 @@ class IdentifierConstants: LICENSE_PROVIDING_IDENTIFIER_TYPES = [ BIBLIOTHECA_ID, OVERDRIVE_ID, - ODILO_ID, AXIS_360_ID, GUTENBERG_ID, ELIB_ID, @@ -195,7 +192,6 @@ class IdentifierType(Enum): """Enumeration of all available identifier types.""" OVERDRIVE_ID = IdentifierConstants.OVERDRIVE_ID - ODILO_ID = IdentifierConstants.ODILO_ID BIBLIOTHECA_ID = IdentifierConstants.BIBLIOTHECA_ID GUTENBERG_ID = IdentifierConstants.GUTENBERG_ID AXIS_360_ID = IdentifierConstants.AXIS_360_ID diff --git a/core/model/datasource.py b/core/model/datasource.py index 37106ab127..665fdddd45 100644 --- a/core/model/datasource.py +++ b/core/model/datasource.py @@ -237,7 +237,6 @@ def well_known_sources(cls, _db): IdentifierConstants.BIBLIOTHECA_ID, 60 * 60 * 6, ), - (cls.ODILO, True, False, IdentifierConstants.ODILO_ID, 0), (cls.AXIS_360, True, False, IdentifierConstants.AXIS_360_ID, 0), (cls.OCLC, False, False, None, None), (cls.OCLC_LINKED_DATA, False, False, None, None), diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index b261593dc5..0f3a9ddf86 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -93,10 +93,6 @@ HOME=/var/www/circulation # 45 * * * * root core/bin/run odl2_import_monitor >> /var/log/cron.log 2>&1 -# Odilo -# -*/15 * * * * root core/bin/run odilo_monitor_recent >> /var/log/cron.log 2>&1 - # SAML # 0 5 * * * root core/bin/run saml_monitor >> /var/log/cron.log 2>&1 diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 7ced547870..bda4e6f1e9 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -17,7 +17,6 @@ "tests.fixtures.api_millenium_files", "tests.fixtures.api_novelist_files", "tests.fixtures.api_nyt_files", - "tests.fixtures.api_odilo_files", "tests.fixtures.api_odl", "tests.fixtures.api_onix_files", "tests.fixtures.api_opds_dist_files", diff --git a/tests/api/files/odilo/checkin_ok.json b/tests/api/files/odilo/checkin_ok.json deleted file mode 100644 index 68cf2edcf1..0000000000 --- a/tests/api/files/odilo/checkin_ok.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "id": "4318", - "recordId": "00011055", - "startTime": "1510152832297", - "endTime": "1510312043738", - "renewable": false, - "returnable": true -} \ No newline at end of file diff --git a/tests/api/files/odilo/checkout_acsm_epub_ok.json b/tests/api/files/odilo/checkout_acsm_epub_ok.json deleted file mode 100644 index cfbd28c502..0000000000 --- a/tests/api/files/odilo/checkout_acsm_epub_ok.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "id": "4306", - "recordId": "00010982", - "downloadUrl": "http://localhost:8080/api/v2/checkouts/4306/download?token=e2f7721a11963ce5a572acb4f8599a&patronId=001000265", - "startTime": "1507716876495", - "endTime": "1516356876495", - "renewable": false, - "returnable": true, - "format": "ACSM_EPUB" -} \ No newline at end of file diff --git a/tests/api/files/odilo/checkout_acsm_pdf_ok.json b/tests/api/files/odilo/checkout_acsm_pdf_ok.json deleted file mode 100644 index 89aa87edf1..0000000000 --- a/tests/api/files/odilo/checkout_acsm_pdf_ok.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "id": "4306", - "recordId": "00010982", - "downloadUrl": "http://localhost:8080/api/v2/checkouts/4306/download?token=e2f7721a11963ce5a572acb4f8599a&patronId=001000265", - "startTime": "1507716876495", - "endTime": "1516356876495", - "renewable": false, - 
"returnable": true, - "format": "ACSM_PDF" -} \ No newline at end of file diff --git a/tests/api/files/odilo/checkout_ebook_streaming_ok.json b/tests/api/files/odilo/checkout_ebook_streaming_ok.json deleted file mode 100644 index 5f1302d739..0000000000 --- a/tests/api/files/odilo/checkout_ebook_streaming_ok.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "id": "4318", - "recordId": "00011055", - "downloadUrl": "http://localhost:8080/api/v2/checkouts/4318/download?token=1da29c1154cf34ab5b43d1bedc2ec116&patronId=001000265", - "startTime": "1510152832297", - "endTime": "1511448832297", - "renewable": false, - "returnable": true, - "format": "EBOOK_STREAMING" -} \ No newline at end of file diff --git a/tests/api/files/odilo/error_checkout_not_found.json b/tests/api/files/odilo/error_checkout_not_found.json deleted file mode 100644 index 3ff004e846..0000000000 --- a/tests/api/files/odilo/error_checkout_not_found.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "errors": [ - { - "id": "CHECKOUT_NOT_FOUND", - "description": "Checkout not found '331234'" - } - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/error_data_not_found.json b/tests/api/files/odilo/error_data_not_found.json deleted file mode 100644 index ff85243f13..0000000000 --- a/tests/api/files/odilo/error_data_not_found.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "errors": [ - { - "id": "ERROR_DATA_NOT_FOUND" - } - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/error_hold_already_in_hold.json b/tests/api/files/odilo/error_hold_already_in_hold.json deleted file mode 100644 index 9f266ce9b0..0000000000 --- a/tests/api/files/odilo/error_hold_already_in_hold.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "errors": [ - { - "id": "LOAN_ALREADY_RESERVED", - "description": "Required record has been already reserverd." - } - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/error_hold_not_found.json b/tests/api/files/odilo/error_hold_not_found.json deleted file mode 100644 index 55af6cf95e..0000000000 --- a/tests/api/files/odilo/error_hold_not_found.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "errors": [ - { - "id": "HOLD_NOT_FOUND", - "description": "Hold not found '00011074'" - } - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/error_patron_not_found.json b/tests/api/files/odilo/error_patron_not_found.json deleted file mode 100644 index e99cd02271..0000000000 --- a/tests/api/files/odilo/error_patron_not_found.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "errors": [ - { - "id": "patronNotFound", - "description": "Patron '123456789' not found " - } - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/fulfill_ok_acsm_epub.acsm b/tests/api/files/odilo/fulfill_ok_acsm_epub.acsm deleted file mode 100644 index a9d885a87b..0000000000 --- a/tests/api/files/odilo/fulfill_ok_acsm_epub.acsm +++ /dev/null @@ -1,29 +0,0 @@ - - urn:uuid:176c679c-1d79-4fc8-9db4-572a6125a69e - http://test.odilotk.es:80/fulfillment - I3K:001000265:A-725:X:4306 - 2017-11-09T12:53:27+00:00 - 2017-11-09T12:58:27+00:00 - - urn:uuid:72787032-26ae-4db4-8ac0-65d02f18ef20 - 0 - - Busy Brownies - E. 
Veale - - http://www.gutenberg.org/ebooks/54159 - application/epub+zip - es - - - urn:uuid:72787032-26ae-4db4-8ac0-65d02f18ef20 - - - - - - - - - umQylLEg0dv3o5mS9iu5XLFC72A= - \ No newline at end of file diff --git a/tests/api/files/odilo/fulfill_ok_acsm_pdf.acsm b/tests/api/files/odilo/fulfill_ok_acsm_pdf.acsm deleted file mode 100644 index de2aa253ee..0000000000 --- a/tests/api/files/odilo/fulfill_ok_acsm_pdf.acsm +++ /dev/null @@ -1,29 +0,0 @@ - - urn:uuid:176c679c-1d79-4fc8-9db4-572a6125a69e - http://test.odilotk.es:80/fulfillment - I3K:001000265:A-725:X:4306 - 2017-11-09T12:53:27+00:00 - 2017-11-09T12:58:27+00:00 - - urn:uuid:72787032-26ae-4db4-8ac0-65d02f18ef20 - 0 - - Busy Brownies - E. Veale - - http://www.gutenberg.org/ebooks/54159 - application/pdf - es - - - urn:uuid:72787032-26ae-4db4-8ac0-65d02f18ef20 - - - - - - - - - umQylLEg0dv3o5mS9iu5XLFC72A= - \ No newline at end of file diff --git a/tests/api/files/odilo/odilo_availability.json b/tests/api/files/odilo/odilo_availability.json deleted file mode 100644 index ea97ae67c2..0000000000 --- a/tests/api/files/odilo/odilo_availability.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "recordId": "00010982", - "totalCopies": 2, - "availableCopies": 1, - "holdsQueueSize": 2, - "notifiedHolds": 1, - "formats": [ - "ACSM", - "EBOOK_STREAMING" - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/odilo_availability_inactive.json b/tests/api/files/odilo/odilo_availability_inactive.json deleted file mode 100644 index 13b70ac4e5..0000000000 --- a/tests/api/files/odilo/odilo_availability_inactive.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "recordId": "00011135", - "totalCopies": 2, - "availableCopies": 0, - "holdsQueueSize": 0, - "notifiedHolds": 0, - "formats": [ - "EBOOK_STREAMING" - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/odilo_metadata.json b/tests/api/files/odilo/odilo_metadata.json deleted file mode 100644 index 6a45ed8e0d..0000000000 --- a/tests/api/files/odilo/odilo_metadata.json +++ /dev/null @@ -1,36 +0,0 @@ -{ - "id": "00010982", - "active": true, - "title": "Busy Brownies", - "subtitle": " (The Classic Fantasy Literature of Elves for Children)", - "author": "Veale, E.", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistUS/pg54159_225x318.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistUS/pg54159.jpg", - "description": "All the Brownies had promised to help, and when a Brownie undertakes a thing he works as busily as a beaver until it is accomplished.

Now this is what they determined to do. Brownies chopping wood The Fairy Queens palace had been destroyed—a wind that had swept through the forest carrying trees before it, and spreading ruin as it traveled, had lifted up the Fairy Queens home and dashed it all to pieces. Poor little Queen; how sorrowful she felt to lose the pretty house where the royal family of Fairyland had so long lived.

Illustration of the paint incident described below But true to their nature the Brownies came to the rescue, promising to build a palace far more beautiful than the one that was lost.
  • Such helpful little creatures as the Brownies never lived.
  • No chance of doing good to one in trouble ever escapes these generous little fellows and certainly this was a work to be hailed with much joy.
  • For a long while before they commenced, you could see them gathered in groups, discussing how and where they should begin, and how they could make the palace more beautiful.
  • They were a funny looking set when they started out for the place where the house was to be built. Each one carried something. One little fellow had an axe, another, hammer and nails, one the mortar hod and still another the plane, while the master worker could be seen with the square in his hand giving directions to the whole crowd.
  • They commenced their work one beautiful moonlight night. Brownies, you know, work when the darkness has put all the world to sleep. What a time they had getting all the things together. Arriving at the spot, some fell to chopping wood, while others mixed the mortar and rigged up the pulleys by which they were to raise the stuff to the roof. How the hammers rang out as they struck the bright little nails.
", - "formats": [ - "ACSM", - "EBOOK_STREAMING" - ], - "fileFormat": "epub", - "gradeLevel": "K-12", - "isbn": "9783736418837", - "language": "eng", - "publicationDate": "20130202", - "publisher": "ANBOCO", - "releaseDate": "20170213", - "modificationDate": "20170310", - "subject": "Children", - "subjects": [ - "Children", - "Fantasy", - "Classics" - ], - "subjectsBisacCodes": [ - "LIT009000", - "YAF019020", - "FIC004000" - ], - "type": "| || l|1| || ||", - "series": "The Classic Fantasy Literature for Children written in 1896 retold for Elves adventure.", - "seriesPosition": "1" -} diff --git a/tests/api/files/odilo/odilo_metadata_inactive.json b/tests/api/files/odilo/odilo_metadata_inactive.json deleted file mode 100644 index 1664a6e642..0000000000 --- a/tests/api/files/odilo/odilo_metadata_inactive.json +++ /dev/null @@ -1,35 +0,0 @@ -{ - "id": "00011135", - "active": false, - "title": "!Tention A Story of Boy-Life during the Peninsular War", - "subtitle": "A Story of Boy-Life during the Peninsular War", - "author": "Fenn, George Manville", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/_Tention_A_Story_of_Boy_Life_during_the_Peninsular_War/cover_bb3de17a-5cbf-4d24-ad65-ff52fbbf408f.jpg", - "originalImageUrl": "", - "description": "", - "formats": [ - "EBOOK_STREAMING" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "9876543219876", - "language": "eng", - "publicationDate": "20130202", - "publisher": "ANBOCO", - "releaseDate": "20170213", - "modificationDate": "20170310", - "subject": "Fiction and Literature", - "subjects": [ - "Adult", - "Fiction and Literature", - "War" - ], - "subjectsBisacCodes": [ - "YAF000000", - "FIC009090", - "FIC032000" - ], - "type": "| || l|1| || ||", - "series": "!Tention", - "seriesPosition": "1" -} diff --git a/tests/api/files/odilo/patron_checkouts.json b/tests/api/files/odilo/patron_checkouts.json deleted file mode 100644 index baacc1f797..0000000000 --- a/tests/api/files/odilo/patron_checkouts.json +++ /dev/null @@ -1,22 +0,0 @@ -[ - { - "id": "4306", - "recordId": "00010982", - "downloadUrl": "http://localhost:8080/api/v2/checkouts/4306/download?token=1f11af41ec385092951070cb3768e19&patronId=001000265", - "startTime": "1507716876495", - "endTime": "1516356876495", - "renewable": false, - "returnable": true, - "format": "ACSM" - }, - { - "id": "4318", - "recordId": "00011055", - "downloadUrl": "http://localhost:8080/api/v2/checkouts/4318/download?token=6f8534f69c4663fed4491d59fe29b2c&patronId=001000265", - "startTime": "1510152832297", - "endTime": "1511448832297", - "renewable": false, - "returnable": true, - "format": "EBOOK_STREAMING" - } -] \ No newline at end of file diff --git a/tests/api/files/odilo/patron_holds.json b/tests/api/files/odilo/patron_holds.json deleted file mode 100644 index b343d9b015..0000000000 --- a/tests/api/files/odilo/patron_holds.json +++ /dev/null @@ -1,92 +0,0 @@ -[ - { - "id": "000000145", - "recordId": "00011074", - "available": false, - "holdQueuePosition": 2, - "startTime": "1510237733000", - "notifiedTime": "", - "status": "waiting" - }, - { - "id": "000000144", - "recordId": "00010982", - "available": false, - "holdQueuePosition": 1, - "startTime": "1507712265000", - "notifiedTime": "1507712325396", - "status": "waiting" - }, - { - "id": "000000143", - "recordId": "00010982", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1507202962000", - "notifiedTime": "1507202967388", - "status": "informed" - }, - { - "id": "000000137", - "recordId": "00011052", - "available": false, - 
"holdQueuePosition": 65535, - "startTime": "1501155907000", - "notifiedTime": "1501160148175", - "status": "donwloaded" - }, - { - "id": "000000132", - "recordId": "00011071", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1498469982000", - "notifiedTime": "1498645872428", - "status": "expired" - }, - { - "id": "000000130", - "recordId": "00011074", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1498132171000", - "notifiedTime": "1498645866947", - "status": "expired" - }, - { - "id": "000000129", - "recordId": "00010952", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1498132096000", - "notifiedTime": "1498142843675", - "status": "expired" - }, - { - "id": "000000125", - "recordId": "00011077", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1498124564000", - "notifiedTime": "1498204194249", - "status": "donwloaded" - }, - { - "id": "000000124", - "recordId": "00011052", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1497954965000", - "notifiedTime": "1498219771799", - "status": "expired" - }, - { - "id": "000000122", - "recordId": "00011074", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1497953944000", - "notifiedTime": "1498124705793", - "status": "donwloaded" - } -] \ No newline at end of file diff --git a/tests/api/files/odilo/place_hold_ok.json b/tests/api/files/odilo/place_hold_ok.json deleted file mode 100644 index 5360d10545..0000000000 --- a/tests/api/files/odilo/place_hold_ok.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "id": "000000145", - "recordId": "00011074", - "available": false, - "holdQueuePosition": 65535, - "startTime": "1510237733370", - "notifiedTime": "", - "status": "waiting" -} \ No newline at end of file diff --git a/tests/api/files/odilo/record_availability.json b/tests/api/files/odilo/record_availability.json deleted file mode 100644 index 4c310cd699..0000000000 --- a/tests/api/files/odilo/record_availability.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "recordId": "00010982", - "totalCopies": 2, - "availableCopies": 0, - "holdsQueueSize": 1, - "notifiedHolds": 0, - "formats": [ - "ACSM", - "EBOOK_STREAMING" - ] -} \ No newline at end of file diff --git a/tests/api/files/odilo/records_metadata.json b/tests/api/files/odilo/records_metadata.json deleted file mode 100644 index 9efb712387..0000000000 --- a/tests/api/files/odilo/records_metadata.json +++ /dev/null @@ -1,289 +0,0 @@ -[ - { - "id": "00011071", - "active": true, - "title": "Tiempo", - "subtitle": "", - "author": "", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170623_tiempo_225x318.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170623_tiempo.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING", - "ACSM" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "", - "publisher": "", - "releaseDate": "20170511", - "modificationDate": "20170623", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "| || l|1| || ||", - "series": "", - "seriesPosition": "" - }, - { - "id": "00011070", - "active": true, - "title": "Diez Minutos", - "subtitle": "", - "author": "", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170621_DM_225x318.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170621_DM.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING", - "ACSM" - ], - "fileFormat": 
"epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "", - "publisher": "", - "releaseDate": "20170511", - "modificationDate": "20170623", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "| || l|1| || ||", - "series": "", - "seriesPosition": "" - }, - { - "id": "00009960", - "active": true, - "title": "Flores oscuras", - "subtitle": "", - "author": "Sergio Ramírez", - "coverImageUrl": "http://pruebasotk.odilotk.es/Flores_oscuras/cover_9788420414560.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/Flores_oscuras/cover_9788420414560.jpg", - "description": "Sergio Ramírez, ganador del Premio Alfaguara 1998, se asoma a los misterios del alma humana en doce sorprendentes relatos El juez y su Conciencia, la sin par Mireya y el tragafuegos Luzbel, el Duende que Camina hacia el trono de la calavera, el petimetre y el diablo, el último combate del minimosca Gavilán, la suerte del exguerrillero Trinidad, alias «el Comandante»... En Flores oscuras, cada personaje batalla contra sus propios conflictos y esconde sus propios secretos. A medio camino entre la crónica periodística y el cuento, Sergio Ramírez se asoma a los misterios del alma humana en doce sorprendentes relatos llenos de colores vivos y negras sombras. «Una vez que atravesó la rompiente se dejó balancear sobre el lomo de las aguas grises de cara al cielo, y parecía disfrutarlo. Ya por último se le vio alzar la mano, lo que bien pudo ser tomado por un alegre saludo a su esposa que se bañaba con el agua a la rodilla, cuidadosa de sus cinco meses de embarazo, hasta que los reflejos del sol, un intenso reguero de escamas plateadas sobre la superficie en movimiento, ya no permitieron verlo más. (...) Un bromista. Eso es lo que era. Seguramente nadaba de regreso debajo del agua, ocultándose, y de pronto aparecería a su lado sacando la cabeza en medio de un estallido de espuma que la salpicaría toda. Nada de eso ocurrió.» Reseñas: «El primer cuentista vivo en el continente latinoamericano, y uno de los mejores en español, heredero de las armas de Cortázar y Monterroso... Por cada cuento un mundo.» Javier Sancho Más, Babelia «Sergio Ramírez se mueve como pez en el agua en el dominio del relato, escribiendo cuentos que no se cansan de sacarle punta al lápiz de la vida, adoptando enfoques insólitos -que acaban convirtiendo en sorprendente un suceso banal- y aclimatando con suma habilidad el humor a las catástrofes cotidianas ». 
Javier Aparicio, El País", - "formats": [ - "ACSM", - "EBOOK_STREAMING" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "20130320", - "publisher": "Castellano", - "releaseDate": "20141017", - "modificationDate": "20141017", - "subject": "Ficción moderna y contemporánea", - "subjects": [ - "Ficción moderna y contemporánea" - ], - "subjectsBisacCodes": [ - "FIC004000" - ], - "type": "", - "series": "", - "seriesPosition": "" - }, - { - "id": "00011077", - "active": true, - "title": "Fotogramas", - "subtitle": "", - "author": "", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170627_FG_225x318.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170627_FG.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING", - "ACSM" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "", - "publisher": "", - "releaseDate": "20170511", - "modificationDate": "20170623", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "| || l|1| || ||", - "series": "", - "seriesPosition": "" - }, - { - "id": "00011080", - "active": true, - "title": "Primera Linea", - "subtitle": "", - "author": "", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170619_primeralinea_225x318.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170619_primeralinea.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING", - "ACSM" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "", - "publisher": "", - "releaseDate": "20170622", - "modificationDate": "20170623", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "| || l|1| || ||", - "series": "", - "seriesPosition": "" - }, - { - "id": "00011079", - "active": true, - "title": "Mi Casa", - "subtitle": "", - "author": "", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170613_MC_225x318.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170613_MC.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING", - "ACSM" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "", - "publisher": "", - "releaseDate": "20170511", - "modificationDate": "20170623", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "| || l|1| || ||", - "series": "", - "seriesPosition": "" - }, - { - "id": "00011074", - "active": true, - "title": "QMD", - "subtitle": "", - "author": "", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170619_QMD_225x318.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170619_QMD.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING", - "ACSM" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "", - "publisher": "", - "releaseDate": "20170511", - "modificationDate": "20170623", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "| || l|1| || ||", - "series": "", - "seriesPosition": "" - }, - { - "id": "00011076", - "active": true, - "title": "Banca 15", - "subtitle": "", - "author": "", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170201_BANCA15_225x318.jpg", - "originalImageUrl": 
"http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistES/20170201_BANCA15.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING", - "ACSM" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "2017020115", - "language": "", - "publicationDate": "", - "publisher": "", - "releaseDate": "20170511", - "modificationDate": "20161205", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "| || l|1| || ||", - "series": "", - "seriesPosition": "" - }, - { - "id": "00009963", - "active": true, - "title": "Mar de mañana", - "subtitle": "", - "author": "Margaret Mazzantini", - "coverImageUrl": "http://pruebasotk.odilotk.es/Mar_de_ma_ana/cover_9788420414768.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/Mar_de_ma_ana/cover_9788420414768.jpg", - "description": "Dos madres, dos hijos, dos mundos: las dos orillas de un único mar.Una pequeña gran novela de una de las grandes autoras de la literatura italiana actual. Jamila tiene apenas veinte años y ya es viuda y madre. Su hijo Farid ha crecido rodeado del polvo rojo del desierto y nunca ha visto el mar. La guerra arrasa su país, Libia, y Jamila sueña con buscar refugio en Italia. Así, emprende con Farid un viaje en barcaza, prometiéndole que durará menos que una canción de cuna. Desde la otra orilla, Angelina ve los navíos procedentes de Trípoli llegar a puerto. Hace cuarenta años emprendió el mismo viaje y ahora rememora la imagen del temible Gadafi, los amigos árabes que la recibieron y a Alí, su promesa de amor. Los caminos de Angelina y Jamila nunca se cruzarán, pero ambas tejen distintas tramas de una misma historia. Ganadora de los premios Rapallo-Carige, Strega, Grinzane Cavour, Città di Bari, Zepter, Campiello, Pavese y Matteotii. Traducida a 35 idiomas con más de 4.000.000 de lectores. La crítica ha dicho: «Mazzantini logra que recuperemos el significado del amor... El resultado es milagroso.» Elvira Navarro, Qué Leer «Una crónica extraordinaria con la luz de un lenguaje vivo, veloz, intensamente femenino.»La Stampa «Con un cierto tono de novela negra y mucha dulzura, Mar de mañana es un libro que deja huella.»Vanity Fair «Una escritora de gran talento e inteligencia cuyo trabajo honra a nuestra literatura.» Oggi «La prosa de Mazzantini es singular, refinada, rica en imágenes. Cada frase parece haber sido escogida después de un cuidadoso escrutinio, como si se tratara de perlas hábilmente montadas.» Corriere della Sera «Los personajes de Mazzantini destilan una fuerza inusual en cada detalle.» La Repubblica «Margaret Mazzantini escribe de un modo tan apasionado como raras veces puede leerse.» Buchmagazin «Las descripciones que hace Mazzantini del amor, tanto maternal como romántico, son de una impactante crudeza.» Kirkus Review «Margaret Mazzantini habla del amor de forma tan intensa, dolorosa y bella que de vez en cuando uno se queda sin respiración.» Cosmopolitan «Mazzantini encuentra un lenguaje capaz de expresar el dolor y el sufrimiento de los vencidos y olvidados en las fronteras de la historia.» Le Monde des Livres «En la electrizante escritura de Mazzantini hay algo de Le Clézio, David Grossman o Pasolini. A estos nombres, la autora añade a las grandes mujeres trágicas del pasado siglo: Duras, Morante... 
Pero la belleza cristalina de Mar de mañana es exclusiva de Mazzantini.» Livres Hebdo", - "formats": [ - "ACSM", - "EBOOK_STREAMING" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "", - "publicationDate": "20130501", - "publisher": "Castellano", - "releaseDate": "20141017", - "modificationDate": "20141017", - "subject": "Ficción moderna y contemporánea", - "subjects": [ - "Ficción moderna y contemporánea" - ], - "subjectsBisacCodes": [ - "FIC004000" - ], - "type": "", - "series": "", - "seriesPosition": "" - }, - { - "id": "00011135", - "active": true, - "title": "!Tention A Story of Boy-Life during the Peninsular War", - "subtitle": "A Story of Boy-Life during the Peninsular War", - "author": "George Manville Fenn", - "coverImageUrl": "http://pruebasotk.odilotk.es/public/_Tention_A_Story_of_Boy_Life_during_the_Peninsular_War/cover_bb3de17a-5cbf-4d24-ad65-ff52fbbf408f.jpg", - "originalImageUrl": "http://pruebasotk.odilotk.es/public/_Tention_A_Story_of_Boy_Life_during_the_Peninsular_War/cover_bb3de17a-5cbf-4d24-ad65-ff52fbbf408f.jpg", - "description": "", - "formats": [ - "EBOOK_STREAMING" - ], - "fileFormat": "epub", - "gradeLevel": "", - "isbn": "", - "language": "en", - "publicationDate": "", - "publisher": "Project Gutenberg", - "releaseDate": "20170914", - "modificationDate": "20170914", - "subject": "", - "subjects": [], - "subjectsBisacCodes": [], - "type": "", - "series": "", - "seriesPosition": "" - } -] \ No newline at end of file diff --git a/tests/api/files/odilo/release_hold_ok.json b/tests/api/files/odilo/release_hold_ok.json deleted file mode 100644 index 01c0ab17b1..0000000000 --- a/tests/api/files/odilo/release_hold_ok.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "id": "000000144", - "recordId": "00010982", - "available": false, - "startTime": "2017-10-11 10:57:45.0", - "notifiedTime": "2017-11-10 12:07:24.954", - "status": "C" -} \ No newline at end of file diff --git a/tests/api/mockapi/odilo.py b/tests/api/mockapi/odilo.py deleted file mode 100644 index b8c23c773c..0000000000 --- a/tests/api/mockapi/odilo.py +++ /dev/null @@ -1,88 +0,0 @@ -import json - -from sqlalchemy.orm import Session - -from api.odilo import OdiloAPI -from core.model import Library, get_one_or_create -from core.model.collection import Collection -from core.model.configuration import ExternalIntegration -from core.util.http import HTTP -from tests.core.mock import MockRequestsResponse - - -class MockOdiloAPI(OdiloAPI): - def patron_request(self, patron, pin, *args, **kwargs): - response = self._make_request(*args, **kwargs) - - # Modify the record of the request to include the patron information. - original_data = self.requests[-1] - - # The last item in the record of the request is keyword arguments. - # Stick this information in there to minimize confusion. 
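# --- Editorial aside (not part of the patch): a hedged sketch of how a test can
# --- use the recording trick in MockOdiloAPI.patron_request() above. Because the
# --- mock copies the patron and pin into the kwargs of the last entry in
# --- self.requests, a test can queue a canned response and then assert on what
# --- was sent. The endpoint string and this helper's name are made up for
# --- illustration; it assumes the MockOdiloAPI defined above.
def example_recorded_request_assertion(api, patron, pin):
    api.queue_response(200, content=b"{}")
    api.patron_request(patron, pin, "/some/endpoint")

    # Each recorded request is a (url, args, kwargs) tuple.
    url, args, kwargs = api.requests[-1]
    assert url == "/some/endpoint"
    assert kwargs["_patron"] is patron
    assert kwargs["_pin"] == pin
# --- End of editorial aside. ---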
- original_data[-1]["_patron"] = patron - original_data[-1]["_pin"] = pin - return response - - @classmethod - def mock_collection(cls, _db: Session, library: Library) -> Collection: - collection, ignore = get_one_or_create( - _db, - Collection, - name="Test Odilo Collection", - create_method_kwargs=dict( - external_account_id="library_id_123", - ), - ) - integration = collection.create_external_integration( - protocol=ExternalIntegration.ODILO - ) - config = collection.create_integration_configuration(ExternalIntegration.ODILO) - config.settings_dict = { - "username": "a", - "password": "b", - OdiloAPI.LIBRARY_API_BASE_URL: "http://library_api_base_url.test/api/v2", - } - config.for_library(library.id, create=True) - library.collections.append(collection) - - return collection - - def __init__(self, _db, collection, *args, **kwargs): - self.access_token_requests = [] - self.requests = [] - self.responses = [] - - self.access_token_response = self.mock_access_token_response("bearer token") - super().__init__(_db, collection, *args, **kwargs) - - def token_post(self, url, payload, headers={}, **kwargs): - """Mock the request for an OAuth token.""" - - self.access_token_requests.append((url, payload, headers, kwargs)) - response = self.access_token_response - return HTTP._process_response(url, response, **kwargs) - - def mock_access_token_response(self, credential, expires_in=-1): - token = dict(token=credential, expiresIn=expires_in) - return MockRequestsResponse(200, {}, json.dumps(token)) - - def queue_response(self, status_code, headers={}, content=None): - self.responses.insert(0, MockRequestsResponse(status_code, headers, content)) - - def _do_get(self, url, *args, **kwargs): - """Simulate Representation.simple_http_get.""" - response = self._make_request(url, *args, **kwargs) - return response.status_code, response.headers, response.content - - def _do_post(self, url, *args, **kwargs): - return self._make_request(url, *args, **kwargs) - - def _make_request(self, url, *args, **kwargs): - response = self.responses.pop() - self.requests.append((url, args, kwargs)) - return HTTP._process_response( - url, - response, - kwargs.get("allowed_response_codes"), - kwargs.get("disallowed_response_codes"), - ) diff --git a/tests/api/test_monitor.py b/tests/api/test_monitor.py index c933019bfd..2c464b347d 100644 --- a/tests/api/test_monitor.py +++ b/tests/api/test_monitor.py @@ -49,7 +49,7 @@ def test_reaping(self, db: DatabaseTransactionFixture): edition, open_access=False, data_source_name=DataSource.AXIS_360 ) not_open_access_4 = db.licensepool( - edition, open_access=False, data_source_name=DataSource.ODILO + edition, open_access=False, data_source_name=DataSource.BIBBLIO ) unlimited_access = db.licensepool( edition, unlimited_access=True, data_source_name=DataSource.AMAZON diff --git a/tests/api/test_odilo.py b/tests/api/test_odilo.py deleted file mode 100644 index 0b144a4ec1..0000000000 --- a/tests/api/test_odilo.py +++ /dev/null @@ -1,973 +0,0 @@ -from __future__ import annotations - -import json -from typing import TYPE_CHECKING - -import pytest - -from api.circulation import CirculationAPI -from api.circulation_exceptions import * -from api.odilo import ( - OdiloAPI, - OdiloBibliographicCoverageProvider, - OdiloCirculationMonitor, - OdiloRepresentationExtractor, -) -from core.integration.goals import Goals -from core.integration.registry import IntegrationRegistry -from core.metadata_layer import TimestampData -from core.model import ( - Classification, - Contributor, - DataSource, - 
DeliveryMechanism, - Edition, - ExternalIntegration, - Hyperlink, - Identifier, - MediaTypes, - Representation, -) -from core.util.datetime_helpers import datetime_utc, utc_now -from core.util.http import BadResponseException -from tests.api.mockapi.odilo import MockOdiloAPI -from tests.core.mock import MockRequestsResponse - -if TYPE_CHECKING: - from tests.fixtures.api_odilo_files import OdiloFilesFixture - from tests.fixtures.authenticator import SimpleAuthIntegrationFixture - from tests.fixtures.database import DatabaseTransactionFixture - - -class OdiloFixture: - PIN = "c4ca4238a0b923820dcc509a6f75849b" - RECORD_ID = "00010982" - - api: MockOdiloAPI - - def sample_data(self, filename): - return self.files.sample_data(filename) - - def sample_json(self, filename): - data = self.sample_data(filename) - return data, json.loads(data) - - def error_message(self, error_code, message=None, token=None): - """Create a JSON document that simulates the message served by - Odilo given a certain error condition. - """ - message = message or self.db.fresh_str() - token = token or self.db.fresh_str() - data = dict(errorCode=error_code, message=message, token=token) - return json.dumps(data) - - def __init__(self, db: DatabaseTransactionFixture, files: OdiloFilesFixture): - library = db.default_library() - self.files = files - self.db = db - self.patron = db.patron() - self.patron.authorization_identifier = "0001000265" - self.collection = MockOdiloAPI.mock_collection(db.session, db.default_library()) - self.circulation = CirculationAPI( - db.session, - library, - registry=IntegrationRegistry( - Goals.LICENSE_GOAL, {ExternalIntegration.ODILO: MockOdiloAPI} - ), - ) - self.api = self.circulation.api_for_collection[self.collection.id] # type: ignore[assignment] - self.edition, self.licensepool = db.edition( - data_source_name=DataSource.ODILO, - identifier_type=Identifier.ODILO_ID, - collection=self.collection, - identifier_id=self.RECORD_ID, - with_license_pool=True, - ) - - self.delivery_mechanism = self.licensepool.delivery_mechanisms[0] - - -@pytest.fixture(scope="function") -def odilo( - db: DatabaseTransactionFixture, api_odilo_files_fixture: OdiloFilesFixture -) -> OdiloFixture: - return OdiloFixture(db, api_odilo_files_fixture) - - -class TestOdiloAPI: - def test_token_post_success(self, odilo: OdiloFixture): - odilo.api.queue_response(200, content="some content") - response = odilo.api.token_post(odilo.db.fresh_url(), "the payload") - assert 200 == response.status_code, ( - "Status code != 200 --> %i" % response.status_code - ) - assert odilo.api.access_token_response.content == response.content - odilo.api.log.info("Test token post success ok!") - - def test_get_success(self, odilo: OdiloFixture): - odilo.api.queue_response(200, content="some content") - status_code, headers, content = odilo.api.get(odilo.db.fresh_url(), {}) - assert 200 == status_code - assert b"some content" == content - odilo.api.log.info("Test get success ok!") - - def test_401_on_get_refreshes_bearer_token(self, odilo: OdiloFixture): - assert "bearer token" == odilo.api.token - - # We try to GET and receive a 401. - odilo.api.queue_response(401) - - # We refresh the bearer token. (This happens in - # MockOdiloAPI.token_post, so we don't mock the response - # in the normal way.) - odilo.api.access_token_response = odilo.api.mock_access_token_response( - "new bearer token" - ) - - # Then we retry the GET and it succeeds this time. 
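# --- Editorial aside (not part of the patch): the tests above lean on
# --- MockOdiloAPI.queue_response(), which inserts canned responses at the front
# --- of a list while _make_request() pops from the end, so responses come back
# --- in the order they were queued (FIFO). A minimal standalone sketch of that
# --- queue, with made-up names (CannedResponse, ResponseQueue):
from dataclasses import dataclass, field

@dataclass
class CannedResponse:
    status_code: int
    content: bytes = b""

@dataclass
class ResponseQueue:
    responses: list = field(default_factory=list)

    def queue(self, response: CannedResponse) -> None:
        self.responses.insert(0, response)  # newest goes to the front...

    def next(self) -> CannedResponse:
        return self.responses.pop()  # ...and pop() hands back the oldest.

q = ResponseQueue()
q.queue(CannedResponse(401))
q.queue(CannedResponse(200, b"at last, the content"))
assert q.next().status_code == 401  # first queued, first served
assert q.next().status_code == 200
# --- End of editorial aside. ---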
- odilo.api.queue_response(200, content="at last, the content") - status_code, headers, content = odilo.api.get(odilo.db.fresh_url(), {}) - - assert 200 == status_code - assert b"at last, the content" == content - - # The bearer token has been updated. - assert "new bearer token" == odilo.api.token - - odilo.api.log.info("Test 401 on get refreshes bearer token ok!") - - def test_credential_refresh_success(self, odilo: OdiloFixture): - """Verify the process of refreshing the Odilo bearer token.""" - credential = odilo.api.credential_object(lambda x: x) - assert "bearer token" == credential.credential - assert odilo.api.token == credential.credential - - odilo.api.access_token_response = odilo.api.mock_access_token_response( - "new bearer token" - ) - odilo.api.refresh_creds(credential) - assert "new bearer token" == credential.credential - assert odilo.api.token == credential.credential - - # By default, the access token's 'expiresIn' value is -1, - # indicating that the token will never expire. - # - # To reflect this fact, credential.expires is set to None. - assert None == credential.expires - - # But a token may specify a specific expiration time, - # which is used to set a future value for credential.expires. - odilo.api.access_token_response = odilo.api.mock_access_token_response( - "new bearer token 2", 1000 - ) - odilo.api.refresh_creds(credential) - assert "new bearer token 2" == credential.credential - assert odilo.api.token == credential.credential - assert credential.expires > utc_now() - - def test_credential_refresh_failure(self, odilo: OdiloFixture): - """Verify that a useful error message results when the Odilo bearer - token cannot be refreshed, since this is the most likely point - of failure on a new setup. - """ - odilo.api.access_token_response = MockRequestsResponse( - 200, {"Content-Type": "text/html"}, "Hi, this is the website, not the API." - ) - credential = odilo.api.credential_object(lambda x: x) - with pytest.raises(BadResponseException) as excinfo: - odilo.api.refresh_creds(credential) - assert "Bad response from " in str(excinfo.value) - assert ( - "may not be the right base URL. Response document was: 'Hi, this is the website, not the API.'" - in str(excinfo.value) - ) - - # Also test a 400 response code. - odilo.api.access_token_response = MockRequestsResponse( - 400, - {"Content-Type": "application/json"}, - json.dumps(dict(errors=[dict(description="Oops")])), - ) - with pytest.raises(BadResponseException) as excinfo: - odilo.api.refresh_creds(credential) - assert "Bad response from" in str(excinfo.value) - assert "Oops" in str(excinfo.value) - - # If there's a 400 response but no error information, - # the generic error message is used. - odilo.api.access_token_response = MockRequestsResponse( - 400, {"Content-Type": "application/json"}, json.dumps(dict()) - ) - with pytest.raises(BadResponseException) as excinfo: - odilo.api.refresh_creds(credential) - assert "Bad response from" in str(excinfo.value) - assert "may not be the right base URL." in str(excinfo.value) - - def test_401_after_token_refresh_raises_error(self, odilo: OdiloFixture): - assert "bearer token" == odilo.api.token - - # We try to GET and receive a 401. - odilo.api.queue_response(401) - - # We refresh the bearer token. - odilo.api.access_token_response = odilo.api.mock_access_token_response( - "new bearer token" - ) - - # Then we retry the GET but we get another 401. 
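# --- Editorial aside (not part of the patch): a worked sketch of the expiry rule
# --- that the deleted _update_credential() applies and that
# --- test_credential_refresh_success above exercises. expiresIn == -1 means the
# --- token never expires; any other value is trimmed to 90% so the credential is
# --- refreshed a little early. The compute_expiry name is made up for illustration.
import datetime
from typing import Optional

def compute_expiry(expires_in: int, now: datetime.datetime) -> Optional[datetime.datetime]:
    if expires_in == -1:
        return None  # token never expires, so no expiry timestamp is stored
    return now + datetime.timedelta(seconds=expires_in * 0.9)

now = datetime.datetime(2023, 8, 23, 12, 0, 0, tzinfo=datetime.timezone.utc)
assert compute_expiry(-1, now) is None
# expiresIn=1000 -> the credential is treated as good for 900 seconds:
assert compute_expiry(1000, now) == now + datetime.timedelta(seconds=900)
# --- End of editorial aside. ---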
- odilo.api.queue_response(401) - - # That raises a BadResponseException - with pytest.raises(BadResponseException) as excinfo: - odilo.api.get(odilo.db.fresh_url(), {}) - assert "Something's wrong with the Odilo OAuth Bearer Token!" in str( - excinfo.value - ) - - # The bearer token has been updated. - assert "new bearer token" == odilo.api.token - - def test_external_integration(self, odilo: OdiloFixture): - assert odilo.collection.external_integration == odilo.api.external_integration( - odilo.db.session - ) - - def test__run_self_tests( - self, - odilo: OdiloFixture, - create_simple_auth_integration: SimpleAuthIntegrationFixture, - ): - """Verify that OdiloAPI._run_self_tests() calls the right - methods. - """ - - class Mock(MockOdiloAPI): - "Mock every method used by OdiloAPI._run_self_tests." - - def __init__(self, _db, collection): - """Stop the default constructor from running.""" - self._db = _db - self.collection_id = collection.id - - # First we will call check_creds() to get a fresh credential. - mock_credential = object() - - def check_creds(self, force_refresh=False): - self.check_creds_called_with = force_refresh - return self.mock_credential - - # Finally, for every library associated with this - # collection, we'll call get_patron_checkouts() using - # the credentials of that library's test patron. - mock_patron_checkouts = object() - get_patron_checkouts_called_with = [] - - def get_patron_checkouts(self, patron, pin): - self.get_patron_checkouts_called_with.append((patron, pin)) - return self.mock_patron_checkouts - - # Now let's make sure two Libraries have access to this - # Collection -- one library with a default patron and one - # without. - no_default_patron = odilo.db.library(name="no patron") - odilo.collection.libraries.append(no_default_patron) - - with_default_patron = odilo.db.default_library() - create_simple_auth_integration(with_default_patron) - - # Now that everything is set up, run the self-test. - api = Mock(odilo.db.session, odilo.collection) - results = sorted(api._run_self_tests(odilo.db.session), key=lambda x: x.name) - loans_failure, sitewide, loans_success = results - - # Make sure all three tests were run and got the expected result. - # - - # We got a sitewide access token. - assert "Obtaining a sitewide access token" == sitewide.name - assert True == sitewide.success - assert api.mock_credential == sitewide.result - assert True == api.check_creds_called_with - - # We got the default patron's checkouts for the library that had - # a default patron configured. - assert ( - "Viewing the active loans for the test patron for library %s" - % with_default_patron.name - == loans_success.name - ) - assert True == loans_success.success - # get_patron_checkouts was only called once. - [(patron, pin)] = api.get_patron_checkouts_called_with - assert "username1" == patron.authorization_identifier - assert "password1" == pin - assert api.mock_patron_checkouts == loans_success.result - - # We couldn't get a patron access token for the other library. - assert ( - "Acquiring test patron credentials for library %s" % no_default_patron.name - == loans_failure.name - ) - assert False == loans_failure.success - assert "Library has no test patron configured." == str(loans_failure.exception) - - def test_run_self_tests_short_circuit(self, odilo: OdiloFixture): - """If OdiloAPI.check_creds can't get credentials, the rest of - the self-tests aren't even run. 
- - This probably doesn't matter much, because if check_creds doesn't - work we won't be able to instantiate the OdiloAPI class. - """ - - def explode(*args, **kwargs): - raise Exception("Failure!") - - odilo.api.check_creds = explode - - # Only one test will be run. - [check_creds] = odilo.api._run_self_tests(odilo.db.session) - assert "Failure!" == str(check_creds.exception) - - -class TestOdiloCirculationAPI: - ################# - # General tests - ################# - - # Test 404 Not Found --> patron not found --> 'patronNotFound' - def test_01_patron_not_found(self, odilo: OdiloFixture): - patron_not_found_data, patron_not_found_json = odilo.sample_json( - "error_patron_not_found.json" - ) - odilo.api.queue_response(404, content=patron_not_found_json) - - patron = odilo.db.patron() - patron.authorization_identifier = "no such patron" - pytest.raises( - PatronNotFoundOnRemote, - odilo.api.checkout, - patron, - odilo.PIN, - odilo.licensepool, - odilo.delivery_mechanism, - ) - odilo.api.log.info("Test patron not found ok!") - - # Test 404 Not Found --> record not found --> 'ERROR_DATA_NOT_FOUND' - def test_02_data_not_found(self, odilo: OdiloFixture): - data_not_found_data, data_not_found_json = odilo.sample_json( - "error_data_not_found.json" - ) - odilo.api.queue_response(404, content=data_not_found_json) - - odilo.licensepool.identifier.identifier = "12345678" - pytest.raises( - NotFoundOnRemote, - odilo.api.checkout, - odilo.patron, - odilo.PIN, - odilo.licensepool, - odilo.delivery_mechanism, - ) - odilo.api.log.info("Test resource not found on remote ok!") - - def test_make_absolute_url(self, odilo: OdiloFixture): - # A relative URL is made absolute using the API's base URL. - relative = "/relative-url" - absolute = odilo.api._make_absolute_url(relative) - assert absolute == odilo.api.library_api_base_url.decode("utf-8") + relative - - # An absolute URL is not modified. 
- for protocol in ("http", "https"): - already_absolute = "%s://example.com/" % protocol - assert already_absolute == odilo.api._make_absolute_url(already_absolute) - - ################# - # Checkout tests - ################# - - # Test 400 Bad Request --> Invalid format for that resource - def test_11_checkout_fake_format(self, odilo: OdiloFixture): - odilo.api.queue_response(400, content="") - pytest.raises( - NoAcceptableFormat, - odilo.api.checkout, - odilo.patron, - odilo.PIN, - odilo.licensepool, - odilo.delivery_mechanism, - ) - odilo.api.log.info("Test invalid format for resource ok!") - - def test_12_checkout_acsm_epub(self, odilo: OdiloFixture): - checkout_data, checkout_json = odilo.sample_json("checkout_acsm_epub_ok.json") - odilo.api.queue_response(200, content=checkout_json) - self.perform_and_validate_checkout(odilo) - - def test_13_checkout_acsm_pdf(self, odilo: OdiloFixture): - odilo.delivery_mechanism.delivery_mechanism.content_type = ( - MediaTypes.PDF_MEDIA_TYPE - ) - checkout_data, checkout_json = odilo.sample_json("checkout_acsm_pdf_ok.json") - odilo.api.queue_response(200, content=checkout_json) - self.perform_and_validate_checkout(odilo) - - def test_14_checkout_ebook_streaming(self, odilo: OdiloFixture): - odilo.delivery_mechanism.delivery_mechanism.content_type = ( - Representation.TEXT_HTML_MEDIA_TYPE - ) - odilo.delivery_mechanism.delivery_mechanism.drm_scheme = ( - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE - ) - checkout_data, checkout_json = odilo.sample_json( - "checkout_ebook_streaming_ok.json" - ) - odilo.api.queue_response(200, content=checkout_json) - self.perform_and_validate_checkout(odilo) - - def test_mechanism_set_on_borrow(self, odilo: OdiloFixture): - """The delivery mechanism for an Odilo title is set on checkout.""" - assert OdiloAPI.SET_DELIVERY_MECHANISM_AT == OdiloAPI.BORROW_STEP - - def perform_and_validate_checkout(self, odilo: OdiloFixture): - loan_info = odilo.api.checkout( - odilo.patron, odilo.PIN, odilo.licensepool, odilo.delivery_mechanism - ) - assert loan_info, "LoanInfo null --> checkout failed!" 
- odilo.api.log.info("Loan ok: %s" % loan_info.identifier) - - ################# - # Fulfill tests - ################# - - def test_21_fulfill_acsm_epub(self, odilo: OdiloFixture): - checkout_data, checkout_json = odilo.sample_json("patron_checkouts.json") - odilo.api.queue_response(200, content=checkout_json) - - acsm_data = odilo.sample_data("fulfill_ok_acsm_epub.acsm") - odilo.api.queue_response(200, content=acsm_data) - - fulfillment_info = self.fulfill(odilo) - assert fulfillment_info.content_type[0] == Representation.EPUB_MEDIA_TYPE - assert fulfillment_info.content_type[1] == DeliveryMechanism.ADOBE_DRM - - def test_22_fulfill_acsm_pdf(self, odilo: OdiloFixture): - odilo.delivery_mechanism.delivery_mechanism.content_type = ( - MediaTypes.PDF_MEDIA_TYPE - ) - checkout_data, checkout_json = odilo.sample_json("patron_checkouts.json") - odilo.api.queue_response(200, content=checkout_json) - - acsm_data = odilo.sample_data("fulfill_ok_acsm_pdf.acsm") - odilo.api.queue_response(200, content=acsm_data) - - fulfillment_info = self.fulfill(odilo) - assert fulfillment_info.content_type[0] == Representation.PDF_MEDIA_TYPE - assert fulfillment_info.content_type[1] == DeliveryMechanism.ADOBE_DRM - - def test_23_fulfill_ebook_streaming(self, odilo: OdiloFixture): - odilo.delivery_mechanism.delivery_mechanism.content_type = ( - Representation.TEXT_HTML_MEDIA_TYPE - ) - odilo.delivery_mechanism.delivery_mechanism.drm_scheme = ( - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE - ) - checkout_data, checkout_json = odilo.sample_json("patron_checkouts.json") - odilo.api.queue_response(200, content=checkout_json) - - odilo.licensepool.identifier.identifier = "00011055" - fulfillment_info = self.fulfill(odilo) - assert fulfillment_info.content_type[0] == Representation.TEXT_HTML_MEDIA_TYPE - assert ( - fulfillment_info.content_type[1] - == DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE - ) - - def fulfill(self, odilo: OdiloFixture): - fulfillment_info = odilo.api.fulfill( - odilo.patron, odilo.PIN, odilo.licensepool, odilo.delivery_mechanism - ) - assert fulfillment_info, "Cannot Fulfill !!" - - if fulfillment_info.content_link: - odilo.api.log.info("Fulfill link: %s" % fulfillment_info.content_link) - if fulfillment_info.content: - odilo.api.log.info("Fulfill content: %s" % fulfillment_info.content) - - return fulfillment_info - - ################# - # Hold tests - ################# - - def test_31_already_on_hold(self, odilo: OdiloFixture): - already_on_hold_data, already_on_hold_json = odilo.sample_json( - "error_hold_already_in_hold.json" - ) - odilo.api.queue_response(403, content=already_on_hold_json) - - pytest.raises( - AlreadyOnHold, - odilo.api.place_hold, - odilo.patron, - odilo.PIN, - odilo.licensepool, - "ejcepas@odilotid.es", - ) - - odilo.api.log.info("Test hold already on hold ok!") - - def test_32_place_hold(self, odilo: OdiloFixture): - hold_ok_data, hold_ok_json = odilo.sample_json("place_hold_ok.json") - odilo.api.queue_response(200, content=hold_ok_json) - - hold_info = odilo.api.place_hold( - odilo.patron, odilo.PIN, odilo.licensepool, "ejcepas@odilotid.es" - ) - assert hold_info, "HoldInfo null --> place hold failed!" 
- odilo.api.log.info("Hold ok: %s" % hold_info.identifier) - - ################# - # Patron Activity tests - ################# - - def test_41_patron_activity_invalid_patron(self, odilo: OdiloFixture): - patron_not_found_data, patron_not_found_json = odilo.sample_json( - "error_patron_not_found.json" - ) - odilo.api.queue_response(404, content=patron_not_found_json) - - pytest.raises( - PatronNotFoundOnRemote, odilo.api.patron_activity, odilo.patron, odilo.PIN - ) - - odilo.api.log.info("Test patron activity --> invalid patron ok!") - - def test_42_patron_activity(self, odilo: OdiloFixture): - patron_checkouts_data, patron_checkouts_json = odilo.sample_json( - "patron_checkouts.json" - ) - patron_holds_data, patron_holds_json = odilo.sample_json("patron_holds.json") - odilo.api.queue_response(200, content=patron_checkouts_json) - odilo.api.queue_response(200, content=patron_holds_json) - - loans_and_holds = odilo.api.patron_activity(odilo.patron, odilo.PIN) - assert loans_and_holds - assert 12 == len(loans_and_holds) - odilo.api.log.info("Test patron activity ok !!") - - ################# - # Checkin tests - ################# - - def test_51_checkin_patron_not_found(self, odilo: OdiloFixture): - patron_not_found_data, patron_not_found_json = odilo.sample_json( - "error_patron_not_found.json" - ) - odilo.api.queue_response(404, content=patron_not_found_json) - - pytest.raises( - PatronNotFoundOnRemote, - odilo.api.checkin, - odilo.patron, - odilo.PIN, - odilo.licensepool, - ) - - odilo.api.log.info("Test checkin --> invalid patron ok!") - - def test_52_checkin_checkout_not_found(self, odilo: OdiloFixture): - checkout_not_found_data, checkout_not_found_json = odilo.sample_json( - "error_checkout_not_found.json" - ) - odilo.api.queue_response(404, content=checkout_not_found_json) - - pytest.raises( - NotCheckedOut, odilo.api.checkin, odilo.patron, odilo.PIN, odilo.licensepool - ) - - odilo.api.log.info("Test checkin --> invalid checkout ok!") - - def test_53_checkin(self, odilo: OdiloFixture): - checkout_data, checkout_json = odilo.sample_json("patron_checkouts.json") - odilo.api.queue_response(200, content=checkout_json) - - checkin_data, checkin_json = odilo.sample_json("checkin_ok.json") - odilo.api.queue_response(200, content=checkin_json) - - response = odilo.api.checkin(odilo.patron, odilo.PIN, odilo.licensepool) - assert response.status_code == 200, ( - "Response code != 200, cannot perform checkin for record: " - + odilo.licensepool.identifier.identifier - + " patron: " - + odilo.patron.authorization_identifier - ) - - checkout_returned = response.json() - - assert checkout_returned - assert "4318" == checkout_returned["id"] - odilo.api.log.info("Checkout returned: %s" % checkout_returned["id"]) - - ################# - # Patron Activity tests - ################# - - def test_61_return_hold_patron_not_found(self, odilo: OdiloFixture): - patron_not_found_data, patron_not_found_json = odilo.sample_json( - "error_patron_not_found.json" - ) - odilo.api.queue_response(404, content=patron_not_found_json) - - pytest.raises( - PatronNotFoundOnRemote, - odilo.api.release_hold, - odilo.patron, - odilo.PIN, - odilo.licensepool, - ) - - odilo.api.log.info("Test release hold --> invalid patron ok!") - - def test_62_return_hold_not_found(self, odilo: OdiloFixture): - holds_data, holds_json = odilo.sample_json("patron_holds.json") - odilo.api.queue_response(200, content=holds_json) - - checkin_data, checkin_json = odilo.sample_json("error_hold_not_found.json") - odilo.api.queue_response(404, 
content=checkin_json) - - response = odilo.api.release_hold(odilo.patron, odilo.PIN, odilo.licensepool) - assert response == True, ( - "Cannot release hold, response false " - + odilo.licensepool.identifier.identifier - + " patron: " - + odilo.patron.authorization_identifier - ) - - odilo.api.log.info( - "Hold returned: %s" % odilo.licensepool.identifier.identifier - ) - - def test_63_return_hold(self, odilo: OdiloFixture): - holds_data, holds_json = odilo.sample_json("patron_holds.json") - odilo.api.queue_response(200, content=holds_json) - - release_hold_ok_data, release_hold_ok_json = odilo.sample_json( - "release_hold_ok.json" - ) - odilo.api.queue_response(200, content=release_hold_ok_json) - - response = odilo.api.release_hold(odilo.patron, odilo.PIN, odilo.licensepool) - assert response == True, ( - "Cannot release hold, response false " - + odilo.licensepool.identifier.identifier - + " patron: " - + odilo.patron.authorization_identifier - ) - - odilo.api.log.info( - "Hold returned: %s" % odilo.licensepool.identifier.identifier - ) - - -class TestOdiloDiscoveryAPI: - def test_run(self, odilo: OdiloFixture): - """Verify that running the OdiloCirculationMonitor calls all_ids().""" - - class Mock(OdiloCirculationMonitor): - def all_ids(self, modification_date=None): - self.called_with = modification_date - return 30, 15 - - # The first time run() is called, all_ids is called with - # a modification_date of None. - monitor = Mock(odilo.db.session, odilo.collection, api_class=MockOdiloAPI) - monitor.run() - assert None == monitor.called_with - progress = monitor.timestamp() - completed = progress.finish - - # The return value of all_ids() is used to populate the - # achievements field. - assert "Updated records: 30. New records: 15." == progress.achievements - - # The second time run() is called, all_ids() is called with a - # modification date five minutes earlier than the completion - # of the last run. - monitor.run() - expect = completed - monitor.OVERLAP - assert (expect - monitor.called_with).total_seconds() < 2 - - def test_all_ids_with_date(self, odilo: OdiloFixture): - # TODO: This tests that all_ids doesn't crash when you pass in - # a date. It doesn't test anything about all_ids except the - # return value. - monitor = OdiloCirculationMonitor( - odilo.db.session, odilo.collection, api_class=MockOdiloAPI - ) - assert monitor, "Monitor null !!" - assert ExternalIntegration.ODILO == monitor.protocol, "Wat??" - - records_metadata_data, records_metadata_json = odilo.sample_json( - "records_metadata.json" - ) - monitor.api.queue_response(200, content=records_metadata_data) - - availability_data = odilo.sample_data("record_availability.json") - for record in records_metadata_json: - monitor.api.queue_response(200, content=availability_data) - - monitor.api.queue_response(200, content="[]") # No more resources retrieved - - timestamp = TimestampData(start=datetime_utc(2017, 9, 1)) - updated, new = monitor.all_ids(None) - assert 10 == updated - assert 10 == new - - odilo.api.log.info("Odilo circulation monitor with date finished ok!!") - - def test_all_ids_without_date(self, odilo: OdiloFixture): - # TODO: This tests that all_ids doesn't crash when you pass in - # an empty date. It doesn't test anything about all_ids except the - # return value. - - monitor = OdiloCirculationMonitor( - odilo.db.session, odilo.collection, api_class=MockOdiloAPI - ) - assert monitor, "Monitor null !!" - assert ExternalIntegration.ODILO == monitor.protocol, "Wat??" 
- - records_metadata_data, records_metadata_json = odilo.sample_json( - "records_metadata.json" - ) - monitor.api.queue_response(200, content=records_metadata_data) - - availability_data = odilo.sample_data("record_availability.json") - for record in records_metadata_json: - monitor.api.queue_response(200, content=availability_data) - - monitor.api.queue_response(200, content="[]") # No more resources retrieved - - updated, new = monitor.all_ids(datetime_utc(2017, 9, 1)) - assert 10 == updated - assert 10 == new - - odilo.api.log.info("Odilo circulation monitor without date finished ok!!") - - -class OdiloCoverageFixture(OdiloFixture): - def __init__(self, db: DatabaseTransactionFixture, files: OdiloFilesFixture): - super().__init__(db, files) - self.provider = OdiloBibliographicCoverageProvider( - self.collection, api_class=MockOdiloAPI - ) - self.api = self.provider.api - - -@pytest.fixture(scope="function") -def odilo_coverage( - db: DatabaseTransactionFixture, api_odilo_files_fixture: OdiloFilesFixture -) -> OdiloCoverageFixture: - return OdiloCoverageFixture(db, api_odilo_files_fixture) - - -class TestOdiloBibliographicCoverageProvider: - def test_process_item(self, odilo_coverage: OdiloCoverageFixture): - record_metadata, record_metadata_json = odilo_coverage.sample_json( - "odilo_metadata.json" - ) - odilo_coverage.api.queue_response(200, content=record_metadata_json) - availability, availability_json = odilo_coverage.sample_json( - "odilo_availability.json" - ) - odilo_coverage.api.queue_response(200, content=availability) - - identifier, made_new = odilo_coverage.provider.process_item("00010982") - - # Check that the Identifier returned has the right .type and .identifier. - assert identifier, "Problem while testing process item !!!" - assert identifier.type == Identifier.ODILO_ID - assert identifier.identifier == "00010982" - - # Check that metadata and availability information were imported properly - [pool] = identifier.licensed_through - assert "Busy Brownies" == pool.work.title - - assert 2 == pool.licenses_owned - assert 1 == pool.licenses_available - assert 2 == pool.patrons_in_hold_queue - assert 1 == pool.licenses_reserved - - names = [x.delivery_mechanism.name for x in pool.delivery_mechanisms] - assert sorted( - [ - Representation.EPUB_MEDIA_TYPE - + " (" - + DeliveryMechanism.ADOBE_DRM - + ")", - Representation.TEXT_HTML_MEDIA_TYPE - + " (" - + DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE - + ")", - ] - ) == sorted(names) - - # Check that handle_success was called --> A Work was created and made presentation ready. - assert True == pool.work.presentation_ready - - odilo_coverage.api.log.info("Testing process item finished ok !!") - - def test_process_inactive_item(self, odilo_coverage: OdiloCoverageFixture): - record_metadata, record_metadata_json = odilo_coverage.sample_json( - "odilo_metadata_inactive.json" - ) - odilo_coverage.api.queue_response(200, content=record_metadata_json) - availability, availability_json = odilo_coverage.sample_json( - "odilo_availability_inactive.json" - ) - odilo_coverage.api.queue_response(200, content=availability) - - identifier, made_new = odilo_coverage.provider.process_item("00011135") - - # Check that the Identifier returned has the right .type and .identifier. - assert identifier, "Problem while testing process inactive item !!!" 
- assert identifier.type == Identifier.ODILO_ID - assert identifier.identifier == "00011135" - - [pool] = identifier.licensed_through - assert ( - "!Tention A Story of Boy-Life during the Peninsular War" == pool.work.title - ) - - # Check work not available - assert 0 == pool.licenses_owned - assert 0 == pool.licenses_available - - assert True == pool.work.presentation_ready - - odilo_coverage.api.log.info("Testing process item inactive finished ok !!") - - -class TestOdiloRepresentationExtractor: - def test_book_info_with_metadata(self, odilo: OdiloFixture): - # Tests that can convert an odilo json block into a Metadata object. - - raw, book_json = odilo.sample_json("odilo_metadata.json") - raw, availability = odilo.sample_json("odilo_availability.json") - metadata, active = OdiloRepresentationExtractor.record_info_to_metadata( - book_json, availability - ) - - assert "Busy Brownies" == metadata.title - assert ( - " (The Classic Fantasy Literature of Elves for Children)" - == metadata.subtitle - ) - assert "eng" == metadata.language - assert Edition.BOOK_MEDIUM == metadata.medium - assert ( - "The Classic Fantasy Literature for Children written in 1896 retold for Elves adventure." - == metadata.series - ) - assert "1" == metadata.series_position - assert "ANBOCO" == metadata.publisher - assert 2013 == metadata.published.year - assert 2 == metadata.published.month - assert 2 == metadata.published.day - assert 2017 == metadata.data_source_last_updated.year - assert 3 == metadata.data_source_last_updated.month - assert 10 == metadata.data_source_last_updated.day - # Related IDs. - assert (Identifier.ODILO_ID, "00010982") == ( - metadata.primary_identifier.type, - metadata.primary_identifier.identifier, - ) - ids = [(x.type, x.identifier) for x in metadata.identifiers] - assert [ - (Identifier.ISBN, "9783736418837"), - (Identifier.ODILO_ID, "00010982"), - ] == sorted(ids) - - subjects = sorted(metadata.subjects, key=lambda x: x.identifier) - weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT - assert [ - ("Children", "tag", weight), - ("Classics", "tag", weight), - ("FIC004000", "BISAC", weight), - ("Fantasy", "tag", weight), - ("K-12", "Grade level", weight), - ("LIT009000", "BISAC", weight), - ("YAF019020", "BISAC", weight), - ] == [(x.identifier, x.type, x.weight) for x in subjects] - - [author] = metadata.contributors - assert "Veale, E." == author.sort_name - assert "E. Veale" == author.display_name - assert [Contributor.AUTHOR_ROLE] == author.roles - - # Available formats. - [acsm_epub, ebook_streaming] = sorted( - metadata.circulation.formats, key=lambda x: x.content_type - ) - assert Representation.EPUB_MEDIA_TYPE == acsm_epub.content_type - assert DeliveryMechanism.ADOBE_DRM == acsm_epub.drm_scheme - - assert Representation.TEXT_HTML_MEDIA_TYPE == ebook_streaming.content_type - assert ( - DeliveryMechanism.STREAMING_TEXT_CONTENT_TYPE == ebook_streaming.drm_scheme - ) - - # Links to various resources. 
- image, thumbnail, description = sorted(metadata.links, key=lambda x: x.rel) - - assert Hyperlink.IMAGE == image.rel - assert ( - "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistUS/pg54159.jpg" - == image.href - ) - - assert Hyperlink.THUMBNAIL_IMAGE == thumbnail.rel - assert ( - "http://pruebasotk.odilotk.es/public/OdiloPlace_eduDistUS/pg54159_225x318.jpg" - == thumbnail.href - ) - - assert Hyperlink.DESCRIPTION == description.rel - assert description.content.startswith( - "All the Brownies had promised to help, and when a Brownie undertakes a thing he works as busily" - ) - - circulation = metadata.circulation - assert 2 == circulation.licenses_owned - assert 1 == circulation.licenses_available - assert 2 == circulation.patrons_in_hold_queue - assert 1 == circulation.licenses_reserved - - odilo.api.log.info("Testing book info with metadata finished ok !!") - - def test_book_info_missing_metadata(self, odilo: OdiloFixture): - # Verify that we properly handle missing metadata from Odilo. - raw, book_json = odilo.sample_json("odilo_metadata.json") - - # This was seen in real data. - book_json["series"] = " " - book_json["seriesPosition"] = " " - - metadata, active = OdiloRepresentationExtractor.record_info_to_metadata( - book_json, {} - ) - assert None == metadata.series - assert None == metadata.series_position - - def test_default_language_spanish(self, odilo: OdiloFixture): - """Since Odilo primarily distributes Spanish-language titles, if a - title comes in with no specified language, we assume it's - Spanish. - """ - raw, book_json = odilo.sample_json("odilo_metadata.json") - raw, availability = odilo.sample_json("odilo_availability.json") - del book_json["language"] - metadata, active = OdiloRepresentationExtractor.record_info_to_metadata( - book_json, availability - ) - assert "spa" == metadata.language diff --git a/tests/fixtures/api_odilo_files.py b/tests/fixtures/api_odilo_files.py deleted file mode 100644 index 62d060b868..0000000000 --- a/tests/fixtures/api_odilo_files.py +++ /dev/null @@ -1,16 +0,0 @@ -import pytest - -from tests.fixtures.files import APIFilesFixture - - -class OdiloFilesFixture(APIFilesFixture): - """A fixture providing access to Odilo files.""" - - def __init__(self): - super().__init__("odilo") - - -@pytest.fixture() -def api_odilo_files_fixture() -> OdiloFilesFixture: - """A fixture providing access to Odilo files.""" - return OdiloFilesFixture() From 349f6909b7774f96f9310b17e11ba455c737e9bb Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Mon, 23 Oct 2023 11:53:30 +0530 Subject: [PATCH 130/262] Switched on OPDS2 capabilities for crawlable feeds (#1475) --- api/controller.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/controller.py b/api/controller.py index 1339297daf..5508547e53 100644 --- a/api/controller.py +++ b/api/controller.py @@ -1077,7 +1077,7 @@ def _crawlable_feed( facets=facets, pagination=pagination, search_engine=search_engine, - ).as_response() + ).as_response(mime_types=flask.request.accept_mimetypes) def _load_search_facets(self, lane): entrypoints = list(flask.request.library.entrypoints) From 13c4b59dd111dd7a855c469e505412cd85d4b42c Mon Sep 17 00:00:00 2001 From: dbernstein Date: Tue, 24 Oct 2023 12:33:53 -0700 Subject: [PATCH 131/262] Add report name to the playtime summary attachment filename. 
(#1474) --- core/jobs/playtime_entries.py | 12 +++++++++--- tests/core/jobs/test_playtime_entries.py | 4 ++-- 2 files changed, 11 insertions(+), 5 deletions(-) diff --git a/core/jobs/playtime_entries.py b/core/jobs/playtime_entries.py index 6e5917ed84..c9f72bc87f 100644 --- a/core/jobs/playtime_entries.py +++ b/core/jobs/playtime_entries.py @@ -132,12 +132,18 @@ def do_run(self): Configuration.REPORTING_NAME_ENVIRONMENT_VARIABLE, "" ) + # format report name for use in csv attachment filename below + subject_prefix = reporting_name if len(reporting_name) > 0: - reporting_name += ": " + subject_prefix += ": " - email_subject = f"{reporting_name}Playtime Summaries {formatted_start_date} - {formatted_until_date}" + email_subject = f"{subject_prefix}Playtime Summaries {formatted_start_date} - {formatted_until_date}" + reporting_name_with_no_spaces = reporting_name.replace(" ", "_") + "-" attachment_extension = "csv" - attachment_name = f"playtime-summary-{formatted_start_date}-{formatted_until_date}.{attachment_extension}" + attachment_name = ( + f"playtime-summary-{reporting_name_with_no_spaces}" + f"{formatted_start_date}-{formatted_until_date}.{attachment_extension}" + ) # Write to a temporary file so we don't overflow the memory with TemporaryFile( diff --git a/tests/core/jobs/test_playtime_entries.py b/tests/core/jobs/test_playtime_entries.py index 438293e44c..b7bd95e8ab 100644 --- a/tests/core/jobs/test_playtime_entries.py +++ b/tests/core/jobs/test_playtime_entries.py @@ -247,7 +247,7 @@ def test_do_run(self, db: DatabaseTransactionFixture): # collection2 library2 playtime(db.session, identifier, collection2, library2, date3m(3), 300) - reporting_name = "test-cm" + reporting_name = "test cm" # Horrible unbracketted syntax for python 3.8 with patch("core.jobs.playtime_entries.csv.writer") as writer, patch( @@ -297,7 +297,7 @@ def test_do_run(self, db: DatabaseTransactionFixture): receivers=["reporting@test.email"], text="", attachments={ - f"playtime-summary-{cutoff}-{until}.csv": "" + f"playtime-summary-{reporting_name.replace(' ', '_')}-{cutoff}-{until}.csv": "" }, # Mock objects do not write data ) From aa2cfbb1b27ab7cc0e23dadedc770ff9ae839381 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 25 Oct 2023 12:20:03 +0530 Subject: [PATCH 132/262] PP-554 Remove cachedfeeds (#1476) * Removed the CachedFeeds table and supporting columns from Work Removed all mentions and uses of the said attributes and tables * Propagate Lane.max_cache_age down to the feed responses Since the caching is now outsourced to CDNs we should propagate the Lane's max_cache_age down to the CDN itself, so it can be cached correctly per the lane preference * Drop cachedfeedds and cache columns migration * Removed the max_cache_age attribute from facets --- .../20231020_7fceb9488bc6_drop_cachedfeeds.py | 95 +++ api/config.py | 13 - api/controller.py | 58 +- api/lanes.py | 7 - bin/cache_opds_blocks | 11 - bin/cache_opds_lane_facets | 11 - bin/database_reaper | 2 +- core/config.py | 3 - core/facets.py | 3 - core/feed/annotator/verbose.py | 2 - core/feed/opds.py | 3 +- core/lane.py | 128 +-- core/model/__init__.py | 6 +- core/model/cachedfeed.py | 398 +--------- core/model/edition.py | 4 - core/model/library.py | 8 - core/model/work.py | 24 +- core/monitor.py | 12 - core/scripts.py | 18 - docker/services/cron/cron.d/circulation | 1 - scripts.py | 413 +--------- tests/api/test_config.py | 8 - tests/api/test_controller_cm.py | 56 +- tests/api/test_controller_opdsfeed.py 
| 4 +- tests/api/test_lanes.py | 26 +- tests/api/test_scripts.py | 437 +---------- tests/core/models/test_cachedfeed.py | 731 ------------------ tests/core/models/test_listeners.py | 17 +- tests/core/test_lane.py | 88 +-- tests/core/test_monitor.py | 32 - tests/core/test_scripts.py | 34 - 31 files changed, 140 insertions(+), 2513 deletions(-) create mode 100644 alembic/versions/20231020_7fceb9488bc6_drop_cachedfeeds.py delete mode 100755 bin/cache_opds_blocks delete mode 100755 bin/cache_opds_lane_facets delete mode 100644 tests/core/models/test_cachedfeed.py diff --git a/alembic/versions/20231020_7fceb9488bc6_drop_cachedfeeds.py b/alembic/versions/20231020_7fceb9488bc6_drop_cachedfeeds.py new file mode 100644 index 0000000000..d96f239354 --- /dev/null +++ b/alembic/versions/20231020_7fceb9488bc6_drop_cachedfeeds.py @@ -0,0 +1,95 @@ +"""Drop cachedfeeds + +Revision ID: 7fceb9488bc6 +Revises: 0739d5558dda +Create Date: 2023-10-20 10:55:49.709820+00:00 + +""" +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "7fceb9488bc6" +down_revision = "0739d5558dda" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.drop_index("ix_cachedfeeds_lane_id", table_name="cachedfeeds") + op.drop_index("ix_cachedfeeds_library_id", table_name="cachedfeeds") + op.drop_index( + "ix_cachedfeeds_library_id_lane_id_type_facets_pagination", + table_name="cachedfeeds", + ) + op.drop_index("ix_cachedfeeds_timestamp", table_name="cachedfeeds") + op.drop_index("ix_cachedfeeds_work_id", table_name="cachedfeeds") + op.drop_table("cachedfeeds") + op.drop_column("works", "simple_opds_entry") + op.drop_column("works", "verbose_opds_entry") + op.drop_column("editions", "simple_opds_entry") + + +def downgrade() -> None: + op.add_column( + "works", + sa.Column( + "verbose_opds_entry", sa.VARCHAR(), autoincrement=False, nullable=True + ), + ) + op.add_column( + "works", + sa.Column( + "simple_opds_entry", sa.VARCHAR(), autoincrement=False, nullable=True + ), + ) + op.add_column( + "editions", + sa.Column( + "simple_opds_entry", sa.VARCHAR(), autoincrement=False, nullable=True + ), + ) + op.create_table( + "cachedfeeds", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("lane_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "timestamp", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column("type", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("unique_key", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("facets", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("pagination", sa.VARCHAR(), autoincrement=False, nullable=False), + sa.Column("content", sa.VARCHAR(), autoincrement=False, nullable=True), + sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("work_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.ForeignKeyConstraint( + ["lane_id"], ["lanes.id"], name="cachedfeeds_lane_id_fkey" + ), + sa.ForeignKeyConstraint( + ["library_id"], ["libraries.id"], name="cachedfeeds_library_id_fkey" + ), + sa.ForeignKeyConstraint( + ["work_id"], ["works.id"], name="cachedfeeds_work_id_fkey" + ), + sa.PrimaryKeyConstraint("id", name="cachedfeeds_pkey"), + ) + op.create_index("ix_cachedfeeds_work_id", "cachedfeeds", ["work_id"], unique=False) + op.create_index( + "ix_cachedfeeds_timestamp", "cachedfeeds", ["timestamp"], unique=False + ) + 
op.create_index( + "ix_cachedfeeds_library_id_lane_id_type_facets_pagination", + "cachedfeeds", + ["library_id", "lane_id", "type", "facets", "pagination"], + unique=False, + ) + op.create_index( + "ix_cachedfeeds_library_id", "cachedfeeds", ["library_id"], unique=False + ) + op.create_index("ix_cachedfeeds_lane_id", "cachedfeeds", ["lane_id"], unique=False) diff --git a/api/config.py b/api/config.py index bb8a0609da..636fd6cc48 100644 --- a/api/config.py +++ b/api/config.py @@ -16,8 +16,6 @@ class Configuration(CoreConfiguration): - DEFAULT_OPDS_FORMAT = "simple_opds_entry" - # The list of patron web urls allowed to access this CM PATRON_WEB_HOSTNAMES = "patron_web_hostnames" @@ -282,14 +280,3 @@ def cipher(cls, key: bytes) -> PKCS1OAEP_Cipher: encrypt() (public key) or decrypt() (private key). """ return PKCS1_OAEP.new(RSA.import_key(key)) - - -# We changed Configuration.DEFAULT_OPDS_FORMAT, but the Configuration -# class from core still has the old value. Change that one to match, -# so that core code that checks this constant will get the right -# value. -# -# TODO: We should come up with a better solution for this, probably -# involving a registry of Configuration objects that returns the -# appropriate one in any situation. This is a source of subtle bugs. -CoreConfiguration.DEFAULT_OPDS_FORMAT = Configuration.DEFAULT_OPDS_FORMAT diff --git a/api/controller.py b/api/controller.py index 5508547e53..0c4d78b0e8 100644 --- a/api/controller.py +++ b/api/controller.py @@ -68,21 +68,11 @@ ) from core.feed.navigation import NavigationFeed from core.feed.opds import NavigationFacets -from core.lane import ( - BaseFacets, - Facets, - FeaturedFacets, - Lane, - Pagination, - SearchFacets, - WorkList, -) +from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList from core.marc import MARCExporter from core.metadata_layer import ContributorData from core.model import ( - Admin, Annotation, - CachedFeed, CirculationEvent, Collection, ConfigurationSetting, @@ -249,30 +239,6 @@ def load_facets_from_request(self, *args, **kwargs): ): return NO_SUCH_LANE.detailed(_("Lane does not exist")) - if ( - isinstance(facets, BaseFacets) - and getattr(facets, "max_cache_age", None) is not None - ): - # A faceting object was loaded, and it tried to do something nonstandard - # with caching. - - # Try to get the AdminSignInController, which is - # associated with the CirculationManager object by the - # admin interface in admin/controller. - # - # If the admin interface wasn't initialized for whatever - # reason, we'll default to assuming the user is not an - # authenticated admin. - authenticated = False - controller = getattr(self, "admin_sign_in_controller", None) - if controller: - admin = controller.authenticated_admin_from_request() - # If authenticated_admin_from_request returns anything other than an admin (probably - # a ProblemDetail), the user is not an authenticated admin. 
- if isinstance(admin, Admin): - authenticated = True - if not authenticated: - facets.max_cache_age = None return facets def reload_settings_if_changed(self): @@ -948,7 +914,7 @@ def feed(self, lane_identifier, feed_class=OPDSAcquisitionFeed): search_engine=search_engine, ) return feed.as_response( - max_age=int(max_age) if max_age else None, + max_age=int(max_age) if max_age else lane.max_cache_age(), mime_types=flask.request.accept_mimetypes, ) @@ -984,7 +950,7 @@ def navigation(self, lane_identifier): worklist=lane, annotator=annotator, facets=facets, - ).as_response() + ).as_response(max_age=lane.max_cache_age()) def crawlable_library_feed(self): """Build or retrieve a crawlable acquisition feed for the @@ -1077,7 +1043,9 @@ def _crawlable_feed( facets=facets, pagination=pagination, search_engine=search_engine, - ).as_response(mime_types=flask.request.accept_mimetypes) + ).as_response( + mime_types=flask.request.accept_mimetypes, max_age=worklist.max_cache_age() + ) def _load_search_facets(self, lane): entrypoints = list(flask.request.library.entrypoints) @@ -1163,7 +1131,9 @@ def search(self, lane_identifier, feed_class=OPDSAcquisitionFeed): ) if isinstance(response, ProblemDetail): return response - return response.as_response(mime_types=flask.request.accept_mimetypes) + return response.as_response( + mime_types=flask.request.accept_mimetypes, max_age=lane.max_cache_age() + ) def _qa_feed( self, feed_factory, feed_title, controller_name, facet_class, worklist_factory @@ -1215,7 +1185,7 @@ def _qa_feed( annotator=annotator, search_engine=search_engine, facets=facets, - max_age=CachedFeed.IGNORE_CACHE, + max_age=0, ) def qa_feed(self, feed_class=OPDSAcquisitionFeed): @@ -2024,7 +1994,7 @@ def contributor( pagination=pagination, annotator=annotator, search_engine=search_engine, - ).as_response() + ).as_response(max_age=lane.max_cache_age()) def permalink(self, identifier_type, identifier): """Serve an entry for a single book. @@ -2121,7 +2091,7 @@ def related( pagination=None, facets=facets, search_engine=search_engine, - ).as_response() + ).as_response(max_age=lane.max_cache_age()) def recommendations( self, @@ -2180,7 +2150,7 @@ def recommendations( pagination=pagination, annotator=annotator, search_engine=search_engine, - ).as_response() + ).as_response(max_age=lane.max_cache_age()) def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFeed): """Serve a feed of books in a given series.""" @@ -2219,7 +2189,7 @@ def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFe pagination=pagination, annotator=annotator, search_engine=search_engine, - ).as_response() + ).as_response(max_age=lane.max_cache_age()) class ProfileController(CirculationManagerController): diff --git a/api/lanes.py b/api/lanes.py index 41a0e32c54..2827469049 100644 --- a/api/lanes.py +++ b/api/lanes.py @@ -14,7 +14,6 @@ WorkList, ) from core.model import ( - CachedFeed, Contributor, DataSource, Edition, @@ -1037,7 +1036,6 @@ class RecommendationLane(WorkBasedLane): # Cache for 24 hours -- would ideally be much longer but availability # information goes stale. MAX_CACHE_AGE = 24 * 60 * 60 - CACHED_FEED_TYPE = CachedFeed.RECOMMENDATIONS_TYPE def __init__( self, library, work, display_name=None, novelist_api=None, parent=None @@ -1124,7 +1122,6 @@ class SeriesLane(DynamicLane): # Cache for 24 hours -- would ideally be longer but availability # information goes stale. 
MAX_CACHE_AGE = 24 * 60 * 60 - CACHED_FEED_TYPE = CachedFeed.SERIES_TYPE def __init__(self, library, series_name, parent=None, **kwargs): if not series_name: @@ -1185,7 +1182,6 @@ class ContributorLane(DynamicLane): # Cache for 24 hours -- would ideally be longer but availability # information goes stale. MAX_CACHE_AGE = 24 * 60 * 60 - CACHED_FEED_TYPE = CachedFeed.CONTRIBUTOR_TYPE def __init__( self, library, contributor, parent=None, languages=None, audiences=None @@ -1250,7 +1246,6 @@ class RelatedBooksLane(WorkBasedLane): service. """ - CACHED_FEED_TYPE = CachedFeed.RELATED_TYPE DISPLAY_NAME = "Related Books" ROUTE = "related_books" @@ -1354,8 +1349,6 @@ def _recommendation_sublane(self, _db, novelist_api): class CrawlableFacets(Facets): """A special Facets class for crawlable feeds.""" - CACHED_FEED_TYPE = CachedFeed.CRAWLABLE_TYPE - # These facet settings are definitive of a crawlable feed. # Library configuration settings don't matter. SETTINGS = { diff --git a/bin/cache_opds_blocks b/bin/cache_opds_blocks deleted file mode 100755 index 6de4efda6a..0000000000 --- a/bin/cache_opds_blocks +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python -"""Refresh the top-level OPDS groups.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) -from scripts import CacheOPDSGroupFeedPerLane - -CacheOPDSGroupFeedPerLane().run() diff --git a/bin/cache_opds_lane_facets b/bin/cache_opds_lane_facets deleted file mode 100755 index 2f1819a5f1..0000000000 --- a/bin/cache_opds_lane_facets +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python -"""Refresh the OPDS lane facets.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) -from scripts import CacheFacetListsPerLane - -CacheFacetListsPerLane().run() diff --git a/bin/database_reaper b/bin/database_reaper index 678a1576fe..d3aba0d44d 100755 --- a/bin/database_reaper +++ b/bin/database_reaper @@ -1,5 +1,5 @@ #!/usr/bin/env python -"""Remove miscellaneous expired things (Credentials, CachedFeeds, Loans, etc.) +"""Remove miscellaneous expired things (Credentials, Loans, etc.) from the database. """ import os diff --git a/core/config.py b/core/config.py index 2ae72cfd6f..e728877b51 100644 --- a/core/config.py +++ b/core/config.py @@ -73,9 +73,6 @@ class Configuration(ConfigurationConstants): # Configuration key for push notifications status PUSH_NOTIFICATIONS_STATUS = "push_notifications_status" - # Lane policies - DEFAULT_OPDS_FORMAT = "verbose_opds_entry" - # Integrations URL = "url" INTEGRATIONS = "integrations" diff --git a/core/facets.py b/core/facets.py index 3dd7600879..692967fca2 100644 --- a/core/facets.py +++ b/core/facets.py @@ -8,9 +8,6 @@ class FacetConstants: ENTRY_POINT_REL = "http://librarysimplified.org/terms/rel/entrypoint" ENTRY_POINT_FACET_GROUP_NAME = "entrypoint" - # Query arguments can change how long a feed is to be cached. - MAX_CACHE_AGE_NAME = "max_age" - # Subset the collection, roughly, by quality. COLLECTION_FACET_GROUP_NAME = "collection" COLLECTION_FULL = "full" diff --git a/core/feed/annotator/verbose.py b/core/feed/annotator/verbose.py index 965f36aa01..eabcb870a2 100644 --- a/core/feed/annotator/verbose.py +++ b/core/feed/annotator/verbose.py @@ -22,8 +22,6 @@ class VerboseAnnotator(Annotator): in great detail. 
""" - opds_cache_field = Work.verbose_opds_entry.name - def annotate_work_entry( self, entry: WorkEntry, updated: Optional[datetime] = None ) -> None: diff --git a/core/feed/opds.py b/core/feed/opds.py index 3943bce58a..b33e1a5447 100644 --- a/core/feed/opds.py +++ b/core/feed/opds.py @@ -11,7 +11,6 @@ from core.feed.serializer.opds2 import OPDS2Serializer from core.feed.types import FeedData, WorkEntry from core.lane import FeaturedFacets -from core.model.cachedfeed import CachedFeed from core.util.flask_util import OPDSEntryResponse, OPDSFeedResponse from core.util.opds_writer import OPDSMessage @@ -106,4 +105,4 @@ class UnfulfillableWork(Exception): class NavigationFacets(FeaturedFacets): - CACHED_FEED_TYPE = CachedFeed.NAVIGATION_TYPE + pass diff --git a/core/lane.py b/core/lane.py index 740a5ef105..59c0f9c651 100644 --- a/core/lane.py +++ b/core/lane.py @@ -29,7 +29,6 @@ aliased, backref, contains_eager, - defer, joinedload, query, relationship, @@ -43,7 +42,6 @@ from core.facets import FacetConstants from core.model import ( Base, - CachedFeed, Collection, CustomList, CustomListEntry, @@ -84,36 +82,14 @@ class BaseFacets(FacetConstants): This is intended solely for use as a base class. """ - # If the use of a certain faceting object has implications for the - # type of feed (the way FeaturedFacets always implies a 'groups' feed), - # set the type of feed here. This will override any CACHED_FEED_TYPE - # associated with the WorkList. - CACHED_FEED_TYPE: Optional[str] = None - - # By default, faceting objects have no opinion on how long the feeds - # generated using them should be cached. - max_cache_age = None - def items(self): """Yields a 2-tuple for every active facet setting. These tuples are used to generate URLs that can identify - specific facet settings, and to distinguish between CachedFeed - objects that represent the same feed with different facet - settings. + specific facet settings. """ return [] - @property - def cached(self): - """This faceting object's opinion on whether feeds should be cached. - - :return: A boolean, or None for 'no opinion'. - """ - if self.max_cache_age is None: - return None - return self.max_cache_age != 0 - @property def query_string(self): """A query string fragment that propagates all active facet @@ -166,24 +142,18 @@ class FacetsWithEntryPoint(BaseFacets): selected EntryPoint. """ - def __init__( - self, entrypoint=None, entrypoint_is_default=False, max_cache_age=None, **kwargs - ): + def __init__(self, entrypoint=None, entrypoint_is_default=False, **kwargs): """Constructor. :param entrypoint: An EntryPoint (optional). :param entrypoint_is_default: If this is True, then `entrypoint` is a default value and was not determined by a user's explicit choice. - :param max_cache_age: Any feeds generated by this faceting object - will be cached for this amount of time. The default is to have - no opinion and let the Worklist manage this. :param kwargs: Other arguments may be supplied based on user input, but the default implementation is to ignore them. 
""" self.entrypoint = entrypoint self.entrypoint_is_default = entrypoint_is_default - self.max_cache_age = max_cache_age self.constructor_kwargs = kwargs @classmethod @@ -208,7 +178,6 @@ def navigate(self, entrypoint): return self.__class__( entrypoint=entrypoint, entrypoint_is_default=False, - max_cache_age=self.max_cache_age, **self.constructor_kwargs, ) @@ -284,15 +253,9 @@ def _from_request( return entrypoint entrypoint, is_default = entrypoint - max_cache_age = get_argument(Facets.MAX_CACHE_AGE_NAME, None) - max_cache_age = cls.load_max_cache_age(max_cache_age) - if isinstance(max_cache_age, ProblemDetail): - return max_cache_age - return cls( entrypoint=entrypoint, entrypoint_is_default=is_default, - max_cache_age=max_cache_age, **extra_kwargs, ) @@ -322,48 +285,13 @@ def load_entrypoint(cls, name, valid_entrypoints, default=None): return default, True return ep, False - @classmethod - def load_max_cache_age(cls, value): - """Convert a value for the MAX_CACHE_AGE_NAME parameter to a value - that CachedFeed will understand. - - :param value: A string. - :return: For now, either CachedFeed.IGNORE_CACHE or None. - """ - if value is None: - return value - - try: - value = int(value) - except ValueError as e: - value = None - - # At the moment, the only acceptable value that can be set - # through the web is zero -- i.e. don't use the cache at - # all. We can't give web clients fine-grained control over - # the internal workings of our cache; the most we can do - # is give them the opportunity to opt out. - # - # Thus, any nonzero value will be ignored. - if value == 0: - value = CachedFeed.IGNORE_CACHE - else: - value = None - return value - def items(self): """Yields a 2-tuple for every active facet setting. - In this class that just means the entrypoint and any max_cache_age. + In this class that just means the entrypoint. """ if self.entrypoint: yield (self.ENTRY_POINT_FACET_GROUP_NAME, self.entrypoint.INTERNAL_NAME) - if self.max_cache_age not in (None, CachedFeed.CACHE_FOREVER): - if self.max_cache_age == CachedFeed.IGNORE_CACHE: - value = 0 - else: - value = self.max_cache_age - yield (self.MAX_CACHE_AGE_NAME, str(value)) def modify_search_filter(self, filter): """Modify the given external_search.Filter object @@ -641,7 +569,6 @@ def navigate( enabled_facets=self.facets_enabled_at_init, entrypoint=(entrypoint or self.entrypoint), entrypoint_is_default=False, - max_cache_age=self.max_cache_age, ) def items(self): @@ -973,9 +900,6 @@ class FeaturedFacets(FacetsWithEntryPoint): AcquisitionFeed.groups(). """ - # This Facets class is used exclusively for grouped feeds. - CACHED_FEED_TYPE = CachedFeed.GROUPS_TYPE - def __init__( self, minimum_featured_quality, entrypoint=None, random_seed=None, **kwargs ): @@ -1012,9 +936,7 @@ def navigate(self, minimum_featured_quality=None, entrypoint=None): minimum_featured_quality or self.minimum_featured_quality ) entrypoint = entrypoint or self.entrypoint - return self.__class__( - minimum_featured_quality, entrypoint, max_cache_age=self.max_cache_age - ) + return self.__class__(minimum_featured_quality, entrypoint) def modify_search_filter(self, filter): super().modify_search_filter(filter) @@ -1413,10 +1335,8 @@ class WorkList: def visible(self) -> bool: return True - def max_cache_age(self, type): - """Determine how long a feed for this WorkList should be cached - internally. 
- """ + def max_cache_age(self): + """Determine how long a feed for this WorkList should be cached.""" return self.MAX_CACHE_AGE @classmethod @@ -2426,7 +2346,6 @@ def base_query(cls, _db): # Apply optimizations. qu = cls._modify_loading(qu) - qu = cls._defer_unused_fields(qu) return qu @classmethod @@ -2475,18 +2394,6 @@ def only_show_ready_deliverable_works(self, _db, query, show_suppressed=False): query, show_suppressed=show_suppressed, collection_ids=self.collection_ids ) - @classmethod - def _defer_unused_fields(cls, query): - """Some applications use the simple OPDS entry and some - applications use the verbose. Whichever one we don't need, - we can stop from even being sent over from the - database. - """ - if Configuration.DEFAULT_OPDS_FORMAT == "simple_opds_entry": - return query.options(defer(Work.verbose_opds_entry)) - else: - return query.options(defer(Work.simple_opds_entry)) - def bibliographic_filter_clauses(self, _db, qu): """Create a SQLAlchemy filter that excludes books whose bibliographic metadata doesn't match what we're looking for. @@ -2813,13 +2720,6 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): # admin interface can see all the lanes, visible or not. _visible = Column("visible", Boolean, default=True, nullable=False) - # A Lane may have many CachedFeeds. - cachedfeeds: Mapped[List[CachedFeed]] = relationship( - "CachedFeed", - backref="lane", - cascade="all, delete-orphan", - ) - # A Lane may have many CachedMARCFiles. cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship( "CachedMARCFile", @@ -3032,22 +2932,6 @@ def uses_customlists(self): return True return False - def max_cache_age(self, type): - """Determine how long a feed for this WorkList should be cached - internally. - - :param type: The type of feed. - """ - if type == CachedFeed.GROUPS_TYPE: - # Generating grouped feeds on the fly for Lanes is not incredibly - # expensive, but it's slow enough that we prefer to regenerate - # them in the background (using force_refresh=True) rather - # than while someone is waiting for an HTTP response. - return CachedFeed.CACHE_FOREVER - - # Other than that, we have no opinion -- use the default. 
- return super().max_cache_age(type) - def update_size(self, _db, search_engine=None): """Update the stored estimate of the number of Works in this Lane.""" library = self.get_library(_db) diff --git a/core/model/__init__.py b/core/model/__init__.py index 8627923774..c05aaf8db7 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -515,11 +515,7 @@ def _bulk_operation(self): SAMLFederation, ) from core.model.admin import Admin, AdminRole -from core.model.cachedfeed import ( - CachedFeed, - CachedMARCFile, - WillNotGenerateExpensiveFeed, -) +from core.model.cachedfeed import CachedMARCFile from core.model.circulationevent import CirculationEvent from core.model.classification import Classification, Genre, Subject from core.model.collection import ( diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py index cc0ac2093f..a6603caa8f 100644 --- a/core/model/cachedfeed.py +++ b/core/model/cachedfeed.py @@ -1,406 +1,16 @@ -# CachedFeed, WillNotGenerateExpensiveFeed +# Cached Marc Files from __future__ import annotations -import datetime -import logging -from collections import namedtuple -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING -from sqlalchemy import Column, DateTime, ForeignKey, Index, Integer, Unicode +from sqlalchemy import Column, DateTime, ForeignKey, Integer from sqlalchemy.orm import Mapped, relationship -from sqlalchemy.sql.expression import and_ -from core.model import Base, flush, get_one, get_one_or_create -from core.model.work import Work -from core.util.datetime_helpers import utc_now -from core.util.flask_util import OPDSFeedResponse +from core.model import Base if TYPE_CHECKING: from core.model import Representation -# This named tuple makes it easy to manage the return value of -# CachedFeed._prepare_keys. -CachedFeedKeys = namedtuple( - "CachedFeedKeys", - [ - "feed_type", - "library", - "work", - "lane_id", - "unique_key", - "facets_key", - "pagination_key", - ], -) - - -class CachedFeed(Base): - __tablename__ = "cachedfeeds" - id = Column(Integer, primary_key=True) - - # Every feed is associated with a lane. If null, this is a feed - # for a WorkList. If work_id is also null, it's a feed for the - # top-level. - lane_id = Column(Integer, ForeignKey("lanes.id"), nullable=True, index=True) - - # Every feed has a timestamp reflecting when it was created. - timestamp = Column(DateTime(timezone=True), nullable=True, index=True) - - # A feed is of a certain type--such as 'page' or 'groups'. - type = Column(Unicode, nullable=False) - - # A feed associated with a WorkList can have a unique key. - # This should be null if the feed is associated with a Lane. - unique_key = Column(Unicode, nullable=True) - - # A 'page' feed is associated with a set of values for the facet - # groups. - facets = Column(Unicode, nullable=True) - - # A 'page' feed is associated with a set of values for pagination. - pagination = Column(Unicode, nullable=False) - - # The content of the feed. - content = Column(Unicode, nullable=True) - - # Every feed is associated with a Library. - library_id = Column(Integer, ForeignKey("libraries.id"), index=True) - - # A feed may be associated with a Work. - work_id = Column(Integer, ForeignKey("works.id"), nullable=True, index=True) - work: Mapped[Optional[Work]] = relationship("Work", back_populates="cached_feeds") - - # Distinct types of feeds that might be cached. 
- GROUPS_TYPE = "groups" - PAGE_TYPE = "page" - NAVIGATION_TYPE = "navigation" - CRAWLABLE_TYPE = "crawlable" - RELATED_TYPE = "related" - RECOMMENDATIONS_TYPE = "recommendations" - SERIES_TYPE = "series" - CONTRIBUTOR_TYPE = "contributor" - - # Special constants for cache durations. - CACHE_FOREVER = object() - IGNORE_CACHE = object() - - log = logging.getLogger("CachedFeed") - - @classmethod - def fetch( - cls, - _db, - worklist, - facets, - pagination, - refresher_method, - max_age=None, - raw=False, - **response_kwargs, - ): - """Retrieve a cached feed from the database if possible. - - Generate it from scratch and store it in the database if - necessary. - - Return it in the most useful form to the caller. - - :param _db: A database connection. - :param worklist: The WorkList associated with this feed. - :param facets: A Facets object that distinguishes this feed from - others (for instance, by its sort order). - :param pagination: A Pagination object that explains which - page of a larger feed is being cached. - :param refresher_method: A function to call if it turns out - the contents of the feed need to be regenerated. This - function must take no arguments and return an object that - implements __unicode__. (A Unicode string or an OPDSFeed is fine.) - :param max_age: If a cached feed is older than this, it will - be considered stale and regenerated. This may be either a - number of seconds or a timedelta. If no value is - specified, a default value will be calculated based on - WorkList and Facets configuration. Setting this value to - zero will force a refresh. - :param raw: If this is False (the default), a Response ready to be - converted into a Flask Response object will be returned. If this - is True, the CachedFeed object itself will be returned. In most - non-test situations the default is better. - - :return: A Response or CachedFeed containing up-to-date content. - """ - - # Gather the information necessary to uniquely identify this - # page of this feed. - keys = cls._prepare_keys(_db, worklist, facets, pagination) - - # Calculate the maximum cache age, converting from timedelta - # to seconds if necessary. - max_age = cls.max_cache_age(worklist, keys.feed_type, facets, max_age) - - # These arguments will probably be passed into get_one, and - # will be passed into get_one_or_create in the event of a cache - # miss. - - # TODO: this constraint_clause might not be necessary anymore. - # ISTR it was an attempt to avoid race conditions, and we do a - # better job of that now. - constraint_clause = and_(cls.content != None, cls.timestamp != None) - kwargs = dict( - on_multiple="interchangeable", - constraint=constraint_clause, - type=keys.feed_type, - library=keys.library, - work=keys.work, - lane_id=keys.lane_id, - unique_key=keys.unique_key, - facets=keys.facets_key, - pagination=keys.pagination_key, - ) - feed_data = None - if max_age is cls.IGNORE_CACHE or isinstance(max_age, int) and max_age <= 0: - # Don't even bother checking for a CachedFeed: we're - # just going to replace it. - feed_obj = None - else: - feed_obj = get_one(_db, cls, **kwargs) - - should_refresh = cls._should_refresh(feed_obj, max_age) - if should_refresh: - # This is a cache miss. Either feed_obj is None or - # it's no good. We need to generate a new feed. - feed_data = str(refresher_method()) - generation_time = utc_now() - - if max_age is not cls.IGNORE_CACHE: - # Having gone through all the trouble of generating - # the feed, we want to cache it in the database. 
- - # Since it can take a while to generate a feed, and we know - # that the feed in the database is stale, it's possible that - # another thread _also_ noticed that feed was stale, and - # generated a similar feed while we were working. - # - # To avoid a database error, fetch the feed _again_ from the - # database rather than assuming we have the up-to-date - # object. - feed_obj, is_new = get_one_or_create(_db, cls, **kwargs) - if feed_obj.timestamp is None or feed_obj.timestamp < generation_time: - # Either there was no contention for this object, or there - # was contention but our feed is more up-to-date than - # the other thread(s). Our feed takes priority. - feed_obj.content = feed_data - feed_obj.timestamp = generation_time - elif feed_obj: - feed_data = feed_obj.content - - if raw and feed_obj: - return feed_obj - - # We have the information necessary to create a useful - # response-type object. - # - # Set some defaults in case the caller didn't pass them in. - if isinstance(max_age, int): - response_kwargs.setdefault("max_age", max_age) - - if max_age == cls.IGNORE_CACHE: - # If we were asked to ignore our internal cache, we should - # also tell the client not to store this document in _its_ - # internal cache. - response_kwargs["max_age"] = 0 - - if keys.library and keys.library.has_root_lanes: - # If this feed is associated with a Library that guides - # patrons to different lanes based on their patron type, - # all CachedFeeds need to be treated as private (but - # cacheable) on the client side. Otherwise, a change of - # client credentials might cause a cached representation - # to be reused when it should have been discarded. - # - # TODO: it might be possible to make this decision in a - # more fine-grained way, which would allow intermediaries - # to cache these feeds. - response_kwargs["private"] = True - - return OPDSFeedResponse(response=feed_data, **response_kwargs) - - @classmethod - def feed_type(cls, worklist, facets): - """Determine the 'type' of the feed. - - This may be defined either by `worklist` or by `facets`, with - `facets` taking priority. - - :return: A string that can go into cachedfeeds.type. - """ - type = CachedFeed.PAGE_TYPE - if worklist: - type = worklist.CACHED_FEED_TYPE or type - if facets: - type = facets.CACHED_FEED_TYPE or type - return type - - @classmethod - def max_cache_age(cls, worklist, type, facets, override=None): - """Determine the number of seconds that a cached feed - of a given type can remain fresh. - - Order of precedence: `override`, `facets`, `worklist`. - - :param worklist: A WorkList which may have an opinion on this - topic. - :param type: The type of feed being generated. - :param facets: A faceting object that may have an opinion on this - topic. - :param override: A specific value passed in by the caller. This - may either be a number of seconds or a timedelta. - - :return: A number of seconds, or CACHE_FOREVER or IGNORE_CACHE - """ - value = override - if value is None and facets is not None: - value = facets.max_cache_age - if value is None and worklist is not None: - value = worklist.max_cache_age(type) - - if value in (cls.CACHE_FOREVER, cls.IGNORE_CACHE): - # Special caching rules apply. - return value - - if value is None: - # Assume the feed should not be cached at all. - value = 0 - - if isinstance(value, datetime.timedelta): - value = value.total_seconds() - return value - - @classmethod - def _should_refresh(cls, feed_obj, max_age): - """Should we try to get a new representation of this CachedFeed? 
- 
-        :param feed_obj: A CachedFeed. This may be None, which is why
-        this is a class method.
-
-        :param max_age: Either a number of seconds, or one of the constants
-        CACHE_FOREVER or IGNORE_CACHE.
-        """
-        should_refresh = False
-        if feed_obj is None:
-            # If we didn't find a CachedFeed (maybe because we didn't
-            # bother looking), we must always refresh.
-            should_refresh = True
-        elif max_age == cls.IGNORE_CACHE:
-            # If we are ignoring the cache, we must always refresh.
-            should_refresh = True
-        elif max_age == cls.CACHE_FOREVER:
-            # If we found *anything*, and the cache time is CACHE_FOREVER,
-            # we will never refresh.
-            should_refresh = False
-        elif (
-            feed_obj.timestamp
-            and feed_obj.timestamp + datetime.timedelta(seconds=max_age) <= utc_now()
-        ):
-            # Here it comes down to a date comparison: how old is the
-            # CachedFeed?
-            should_refresh = True
-        return should_refresh
-
-    @classmethod
-    def _prepare_keys(cls, _db, worklist, facets, pagination):
-        """Prepare various unique keys that will go into the database
-        and be used to distinguish CachedFeeds from one another.
-
-        This is kept in a helper method for ease of testing.
-
-        :param worklist: A WorkList.
-        :param facets: A Facets object.
-        :param pagination: A Pagination object.
-
-        :return: A CachedFeedKeys object.
-        """
-        if not worklist:
-            raise ValueError("Cannot prepare a CachedFeed without a WorkList.")
-
-        feed_type = cls.feed_type(worklist, facets)
-
-        # The Library is the one associated with `worklist`.
-        library = worklist.get_library(_db)
-
-        # A feed may be associated with a specific Work,
-        # e.g. recommendations for readers of that Work.
-        work = getattr(worklist, "work", None)
-
-        # Either lane_id or unique_key must be set, but not both.
-        from core.lane import Lane
-
-        if isinstance(worklist, Lane):
-            lane_id = worklist.id
-            unique_key = None
-        else:
-            lane_id = None
-            unique_key = worklist.unique_key
-
-        facets_key = ""
-        if facets is not None:
-            if isinstance(facets.query_string, bytes):
-                facets_key = facets.query_string.decode("utf-8")
-            else:
-                facets_key = facets.query_string
-
-        pagination_key = ""
-        if pagination is not None:
-            if isinstance(pagination.query_string, bytes):
-                pagination_key = pagination.query_string.decode("utf-8")
-            else:
-                pagination_key = pagination.query_string
-
-        return CachedFeedKeys(
-            feed_type=feed_type,
-            library=library,
-            work=work,
-            lane_id=lane_id,
-            unique_key=unique_key,
-            facets_key=facets_key,
-            pagination_key=pagination_key,
-        )
-
-    def update(self, _db, content):
-        self.content = content
-        self.timestamp = utc_now()
-        flush(_db)
-
-    def __repr__(self):
-        if self.content:
-            length = len(self.content)
-        else:
-            length = "No content"
-        return "<CachedFeed #{} {} ({}) {} {} {} {}>".format(
-            self.id,
-            self.lane_id,
-            self.type,
-            self.facets,
-            self.pagination,
-            self.timestamp,
-            length,
-        )
-
-
-Index(
-    "ix_cachedfeeds_library_id_lane_id_type_facets_pagination",
-    CachedFeed.library_id,
-    CachedFeed.lane_id,
-    CachedFeed.type,
-    CachedFeed.facets,
-    CachedFeed.pagination,
-)
-
-
-class WillNotGenerateExpensiveFeed(Exception):
-    """This exception is raised when a feed is not cached, but it's too
-    expensive to generate.
- """ - class CachedMARCFile(Base): """A record that a MARC file has been created and cached for a particular lane.""" diff --git a/core/model/edition.py b/core/model/edition.py index 6d6307cc5c..59c395d46b 100644 --- a/core/model/edition.py +++ b/core/model/edition.py @@ -136,10 +136,6 @@ class Edition(Base, EditionConstants): cover_full_url = Column(Unicode) cover_thumbnail_url = Column(Unicode) - # An OPDS entry containing all metadata about this entry that - # would be relevant to display to a library patron. - simple_opds_entry = Column(Unicode, default=None) - # Information kept in here probably won't be used. extra: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) diff --git a/core/model/library.py b/core/model/library.py index e56c9eb9d1..9b9e563f2b 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -49,7 +49,6 @@ from core.lane import Lane from core.model import ( # noqa: autoflake AdminRole, - CachedFeed, CachedMARCFile, CirculationEvent, Collection, @@ -109,13 +108,6 @@ class Library(Base, HasSessionCache): "AdminRole", back_populates="library", cascade="all, delete-orphan" ) - # A Library may have many CachedFeeds. - cachedfeeds: Mapped[List[CachedFeed]] = relationship( - "CachedFeed", - backref="library", - cascade="all, delete-orphan", - ) - # A Library may have many CachedMARCFiles. cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship( "CachedMARCFile", diff --git a/core/model/work.py b/core/model/work.py index c226c90265..8ce42a3964 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -59,12 +59,7 @@ # Import related models when doing type checking if TYPE_CHECKING: - from core.model import ( # noqa: autoflake - CachedFeed, - CustomListEntry, - Library, - LicensePool, - ) + from core.model import CustomListEntry, Library, LicensePool class WorkGenre(Base): @@ -148,12 +143,6 @@ class Work(Base): "CustomListEntry", backref="work" ) - # One Work may have multiple CachedFeeds, and if a CachedFeed - # loses its Work, it ceases to exist. - cached_feeds: Mapped[List[CachedFeed]] = relationship( - "CachedFeed", back_populates="work", cascade="all, delete-orphan" - ) - # One Work may participate in many WorkGenre assignments. genres = association_proxy("work_genres", "genre", creator=WorkGenre.from_genre) work_genres: Mapped[List[WorkGenre]] = relationship( @@ -220,15 +209,6 @@ class Work(Base): # will be made to make the Work presentation ready. presentation_ready_exception = Column(Unicode, default=None, index=True) - # A precalculated OPDS entry containing all metadata about this - # work that would be relevant to display to a library patron. - simple_opds_entry = Column(Unicode, default=None) - - # A precalculated OPDS entry containing all metadata about this - # work that would be relevant to display in a machine-to-machine - # integration context. - verbose_opds_entry = Column(Unicode, default=None) - # A precalculated MARC record containing metadata about this # work that would be relevant to display in a library's public # catalog. @@ -237,8 +217,6 @@ class Work(Base): # These fields are potentially large and can be deferred if you # don't need all the data in a Work. 
LARGE_FIELDS = [ - "simple_opds_entry", - "verbose_opds_entry", "marc_record", "summary_text", ] diff --git a/core/monitor.py b/core/monitor.py index 349c85c9fe..4f664757c9 100644 --- a/core/monitor.py +++ b/core/monitor.py @@ -12,7 +12,6 @@ from core.metadata_layer import TimestampData from core.model import ( Base, - CachedFeed, CirculationEvent, Collection, CollectionMissing, @@ -889,17 +888,6 @@ def query(self): # ReaperMonitors that do something specific. -class CachedFeedReaper(ReaperMonitor): - """Removed cached feeds older than thirty days.""" - - MODEL_CLASS = CachedFeed - TIMESTAMP_FIELD = "timestamp" - MAX_AGE = 30 - - -ReaperMonitor.REGISTRY.append(CachedFeedReaper) - - class CredentialReaper(ReaperMonitor): """Remove Credentials that expired more than a day ago.""" diff --git a/core/scripts.py b/core/scripts.py index 0c0c56a44b..da886168b7 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -27,7 +27,6 @@ from core.metadata_layer import TimestampData from core.model import ( BaseCoverageRecord, - CachedFeed, Collection, ConfigurationSetting, Contributor, @@ -1774,8 +1773,6 @@ def _optimized_query(self): ) .options( defer(Work.summary_text), - defer(Work.simple_opds_entry), - defer(Work.verbose_opds_entry), ) ) @@ -2495,7 +2492,6 @@ def run(self, cmd_args=None): self.out("\n") else: self.out("There are no libraries in the system -- that's a problem.") - self.delete_cached_feeds() self.out("\n") collections = parsed.collections or self._db.query(Collection) for collection in collections: @@ -2519,20 +2515,6 @@ def check_library(self, library): else: self.out(" Associated with %s lanes.", len(library.lanes)) - def delete_cached_feeds(self): - page_feeds = self._db.query(CachedFeed).filter( - CachedFeed.type != CachedFeed.GROUPS_TYPE - ) - page_feeds_count = page_feeds.count() - self.out( - "%d feeds in cachedfeeds table, not counting grouped feeds.", - page_feeds_count, - ) - if page_feeds_count: - self.out(" Deleting them all.") - page_feeds.delete() - self._db.commit() - def explain_collection(self, collection): self.out('Examining collection "%s"', collection.name) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index 0f3a9ddf86..cea2902683 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -11,7 +11,6 @@ HOME=/var/www/circulation # These scripts update internal caches. 
# -*/30 * * * * root core/bin/run cache_opds_blocks >> /var/log/cron.log 2>&1 */30 * * * * root core/bin/run -d 15 search_index_refresh >> /var/log/cron.log 2>&1 10 0 * * * root core/bin/run search_index_clear >> /var/log/cron.log 2>&1 0 0 * * * root core/bin/run update_custom_list_size >> /var/log/cron.log 2>&1 diff --git a/scripts.py b/scripts.py index 4636a370b1..1fa5e82576 100644 --- a/scripts.py +++ b/scripts.py @@ -18,7 +18,6 @@ from api.axis import Axis360BibliographicCoverageProvider from api.bibliotheca import BibliothecaCirculationSweep from api.config import CannotLoadConfiguration, Configuration -from api.controller import CirculationManager from api.lanes import create_default_lanes from api.local_analytics_exporter import LocalAnalyticsExporter from api.marc import LibraryAnnotator as MARCLibraryAnnotator @@ -30,10 +29,8 @@ OPDSForDistributorsReaperMonitor, ) from api.overdrive import OverdriveAPI -from core.entrypoint import EntryPoint from core.external_search import ExternalSearchIndex -from core.feed.acquisition import OPDSAcquisitionFeed -from core.lane import Facets, FeaturedFacets, Lane, Pagination +from core.lane import Lane from core.marc import MARCExporter from core.model import ( LOCK_ID_DB_INIT, @@ -46,7 +43,6 @@ ExternalIntegration, Hold, Identifier, - Library, LicensePool, Loan, Patron, @@ -66,7 +62,6 @@ from core.service.container import container_instance from core.util import LanguageCodes from core.util.datetime_helpers import utc_now -from core.util.opds_writer import OPDSFeed class Script(CoreScript): @@ -149,412 +144,6 @@ def q(self): ) -class CacheRepresentationPerLane(TimestampScript, LaneSweeperScript): - name = "Cache one representation per lane" - - @classmethod - def arg_parser(cls, _db): - parser = LaneSweeperScript.arg_parser(_db) - parser.add_argument( - "--language", - help="Process only lanes that include books in this language.", - action="append", - ) - parser.add_argument( - "--max-depth", - help="Stop processing lanes once you reach this depth.", - type=int, - default=None, - ) - parser.add_argument( - "--min-depth", - help="Start processing lanes once you reach this depth.", - type=int, - default=1, - ) - return parser - - def __init__(self, _db=None, cmd_args=None, manager=None, *args, **kwargs): - """Constructor. - :param _db: A database connection. - :param cmd_args: A mock set of command-line arguments, to use instead - of looking at the actual command line. - :param testing: If this method creates a CirculationManager object, - this value will be passed in to its constructor as its value for - `testing`. - :param manager: A mock CirculationManager object, to use instead - of creating a new one (creating a CirculationManager object is - very time-consuming). - :param *args: Positional arguments to pass to the superconstructor. - :param **kwargs: Keyword arguments to pass to the superconstructor. 
- """ - - super().__init__(_db, *args, **kwargs) - self.parse_args(cmd_args) - if not manager: - manager = CirculationManager(self._db, self.services) - from api.app import app - - app.manager = manager - self.app = app - self.base_url = ConfigurationSetting.sitewide( - self._db, Configuration.BASE_URL_KEY - ).value - - def parse_args(self, cmd_args=None): - parser = self.arg_parser(self._db) - parsed = parser.parse_args(cmd_args) - self.languages = [] - if parsed.language: - for language in parsed.language: - alpha = LanguageCodes.string_to_alpha_3(language) - if alpha: - self.languages.append(alpha) - else: - self.log.warning("Ignored unrecognized language code %s", alpha) - self.max_depth = parsed.max_depth - self.min_depth = parsed.min_depth - - # Return the parsed arguments in case a subclass needs to - # process more args. - return parsed - - def should_process_lane(self, lane): - if not isinstance(lane, Lane): - return False - - language_ok = False - if not self.languages: - # We are considering lanes for every single language. - language_ok = True - - if not lane.languages: - # The lane has no language restrictions. - language_ok = True - - for language in self.languages: - if language in lane.languages: - language_ok = True - break - if not language_ok: - return False - - if self.max_depth is not None and lane.depth > self.max_depth: - return False - if self.min_depth is not None and lane.depth < self.min_depth: - return False - - return True - - def cache_url(self, annotator, lane, languages): - raise NotImplementedError() - - def generate_representation(self, *args, **kwargs): - raise NotImplementedError() - - # The generated document will probably be an OPDS acquisition - # feed. - ACCEPT_HEADER = OPDSFeed.ACQUISITION_FEED_TYPE - - cache_url_method = None - - def process_library(self, library): - begin = time.time() - client = self.app.test_client() - ctx = self.app.test_request_context(base_url=self.base_url) - ctx.push() - super().process_library(library) - ctx.pop() - end = time.time() - self.log.info( - "Processed library %s in %.2fsec", library.short_name, end - begin - ) - - def process_lane(self, lane): - """Generate a number of feeds for this lane. - One feed will be generated for each combination of Facets and - Pagination objects returned by facets() and pagination(). - """ - cached_feeds = [] - for facets in self.facets(lane): - for pagination in self.pagination(lane): - extra_description = "" - if facets: - extra_description += " Facets: %s." % facets.query_string - if pagination: - extra_description += " Pagination: %s." % pagination.query_string - self.log.info( - "Generating feed for %s.%s", lane.full_identifier, extra_description - ) - a = time.time() - feed = self.do_generate(lane, facets, pagination) - b = time.time() - if feed: - cached_feeds.append(feed) - self.log.info( - "Took %.2f sec to make %d bytes.", (b - a), len(feed.data) - ) - total_size = sum(len(x.data) for x in cached_feeds) - return cached_feeds - - def facets(self, lane): - """Yield a Facets object for each set of facets this - script is expected to handle. - :param lane: The lane under consideration. (Different lanes may have - different available facets.) - :yield: A sequence of Facets objects. - """ - yield None - - def pagination(self, lane): - """Yield a Pagination object for each page of a feed this - script is expected to handle. - :param lane: The lane under consideration. (Different lanes may have - different pagination rules.) - :yield: A sequence of Pagination objects. 
- """ - yield None - - -class CacheFacetListsPerLane(CacheRepresentationPerLane): - """Cache the first two pages of every relevant facet list for this lane.""" - - name = "Cache paginated OPDS feed for each lane" - - @classmethod - def arg_parser(cls, _db): - parser = CacheRepresentationPerLane.arg_parser(_db) - available = Facets.DEFAULT_ENABLED_FACETS[Facets.ORDER_FACET_GROUP_NAME] - order_help = "Generate feeds for this ordering. Possible values: %s." % ( - ", ".join(available) - ) - parser.add_argument( - "--order", - help=order_help, - action="append", - default=[], - ) - - available = Facets.DEFAULT_ENABLED_FACETS[Facets.AVAILABILITY_FACET_GROUP_NAME] - availability_help = ( - "Generate feeds for this availability setting. Possible values: %s." - % (", ".join(available)) - ) - parser.add_argument( - "--availability", - help=availability_help, - action="append", - default=[], - ) - - available = Facets.DEFAULT_ENABLED_FACETS[Facets.COLLECTION_FACET_GROUP_NAME] - collection_help = ( - "Generate feeds for this collection within each lane. Possible values: %s." - % (", ".join(available)) - ) - parser.add_argument( - "--collection", - help=collection_help, - action="append", - default=[], - ) - - available = [x.INTERNAL_NAME for x in EntryPoint.ENTRY_POINTS] - entrypoint_help = ( - "Generate feeds for this entry point within each lane. Possible values: %s." - % (", ".join(available)) - ) - parser.add_argument( - "--entrypoint", - help=entrypoint_help, - action="append", - default=[], - ) - - default_pages = 2 - parser.add_argument( - "--pages", - help="Number of pages to cache for each facet. Default: %d" % default_pages, - type=int, - default=default_pages, - ) - return parser - - def parse_args(self, cmd_args=None): - parsed = super().parse_args(cmd_args) - self.orders = parsed.order - self.availabilities = parsed.availability - self.collections = parsed.collection - self.entrypoints = parsed.entrypoint - self.pages = parsed.pages - return parsed - - def facets(self, lane): - """This script covers a user-specified combination of facets, but it - defaults to using every combination of available facets for - the given lane with a certain sort order. - This means every combination of availability, collection, and - entry point. - That's a whole lot of feeds, which is why this script isn't - actually used -- by the time we generate all of then, they've - expired. 
- """ - library = lane.get_library(self._db) - default_order = library.default_facet(Facets.ORDER_FACET_GROUP_NAME) - allowed_orders = library.enabled_facets(Facets.ORDER_FACET_GROUP_NAME) - chosen_orders = self.orders or [default_order] - - allowed_entrypoint_names = [x.INTERNAL_NAME for x in library.entrypoints] - default_entrypoint_name = None - if allowed_entrypoint_names: - default_entrypoint_name = allowed_entrypoint_names[0] - - chosen_entrypoints = self.entrypoints or allowed_entrypoint_names - - default_availability = library.default_facet( - Facets.AVAILABILITY_FACET_GROUP_NAME - ) - allowed_availabilities = library.enabled_facets( - Facets.AVAILABILITY_FACET_GROUP_NAME - ) - chosen_availabilities = self.availabilities or [default_availability] - - default_collection = library.default_facet(Facets.COLLECTION_FACET_GROUP_NAME) - allowed_collections = library.enabled_facets(Facets.COLLECTION_FACET_GROUP_NAME) - chosen_collections = self.collections or [default_collection] - - top_level = lane.parent is None - for entrypoint_name in chosen_entrypoints: - entrypoint = EntryPoint.BY_INTERNAL_NAME.get(entrypoint_name) - if not entrypoint: - logging.warning("Ignoring unknown entry point %s" % entrypoint_name) - continue - if not entrypoint_name in allowed_entrypoint_names: - logging.warning("Ignoring disabled entry point %s" % entrypoint_name) - continue - for order in chosen_orders: - if order not in allowed_orders: - logging.warning("Ignoring unsupported ordering %s" % order) - continue - for availability in chosen_availabilities: - if availability not in allowed_availabilities: - logging.warning( - "Ignoring unsupported availability %s" % availability - ) - continue - for collection in chosen_collections: - if collection not in allowed_collections: - logging.warning( - "Ignoring unsupported collection %s" % collection - ) - continue - facets = Facets( - library=library, - collection=collection, - availability=availability, - distributor=None, # All distributors always - collection_name=None, # All collections - entrypoint=entrypoint, - entrypoint_is_default=( - top_level - and entrypoint.INTERNAL_NAME == default_entrypoint_name - ), - order=order, - order_ascending=True, - ) - yield facets - - def pagination(self, lane): - """This script covers a user-specified number of pages.""" - page = Pagination.default() - for pagenum in range(0, self.pages): - yield page - page = page.next_page - if not page: - # There aren't enough books to fill `self.pages` - # pages. Stop working. - break - - def do_generate(self, lane, facets, pagination, feed_class=None): - feeds = [] - title = lane.display_name - library = lane.get_library(self._db) - annotator = self.app.manager.annotator(lane, facets=facets) - url = annotator.feed_url(lane, facets=facets, pagination=pagination) - feed_class = feed_class or OPDSAcquisitionFeed - return feed_class.page( - _db=self._db, - title=title, - url=url, - worklist=lane, - annotator=annotator, - pagination=pagination, - facets=facets, - search_engine=None, - ).as_response(max_age=0) - - -class CacheOPDSGroupFeedPerLane(CacheRepresentationPerLane): - name = "Cache OPDS grouped feed for each lane" - - def should_process_lane(self, lane): - # OPDS grouped feeds are only generated for lanes that have sublanes. 
- if not lane.children: - return False - if self.max_depth is not None and lane.depth > self.max_depth: - return False - return True - - def do_generate(self, lane, facets, pagination, feed_class=None): - title = lane.display_name - annotator = self.app.manager.annotator(lane, facets=facets) - url = annotator.groups_url(lane, facets) - feed_class = feed_class or OPDSAcquisitionFeed - - # Since grouped feeds are only cached for lanes that have sublanes, - # there's no need to consider the case of a lane with no sublanes, - # unlike the corresponding code in OPDSFeedController.groups() - return feed_class.groups( - _db=self._db, - title=title, - url=url, - worklist=lane, - annotator=annotator, - pagination=None, - facets=facets, - search_engine=None, - ).as_response(max_age=0) - - def facets(self, lane): - """Generate a Facets object for each of the library's enabled - entrypoints. - This is the only way grouped feeds are ever generated, so there is - no way to override this. - """ - top_level = lane.parent is None - library: Library = lane.get_library(self._db) - - # If the WorkList has explicitly defined EntryPoints, we want to - # create a grouped feed for each EntryPoint. Otherwise, we want - # to create a single grouped feed with no particular EntryPoint. - # - # We use library.entrypoints instead of lane.entrypoints - # because WorkList.entrypoints controls which entry points you - # can *switch to* from a given WorkList. We're handling the - # case where you switched further up the hierarchy and now - # you're navigating downwards. - entrypoints = list(library.entrypoints) or [None] - default_entrypoint = entrypoints[0] - for entrypoint in entrypoints: - facets = FeaturedFacets( - minimum_featured_quality=library.settings.minimum_featured_quality, - uses_customlists=lane.uses_customlists, - entrypoint=entrypoint, - entrypoint_is_default=(top_level and entrypoint is default_entrypoint), - ) - yield facets - - class CacheMARCFiles(LaneSweeperScript): """Generate and cache MARC files for each input library.""" diff --git a/tests/api/test_config.py b/tests/api/test_config.py index ac50a16dc0..affccc2b1a 100644 --- a/tests/api/test_config.py +++ b/tests/api/test_config.py @@ -10,7 +10,6 @@ from api.config import Configuration from core.config import CannotLoadConfiguration -from core.config import Configuration as CoreConfiguration from core.configuration.library import LibrarySettings from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.files import FilesFixture @@ -136,13 +135,6 @@ def test_max_outstanding_fines( assert max_fines is not None assert 100 == max_fines.amount - def test_default_opds_format(self): - # Initializing the Configuration object modifies the corresponding - # object in core, so that core code will behave appropriately. 
- assert ( - Configuration.DEFAULT_OPDS_FORMAT == CoreConfiguration.DEFAULT_OPDS_FORMAT - ) - @patch.object(os, "environ", new=dict()) def test_fcm_credentials(self, notifications_files_fixture): invalid_json = "{ this is invalid JSON }" diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index 06ff4d81aa..ba91176cb9 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -10,7 +10,7 @@ LibraryAnnotator, ) from core.lane import Facets, WorkList -from core.model import Admin, CachedFeed, ConfigurationSetting, create +from core.model import ConfigurationSetting, create from core.model.discovery_service_registration import DiscoveryServiceRegistration from core.problem_details import * from core.util.problem_detail import ProblemDetail @@ -251,60 +251,6 @@ def __init__(self, *args, **kwargs): assert isinstance(annotator, CirculationManagerAnnotator) assert worklist == annotator.lane - def test_load_facets_from_request_disable_caching( - self, circulation_fixture: CirculationControllerFixture - ): - # Only an authenticated admin can ask to disable caching, - # and load_facets_from_request is where we enforce this. - class MockAdminSignInController: - # Pretend to be able to find (or not) an Admin authenticated - # to make the current request. - admin = None - - def authenticated_admin_from_request(self): - return self.admin - - admin = Admin() - controller = MockAdminSignInController() - - circulation_fixture.manager.admin_sign_in_controller = controller # type: ignore[assignment] - - with circulation_fixture.request_context_with_library("/"): - # If you don't specify a max cache age, nothing happens, - # whether or not you're an admin. - for value in INVALID_CREDENTIALS, admin: - controller.admin = value # type: ignore - facets = circulation_fixture.manager.load_facets_from_request() - assert None == facets.max_cache_age - - with circulation_fixture.request_context_with_library("/?max_age=0"): - # Not an admin, max cache age requested. - controller.admin = INVALID_CREDENTIALS # type: ignore - facets = circulation_fixture.manager.load_facets_from_request() - assert None == facets.max_cache_age - - # Admin, max age requested. This is the only case where - # nonstandard caching rules make it through - # load_facets_from_request(). - controller.admin = admin # type: ignore - facets = circulation_fixture.manager.load_facets_from_request() - assert CachedFeed.IGNORE_CACHE == facets.max_cache_age - - # Since the admin sign-in controller is part of the admin - # package and not the API proper, test a situation where, for - # whatever reason, that controller was never initialized. - del circulation_fixture.manager.admin_sign_in_controller - - # Now what controller.admin says doesn't matter, because the - # controller's not associated with the CirculationManager. - # But everything still basically works; you just can't - # disable the cache. 
- with circulation_fixture.request_context_with_library("/?max_age=0"): - for value in (INVALID_CREDENTIALS, admin): - controller.admin = value # type: ignore - facets = circulation_fixture.manager.load_facets_from_request() - assert None == facets.max_cache_age - def test_load_facets_from_request_denies_access_to_inaccessible_worklist( self, circulation_fixture: CirculationControllerFixture ): diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/test_controller_opdsfeed.py index 345eadcb82..31a50df4e2 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/test_controller_opdsfeed.py @@ -17,7 +17,7 @@ from core.feed.navigation import NavigationFeed from core.feed.opds import NavigationFacets from core.lane import Facets, FeaturedFacets, Pagination, SearchFacets, WorkList -from core.model import CachedFeed, Edition +from core.model import Edition from core.util.flask_util import Response from tests.fixtures.api_controller import CirculationControllerFixture, WorkSpec from tests.fixtures.library import LibraryFixture @@ -748,7 +748,7 @@ def test__qa_feed(self, circulation_fixture: CirculationControllerFixture): assert expect_url == kwargs.pop("url") # type: ignore # These feeds are never to be cached. - assert CachedFeed.IGNORE_CACHE == kwargs.pop("max_age") # type: ignore + assert 0 == kwargs.pop("max_age") # type: ignore # To improve performance, a Pagination object was created that # limits each lane in the test feed to a single Work. diff --git a/tests/api/test_lanes.py b/tests/api/test_lanes.py index 7238f5821e..b4f762f886 100644 --- a/tests/api/test_lanes.py +++ b/tests/api/test_lanes.py @@ -31,14 +31,7 @@ from core.external_search import Filter from core.lane import DefaultSortOrderFacets, Facets, FeaturedFacets, Lane, WorkList from core.metadata_layer import ContributorData, Metadata -from core.model import ( - CachedFeed, - Contributor, - DataSource, - Edition, - ExternalIntegration, - create, -) +from core.model import Contributor, DataSource, Edition, ExternalIntegration, create from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture from tests.fixtures.search import ExternalSearchFixtureFake @@ -516,10 +509,6 @@ def related_books_fixture(db: DatabaseTransactionFixture) -> RelatedBooksFixture class TestRelatedBooksLane: - def test_feed_type(self, related_books_fixture: RelatedBooksFixture): - # All feeds from these lanes are cached as 'related works' feeds. - assert CachedFeed.RELATED_TYPE == RelatedBooksLane.CACHED_FEED_TYPE - def test_initialization(self, related_books_fixture: RelatedBooksFixture): # Asserts that a RelatedBooksLane won't be initialized for a work # without related books @@ -768,10 +757,6 @@ def test_default_sort_order(self, db: DatabaseTransactionFixture): class TestSeriesLane: - def test_feed_type(self): - # All feeds from these lanes are cached as series feeds. - assert CachedFeed.SERIES_TYPE == SeriesLane.CACHED_FEED_TYPE - def test_initialization(self, lane_fixture: LaneFixture): # An error is raised if SeriesLane is created with an empty string. pytest.raises(ValueError, SeriesLane, lane_fixture.db.default_library(), "") @@ -841,10 +826,6 @@ def test_default_sort_order(self, db: DatabaseTransactionFixture): class TestContributorLane: - def test_feed_type(self): - # All feeds of this type are cached as contributor feeds. 
- assert CachedFeed.CONTRIBUTOR_TYPE == ContributorLane.CACHED_FEED_TYPE - def test_initialization(self, lane_fixture: LaneFixture): with pytest.raises(ValueError) as excinfo: ContributorLane(lane_fixture.db.default_library(), None) @@ -925,11 +906,6 @@ def test_overview_facets(self, lane_fixture: LaneFixture): class TestCrawlableFacets: - def test_feed_type(self, db: DatabaseTransactionFixture): - # All crawlable feeds are cached as such, no matter what - # WorkList they come from. - assert CachedFeed.CRAWLABLE_TYPE == CrawlableFacets.CACHED_FEED_TYPE - def test_default(self, db: DatabaseTransactionFixture): facets = CrawlableFacets.default(db.default_library()) assert CrawlableFacets.COLLECTION_FULL == facets.collection diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index 225cade1ac..f66b0ecc10 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -15,9 +15,8 @@ from api.config import Configuration from api.marc import LibraryAnnotator as MARCLibraryAnnotator from api.novelist import NoveListAPI -from core.entrypoint import AudiobooksEntryPoint, EbooksEntryPoint -from core.external_search import ExternalSearchIndex, mock_search_index -from core.lane import Facets, FeaturedFacets, Pagination, WorkList +from core.external_search import ExternalSearchIndex +from core.lane import WorkList from core.marc import MARCExporter from core.model import ( LOCK_ID_DB_INIT, @@ -30,29 +29,21 @@ create, ) from core.util.datetime_helpers import datetime_utc, utc_now -from core.util.flask_util import OPDSFeedResponse, Response -from core.util.opds_writer import OPDSFeed from scripts import ( AdobeAccountIDResetScript, - CacheFacetListsPerLane, CacheMARCFiles, - CacheOPDSGroupFeedPerLane, - CacheRepresentationPerLane, GenerateShortTokenScript, InstanceInitializationScript, LanguageListScript, LocalAnalyticsExportScript, NovelistSnapshotScript, ) -from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.library import LibraryFixture -from tests.fixtures.search import EndToEndSearchFixture, ExternalSearchFixtureFake -from tests.mocks.search import fake_hits +from tests.fixtures.search import EndToEndSearchFixture if TYPE_CHECKING: from tests.fixtures.authenticator import SimpleAuthIntegrationFixture from tests.fixtures.database import DatabaseTransactionFixture - from tests.fixtures.search import ExternalSearchFixture class TestAdobeAccountIDResetScript: @@ -120,428 +111,6 @@ def lane_script_fixture( return LaneScriptFixture(db, library_fixture) -class TestCacheRepresentationPerLane: - def test_should_process_lane(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - - # Test that should_process_lane respects any specified - # language restrictions. - script = CacheRepresentationPerLane( - db.session, - [ - "--language=fre", - "--language=English", - "--language=none", - "--min-depth=0", - ], - manager=object(), - ) - assert ["fre", "eng"] == script.languages - - english_lane = db.lane(languages=["eng"]) - assert True == script.should_process_lane(english_lane) - - no_english_lane = db.lane(languages=["spa", "fre"]) - assert True == script.should_process_lane(no_english_lane) - - no_english_or_french_lane = db.lane(languages=["spa"]) - assert False == script.should_process_lane(no_english_or_french_lane) - - # Test that should_process_lane respects maximum depth - # restrictions. 
- script = CacheRepresentationPerLane( - db.session, ["--max-depth=0", "--min-depth=0"], manager=object() - ) - assert 0 == script.max_depth - - child = db.lane(display_name="sublane") - parent = db.lane(display_name="parent") - parent.sublanes = [child] - assert True == script.should_process_lane(parent) - assert False == script.should_process_lane(child) - - script = CacheRepresentationPerLane( - db.session, ["--min-depth=1"], manager=MockCirculationManager(db.session) - ) - assert 1 == script.min_depth - assert False == script.should_process_lane(parent) - assert True == script.should_process_lane(child) - - def test_process_lane(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - # process_lane() calls do_generate() once for every - # combination of items yielded by facets() and pagination(). - - class MockFacets: - def __init__(self, query): - self.query = query - - @property - def query_string(self): - return self.query - - facets1 = MockFacets("facets1") - facets2 = MockFacets("facets2") - page1 = Pagination.default() - page2 = page1.next_page - - class Mock(CacheRepresentationPerLane): - generated = [] - - def do_generate(self, lane, facets, pagination): - value = (lane, facets, pagination) - response = Response("mock response") - response.value = value - self.generated.append(response) - return response - - def facets(self, lane): - yield facets1 - yield facets2 - - def pagination(self, lane): - yield page1 - yield page2 - - lane = db.lane() - script = Mock(db.session, manager=object(), cmd_args=[]) - generated = script.process_lane(lane) - assert generated == script.generated - - c1, c2, c3, c4 = (x.value for x in script.generated) - assert (lane, facets1, page1) == c1 - assert (lane, facets1, page2) == c2 - assert (lane, facets2, page1) == c3 - assert (lane, facets2, page2) == c4 - - def test_default_facets(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - # By default, do_generate will only be called once, with facets=None. - script = CacheRepresentationPerLane(db.session, manager=object(), cmd_args=[]) - assert [None] == list(script.facets(object())) - - def test_default_pagination(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - # By default, do_generate will only be called once, with pagination=None. - script = CacheRepresentationPerLane(db.session, manager=object(), cmd_args=[]) - assert [None] == list(script.pagination(object())) - - -class TestCacheFacetListsPerLane: - def test_arguments(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - # Verify that command-line arguments become attributes of - # the CacheFacetListsPerLane object. 
- script = CacheFacetListsPerLane( - db.session, ["--order=title", "--order=added"], manager=object() - ) - assert ["title", "added"] == script.orders - script = CacheFacetListsPerLane( - db.session, - ["--availability=all", "--availability=always"], - manager=object(), - ) - assert ["all", "always"] == script.availabilities - - script = CacheFacetListsPerLane( - db.session, ["--collection=main", "--collection=full"], manager=object() - ) - assert ["main", "full"] == script.collections - - script = CacheFacetListsPerLane( - db.session, ["--entrypoint=Audio", "--entrypoint=Book"], manager=object() - ) - assert ["Audio", "Book"] == script.entrypoints - - script = CacheFacetListsPerLane(db.session, ["--pages=1"], manager=object()) - assert 1 == script.pages - - def test_facets( - self, lane_script_fixture: LaneScriptFixture, library_fixture: LibraryFixture - ): - db = lane_script_fixture.db - # Verify that CacheFacetListsPerLane.facets combines the items - # found in the attributes created by command-line parsing. - script = CacheFacetListsPerLane(db.session, manager=object(), cmd_args=[]) - script.orders = [Facets.ORDER_TITLE, Facets.ORDER_AUTHOR, "nonsense"] - script.entrypoints = [ - AudiobooksEntryPoint.INTERNAL_NAME, - "nonsense", - EbooksEntryPoint.INTERNAL_NAME, - ] - script.availabilities = [Facets.AVAILABLE_NOW, "nonsense"] - script.collections = [Facets.COLLECTION_FULL, "nonsense"] - - library = library_fixture.library() - - # EbooksEntryPoint is normally a valid entry point, but we're - # going to disable it for this library. - settings = library_fixture.mock_settings() - settings.enabled_entry_points = [AudiobooksEntryPoint.INTERNAL_NAME] - library.update_settings(settings) - - lane = db.lane(library=library) - - # We get one Facets object for every valid combination - # of parameters. Here there are 2*1*1*1 combinations. - f1, f2 = script.facets(lane) - - # The facets differ only in their .order. - assert Facets.ORDER_TITLE == f1.order - assert Facets.ORDER_AUTHOR == f2.order - - # All other fields are tied to the only acceptable values - # given in the script attributes. The first (and only) - # enabled entry point is treated as the default. - for f in f1, f2: - assert AudiobooksEntryPoint == f.entrypoint - assert True == f.entrypoint_is_default - assert Facets.AVAILABLE_NOW == f.availability - assert Facets.COLLECTION_FULL == f.collection - - # The first entry point is treated as the default only for WorkLists - # that have no parent. When the WorkList has a parent, the selected - # entry point is treated as an explicit choice -- navigating downward - # in the lane hierarchy ratifies the default value. 
- sublane = db.lane(parent=lane, library=library) - f1, f2 = script.facets(sublane) - for f in f1, f2: - assert False == f.entrypoint_is_default - - def test_pagination(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - script = CacheFacetListsPerLane(db.session, manager=object(), cmd_args=[]) - script.pages = 3 - lane = db.lane() - p1, p2, p3 = script.pagination(lane) - pagination = Pagination.default() - assert pagination.query_string == p1.query_string - assert pagination.next_page.query_string == p2.query_string - assert pagination.next_page.next_page.query_string == p3.query_string - - def test_do_generate( - self, - lane_script_fixture: LaneScriptFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - db = lane_script_fixture.db - migration = end_to_end_search_fixture.external_search_index.start_migration() - assert migration is not None - migration.finish() - - # When it's time to generate a feed, AcquisitionFeed.page - # is called with the right arguments. - class MockAcquisitionFeed: - called_with = None - - @classmethod - def page(cls, **kwargs): - cls.called_with = kwargs - resp = MagicMock() - resp.as_response.return_value = "here's your feed" - return resp - - # Test our ability to generate a single feed. - script = CacheFacetListsPerLane(db.session, testing=True, cmd_args=[]) - facets = Facets.default(db.default_library()) - pagination = Pagination.default() - - with script.app.test_request_context("/"): - lane = db.lane() - result = script.do_generate( - lane, facets, pagination, feed_class=MockAcquisitionFeed - ) - assert "here's your feed" == result - - args = MockAcquisitionFeed.called_with - assert db.session == args["_db"] # type: ignore - assert lane == args["worklist"] # type: ignore - assert lane.display_name == args["title"] # type: ignore - - # The Pagination object was passed into - # MockAcquisitionFeed.page, and it was also used to make the - # feed URL (see below). - assert pagination == args["pagination"] # type: ignore - - # The Facets object was passed into - # MockAcquisitionFeed.page, and it was also used to make - # the feed URL and to create the feed annotator. - assert facets == args["facets"] # type: ignore - annotator = args["annotator"] # type: ignore - assert facets == annotator.facets - assert args["url"] == annotator.feed_url( # type: ignore - lane, facets=facets, pagination=pagination - ) - - # Try again without mocking AcquisitionFeed, to verify that - # we get a Flask Response containing an OPDS feed. - response = script.do_generate(lane, facets, pagination) - assert isinstance(response, OPDSFeedResponse) - assert OPDSFeed.ACQUISITION_FEED_TYPE == response.content_type - assert response.get_data(as_text=True).startswith(" timestamp1 - - # Since there was a matching CachedFeed in the database - # already, that CachedFeed was passed into _should_refresh -- - # previously this value was None. - assert (result1, 42) == Mock._should_refresh_called_with - - # Now try the scenario where the feed does not need to be refreshed. - clear_helpers() - Mock.SHOULD_REFRESH = False - result3 = m( - db.session, - worklist, - facets, - pagination, - refresher, - max_age, - raw=True, - ) - - # Not only do we have the same CachedFeed as before, but its - # timestamp and content are unchanged. - assert result3 == result2 - assert "This is feed #2" == result3.content - assert timestamp2 == result3.timestamp - - # If max_age ends up zero, we don't check for the existence of a - # cached feed before forging ahead. 
- Mock.MAX_CACHE_AGE = 0 - clear_helpers() - m( - db.session, - worklist, - facets, - pagination, - refresher, - max_age, - raw=True, - ) - - # A matching CachedFeed exists in the database, but we didn't - # even look for it, because we knew we'd be looking it up - # again after feed generation. - assert (None, 0) == Mock._should_refresh_called_with - - def test_no_race_conditions(self, db: DatabaseTransactionFixture): - # Why do we look up a CachedFeed again after feed generation? - # Well, let's see what happens if someone else messes around - # with the CachedFeed object _while the refresher is running_. - # - # This is a race condition that happens in real life. Rather - # than setting up a multi-threaded test, we can have the - # refresher itself simulate a background modification by - # messing around with the CachedFeed object we know will - # eventually be returned. - # - # The most up-to-date feed always wins, so background - # modifications will take effect only if they made the - # CachedFeed look _newer_ than the foreground process does. - facets = Facets.default(db.default_library()) - pagination = Pagination.default() - wl = WorkList() - wl.initialize(db.default_library()) - - m = CachedFeed.fetch - - # In this case, two simulated threads try to create the same - # CachedFeed at the same time. We end up with a single - # CachedFeed containing the result of the last code that ran. - def simultaneous_refresher(): - # This refresher method simulates another thread creating - # a CachedFeed for this feed while this thread's - # refresher is running. - def other_thread_refresher(): - return "Another thread made a feed." - - m( - db.session, - wl, - facets, - pagination, - other_thread_refresher, - 0, - raw=True, - ) - - return "Then this thread made a feed." - - # This will call simultaneous_refresher(), which will call - # CachedFeed.fetch() _again_, which will call - # other_thread_refresher(). - result = m( - db.session, - wl, - facets, - pagination, - simultaneous_refresher, - 0, - raw=True, - ) - - # We ended up with a single CachedFeed containing the - # latest information. - assert [result] == db.session.query(CachedFeed).all() - assert "Then this thread made a feed." == result.content - - # If two threads contend for an existing CachedFeed, the one that - # sets CachedFeed.timestamp to the later value wins. - # - # Here, the other thread wins by setting .timestamp on the - # existing CachedFeed to a date in the future. - now = utc_now() - tomorrow = now + datetime.timedelta(days=1) - yesterday = now - datetime.timedelta(days=1) - - def tomorrow_vs_now(): - result.content = "Someone in the background set tomorrow's content." - result.timestamp = tomorrow - return "Today's content can't compete." - - tomorrow_result = m( - db.session, - wl, - facets, - pagination, - tomorrow_vs_now, - 0, - raw=True, - ) - assert tomorrow_result == result - assert ( - "Someone in the background set tomorrow's content." - == tomorrow_result.content - ) - assert tomorrow_result.timestamp == tomorrow - - # Here, the other thread sets .timestamp to a date in the past, and - # it loses out to the (apparently) newer feed. - def yesterday_vs_now(): - result.content = "Someone in the background set yesterday's content." - result.timestamp = yesterday - return "Today's content is fresher." 
- - now_result = m( - db.session, - wl, - facets, - pagination, - yesterday_vs_now, - 0, - raw=True, - ) - - # We got the same CachedFeed we've been getting this whole - # time, but the outdated data set by the 'background thread' - # has been fixed. - assert result == now_result - assert "Today's content is fresher." == result.content - assert result.timestamp > yesterday - - # This shouldn't happen, but if the CachedFeed's timestamp or - # content are *cleared out* in the background, between the - # time the CacheFeed is fetched and the time the refresher - # finishes, then we don't know what's going on and we don't - # take chances. We create a whole new CachedFeed object for - # the updated version of the feed. - - # First, try the situation where .timestamp is cleared out in - # the background. - def timestamp_cleared_in_background(): - result.content = "Someone else sets content and clears timestamp." - result.timestamp = None - - return "Non-weird content." - - result2 = m( - db.session, - wl, - facets, - pagination, - timestamp_cleared_in_background, - 0, - raw=True, - ) - now = utc_now() - - # result2 is a brand new CachedFeed. - assert result2 != result - assert "Non-weird content." == result2.content - assert (now - result2.timestamp).total_seconds() < 2 - - # We let the background process do whatever it wants to do - # with the old one. - assert "Someone else sets content and clears timestamp." == result.content - assert None == result.timestamp - - # Next, test the situation where .content is cleared out. - def content_cleared_in_background(): - result2.content = None - result2.timestamp = tomorrow - - return "Non-weird content." - - result3 = m( - db.session, - wl, - facets, - pagination, - content_cleared_in_background, - 0, - raw=True, - ) - now = utc_now() - - # Again, a brand new CachedFeed. - assert result3 != result2 - assert result3 != result - assert "Non-weird content." == result3.content - assert (now - result3.timestamp).total_seconds() < 2 - - # Again, we let the background process have the old one for - # whatever weird thing it wants to do. - assert None == result2.content - assert tomorrow == result2.timestamp - - def test_response_format(self, db: DatabaseTransactionFixture): - # Verify that fetch() can be told to return an appropriate - # OPDSFeedResponse object. This is the default behavior, since - # it preserves some useful information that would otherwise be - # lost. - facets = Facets.default(db.default_library()) - pagination = Pagination.default() - wl = WorkList() - wl.initialize(db.default_library()) - - def refresh(): - return "Here's a feed." - - private = object() - r = CachedFeed.fetch( - db.session, - wl, - facets, - pagination, - refresh, - max_age=102, - private=private, - ) - assert isinstance(r, OPDSFeedResponse) - assert 200 == r.status_code - assert OPDSFeed.ACQUISITION_FEED_TYPE == r.content_type - assert 102 == r.max_age - assert "Here's a feed." == str(r) - - # The extra argument `private`, not used by CachedFeed.fetch, was - # passed on to the OPDSFeedResponse constructor. - assert private == r.private - - # The CachedFeed was created; just not returned. - cf = db.session.query(CachedFeed).one() - assert "Here's a feed." == cf.content - - # Try it again as a cache hit. 
- r = CachedFeed.fetch( - db.session, - wl, - facets, - pagination, - refresh, - max_age=102, - private=private, - ) - assert isinstance(r, OPDSFeedResponse) - assert 200 == r.status_code - assert OPDSFeed.ACQUISITION_FEED_TYPE == r.content_type - assert 102 == r.max_age - assert "Here's a feed." == str(r) - - # If we tell CachedFeed to cache its feed 'forever', that only - # applies to the _database_ cache. The client is told to cache - # the feed for the default period. - r = CachedFeed.fetch( - db.session, - wl, - facets, - pagination, - refresh, - max_age=CachedFeed.CACHE_FOREVER, - private=private, - ) - assert isinstance(r, OPDSFeedResponse) - assert OPDSFeed.DEFAULT_MAX_AGE == r.max_age - - # If the Library associated with the WorkList used in the feed - # has root lanes, `private` is always set to True, even if we - # asked for the opposite. - - from core.model import Library - - Library._has_root_lane_cache[db.default_library().id] = True - r = CachedFeed.fetch( - db.session, - wl, - facets, - pagination, - refresh, - private=False, - ) - assert isinstance(r, OPDSFeedResponse) - assert True == r.private - - # Tests of helper methods. - - def test_feed_type(self): - # Verify that a WorkList or a Facets object can determine the - # value to be stored in CachedFeed.type, with Facets taking - # priority. - class DontCare: - CACHED_FEED_TYPE = None - - class WorkList: - CACHED_FEED_TYPE = "from worklist" - - class Facets: - CACHED_FEED_TYPE = "from facets" - - m = CachedFeed.feed_type - - # The default type is PAGE_TYPE. - assert CachedFeed.PAGE_TYPE == m(None, None) - assert CachedFeed.PAGE_TYPE == m(DontCare, DontCare) - - # If `worklist` has an opinion and `facets` doesn't, we use that. - assert "from worklist" == m(WorkList, None) - assert "from worklist" == m(WorkList, DontCare) - - # If `facets` has an opinion`, it is always used. - assert "from facets" == m(DontCare, Facets) - assert "from facets" == m(None, Facets) - assert "from facets" == m(WorkList, Facets) - - def test_max_cache_age(self): - m = CachedFeed.max_cache_age - - # If override is provided, that value is always used. - assert 60 == m(None, None, None, 60) - assert 60 == m(None, None, None, datetime.timedelta(minutes=1)) - - # Otherwise, the faceting object gets a chance to weigh in. - class MockFacets: - max_cache_age = 22 - - facets = MockFacets() - assert 22 == m(None, "feed type", facets=facets) - - # If there is no override and the faceting object doesn't - # care, CachedFeed.max_cache_age depends on - # WorkList.max_cache_age. This method can return a few - # different data types. - class MockWorklist: - def max_cache_age(self, type): - return dict( - number=1, - timedelta=datetime.timedelta(seconds=2), - expensive=CachedFeed.CACHE_FOREVER, - dont_cache=None, - )[type] - - # The result is always either a number of seconds or - # CACHE_FOREVER. - wl = MockWorklist() - assert 1 == m(wl, "number", None) - assert 2 == m(wl, "timedelta", None) - assert 0 == m(wl, "dont_cache", None) - assert CachedFeed.CACHE_FOREVER == m(wl, "expensive", None) - - # The faceting object still takes precedence, assuming it has - # an opinion. - facets.max_cache_age = None - assert CachedFeed.CACHE_FOREVER == m(wl, "expensive", facets) - - facets.max_cache_age = 22 - assert 22 == m(wl, "expensive", facets) - - # And an override takes precedence over that. 
- assert 60 == m(wl, "expensive", facets, 60) - - def test__prepare_keys(self, db: DatabaseTransactionFixture): - # Verify the method that turns WorkList, Facets, and Pagination - # into a unique set of values for CachedFeed fields. - - # First, prepare some mock classes. - class MockCachedFeed(CachedFeed): - feed_type_called_with = None - - @classmethod - def feed_type(cls, worklist, facets): - cls.feed_type_called_with = (worklist, facets) - return "mock type" - - class MockFacets: - query_string = b"facets query string" - - class MockPagination: - query_string = b"pagination query string" - - m = MockCachedFeed._prepare_keys - # A WorkList of some kind is required. - with pytest.raises(ValueError) as excinfo: - m(db.session, None, MockFacets, MockPagination) - assert "Cannot prepare a CachedFeed without a WorkList." in str(excinfo.value) - - # Basic Lane case, no facets or pagination. - lane = db.lane() - - # The response object is a named tuple. feed_type, library and - # lane_id are the only members set. - keys = m(db.session, lane, None, None) - assert "mock type" == keys.feed_type - assert lane.library == keys.library - assert None == keys.work - assert lane.id == keys.lane_id - assert None == keys.unique_key - assert "" == keys.facets_key - assert "" == keys.pagination_key - - # When pagination and/or facets are available, facets_key and - # pagination_key are set appropriately. - keys = m(db.session, lane, MockFacets, MockPagination) - assert "facets query string" == keys.facets_key - assert "pagination query string" == keys.pagination_key - - # Now we can check that feed_type was obtained by passing - # `worklist` and `facets` into MockCachedFeed.feed_type. - assert "mock type" == keys.feed_type - assert (lane, MockFacets) == MockCachedFeed.feed_type_called_with - - # When a WorkList is used instead of a Lane, keys.lane_id is None - # but keys.unique_key is set to worklist.unique_key. - worklist = WorkList() - worklist.initialize( - library=db.default_library(), - display_name="wl", - languages=["eng", "spa"], - audiences=[Classifier.AUDIENCE_CHILDREN], - ) - - keys = m(db.session, worklist, None, None) - assert "mock type" == keys.feed_type - assert worklist.get_library(db.session) == keys.library - assert None == keys.work - assert None == keys.lane_id - assert "wl-eng,spa-Children" == keys.unique_key - assert keys.unique_key == worklist.unique_key - assert "" == keys.facets_key - assert "" == keys.pagination_key - - # When a WorkList is associated with a specific .work, - # that information is included as keys.work. - work = object() - worklist.work = work # type: ignore[attr-defined] - keys = m(db.session, worklist, None, None) - assert work == keys.work - - def test__should_refresh(self): - # Test the algorithm that tells whether a CachedFeed is stale. - m = CachedFeed._should_refresh - - # If there's no CachedFeed, we must always refresh. - assert True == m(None, object()) - - class MockCachedFeed: - def __init__(self, timestamp): - self.timestamp = timestamp - - now = utc_now() - - # This feed was generated five minutes ago. - five_minutes_old = MockCachedFeed(now - datetime.timedelta(minutes=5)) - - # This feed was generated a thousand years ago. - ancient = MockCachedFeed(now - datetime.timedelta(days=1000 * 365)) - - # If we intend to cache forever, then even a thousand-year-old - # feed shouldn't be refreshed. - assert False == m(ancient, CachedFeed.CACHE_FOREVER) - - # Otherwise, it comes down to a date comparison. 
- - # If we're caching a feed for ten minutes, then the - # five-minute-old feed should not be refreshed. - assert False == m(five_minutes_old, 600) - - # If we're caching a feed for only a few seconds (or not at all), - # then the five-minute-old feed should be refreshed. - assert True == m(five_minutes_old, 0) - assert True == m(five_minutes_old, 1) - - # Realistic end-to-end tests. - - def test_lifecycle_with_lane(self, db: DatabaseTransactionFixture): - facets = Facets.default(db.default_library()) - pagination = Pagination.default() - lane = db.lane("My Lane", languages=["eng", "chi"]) - - # Fetch a cached feed from the database. It comes out updated. - refresher = MockFeedGenerator() - args = (db.session, lane, facets, pagination, refresher) - feed = CachedFeed.fetch(*args, max_age=0, raw=True) - assert "This is feed #1" == feed.content - - assert pagination.query_string == feed.pagination - assert facets.query_string == feed.facets - assert lane.id == feed.lane_id - - # Fetch it again, with a high max_age, and it's cached! - feed = CachedFeed.fetch(*args, max_age=1000, raw=True) - assert "This is feed #1" == feed.content - - # Fetch it with a low max_age, and it gets updated again. - feed = CachedFeed.fetch(*args, max_age=0, raw=True) - assert "This is feed #2" == feed.content - - # The special constant CACHE_FOREVER means it's always cached. - feed = CachedFeed.fetch(*args, max_age=CachedFeed.CACHE_FOREVER, raw=True) - assert "This is feed #2" == feed.content - - def test_lifecycle_with_worklist(self, db: DatabaseTransactionFixture): - facets = Facets.default(db.default_library()) - pagination = Pagination.default() - lane = WorkList() - lane.initialize(db.default_library()) - - # Fetch a cached feed from the database. It comes out updated. - refresher = MockFeedGenerator() - args = (db.session, lane, facets, pagination, refresher) - feed = CachedFeed.fetch(*args, max_age=0, raw=True) - assert "This is feed #1" == feed.content - - assert pagination.query_string == feed.pagination - assert facets.query_string == feed.facets - assert None == feed.lane_id - assert lane.unique_key == feed.unique_key - - # Fetch it again, with a high max_age, and it's cached! - feed = CachedFeed.fetch(*args, max_age=1000, raw=True) - assert "This is feed #1" == feed.content - - # Fetch it with a low max_age, and it gets updated again. - feed = CachedFeed.fetch(*args, max_age=0, raw=True) - assert "This is feed #2" == feed.content - - # The special constant CACHE_FOREVER means it's always cached. 
- feed = CachedFeed.fetch(*args, max_age=CachedFeed.CACHE_FOREVER, raw=True) - assert "This is feed #2" == feed.content diff --git a/tests/core/models/test_listeners.py b/tests/core/models/test_listeners.py index 2fc0c6e7c8..508aa87046 100644 --- a/tests/core/models/test_listeners.py +++ b/tests/core/models/test_listeners.py @@ -5,13 +5,7 @@ from core import lane, model from core.config import Configuration -from core.model import ( - CachedFeed, - ConfigurationSetting, - Timestamp, - WorkCoverageRecord, - create, -) +from core.model import ConfigurationSetting, Timestamp, WorkCoverageRecord from core.model.listeners import site_configuration_has_changed from core.util.datetime_helpers import utc_now from tests.fixtures.database import DatabaseTransactionFixture @@ -219,15 +213,6 @@ def test_configuration_relevant_collection_change_updates_configuration( session.commit() data.mock.assert_was_called() - # Associating a CachedFeed with the library does _not_ call - # the method, because nothing changed on the Library object and - # we don't listen for 'append' events on Library.cachedfeeds. - create( - session, CachedFeed, type="page", pagination="", facets="", library=library - ) - session.commit() - data.mock.assert_was_not_called() - # NOTE: test_work.py:TestWork.test_reindex_on_availability_change # tests the circumstances under which a database change # requires that a Work's entry in the search index be diff --git a/tests/core/test_lane.py b/tests/core/test_lane.py index 9eac0b8699..8b1b4fc1e5 100644 --- a/tests/core/test_lane.py +++ b/tests/core/test_lane.py @@ -32,7 +32,6 @@ WorkList, ) from core.model import ( - CachedFeed, CustomList, DataSource, Edition, @@ -70,10 +69,8 @@ def test_items(self): assert [expect_items] == list(f.items()) assert "%s=%s" % expect_items == f.query_string - f.max_cache_age = 41 expect_items = [ (f.ENTRY_POINT_FACET_GROUP_NAME, ep.INTERNAL_NAME), - (f.MAX_CACHE_AGE_NAME, "41"), ] assert expect_items == list(f.items()) @@ -95,7 +92,7 @@ def test_navigate(self): old_entrypoint = object() kwargs = dict(extra_key="extra_value") facets = FacetsWithEntryPoint( - old_entrypoint, entrypoint_is_default=True, max_cache_age=123, **kwargs + old_entrypoint, entrypoint_is_default=True, **kwargs ) new_entrypoint = object() new_facets = facets.navigate(new_entrypoint) @@ -110,9 +107,6 @@ def test_navigate(self): # the new Facets object is not using a default EntryPoint. assert False == new_facets.entrypoint_is_default - # The max_cache_age was preserved. - assert 123 == new_facets.max_cache_age - # The keyword arguments used to create the original faceting # object were propagated to its constructor. assert kwargs == new_facets.constructor_kwargs @@ -153,12 +147,11 @@ def _from_request(cls, *args, **kwargs): assert expect == result def test__from_request(self): - # _from_request calls load_entrypoint() and - # load_max_cache_age() and instantiates the class with the - # result. + # _from_request calls load_entrypoint() and instantiates + # the class with the result. class MockFacetsWithEntryPoint(FacetsWithEntryPoint): - # Mock load_entrypoint() and load_max_cache_age() to + # Mock load_entrypoint() to # return whatever values we have set up ahead of time. 
@classmethod @@ -175,22 +168,15 @@ def load_entrypoint(cls, entrypoint_name, entrypoints, default=None): ) return cls.expect_load_entrypoint - @classmethod - def load_max_cache_age(cls, max_cache_age): - cls.load_max_cache_age_called_with = max_cache_age - return cls.expect_max_cache_age - # Mock the functions that pull information out of an HTTP # request. # EntryPoint.load_entrypoint pulls the facet group name and # the maximum cache age out of the 'request' and passes those - # values into load_entrypoint() and load_max_cache_age. + # values into load_entrypoint() def get_argument(key, default): if key == Facets.ENTRY_POINT_FACET_GROUP_NAME: return "entrypoint name from request" - elif key == Facets.MAX_CACHE_AGE_NAME: - return "max cache age from request" # FacetsWithEntryPoint.load_entrypoint does not use # get_header(). @@ -217,25 +203,19 @@ def m(): MockFacetsWithEntryPoint.expect_load_entrypoint = INVALID_INPUT assert INVALID_INPUT == m() - # Similarly if load_entrypoint() works but load_max_cache_age - # returns a ProblemDetail. expect_entrypoint = object() expect_is_default = object() MockFacetsWithEntryPoint.expect_load_entrypoint = ( expect_entrypoint, expect_is_default, ) - MockFacetsWithEntryPoint.expect_max_cache_age = INVALID_INPUT - assert INVALID_INPUT == m() # Next, test success. The return value of load_entrypoint() is # is passed as 'entrypoint' into the FacetsWithEntryPoint - # constructor. The object returned by load_max_cache_age is - # passed as 'max_cache_age'. + # constructor. # # The object returned by load_entrypoint() does not need to be a # currently enabled entrypoint for the library. - MockFacetsWithEntryPoint.expect_max_cache_age = 345 facets = m() assert isinstance(facets, FacetsWithEntryPoint) assert expect_entrypoint == facets.entrypoint @@ -245,13 +225,8 @@ def m(): ["Selectable entrypoints"], default_entrypoint, ) == MockFacetsWithEntryPoint.load_entrypoint_called_with - assert 345 == facets.max_cache_age assert dict(extra="extra kwarg") == facets.constructor_kwargs assert MockFacetsWithEntryPoint.selectable_entrypoints_called_with == config - assert ( - MockFacetsWithEntryPoint.load_max_cache_age_called_with - == "max cache age from request" - ) def test_load_entrypoint(self): audio = AudiobooksEntryPoint @@ -287,30 +262,6 @@ def test_load_entrypoint(self): # nothing. assert (None, True) == m(audio.INTERNAL_NAME, []) - def test_load_max_cache_age(self): - m = FacetsWithEntryPoint.load_max_cache_age - - # The two valid options for max_cache_age as loaded in from a request are - # IGNORE_CACHE (do not pull from cache) and None (no opinion). - assert None == m("") - assert None == m(None) - assert CachedFeed.IGNORE_CACHE == m(0) - assert CachedFeed.IGNORE_CACHE == m("0") - - # All other values are treated as 'no opinion'. - assert None == m("1") - assert None == m(2) - assert None == m("not a number") - - def test_cache_age(self): - # No matter what type of feed we ask about, the max_cache_age of a - # FacetsWithEntryPoint is whatever is stored in its .max_cache_age. - # - # This is true even for 'feed types' that make no sense. - max_cache_age = object() - facets = FacetsWithEntryPoint(max_cache_age=max_cache_age) - assert max_cache_age == facets.max_cache_age - def test_selectable_entrypoints(self): """The default implementation of selectable_entrypoints just returns the worklist's entrypoints. 
@@ -352,19 +303,6 @@ def _configure_facets(library, enabled, default): library.settings_dict[f"facets_default_{key}"] = value library._settings = None - def test_max_cache_age(self, db: DatabaseTransactionFixture): - # A default Facets object has no opinion on what max_cache_age - # should be. - facets = Facets( - db.default_library(), - Facets.COLLECTION_FULL, - Facets.AVAILABLE_ALL, - Facets.ORDER_TITLE, - Facets.DISTRIBUTOR_ALL, - Facets.COLLECTION_NAME_ALL, - ) - assert None == facets.max_cache_age - def test_facet_groups(self, db: DatabaseTransactionFixture): db.default_collection().data_source = DataSource.AMAZON facets = Facets( @@ -1291,11 +1229,6 @@ def test_constructor(self): assert entrypoint == facets.entrypoint assert True == facets.entrypoint_is_default - def test_feed_type(self): - # If a grouped feed is built via CachedFeed.fetch, it will be - # filed as a grouped feed. - assert CachedFeed.GROUPS_TYPE == FeaturedFacets.CACHED_FEED_TYPE - def test_default( self, db: DatabaseTransactionFixture, library_fixture: LibraryFixture ): @@ -2231,7 +2164,7 @@ def test_max_cache_age(self): # WorkList is the default cache age for any type of OPDS feed, # no matter what type of feed is being generated. wl = WorkList() - assert OPDSFeed.DEFAULT_MAX_AGE == wl.max_cache_age(object()) + assert OPDSFeed.DEFAULT_MAX_AGE == wl.max_cache_age() def test_filter(self, db: DatabaseTransactionFixture): # Verify that filter() calls modify_search_filter_hook() @@ -3011,13 +2944,9 @@ class Mock(DatabaseBackedWorkList): def _modify_loading(cls, qu): return [qu, "_modify_loading"] - @classmethod - def _defer_unused_fields(cls, qu): - return qu + ["_defer_unused_fields"] - result = Mock.base_query(db.session) - [base_query, m, d] = result + [base_query, m] = result expect = ( db.session.query(Work) .join(Work.license_pools) @@ -3026,7 +2955,6 @@ def _defer_unused_fields(cls, qu): ) assert str(expect) == str(base_query) assert "_modify_loading" == m - assert "_defer_unused_fields" == d def test_bibliographic_filter_clauses(self, db: DatabaseTransactionFixture): called = dict() diff --git a/tests/core/test_monitor.py b/tests/core/test_monitor.py index 55bf12d010..42104f12e5 100644 --- a/tests/core/test_monitor.py +++ b/tests/core/test_monitor.py @@ -6,7 +6,6 @@ from core.config import Configuration from core.metadata_layer import TimestampData from core.model import ( - CachedFeed, CirculationEvent, Collection, CollectionMissing, @@ -28,7 +27,6 @@ get_one_or_create, ) from core.monitor import ( - CachedFeedReaper, CirculationEventLocationScrubber, CollectionMonitor, CollectionReaper, @@ -1030,8 +1028,6 @@ def test_cutoff(self, db: DatabaseTransactionFixture): Time.time_eq(m.cutoff, utc_now() - m.MAX_AGE) def test_specific_reapers(self, db: DatabaseTransactionFixture): - assert CachedFeed.timestamp == CachedFeedReaper(db.session).timestamp_field - assert 30 == CachedFeedReaper.MAX_AGE assert Credential.expires == CredentialReaper(db.session).timestamp_field assert 1 == CredentialReaper.MAX_AGE assert ( @@ -1040,10 +1036,6 @@ def test_specific_reapers(self, db: DatabaseTransactionFixture): ) assert 60 == PatronRecordReaper.MAX_AGE - def test_where_clause(self, db: DatabaseTransactionFixture): - m = CachedFeedReaper(db.session) - assert "cachedfeeds.timestamp < :timestamp_1" == str(m.where_clause) - def test_run_once(self, db: DatabaseTransactionFixture): # Create four Credentials: two expired, two valid. 
expired1 = db.credential() @@ -1140,25 +1132,9 @@ def remove_work(self, work): for work in works: WorkCoverageRecord.add_for(work, operation="some operation") - # Each work has a CachedFeed. - for work in works: - feed = CachedFeed( - work=work, type="page", content="content", pagination="", facets="" - ) - db.session.add(feed) - - # Also create a CachedFeed that has no associated Work. - workless_feed = CachedFeed( - work=None, type="page", content="content", pagination="", facets="" - ) - db.session.add(workless_feed) - - db.session.commit() - # Run the reaper. s = MockSearchIndex() m = WorkReaper(db.session, search_index_client=s) - print(m.search_index_client) m.run_once() # Search index was updated @@ -1188,14 +1164,6 @@ def remove_work(self, work): assert 2 == len([x for x in l.entries if not x.work]) assert [has_license_pool] == [x.work for x in l.entries if x.work] - # The CachedFeeds associated with the reaped Works have been - # deleted. The surviving Work still has one, and the - # CachedFeed that didn't have a work in the first place is - # unaffected. - feeds = db.session.query(CachedFeed).all() - assert [workless_feed] == [x for x in feeds if not x.work] - assert [has_license_pool] == [x.work for x in feeds if x.work] - class TestCollectionReaper: def test_query(self, db: DatabaseTransactionFixture): diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index 289140cd33..a4a8f3b17b 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -18,7 +18,6 @@ from core.lane import Lane, WorkList from core.metadata_layer import TimestampData from core.model import ( - CachedFeed, Collection, ConfigurationSetting, Contributor, @@ -1723,39 +1722,6 @@ def test_check_library( assert " This library has no collections -- that's a problem." == no_collection assert " This library has no lanes -- that's a problem." == no_lanes - def test_delete_cached_feeds( - self, - db: DatabaseTransactionFixture, - end_to_end_search_fixture: EndToEndSearchFixture, - ): - groups = CachedFeed(type=CachedFeed.GROUPS_TYPE, pagination="") - db.session.add(groups) - not_groups = CachedFeed(type=CachedFeed.PAGE_TYPE, pagination="") - db.session.add(not_groups) - - assert 2 == db.session.query(CachedFeed).count() - - script = MockWhereAreMyBooks( - _db=db.session, search=end_to_end_search_fixture.external_search_index - ) - script.delete_cached_feeds() - how_many, theyre_gone = script.output - assert ( - "%d feeds in cachedfeeds table, not counting grouped feeds.", - [1], - ) == how_many - assert " Deleting them all." == theyre_gone - - # Call it again, and we don't see "Deleting them all". There aren't - # any to delete. - script.output = [] - script.delete_cached_feeds() - [how_many] = script.output - assert ( - "%d feeds in cachedfeeds table, not counting grouped feeds.", - [0], - ) == how_many - @staticmethod def check_explanation( db: DatabaseTransactionFixture, From a9779578b16921418ffbb50220812786049c4943 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:33:28 +0000 Subject: [PATCH 133/262] Bump werkzeug from 3.0.0 to 3.0.1 (#1479) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c837f38bfe..f385ce05ca 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4384,13 +4384,13 @@ test = ["websockets"] [[package]] name = "werkzeug" -version = "3.0.0" +version = "3.0.1" description = "The comprehensive WSGI web application library." 
optional = false python-versions = ">=3.8" files = [ - {file = "werkzeug-3.0.0-py3-none-any.whl", hash = "sha256:cbb2600f7eabe51dbc0502f58be0b3e1b96b893b05695ea2b35b43d4de2d9962"}, - {file = "werkzeug-3.0.0.tar.gz", hash = "sha256:3ffff4dcc32db52ef3cc94dff3000a3c2846890f3a5a51800a27b909c5e770f0"}, + {file = "werkzeug-3.0.1-py3-none-any.whl", hash = "sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10"}, + {file = "werkzeug-3.0.1.tar.gz", hash = "sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc"}, ] [package.dependencies] From cafb589e611efb2e34bdffda7a824007d56bc1b1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 25 Oct 2023 17:33:41 +0000 Subject: [PATCH 134/262] Bump pytest from 7.4.2 to 7.4.3 (#1480) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index f385ce05ca..c070c487c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3242,13 +3242,13 @@ files = [ [[package]] name = "pytest" -version = "7.4.2" +version = "7.4.3" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-7.4.2-py3-none-any.whl", hash = "sha256:1d881c6124e08ff0a1bb75ba3ec0bfd8b5354a01c194ddd5a0a870a48d99b002"}, - {file = "pytest-7.4.2.tar.gz", hash = "sha256:a766259cfab564a2ad52cb1aae1b881a75c3eb7e34ca3779697c23ed47c47069"}, + {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, + {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, ] [package.dependencies] From 39abc33799ce2b927e26f5cee9689e78034d00bc Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 26 Oct 2023 11:35:47 +0530 Subject: [PATCH 135/262] Added the device registration link to the patron profile document (#1478) --- api/authenticator.py | 2 +- core/model/constants.py | 1 + core/model/patron.py | 12 ++++++++++++ tests/api/test_authenticator.py | 6 ++++-- tests/core/models/test_patron.py | 20 +++++++++++++++++++- 5 files changed, 37 insertions(+), 4 deletions(-) diff --git a/api/authenticator.py b/api/authenticator.py index d120358860..69a8ee5088 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -78,7 +78,7 @@ def profile_document(self): ) links.append(annotations_link) - doc["links"] = links + doc["links"].extend(links) if drm: doc["drm"] = drm diff --git a/core/model/constants.py b/core/model/constants.py index b0c5197f51..ba891ccb30 100644 --- a/core/model/constants.py +++ b/core/model/constants.py @@ -239,6 +239,7 @@ class LinkRelations: BORROW = "http://opds-spec.org/acquisition/borrow" TIME_TRACKING = "http://palaceproject.io/terms/timeTracking" + DEVICE_REGISTRATION = "http://palaceproject.io/terms/deviceRegistration" CIRCULATION_ALLOWED = [ OPEN_ACCESS_DOWNLOAD, diff --git a/core/model/patron.py b/core/model/patron.py index 4e686b1295..00869856fe 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -24,6 +24,7 @@ from core.classifier import Classifier from core.model import Base, get_one_or_create, numericrange_to_tuple +from core.model.constants import LinkRelations from core.model.credential import Credential from core.model.hybrid import hybrid_property from core.user_profile import ProfileStorage @@ -770,6 +771,17 @@ def profile_document(self): ) settings = {self.SYNCHRONIZE_ANNOTATIONS: 
patron.synchronize_annotations} doc[self.SETTINGS_KEY] = settings + doc["links"] = [ + dict( + rel=LinkRelations.DEVICE_REGISTRATION, + type="application/json", + href=self.url_for( + "put_patron_devices", + library_short_name=self.patron.library.short_name, + _external=True, + ), + ) + ] return doc def update(self, settable, full): diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index ca0ef63e27..505741c211 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -52,6 +52,7 @@ from core.integration.registry import IntegrationRegistry from core.mock_analytics_provider import MockAnalyticsProvider from core.model import CirculationEvent, ConfigurationSetting, Library, Patron +from core.model.constants import LinkRelations from core.model.integration import ( IntegrationConfiguration, IntegrationLibraryConfiguration, @@ -458,7 +459,7 @@ def mock_url_for(endpoint, library_short_name, _external=True): assert "drm:vendor" not in doc assert "drm:clientToken" not in doc assert "drm:scheme" not in doc - assert "links" not in doc + assert len(doc["links"]) == 1 # Now there's authdata configured, and the DRM fields are populated with # the vendor ID and a short client token @@ -474,13 +475,14 @@ def mock_url_for(endpoint, library_short_name, _external=True): assert ( adobe["drm:scheme"] == "http://librarysimplified.org/terms/drm/scheme/ACS" ) - [annotations_link] = doc["links"] + [devices_link, annotations_link] = doc["links"] assert annotations_link["rel"] == "http://www.w3.org/ns/oa#annotationService" assert ( annotations_link["href"] == "http://host/annotations?library_short_name=default" ) assert annotations_link["type"] == AnnotationWriter.CONTENT_TYPE + assert devices_link["rel"] == LinkRelations.DEVICE_REGISTRATION class TestAuthenticator: diff --git a/tests/core/models/test_patron.py b/tests/core/models/test_patron.py index bbca58fcd5..d6653725c0 100644 --- a/tests/core/models/test_patron.py +++ b/tests/core/models/test_patron.py @@ -5,6 +5,7 @@ from core.classifier import Classifier from core.model import create, tuple_to_numericrange +from core.model.constants import LinkRelations from core.model.credential import Credential from core.model.datasource import DataSource from core.model.licensing import PolicyException @@ -750,6 +751,13 @@ def test_age_appropriate_match(self): ) +def mock_url_for(url, **kwargs): + item_list = [f"{k}={v}" for k, v in kwargs.items()] + item_list.sort() # Ensure repeatable order + items = ";".join(item_list) + return f"{url} : {items}" + + class ExamplePatronProfileStorageFixture: patron: Patron store: PatronProfileStorage @@ -761,7 +769,7 @@ def create( ) -> "ExamplePatronProfileStorageFixture": data = ExamplePatronProfileStorageFixture() data.patron = transaction.patron() - data.store = PatronProfileStorage(data.patron) + data.store = PatronProfileStorage(data.patron, url_for=mock_url_for) data.transaction = transaction return data @@ -787,6 +795,14 @@ def test_profile_document( ): data = example_patron_profile_fixture + links = [ + dict( + rel=LinkRelations.DEVICE_REGISTRATION, + type="application/json", + href="put_patron_devices : _external=True;library_short_name=default", + ) + ] + # synchronize_annotations always shows up as settable, even if # the current value is None. 
data.patron.authorization_identifier = "abcd" @@ -795,6 +811,7 @@ def test_profile_document( assert { "simplified:authorization_identifier": "abcd", "settings": {"simplified:synchronize_annotations": None}, + "links": links, } == rep data.patron.synchronize_annotations = True @@ -804,6 +821,7 @@ def test_profile_document( "simplified:authorization_expires": "2016-01-01T10:20:30Z", "simplified:authorization_identifier": "abcd", "settings": {"simplified:synchronize_annotations": True}, + "links": links, } == rep def test_update( From f3650449cd9b3e478030a422be60b56e188830cf Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 26 Oct 2023 09:37:26 -0300 Subject: [PATCH 136/262] Type check collection.py (PP-503) (#1481) Add type checking information to collection.py before refactoring. --- core/model/classification.py | 6 +- core/model/collection.py | 212 +++++++++++------- core/model/contributor.py | 4 +- core/model/hassessioncache.py | 24 +- core/model/integration.py | 7 +- core/model/library.py | 5 +- core/opds_import.py | 2 +- pyproject.toml | 1 + tests/api/mockapi/overdrive.py | 1 + tests/api/test_controller_multilib.py | 1 + tests/api/test_controller_scopedsession.py | 1 + tests/core/models/test_collection.py | 47 +++- tests/core/models/test_configuration.py | 1 + .../models/test_integration_configuration.py | 5 +- 14 files changed, 206 insertions(+), 111 deletions(-) diff --git a/core/model/classification.py b/core/model/classification.py index f225b1484d..d4d406716a 100644 --- a/core/model/classification.py +++ b/core/model/classification.py @@ -56,8 +56,8 @@ class Subject(Base): OVERDRIVE = Classifier.OVERDRIVE # Overdrive's classification system BISAC = Classifier.BISAC BIC = Classifier.BIC # BIC Subject Categories - TAG = Classifier.TAG # Folksonomic tags. - FREEFORM_AUDIENCE = Classifier.FREEFORM_AUDIENCE + TAG: str = Classifier.TAG # Folksonomic tags. + FREEFORM_AUDIENCE: str = Classifier.FREEFORM_AUDIENCE NYPL_APPEAL = Classifier.NYPL_APPEAL # Types with terms that are suitable for search. @@ -65,7 +65,7 @@ class Subject(Base): AXIS_360_AUDIENCE = Classifier.AXIS_360_AUDIENCE GRADE_LEVEL = Classifier.GRADE_LEVEL - AGE_RANGE = Classifier.AGE_RANGE + AGE_RANGE: str = Classifier.AGE_RANGE LEXILE_SCORE = Classifier.LEXILE_SCORE ATOS_SCORE = Classifier.ATOS_SCORE INTEREST_LEVEL = Classifier.INTEREST_LEVEL diff --git a/core/model/collection.py b/core/model/collection.py index 801f3095f7..47053908fe 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -1,8 +1,8 @@ -# Collection, CollectionIdentifier, CollectionMissing from __future__ import annotations +import datetime from abc import ABCMeta, abstractmethod -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, TypeVar from sqlalchemy import ( Boolean, @@ -15,7 +15,7 @@ exists, func, ) -from sqlalchemy.orm import Mapped, backref, joinedload, mapper, relationship +from sqlalchemy.orm import Mapped, Query, backref, joinedload, mapper, relationship from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ @@ -40,9 +40,11 @@ from core.util.string_helpers import base64 if TYPE_CHECKING: - # This is needed during type checking so we have the - # types of related models. 
- from core.model import Credential, CustomList, Timestamp # noqa: autoflake + from core.external_search import ExternalSearchIndex + from core.model import Credential, CustomList, Timestamp + + +T = TypeVar("T") class Collection(Base, HasSessionCache): @@ -161,14 +163,16 @@ class Collection(Base, HasSessionCache): # every library. GLOBAL_COLLECTION_DATA_SOURCES = [DataSource.ENKI] - def __repr__(self): - return '' % (self.name, self.protocol, self.id) + def __repr__(self) -> str: + return f'' - def cache_key(self): - return (self.name, self.external_integration.protocol) + def cache_key(self) -> Tuple[str | None, str | None]: + return self.name, self.external_integration.protocol @classmethod - def by_name_and_protocol(cls, _db, name, protocol): + def by_name_and_protocol( + cls, _db: Session, name: str, protocol: str + ) -> Tuple[Collection, bool]: """Find or create a Collection with the given name and the given protocol. @@ -178,13 +182,15 @@ def by_name_and_protocol(cls, _db, name, protocol): """ key = (name, protocol) - def lookup_hook(): + def lookup_hook() -> Tuple[Collection, bool]: return cls._by_name_and_protocol(_db, key) return cls.by_cache_key(_db, key, lookup_hook) @classmethod - def _by_name_and_protocol(cls, _db, cache_key): + def _by_name_and_protocol( + cls, _db: Session, cache_key: Tuple[str, str] + ) -> Tuple[Collection, bool]: """Find or create a Collection with the given name and the given protocol. @@ -215,7 +221,7 @@ def _by_name_and_protocol(cls, _db, cache_key): return collection, is_new @classmethod - def by_protocol(cls, _db, protocol): + def by_protocol(cls, _db: Session, protocol: str | None) -> Query[Collection]: """Query collections that get their licenses through the given protocol. Collections marked for deletion are not included. @@ -239,13 +245,16 @@ def by_protocol(cls, _db, protocol): return qu @classmethod - def by_datasource(cls, _db, data_source): + def by_datasource( + cls, _db: Session, data_source: DataSource | str + ) -> Query[Collection]: """Query collections that are associated with the given DataSource. Collections marked for deletion are not included. """ - if isinstance(data_source, DataSource): - data_source = data_source.name + data_source_name = ( + data_source.name if isinstance(data_source, DataSource) else data_source + ) qu = ( _db.query(cls) @@ -257,23 +266,29 @@ def by_datasource(cls, _db, data_source): IntegrationConfiguration.settings_dict[ Collection.DATA_SOURCE_NAME_SETTING ].astext - == data_source + == data_source_name ) .filter(Collection.marked_for_deletion == False) ) return qu @hybrid_property - def protocol(self): + def protocol(self) -> str: """What protocol do we need to use to get licenses for this collection? """ - return ( - self.integration_configuration and self.integration_configuration.protocol - ) + if self.integration_configuration is None: + raise ValueError("Collection has no integration configuration.") + + if self.integration_configuration.protocol is None: + raise ValueError( + "Collection has integration configuration but no protocol." 
+ ) + + return self.integration_configuration.protocol @protocol.setter - def protocol(self, new_protocol): + def protocol(self, new_protocol: str) -> None: """Modify the protocol in use by this Collection.""" if self.parent and self.parent.protocol != new_protocol: raise ValueError( @@ -285,14 +300,14 @@ def protocol(self, new_protocol): child.protocol = new_protocol @hybrid_property - def primary_identifier_source(self): + def primary_identifier_source(self) -> str | None: """Identify if should try to use another identifier than """ return self.integration_configuration.settings_dict.get( ExternalIntegration.PRIMARY_IDENTIFIER_SOURCE ) @primary_identifier_source.setter - def primary_identifier_source(self, new_primary_identifier_source): + def primary_identifier_source(self, new_primary_identifier_source: str) -> None: """Modify the primary identifier source in use by this Collection.""" self.integration_configuration.settings_dict = ( self.integration_configuration.settings_dict.copy() @@ -311,7 +326,9 @@ def primary_identifier_source(self, new_primary_identifier_source): EBOOK_LOAN_DURATION_KEY = "ebook_loan_duration" STANDARD_DEFAULT_LOAN_PERIOD = 21 - def default_loan_period(self, library, medium=EditionConstants.BOOK_MEDIUM): + def default_loan_period( + self, library: Library, medium: str = EditionConstants.BOOK_MEDIUM + ) -> int: """Until we hear otherwise from the license provider, we assume that someone who borrows a non-open-access item from this collection has it for this number of days. @@ -323,7 +340,7 @@ def default_loan_period(self, library, medium=EditionConstants.BOOK_MEDIUM): return value @classmethod - def loan_period_key(cls, medium=EditionConstants.BOOK_MEDIUM): + def loan_period_key(cls, medium: str = EditionConstants.BOOK_MEDIUM) -> str: if medium == EditionConstants.AUDIO_MEDIUM: return cls.AUDIOBOOK_LOAN_DURATION_KEY else: @@ -331,29 +348,33 @@ def loan_period_key(cls, medium=EditionConstants.BOOK_MEDIUM): def default_loan_period_setting( self, - library, - medium=EditionConstants.BOOK_MEDIUM, - ): + library: Library, + medium: str = EditionConstants.BOOK_MEDIUM, + ) -> Optional[int]: """Until we hear otherwise from the license provider, we assume that someone who borrows a non-open-access item from this collection has it for this number of days. """ key = self.loan_period_key(medium) + if library.id is None: + return None + config = self.integration_configuration.for_library(library.id) + if config is None: + return None - if config: - return config.settings_dict.get(key) + return config.settings_dict.get(key) DEFAULT_RESERVATION_PERIOD_KEY = "default_reservation_period" STANDARD_DEFAULT_RESERVATION_PERIOD = 3 - def _set_settings(self, **kwargs): + def _set_settings(self, **kwargs: Any) -> None: settings_dict = self.integration_configuration.settings_dict.copy() settings_dict.update(kwargs) self.integration_configuration.settings_dict = settings_dict @hybrid_property - def default_reservation_period(self): + def default_reservation_period(self) -> int: """Until we hear otherwise from the license provider, we assume that someone who puts an item on hold has this many days to check it out before it goes to the next person in line. 
@@ -366,7 +387,7 @@ def default_reservation_period(self): ) @default_reservation_period.setter - def default_reservation_period(self, new_value): + def default_reservation_period(self, new_value: int) -> None: new_value = int(new_value) self._set_settings(**{self.DEFAULT_RESERVATION_PERIOD_KEY: new_value}) @@ -395,7 +416,7 @@ def default_audience(self, new_value: str) -> None: """ self._set_settings(**{self.DEFAULT_AUDIENCE_KEY: str(new_value)}) - def create_external_integration(self, protocol): + def create_external_integration(self, protocol: str) -> ExternalIntegration: """Create an ExternalIntegration for this Collection. To be used immediately after creating a new Collection, @@ -424,7 +445,9 @@ def create_external_integration(self, protocol): self.external_integration_id = external_integration.id return external_integration - def create_integration_configuration(self, protocol): + def create_integration_configuration( + self, protocol: str + ) -> IntegrationConfiguration: _db = Session.object_session(self) goal = Goals.LICENSE_GOAL if self.integration_configuration_id: @@ -466,8 +489,9 @@ def external_integration(self) -> ExternalIntegration: return self._external_integration @property - def unique_account_id(self): + def unique_account_id(self) -> str: """Identifier that uniquely represents this Collection of works""" + unique_account_id: str | None if ( self.data_source and self.data_source.name in self.GLOBAL_COLLECTION_DATA_SOURCES @@ -488,7 +512,7 @@ def unique_account_id(self): return unique_account_id @hybrid_property - def data_source(self): + def data_source(self) -> DataSource | None: """Find the data source associated with this Collection. Bibliographic metadata obtained through the collection @@ -502,7 +526,11 @@ def data_source(self): the data source is a Collection-specific setting. """ data_source = None - name = ExternalIntegration.DATA_SOURCE_FOR_LICENSE_PROTOCOL.get(self.protocol) + name = None + if self.protocol is not None: + name = ExternalIntegration.DATA_SOURCE_FOR_LICENSE_PROTOCOL.get( + self.protocol + ) if not name: name = self.integration_configuration.settings_dict.get( Collection.DATA_SOURCE_NAME_SETTING @@ -513,29 +541,38 @@ def data_source(self): return data_source @data_source.setter - def data_source(self, new_value): - if isinstance(new_value, DataSource): - new_value = new_value.name - if self.protocol == new_value: + def data_source(self, new_value: DataSource | str) -> None: + new_datasource_name = ( + new_value.name if isinstance(new_value, DataSource) else new_value + ) + + if self.protocol == new_datasource_name: return # Only set a DataSource for Collections that don't have an # implied source. 
if self.protocol not in ExternalIntegration.DATA_SOURCE_FOR_LICENSE_PROTOCOL: - if new_value is not None: - new_value = str(new_value) - self._set_settings(**{Collection.DATA_SOURCE_NAME_SETTING: new_value}) + if new_datasource_name is not None: + new_datasource_name = str(new_datasource_name) + self._set_settings( + **{Collection.DATA_SOURCE_NAME_SETTING: new_datasource_name} + ) @property - def parents(self): - if self.parent_id: - _db = Session.object_session(self) - parent = Collection.by_id(_db, self.parent_id) - yield parent - yield from parent.parents + def parents(self) -> Generator[Collection, None, None]: + if not self.parent_id: + return None + + _db = Session.object_session(self) + parent = Collection.by_id(_db, self.parent_id) + if parent is None: + return None + + yield parent + yield from parent.parents @property - def metadata_identifier(self): + def metadata_identifier(self) -> str: """Identifier based on collection details that uniquely represents this Collection on the metadata wrangler. This identifier is composed of the Collection protocol and account identifier. @@ -558,13 +595,13 @@ def metadata_identifier(self): protocol = encode(self.protocol) metadata_identifier = protocol + ":" + account_id - return encode(metadata_identifier) + return encode(metadata_identifier) # type: ignore[no-any-return] - def disassociate_library(self, library): + def disassociate_library(self, library: Library) -> None: """Disassociate a Library from this Collection and delete any relevant ConfigurationSettings. """ - if library is None or not library in self.libraries: + if library is None or library not in self.libraries: # No-op. return @@ -602,7 +639,7 @@ def disassociate_library(self, library): self.libraries.remove(library) @classmethod - def _decode_metadata_identifier(cls, metadata_identifier): + def _decode_metadata_identifier(cls, metadata_identifier: str) -> Tuple[str, str]: """Invert the metadata_identifier property.""" if not metadata_identifier: raise ValueError("No metadata identifier provided.") @@ -619,7 +656,12 @@ def _decode_metadata_identifier(cls, metadata_identifier): return protocol, account_id @classmethod - def from_metadata_identifier(cls, _db, metadata_identifier, data_source=None): + def from_metadata_identifier( + cls, + _db: Session, + metadata_identifier: str, + data_source: DataSource | str | None = None, + ) -> Tuple[Collection, bool]: """Finds or creates a Collection on the metadata wrangler, based on its unique metadata_identifier. """ @@ -639,22 +681,26 @@ def from_metadata_identifier(cls, _db, metadata_identifier, data_source=None): # identifier. Give it an ExternalIntegration with the # corresponding protocol, and set its data source and # external_account_id. - collection, is_new = create(_db, Collection, name=metadata_identifier) - collection.create_external_integration(protocol) - collection.create_integration_configuration(protocol) + new_collection, is_new = create(_db, Collection, name=metadata_identifier) + new_collection.create_external_integration(protocol) + new_collection.create_integration_configuration(protocol) + collection = new_collection if protocol == ExternalIntegration.OPDS_IMPORT: # For OPDS Import collections only, we store the URL to # the OPDS feed (the "account ID") and the data source. 
collection.external_account_id = account_id - if data_source and not isinstance(data_source, DataSource): - data_source = DataSource.lookup(_db, data_source, autocreate=True) - collection.data_source = data_source + if isinstance(data_source, DataSource): + collection.data_source = data_source + elif data_source is not None: + collection.data_source = DataSource.lookup( + _db, data_source, autocreate=True + ) return collection, is_new @property - def pools_with_no_delivery_mechanisms(self): + def pools_with_no_delivery_mechanisms(self) -> Query[LicensePool]: """Find all LicensePools in this Collection that have no delivery mechanisms whatsoever. @@ -662,9 +708,9 @@ def pools_with_no_delivery_mechanisms(self): """ _db = Session.object_session(self) qu = LicensePool.with_no_delivery_mechanisms(_db) - return qu.filter(LicensePool.collection == self) + return qu.filter(LicensePool.collection == self) # type: ignore[no-any-return] - def explain(self, include_secrets=False): + def explain(self, include_secrets: bool = False) -> List[str]: """Create a series of human-readable strings to explain a collection's settings. @@ -693,11 +739,11 @@ def explain(self, include_secrets=False): lines.append(f'Setting "{name}": "{value}"') return lines - def catalog_identifier(self, identifier): + def catalog_identifier(self, identifier: Identifier) -> None: """Inserts an identifier into a catalog""" self.catalog_identifiers([identifier]) - def catalog_identifiers(self, identifiers): + def catalog_identifiers(self, identifiers: List[Identifier]) -> None: """Inserts identifiers into the catalog""" if not identifiers: # Nothing to do. @@ -707,7 +753,7 @@ def catalog_identifiers(self, identifiers): already_in_catalog = ( _db.query(Identifier) .join(CollectionIdentifier) - .filter(CollectionIdentifier.collection_id == self.id) + .filter(CollectionIdentifier.collection_id == self.id) # type: ignore[attr-defined] .filter(Identifier.id.in_([x.id for x in identifiers])) .all() ) @@ -720,7 +766,9 @@ def catalog_identifiers(self, identifiers): _db.bulk_insert_mappings(CollectionIdentifier, new_catalog_entries) _db.commit() - def unresolved_catalog(self, _db, data_source_name, operation): + def unresolved_catalog( + self, _db: Session, data_source_name: str, operation: str + ) -> Query[Identifier]: """Returns a query with all identifiers in a Collection's catalog that have unsuccessfully attempted resolution. This method is used on the metadata wrangler. @@ -739,14 +787,16 @@ def unresolved_catalog(self, _db, data_source_name, operation): .outerjoin(Identifier.licensed_through) .outerjoin(Identifier.coverage_records) .outerjoin(LicensePool.work) - .outerjoin(Identifier.collections) + .outerjoin(Identifier.collections) # type: ignore[attr-defined] .filter(Collection.id == self.id, is_not_resolved, Work.id == None) .order_by(Identifier.id) ) return query - def isbns_updated_since(self, _db, timestamp): + def isbns_updated_since( + self, _db: Session, timestamp: datetime.datetime | None + ) -> Query[Identifier]: """Finds all ISBNs in a collection's catalog that have been updated since the timestamp but don't have a Work to show for it. Used in the metadata wrangler. 
@@ -755,7 +805,7 @@ def isbns_updated_since(self, _db, timestamp): """ isbns = ( _db.query(Identifier, func.max(CoverageRecord.timestamp).label("latest")) - .join(Identifier.collections) + .join(Identifier.collections) # type: ignore[attr-defined] .join(Identifier.coverage_records) .outerjoin(Identifier.licensed_through) .group_by(Identifier.id) @@ -777,11 +827,11 @@ def isbns_updated_since(self, _db, timestamp): @classmethod def restrict_to_ready_deliverable_works( cls, - query, - collection_ids=None, - show_suppressed=False, - allow_holds=True, - ): + query: Query[T], + collection_ids: List[int] | None = None, + show_suppressed: bool = False, + allow_holds: bool = True, + ) -> Query[T]: """Restrict a query to show only presentation-ready works present in an appropriate collection which the default client can fulfill. @@ -856,7 +906,7 @@ def restrict_to_ready_deliverable_works( ) return query - def delete(self, search_index=None): + def delete(self, search_index: ExternalSearchIndex | None = None) -> None: """Delete a collection. Collections can have hundreds of thousands of diff --git a/core/model/contributor.py b/core/model/contributor.py index 284d8e55b1..00e2ebf93a 100644 --- a/core/model/contributor.py +++ b/core/model/contributor.py @@ -3,7 +3,7 @@ import logging import re -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING, Dict, List, Set from sqlalchemy import Column, ForeignKey, Integer, Unicode, UniqueConstraint from sqlalchemy.dialects.postgresql import ARRAY, JSON @@ -93,7 +93,7 @@ class Contributor(Base): COPYRIGHT_HOLDER_ROLE = "Copyright holder" TRANSCRIBER_ROLE = "Transcriber" DESIGNER_ROLE = "Designer" - AUTHOR_ROLES = {PRIMARY_AUTHOR_ROLE, AUTHOR_ROLE} + AUTHOR_ROLES: Set[str] = {PRIMARY_AUTHOR_ROLE, AUTHOR_ROLE} # Map our recognized roles to MARC relators. # https://www.loc.gov/marc/relators/relaterm.html diff --git a/core/model/hassessioncache.py b/core/model/hassessioncache.py index 1f17ef9bd5..f17fa48bfa 100644 --- a/core/model/hassessioncache.py +++ b/core/model/hassessioncache.py @@ -6,7 +6,7 @@ from abc import abstractmethod from collections import namedtuple from types import SimpleNamespace -from typing import Callable, Hashable +from typing import Callable, Hashable, TypeVar from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import Mapped, Session @@ -21,6 +21,8 @@ CacheTuple = namedtuple("CacheTuple", ["id", "key", "stats"]) +T = TypeVar("T") + class HasSessionCache: CACHE_ATTRIBUTE = "_palace_cache" @@ -94,8 +96,8 @@ def _cache_lookup( cache: CacheTuple, cache_name: str, cache_key: Hashable, - cache_miss_hook: Callable[[], tuple[Self | None, bool]], - ) -> tuple[Self | None, bool]: + cache_miss_hook: Callable[[], tuple[T, bool]], + ) -> tuple[T, bool]: """Helper method used by both by_id and by_cache_key. 
Looks up `cache_key` in the `cache_name` property of `cache`, returning @@ -142,20 +144,22 @@ def _cache_from_session(cls, _db: Session) -> CacheTuple: def by_id(cls, db: Session, id: int) -> Self | None: """Look up an item by its unique database ID.""" cache = cls._cache_from_session(db) - - def lookup_hook(): # type: ignore[no-untyped-def] - return get_one(db, cls, id=id), False - - obj, _ = cls._cache_lookup(db, cache, "id", id, lookup_hook) + obj, _ = cls._cache_lookup( + db, cache, "id", id, lambda: cls._by_id_lookup_hook(db, id) + ) return obj + @classmethod + def _by_id_lookup_hook(cls, db: Session, id: int) -> tuple[Self | None, bool]: + return get_one(db, cls, id=id), False + @classmethod def by_cache_key( cls, db: Session, cache_key: Hashable, - cache_miss_hook: Callable[[], tuple[Self | None, bool]], - ) -> tuple[Self | None, bool]: + cache_miss_hook: Callable[[], tuple[T, bool]], + ) -> tuple[T, bool]: """Look up an item by its cache key.""" cache = cls._cache_from_session(db) return cls._cache_lookup(db, cache, "key", cache_key, cache_miss_hook) diff --git a/core/model/integration.py b/core/model/integration.py index eaa4e43f6a..ebac448cb6 100644 --- a/core/model/integration.py +++ b/core/model/integration.py @@ -72,14 +72,17 @@ def for_library( @overload def for_library( - self, library_id: int, create: Literal[False] = False + self, library_id: int | None, create: bool = False ) -> IntegrationLibraryConfiguration | None: ... def for_library( - self, library_id: int, create: bool = False + self, library_id: int | None, create: bool = False ) -> IntegrationLibraryConfiguration | None: """Fetch the library configuration specifically by library_id""" + if library_id is None: + return None + for config in self.library_configurations: if config.library_id == library_id: return config diff --git a/core/model/library.py b/core/model/library.py index 9b9e563f2b..9f66939124 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -328,7 +328,8 @@ def enabled_facets(self, group_name: str) -> List[str]: if group_name == FacetConstants.COLLECTION_NAME_FACETS_GROUP_NAME: enabled = [] for collection in self.collections: - enabled.append(collection.name) + if collection.name is not None: + enabled.append(collection.name) return enabled return getattr(self.settings, f"facets_enabled_{group_name}") # type: ignore[no-any-return] @@ -386,7 +387,7 @@ def restrict_to_ready_deliverable_works( collection_ids = collection_ids or [ x.id for x in self.all_collections if x.id is not None ] - return Collection.restrict_to_ready_deliverable_works( # type: ignore[no-any-return] + return Collection.restrict_to_ready_deliverable_works( query, collection_ids=collection_ids, show_suppressed=show_suppressed, diff --git a/core/opds_import.py b/core/opds_import.py index 49dd713fe5..b85098398a 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -1880,7 +1880,7 @@ def data_source(self, collection: Collection) -> Optional[DataSource]: By default, this URL is stored as a setting on the collection, but subclasses may hard-code it. 
""" - return collection.data_source # type: ignore[no-any-return] + return collection.data_source def feed_contains_new_data(self, feed: bytes | str) -> bool: """Does the given feed contain any entries that haven't been imported diff --git a/pyproject.toml b/pyproject.toml index acb3dc7b98..02490c5d11 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -87,6 +87,7 @@ module = [ "core.feed.*", "core.integration.*", "core.model.announcements", + "core.model.collection", "core.model.hassessioncache", "core.model.integration", "core.model.library", diff --git a/tests/api/mockapi/overdrive.py b/tests/api/mockapi/overdrive.py index d7ea7d7f0f..e96c927eeb 100644 --- a/tests/api/mockapi/overdrive.py +++ b/tests/api/mockapi/overdrive.py @@ -70,6 +70,7 @@ def mock_collection( } library.collections.append(collection) db = DatabaseTransactionFixture + assert library.id is not None db.set_settings(config.for_library(library.id, create=True), ils_name=ils_name) _db.refresh(config) return collection diff --git a/tests/api/test_controller_multilib.py b/tests/api/test_controller_multilib.py index da69c34d9f..4e725fce1c 100644 --- a/tests/api/test_controller_multilib.py +++ b/tests/api/test_controller_multilib.py @@ -21,6 +21,7 @@ def make_default_collection(_db, library): name=f"{controller_fixture.db.fresh_str()} (for multi-library test)", ) collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) + collection.create_integration_configuration(ExternalIntegration.OPDS_IMPORT) library.collections.append(collection) return collection diff --git a/tests/api/test_controller_scopedsession.py b/tests/api/test_controller_scopedsession.py index 53afdaa5cd..797a7b25f6 100644 --- a/tests/api/test_controller_scopedsession.py +++ b/tests/api/test_controller_scopedsession.py @@ -60,6 +60,7 @@ def make_default_collection(self, session: Session, library): name=self.fresh_id() + " (collection for scoped session)", ) collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) + collection.create_integration_configuration(ExternalIntegration.OPDS_IMPORT) library.collections.append(collection) return collection diff --git a/tests/core/models/test_collection.py b/tests/core/models/test_collection.py index 1986abf796..cde4aeeadf 100644 --- a/tests/core/models/test_collection.py +++ b/tests/core/models/test_collection.py @@ -229,6 +229,32 @@ def test_unique_account_id( enki_child.parent = enki assert DataSource.ENKI + "+enkichild" == enki_child.unique_account_id + def test_get_protocol(self, db: DatabaseTransactionFixture): + test_collection = db.collection() + integration = test_collection.integration_configuration + test_collection.integration_configuration = None + + # A collection with no associated ExternalIntegration has no protocol. + with pytest.raises(ValueError) as excinfo: + getattr(test_collection, "protocol") + + assert "Collection has no integration configuration" in str(excinfo.value) + + integration.protocol = None + test_collection.integration_configuration = integration + + # If a collection has an integration that doesn't have a protocol set, + # it has no protocol, so we get an exception. 
+ with pytest.raises(ValueError) as excinfo: + getattr(test_collection, "protocol") + + assert "Collection has integration configuration but no protocol" in str( + excinfo.value + ) + + integration.protocol = "test protocol" + assert test_collection.protocol == "test protocol" + def test_change_protocol( self, example_collection_fixture: ExampleCollectionFixture ): @@ -269,23 +295,24 @@ def test_data_source(self, example_collection_fixture: ExampleCollectionFixture) bibliotheca = db.collection(protocol=ExternalIntegration.BIBLIOTHECA) # The rote data_source is returned for the obvious collection. + assert bibliotheca.data_source is not None assert DataSource.BIBLIOTHECA == bibliotheca.data_source.name # The less obvious OPDS collection doesn't have a DataSource. assert None == opds.data_source # Trying to change the Bibliotheca collection's data_source does nothing. - bibliotheca.data_source = DataSource.AXIS_360 + bibliotheca.data_source = DataSource.AXIS_360 # type: ignore[assignment] assert isinstance(bibliotheca.data_source, DataSource) assert DataSource.BIBLIOTHECA == bibliotheca.data_source.name # Trying to change the opds collection's data_source is fine. - opds.data_source = DataSource.PLYMPTON + opds.data_source = DataSource.PLYMPTON # type: ignore[assignment] assert isinstance(opds.data_source, DataSource) assert DataSource.PLYMPTON == opds.data_source.name # Resetting it to something else is fine. - opds.data_source = DataSource.OA_CONTENT_SERVER + opds.data_source = DataSource.OA_CONTENT_SERVER # type: ignore[assignment] assert isinstance(opds.data_source, DataSource) assert DataSource.OA_CONTENT_SERVER == opds.data_source.name @@ -520,7 +547,8 @@ def new_data_source(): # Because this isn't an OPDS collection, the external account # ID is not stored, the data source is the default source for # the protocol, and no new data source was created. - assert None == mirror_collection.external_account_id + assert mirror_collection.external_account_id is None + assert mirror_collection.data_source is not None assert DataSource.OVERDRIVE == mirror_collection.data_source.name assert None == new_data_source() @@ -529,6 +557,7 @@ def new_data_source(): mirror_collection = create( db.session, Collection, name=collection.metadata_identifier )[0] + assert collection.protocol is not None mirror_collection.create_external_integration(collection.protocol) mirror_collection.create_integration_configuration(collection.protocol) @@ -728,7 +757,7 @@ def assert_isbns(expected, result_query): assert_isbns([i2, i1], updated_isbns) # That CoverageRecord timestamp is also returned. - i1_timestamp = updated_isbns[1][1] + i1_timestamp = updated_isbns[1][1] # type: ignore[index] assert isinstance(i1_timestamp, datetime.datetime) assert i1_oclc_record.timestamp == i1_timestamp @@ -736,8 +765,8 @@ def assert_isbns(expected, result_query): # then will be returned. timestamp = utc_now() i1.coverage_records[0].timestamp = utc_now() - updated_isbns = test_collection.isbns_updated_since(db.session, timestamp) - assert_isbns([i1], updated_isbns) + updated_isbns_2 = test_collection.isbns_updated_since(db.session, timestamp) + assert_isbns([i1], updated_isbns_2) # Prepare an ISBN associated with a Work. work = db.work(with_license_pool=True) @@ -745,8 +774,8 @@ def assert_isbns(expected, result_query): i2.coverage_records[0].timestamp = utc_now() # ISBNs that have a Work will be ignored. 
- updated_isbns = test_collection.isbns_updated_since(db.session, timestamp) - assert_isbns([i1], updated_isbns) + updated_isbns_3 = test_collection.isbns_updated_since(db.session, timestamp) + assert_isbns([i1], updated_isbns_3) def test_custom_lists(self, example_collection_fixture: ExampleCollectionFixture): db = example_collection_fixture.database_fixture diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index 7d10ba373d..4ac3f35691 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -515,6 +515,7 @@ def test_data_source( # For most collections, the protocol determines the # data source. collection = db.collection(protocol=ExternalIntegration.OVERDRIVE) + assert collection.data_source is not None assert DataSource.OVERDRIVE == collection.data_source.name # For OPDS Import collections, data source is a setting which diff --git a/tests/core/models/test_integration_configuration.py b/tests/core/models/test_integration_configuration.py index 50c3c5f4ed..251487423e 100644 --- a/tests/core/models/test_integration_configuration.py +++ b/tests/core/models/test_integration_configuration.py @@ -16,8 +16,11 @@ def test_for_library(seslf, db: DatabaseTransactionFixture): library = db.default_library() assert library.id is not None + # No library ID provided + assert config.for_library(None) is None + # No library config exists - assert config.for_library(library.id) == None + assert config.for_library(library.id) is None # This should create a new config libconfig = config.for_library(library.id, create=True) From 415283e34cee00dd858c477f644158cd8b9bc376 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 26 Oct 2023 18:18:46 +0530 Subject: [PATCH 137/262] Activated OPDS2 capabilities for several feed endpoints (#1484) --- api/admin/controller/custom_lists.py | 4 +++- api/controller.py | 12 +++++++++--- core/app_server.py | 2 +- 3 files changed, 13 insertions(+), 5 deletions(-) diff --git a/api/admin/controller/custom_lists.py b/api/admin/controller/custom_lists.py index 905a857062..a5f867e4ac 100644 --- a/api/admin/controller/custom_lists.py +++ b/api/admin/controller/custom_lists.py @@ -359,7 +359,9 @@ def custom_list( query, self._db, list.name or "", url, pagination, url_fn, annotator ) annotator.annotate_feed(feed) - return feed.as_response(max_age=0) + return feed.as_response( + max_age=0, mime_types=flask.request.accept_mimetypes + ) elif flask.request.method == "POST": ctx: Context = flask.request.context.body # type: ignore diff --git a/api/controller.py b/api/controller.py index 0c4d78b0e8..add2d4ddc6 100644 --- a/api/controller.py +++ b/api/controller.py @@ -1994,7 +1994,9 @@ def contributor( pagination=pagination, annotator=annotator, search_engine=search_engine, - ).as_response(max_age=lane.max_cache_age()) + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) def permalink(self, identifier_type, identifier): """Serve an entry for a single book. 
@@ -2091,7 +2093,9 @@ def related( pagination=None, facets=facets, search_engine=search_engine, - ).as_response(max_age=lane.max_cache_age()) + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) def recommendations( self, @@ -2189,7 +2193,9 @@ def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFe pagination=pagination, annotator=annotator, search_engine=search_engine, - ).as_response(max_age=lane.max_cache_age()) + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) class ProfileController(CirculationManagerController): diff --git a/core/app_server.py b/core/app_server.py index 1a604bac9f..f38ab5947a 100644 --- a/core/app_server.py +++ b/core/app_server.py @@ -290,7 +290,7 @@ def work_lookup(self, annotator, route_name="lookup", **process_urn_kwargs): precomposed_entries=handler.precomposed_entries, ) opds_feed.generate_feed(annotate=False) - return opds_feed.as_response() + return opds_feed.as_response(mime_types=flask.request.accept_mimetypes) def process_urns(self, urns, **process_urn_kwargs): """Process a number of URNs by instantiating a URNLookupHandler From 75085ad39322debfb7f61023732ee7b0afcb68c3 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 26 Oct 2023 19:38:50 +0530 Subject: [PATCH 138/262] PP-586 No retries for web application based requests (#1483) * Remove retries any request that are within the context of the web application This is to ensure we do not hang web requests due to issues with third party APIs * Request timeouts dropped to 5 seconds within an app context --- api/app.py | 2 ++ core/util/http.py | 24 +++++++++++++++++++++--- tests/api/test_app.py | 14 ++++++++++++++ tests/core/util/test_http.py | 14 ++++++++++++++ 4 files changed, 51 insertions(+), 3 deletions(-) create mode 100644 tests/api/test_app.py diff --git a/api/app.py b/api/app.py index d4cde207c1..90c6a45a28 100644 --- a/api/app.py +++ b/api/app.py @@ -28,6 +28,7 @@ from core.service.container import Services, container_instance from core.util import LanguageCodes from core.util.cache import CachedData +from core.util.http import HTTP from scripts import InstanceInitializationScript @@ -103,6 +104,7 @@ def initialize_database(): def initialize_application() -> PalaceFlask: + HTTP.set_quick_failure_settings() with app.app_context(), flask_babel.force_locale("en"): initialize_database() diff --git a/core/util/http.py b/core/util/http.py index 43707df0e4..496778e409 100644 --- a/core/util/http.py +++ b/core/util/http.py @@ -1,4 +1,5 @@ import logging +import time from json import JSONDecodeError from typing import Any, Callable, Dict, List, Optional, Union from urllib.parse import urlparse @@ -12,6 +13,7 @@ import core from core.exceptions import IntegrationException from core.problem_details import INTEGRATION_ERROR +from core.util.log import LoggerMixin from core.util.problem_detail import JSON_MEDIA_TYPE as PROBLEM_DETAIL_JSON_MEDIA_TYPE from core.util.problem_detail import ProblemError @@ -157,13 +159,22 @@ class RequestTimedOut(RequestNetworkException, requests.exceptions.Timeout): internal_message = "Timeout accessing %s: %s" -class HTTP: +class HTTP(LoggerMixin): """A helper for the `requests` module.""" # In case an app version is not present, we can use this version as a fallback # for all outgoing http requests without a custom user-agent DEFAULT_USER_AGENT_VERSION = "1.x.x" + DEFAULT_REQUEST_RETRIES = 5 + DEFAULT_REQUEST_TIMEOUT = 20 
+ + @classmethod + def set_quick_failure_settings(cls) -> None: + """Ensure any outgoing requests aren't long-running""" + cls.DEFAULT_REQUEST_RETRIES = 0 + cls.DEFAULT_REQUEST_TIMEOUT = 5 + @classmethod def get_with_timeout(cls, url: str, *args, **kwargs) -> Response: """Make a GET request with timeout handling.""" @@ -219,9 +230,11 @@ def _request_with_timeout( expected_encoding = kwargs.pop("expected_encoding", "utf-8") if not "timeout" in kwargs: - kwargs["timeout"] = 20 + kwargs["timeout"] = cls.DEFAULT_REQUEST_TIMEOUT - max_retry_count: int = int(kwargs.pop("max_retry_count", 5)) + max_retry_count: int = int( + kwargs.pop("max_retry_count", cls.DEFAULT_REQUEST_RETRIES) + ) backoff_factor: float = float(kwargs.pop("backoff_factor", 1.0)) # Unicode data can't be sent over the wire. Convert it @@ -258,6 +271,7 @@ def _request_with_timeout( # arguments, it will still work. args = args + (url,) + request_start_time = time.time() if make_request_with == sessions.Session.request: with sessions.Session() as session: retry_strategy = Retry( @@ -269,10 +283,14 @@ def _request_with_timeout( session.mount("http://", adapter) session.mount("https://", adapter) + print(session) response = session.request(*args, **kwargs) else: response = make_request_with(*args, **kwargs) + cls.logger().info( + f"Request time for {url} took {time.time() - request_start_time:.2f} seconds" + ) if verbose: logging.info( diff --git a/tests/api/test_app.py b/tests/api/test_app.py new file mode 100644 index 0000000000..c904402067 --- /dev/null +++ b/tests/api/test_app.py @@ -0,0 +1,14 @@ +from api.app import initialize_application +from core.util.http import HTTP +from tests.fixtures.database import DatabaseTransactionFixture + + +def test_initialize_application_http(db: DatabaseTransactionFixture): + # Use the db transaction fixture so that we don't use the production settings by mistake + assert HTTP.DEFAULT_REQUEST_RETRIES == 5 + assert HTTP.DEFAULT_REQUEST_TIMEOUT == 20 + # Initialize the app, which will set the HTTP configuration + initialize_application() + # Now we have quick request failure setup + assert HTTP.DEFAULT_REQUEST_RETRIES == 0 + assert HTTP.DEFAULT_REQUEST_TIMEOUT == 5 diff --git a/tests/core/util/test_http.py b/tests/core/util/test_http.py index b615e53364..6aecede5eb 100644 --- a/tests/core/util/test_http.py +++ b/tests/core/util/test_http.py @@ -41,6 +41,20 @@ def test_series(self): assert "3xx" == m(399) assert "5xx" == m(500) + @mock.patch("core.util.http.sessions.Session") + def test_request_with_timeout_defaults(self, mock_session): + with mock.patch.object(HTTP, "DEFAULT_REQUEST_TIMEOUT", 10), mock.patch.object( + HTTP, "DEFAULT_REQUEST_RETRIES", 2 + ): + mock_ctx = mock_session().__enter__() + mock_request = mock_ctx.request + HTTP.request_with_timeout("GET", "url") + # The session adapter has a retry attached + assert mock_ctx.mount.call_args[0][1].max_retries.total == 2 + mock_request.assert_called_once() + # The request has a timeout + assert mock_request.call_args[1]["timeout"] == 10 + @mock.patch("core.util.http.core.__version__", "") def test_request_with_timeout_success(self, mock_request): request = mock_request(MockRequestsResponse(200, content="Success!")) From d784b7960894adc0b5a6f6e07e7988a4ee202ed6 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 26 Oct 2023 11:38:55 -0300 Subject: [PATCH 139/262] Remove unused functions from collection model (PP-503) (#1482) This is a follow up to #1481. Remove a number of functions that are not used on collections. 
It looks like they existed to support the metadata wrangler at some point. --- .../list_collection_metadata_identifiers | 11 - core/coverage.py | 19 -- core/model/collection.py | 202 +----------- core/scripts.py | 49 --- tests/core/models/test_collection.py | 294 ------------------ tests/core/test_coverage.py | 29 -- tests/core/test_scripts.py | 35 --- 7 files changed, 2 insertions(+), 637 deletions(-) delete mode 100755 bin/informational/list_collection_metadata_identifiers diff --git a/bin/informational/list_collection_metadata_identifiers b/bin/informational/list_collection_metadata_identifiers deleted file mode 100755 index 2ccd7bc96a..0000000000 --- a/bin/informational/list_collection_metadata_identifiers +++ /dev/null @@ -1,11 +0,0 @@ -#!/usr/bin/env python3 -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..", "..") -sys.path.append(os.path.abspath(package_dir)) - -from core.scripts import ListCollectionMetadataIdentifiersScript # noqa: E402 - -ListCollectionMetadataIdentifiersScript().run() diff --git a/core/coverage.py b/core/coverage.py index 459e6dddf0..577dacfe6f 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -1387,25 +1387,6 @@ def run(self, _db, **kwargs): provider.finalize_timestampdata(self.progress) -class CatalogCoverageProvider(CollectionCoverageProvider): - """Most CollectionCoverageProviders provide coverage to Identifiers - that are licensed through a given Collection. - - A CatalogCoverageProvider provides coverage to Identifiers that - are present in a given Collection's catalog. - """ - - def items_that_need_coverage(self, identifiers=None, **kwargs): - """Find all Identifiers in this Collection's catalog but lacking - coverage through this CoverageProvider. - """ - qu = super(CollectionCoverageProvider, self).items_that_need_coverage( - identifiers, **kwargs - ) - qu = qu.join(Identifier.collections).filter(Collection.id == self.collection_id) - return qu - - class BibliographicCoverageProvider(CollectionCoverageProvider): """Fill in bibliographic metadata for all books in a Collection. 
diff --git a/core/model/collection.py b/core/model/collection.py index 47053908fe..1e32de6263 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -1,6 +1,5 @@ from __future__ import annotations -import datetime from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, TypeVar @@ -13,15 +12,14 @@ Unicode, UniqueConstraint, exists, - func, ) -from sqlalchemy.orm import Mapped, Query, backref, joinedload, mapper, relationship +from sqlalchemy.orm import Mapped, Query, backref, mapper, relationship from sqlalchemy.orm.exc import NoResultFound from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ from core.integration.goals import Goals -from core.model import Base, create, get_one, get_one_or_create +from core.model import Base, create, get_one_or_create from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.constants import EditionConstants from core.model.coverage import CoverageRecord @@ -37,7 +35,6 @@ from core.model.library import Library from core.model.licensing import LicensePool, LicensePoolDeliveryMechanism from core.model.work import Work -from core.util.string_helpers import base64 if TYPE_CHECKING: from core.external_search import ExternalSearchIndex @@ -488,29 +485,6 @@ def external_integration(self) -> ExternalIntegration: ) return self._external_integration - @property - def unique_account_id(self) -> str: - """Identifier that uniquely represents this Collection of works""" - unique_account_id: str | None - if ( - self.data_source - and self.data_source.name in self.GLOBAL_COLLECTION_DATA_SOURCES - and not self.parent - ): - # Every top-level collection from this data source has the - # same catalog. Treat them all as one collection named - # after the data source. - unique_account_id = self.data_source.name - else: - unique_account_id = self.external_account_id - - if not unique_account_id: - raise ValueError("Unique account identifier not set") - - if self.parent: - return self.parent.unique_account_id + "+" + unique_account_id - return unique_account_id - @hybrid_property def data_source(self) -> DataSource | None: """Find the data source associated with this Collection. @@ -571,32 +545,6 @@ def parents(self) -> Generator[Collection, None, None]: yield parent yield from parent.parents - @property - def metadata_identifier(self) -> str: - """Identifier based on collection details that uniquely represents - this Collection on the metadata wrangler. This identifier is - composed of the Collection protocol and account identifier. - - A circulation manager provides a Collection's metadata - identifier as part of collection registration. The metadata - wrangler creates a corresponding Collection on its side, - *named after* the metadata identifier -- regardless of the name - of that collection on the circulation manager side. - """ - account_id = self.unique_account_id - if self.protocol == ExternalIntegration.OPDS_IMPORT: - # Remove ending / from OPDS url that could duplicate the collection - # on the Metadata Wrangler. 
- while account_id.endswith("/"): - account_id = account_id[:-1] - - encode = base64.urlsafe_b64encode - account_id = encode(account_id) - protocol = encode(self.protocol) - - metadata_identifier = protocol + ":" + account_id - return encode(metadata_identifier) # type: ignore[no-any-return] - def disassociate_library(self, library: Library) -> None: """Disassociate a Library from this Collection and delete any relevant ConfigurationSettings. @@ -638,67 +586,6 @@ def disassociate_library(self, library: Library) -> None: self.libraries.remove(library) - @classmethod - def _decode_metadata_identifier(cls, metadata_identifier: str) -> Tuple[str, str]: - """Invert the metadata_identifier property.""" - if not metadata_identifier: - raise ValueError("No metadata identifier provided.") - try: - decode = base64.urlsafe_b64decode - details = decode(metadata_identifier) - encoded_details = details.split(":", 1) - [protocol, account_id] = [decode(d) for d in encoded_details] - except (TypeError, ValueError) as e: - raise ValueError( - "Metadata identifier '%s' is invalid: %s" - % (metadata_identifier, str(e)) - ) - return protocol, account_id - - @classmethod - def from_metadata_identifier( - cls, - _db: Session, - metadata_identifier: str, - data_source: DataSource | str | None = None, - ) -> Tuple[Collection, bool]: - """Finds or creates a Collection on the metadata wrangler, based - on its unique metadata_identifier. - """ - - # Decode the metadata identifier into a protocol and an - # account ID. If the metadata identifier is invalid, this - # will raise an exception. - protocol, account_id = cls._decode_metadata_identifier(metadata_identifier) - - # Now that we know the metadata identifier is valid, try to - # look up a collection named after it. - collection = get_one(_db, Collection, name=metadata_identifier) - is_new = False - - if not collection: - # Create a collection named after the metadata - # identifier. Give it an ExternalIntegration with the - # corresponding protocol, and set its data source and - # external_account_id. - new_collection, is_new = create(_db, Collection, name=metadata_identifier) - new_collection.create_external_integration(protocol) - new_collection.create_integration_configuration(protocol) - collection = new_collection - - if protocol == ExternalIntegration.OPDS_IMPORT: - # For OPDS Import collections only, we store the URL to - # the OPDS feed (the "account ID") and the data source. - collection.external_account_id = account_id - if isinstance(data_source, DataSource): - collection.data_source = data_source - elif data_source is not None: - collection.data_source = DataSource.lookup( - _db, data_source, autocreate=True - ) - - return collection, is_new - @property def pools_with_no_delivery_mechanisms(self) -> Query[LicensePool]: """Find all LicensePools in this Collection that have no delivery @@ -739,91 +626,6 @@ def explain(self, include_secrets: bool = False) -> List[str]: lines.append(f'Setting "{name}": "{value}"') return lines - def catalog_identifier(self, identifier: Identifier) -> None: - """Inserts an identifier into a catalog""" - self.catalog_identifiers([identifier]) - - def catalog_identifiers(self, identifiers: List[Identifier]) -> None: - """Inserts identifiers into the catalog""" - if not identifiers: - # Nothing to do. 
- return - - _db = Session.object_session(identifiers[0]) - already_in_catalog = ( - _db.query(Identifier) - .join(CollectionIdentifier) - .filter(CollectionIdentifier.collection_id == self.id) # type: ignore[attr-defined] - .filter(Identifier.id.in_([x.id for x in identifiers])) - .all() - ) - - new_catalog_entries = [ - dict(collection_id=self.id, identifier_id=identifier.id) - for identifier in identifiers - if identifier not in already_in_catalog - ] - _db.bulk_insert_mappings(CollectionIdentifier, new_catalog_entries) - _db.commit() - - def unresolved_catalog( - self, _db: Session, data_source_name: str, operation: str - ) -> Query[Identifier]: - """Returns a query with all identifiers in a Collection's catalog that - have unsuccessfully attempted resolution. This method is used on the - metadata wrangler. - - :return: a sqlalchemy.Query - """ - coverage_source = DataSource.lookup(_db, data_source_name) - is_not_resolved = and_( - CoverageRecord.operation == operation, - CoverageRecord.data_source_id == coverage_source.id, - CoverageRecord.status != CoverageRecord.SUCCESS, - ) - - query = ( - _db.query(Identifier) - .outerjoin(Identifier.licensed_through) - .outerjoin(Identifier.coverage_records) - .outerjoin(LicensePool.work) - .outerjoin(Identifier.collections) # type: ignore[attr-defined] - .filter(Collection.id == self.id, is_not_resolved, Work.id == None) - .order_by(Identifier.id) - ) - - return query - - def isbns_updated_since( - self, _db: Session, timestamp: datetime.datetime | None - ) -> Query[Identifier]: - """Finds all ISBNs in a collection's catalog that have been updated - since the timestamp but don't have a Work to show for it. Used in - the metadata wrangler. - - :return: a Query - """ - isbns = ( - _db.query(Identifier, func.max(CoverageRecord.timestamp).label("latest")) - .join(Identifier.collections) # type: ignore[attr-defined] - .join(Identifier.coverage_records) - .outerjoin(Identifier.licensed_through) - .group_by(Identifier.id) - .order_by("latest") - .filter( - Collection.id == self.id, - LicensePool.work_id == None, - CoverageRecord.status == CoverageRecord.SUCCESS, - ) - .enable_eagerloads(False) - .options(joinedload(Identifier.coverage_records)) - ) - - if timestamp: - isbns = isbns.filter(CoverageRecord.timestamp > timestamp) - - return isbns - @classmethod def restrict_to_ready_deliverable_works( cls, diff --git a/core/scripts.py b/core/scripts.py index da886168b7..1aa6a6a78e 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -2572,55 +2572,6 @@ def explain_collection(self, collection): ) -class ListCollectionMetadataIdentifiersScript(CollectionInputScript): - """List the metadata identifiers for Collections in the database. - - This script is helpful for accounting for and tracking collections on - the metadata wrangler. 
- """ - - def __init__(self, _db=None, output=None): - _db = _db or self._db - super().__init__(_db) - self.output = output or sys.stdout - - def run(self, cmd_args=None): - parsed = self.parse_command_line(self._db, cmd_args=cmd_args) - self.do_run(parsed.collections) - - def do_run(self, collections=None): - collection_ids = list() - if collections: - collection_ids = [c.id for c in collections] - - collections = self._db.query(Collection).order_by(Collection.id) - if collection_ids: - collections = collections.filter(Collection.id.in_(collection_ids)) - - self.output.write("COLLECTIONS\n") - self.output.write("=" * 50 + "\n") - - def add_line(id, name, protocol, metadata_identifier): - line = f"({id}) {name}/{protocol} => {metadata_identifier}\n" - self.output.write(line) - - count = 0 - for collection in collections: - if not count: - # Add a format line. - add_line("id", "name", "protocol", "metadata_identifier") - - count += 1 - add_line( - str(collection.id), - collection.name, - collection.protocol, - collection.metadata_identifier, - ) - - self.output.write("\n%d collections found.\n" % count) - - class UpdateLaneSizeScript(LaneSweeperScript): def __init__(self, _db=None, *args, **kwargs): super().__init__(_db, *args, **kwargs) diff --git a/tests/core/models/test_collection.py b/tests/core/models/test_collection.py index cde4aeeadf..fb8810b9b5 100644 --- a/tests/core/models/test_collection.py +++ b/tests/core/models/test_collection.py @@ -1,4 +1,3 @@ -import datetime import json import pytest @@ -12,15 +11,12 @@ from core.model.customlist import CustomList from core.model.datasource import DataSource from core.model.edition import Edition -from core.model.identifier import Identifier from core.model.integration import ( IntegrationConfiguration, IntegrationLibraryConfiguration, ) from core.model.licensing import Hold, License, LicensePool, Loan from core.model.work import Work -from core.util.datetime_helpers import utc_now -from core.util.string_helpers import base64 from tests.fixtures.database import DatabaseTransactionFixture @@ -190,45 +186,6 @@ def test_create_external_integration( in str(excinfo.value) ) - def test_unique_account_id( - self, example_collection_fixture: ExampleCollectionFixture - ): - db = example_collection_fixture.database_fixture - - # Most collections work like this: - overdrive = db.collection( - external_account_id="od1", data_source_name=DataSource.OVERDRIVE - ) - od_child = db.collection( - external_account_id="odchild", data_source_name=DataSource.OVERDRIVE - ) - od_child.parent = overdrive - - # The unique account ID of a primary collection is the - # external account ID. - assert "od1" == overdrive.unique_account_id - - # For children of those collections, the unique account ID is scoped - # to the parent collection. - assert "od1+odchild" == od_child.unique_account_id - - # Enki works a little differently. Enki collections don't have - # an external account ID, because all Enki collections are - # identical. - enki = db.collection(data_source_name=DataSource.ENKI) - - # So the unique account ID is the name of the data source. - assert DataSource.ENKI == enki.unique_account_id - - # A (currently hypothetical) library-specific subcollection of - # the global Enki collection must have an external_account_id, - # and its name is scoped to the parent collection as usual. 
- enki_child = db.collection( - external_account_id="enkichild", data_source_name=DataSource.ENKI - ) - enki_child.parent = enki - assert DataSource.ENKI + "+enkichild" == enki_child.unique_account_id - def test_get_protocol(self, db: DatabaseTransactionFixture): test_collection = db.collection() integration = test_collection.integration_configuration @@ -459,201 +416,6 @@ def test_explain(self, example_collection_fixture: ExampleCollectionFixture): 'External account ID: "id2"', ] == data - def test_metadata_identifier( - self, example_collection_fixture: ExampleCollectionFixture - ): - db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection - - # If the collection doesn't have its unique identifier, an error - # is raised. - pytest.raises(ValueError, getattr, test_collection, "metadata_identifier") - - def build_expected(protocol, unique_id): - encode = base64.urlsafe_b64encode - encoded = [encode(value) for value in [protocol, unique_id]] - joined = ":".join(encoded) - return encode(joined) - - # With a unique identifier, we get back the expected identifier. - test_collection.external_account_id = "id" - expected = build_expected(ExternalIntegration.OVERDRIVE, "id") - assert expected == test_collection.metadata_identifier - - # If there's a parent, its unique id is incorporated into the result. - child = db.collection( - name="Child", - protocol=ExternalIntegration.OPDS_IMPORT, - external_account_id=db.fresh_url(), - ) - child.parent = test_collection - expected = build_expected( - ExternalIntegration.OPDS_IMPORT, "id+%s" % child.external_account_id - ) - assert expected == child.metadata_identifier - - # If it's an OPDS_IMPORT collection with a url external_account_id, - # closing '/' marks are removed. - opds = db.collection( - name="OPDS", - protocol=ExternalIntegration.OPDS_IMPORT, - external_account_id=(db.fresh_url() + "/"), - ) - assert isinstance(opds.external_account_id, str) - expected = build_expected( - ExternalIntegration.OPDS_IMPORT, opds.external_account_id[:-1] - ) - assert expected == opds.metadata_identifier - - def test_from_metadata_identifier( - self, example_collection_fixture: ExampleCollectionFixture - ): - db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection - - data_source = "New data source" - - # A ValueError results if we try to look up using an invalid - # identifier. - with pytest.raises(ValueError) as excinfo: - Collection.from_metadata_identifier( - db.session, "not a real identifier", data_source=data_source - ) - assert ( - "Metadata identifier 'not a real identifier' is invalid: Incorrect padding" - in str(excinfo.value) - ) - - # Of if we pass in the empty string. - with pytest.raises(ValueError) as excinfo: - Collection.from_metadata_identifier(db.session, "", data_source=data_source) - assert "No metadata identifier provided" in str(excinfo.value) - - # No new data source was created. - def new_data_source(): - return DataSource.lookup(db.session, data_source) - - assert None == new_data_source() - - # If a mirrored collection doesn't exist, it is created. 
- test_collection.external_account_id = "id" - mirror_collection, is_new = Collection.from_metadata_identifier( - db.session, test_collection.metadata_identifier, data_source=data_source - ) - assert True == is_new - assert test_collection.metadata_identifier == mirror_collection.name - assert test_collection.protocol == mirror_collection.protocol - - # Because this isn't an OPDS collection, the external account - # ID is not stored, the data source is the default source for - # the protocol, and no new data source was created. - assert mirror_collection.external_account_id is None - assert mirror_collection.data_source is not None - assert DataSource.OVERDRIVE == mirror_collection.data_source.name - assert None == new_data_source() - - # If the mirrored collection already exists, it is returned. - collection = db.collection(external_account_id=db.fresh_url()) - mirror_collection = create( - db.session, Collection, name=collection.metadata_identifier - )[0] - assert collection.protocol is not None - mirror_collection.create_external_integration(collection.protocol) - mirror_collection.create_integration_configuration(collection.protocol) - - # Confirm that there's no external_account_id and no DataSource. - # TODO I don't understand why we don't store this information, - # even if only to keep it in an easy-to-read form. - assert None == mirror_collection.external_account_id - assert None == mirror_collection.data_source - assert None == new_data_source() - - # Now try a lookup of an OPDS Import-type collection. - result, is_new = Collection.from_metadata_identifier( - db.session, collection.metadata_identifier, data_source=data_source - ) - assert False == is_new - assert mirror_collection == result - # The external_account_id and data_source have been set now. - assert collection.external_account_id == mirror_collection.external_account_id - - # A new DataSource object has been created. - source = new_data_source() - assert "New data source" == source.name - assert source == mirror_collection.data_source - - def test_catalog_identifier( - self, example_collection_fixture: ExampleCollectionFixture - ): - """#catalog_identifier associates an identifier with the catalog""" - db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection - - identifier = db.identifier() - test_collection.catalog_identifier(identifier) - - assert 1 == len(test_collection.catalog) - assert identifier == test_collection.catalog[0] - - def test_catalog_identifiers( - self, example_collection_fixture: ExampleCollectionFixture - ): - """#catalog_identifier associates multiple identifiers with a catalog""" - db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection - - i1 = db.identifier() - i2 = db.identifier() - i3 = db.identifier() - - # One of the identifiers is already in the catalog. - test_collection.catalog_identifier(i3) - - test_collection.catalog_identifiers([i1, i2, i3]) - - # Now all three identifiers are in the catalog. - assert sorted([i1, i2, i3]) == sorted(test_collection.catalog) - - def test_unresolved_catalog( - self, example_collection_fixture: ExampleCollectionFixture - ): - db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection - - # A regular schmegular identifier: untouched, pure. - pure_id = db.identifier() - - # A 'resolved' identifier that doesn't have a work yet. - # (This isn't supposed to happen, but jic.) 
- source = DataSource.lookup(db.session, DataSource.GUTENBERG) - operation = "test-thyself" - resolved_id = db.identifier() - db.coverage_record( - resolved_id, source, operation=operation, status=CoverageRecord.SUCCESS - ) - - # An unresolved identifier--we tried to resolve it, but - # it all fell apart. - unresolved_id = db.identifier() - db.coverage_record( - unresolved_id, - source, - operation=operation, - status=CoverageRecord.TRANSIENT_FAILURE, - ) - - # An identifier with a Work already. - id_with_work = db.work().presentation_edition.primary_identifier - - test_collection.catalog_identifiers( - [pure_id, resolved_id, unresolved_id, id_with_work] - ) - - result = test_collection.unresolved_catalog(db.session, source.name, operation) - - # Only the failing identifier is in the query. - assert [unresolved_id] == result.all() - def test_disassociate_library( self, example_collection_fixture: ExampleCollectionFixture ): @@ -721,62 +483,6 @@ def test_disassociate_library( collection.disassociate_library(other_library) assert "No known external integration for collection" in str(excinfo.value) - def test_isbns_updated_since( - self, example_collection_fixture: ExampleCollectionFixture - ): - db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection - - i1 = db.identifier(identifier_type=Identifier.ISBN, foreign_id=db.isbn_take()) - i2 = db.identifier(identifier_type=Identifier.ISBN, foreign_id=db.isbn_take()) - i3 = db.identifier(identifier_type=Identifier.ISBN, foreign_id=db.isbn_take()) - i4 = db.identifier(identifier_type=Identifier.ISBN, foreign_id=db.isbn_take()) - - timestamp = utc_now() - - # An empty catalog returns nothing.. - assert [] == test_collection.isbns_updated_since(db.session, None).all() - - # Give the ISBNs some coverage. - content_cafe = DataSource.lookup(db.session, DataSource.CONTENT_CAFE) - for isbn in [i2, i3, i1]: - db.coverage_record(isbn, content_cafe) - - # Give one ISBN more than one coverage record. - oclc = DataSource.lookup(db.session, DataSource.OCLC) - i1_oclc_record = db.coverage_record(i1, oclc) - - def assert_isbns(expected, result_query): - results = [r[0] for r in result_query] - assert expected == results - - # When no timestamp is given, all ISBNs in the catalog are returned, - # in order of their CoverageRecord timestamp. - test_collection.catalog_identifiers([i1, i2]) - updated_isbns = test_collection.isbns_updated_since(db.session, None).all() - assert_isbns([i2, i1], updated_isbns) - - # That CoverageRecord timestamp is also returned. - i1_timestamp = updated_isbns[1][1] # type: ignore[index] - assert isinstance(i1_timestamp, datetime.datetime) - assert i1_oclc_record.timestamp == i1_timestamp - - # When a timestamp is passed, only works that have been updated since - # then will be returned. - timestamp = utc_now() - i1.coverage_records[0].timestamp = utc_now() - updated_isbns_2 = test_collection.isbns_updated_since(db.session, timestamp) - assert_isbns([i1], updated_isbns_2) - - # Prepare an ISBN associated with a Work. - work = db.work(with_license_pool=True) - work.license_pools[0].identifier = i2 - i2.coverage_records[0].timestamp = utc_now() - - # ISBNs that have a Work will be ignored. 
- updated_isbns_3 = test_collection.isbns_updated_since(db.session, timestamp) - assert_isbns([i1], updated_isbns_3) - def test_custom_lists(self, example_collection_fixture: ExampleCollectionFixture): db = example_collection_fixture.database_fixture test_collection = example_collection_fixture.collection diff --git a/tests/core/test_coverage.py b/tests/core/test_coverage.py index 3054d08852..9fb3d48e01 100644 --- a/tests/core/test_coverage.py +++ b/tests/core/test_coverage.py @@ -4,7 +4,6 @@ from core.coverage import ( BaseCoverageProvider, - CatalogCoverageProvider, CoverageFailure, CoverageProviderProgress, IdentifierCoverageProvider, @@ -1908,34 +1907,6 @@ def test_set_presentation_ready(self, db: DatabaseTransactionFixture): assert True == pool.work.presentation_ready -class TestCatalogCoverageProvider: - def test_items_that_need_coverage(self, db: DatabaseTransactionFixture): - c1 = db.collection() - c2 = db.collection() - - i1 = db.identifier() - c1.catalog_identifier(i1) - - i2 = db.identifier() - c2.catalog_identifier(i2) - - i3 = db.identifier() - - # This Identifier is licensed through the Collection c1, but - # it's not in the catalog--catalogs are used for different - # things. - edition, lp = db.edition(with_license_pool=True, collection=c1) - - # We have four identifiers, but only i1 shows up, because - # it's the only one in c1's catalog. - class Provider(CatalogCoverageProvider): - SERVICE_NAME = "test" - DATA_SOURCE_NAME = DataSource.OVERDRIVE - - provider = Provider(c1) - assert [i1] == provider.items_that_need_coverage().all() - - class BibliographicCoverageProviderFixture: transaction: DatabaseTransactionFixture work: Work diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index a4a8f3b17b..d732068e6c 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -55,7 +55,6 @@ IdentifierInputScript, LaneSweeperScript, LibraryInputScript, - ListCollectionMetadataIdentifiersScript, LoanNotificationsScript, MockStdin, OPDSImportScript, @@ -1988,40 +1987,6 @@ def test_subject_checked(self, db: DatabaseTransactionFixture): assert subject.checked == True -class TestListCollectionMetadataIdentifiersScript: - def test_do_run(self, db: DatabaseTransactionFixture): - output = StringIO() - script = ListCollectionMetadataIdentifiersScript(_db=db.session, output=output) - - # Create two collections. - c1 = db.collection(external_account_id=db.fresh_url()) - c2 = db.collection( - name="Local Over", - protocol=ExternalIntegration.OVERDRIVE, - external_account_id="banana", - ) - - script.do_run() - - def expected(c): - return "({}) {}/{} => {}\n".format( - str(c.id), - c.name, - c.protocol, - c.metadata_identifier, - ) - - # In the output, there's a header, a line describing the format, - # metdata identifiers for each collection, and a count of the - # collections found. 
- output = output.getvalue() - assert "COLLECTIONS" in output - assert "(id) name/protocol => metadata_identifier\n" in output - assert expected(c1) in output - assert expected(c2) in output - assert "2 collections found.\n" in output - - class TestRebuildSearchIndexScript: def test_do_run( self, From 67fb62b0cbcdbe1fd378881842e877003b4a5c1a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 22:25:53 +0000 Subject: [PATCH 140/262] Bump alembic from 1.12.0 to 1.12.1 (#1485) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index c070c487c4..0cfcb8e62b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "alembic" -version = "1.12.0" +version = "1.12.1" description = "A database migration tool for SQLAlchemy." optional = false python-versions = ">=3.7" files = [ - {file = "alembic-1.12.0-py3-none-any.whl", hash = "sha256:03226222f1cf943deee6c85d9464261a6c710cd19b4fe867a3ad1f25afda610f"}, - {file = "alembic-1.12.0.tar.gz", hash = "sha256:8e7645c32e4f200675e69f0745415335eb59a3663f5feb487abfa0b30c45888b"}, + {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, + {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, ] [package.dependencies] From dca7f6140cc5154f62003a5920ae58866e4a6695 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 26 Oct 2023 22:26:05 +0000 Subject: [PATCH 141/262] Bump pyopenssl from 23.2.0 to 23.3.0 (#1486) --- poetry.lock | 60 ++++++++++++++++++++++++++--------------------------- 1 file changed, 30 insertions(+), 30 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0cfcb8e62b..33804e1aad 100644 --- a/poetry.lock +++ b/poetry.lock @@ -827,34 +827,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.4" +version = "41.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, - {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, - {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, - {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, - {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, - {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, - {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, - {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, - {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, - {file = "cryptography-41.0.4.tar.gz", hash = 
"sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, + {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, + {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, + {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, + {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, + {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, + {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, + {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, + {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, + {file = "cryptography-41.0.5.tar.gz", hash = 
"sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, ] [package.dependencies] @@ -3130,20 +3130,20 @@ files = [ [[package]] name = "pyopenssl" -version = "23.2.0" +version = "23.3.0" description = "Python wrapper module around the OpenSSL library" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "pyOpenSSL-23.2.0-py3-none-any.whl", hash = "sha256:24f0dc5227396b3e831f4c7f602b950a5e9833d292c8e4a2e06b709292806ae2"}, - {file = "pyOpenSSL-23.2.0.tar.gz", hash = "sha256:276f931f55a452e7dea69c7173e984eb2a4407ce413c918aa34b55f82f9b8bac"}, + {file = "pyOpenSSL-23.3.0-py3-none-any.whl", hash = "sha256:6756834481d9ed5470f4a9393455154bc92fe7a64b7bc6ee2c804e78c52099b2"}, + {file = "pyOpenSSL-23.3.0.tar.gz", hash = "sha256:6b2cba5cc46e822750ec3e5a81ee12819850b11303630d575e98108a079c2b12"}, ] [package.dependencies] -cryptography = ">=38.0.0,<40.0.0 || >40.0.0,<40.0.1 || >40.0.1,<42" +cryptography = ">=41.0.5,<42" [package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0)", "sphinx-rtd-theme"] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] test = ["flaky", "pretend", "pytest (>=3.0.1)"] [[package]] From 6c99b096d046d8febe872da3fde32cf137ef8262 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Fri, 27 Oct 2023 13:01:48 +0530 Subject: [PATCH 142/262] PP-564 Import duration from palace marketplace (#1477) * audiobook duration ingested from OPDS into the edition metadata * Float durations for audiobooks are serialized into the feeds * OPDS2 duration import test --- ...23_cc084e35e037_add_duration_to_edition.py | 24 +++++++++++++++++++ core/feed/annotator/base.py | 3 +++ core/feed/serializer/opds.py | 6 +++++ core/feed/serializer/opds2.py | 2 ++ core/feed/types.py | 1 + core/metadata_layer.py | 3 +++ core/model/edition.py | 16 ++++++++++++- core/opds2_import.py | 3 +++ tests/api/feed/test_annotators.py | 2 ++ tests/api/feed/test_opds2_serializer.py | 2 ++ tests/api/feed/test_opds_serializer.py | 5 ++++ tests/core/files/opds2/feed.json | 1 + tests/core/test_metadata.py | 6 +++++ tests/core/test_opds2_import.py | 1 + 14 files changed, 74 insertions(+), 1 deletion(-) create mode 100644 alembic/versions/20231023_cc084e35e037_add_duration_to_edition.py diff --git a/alembic/versions/20231023_cc084e35e037_add_duration_to_edition.py b/alembic/versions/20231023_cc084e35e037_add_duration_to_edition.py new file mode 100644 index 0000000000..db43a7f587 --- /dev/null +++ b/alembic/versions/20231023_cc084e35e037_add_duration_to_edition.py @@ -0,0 +1,24 @@ +"""Add duration to edition + +Revision ID: cc084e35e037 +Revises: 0739d5558dda +Create Date: 2023-10-23 10:58:21.856412+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "cc084e35e037" +down_revision = "7fceb9488bc6" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column("editions", sa.Column("duration", sa.Float(), nullable=True)) + + +def downgrade() -> None: + op.drop_column("editions", "duration") diff --git a/core/feed/annotator/base.py b/core/feed/annotator/base.py index d100cdc2d4..8c7577e8ab 100644 --- a/core/feed/annotator/base.py +++ b/core/feed/annotator/base.py @@ -304,6 +304,9 @@ def annotate_work_entry( if edition.series: computed.series = self.series(edition.series, edition.series_position) + if edition.duration is not None: + computed.duration = float(edition.duration) + content = self.content(work) if content: computed.summary = FeedEntryType(text=content) diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index 97f3ac2e39..aa4c08c484 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -153,6 +153,12 @@ def serialize_work_entry(self, feed_entry: WorkEntryData) -> etree._Element: feed_entry.subtitle.text, ) ) + if feed_entry.duration is not None: + entry.append( + OPDSFeed.E( + f"{{{OPDSFeed.DCTERMS_NS}}}duration", str(feed_entry.duration) + ) + ) if feed_entry.summary: entry.append(OPDSFeed.E("summary", feed_entry.summary.text)) if feed_entry.pwid: diff --git a/core/feed/serializer/opds2.py b/core/feed/serializer/opds2.py index fca7c93a6b..91fe915cb2 100644 --- a/core/feed/serializer/opds2.py +++ b/core/feed/serializer/opds2.py @@ -72,6 +72,8 @@ def serialize_work_entry(self, data: WorkEntryData) -> Dict[str, Any]: metadata["title"] = data.title.text if data.sort_title: metadata["sortAs"] = data.sort_title.text + if data.duration is not None: + metadata["duration"] = data.duration if data.subtitle: metadata["subtitle"] = data.subtitle.text diff --git a/core/feed/types.py b/core/feed/types.py index f81b70fb36..cdf5207bd5 100644 --- a/core/feed/types.py +++ b/core/feed/types.py @@ -150,6 +150,7 @@ class WorkEntryData(BaseModel): identifier: Optional[str] = None pwid: Optional[str] = None issued: Optional[datetime | date] = None + duration: Optional[float] = None summary: Optional[FeedEntryType] = None language: Optional[FeedEntryType] = None diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 1b02c7534a..f4ef93c1a7 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -1087,6 +1087,7 @@ class Metadata: "subtitle", "language", "medium", + "duration", "series", "series_position", "publisher", @@ -1117,6 +1118,7 @@ def __init__( measurements=None, links=None, data_source_last_updated=None, + duration=None, # Note: brought back to keep callers of bibliographic extraction process_one() methods simple. 
circulation=None, **kwargs, @@ -1143,6 +1145,7 @@ def __init__( self.imprint = imprint self.issued = issued self.published = published + self.duration = duration self.primary_identifier = primary_identifier self.identifiers = identifiers or [] diff --git a/core/model/edition.py b/core/model/edition.py index 59c395d46b..af435a7692 100644 --- a/core/model/edition.py +++ b/core/model/edition.py @@ -5,7 +5,17 @@ from collections import defaultdict from typing import TYPE_CHECKING, Dict, List -from sqlalchemy import Column, Date, Enum, ForeignKey, Index, Integer, String, Unicode +from sqlalchemy import ( + Column, + Date, + Enum, + Float, + ForeignKey, + Index, + Integer, + String, + Unicode, +) from sqlalchemy.dialects.postgresql import JSON from sqlalchemy.ext.mutable import MutableDict from sqlalchemy.orm import Mapped, relationship @@ -126,6 +136,10 @@ class Edition(Base, EditionConstants): medium = Column(MEDIUM_ENUM, index=True) + # The playtime duration of an audiobook (seconds) + # https://github.com/readium/webpub-manifest/tree/master/contexts/default#duration-and-number-of-pages + duration = Column(Float, nullable=True) + cover_id = Column( Integer, ForeignKey("resources.id", use_alter=True, name="fk_editions_summary_id"), diff --git a/core/opds2_import.py b/core/opds2_import.py index 3b2947651b..418caa052d 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -769,6 +769,8 @@ def _extract_publication_metadata( publication.metadata.contributors, Contributor.CONTRIBUTOR_ROLE ) ) + # Audiobook duration + duration = publication.metadata.duration feed_self_url = first_or_default( feed.links.get_by_rel(OPDS2LinkRelationsRegistry.SELF.key) @@ -826,6 +828,7 @@ def _extract_publication_metadata( series_position=series_position, links=links, data_source_last_updated=last_opds_update, + duration=duration, circulation=circulation_data, ) diff --git a/tests/api/feed/test_annotators.py b/tests/api/feed/test_annotators.py index d3c4f5929f..c09d8e90cb 100644 --- a/tests/api/feed/test_annotators.py +++ b/tests/api/feed/test_annotators.py @@ -416,6 +416,7 @@ def test_annotate_work_entry(self, db: DatabaseTransactionFixture): edition.publisher = "publisher" edition.imprint = "imprint" edition.issued = utc_now().date() + edition.duration = 10 # datetime for > today pool.availability_time = (utc_now() + timedelta(days=1)).date() @@ -447,6 +448,7 @@ def test_annotate_work_entry(self, db: DatabaseTransactionFixture): assert data.summary and data.summary.get("type") == "html" assert data.publisher == FeedEntryType(text="publisher") assert data.issued == edition.issued + assert data.duration == edition.duration # Missing values assert data.language == None diff --git a/tests/api/feed/test_opds2_serializer.py b/tests/api/feed/test_opds2_serializer.py index 506a21d961..9619268028 100644 --- a/tests/api/feed/test_opds2_serializer.py +++ b/tests/api/feed/test_opds2_serializer.py @@ -82,6 +82,7 @@ def test_serialize_work_entry(self): Acquisition(href="http://acquisition", rel="acquisition-rel") ], other_links=[Link(href="http://link", rel="rel")], + duration=10, ) serializer = OPDS2Serializer() @@ -92,6 +93,7 @@ def test_serialize_work_entry(self): assert metadata["@type"] == data.additionalType assert metadata["title"] == data.title.text assert metadata["sortAs"] == data.sort_title.text + assert metadata["duration"] == data.duration assert metadata["subtitle"] == data.subtitle.text assert metadata["identifier"] == data.identifier assert metadata["language"] == data.language.text diff --git 
a/tests/api/feed/test_opds_serializer.py b/tests/api/feed/test_opds_serializer.py index 142d406e7a..bf9b60a20f 100644 --- a/tests/api/feed/test_opds_serializer.py +++ b/tests/api/feed/test_opds_serializer.py @@ -155,6 +155,7 @@ def test_serialize_work_entry(self): FeedEntryType.create(scheme="scheme", term="term", label="label") ], ratings=[FeedEntryType(text="rating")], + duration=10, ) element = OPDS1Serializer().serialize_work_entry(data) @@ -238,6 +239,10 @@ def test_serialize_work_entry(self): assert len(child) == 1 assert child[0].text == data.ratings[0].text + child = element.findall(f"{{{OPDSFeed.DCTERMS_NS}}}duration") + assert len(child) == 1 + assert child[0].text == "10" + def test_serialize_work_entry_empty(self): # A no-data work entry element = OPDS1Serializer().serialize_work_entry(WorkEntryData()) diff --git a/tests/core/files/opds2/feed.json b/tests/core/files/opds2/feed.json index 98491a13d8..918afa4fd1 100644 --- a/tests/core/files/opds2/feed.json +++ b/tests/core/files/opds2/feed.json @@ -16,6 +16,7 @@ "title": "Moby-Dick", "author": "Herman Melville", "identifier": "urn:isbn:978-3-16-148410-0", + "duration": 100.2, "language": "en", "publisher": { "name": "Test Publisher" diff --git a/tests/core/test_metadata.py b/tests/core/test_metadata.py index 49ea2b2c6b..7d51d3f95d 100644 --- a/tests/core/test_metadata.py +++ b/tests/core/test_metadata.py @@ -668,6 +668,7 @@ def test_from_edition(self, db: DatabaseTransactionFixture): edition.primary_identifier.add_link( Hyperlink.IMAGE, "image", edition.data_source ) + edition.duration = 100.1 metadata = Metadata.from_edition(edition) # make sure the metadata and the originating edition match @@ -704,6 +705,7 @@ def test_update(self, db: DatabaseTransactionFixture): edition_old.subtitle = "old_subtitile" edition_old.series = "old_series" edition_old.series_position = 5 + edition_old.duration = 10 metadata_old = Metadata.from_edition(edition_old) edition_new, pool = db.edition(with_license_pool=True) @@ -712,6 +714,7 @@ def test_update(self, db: DatabaseTransactionFixture): edition_new.subtitle = "new_updated_subtitile" edition_new.series = "new_series" edition_new.series_position = 0 + edition_new.duration = 11 metadata_new = Metadata.from_edition(edition_new) metadata_old.update(metadata_new) @@ -720,6 +723,7 @@ def test_update(self, db: DatabaseTransactionFixture): assert metadata_old.subtitle == metadata_new.subtitle assert metadata_old.series == edition_new.series assert metadata_old.series_position == edition_new.series_position + assert metadata_old.duration == metadata_new.duration def test_apply(self, db: DatabaseTransactionFixture): edition_old, pool = db.edition(with_license_pool=True) @@ -737,6 +741,7 @@ def test_apply(self, db: DatabaseTransactionFixture): imprint="Follywood", published=datetime.date(1987, 5, 4), issued=datetime.date(1989, 4, 5), + duration=10, ) edition_new, changed = metadata.apply(edition_old, pool.collection) @@ -753,6 +758,7 @@ def test_apply(self, db: DatabaseTransactionFixture): assert edition_new.imprint == "Follywood" assert edition_new.published == datetime.date(1987, 5, 4) assert edition_new.issued == datetime.date(1989, 4, 5) + assert edition_new.duration == 10 edition_new, changed = metadata.apply(edition_new, pool.collection) assert changed == False diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index 567e57e310..8a7d1c5556 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -169,6 +169,7 @@ def 
test_opds2_importer_correctly_imports_valid_opds2_feed( assert "eng" == moby_dick_edition.language assert EditionConstants.BOOK_MEDIUM == moby_dick_edition.medium assert "Herman Melville" == moby_dick_edition.author + assert moby_dick_edition.duration == 100.2 assert 1 == len(moby_dick_edition.author_contributors) [moby_dick_author] = moby_dick_edition.author_contributors From 94ee662608128f1cede80c4a1568c5f93f26443f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 23:59:02 +0000 Subject: [PATCH 143/262] Bump types-psycopg2 from 2.9.21.14 to 2.9.21.15 (#1488) --- poetry.lock | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 33804e1aad..c07f05db01 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2771,6 +2771,8 @@ files = [ {file = "psycopg2-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:426f9f29bde126913a20a96ff8ce7d73fd8a216cfb323b1f04da402d452853c3"}, {file = "psycopg2-2.9.9-cp311-cp311-win32.whl", hash = "sha256:ade01303ccf7ae12c356a5e10911c9e1c51136003a9a1d92f7aa9d010fb98372"}, {file = "psycopg2-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:121081ea2e76729acfb0673ff33755e8703d45e926e416cb59bae3a86c6a4981"}, + {file = "psycopg2-2.9.9-cp312-cp312-win32.whl", hash = "sha256:d735786acc7dd25815e89cc4ad529a43af779db2e25aa7c626de864127e5a024"}, + {file = "psycopg2-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:a7653d00b732afb6fc597e29c50ad28087dcb4fbfb28e86092277a559ae4e693"}, {file = "psycopg2-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:5e0d98cade4f0e0304d7d6f25bbfbc5bd186e07b38eac65379309c4ca3193efa"}, {file = "psycopg2-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:7e2dacf8b009a1c1e843b5213a87f7c544b2b042476ed7755be813eaf4e8347a"}, {file = "psycopg2-2.9.9-cp38-cp38-win32.whl", hash = "sha256:ff432630e510709564c01dafdbe996cb552e0b9f3f065eb89bdce5bd31fabf4c"}, @@ -2821,6 +2823,8 @@ files = [ {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, @@ -4169,13 +4173,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.14" +version = "2.9.21.15" description = "Typing stubs for psycopg2" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-psycopg2-2.9.21.14.tar.gz", hash = 
"sha256:bf73a0ac4da4e278c89bf1b01fc596d5a5ac7a356cfe6ac0249f47b9e259f868"}, - {file = "types_psycopg2-2.9.21.14-py3-none-any.whl", hash = "sha256:cd9c5350631f3bc6184ec8d48f2ed31d4ea660f89d0fffe78239450782f383c5"}, + {file = "types-psycopg2-2.9.21.15.tar.gz", hash = "sha256:cf99b62ab32cd4ef412fc3c4da1c29ca5a130847dff06d709b84a523802406f0"}, + {file = "types_psycopg2-2.9.21.15-py3-none-any.whl", hash = "sha256:cc80479def02e4dd1ef21649d82f04426c73bc0693bcc0a8b5223c7c168472af"}, ] [[package]] From 55b4b333ad1db5d145bd98de390b8e8c4a258473 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 30 Oct 2023 23:59:24 +0000 Subject: [PATCH 144/262] Bump types-pyopenssl from 23.2.0.2 to 23.3.0.0 (#1489) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index c07f05db01..12c2b3f35d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4184,13 +4184,13 @@ files = [ [[package]] name = "types-pyopenssl" -version = "23.2.0.2" +version = "23.3.0.0" description = "Typing stubs for pyOpenSSL" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-pyOpenSSL-23.2.0.2.tar.gz", hash = "sha256:6a010dac9ecd42b582d7dd2cc3e9e40486b79b3b64bb2fffba1474ff96af906d"}, - {file = "types_pyOpenSSL-23.2.0.2-py3-none-any.whl", hash = "sha256:19536aa3debfbe25a918cf0d898e9f5fbbe6f3594a429da7914bf331deb1b342"}, + {file = "types-pyOpenSSL-23.3.0.0.tar.gz", hash = "sha256:5ffb077fe70b699c88d5caab999ae80e192fe28bf6cda7989b7e79b1e4e2dcd3"}, + {file = "types_pyOpenSSL-23.3.0.0-py3-none-any.whl", hash = "sha256:00171433653265843b7469ddb9f3c86d698668064cc33ef10537822156130ebf"}, ] [package.dependencies] From e7813df3de36095ff73b2565ce3c7a0ce1386f0b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 31 Oct 2023 00:00:04 +0000 Subject: [PATCH 145/262] Bump sqlalchemy from 1.4.49 to 1.4.50 (#1490) --- poetry.lock | 77 +++++++++++++++++++---------------------------------- 1 file changed, 27 insertions(+), 50 deletions(-) diff --git a/poetry.lock b/poetry.lock index 12c2b3f35d..9b9efd42ec 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3910,59 +3910,36 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.49" +version = "1.4.50" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.49-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e126cf98b7fd38f1e33c64484406b78e937b1a280e078ef558b95bf5b6895f6"}, - {file = "SQLAlchemy-1.4.49-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:03db81b89fe7ef3857b4a00b63dedd632d6183d4ea5a31c5d8a92e000a41fc71"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:95b9df9afd680b7a3b13b38adf6e3a38995da5e162cc7524ef08e3be4e5ed3e1"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a63e43bf3f668c11bb0444ce6e809c1227b8f067ca1068898f3008a273f52b09"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca46de16650d143a928d10842939dab208e8d8c3a9a8757600cae9b7c579c5cd"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:f835c050ebaa4e48b18403bed2c0fda986525896efd76c245bdd4db995e51a4c"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c21b172dfb22e0db303ff6419451f0cac891d2e911bb9fbf8003d717f1bcf91"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-win32.whl", hash = "sha256:5fb1ebdfc8373b5a291485757bd6431de8d7ed42c27439f543c81f6c8febd729"}, - {file = "SQLAlchemy-1.4.49-cp310-cp310-win_amd64.whl", hash = "sha256:f8a65990c9c490f4651b5c02abccc9f113a7f56fa482031ac8cb88b70bc8ccaa"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8923dfdf24d5aa8a3adb59723f54118dd4fe62cf59ed0d0d65d940579c1170a4"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9ab2c507a7a439f13ca4499db6d3f50423d1d65dc9b5ed897e70941d9e135b0"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5debe7d49b8acf1f3035317e63d9ec8d5e4d904c6e75a2a9246a119f5f2fdf3d"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-win32.whl", hash = "sha256:82b08e82da3756765c2e75f327b9bf6b0f043c9c3925fb95fb51e1567fa4ee87"}, - {file = "SQLAlchemy-1.4.49-cp311-cp311-win_amd64.whl", hash = "sha256:171e04eeb5d1c0d96a544caf982621a1711d078dbc5c96f11d6469169bd003f1"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f23755c384c2969ca2f7667a83f7c5648fcf8b62a3f2bbd883d805454964a800"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8396e896e08e37032e87e7fbf4a15f431aa878c286dc7f79e616c2feacdb366c"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66da9627cfcc43bbdebd47bfe0145bb662041472393c03b7802253993b6b7c90"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-win32.whl", hash = "sha256:9a06e046ffeb8a484279e54bda0a5abfd9675f594a2e38ef3133d7e4d75b6214"}, - {file = "SQLAlchemy-1.4.49-cp312-cp312-win_amd64.whl", hash = "sha256:7cf8b90ad84ad3a45098b1c9f56f2b161601e4670827d6b892ea0e884569bd1d"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:36e58f8c4fe43984384e3fbe6341ac99b6b4e083de2fe838f0fdb91cebe9e9cb"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b31e67ff419013f99ad6f8fc73ee19ea31585e1e9fe773744c0f3ce58c039c30"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc22807a7e161c0d8f3da34018ab7c97ef6223578fcdd99b1d3e7ed1100a5db"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c14b29d9e1529f99efd550cd04dbb6db6ba5d690abb96d52de2bff4ed518bc95"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c40f3470e084d31247aea228aa1c39bbc0904c2b9ccbf5d3cfa2ea2dac06f26d"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-win32.whl", hash = "sha256:706bfa02157b97c136547c406f263e4c6274a7b061b3eb9742915dd774bbc264"}, - {file = "SQLAlchemy-1.4.49-cp36-cp36m-win_amd64.whl", hash = "sha256:a7f7b5c07ae5c0cfd24c2db86071fb2a3d947da7bd487e359cc91e67ac1c6d2e"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-macosx_11_0_x86_64.whl", hash = 
"sha256:4afbbf5ef41ac18e02c8dc1f86c04b22b7a2125f2a030e25bbb4aff31abb224b"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24e300c0c2147484a002b175f4e1361f102e82c345bf263242f0449672a4bccf"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:393cd06c3b00b57f5421e2133e088df9cabcececcea180327e43b937b5a7caa5"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:201de072b818f8ad55c80d18d1a788729cccf9be6d9dc3b9d8613b053cd4836d"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7653ed6817c710d0c95558232aba799307d14ae084cc9b1f4c389157ec50df5c"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-win32.whl", hash = "sha256:647e0b309cb4512b1f1b78471fdaf72921b6fa6e750b9f891e09c6e2f0e5326f"}, - {file = "SQLAlchemy-1.4.49-cp37-cp37m-win_amd64.whl", hash = "sha256:ab73ed1a05ff539afc4a7f8cf371764cdf79768ecb7d2ec691e3ff89abbc541e"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:37ce517c011560d68f1ffb28af65d7e06f873f191eb3a73af5671e9c3fada08a"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1878ce508edea4a879015ab5215546c444233881301e97ca16fe251e89f1c55"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ab792ca493891d7a45a077e35b418f68435efb3e1706cb8155e20e86a9013c"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0e8e608983e6f85d0852ca61f97e521b62e67969e6e640fe6c6b575d4db68557"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ccf956da45290df6e809ea12c54c02ace7f8ff4d765d6d3dfb3655ee876ce58d"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-win32.whl", hash = "sha256:f167c8175ab908ce48bd6550679cc6ea20ae169379e73c7720a28f89e53aa532"}, - {file = "SQLAlchemy-1.4.49-cp38-cp38-win_amd64.whl", hash = "sha256:45806315aae81a0c202752558f0df52b42d11dd7ba0097bf71e253b4215f34f4"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:b6d0c4b15d65087738a6e22e0ff461b407533ff65a73b818089efc8eb2b3e1de"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a843e34abfd4c797018fd8d00ffffa99fd5184c421f190b6ca99def4087689bd"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:738d7321212941ab19ba2acf02a68b8ee64987b248ffa2101630e8fccb549e0d"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1c890421651b45a681181301b3497e4d57c0d01dc001e10438a40e9a9c25ee77"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d26f280b8f0a8f497bc10573849ad6dc62e671d2468826e5c748d04ed9e670d5"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-win32.whl", hash = 
"sha256:ec2268de67f73b43320383947e74700e95c6770d0c68c4e615e9897e46296294"}, - {file = "SQLAlchemy-1.4.49-cp39-cp39-win_amd64.whl", hash = "sha256:bbdf16372859b8ed3f4d05f925a984771cd2abd18bd187042f24be4886c2a15f"}, - {file = "SQLAlchemy-1.4.49.tar.gz", hash = "sha256:06ff25cbae30c396c4b7737464f2a7fc37a67b7da409993b182b024cec80aed9"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb"}, + {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7"}, + {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc"}, + {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516"}, + {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c"}, + {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5"}, + {file = 
"SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab"}, + {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3"}, + {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be"}, + {file = "SQLAlchemy-1.4.50.tar.gz", hash = "sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf"}, ] [package.dependencies] @@ -3971,7 +3948,7 @@ mypy = {version = ">=0.910", optional = true, markers = "python_version >= \"3\" sqlalchemy2-stubs = {version = "*", optional = true, markers = "extra == \"mypy\""} [package.extras] -aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] +aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] From 4d71076e7e55a69adea8e44b74014ee089b705db Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Nov 2023 12:08:54 +0000 Subject: [PATCH 146/262] Bump types-jsonschema from 4.19.0.3 to 4.19.0.4 (#1491) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9b9efd42ec..66c1f94fac 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4125,13 +4125,13 @@ Flask = ">=2.0.0" [[package]] name = "types-jsonschema" -version = "4.19.0.3" +version = "4.19.0.4" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" files = [ - {file = "types-jsonschema-4.19.0.3.tar.gz", hash = "sha256:e0fc0f5d51fd0988bf193be42174a5376b0096820ff79505d9c1b66de23f0581"}, - {file = "types_jsonschema-4.19.0.3-py3-none-any.whl", hash = "sha256:5cedbb661e5ca88d95b94b79902423e3f97a389c245e5fe0ab384122f27d56b9"}, + {file = "types-jsonschema-4.19.0.4.tar.gz", hash = 
"sha256:994feb6632818259c4b5dbd733867824cb475029a6abc2c2b5201a2268b6e7d2"}, + {file = "types_jsonschema-4.19.0.4-py3-none-any.whl", hash = "sha256:b73c3f4ba3cd8108602d1198a438e2698d5eb6b9db206ed89a33e24729b0abe7"}, ] [package.dependencies] From e12881f22e8391283454f7d5690e5715d0a112e6 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 1 Nov 2023 18:40:54 +0530 Subject: [PATCH 147/262] PP-637 Duration information for Overdrive (#1492) * Added duration extraction during the overdrive format sweep --- api/overdrive.py | 32 +++++++++++++++++------- tests/api/files/overdrive/audiobook.json | 2 +- tests/api/test_overdrive.py | 10 ++++++++ 3 files changed, 34 insertions(+), 10 deletions(-) diff --git a/api/overdrive.py b/api/overdrive.py index 8e8da7b8cc..5b14468cfb 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -82,6 +82,7 @@ from core.scripts import InputScript, Script from core.util.datetime_helpers import strptime_utc, utc_now from core.util.http import HTTP, BadResponseException +from core.util.log import LoggerMixin from core.util.string_helpers import base64 @@ -2159,11 +2160,9 @@ class OverdriveData: max_retry_count: int = 0 -class OverdriveRepresentationExtractor: +class OverdriveRepresentationExtractor(LoggerMixin): """Extract useful information from Overdrive's JSON representations.""" - log = logging.getLogger("Overdrive representation extractor") - def __init__(self, api): """Constructor. @@ -2183,7 +2182,7 @@ def availability_link_list(cls, book_list): products = book_list["products"] for product in products: if not "id" in product: - cls.log.warning("No ID found in %r", product) + cls.logger().warning("No ID found in %r", product) continue book_id = product["id"] data = dict( @@ -2344,7 +2343,7 @@ def parse_roles(cls, id, rolestring): processed = [] for x in roles: if x not in cls.overdrive_role_to_simplified_role: - cls.log.error("Could not process role %s for %s", x, id) + cls.logger().error("Could not process role %s for %s", x, id) else: processed.append(cls.overdrive_role_to_simplified_role[x]) return processed @@ -2488,6 +2487,8 @@ def book_info_to_metadata( # Otherwise we'll probably give it a fraction of this weight. trusted_weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT + duration: Optional[int] = None + if include_bibliographic: title = book.get("title", None) sort_title = book.get("sortTitle") @@ -2563,7 +2564,7 @@ def book_info_to_metadata( overdrive_medium and overdrive_medium not in cls.overdrive_medium_to_simplified_medium ): - cls.log.error( + cls.logger().error( "Could not process medium %s for %s", overdrive_medium, overdrive_id ) @@ -2607,6 +2608,18 @@ def book_info_to_metadata( links = [] sample_hrefs = set() for format in book.get("formats", []): + duration_str: Optional[str] = format.get("duration") + if duration_str is not None: + # Using this method only the last valid duration attribute is captured + # If there are multiple formats with different durations, the edition will ignore the rest + try: + hrs, mins, secs = duration_str.split(":") + duration = (int(hrs) * 3600) + (int(mins) * 60) + int(secs) + except Exception as ex: + cls.logger().error( + f"Duration ({duration_str}) could not be parsed: {ex}" + ) + for new_id in format.get("identifiers", []): t = new_id["type"] v = new_id["value"] @@ -2625,7 +2638,7 @@ def book_info_to_metadata( # books appear to have the same ISBN. ISBNs # which fail check digit checks or are invalid # also can occur. Log them for review. 
- cls.log.info("Bad ISBN value provided: %s", orig_v) + cls.logger().info("Bad ISBN value provided: %s", orig_v) continue elif t == "DOI": type_key = Identifier.DOI @@ -2654,7 +2667,7 @@ def book_info_to_metadata( ) if not content_type: # Unusable by us. - cls.log.warning( + cls.logger().warning( f"Did not find a sample format mapping for '{overdrive_format_name}': {href}" ) continue @@ -2746,6 +2759,7 @@ def book_info_to_metadata( contributors=contributors, measurements=measurements, links=links, + duration=duration, ) else: metadata = Metadata( @@ -2762,7 +2776,7 @@ def book_info_to_metadata( for content_type, drm_scheme in internal_formats: formats.append(FormatData(content_type, drm_scheme)) elif format_id not in cls.ignorable_overdrive_formats: - cls.log.error( + cls.logger().error( "Could not process Overdrive format %s for %s", format_id, overdrive_id, diff --git a/tests/api/files/overdrive/audiobook.json b/tests/api/files/overdrive/audiobook.json index 8b44c1b01b..1b3e8bf4a6 100644 --- a/tests/api/files/overdrive/audiobook.json +++ b/tests/api/files/overdrive/audiobook.json @@ -1 +1 @@ -{"publisher": "Duke Classics", "popularity": 510, "links": {"self": {"href": "https://integration.api.overdrive.com/v1/collections/v1L1BBQ0AAA2_/products/13714f86-7a9c-4ba6-80b4-bc8a83de8615/metadata", "type": "application/vnd.overdrive.api+json"}}, "title": "Pride and Prejudice", "reviews": [{"content": "Juliet Stevenson delivers Austen's lovely prose with the grace and intelligence that it deserves. Most of the novel moves at a stately pace, even the most emotional peaks of the love story, and Stevenson delivers it with the measured cadence it demands. She isn't quite as strong when speaking the male dialogue, but when she's speaking as any of the female characters--especially the silly, breathy ones like Lydia or Mrs. Bennet--she strikes the perfect tone. However, this abridgment leaves much to be desired. Some of Austen's wittiest lines are cut, and so much is lost from some interactions that several characters come off far more flatly than written. G.T.B. (c) AudioFile 2004, Portland, Maine", "source": "\"AudioFile", "premium": true}, {"content": "

June 4, 2018
Collagist Fabe adds flair to Jane Austen\u2019s Pride and Prejudice with 39 original illustrations that accompany the unabridged text. Fabe\u2019s collages overlay bright, watercolor-washed scenes with retro cut-paper figures and objects sampled from fashion magazines from the 1930s to the \u201950s. Accompanying each tableau is a quote from the Pride and Prejudice passage that inspired it. Like Austen\u2019s book, Fabe\u2019s work explores arcane customs of beauty and courtship, pageantry and social artifice: in one collage, a housewife holds a tray of drinks while a man sits happily with a sandwich in hand in the distance. While tinged with irony and more than a dash of social commentary, the collages nevertheless have a spirit of glee and evidence deep reverence for the novel. As Fabe describes in a preface, Austen \u201cwas a little bit mean\u2014the way real people are mean\u2014so there are both heroes and nincompoops. Family is both beloved and annoying. That is Austen\u2019s genius, her ability to describe people in all their frailty and humor.\u201d This is a sweet and visually appealing homage.

", "source": "\"Publisher's", "premium": true}], "crossRefId": 479501, "isPublicPerformanceAllowed": false, "mediaType": "Audiobook", "starRating": 4.5, "languages": [{"code": "en", "name": "English"}], "edition": "Unabridged", "isOwnedByCollections": true, "formats": [{"partCount": 11, "rights": [{"type": "PlayOnPC", "value": 1}, {"type": "PlayOnPCCount", "value": -1}, {"type": "BurnToCD", "value": 1}, {"type": "BurnToCDCount", "value": -1}, {"type": "PlayOnPM", "value": 1}, {"type": "TransferToSDMI", "value": 1}, {"type": "TransferToNonSDMI", "value": 1}, {"type": "TransferCount", "value": -1}, {"type": "CollaborativePlay", "value": 0}, {"type": "PublicPerformance", "value": 0}, {"type": "TranscodeToAAC", "value": 1}], "fileName": "PrideandPrejudice", "fileSize": 355553350, "samples": [{"url": "https://excerpts.cdn.overdrive.com/FormatType-25/2389-1/479501-PrideAndPrejudice.wma", "source": "Introduction", "formatType": "audiobook-wma"}, {"url": "https://excerpts.cdn.overdrive.com/FormatType-425/2389-1/479501-PrideAndPrejudice.mp3", "source": "Part 1", "formatType": "audiobook-mp3"}, {"url": "https://samples.overdrive.com/?crid=13714F86-7A9C-4BA6-80B4-BC8A83DE8615&.epub-sample.overdrive.com", "source": "Part 1", "formatType": "audiobook-overdrive"}], "onSaleDate": "11/15/2010", "id": "audiobook-mp3", "name": "OverDrive MP3 Audiobook"}, {"partCount": 0, "fileName": "PrideandPrejudice", "fileSize": 355506882, "samples": [{"url": "https://excerpts.cdn.overdrive.com/FormatType-25/2389-1/479501-PrideAndPrejudice.wma", "source": "Introduction", "formatType": "audiobook-wma"}, {"url": "https://excerpts.cdn.overdrive.com/FormatType-425/2389-1/479501-PrideAndPrejudice.mp3", "source": "Part 1", "formatType": "audiobook-mp3"}, {"url": "https://samples.overdrive.com/?crid=13714F86-7A9C-4BA6-80B4-BC8A83DE8615&.epub-sample.overdrive.com", "source": "Part 1", "formatType": "audiobook-overdrive"}], "onSaleDate": "11/15/2010", "id": "audiobook-overdrive", "name": "OverDrive Listen"}], "sortTitle": "Pride and Prejudice", "images": {"cover150Wide": {"href": "https://img1.od-cdn.com/ImageType-150/2389-1/137/14F/86/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img150.jpg", "type": "image/jpeg"}, "cover300Wide": {"href": "https://img1.od-cdn.com/ImageType-400/2389-1/137/14F/86/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img400.jpg", "type": "image/jpeg"}, "cover": {"href": "https://img1.od-cdn.com/ImageType-100/2389-1/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img100.jpg", "type": "image/jpeg"}, "thumbnail": {"href": "https://img1.od-cdn.com/ImageType-200/2389-1/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img200.jpg", "type": "image/jpeg"}}, "isPublicDomain": false, "subjects": [{"value": "Classic Literature"}, {"value": "Fiction"}], "creators": [{"fileAs": "Austen, Jane", "role": "Author", "name": "Jane Austen"}, {"fileAs": "Kellgren, Kate", "role": "Narrator", "name": "Kate Kellgren"}], "shortDescription": "

Pride and Prejudice is one of the most beloved novels of all time. Elizabeth Bennett, the book's poverty-bound heroine has charmed readers with her wit and sincerity since the book was first published in 1813. Its hero, the handsome and wealthy Mr. Darcy, infuriates Elizabeth \u2014 and women readers around the world \u2014 with his rude arrogance, but all fall in love with him anyway.

A comedy of manners, romance, and neighborhood drama \u2014 it's all there and more.", "id": "13714f86-7a9c-4ba6-80b4-bc8a83de8615"} +{"publisher": "Duke Classics", "popularity": 510, "links": {"self": {"href": "https://integration.api.overdrive.com/v1/collections/v1L1BBQ0AAA2_/products/13714f86-7a9c-4ba6-80b4-bc8a83de8615/metadata", "type": "application/vnd.overdrive.api+json"}}, "title": "Pride and Prejudice", "reviews": [{"content": "Juliet Stevenson delivers Austen's lovely prose with the grace and intelligence that it deserves. Most of the novel moves at a stately pace, even the most emotional peaks of the love story, and Stevenson delivers it with the measured cadence it demands. She isn't quite as strong when speaking the male dialogue, but when she's speaking as any of the female characters--especially the silly, breathy ones like Lydia or Mrs. Bennet--she strikes the perfect tone. However, this abridgment leaves much to be desired. Some of Austen's wittiest lines are cut, and so much is lost from some interactions that several characters come off far more flatly than written. G.T.B. (c) AudioFile 2004, Portland, Maine", "source": "\"AudioFile", "premium": true}, {"content": "

June 4, 2018
Collagist Fabe adds flair to Jane Austen\u2019s Pride and Prejudice with 39 original illustrations that accompany the unabridged text. Fabe\u2019s collages overlay bright, watercolor-washed scenes with retro cut-paper figures and objects sampled from fashion magazines from the 1930s to the \u201950s. Accompanying each tableau is a quote from the Pride and Prejudice passage that inspired it. Like Austen\u2019s book, Fabe\u2019s work explores arcane customs of beauty and courtship, pageantry and social artifice: in one collage, a housewife holds a tray of drinks while a man sits happily with a sandwich in hand in the distance. While tinged with irony and more than a dash of social commentary, the collages nevertheless have a spirit of glee and evidence deep reverence for the novel. As Fabe describes in a preface, Austen \u201cwas a little bit mean\u2014the way real people are mean\u2014so there are both heroes and nincompoops. Family is both beloved and annoying. That is Austen\u2019s genius, her ability to describe people in all their frailty and humor.\u201d This is a sweet and visually appealing homage.

", "source": "\"Publisher's", "premium": true}], "crossRefId": 479501, "isPublicPerformanceAllowed": false, "mediaType": "Audiobook", "starRating": 4.5, "languages": [{"code": "en", "name": "English"}], "edition": "Unabridged", "isOwnedByCollections": true, "formats": [{"duration": "10:09:00", "partCount": 11, "rights": [{"type": "PlayOnPC", "value": 1}, {"type": "PlayOnPCCount", "value": -1}, {"type": "BurnToCD", "value": 1}, {"type": "BurnToCDCount", "value": -1}, {"type": "PlayOnPM", "value": 1}, {"type": "TransferToSDMI", "value": 1}, {"type": "TransferToNonSDMI", "value": 1}, {"type": "TransferCount", "value": -1}, {"type": "CollaborativePlay", "value": 0}, {"type": "PublicPerformance", "value": 0}, {"type": "TranscodeToAAC", "value": 1}], "fileName": "PrideandPrejudice", "fileSize": 355553350, "samples": [{"url": "https://excerpts.cdn.overdrive.com/FormatType-25/2389-1/479501-PrideAndPrejudice.wma", "source": "Introduction", "formatType": "audiobook-wma"}, {"url": "https://excerpts.cdn.overdrive.com/FormatType-425/2389-1/479501-PrideAndPrejudice.mp3", "source": "Part 1", "formatType": "audiobook-mp3"}, {"url": "https://samples.overdrive.com/?crid=13714F86-7A9C-4BA6-80B4-BC8A83DE8615&.epub-sample.overdrive.com", "source": "Part 1", "formatType": "audiobook-overdrive"}], "onSaleDate": "11/15/2010", "id": "audiobook-mp3", "name": "OverDrive MP3 Audiobook"}, {"duration": "10:09:01", "partCount": 0, "fileName": "PrideandPrejudice", "fileSize": 355506882, "samples": [{"url": "https://excerpts.cdn.overdrive.com/FormatType-25/2389-1/479501-PrideAndPrejudice.wma", "source": "Introduction", "formatType": "audiobook-wma"}, {"url": "https://excerpts.cdn.overdrive.com/FormatType-425/2389-1/479501-PrideAndPrejudice.mp3", "source": "Part 1", "formatType": "audiobook-mp3"}, {"url": "https://samples.overdrive.com/?crid=13714F86-7A9C-4BA6-80B4-BC8A83DE8615&.epub-sample.overdrive.com", "source": "Part 1", "formatType": "audiobook-overdrive"}], "onSaleDate": "11/15/2010", "id": "audiobook-overdrive", "name": "OverDrive Listen"}], "sortTitle": "Pride and Prejudice", "images": {"cover150Wide": {"href": "https://img1.od-cdn.com/ImageType-150/2389-1/137/14F/86/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img150.jpg", "type": "image/jpeg"}, "cover300Wide": {"href": "https://img1.od-cdn.com/ImageType-400/2389-1/137/14F/86/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img400.jpg", "type": "image/jpeg"}, "cover": {"href": "https://img1.od-cdn.com/ImageType-100/2389-1/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img100.jpg", "type": "image/jpeg"}, "thumbnail": {"href": "https://img1.od-cdn.com/ImageType-200/2389-1/{13714F86-7A9C-4BA6-80B4-BC8A83DE8615}Img200.jpg", "type": "image/jpeg"}}, "isPublicDomain": false, "subjects": [{"value": "Classic Literature"}, {"value": "Fiction"}], "creators": [{"fileAs": "Austen, Jane", "role": "Author", "name": "Jane Austen"}, {"fileAs": "Kellgren, Kate", "role": "Narrator", "name": "Kate Kellgren"}], "shortDescription": "

Pride and Prejudice is one of the most beloved novels of all time. Elizabeth Bennett, the book's poverty-bound heroine has charmed readers with her wit and sincerity since the book was first published in 1813. Its hero, the handsome and wealthy Mr. Darcy, infuriates Elizabeth \u2014 and women readers around the world \u2014 with his rude arrogance, but all fall in love with him anyway.

A comedy of manners, romance, and neighborhood drama \u2014 it's all there and more.", "id": "13714f86-7a9c-4ba6-80b4-bc8a83de8615"} diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index c665e6af37..21fd291ea8 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -3598,6 +3598,16 @@ def test_audiobook_info(self, overdrive_api_fixture: OverdriveAPIFixture): MediaTypes.OVERDRIVE_AUDIOBOOK_MANIFEST_MEDIA_TYPE == manifest.content_type ) assert "application/x-od-media" == legacy.content_type + assert ( + metadata.duration == 10 * 3600 + 9 * 60 + 1 + ) # The last formats' duration attribute + + # The last format will be invalid, so only the first format should work + info["formats"][1]["duration"] = "10:09" # Invalid format + metadata = OverdriveRepresentationExtractor.book_info_to_metadata(info) + assert ( + metadata.duration == 10 * 3600 + 9 * 60 + 0 + ) # The first formats' duration attribute def test_book_info_with_sample(self, overdrive_api_fixture: OverdriveAPIFixture): # This book has two samples; one available as a direct download and From 6c70cf19454cf12c5a275a05aa57fa01677ea6fd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 1 Nov 2023 23:24:28 +0000 Subject: [PATCH 148/262] Bump uwsgi from 2.0.22 to 2.0.23 (#1493) --- poetry.lock | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 66c1f94fac..e56b424f42 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2815,6 +2815,7 @@ files = [ {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, + {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, @@ -4292,12 +4293,12 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "uwsgi" -version = "2.0.22" +version = "2.0.23" description = "The uWSGI server" optional = false python-versions = "*" files = [ - {file = "uwsgi-2.0.22.tar.gz", hash = "sha256:4cc4727258671ac5fa17ab422155e9aaef8a2008ebb86e4404b66deaae965db2"}, + {file = "uwsgi-2.0.23.tar.gz", hash = "sha256:0cafda0c16f921db7fe42cfaf81b167cf884ee17350efbdd87d1ecece2d7de37"}, ] [[package]] From 3c209e6bc0243b8a8605994265c6b3ab5f2fb44f Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Thu, 2 Nov 2023 11:55:43 -0400 Subject: [PATCH 149/262] Run `cache_marc_files` later to spread load on OpenSearch cluster. 
(#1496) --- docker/services/cron/cron.d/circulation | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index cea2902683..e1dcc09fbc 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -42,7 +42,7 @@ HOME=/var/www/circulation 0 0 * * 0 root core/bin/run -d 60 novelist_update >> /var/log/cron.log 2>&1 # Generate MARC files for libraries that have a MARC exporter configured. -0 1 * * * root core/bin/run cache_marc_files >> /var/log/cron.log 2>&1 +0 3 * * * root core/bin/run cache_marc_files >> /var/log/cron.log 2>&1 # The remaining scripts keep the circulation manager in sync with # specific types of collections. From 7e163465a4969661d3f16559951f465c63fb1074 Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Thu, 2 Nov 2023 11:56:10 -0400 Subject: [PATCH 150/262] Run loan notifications script only once daily. (#1497) --- docker/services/cron/cron.d/circulation | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index e1dcc09fbc..05e707dd3a 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -106,7 +106,7 @@ HOME=/var/www/circulation # Notifications # -10 */2 * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 +10 3 * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 15 */2 * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 0 1 * * * root core/bin/run patron_activity_sync_notifications >> /var/log/cron.log 2>&1 From 500064baf981a02119552333d28adfaaf96b7e86 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Fri, 3 Nov 2023 17:56:16 +0530 Subject: [PATCH 151/262] Added a db commit to the loan notifications loop (#1499) Without this commit the patron_last_notified changes are lost --- core/scripts.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/core/scripts.py b/core/scripts.py index 1aa6a6a78e..e5d4f4ab6f 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -2778,6 +2778,8 @@ def do_run(self): processed_loans += 1 self.process_loan(loan) last_loan_id = loan.id + # Commit every batch + self._db.commit() self.log.info( f"Loan Notifications Job ended: {processed_loans} loans processed" From 93928ee409e6b0bd0e5524615b0a68d92cbd901a Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Mon, 6 Nov 2023 19:40:40 +0530 Subject: [PATCH 152/262] PP-495 Analytics as a container service (#1487) * Analytics as a service through dependency injection Analytics is no more a singleton, and is not library dependant Analytics will be configured through env vars now and not the admin ui * Removed the admin UI analytics service controller The GET method returns a problem detail for the UI to display * Added an analytics container with a singleton so the analytics object is created only once * Switched to the injection pattern for CirculationManager load_settings * Removed the analytics requirement from the licensing methods * Inject s3 service directly in the the analytics provider * Removed location_source and adjoining logic * Moved away from container_instance() towards injection * Removed the admin analytics route * Injected analytics into the overdrive new title script * Removed the Services container from the CirculationManager Since we inject the Analytics provider 
directly, this is unneeded --- README.md | 8 +- api/admin/controller/__init__.py | 2 - api/admin/controller/analytics_services.py | 126 ------ api/admin/controller/base.py | 8 - api/admin/controller/view.py | 12 +- api/admin/routes.py | 16 - api/app.py | 7 +- api/axis.py | 8 +- api/bibliotheca.py | 15 +- api/circulation.py | 15 +- api/controller.py | 16 +- api/discovery/registration_script.py | 2 +- api/enki.py | 6 +- api/odl.py | 14 +- api/overdrive.py | 11 +- api/s3_analytics_provider.py | 51 +-- core/analytics.py | 136 +----- core/feed/annotator/circulation.py | 7 +- core/local_analytics_provider.py | 91 +--- core/metadata_layer.py | 27 +- core/model/licensing.py | 7 - core/service/analytics/configuration.py | 5 + core/service/analytics/container.py | 15 + core/service/configuration.py | 2 + core/service/container.py | 21 + core/util/http.py | 1 - .../controller/test_analytics_services.py | 398 ------------------ tests/api/admin/test_routes.py | 23 - tests/api/conftest.py | 1 + tests/api/feed/test_library_annotator.py | 14 +- tests/api/mockapi/circulation.py | 12 +- tests/api/test_bibliotheca.py | 63 ++- tests/api/test_controller_analytics.py | 49 +-- tests/api/test_controller_cm.py | 2 +- tests/api/test_controller_loan.py | 4 +- tests/api/test_controller_opdsfeed.py | 2 +- tests/api/test_overdrive.py | 25 +- tests/core/conftest.py | 1 + tests/core/test_analytics.py | 184 +------- tests/core/test_local_analytics_provider.py | 131 +----- tests/core/test_metadata.py | 67 --- tests/core/test_s3_analytics_provider.py | 20 +- tests/fixtures/container.py | 9 + tests/fixtures/database.py | 4 - 44 files changed, 278 insertions(+), 1360 deletions(-) delete mode 100644 api/admin/controller/analytics_services.py create mode 100644 core/service/analytics/configuration.py create mode 100644 core/service/analytics/container.py delete mode 100644 tests/api/admin/controller/test_analytics_services.py create mode 100644 tests/fixtures/container.py diff --git a/README.md b/README.md index a5eb443339..63cc4f36d1 100644 --- a/README.md +++ b/README.md @@ -244,7 +244,7 @@ export SIMPLIFIED_FCM_CREDENTIALS_FILE="/opt/credentials/fcm_credentials.json" The FCM credentials can be downloaded once a Google Service account has been created. More details in the [FCM documentation](https://firebase.google.com/docs/admin/setup#set-up-project-and-service-account) -##### Quicksight Dashboards +#### Quicksight Dashboards For generating quicksight dashboard links the following environment variable is required `QUICKSIGHT_AUTHORIZED_ARNS` - A dictionary of the format `"": ["arn:aws:quicksight:...",...]` @@ -252,6 +252,12 @@ where each quicksight dashboard gets treated with an arbitrary "name", and a lis The first the "authorized arns" is always considered as the `InitialDashboardID` when creating an embed URL for the respective "dashboard name". +#### Analytics + +Local analytics are enabled by default. S3 analytics can be enabled via the following environment variable: + +- PALACE_S3_ANALYTICS_ENABLED: A boolean value to disable or enable s3 analytics. The default is false. 
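As with the other environment variables documented above, this can be set in the environment of the process running the application; for example (the value shown is illustrative, not a default):

```sh
# Turn on the S3 analytics provider in addition to the default local analytics.
export PALACE_S3_ANALYTICS_ENABLED=true
```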
+ #### Email ### Email sending diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index cb7b581a11..0579f5f545 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -11,7 +11,6 @@ def setup_admin_controllers(manager: CirculationManager): """Set up all the controllers that will be used by the admin parts of the web app.""" from api.admin.controller.admin_search import AdminSearchController - from api.admin.controller.analytics_services import AnalyticsServicesController from api.admin.controller.announcement_service import AnnouncementSettings from api.admin.controller.catalog_services import CatalogServicesController from api.admin.controller.collection_self_tests import CollectionSelfTestsController @@ -71,7 +70,6 @@ def setup_admin_controllers(manager: CirculationManager): manager.admin_discovery_service_library_registrations_controller = ( DiscoveryServiceLibraryRegistrationsController(manager) ) - manager.admin_analytics_services_controller = AnalyticsServicesController(manager) manager.admin_metadata_services_controller = MetadataServicesController(manager) manager.admin_metadata_service_self_tests_controller = ( MetadataServiceSelfTestsController(manager) diff --git a/api/admin/controller/analytics_services.py b/api/admin/controller/analytics_services.py deleted file mode 100644 index 6a9dec59a9..0000000000 --- a/api/admin/controller/analytics_services.py +++ /dev/null @@ -1,126 +0,0 @@ -import flask -from flask import Response - -from api.admin.controller.settings import SettingsController -from api.admin.problem_details import INCOMPLETE_CONFIGURATION, MISSING_ANALYTICS_NAME -from api.s3_analytics_provider import S3AnalyticsProvider -from core.local_analytics_provider import LocalAnalyticsProvider -from core.model import ExternalIntegration -from core.util import first_or_default -from core.util.problem_detail import ProblemDetail - - -class AnalyticsServicesController(SettingsController): - def __init__(self, manager): - super().__init__(manager) - provider_apis = [ - LocalAnalyticsProvider, - S3AnalyticsProvider, - ] - self.protocols = self._get_integration_protocols(provider_apis) - self.goal = ExternalIntegration.ANALYTICS_GOAL - - def update_protocol_settings(self): - """Update configuration settings of the providers.""" - s3_analytics_provider = first_or_default( - [ - protocol - for protocol in self.protocols - if protocol["name"] == S3AnalyticsProvider.__module__ - ] - ) - - def process_analytics_services(self): - if flask.request.method == "GET": - return self.process_get() - else: - return self.process_post() - - def process_get(self): - if flask.request.method == "GET": - services = self._get_integration_info(self.goal, self.protocols) - - self.update_protocol_settings() - - # Librarians should be able to see, but not modify local analytics services. - # Setting the level to 2 will communicate that to the front end. - for x in services: - if x["protocol"] == "core.local_analytics_provider": - x["level"] = 2 - return dict( - analytics_services=services, - protocols=self.protocols, - ) - - def process_post(self): - name = flask.request.form.get("name") - protocol = flask.request.form.get("protocol") - url = flask.request.form.get("url") - fields = {"name": name, "protocol": protocol, "url": url} - - # Don't let librarians create local analytics services. 
- if protocol == "core.local_analytics_provider": - self.require_higher_than_librarian() - - form_field_error = self.validate_form_fields(**fields) - if form_field_error: - return form_field_error - - is_new = False - id = flask.request.form.get("id") - - if id: - # Find an existing service in order to edit it - service = self.look_up_service_by_id(id, protocol) - else: - service, is_new = self._create_integration( - self.protocols, protocol, self.goal - ) - - if isinstance(service, ProblemDetail): - self._db.rollback() - return service - - name_error = self.check_name_unique(service, name) - if name_error: - self._db.rollback() - return name_error - - protocol_error = self.set_protocols(service, protocol) - if protocol_error: - self._db.rollback() - return protocol_error - - service.name = name - - if is_new: - return Response(str(service.id), 201) - else: - return Response(str(service.id), 200) - - def validate_form_fields(self, **fields): - """The 'name' and 'URL' fields cannot be blank, the URL must be valid, - and the protocol must be selected from the list of recognized protocols.""" - - name = fields.get("name") - protocol = fields.get("protocol") - url = fields.get("url") - - if not name: - return MISSING_ANALYTICS_NAME - if protocol: - error = self.validate_protocol() - if error: - return error - else: - wrong_format = self.validate_formats() - if wrong_format: - return wrong_format - - # The URL is only relevant, and required, if the user is creating a Google Analytics - # integration; the local analytics form doesn't have a URL field. - if "url" in list(flask.request.form.keys()) and not url: - return INCOMPLETE_CONFIGURATION - - def process_delete(self, service_id): - return self._delete_integration(service_id, self.goal) diff --git a/api/admin/controller/base.py b/api/admin/controller/base.py index d7c1569340..fe9f496bd7 100644 --- a/api/admin/controller/base.py +++ b/api/admin/controller/base.py @@ -149,11 +149,3 @@ def require_librarian(self, library): admin = getattr(flask.request, "admin", None) if not admin or not admin.is_librarian(library): raise AdminNotAuthorized() - - def require_higher_than_librarian(self): - # A quick way to check the admin's permissions level without needing to already know the library; - # used as a fail-safe in AnalyticsServicesController.process_post in case a librarian somehow manages - # to submit a Local Analytics form despite the checks on the front end. 
- admin = getattr(flask.request, "admin", None) - if not admin or not admin.roles or admin.roles[0].role == "librarian": - raise AdminNotAuthorized() diff --git a/api/admin/controller/view.py b/api/admin/controller/view.py index 6683ee8e74..6dbacc86ab 100644 --- a/api/admin/controller/view.py +++ b/api/admin/controller/view.py @@ -10,8 +10,7 @@ from api.admin.controller.base import AdminController from api.admin.templates import admin as admin_template from api.config import Configuration -from core.local_analytics_provider import LocalAnalyticsProvider -from core.model import ConfigurationSetting, ExternalIntegration, Library, get_one +from core.model import ConfigurationSetting, Library from core.util.problem_detail import ProblemDetail @@ -80,13 +79,8 @@ def __call__(self, collection, book, path=None): or Configuration.DEFAULT_TOS_TEXT ) - local_analytics = get_one( - self._db, - ExternalIntegration, - protocol=LocalAnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - show_circ_events_download = local_analytics != None + # We always have local_analytics + show_circ_events_download = True response = Response( flask.render_template_string( diff --git a/api/admin/routes.py b/api/admin/routes.py index c334bdb32a..d9de20fc3a 100644 --- a/api/admin/routes.py +++ b/api/admin/routes.py @@ -484,22 +484,6 @@ def metadata_service_self_tests(identifier): ) -@app.route("/admin/analytics_services", methods=["GET", "POST"]) -@returns_json_or_response_or_problem_detail -@requires_admin -@requires_csrf_token -def analytics_services(): - return app.manager.admin_analytics_services_controller.process_analytics_services() - - -@app.route("/admin/analytics_service/", methods=["DELETE"]) -@returns_json_or_response_or_problem_detail -@requires_admin -@requires_csrf_token -def analytics_service(service_id): - return app.manager.admin_analytics_services_controller.process_delete(service_id) - - @app.route("/admin/search_services", methods=["GET", "POST"]) @returns_json_or_response_or_problem_detail @requires_admin diff --git a/api/app.py b/api/app.py index 90c6a45a28..320604f429 100644 --- a/api/app.py +++ b/api/app.py @@ -18,7 +18,6 @@ ) from core.app_server import ErrorHandler from core.flask_sqlalchemy_session import flask_scoped_session -from core.local_analytics_provider import LocalAnalyticsProvider from core.model import ( LOCK_ID_APP_INIT, ConfigurationSetting, @@ -68,9 +67,6 @@ def initialize_admin(_db=None): _db = _db or app._db # The secret key is used for signing cookies for admin login app.secret_key = ConfigurationSetting.sitewide_secret(_db, Configuration.SECRET_KEY) - # Create a default Local Analytics service if one does not - # already exist. 
- LocalAnalyticsProvider.initialize(_db) def initialize_circulation_manager(container: Services): @@ -81,7 +77,7 @@ def initialize_circulation_manager(container: Services): else: if getattr(app, "manager", None) is None: try: - app.manager = CirculationManager(app._db, container) + app.manager = CirculationManager(app._db) except Exception: logging.exception("Error instantiating circulation manager!") raise @@ -155,7 +151,6 @@ def run(url=None): # Setup database by initializing it or running migrations InstanceInitializationScript().run() - initialize_application() logging.info("Starting app on %s:%s", host, port) diff --git a/api/axis.py b/api/axis.py index dc3b3e5cbf..04bd50f3e1 100644 --- a/api/axis.py +++ b/api/axis.py @@ -29,6 +29,7 @@ from urllib.parse import urlparse import certifi +from dependency_injector.wiring import Provide, inject from flask_babel import lazy_gettext as _ from lxml import etree from lxml.etree import _Element @@ -89,6 +90,7 @@ Subject, ) from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor +from core.service.container import Services from core.util.datetime_helpers import datetime_utc, strptime_utc, utc_now from core.util.flask_util import Response from core.util.http import HTTP, RequestNetworkException @@ -596,11 +598,12 @@ def update_licensepools_for_identifiers( for removed_identifier in remainder: self._reap(removed_identifier) + @inject def update_book( self, bibliographic: Metadata, availability: CirculationData, - analytics: Optional[Analytics] = None, + analytics: Analytics = Provide[Services.analytics.analytics], ) -> Tuple[Edition, bool, LicensePool, bool]: """Create or update a single book based on bibliographic and availability data from the Axis 360 API. @@ -610,9 +613,8 @@ def update_book( :param availability: A CirculationData object containing availability data about this title. 
""" - analytics = analytics or Analytics(self._db) license_pool, new_license_pool = availability.license_pool( - self._db, self.collection, analytics + self._db, self.collection ) edition, new_edition = bibliographic.edition(self._db) license_pool.edition = edition diff --git a/api/bibliotheca.py b/api/bibliotheca.py index 64e81448bd..984c7f39d3 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -15,6 +15,7 @@ from typing import Dict, Generator, List, Tuple, Type, TypeVar, Union import dateutil.parser +from dependency_injector.wiring import Provide, inject from flask_babel import lazy_gettext as _ from lxml.etree import _Element from pymarc import parse_xml_to_array @@ -74,6 +75,7 @@ from core.model.configuration import ConfigurationAttributeValue from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor from core.scripts import RunCollectionMonitorScript +from core.service.container import Services from core.util.datetime_helpers import datetime_utc, strptime_utc, to_utc, utc_now from core.util.http import HTTP from core.util.string_helpers import base64 @@ -1268,7 +1270,7 @@ def process_items(self, identifiers): continue if pool.licenses_owned > 0: self.log.warn("Removing %s from circulation.", identifier.identifier) - pool.update_availability(0, 0, 0, 0, self.analytics, as_of=now) + pool.update_availability(0, 0, 0, 0, as_of=now) def _process_metadata( self, @@ -1299,7 +1301,14 @@ class BibliothecaTimelineMonitor(CollectionMonitor, TimelineMonitor): PROTOCOL = ExternalIntegration.BIBLIOTHECA LOG_DATE_FORMAT = "%Y-%m-%dT%H:%M:%S" - def __init__(self, _db, collection, api_class=BibliothecaAPI, analytics=None): + @inject + def __init__( + self, + _db, + collection, + api_class=BibliothecaAPI, + analytics: Analytics = Provide[Services.analytics.analytics], + ): """Initializer. :param _db: Database session object. @@ -1312,7 +1321,7 @@ def __init__(self, _db, collection, api_class=BibliothecaAPI, analytics=None): :param analytics: An optional Analytics object. :type analytics: Optional[Analytics] """ - self.analytics = analytics or Analytics(_db) + self.analytics = analytics super().__init__(_db, collection) if isinstance(api_class, BibliothecaAPI): # We were given an actual API object. Just use it. 
diff --git a/api/circulation.py b/api/circulation.py index 92c8179b52..0ea18556f2 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -7,17 +7,7 @@ from abc import ABC, abstractmethod from threading import Thread from types import TracebackType -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Iterable, - List, - Literal, - Tuple, - Type, - TypeVar, -) +from typing import Any, Dict, Iterable, List, Literal, Tuple, Type, TypeVar import flask from flask import Response @@ -58,9 +48,6 @@ from core.util.datetime_helpers import utc_now from core.util.log import LoggerMixin -if TYPE_CHECKING: - pass - class CirculationInfo: def __init__( diff --git a/api/controller.py b/api/controller.py index add2d4ddc6..fa5b5ea7e3 100644 --- a/api/controller.py +++ b/api/controller.py @@ -13,6 +13,7 @@ import flask import pytz from attr import define +from dependency_injector.wiring import Provide, inject from expiringdict import ExpiringDict from flask import Response, make_response, redirect from flask_babel import lazy_gettext as _ @@ -111,7 +112,6 @@ from werkzeug import Response as wkResponse from api.admin.controller.admin_search import AdminSearchController - from api.admin.controller.analytics_services import AnalyticsServicesController from api.admin.controller.announcement_service import AnnouncementSettings from api.admin.controller.catalog_services import CatalogServicesController from api.admin.controller.collection_self_tests import CollectionSelfTestsController @@ -190,7 +190,6 @@ class CirculationManager: admin_self_tests_controller: SelfTestsController admin_discovery_services_controller: DiscoveryServicesController admin_discovery_service_library_registrations_controller: DiscoveryServiceLibraryRegistrationsController - admin_analytics_services_controller: AnalyticsServicesController admin_metadata_services_controller: MetadataServicesController admin_metadata_service_self_tests_controller: MetadataServiceSelfTestsController admin_patron_auth_services_controller: PatronAuthServicesController @@ -209,9 +208,14 @@ class CirculationManager: admin_view_controller: ViewController admin_quicksight_controller: QuickSightController - def __init__(self, _db, services: Services): + @inject + def __init__( + self, + _db, + analytics: Analytics = Provide[Services.analytics.analytics], + ): self._db = _db - self.services = services + self.analytics = analytics self.site_configuration_last_update = ( Configuration.site_configuration_last_update(self._db, timeout=0) ) @@ -260,8 +264,6 @@ def load_settings(self): configuration after changes are made in the administrative interface. """ - self.analytics = Analytics(self._db, refresh=True) - with elapsed_time_logging( log_method=self.log.debug, skip_start=True, @@ -391,7 +393,7 @@ def setup_search(self): def setup_circulation(self, library, analytics): """Set up the Circulation object.""" - return CirculationAPI(self._db, library, analytics) + return CirculationAPI(self._db, library, analytics=analytics) def setup_one_time_controllers(self): """Set up all the controllers that will be used by the web app. diff --git a/api/discovery/registration_script.py b/api/discovery/registration_script.py index 30c1bfd7c5..4d75cd7b5f 100644 --- a/api/discovery/registration_script.py +++ b/api/discovery/registration_script.py @@ -67,7 +67,7 @@ def do_run( # Set up an application context so we have access to url_for. 
from api.app import app - app.manager = manager or CirculationManager(self._db, self.services) + app.manager = manager or CirculationManager(self._db) base_url = ConfigurationSetting.sitewide( self._db, Configuration.BASE_URL_KEY ).value diff --git a/api/enki.py b/api/enki.py index bf8137dfac..9f663230f6 100644 --- a/api/enki.py +++ b/api/enki.py @@ -6,6 +6,7 @@ import time from typing import Any, Callable, Generator, Mapping, Tuple, cast +from dependency_injector.wiring import Provide from flask_babel import lazy_gettext as _ from pydantic import HttpUrl from requests import Response as RequestsResponse @@ -55,6 +56,7 @@ ) from core.model.configuration import ConfigurationAttributeValue from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor +from core.service.container import Services from core.util.datetime_helpers import from_timestamp, strptime_utc, utc_now from core.util.http import HTTP, RemoteIntegrationException, RequestTimedOut @@ -787,7 +789,7 @@ def __init__( _db: Session, collection: Collection, api_class: EnkiAPI | Callable[..., EnkiAPI] = EnkiAPI, - analytics: Optional[Analytics] = None, + analytics: Analytics = Provide[Services.analytics.analytics], ): """Constructor.""" super().__init__(_db, collection) @@ -798,7 +800,7 @@ def __init__( api = api_class self.api = api self.collection_id = collection.id - self.analytics = analytics or Analytics(_db) + self.analytics = analytics @property def collection(self) -> Collection | None: diff --git a/api/odl.py b/api/odl.py index 289ba2532b..36dd99ed44 100644 --- a/api/odl.py +++ b/api/odl.py @@ -8,6 +8,7 @@ from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, TypeVar import dateutil +from dependency_injector.wiring import Provide, inject from flask import url_for from flask_babel import lazy_gettext as _ from lxml.etree import Element @@ -27,7 +28,6 @@ from api.circulation_exceptions import * from api.lcp.hash import Hasher, HasherFactory, HashingAlgorithm from core import util -from core.analytics import Analytics from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, @@ -67,6 +67,7 @@ OPDSImportMonitor, OPDSXMLParser, ) +from core.service.container import Services from core.util.datetime_helpers import to_utc, utc_now from core.util.http import HTTP, BadResponseException from core.util.string_helpers import base64 @@ -200,7 +201,13 @@ class BaseODLAPI(PatronActivityCirculationAPI[SettingsType, LibrarySettingsType] EXPIRED_STATUS, ] - def __init__(self, _db: Session, collection: Collection) -> None: + @inject + def __init__( + self, + _db: Session, + collection: Collection, + analytics: Any = Provide[Services.analytics.analytics], + ) -> None: super().__init__(_db, collection) if collection.protocol != self.label(): raise ValueError( @@ -215,7 +222,7 @@ def __init__(self, _db: Session, collection: Collection) -> None: self.username = settings.username self.password = settings.password - self.analytics = Analytics(_db) + self.analytics = analytics self._hasher_factory = HasherFactory() self._credential_factory = LCPCredentialFactory() @@ -721,7 +728,6 @@ def _update_hold_position(self, holdinfo: HoldInfo, pool: LicensePool) -> None: def update_licensepool(self, licensepool: LicensePool) -> None: # Update the pool and the next holds in the queue when a license is reserved. 
licensepool.update_availability_from_licenses( - analytics=self.analytics, as_of=utc_now(), ) holds = licensepool.get_active_holds() diff --git a/api/overdrive.py b/api/overdrive.py index 5b14468cfb..aa06ae8879 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -15,6 +15,7 @@ import dateutil import flask import isbnlib +from dependency_injector.wiring import Provide, inject from flask_babel import lazy_gettext as _ from requests import Response from requests.structures import CaseInsensitiveDict @@ -80,6 +81,7 @@ ) from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor from core.scripts import InputScript, Script +from core.service.container import Services from core.util.datetime_helpers import strptime_utc, utc_now from core.util.http import HTTP, BadResponseException from core.util.log import LoggerMixin @@ -1974,13 +1976,18 @@ class OverdriveCirculationMonitor(CollectionMonitor, TimelineMonitor): PROTOCOL = ExternalIntegration.OVERDRIVE OVERLAP = datetime.timedelta(minutes=1) + @inject def __init__( - self, _db, collection, api_class=OverdriveAPI, analytics_class=Analytics + self, + _db, + collection, + api_class=OverdriveAPI, + analytics: Analytics = Provide[Services.analytics.analytics], ): """Constructor.""" super().__init__(_db, collection) self.api = api_class(_db, collection) - self.analytics = analytics_class(_db) + self.analytics = analytics def recently_changed_ids(self, start, cutoff): return self.api.recently_changed_ids(start, cutoff) diff --git a/api/s3_analytics_provider.py b/api/s3_analytics_provider.py index 4a954cc736..e604a9a6cf 100644 --- a/api/s3_analytics_provider.py +++ b/api/s3_analytics_provider.py @@ -1,34 +1,24 @@ +from __future__ import annotations + import datetime import json import random import string -from typing import Dict, Optional - -from flask_babel import lazy_gettext as _ -from sqlalchemy.orm import Session +from typing import TYPE_CHECKING, Dict, Optional from core.config import CannotLoadConfiguration from core.local_analytics_provider import LocalAnalyticsProvider from core.model import Library, LicensePool, MediaTypes -from core.service.container import Services -from core.service.storage.s3 import S3Service + +if TYPE_CHECKING: + from core.service.storage.s3 import S3Service class S3AnalyticsProvider(LocalAnalyticsProvider): """Analytics provider storing data in a S3 bucket.""" - NAME = _("S3 Analytics") - DESCRIPTION = _("Store analytics events in a S3 bucket.") - - SETTINGS = LocalAnalyticsProvider.SETTINGS - - def __init__( - self, - integration, - services: Services, - library=None, - ): - super().__init__(integration, services, library) + def __init__(self, s3_service: Optional[S3Service]): + self.s3_service = s3_service @staticmethod def _create_event_object( @@ -38,7 +28,7 @@ def _create_event_object( time: datetime.datetime, old_value, new_value, - neighborhood: str, + neighborhood: Optional[str] = None, ) -> Dict: """Create a Python dict containing required information about the event. @@ -146,7 +136,7 @@ def collect_event( time, old_value=None, new_value=None, - **kwargs + **kwargs, ): """Log the event using the appropriate for the specific provider's mechanism. 
@@ -177,25 +167,16 @@ def collect_event( if not library and not license_pool: raise ValueError("Either library or license_pool must be provided.") - if library: - _db = Session.object_session(library) - else: - _db = Session.object_session(license_pool) - if library and self.library_id and library.id != self.library_id: - return - - neighborhood = None - if self.location_source == self.LOCATION_SOURCE_NEIGHBORHOOD: - neighborhood = kwargs.pop("neighborhood", None) event = self._create_event_object( - library, license_pool, event_type, time, old_value, new_value, neighborhood + library, license_pool, event_type, time, old_value, new_value ) content = json.dumps( event, default=str, ensure_ascii=True, ) + storage = self._get_storage() analytics_file_key = self._get_file_key(library, license_pool, event_type, time) @@ -242,13 +223,9 @@ def _get_storage(self) -> S3Service: :return: StorageServiceBase object """ - s3_storage_service = self.services.storage.analytics() - if s3_storage_service is None: + if self.s3_service is None: raise CannotLoadConfiguration( "No storage service is configured with an analytics bucket." ) - return s3_storage_service - - -Provider = S3AnalyticsProvider + return self.s3_service diff --git a/core/analytics.py b/core/analytics.py index ca288b9291..ab41fd20ab 100644 --- a/core/analytics.py +++ b/core/analytics.py @@ -1,130 +1,40 @@ from __future__ import annotations -import importlib -import logging -from collections import defaultdict -from typing import Any, Dict, Optional, Set +from typing import TYPE_CHECKING, Optional -from sqlalchemy.orm.session import Session - -from core.config import CannotLoadConfiguration -from core.model import ExternalIntegration -from core.service.container import container_instance +from api.s3_analytics_provider import S3AnalyticsProvider +from core.local_analytics_provider import LocalAnalyticsProvider from core.util.datetime_helpers import utc_now -from core.util.log import log_elapsed_time - - -class Analytics: - """Loads configuration and dispatches methods for analytics providers. - - SINGLETON!! Only one instance is meant to exist at any given time. - - Configuration is loaded only on the first instantiation or when - `refresh=True` is passed in to facilitate reload. - """ +from core.util.log import LoggerMixin - _singleton_instance = None - log = logging.getLogger("core.analytics.Analytics") +if TYPE_CHECKING: + from core.service.storage.s3 import S3Service - GLOBAL_ENABLED: Optional[bool] = None - LIBRARY_ENABLED: Set[int] = set() - def __new__(cls, _db: Session, refresh: bool = False) -> Analytics: - instance = cls._singleton_instance - if instance is None: - refresh = True - instance = super().__new__(cls) - cls._singleton_instance = instance - cls.log.debug("Set singleton instance.") - if refresh: - instance._initialize_instance(_db) - return instance +class Analytics(LoggerMixin): + """Dispatches methods for analytics providers.""" - @classmethod - def _reset_singleton_instance(cls): - """Reset the singleton instance. Primarily used for tests.""" - cls.log.debug("Resetting singleton instance (should be used only for tests).") - cls._singleton_instance = None + def __init__( + self, + s3_analytics_enabled: bool = False, + s3_service: Optional[S3Service] = None, + ) -> None: + self.providers = [LocalAnalyticsProvider()] - @log_elapsed_time(log_method=log.debug, message_prefix="Initializing instance") - def _initialize_instance(self, _db: Session) -> None: - """Initialize an instance (usually the singleton) of the class. 
- - We don't use __init__ because it would be run whether or not - a new instance were instantiated. - """ - services = container_instance() - sitewide_providers = [] - library_providers = defaultdict(list) - initialization_exceptions: Dict[int, Exception | str] = {} - global_enabled = False - library_enabled = set() - # Find a list of all the ExternalIntegrations set up with a - # goal of analytics. - integrations = _db.query(ExternalIntegration).filter( - ExternalIntegration.goal == ExternalIntegration.ANALYTICS_GOAL - ) - # Turn each integration into an analytics provider. - for integration in integrations: - module = integration.protocol - libraries = integration.libraries - try: - provider_class = self._provider_class_from_module(module) - if provider_class: - if not libraries: - provider = provider_class(integration, services) - sitewide_providers.append(provider) - global_enabled = True - else: - for library in libraries: - provider = provider_class(integration, services, library) - library_providers[library.id].append(provider) - library_enabled.add(library.id) - else: - initialization_exceptions[integration.id] = ( - "Module %s does not have Provider defined." % module - ) + if s3_analytics_enabled: + if s3_service is not None: + self.providers.append(S3AnalyticsProvider(s3_service)) + else: self.log.info( - "Provider {provider!r} for protocol {protocol!r} has {scope} scope.".format( - protocol=module, - provider=provider_class.__name__, - scope=f"per-library ({len(libraries)})" - if libraries - else "site-wide", - ) + "S3 analytics is not configured: No analytics bucket was specified." ) - except (ImportError, CannotLoadConfiguration) as e: - initialization_exceptions[integration.id] = e - - # update the instance variables all at once - self.sitewide_providers = sitewide_providers - self.library_providers = library_providers - self.initialization_exceptions = initialization_exceptions - Analytics.GLOBAL_ENABLED = global_enabled - Analytics.LIBRARY_ENABLED = library_enabled - - @classmethod - def _provider_class_from_module(cls, module: str) -> Any: - # Relative imports, which should be configured only during testing, are - # relative to this module. sys.path will handle the absolute imports. 
- import_kwargs = {"package": __name__} if module.startswith(".") else {} - provider_module = importlib.import_module(module, **import_kwargs) - return getattr(provider_module, "Provider", None) def collect_event(self, library, license_pool, event_type, time=None, **kwargs): if not time: time = utc_now() - providers = list(self.sitewide_providers) - if library: - providers.extend(self.library_providers[library.id]) - for provider in providers: + + for provider in self.providers: provider.collect_event(library, license_pool, event_type, time, **kwargs) - @classmethod - def is_configured(cls, library): - if cls.GLOBAL_ENABLED is None: - Analytics(Session.object_session(library)) - if cls.GLOBAL_ENABLED: - return True - else: - return library.id in cls.LIBRARY_ENABLED + def is_configured(self): + return len(self.providers) > 0 diff --git a/core/feed/annotator/circulation.py b/core/feed/annotator/circulation.py index d04465f1c9..e488b01c43 100644 --- a/core/feed/annotator/circulation.py +++ b/core/feed/annotator/circulation.py @@ -9,6 +9,7 @@ from collections import defaultdict from typing import Any, Dict, List, Optional, Tuple +from dependency_injector.wiring import Provide, inject from flask import url_for from sqlalchemy.orm import Session @@ -51,6 +52,7 @@ ) from core.model.patron import Hold, Loan, Patron from core.model.work import Work +from core.service.container import Services from core.util.datetime_helpers import from_timestamp from core.util.opds_writer import OPDSFeed @@ -179,6 +181,7 @@ def format_types(cls, delivery_mechanism: DeliveryMechanism) -> List[str]: class CirculationManagerAnnotator(Annotator): hidden_content_types: list[str] + @inject def __init__( self, lane: Optional[WorkList], @@ -186,6 +189,7 @@ def __init__( active_holds_by_work: Optional[Dict[Work, Hold]] = None, active_fulfillments_by_work: Optional[Dict[Work, Any]] = None, hidden_content_types: Optional[List[str]] = None, + analytics: Analytics = Provide[Services.analytics.analytics], ) -> None: if lane: logger_name = "Circulation Manager Annotator for %s" % lane.display_name @@ -198,6 +202,7 @@ def __init__( self.active_fulfillments_by_work = active_fulfillments_by_work or {} self.hidden_content_types = hidden_content_types or [] self.facet_view = "feed" + self.analytics = analytics def is_work_entry_solo(self, work: Work) -> bool: """Return a boolean value indicating whether the work's OPDS catalog entry is served by itself, @@ -927,7 +932,7 @@ def annotate_work_entry( ) ) - if Analytics.is_configured(self.library): + if self.analytics.is_configured(): entry.computed.other_links.append( Link( rel="http://librarysimplified.org/terms/rel/analytics/open-book", diff --git a/core/local_analytics_provider.py b/core/local_analytics_provider.py index 2ded27a636..57679a9411 100644 --- a/core/local_analytics_provider.py +++ b/core/local_analytics_provider.py @@ -1,59 +1,10 @@ -from flask_babel import lazy_gettext as _ from sqlalchemy.orm.session import Session -from core.model import CirculationEvent, ExternalIntegration, create, get_one -from core.service.container import Services +from core.model import CirculationEvent +from core.util.log import LoggerMixin -class LocalAnalyticsProvider: - NAME = _("Local Analytics") - - DESCRIPTION = _("Store analytics events in the 'circulationevents' database table.") - - # A given site can only have one analytics provider. - CARDINALITY = 1 - - # Where to get the 'location' of an analytics event. 
- LOCATION_SOURCE = "location_source" - - # The 'location' of an analytics event is the 'neighborhood' of - # the request's authenticated patron. - LOCATION_SOURCE_NEIGHBORHOOD = "neighborhood" - - # Analytics events have no 'location'. - LOCATION_SOURCE_DISABLED = "" - - SETTINGS = [ - { - "key": LOCATION_SOURCE, - "label": _("Geographic location of events"), - "description": _( - "Local analytics events may have a geographic location associated with them. How should the location be determined?

Note: to use the patron's neighborhood as the event location, you must also tell your patron authentication mechanism how to gather a patron's neighborhood information." - ), - "default": LOCATION_SOURCE_DISABLED, - "type": "select", - "options": [ - {"key": LOCATION_SOURCE_DISABLED, "label": _("Disable this feature.")}, - { - "key": LOCATION_SOURCE_NEIGHBORHOOD, - "label": _("Use the patron's neighborhood as the event location."), - }, - ], - }, - ] - - def __init__(self, integration, services: Services, library=None): - self.integration_id = integration.id - self.location_source = ( - integration.setting(self.LOCATION_SOURCE).value - or self.LOCATION_SOURCE_DISABLED - ) - self.services = services - if library: - self.library_id = library.id - else: - self.library_id = None - +class LocalAnalyticsProvider(LoggerMixin): def collect_event( self, library, @@ -70,12 +21,6 @@ def collect_event( _db = Session.object_session(library) else: _db = Session.object_session(license_pool) - if library and self.library_id and library.id != self.library_id: - return - - neighborhood = None - if self.location_source == self.LOCATION_SOURCE_NEIGHBORHOOD: - neighborhood = kwargs.pop("neighborhood", None) return CirculationEvent.log( _db, @@ -85,34 +30,4 @@ def collect_event( new_value, start=time, library=library, - location=neighborhood, ) - - @classmethod - def initialize(cls, _db): - """Find or create a local analytics service.""" - - # If a local analytics service already exists, return it. - local_analytics = get_one( - _db, - ExternalIntegration, - protocol=cls.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - - # If a local analytics service already exists, don't create a - # default one. Otherwise, create it with default name of - # "Local Analytics". - if not local_analytics: - local_analytics, ignore = create( - _db, - ExternalIntegration, - protocol=cls.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - name=str(cls.NAME), - ) - return local_analytics - - -# The Analytics class looks for the name "Provider". -Provider = LocalAnalyticsProvider diff --git a/core/metadata_layer.py b/core/metadata_layer.py index f4ef93c1a7..57cabc141c 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -13,6 +13,7 @@ from typing import List, Optional from dateutil.parser import parse +from dependency_injector.wiring import Provide, inject from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ @@ -42,6 +43,7 @@ get_one_or_create, ) from core.model.licensing import LicenseFunctions, LicenseStatus +from core.service.container import Services from core.util import LanguageCodes from core.util.datetime_helpers import to_utc, utc_now from core.util.median import median @@ -80,7 +82,10 @@ def __init__( ) @classmethod - def from_license_source(cls, _db, **args): + @inject + def from_license_source( + cls, _db, analytics: Analytics = Provide[Services.analytics.analytics], **args + ): """When gathering data from the license source, overwrite all old data from this source with new data from the same source. 
Also overwrite an old rights status with an updated status and update @@ -94,7 +99,7 @@ def from_license_source(cls, _db, **args): links=True, rights=True, formats=True, - analytics=Analytics(_db), + analytics=analytics, **args, ) @@ -845,14 +850,11 @@ def primary_identifier(self, _db): self.primary_identifier_obj = obj return self.primary_identifier_obj - def license_pool(self, _db, collection, analytics=None): + def license_pool(self, _db, collection): """Find or create a LicensePool object for this CirculationData. :param collection: The LicensePool object will be associated with the given Collection. - - :param analytics: If the LicensePool is newly created, the event - will be tracked with this. """ if not collection: raise ValueError("Cannot find license pool: no collection provided.") @@ -907,7 +909,12 @@ def set_default_rights_uri(self, data_source_name, default_rights_uri=None): # We still haven't determined rights, so it's unknown. self.default_rights_uri = RightsStatus.UNKNOWN - def apply(self, _db, collection, replace=None): + def apply( + self, + _db, + collection, + replace=None, + ): """Update the title with this CirculationData's information. :param collection: A Collection representing actual copies of @@ -936,11 +943,9 @@ def apply(self, _db, collection, replace=None): if replace is None: replace = ReplacementPolicy() - analytics = replace.analytics or Analytics(_db) - pool = None if collection: - pool, ignore = self.license_pool(_db, collection, analytics) + pool, ignore = self.license_pool(_db, collection) data_source = self.data_source(_db) identifier = self.primary_identifier(_db) @@ -1029,7 +1034,6 @@ def apply(self, _db, collection, replace=None): f"License {license.identifier} has been removed from feed." ) changed_availability = pool.update_availability_from_licenses( - analytics=analytics, as_of=self.last_checked, ) else: @@ -1039,7 +1043,6 @@ def apply(self, _db, collection, replace=None): new_licenses_available=self.licenses_available, new_licenses_reserved=self.licenses_reserved, new_patrons_in_hold_queue=self.patrons_in_hold_queue, - analytics=analytics, as_of=self.last_checked, ) diff --git a/core/model/licensing.py b/core/model/licensing.py index 29778be6ad..31b2f8fb73 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -29,7 +29,6 @@ if TYPE_CHECKING: # Only import for type checking, since it creates an import cycle - from core.analytics import Analytics from core.model import ( # noqa: autoflake Collection, DataSource, @@ -667,14 +666,11 @@ def needs_update(self): def update_availability_from_licenses( self, - analytics: Analytics | None = None, as_of: datetime.datetime | None = None, ): """ Update the LicensePool with new availability information, based on the licenses and holds that are associated with it. - - Log the implied changes with the analytics provider. """ _db = Session.object_session(self) @@ -704,7 +700,6 @@ def update_availability_from_licenses( licenses_available, licenses_reserved, patrons_in_hold_queue, - analytics=analytics, as_of=as_of, ) @@ -730,7 +725,6 @@ def update_availability( new_licenses_available, new_licenses_reserved, new_patrons_in_hold_queue, - analytics=None, as_of=None, ): """Update the LicensePool with new availability information. 
@@ -881,7 +875,6 @@ def update_availability_from_delta( new_licenses_available, new_licenses_reserved, new_patrons_in_hold_queue, - analytics=analytics, as_of=event_date, ) diff --git a/core/service/analytics/configuration.py b/core/service/analytics/configuration.py new file mode 100644 index 0000000000..9a4c3e3dbf --- /dev/null +++ b/core/service/analytics/configuration.py @@ -0,0 +1,5 @@ +from core.service.configuration import ServiceConfiguration + + +class AnalyticsConfiguration(ServiceConfiguration): + s3_analytics_enabled: bool = False diff --git a/core/service/analytics/container.py b/core/service/analytics/container.py new file mode 100644 index 0000000000..0ee8d7a4da --- /dev/null +++ b/core/service/analytics/container.py @@ -0,0 +1,15 @@ +from dependency_injector import providers +from dependency_injector.containers import DeclarativeContainer + +from core.analytics import Analytics + + +class AnalyticsContainer(DeclarativeContainer): + config = providers.Configuration() + storage = providers.DependenciesContainer() + + analytics: providers.Provider[Analytics] = providers.Singleton( + Analytics, + s3_analytics_enabled=config.s3_analytics_enabled, + s3_service=storage.analytics, + ) diff --git a/core/service/configuration.py b/core/service/configuration.py index 14321668e5..53d8120df0 100644 --- a/core/service/configuration.py +++ b/core/service/configuration.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from pathlib import Path from typing import Any diff --git a/core/service/container.py b/core/service/container.py index e25f58deda..b05986a57a 100644 --- a/core/service/container.py +++ b/core/service/container.py @@ -2,6 +2,8 @@ from dependency_injector.containers import DeclarativeContainer from dependency_injector.providers import Container +from core.service.analytics.configuration import AnalyticsConfiguration +from core.service.analytics.container import AnalyticsContainer from core.service.logging.configuration import LoggingConfiguration from core.service.logging.container import Logging from core.service.storage.configuration import StorageConfiguration @@ -21,6 +23,12 @@ class Services(DeclarativeContainer): config=config.logging, ) + analytics = Container( + AnalyticsContainer, + config=config.analytics, + storage=storage, + ) + def create_container() -> Services: container = Services() @@ -28,8 +36,21 @@ def create_container() -> Services: { "storage": StorageConfiguration().dict(), "logging": LoggingConfiguration().dict(), + "analytics": AnalyticsConfiguration().dict(), } ) + container.wire( + modules=[ + "core.metadata_layer", + "api.odl", + "api.axis", + "api.bibliotheca", + "api.enki", + "api.controller", + "api.overdrive", + "core.feed.annotator.circulation", + ] + ) return container diff --git a/core/util/http.py b/core/util/http.py index 496778e409..c156876821 100644 --- a/core/util/http.py +++ b/core/util/http.py @@ -283,7 +283,6 @@ def _request_with_timeout( session.mount("http://", adapter) session.mount("https://", adapter) - print(session) response = session.request(*args, **kwargs) else: diff --git a/tests/api/admin/controller/test_analytics_services.py b/tests/api/admin/controller/test_analytics_services.py deleted file mode 100644 index c95f5424f0..0000000000 --- a/tests/api/admin/controller/test_analytics_services.py +++ /dev/null @@ -1,398 +0,0 @@ -import json - -import flask -import pytest -from werkzeug.datastructures import ImmutableMultiDict - -from api.admin.exceptions import AdminNotAuthorized -from api.admin.problem_details import ( 
- CANNOT_CHANGE_PROTOCOL, - INCOMPLETE_CONFIGURATION, - INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_ANALYTICS_NAME, - MISSING_SERVICE, - NO_PROTOCOL_FOR_NEW_SERVICE, - NO_SUCH_LIBRARY, - UNKNOWN_PROTOCOL, -) -from api.s3_analytics_provider import S3AnalyticsProvider -from core.local_analytics_provider import LocalAnalyticsProvider -from core.model import ( - AdminRole, - ConfigurationSetting, - ExternalIntegration, - create, - get_one, -) -from tests.fixtures.api_admin import SettingsControllerFixture - - -class TestAnalyticsServices: - def test_analytics_services_get_with_one_default_service( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - with settings_ctrl_fixture.request_context_with_admin("/"): - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert len(response.get("analytics_services")) == 1 - local_analytics = response.get("analytics_services")[0] - assert local_analytics.get("name") == LocalAnalyticsProvider.NAME - assert local_analytics.get("protocol") == LocalAnalyticsProvider.__module__ - - protocols = response.get("protocols") - assert S3AnalyticsProvider.NAME in [p.get("label") for p in protocols] - assert "settings" in protocols[0] - - def test_analytics_services_get_with_one_service( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - # Delete the local analytics service that gets created by default. - local_analytics_default = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=LocalAnalyticsProvider.__module__, - ) - - settings_ctrl_fixture.ctrl.db.session.delete(local_analytics_default) - - local_service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=LocalAnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - - local_service.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] - with settings_ctrl_fixture.request_context_with_admin("/"): - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - [local_analytics] = response.get("analytics_services") - - assert local_service.id == local_analytics.get("id") - assert local_service.protocol == local_analytics.get("protocol") - assert local_analytics.get("protocol") == LocalAnalyticsProvider.__module__ - [library] = local_analytics.get("libraries") - assert ( - settings_ctrl_fixture.ctrl.db.default_library().short_name - == library.get("short_name") - ) - - def test_analytics_services_post_errors( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict([]) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response == MISSING_ANALYTICS_NAME - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Name"), - ("protocol", "Unknown"), - ("url", "http://test"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response == UNKNOWN_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Name"), - ("url", "http://test"), - ] - ) - response = ( - 
settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response == NO_PROTOCOL_FOR_NEW_SERVICE - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Name"), - ("id", "123"), - ("url", "http://test"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response.uri == MISSING_SERVICE.uri - - [local_analytics] = ( - settings_ctrl_fixture.ctrl.db.session.query(ExternalIntegration) - .filter(ExternalIntegration.goal == ExternalIntegration.ANALYTICS_GOAL) - .all() - ) - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - assert isinstance(local_analytics.name, str) - flask.request.form = ImmutableMultiDict( - [ - ("name", local_analytics.name), - ("protocol", S3AnalyticsProvider.__module__), - ("url", "http://test"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response == INTEGRATION_NAME_ALREADY_IN_USE - - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=S3AnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Name"), - ("id", str(service.id)), - ("protocol", "core.local_analytics_provider"), - ("url", "http://test"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response == CANNOT_CHANGE_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(service.id)), - ("name", "analytics name"), - ("protocol", S3AnalyticsProvider.__module__), - ("url", ""), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(service.id)), - ("protocol", S3AnalyticsProvider.__module__), - ("name", "some other analytics name"), - (ExternalIntegration.URL, "http://test"), - ("libraries", json.dumps([{"short_name": "not-a-library"}])), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response.uri == NO_SUCH_LIBRARY.uri - - library = settings_ctrl_fixture.ctrl.db.library( - name="Library", - short_name="L", - ) - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(service.id)), - ("protocol", S3AnalyticsProvider.__module__), - ("name", "some other name"), - (ExternalIntegration.URL, ""), - ("libraries", json.dumps([{"short_name": library.short_name}])), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - settings_ctrl_fixture.admin.remove_role(AdminRole.LIBRARY_MANAGER) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - 
("protocol", LocalAnalyticsProvider.__module__), - (ExternalIntegration.URL, "url"), - ("libraries", json.dumps([])), - ] - ) - pytest.raises( - AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services, - ) - - def test_analytics_services_post_create( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - library = settings_ctrl_fixture.ctrl.db.library( - name="Library", - short_name="L", - ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "S3 analytics name"), - ("protocol", S3AnalyticsProvider.__module__), - ( - "location_source", - S3AnalyticsProvider.LOCATION_SOURCE_NEIGHBORHOOD, - ), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response.status_code == 201 - - service = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=S3AnalyticsProvider.__module__, - ) - assert isinstance(service, ExternalIntegration) - assert service.id == int(response.get_data()) - assert S3AnalyticsProvider.__module__ == service.protocol - assert ( - "neighborhood" - == ConfigurationSetting.for_externalintegration( - S3AnalyticsProvider.LOCATION_SOURCE, - service, - ).value - ) - - local_analytics_default = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=LocalAnalyticsProvider.__module__, - ) - settings_ctrl_fixture.ctrl.db.session.delete(local_analytics_default) - - # Creating a local analytics service doesn't require a URL. - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "local analytics name"), - ("protocol", LocalAnalyticsProvider.__module__), - ( - "libraries", - json.dumps([{"short_name": "L", "tracking_id": "trackingid"}]), - ), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response.status_code == 201 - - def test_analytics_services_post_edit( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - s3_service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=S3AnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(s3_service.id)), - ("name", "some other analytics name"), - ("protocol", S3AnalyticsProvider.__module__), - ( - S3AnalyticsProvider.LOCATION_SOURCE, - S3AnalyticsProvider.LOCATION_SOURCE_NEIGHBORHOOD, - ), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_analytics_services() - ) - assert response.status_code == 200 - - assert s3_service.id == int(response.get_data()) - assert s3_service.name == "some other analytics name" - assert S3AnalyticsProvider.__module__ == s3_service.protocol - assert ( - S3AnalyticsProvider.LOCATION_SOURCE_NEIGHBORHOOD - == ConfigurationSetting.for_externalintegration( - S3AnalyticsProvider.LOCATION_SOURCE, - s3_service, - ).value - ) - - def test_check_name_unique(self, settings_ctrl_fixture: SettingsControllerFixture): - kwargs = dict( - protocol=S3AnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - existing_service, ignore = 
create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - name="existing service", - **kwargs - ) - new_service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - name="new service", - **kwargs - ) - - m = ( - settings_ctrl_fixture.manager.admin_analytics_services_controller.check_name_unique - ) - - # Try to change new service so that it has the same name as existing service - # -- this is not allowed. - result = m(new_service, existing_service.name) - assert result == INTEGRATION_NAME_ALREADY_IN_USE - - # Try to edit existing service without changing its name -- this is fine. - assert None == m(existing_service, existing_service.name) - - # Changing the existing service's name is also fine. - assert None == m(existing_service, "new name") - - def test_analytics_service_delete( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=S3AnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - - with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - pytest.raises( - AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_analytics_services_controller.process_delete, - service.id, - ) - - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - response = settings_ctrl_fixture.manager.admin_analytics_services_controller.process_delete( - service.id - ) - assert response.status_code == 200 - - service1 = get_one( - settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, id=service.id - ) - assert None == service1 diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index a86fd39b4a..9cbd5561c3 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -619,29 +619,6 @@ def test_process_delete(self, fixture: AdminRouteFixture): fixture.assert_supported_methods(url, "DELETE") -class TestAdminAnalyticsServices: - CONTROLLER_NAME = "admin_analytics_services_controller" - - @pytest.fixture(scope="function") - def fixture(self, admin_route_fixture: AdminRouteFixture) -> AdminRouteFixture: - admin_route_fixture.set_controller_name(self.CONTROLLER_NAME) - return admin_route_fixture - - def test_process_analytics_services(self, fixture: AdminRouteFixture): - url = "/admin/analytics_services" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_analytics_services # type: ignore - ) - fixture.assert_supported_methods(url, "GET", "POST") - - def test_process_delete(self, fixture: AdminRouteFixture): - url = "/admin/analytics_service/" - fixture.assert_authenticated_request_calls( - url, fixture.controller.process_delete, "", http_method="DELETE" # type: ignore - ) - fixture.assert_supported_methods(url, "DELETE") - - class TestAdminSearchServices: CONTROLLER_NAME = "admin_search_services_controller" diff --git a/tests/api/conftest.py b/tests/api/conftest.py index bda4e6f1e9..109f98c015 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -24,6 +24,7 @@ "tests.fixtures.api_overdrive_files", "tests.fixtures.api_routes", "tests.fixtures.authenticator", + "tests.fixtures.container", "tests.fixtures.csv_files", "tests.fixtures.database", "tests.fixtures.files", diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py index 9afa8c8937..5eec641269 100644 --- a/tests/api/feed/test_library_annotator.py +++ 
b/tests/api/feed/test_library_annotator.py @@ -1,7 +1,7 @@ import datetime from collections import defaultdict from typing import List -from unittest.mock import create_autospec +from unittest.mock import create_autospec, patch import dateutil import feedparser @@ -13,7 +13,6 @@ from api.circulation import BaseCirculationAPI, CirculationAPI, FulfillmentInfo from api.lanes import ContributorLane from api.novelist import NoveListAPI -from core.analytics import Analytics from core.classifier import ( # type: ignore[attr-defined] Classifier, Fantasy, @@ -41,6 +40,7 @@ Work, ) from core.opds_import import OPDSXMLParser +from core.service.container import container_instance from core.util.datetime_helpers import utc_now from core.util.flask_util import OPDSFeedResponse from core.util.opds_writer import OPDSFeed @@ -576,7 +576,13 @@ def test_annotate_work_entry(self, annotator_fixture: LibraryAnnotatorFixture): work_entry = WorkEntry( work=work, license_pool=None, edition=edition, identifier=identifier ) - annotator.annotate_work_entry(work_entry) + + with patch.object( + container_instance().analytics.analytics(), + "is_configured", + lambda: False, + ): + annotator.annotate_work_entry(work_entry) assert work_entry.computed is not None links = { x.rel @@ -603,11 +609,11 @@ def test_annotate_work_entry(self, annotator_fixture: LibraryAnnotatorFixture): # If analytics are configured, a link is added to # create an 'open_book' analytics event for this title. - Analytics.GLOBAL_ENABLED = True work_entry = WorkEntry( work=work, license_pool=None, edition=edition, identifier=identifier ) annotator.annotate_work_entry(work_entry) + assert work_entry.computed is not None [analytics_link] = [ x.href for x in work_entry.computed.other_links if x.rel == open_book_rel diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index 8caece5d55..a6181c4216 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -1,7 +1,6 @@ from abc import ABC from collections import defaultdict -from typing import Optional, Type -from unittest.mock import MagicMock +from typing import Type from sqlalchemy.orm import Session @@ -17,7 +16,6 @@ from core.integration.settings import BaseSettings from core.model import DataSource, Hold, Loan, get_one_or_create from core.model.configuration import ExternalIntegration -from core.service.container import Services from tests.mocks.search import ExternalSearchIndexFake @@ -176,10 +174,8 @@ def api_for_license_pool(self, licensepool): class MockCirculationManager(CirculationManager): d_circulation: MockCirculationAPI - def __init__(self, db: Session, services: Optional[Services] = None): - if services is None: - services = MagicMock(spec=Services) - super().__init__(db, services) + def __init__(self, db: Session): + super().__init__(db) def setup_search(self): """Set up a search client.""" @@ -200,4 +196,4 @@ def setup_search(self): def setup_circulation(self, library, analytics): """Set up the Circulation object.""" - return MockCirculationAPI(self._db, library, analytics) + return MockCirculationAPI(self._db, library, analytics=analytics) diff --git a/tests/api/test_bibliotheca.py b/tests/api/test_bibliotheca.py index 491b1e1407..55ee36a461 100644 --- a/tests/api/test_bibliotheca.py +++ b/tests/api/test_bibliotheca.py @@ -4,7 +4,15 @@ import random from datetime import datetime, timedelta from io import BytesIO, StringIO -from typing import TYPE_CHECKING, ClassVar, Optional, Protocol, Type, runtime_checkable +from typing import ( 
+ TYPE_CHECKING, + ClassVar, + Optional, + Protocol, + Type, + cast, + runtime_checkable, +) from unittest import mock from unittest.mock import MagicMock, create_autospec @@ -41,6 +49,7 @@ RemoteInitiatedServerError, ) from api.web_publication_manifest import FindawayManifest +from core.analytics import Analytics from core.integration.goals import Goals from core.integration.registry import IntegrationRegistry from core.metadata_layer import ReplacementPolicy, TimestampData @@ -1178,7 +1187,10 @@ def initialized_monitor(self, db: DatabaseTransactionFixture): ], ) def test_optional_iso_date_valid_dates( - self, specified_default_start, expected_default_start, default_monitor + self, + specified_default_start: datetime | str | None, + expected_default_start: datetime | None, + default_monitor: BibliothecaPurchaseMonitor, ): # ISO 8601 strings, `datetime`s, or None are valid. actual_default_start = default_monitor._optional_iso_date( @@ -1190,8 +1202,8 @@ def test_optional_iso_date_valid_dates( def test_monitor_intrinsic_start_time( self, - default_monitor, - initialized_monitor, + default_monitor: BibliothecaPurchaseMonitor, + initialized_monitor: BibliothecaPurchaseMonitor, bibliotheca_fixture: BibliothecaAPITestFixture, ): db = bibliotheca_fixture.db @@ -1234,9 +1246,9 @@ def test_monitor_intrinsic_start_time( ) def test_specified_start_trumps_intrinsic_default_start( self, - specified_default_start, - override_timestamp, - expected_start, + specified_default_start: str | None, + override_timestamp: bool, + expected_start: datetime | None, bibliotheca_fixture: BibliothecaAPITestFixture, ): db = bibliotheca_fixture.db @@ -1312,9 +1324,9 @@ def test_specified_start_trumps_intrinsic_default_start( ) def test_specified_start_can_override_timestamp( self, - specified_default_start, - override_timestamp, - expected_start, + specified_default_start: str | None, + override_timestamp: bool, + expected_start: datetime | None, bibliotheca_fixture: BibliothecaAPITestFixture, ): monitor = BibliothecaPurchaseMonitor( @@ -1349,11 +1361,15 @@ def test_specified_start_can_override_timestamp( assert progress.start == expected_actual_start_time @pytest.mark.parametrize("input", [("invalid"), ("2020/10"), (["2020-10-05"])]) - def test_optional_iso_date_invalid_dates(self, input, default_monitor): + def test_optional_iso_date_invalid_dates( + self, + input: list[str] | str, + default_monitor: BibliothecaPurchaseMonitor, + ): with pytest.raises(ValueError) as excinfo: default_monitor._optional_iso_date(input) - def test_catch_up_from(self, default_monitor): + def test_catch_up_from(self, default_monitor: BibliothecaPurchaseMonitor): # catch_up_from() slices up its given timespan, calls # purchases() to find purchases for each slice, processes each # purchase using process_record(), and sets a checkpoint for each @@ -1425,7 +1441,7 @@ def test_catch_up_from(self, default_monitor): progress, start, full_slice[0], "MARC records processed: 1" ) - def test__checkpoint(self, default_monitor): + def test__checkpoint(self, default_monitor: BibliothecaPurchaseMonitor): # The _checkpoint method allows the BibliothecaPurchaseMonitor # to preserve its progress in case of a crash. 
@@ -1452,7 +1468,7 @@ def test__checkpoint(self, default_monitor): assert timestamp_obj.start == BibliothecaPurchaseMonitor.DEFAULT_START_TIME assert timestamp_obj.finish == finish - def test_purchases(self, default_monitor): + def test_purchases(self, default_monitor: BibliothecaPurchaseMonitor): # The purchases() method calls marc_request repeatedly, handling # pagination. @@ -1475,7 +1491,10 @@ def test_purchases(self, default_monitor): assert ([1] * 50) + ([2] * 50) + ([3] * 49) == records def test_process_record( - self, default_monitor, caplog, bibliotheca_fixture: BibliothecaAPITestFixture + self, + default_monitor: BibliothecaPurchaseMonitor, + caplog: pytest.LogCaptureFixture, + bibliotheca_fixture: BibliothecaAPITestFixture, ): # process_record may create a LicensePool, trigger the # bibliographic coverage provider, and/or issue a "license @@ -1483,7 +1502,7 @@ def test_process_record( # MARC record. purchase_time = utc_now() analytics = MockAnalyticsProvider() - default_monitor.analytics = analytics + default_monitor.analytics = cast(Analytics, analytics) ensure_coverage = MagicMock() default_monitor.bibliographic_coverage_provider.ensure_coverage = ( ensure_coverage @@ -1548,7 +1567,9 @@ def test_process_record( assert analytics.count == 0 def test_end_to_end( - self, default_monitor, bibliotheca_fixture: BibliothecaAPITestFixture + self, + default_monitor: BibliothecaPurchaseMonitor, + bibliotheca_fixture: BibliothecaAPITestFixture, ): # Limited end-to-end test of the BibliothecaPurchaseMonitor. @@ -1564,10 +1585,10 @@ def test_end_to_end( # book, and one to the metadata endpoint for information about # that book. api = default_monitor.api - api.queue_response( + api.queue_response( # type: ignore [attr-defined] 200, content=bibliotheca_fixture.files.sample_data("marc_records_one.xml") ) - api.queue_response( + api.queue_response( # type: ignore [attr-defined] 200, content=bibliotheca_fixture.files.sample_data("item_metadata_single.xml"), ) @@ -1592,7 +1613,7 @@ def test_end_to_end( # An analytics event was issued to commemorate the addition of # the book to the collection. # No more DISTRIBUTOR events - assert default_monitor.analytics.count == 0 + assert default_monitor.analytics.count == 0 # type: ignore [attr-defined] # The timestamp has been updated; the next time the monitor # runs it will ask for purchases that haven't happened yet. 
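[Illustrative sketch, not part of the patch.] The test changes in this file assign a mock provider to attributes that are now typed as Analytics, using typing.cast to satisfy the type checker; cast has no runtime effect. A minimal example of the idea, where FakeAnalytics is a simplified, hypothetical stand-in for the project's mock provider:

    from typing import cast

    from core.analytics import Analytics


    class FakeAnalytics:
        """Simplified stand-in for the test suite's mock analytics provider."""

        def __init__(self) -> None:
            self.count = 0

        def collect_event(self, *args, **kwargs) -> None:
            self.count += 1


    fake = FakeAnalytics()
    # cast() only changes the static type seen by mypy; at runtime `analytics` is still `fake`.
    analytics: Analytics = cast(Analytics, fake)
    analytics.collect_event("library", "pool", "checkout")
    assert fake.count == 1
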
@@ -1741,7 +1762,7 @@ def test_handle_event(self, bibliotheca_fixture: BibliothecaAPITestFixture): db.session, bibliotheca_fixture.collection, api_class=api, - analytics=analytics, + analytics=cast(Analytics, analytics), ) now = utc_now() diff --git a/tests/api/test_controller_analytics.py b/tests/api/test_controller_analytics.py index 936ad627aa..fad58b1725 100644 --- a/tests/api/test_controller_analytics.py +++ b/tests/api/test_controller_analytics.py @@ -2,9 +2,7 @@ import pytest from api.problem_details import INVALID_ANALYTICS_EVENT_TYPE -from core.analytics import Analytics -from core.local_analytics_provider import LocalAnalyticsProvider -from core.model import CirculationEvent, ExternalIntegration, create, get_one +from core.model import CirculationEvent, get_one from tests.fixtures.api_controller import CirculationControllerFixture from tests.fixtures.database import DatabaseTransactionFixture @@ -25,20 +23,6 @@ class TestAnalyticsController: def test_track_event(self, analytics_fixture: AnalyticsFixture): db = analytics_fixture.db - integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="core.local_analytics_provider", - ) - integration.setting( - LocalAnalyticsProvider.LOCATION_SOURCE - ).value = LocalAnalyticsProvider.LOCATION_SOURCE_NEIGHBORHOOD - - # The Analytics singleton will have already been instantiated, - # so here we simulate a reload of its configuration with `refresh`. - analytics_fixture.manager.analytics = Analytics(db.session, refresh=True) - with analytics_fixture.request_context_with_library("/"): response = analytics_fixture.manager.analytics_controller.track_event( analytics_fixture.identifier.type, @@ -48,34 +32,7 @@ def test_track_event(self, analytics_fixture: AnalyticsFixture): assert 400 == response.status_code assert INVALID_ANALYTICS_EVENT_TYPE.uri == response.uri - # If there is no active patron, or if the patron has no - # associated neighborhood, the CirculationEvent is created - # with no location. patron = db.patron() - for request_patron in (None, patron): - with analytics_fixture.request_context_with_library("/"): - flask.request.patron = request_patron # type: ignore - response = analytics_fixture.manager.analytics_controller.track_event( - analytics_fixture.identifier.type, - analytics_fixture.identifier.identifier, - "open_book", - ) - assert 200 == response.status_code - - circulation_event = get_one( - db.session, - CirculationEvent, - type="open_book", - license_pool=analytics_fixture.lp, - ) - assert None is not circulation_event - assert None == circulation_event.location - db.session.delete(circulation_event) - - # If the patron has an associated neighborhood, and the - # analytics controller is set up to use patron neighborhood as - # event location, then the CirculationEvent is created with - # that neighborhood as its location. 
patron.neighborhood = "Mars Grid 4810579" with analytics_fixture.request_context_with_library("/"): flask.request.patron = patron # type: ignore @@ -93,5 +50,7 @@ def test_track_event(self, analytics_fixture: AnalyticsFixture): license_pool=analytics_fixture.lp, ) assert circulation_event is not None - assert patron.neighborhood == circulation_event.location + assert ( + circulation_event.location == None + ) # We no longer use the location source db.session.delete(circulation_event) diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index ba91176cb9..a43de03acd 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -154,7 +154,7 @@ class BadSearch(CirculationManager): def setup_search(self): raise Exception("doomed!") - circulation = BadSearch(circulation_fixture.db.session, MagicMock()) + circulation = BadSearch(circulation_fixture.db.session) # We didn't get a search object. assert None == circulation.external_search diff --git a/tests/api/test_controller_loan.py b/tests/api/test_controller_loan.py index a5d556e683..7231409dd8 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/test_controller_loan.py @@ -111,8 +111,10 @@ def test_can_fulfill_without_loan(self, loan_fixture: LoanFixture): class MockLibraryAuthenticator: identifies_individuals = False + short_name = loan_fixture.db.default_library().short_name + assert short_name is not None loan_fixture.manager.auth.library_authenticators[ - loan_fixture.db.default_library().short_name + short_name ] = MockLibraryAuthenticator() def mock_can_fulfill_without_loan(patron, pool, lpdm): diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/test_controller_opdsfeed.py index 31a50df4e2..e237c72e63 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/test_controller_opdsfeed.py @@ -676,7 +676,7 @@ class BadSearch(CirculationManager): def setup_search(self): raise Exception("doomed!") - circulation = BadSearch(circulation_fixture.db.session, MagicMock()) + circulation = BadSearch(circulation_fixture.db.session) # An attempt to call FeedController.search() will return a # problem detail. diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index 21fd291ea8..2d69ff793b 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -31,6 +31,7 @@ OverdriveRepresentationExtractor, RecentOverdriveCollectionMonitor, ) +from core.analytics import Analytics from core.config import CannotLoadConfiguration from core.coverage import CoverageFailure from core.integration.goals import Goals @@ -2941,9 +2942,8 @@ def update_licensepool(self, book_id): self.update_licensepool_calls.append((book_id, pool)) return pool, is_new, is_changed - class MockAnalytics: - def __init__(self, _db): - self.db = _db + class MockAnalytics(Analytics): + def __init__(self): self.events = [] def collect_event(self, *args): @@ -2970,14 +2970,13 @@ def should_stop(self, start, book, is_changed): db.session, overdrive_api_fixture.collection, api_class=MockAPI, - analytics_class=MockAnalytics, + analytics=MockAnalytics(), ) api = monitor.api # A MockAnalytics object was created and is ready to receive analytics # events. 
assert isinstance(monitor.analytics, MockAnalytics) - assert db.session == monitor.analytics.db # The 'Overdrive API' is ready to tell us about four books, # but only one of them (the first) represents a change from what @@ -3066,9 +3065,8 @@ def update_licensepool(self, book_id): self.update_licensepool_calls.append((book_id, pool)) return pool, is_new, is_changed - class MockAnalytics: - def __init__(self, _db): - self._db = _db + class MockAnalytics(Analytics): + def __init__(self): self.events = [] def collect_event(self, *args): @@ -3078,14 +3076,13 @@ def collect_event(self, *args): db.session, overdrive_api_fixture.collection, api_class=MockAPI, - analytics_class=MockAnalytics, + analytics=MockAnalytics(), ) api = monitor.api # A MockAnalytics object was created and is ready to receive analytics # events. assert isinstance(monitor.analytics, MockAnalytics) - assert db.session == monitor.analytics._db lp1 = db.licensepool(None) lp1.last_checked = utc_now() @@ -3127,9 +3124,8 @@ def update_licensepool(self, book_id): self.tries[str(book_id)] = current_count raise StaleDataError("Ouch!") - class MockAnalytics: - def __init__(self, _db): - self._db = _db + class MockAnalytics(Analytics): + def __init__(self): self.events = [] def collect_event(self, *args): @@ -3139,14 +3135,13 @@ def collect_event(self, *args): db.session, overdrive_api_fixture.collection, api_class=MockAPI, - analytics_class=MockAnalytics, + analytics=MockAnalytics(), ) api = monitor.api # A MockAnalytics object was created and is ready to receive analytics # events. assert isinstance(monitor.analytics, MockAnalytics) - assert db.session == monitor.analytics._db lp1 = db.licensepool(None) lp1.last_checked = utc_now() diff --git a/tests/core/conftest.py b/tests/core/conftest.py index 15b69bb34a..fc9177cad2 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -1,6 +1,7 @@ pytest_plugins = [ "tests.fixtures.announcements", "tests.fixtures.csv_files", + "tests.fixtures.container", "tests.fixtures.database", "tests.fixtures.library", "tests.fixtures.opds2_files", diff --git a/tests/core/test_analytics.py b/tests/core/test_analytics.py index 5ba554eab2..89fe7641d5 100644 --- a/tests/core/test_analytics.py +++ b/tests/core/test_analytics.py @@ -1,182 +1,40 @@ +from unittest.mock import MagicMock + +from api.s3_analytics_provider import S3AnalyticsProvider from core.analytics import Analytics from core.local_analytics_provider import LocalAnalyticsProvider -from core.mock_analytics_provider import MockAnalyticsProvider -from core.model import CirculationEvent, ExternalIntegration, create # We can't import mock_analytics_provider from within a test, # and we can't tell Analytics to do so either. We need to tell # it to perform an import relative to the module the Analytics # class is in. 
-from tests.fixtures.database import DatabaseTransactionFixture MOCK_PROTOCOL = "..mock_analytics_provider" class TestAnalytics: - def test_initialize(self, db: DatabaseTransactionFixture): - # supports multiple analytics providers, site-wide or with libraries - # Two site-wide integrations - site_wide_integration1, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=MOCK_PROTOCOL, - ) - site_wide_integration1.url = db.fresh_str() - site_wide_integration2, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="..local_analytics_provider", - ) - - # A broken integration - missing_integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="missing_provider", - ) - - # Two library-specific integrations - l1 = db.library(short_name="L1") - l2 = db.library(short_name="L2") - - library_integration1, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=MOCK_PROTOCOL, - ) - library_integration1.libraries += [l1, l2] - - library_integration2, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=MOCK_PROTOCOL, - ) - library_integration2.libraries += [l2] - - analytics = Analytics(db.session) - assert 2 == len(analytics.sitewide_providers) - assert isinstance(analytics.sitewide_providers[0], MockAnalyticsProvider) - assert site_wide_integration1.url == analytics.sitewide_providers[0].url - assert isinstance(analytics.sitewide_providers[1], LocalAnalyticsProvider) - assert missing_integration.id in analytics.initialization_exceptions - - assert 1 == len(analytics.library_providers[l1.id]) - assert isinstance(analytics.library_providers[l1.id][0], MockAnalyticsProvider) - - assert 2 == len(analytics.library_providers[l2.id]) - for provider in analytics.library_providers[l2.id]: - assert isinstance(provider, MockAnalyticsProvider) - - # Instantiating an Analytics object initializes class - # variables with the current state of site analytics. + def test_is_configured(self): + analytics = Analytics() + assert analytics.is_configured() == True - # We have global analytics enabled. - assert Analytics.GLOBAL_ENABLED is True + analytics = Analytics(s3_analytics_enabled=True) + assert analytics.is_configured() == True - # We also have analytics enabled for two of the three libraries. - assert {l1.id, l2.id} == Analytics.LIBRARY_ENABLED + # If somehow we don't have providers, we don't have analytics + analytics.providers = [] + assert analytics.is_configured() == False - # Now we'll change the analytics configuration. - db.session.delete(site_wide_integration1) - db.session.delete(site_wide_integration2) - db.session.delete(library_integration1) + def test_init_analytics(self): + analytics = Analytics() - # But Analytics is a singleton, so if we instantiate a new - # Analytics object in the same app instance, it will be the - # same as the previous one. - analytics2 = Analytics(db.session) - assert analytics2 == analytics - assert 2 == len(analytics.sitewide_providers) - assert 1 == len(analytics.library_providers[l1.id]) - assert 2 == len(analytics.library_providers[l2.id]) + assert len(analytics.providers) == 1 + assert type(analytics.providers[0]) == LocalAnalyticsProvider - # If, however, we simulate a configuration refresh ... - analytics3 = Analytics(db.session, refresh=True) - # ... we will see the updated configuration. 
- assert analytics3 == analytics - assert Analytics.GLOBAL_ENABLED is False - assert {l2.id} == Analytics.LIBRARY_ENABLED # type: ignore - - def test_is_configured(self, db: DatabaseTransactionFixture): - # If the Analytics constructor has not been called, then - # is_configured() calls it so that the values are populated. - Analytics.GLOBAL_ENABLED = None - library = db.default_library() - assert False == Analytics.is_configured(library) - assert False == Analytics.GLOBAL_ENABLED - assert set() == Analytics.LIBRARY_ENABLED - - # If analytics are enabled globally, they are enabled for any - # library. - Analytics.GLOBAL_ENABLED = True - assert True == Analytics.is_configured(object()) - - # If not, they are enabled only for libraries whose IDs are - # in LIBRARY_ENABLED. - Analytics.GLOBAL_ENABLED = False - assert False == Analytics.is_configured(library) - assert isinstance(library.id, int) - Analytics.LIBRARY_ENABLED.add(library.id) - assert True == Analytics.is_configured(library) - - def test_collect_event(self, db: DatabaseTransactionFixture): - # This will be a site-wide integration because it will have no - # associated libraries when the Analytics singleton is instantiated. - # the first time. - sitewide_integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=MOCK_PROTOCOL, - ) - - # This will be a per-library integration because it will have at least - # one associated library when the Analytics singleton is instantiated. - library_integration, ignore = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=MOCK_PROTOCOL, - ) - library = db.library(short_name="library") - library_integration.libraries += [library] - - work = db.work(title="title", with_license_pool=True) - [lp] = work.license_pools - analytics = Analytics(db.session) - sitewide_provider = analytics.sitewide_providers[0] - library_provider = analytics.library_providers[library.id][0] - - analytics.collect_event( - db.default_library(), - lp, - CirculationEvent.DISTRIBUTOR_CHECKIN, - None, + analytics = Analytics( + s3_analytics_enabled=True, + s3_service=MagicMock(), ) - # The sitewide provider was called. - assert 1 == sitewide_provider.count - assert CirculationEvent.DISTRIBUTOR_CHECKIN == sitewide_provider.event_type - - # The library provider wasn't called, since the event was for a different library. - assert 0 == library_provider.count - - analytics.collect_event(library, lp, CirculationEvent.DISTRIBUTOR_CHECKIN, None) - - # Now both providers were called, since the event was for the library provider's library. - assert 2 == sitewide_provider.count - assert 1 == library_provider.count - assert CirculationEvent.DISTRIBUTOR_CHECKIN == library_provider.event_type - - # Here's an event that we couldn't associate with any - # particular library. - analytics.collect_event(None, lp, CirculationEvent.DISTRIBUTOR_CHECKOUT, None) - - # It's counted as a sitewide event, but not as a library event. 
- assert 3 == sitewide_provider.count - assert 1 == library_provider.count + assert len(analytics.providers) == 2 + assert type(analytics.providers[0]) == LocalAnalyticsProvider + assert type(analytics.providers[1]) == S3AnalyticsProvider diff --git a/tests/core/test_local_analytics_provider.py b/tests/core/test_local_analytics_provider.py index 478e4d6d68..7b54da8f5b 100644 --- a/tests/core/test_local_analytics_provider.py +++ b/tests/core/test_local_analytics_provider.py @@ -5,7 +5,7 @@ import pytest from core.local_analytics_provider import LocalAnalyticsProvider -from core.model import CirculationEvent, ExternalIntegration, create, get_one +from core.model import CirculationEvent, ExternalIntegration from core.util.datetime_helpers import utc_now if TYPE_CHECKING: @@ -13,44 +13,6 @@ from tests.fixtures.services import MockServicesFixture -class TestInitializeLocalAnalyticsProvider: - def test_initialize(self, db: DatabaseTransactionFixture): - session = db.session - - local_analytics = get_one( - session, - ExternalIntegration, - protocol=LocalAnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - - # There shouldn't exist a local analytics service. - assert local_analytics is None - - # So when the Local Analytics provider is initialized, it will - # create one with the default name of "Local Analytics". - local_analytics = LocalAnalyticsProvider.initialize(session) - - assert isinstance(local_analytics, ExternalIntegration) - assert local_analytics.name == LocalAnalyticsProvider.NAME - - # When an analytics provider is initialized, retrieving a - # local analytics service should return the same one. - local_analytics = LocalAnalyticsProvider.initialize(session) - - local_analytics_2 = get_one( - session, - ExternalIntegration, - protocol=LocalAnalyticsProvider.__module__, - goal=ExternalIntegration.ANALYTICS_GOAL, - ) - - assert isinstance(local_analytics, ExternalIntegration) - assert isinstance(local_analytics_2, ExternalIntegration) - assert local_analytics_2.id == local_analytics.id - assert local_analytics_2.name == local_analytics.name - - class LocalAnalyticsProviderFixture: transaction: DatabaseTransactionFixture integration: ExternalIntegration @@ -62,16 +24,8 @@ def __init__( mock_services_fixture: MockServicesFixture, ): self.transaction = transaction - self.integration, ignore = create( - transaction.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol="core.local_analytics_provider", - ) self.services = mock_services_fixture.services - self.la = LocalAnalyticsProvider( - self.integration, self.services, transaction.default_library() - ) + self.la = LocalAnalyticsProvider() @pytest.fixture() @@ -120,36 +74,6 @@ def test_collect_event( assert CirculationEvent.DISTRIBUTOR_CHECKIN == event.type assert now == event.start - # The LocalAnalyticsProvider will not handle an event intended - # for a different library. - now = utc_now() - data.la.collect_event( - library2, - lp, - CirculationEvent.DISTRIBUTOR_CHECKIN, - now, - old_value=None, - new_value=None, - ) - assert 1 == qu.count() - - # It's possible to instantiate the LocalAnalyticsProvider - # without a library. - la = LocalAnalyticsProvider(data.integration, data.services) - - # In that case, it will process events for any library. 
- for library in [database.default_library(), library2]: - now = utc_now() - la.collect_event( - library, - lp, - CirculationEvent.DISTRIBUTOR_CHECKIN, - now, - old_value=None, - new_value=None, - ) - assert 3 == qu.count() - def test_collect_with_missing_information( self, local_analytics_provider_fixture: LocalAnalyticsProviderFixture ): @@ -168,54 +92,3 @@ def test_collect_with_missing_information( with pytest.raises(ValueError) as excinfo: data.la.collect_event(None, None, "event", now) assert "Either library or license_pool must be provided." in str(excinfo.value) - - def test_neighborhood_is_location( - self, local_analytics_provider_fixture: LocalAnalyticsProviderFixture - ): - # If a 'neighborhood' argument is provided, its value - # is used as CirculationEvent.location. - data = local_analytics_provider_fixture - database = local_analytics_provider_fixture.transaction - - # The default LocalAnalytics object doesn't have a location - # gathering policy, and the default is to ignore location. - event, is_new = data.la.collect_event( - database.default_library(), - None, - "event", - utc_now(), - neighborhood="Gormenghast", - ) - assert is_new is True - assert event.location is None - - # Create another LocalAnalytics object that uses the patron - # neighborhood as the event location. - - p = LocalAnalyticsProvider - data.integration.setting( - p.LOCATION_SOURCE - ).value = p.LOCATION_SOURCE_NEIGHBORHOOD - la = p(data.integration, data.services, database.default_library()) - - event, is_new = la.collect_event( - database.default_library(), - None, - "event", - utc_now(), - neighborhood="Gormenghast", - ) - assert is_new is True - assert "Gormenghast" == event.location - - # If no neighborhood is available, the event ends up with no location - # anyway. - event2, is_new = la.collect_event( - database.default_library(), - None, - "event", - utc_now(), - ) - assert event2 != event - assert is_new is True - assert event2.location is None diff --git a/tests/core/test_metadata.py b/tests/core/test_metadata.py index 7d51d3f95d..b5c6880a5c 100644 --- a/tests/core/test_metadata.py +++ b/tests/core/test_metadata.py @@ -5,7 +5,6 @@ import pytest -from core.analytics import Analytics from core.classifier import NO_NUMBER, NO_VALUE from core.metadata_layer import ( CirculationData, @@ -1087,72 +1086,6 @@ def test_links_filtered(self): assert [link2, link5, link4, link3] == filtered_links -class TestCirculationData: - def test_apply_propagates_analytics(self, db: DatabaseTransactionFixture): - # Verify that an Analytics object is always passed into - # license_pool() and update_availability(), even if none is - # provided in the ReplacementPolicy. - # - # NOTE: this test was written to verify a bug fix; it's not a - # comprehensive test of CirculationData.apply(). - source = DataSource.lookup(db.session, DataSource.GUTENBERG) - identifier = db.identifier() - collection = db.default_collection() - - class MockLicensePool: - # A LicensePool-like object that tracks how its - # update_availability() method was called. - delivery_mechanisms: list = [] - licenses: list = [] - work = None - - def calculate_work(self): - return None, False - - def update_availability(self, **kwargs): - self.update_availability_called_with = kwargs - - pool = MockLicensePool() - - class MockCirculationData(CirculationData): - # A CirculationData-like object that always says - # update_availability ought to be called on a - # specific MockLicensePool. 
- def license_pool(self, _db, collection, analytics): - self.license_pool_called_with = (_db, collection, analytics) - return pool, False - - def _availability_needs_update(self, *args): - # Force update_availability to be called. - return True - - # First try with no particular ReplacementPolicy. - data = MockCirculationData(source, identifier) - data.apply(db.session, collection) - - # A generic Analytics object was created and passed in to - # MockCirculationData.license_pool(). - analytics1 = data.license_pool_called_with[-1] - assert isinstance(analytics1, Analytics) - - # Then, the same Analytics object was passed into the - # update_availability() method of the MockLicensePool returned - # by license_pool() - analytics2 = pool.update_availability_called_with["analytics"] - assert analytics1 == analytics2 - - # Now try with a ReplacementPolicy that mentions a specific - # analytics object. - analytics = object() - policy = ReplacementPolicy(analytics=analytics) - data.apply(db.session, collection, replace=policy) - - # That object was used instead of a generic Analytics object in - # both cases. - assert analytics == data.license_pool_called_with[-1] - assert analytics == pool.update_availability_called_with["analytics"] - - class TestTimestampData: def test_constructor(self): # By default, all fields are set to None diff --git a/tests/core/test_s3_analytics_provider.py b/tests/core/test_s3_analytics_provider.py index 9ed40e782a..bcb64444d1 100644 --- a/tests/core/test_s3_analytics_provider.py +++ b/tests/core/test_s3_analytics_provider.py @@ -10,13 +10,7 @@ from api.s3_analytics_provider import S3AnalyticsProvider from core.classifier import Classifier from core.config import CannotLoadConfiguration -from core.model import ( - CirculationEvent, - DataSource, - ExternalIntegration, - MediaTypes, - create, -) +from core.model import CirculationEvent, DataSource, MediaTypes if TYPE_CHECKING: from tests.fixtures.database import DatabaseTransactionFixture @@ -28,16 +22,10 @@ def __init__( self, db: DatabaseTransactionFixture, services_fixture: MockServicesFixture ) -> None: self.db = db - self.analytics_integration, _ = create( - db.session, - ExternalIntegration, - goal=ExternalIntegration.ANALYTICS_GOAL, - protocol=S3AnalyticsProvider.__module__, - ) self.services = services_fixture.services self.analytics_storage = services_fixture.storage.analytics self.analytics_provider = S3AnalyticsProvider( - self.analytics_integration, self.services, db.default_library() + services_fixture.services.storage.analytics(), ) @@ -69,9 +57,7 @@ def test_exception_is_raised_when_no_analytics_bucket_configured( s3_analytics_fixture.services.storage.analytics.override(None) provider = S3AnalyticsProvider( - s3_analytics_fixture.analytics_integration, - s3_analytics_fixture.services, - s3_analytics_fixture.db.default_library(), + s3_analytics_fixture.services.storage.analytics() ) # Act, Assert diff --git a/tests/fixtures/container.py b/tests/fixtures/container.py new file mode 100644 index 0000000000..8d30774930 --- /dev/null +++ b/tests/fixtures/container.py @@ -0,0 +1,9 @@ +import pytest + +from core.service.container import container_instance + + +@pytest.fixture(autouse=True) +def services_container_instance(): + # This creates and wires the container + return container_instance() diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index a143b67413..26f685e2d0 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -20,7 +20,6 @@ import core.lane from 
api.discovery.opds_registration import OpdsRegistrationService from api.integration.registry.discovery import DiscoveryRegistry -from core.analytics import Analytics from core.classifier import Classifier from core.config import Configuration from core.configuration.library import LibrarySettings @@ -198,9 +197,6 @@ def close(self): # other session. self._transaction.rollback() - # Reset the Analytics singleton between tests. - Analytics._reset_singleton_instance() - Configuration.SITE_CONFIGURATION_LAST_UPDATE = None Configuration.LAST_CHECKED_FOR_SITE_CONFIGURATION_UPDATE = None From 710219c0208ee76c4505ca811a08881b75f24f3c Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 6 Nov 2023 13:24:33 -0400 Subject: [PATCH 153/262] Update how we load settings from database (#1500) * Update how we load settings from database. * Make sure config data in tests validates * More test fixes * Mypy fix --- core/integration/base.py | 8 ++-- .../controller/test_library_registrations.py | 10 ++--- tests/api/test_circulationapi.py | 7 +++- tests/api/test_controller_multilib.py | 9 ++++- tests/api/test_controller_scopedsession.py | 9 ++++- tests/api/test_controller_work.py | 2 + tests/api/test_lanes.py | 4 +- tests/core/integration/test_base.py | 40 +++++++++++++++++-- tests/core/models/test_configuration.py | 16 ++++---- tests/core/models/test_library.py | 11 ----- tests/core/test_opds2_import.py | 2 +- tests/core/test_opds_import.py | 13 ++++-- tests/fixtures/database.py | 15 +++---- 13 files changed, 96 insertions(+), 50 deletions(-) diff --git a/core/integration/base.py b/core/integration/base.py index 2aea7fa127..db17e20ae6 100644 --- a/core/integration/base.py +++ b/core/integration/base.py @@ -26,9 +26,9 @@ def integration_settings_load( """ Load the settings object for an integration from the database. - These settings ARE NOT validated when loaded from the database. It is assumed that - the settings have already been validated when they were saved to the database. This - speeds up the loading of the settings from the database. + The settings are validated when loaded from the database, this is done rather + than using construct() because there are some types that need to get type converted + when round tripping from the database (such as enum) and construct() doesn't do that. :param settings_cls: The settings class that the settings should be loaded into. :param integration: The integration to load the settings from. This should be a @@ -37,7 +37,7 @@ def integration_settings_load( :return: An instance of the settings class loaded with the settings from the database. """ settings_dict = integration.settings_dict - return settings_cls.construct(**settings_dict) + return settings_cls(**settings_dict) def integration_settings_update( diff --git a/tests/api/admin/controller/test_library_registrations.py b/tests/api/admin/controller/test_library_registrations.py index 2445f58311..889b566c92 100644 --- a/tests/api/admin/controller/test_library_registrations.py +++ b/tests/api/admin/controller/test_library_registrations.py @@ -37,7 +37,7 @@ def test_discovery_service_library_registrations_get( # Here's a discovery service. discovery_service = create_integration_configuration.discovery_service( - url="http://service-url/" + url="http://service-url.com/" ) # We successfully registered this library with the service. @@ -89,7 +89,7 @@ def test_discovery_service_library_registrations_get( # registration link. 
root_catalog = dict(links=[dict(href="http://register-here/", rel="register")]) requests_mock.get( - "http://service-url/", + "http://service-url.com/", json=root_catalog, headers={"Content-Type": OpdsRegistrationService.OPDS_2_TYPE}, ) @@ -137,7 +137,7 @@ def test_discovery_service_library_registrations_get( # happened. The target of the first request is the URL to # the discovery service's main catalog. The second request # is to the "register" link found in that catalog. - assert ["service-url", "register-here"] == [ + assert ["service-url.com", "register-here"] == [ r.hostname for r in requests_mock.request_history ] @@ -174,7 +174,7 @@ def test_discovery_service_library_registrations_get( # there will be no second request. requests_mock.reset() requests_mock.get( - "http://service-url/", + "http://service-url.com/", json=REMOTE_INTEGRATION_FAILED.response[0], status_code=502, ) @@ -239,7 +239,7 @@ def test_discovery_service_library_registrations_post( # Create an IntegrationConfiguration to avoid that problem in future tests. discovery_service = create_integration_configuration.discovery_service( - url="http://register-here/" + url="http://register-here.com/" ) # We might not get a library short name. diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index 2150cefa64..de1012ff0e 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -886,10 +886,15 @@ def test_borrow_hold_limit_reached( def test_fulfill_errors(self, circulation_api: CirculationAPIFixture): # Here's an open-access title. + collection = circulation_api.db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS" + ) circulation_api.pool.open_access = True + circulation_api.pool.collection = collection + circulation_api.circulation.remotes[ circulation_api.pool.data_source.name - ] = OPDSAPI(circulation_api.db.session, circulation_api.collection) + ] = OPDSAPI(circulation_api.db.session, collection) # The patron has the title on loan. 
circulation_api.pool.loan_to(circulation_api.patron) diff --git a/tests/api/test_controller_multilib.py b/tests/api/test_controller_multilib.py index 4e725fce1c..60488f1c51 100644 --- a/tests/api/test_controller_multilib.py +++ b/tests/api/test_controller_multilib.py @@ -1,4 +1,5 @@ from core.model import Collection, ExternalIntegration, get_one_or_create +from core.opds_import import OPDSAPI from tests.fixtures.api_controller import ( CirculationControllerFixture, ControllerFixtureSetupOverrides, @@ -21,7 +22,13 @@ def make_default_collection(_db, library): name=f"{controller_fixture.db.fresh_str()} (for multi-library test)", ) collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) - collection.create_integration_configuration(ExternalIntegration.OPDS_IMPORT) + integration = collection.create_integration_configuration( + ExternalIntegration.OPDS_IMPORT + ) + settings = OPDSAPI.settings_class()( + external_account_id="http://url.com", data_source="OPDS" + ) + OPDSAPI.settings_update(integration, settings) library.collections.append(collection) return collection diff --git a/tests/api/test_controller_scopedsession.py b/tests/api/test_controller_scopedsession.py index 797a7b25f6..f5addd55f4 100644 --- a/tests/api/test_controller_scopedsession.py +++ b/tests/api/test_controller_scopedsession.py @@ -13,6 +13,7 @@ Library, create, ) +from core.opds_import import OPDSAPI from tests.fixtures.api_controller import ( ControllerFixture, ControllerFixtureSetupOverrides, @@ -60,7 +61,13 @@ def make_default_collection(self, session: Session, library): name=self.fresh_id() + " (collection for scoped session)", ) collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) - collection.create_integration_configuration(ExternalIntegration.OPDS_IMPORT) + integration = collection.create_integration_configuration( + ExternalIntegration.OPDS_IMPORT + ) + settings = OPDSAPI.settings_class()( + external_account_id="http://url.com", data_source="OPDS" + ) + OPDSAPI.settings_update(integration, settings) library.collections.append(collection) return collection diff --git a/tests/api/test_controller_work.py b/tests/api/test_controller_work.py index 092e2e3ae0..29f9dc2897 100644 --- a/tests/api/test_controller_work.py +++ b/tests/api/test_controller_work.py @@ -71,6 +71,7 @@ def work_fixture(db: DatabaseTransactionFixture): class TestWorkController: def test_contributor(self, work_fixture: WorkFixture): m = work_fixture.manager.work_controller.contributor + work_fixture.collection.data_source = None # Find a real Contributor put in the system through the setup # process. @@ -843,6 +844,7 @@ def test_related_no_works(self, work_fixture: WorkFixture): assert result == NOT_FOUND_ON_REMOTE def test_series(self, work_fixture: WorkFixture): + work_fixture.collection.data_source = None # Test the ability of the series() method to generate an OPDS # feed representing all the books in a given series, subject # to an optional language and audience restriction. 
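As an illustration of why the loader in this patch now validates on load: with pydantic-style models (the `Color` and `ExampleSettings` names below are invented for the example, not part of the codebase), `construct()` skips validation, so a value read back from the JSON `settings_dict` column stays a plain string, while normal construction converts it back to the declared type. A minimal sketch of the difference described in the `integration_settings_load` docstring earlier in this patch:

    from enum import Enum
    from pydantic import BaseModel

    class Color(Enum):
        RED = "red"

    class ExampleSettings(BaseModel):
        color: Color

    raw = {"color": "red"}  # what a JSON settings_dict round-trip gives back

    validated = ExampleSettings(**raw)          # validates: "red" -> Color.RED
    assert validated.color is Color.RED

    skipped = ExampleSettings.construct(**raw)  # no validation: stays a plain str
    assert skipped.color == "red"
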
diff --git a/tests/api/test_lanes.py b/tests/api/test_lanes.py index b4f762f886..b02782c42e 100644 --- a/tests/api/test_lanes.py +++ b/tests/api/test_lanes.py @@ -1163,7 +1163,7 @@ def test_constructor(self, db: DatabaseTransactionFixture): ] = available_now assert ( - "License source {[Unknown]} - Medium {Book} - Collection name {%s}" + "License source {OPDS} - Medium {Book} - Collection name {%s}" % db.default_collection().name == default_ebooks.display_name ) @@ -1171,7 +1171,7 @@ def test_constructor(self, db: DatabaseTransactionFixture): assert [Edition.BOOK_MEDIUM] == default_ebooks.media assert ( - "License source {[Unknown]} - Medium {Audio} - Collection name {%s}" + "License source {OPDS} - Medium {Audio} - Collection name {%s}" % db.default_collection().name == default_audio.display_name ) diff --git a/tests/core/integration/test_base.py b/tests/core/integration/test_base.py index bd4cb4ec3a..30f9cac628 100644 --- a/tests/core/integration/test_base.py +++ b/tests/core/integration/test_base.py @@ -1,11 +1,15 @@ +import datetime +from enum import Enum from functools import partial from unittest.mock import MagicMock, Mock, patch import pytest from core.integration.base import integration_settings_load, integration_settings_update +from core.integration.goals import Goals from core.integration.settings import BaseSettings from core.model import IntegrationConfiguration +from tests.fixtures.database import DatabaseTransactionFixture class BaseFixture: @@ -31,10 +35,40 @@ def base_fixture(): def test_integration_settings_load(base_fixture: BaseFixture) -> None: return_value: BaseSettings = base_fixture.load() - base_fixture.mock_settings_cls.construct.assert_called_once_with( - test="test", number=123 + base_fixture.mock_settings_cls.assert_called_once_with(test="test", number=123) + assert return_value is base_fixture.mock_settings_cls.return_value + + +def test_integration_settings_roundtrip(db: DatabaseTransactionFixture) -> None: + class TestEnum(Enum): + FOO = "foo" + BAR = "bar" + + class TestSettings(BaseSettings): + test: str + number: int + enum: TestEnum + date: datetime.date + + # Create a settings object and save it to the database + settings = TestSettings( + test="test", number=123, enum=TestEnum.FOO, date=datetime.date.today() ) - assert return_value is base_fixture.mock_settings_cls.construct.return_value + integration = db.integration_configuration(protocol="test", goal=Goals.LICENSE_GOAL) + integration_settings_update(TestSettings, integration, settings) + settings_dict = integration.settings_dict.copy() + + # Expire this object in the session, so that we can be sure that the integration data + # gets round-tripped from the database, which includes a JSON serialization step. + db.session.flush() + db.session.expire(integration) + + # Load the settings from the database and check that the settings_dict is different + # due to the JSON serialization, but that once we load the settings object, it is + # equal to the original settings object. 
+ assert integration.settings_dict != settings_dict + settings_roundtripped = integration_settings_load(TestSettings, integration) + assert settings_roundtripped == settings def test_integration_settings_update_no_merge(base_fixture: BaseFixture) -> None: diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index 4ac3f35691..351ede607e 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -3,9 +3,9 @@ from core.config import CannotLoadConfiguration, Configuration from core.model import create, get_one -from core.model.collection import Collection from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.datasource import DataSource +from core.opds_import import OPDSAPI from tests.fixtures.database import DatabaseTransactionFixture @@ -516,18 +516,20 @@ def test_data_source( # data source. collection = db.collection(protocol=ExternalIntegration.OVERDRIVE) assert collection.data_source is not None - assert DataSource.OVERDRIVE == collection.data_source.name + assert collection.data_source.name == DataSource.OVERDRIVE # For OPDS Import collections, data source is a setting which # might not be present. - assert None == db.default_collection().data_source + opds_collection = db.collection(protocol=ExternalIntegration.OPDS_IMPORT) + assert opds_collection.data_source is None # data source will be automatically created if necessary. - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{Collection.DATA_SOURCE_NAME_SETTING: "New Data Source"} + settings = OPDSAPI.settings_class()( + external_account_id="http://url.com/feed", data_source="New Data Source" ) - assert "New Data Source" == db.default_collection().data_source.name + OPDSAPI.settings_update(opds_collection.integration_configuration, settings) + assert isinstance(opds_collection.data_source, DataSource) + assert opds_collection.data_source.name == "New Data Source" # type: ignore[unreachable] def test_set_key_value_pair( self, example_externalintegration_fixture: ExampleExternalIntegrationFixture diff --git a/tests/core/models/test_library.py b/tests/core/models/test_library.py index 4d7c7f71f0..49b98e6cc0 100644 --- a/tests/core/models/test_library.py +++ b/tests/core/models/test_library.py @@ -1,7 +1,6 @@ import pytest from Crypto.PublicKey.RSA import RsaKey, import_key -from core.configuration.library import LibrarySettings from core.model.configuration import ConfigurationSetting from core.model.library import Library from tests.fixtures.database import DatabaseTransactionFixture @@ -236,16 +235,6 @@ def test_settings(self, db: DatabaseTransactionFixture): with pytest.raises(ValueError): library.settings - # We don't validate settings when loaded from the database, since - # we assume they were validated when they were set. - library.settings_dict = {} - settings = library.settings - - # This would normally not be possible, because website is - # a required property. - assert isinstance(settings, LibrarySettings) - assert not hasattr(settings, "website") - # Test with a properly formatted settings dict. 
library2 = db.library() assert library2.settings.website == "http://library.com" diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index 8a7d1c5556..dc50b228d7 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -465,7 +465,7 @@ class Opds2ApiFixture: def __init__(self, db: DatabaseTransactionFixture, mock_http: MagicMock): self.patron = db.patron() self.collection: Collection = db.collection( - protocol=ExternalIntegration.OPDS2_IMPORT + protocol=ExternalIntegration.OPDS2_IMPORT, data_source_name="test" ) self.integration = self.collection.create_external_integration( ExternalIntegration.OPDS2_IMPORT diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index e0451bc128..171b644f06 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -1368,6 +1368,9 @@ def test_update_work_for_edition_having_multiple_license_pools( def test_assert_importable_content(self, db: DatabaseTransactionFixture): session = db.session + collection = db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS" + ) class Mock(OPDSImporter): """An importer that may or may not be able to find @@ -1403,7 +1406,7 @@ class NoLinks(Mock): do_get = MagicMock() # Here, there are no links at all. - importer = NoLinks(session, db.default_collection(), do_get) + importer = NoLinks(session, collection, do_get) with pytest.raises(IntegrationException) as excinfo: importer.assert_importable_content("feed", "url") assert "No open-access links were found in the OPDS feed." in str(excinfo.value) @@ -1430,7 +1433,7 @@ class BadLinks(Mock): ), ] - bad_links_importer = BadLinks(session, db.default_collection(), do_get) + bad_links_importer = BadLinks(session, collection, do_get) with pytest.raises(IntegrationException) as excinfo: bad_links_importer.assert_importable_content( "feed", "url", max_get_attempts=2 @@ -1467,7 +1470,7 @@ def _is_open_access_link(self, url, type): return False return "this is a book" - good_link_importer = GoodLink(session, db.default_collection(), do_get) + good_link_importer = GoodLink(session, collection, do_get) result = good_link_importer.assert_importable_content( "feed", "url", max_get_attempts=5 ) @@ -2303,7 +2306,9 @@ class OPDSAPIFixture: def __init__(self, db: DatabaseTransactionFixture): self.db = db self.session = db.session - self.collection = db.collection(protocol=OPDSAPI.label()) + self.collection = db.collection( + protocol=OPDSAPI.label(), data_source_name="OPDS" + ) self.api = OPDSAPI(self.session, self.collection) self.mock_patron = MagicMock() diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 26f685e2d0..e1f3393cf0 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -165,17 +165,12 @@ def __init__( def _make_default_library(self) -> Library: """Ensure that the default library exists in the given database.""" library = self.library("default", "default") - collection, ignore = get_one_or_create( - self._session, Collection, name="Default Collection" + collection = self.collection( + "Default Collection", + protocol=ExternalIntegration.OPDS_IMPORT, + data_source_name="OPDS", ) - integration = collection.create_external_integration( - ExternalIntegration.OPDS_IMPORT - ) - integration.goal = ExternalIntegration.LICENSE_GOAL - config = collection.create_integration_configuration( - ExternalIntegration.OPDS_IMPORT - ) - config.for_library(library.id, create=True) + 
collection.integration_configuration.for_library(library.id, create=True) if collection not in library.collections: library.collections.append(collection) return library From 4763d83386dde174a0a4ea7e9fa0cc66a394c7ae Mon Sep 17 00:00:00 2001 From: dbernstein Date: Mon, 6 Nov 2023 12:48:15 -0800 Subject: [PATCH 154/262] =?UTF-8?q?Adds=20new=20type=20of=20dialect=20for?= =?UTF-8?q?=20SIP2=20servers=20that=20require=20spaces=20instead=E2=80=A6?= =?UTF-8?q?=20(#1495)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit of zeros in the time zone segment of the date time values sent via the SIP2 protocol. Resolves: https://ebce-lyrasis.atlassian.net/browse/PP-546 --- api/sip/__init__.py | 1 + api/sip/client.py | 6 ++++-- api/sip/dialect.py | 10 +++++++--- tests/api/sip/test_client.py | 14 +++++++++++--- 4 files changed, 23 insertions(+), 8 deletions(-) diff --git a/api/sip/__init__.py b/api/sip/__init__.py index 07788a2014..e42bb96b21 100644 --- a/api/sip/__init__.py +++ b/api/sip/__init__.py @@ -120,6 +120,7 @@ class SIP2Settings(BasicAuthProviderSettings): options={ Sip2Dialect.GENERIC_ILS: "Generic ILS", Sip2Dialect.AG_VERSO: "Auto-Graphics VERSO", + Sip2Dialect.FOLIO: "Folio", }, required=True, ), diff --git a/api/sip/client.py b/api/sip/client.py index 7bfd53f9f3..8a2317db77 100644 --- a/api/sip/client.py +++ b/api/sip/client.py @@ -372,7 +372,7 @@ def patron_information(self, *args, **kwargs): def end_session(self, *args, **kwargs): """Send end session message.""" - if self.dialect_config.sendEndSession: + if self.dialect_config.send_end_session: return self.make_request( self.end_session_message, self.end_session_response_parser, @@ -842,8 +842,10 @@ def parse_patron_status(cls, status_string): def now(self): """Return the current time, formatted as SIP expects it.""" + tz_spaces = self.dialect_config.tz_spaces now = utc_now() - return datetime.datetime.strftime(now, "%Y%m%d0000%H%M%S") + zzzz = " " * 4 if tz_spaces else "0" * 4 + return datetime.datetime.strftime(now, f"%Y%m%d{zzzz}%H%M%S") def summary( self, diff --git a/api/sip/dialect.py b/api/sip/dialect.py index a962a35076..bb8b5829e7 100644 --- a/api/sip/dialect.py +++ b/api/sip/dialect.py @@ -6,19 +6,23 @@ class DialectConfig: """Describe a SIP2 dialect_config.""" - sendEndSession: bool + send_end_session: bool + tz_spaces: bool class Dialect(Enum): GENERIC_ILS = "GenericILS" AG_VERSO = "AutoGraphicsVerso" + FOLIO = "TZSpaces" @property def config(self) -> DialectConfig: """Return the configuration for this dialect.""" if self == Dialect.GENERIC_ILS: - return DialectConfig(sendEndSession=True) + return DialectConfig(send_end_session=True, tz_spaces=False) elif self == Dialect.AG_VERSO: - return DialectConfig(sendEndSession=False) + return DialectConfig(send_end_session=False, tz_spaces=False) + elif self == Dialect.FOLIO: + return DialectConfig(send_end_session=True, tz_spaces=True) else: raise NotImplementedError(f"Unknown dialect: {self}") diff --git a/tests/api/sip/test_client.py b/tests/api/sip/test_client.py index aefeb00ebe..2153165249 100644 --- a/tests/api/sip/test_client.py +++ b/tests/api/sip/test_client.py @@ -708,12 +708,13 @@ def test_parse_patron_status(self): class TestClientDialects: @pytest.mark.parametrize( - "dialect,expected_read_count,expected_write_count", + "dialect,expected_read_count,expected_write_count,expected_tz_spaces", [ # Generic ILS should send end_session message - (Dialect.GENERIC_ILS, 1, 1), + (Dialect.GENERIC_ILS, 1, 1, False), # AG VERSO ILS shouldn't 
end_session message - (Dialect.AG_VERSO, 0, 0), + (Dialect.AG_VERSO, 0, 0, False), + (Dialect.FOLIO, 1, 1, True), ], ) def test_dialect( @@ -722,6 +723,7 @@ def test_dialect( dialect, expected_read_count, expected_write_count, + expected_tz_spaces, ): sip = sip_client_factory(dialect=dialect) sip.queue_response("36Y201610210000142637AO3|AA25891000331441|AF|AG") @@ -729,3 +731,9 @@ def test_dialect( assert sip.dialect_config == dialect.config assert sip.read_count == expected_read_count assert sip.write_count == expected_write_count + assert sip.dialect_config.tz_spaces == expected_tz_spaces + + # verify timestamp format aligns with the expected tz spaces dialect + ts = sip.now() + tz_element = ts[8:12] + assert tz_element == (" " if expected_tz_spaces else "0000") From 972d266ef33add9e014db02bc702099871157d0a Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Mon, 6 Nov 2023 18:45:41 -0500 Subject: [PATCH 155/262] Add a second run of `cache_marc_files`. (#1503) --- docker/services/cron/cron.d/circulation | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index 05e707dd3a..6e72199837 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -42,7 +42,7 @@ HOME=/var/www/circulation 0 0 * * 0 root core/bin/run -d 60 novelist_update >> /var/log/cron.log 2>&1 # Generate MARC files for libraries that have a MARC exporter configured. -0 3 * * * root core/bin/run cache_marc_files >> /var/log/cron.log 2>&1 +0 3,11 * * * root core/bin/run cache_marc_files >> /var/log/cron.log 2>&1 # The remaining scripts keep the circulation manager in sync with # specific types of collections. From 7c2d0eeba9c3e0aee1589be41233ed868b39f402 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 7 Nov 2023 09:42:36 -0400 Subject: [PATCH 156/262] Enforce SIP2 timeout across calls to read on the socket (PP-659) (#1502) * Enforce SIP2 timeout across calls to read on the socket. * Add an additional test. * Add some logging around how long messages take. --- api/sip/client.py | 18 +++++++++++++----- tests/api/sip/test_client.py | 33 ++++++++++++++++++++++++++++++--- 2 files changed, 43 insertions(+), 8 deletions(-) diff --git a/api/sip/client.py b/api/sip/client.py index 8a2317db77..d9aed2c7ad 100644 --- a/api/sip/client.py +++ b/api/sip/client.py @@ -31,6 +31,7 @@ import socket import ssl import tempfile +import time from enum import Enum from typing import Callable, Optional @@ -38,6 +39,7 @@ from api.sip.dialect import Dialect from core.util.datetime_helpers import utc_now +from core.util.log import LoggerMixin # SIP2 defines a large number of fields which are used in request and # response messages. This library focuses on defining the response @@ -222,9 +224,7 @@ class Constants: TERMINATOR_CHAR = "\r" -class SIPClient(Constants): - log = client_logger - +class SIPClient(Constants, LoggerMixin): # Maximum retries of a SIP message before failing. MAXIMUM_RETRIES = 5 # Timeout in seconds @@ -878,7 +878,7 @@ def summary( if summary.count("Y") > 1: # This violates the spec but in my tests it seemed to # work, so we'll allow it. - self.log.warn( + self.log.warning( "Summary requested too many kinds of detailed information: %s" % summary ) return summary @@ -893,16 +893,22 @@ def do_send(self, data): This method exists only to be subclassed by MockSIPClient. 
""" - self.connection.send(data) + start_time = time.time() + self.connection.sendall(data) + time_taken = time.time() - start_time + self.log.info("Sent %s bytes in %.2f seconds", len(data), time_taken) def read_message(self, max_size=1024 * 1024): """Read a SIP2 message from the socket connection. A SIP2 message ends with a \\r character. """ + start_time = time.time() done = False data = b"" while not done: + if time.time() - start_time > self.TIMEOUT: + raise OSError("Timeout reading from socket.") tmp = self.connection.recv(4096) data = data + tmp if not tmp: @@ -911,6 +917,8 @@ def read_message(self, max_size=1024 * 1024): done = True if len(data) > max_size: raise OSError("SIP2 response too large.") + time_taken = time.time() - start_time + self.log.info("Received %s bytes in %.2f seconds", len(data), time_taken) return data def append_checksum(self, text, include_sequence_number=True): diff --git a/tests/api/sip/test_client.py b/tests/api/sip/test_client.py index 2153165249..309c22d6d0 100644 --- a/tests/api/sip/test_client.py +++ b/tests/api/sip/test_client.py @@ -4,7 +4,7 @@ import tempfile from functools import partial from typing import Callable, List, Optional -from unittest.mock import MagicMock, Mock +from unittest.mock import MagicMock, Mock, patch import pytest from _pytest.monkeypatch import MonkeyPatch @@ -39,16 +39,20 @@ def recv(self, size): self.data = self.data[size:] return block + def sendall(self, data): + self.data += data + class MockSocketFixture: def __init__(self, monkeypatch: MonkeyPatch): self.monkeypatch = monkeypatch + self.mock = MockSocket() + # Patch the socket method so that we don't create a real network socket. + self.monkeypatch.setattr("socket.socket", lambda x, y: self.mock) @pytest.fixture(scope="function") def mock_socket(monkeypatch: MonkeyPatch) -> MockSocketFixture: - # Patch the socket method so that we don't create a real network socket. - monkeypatch.setattr("socket.socket", lambda x, y: MockSocket()) return MockSocketFixture(monkeypatch) @@ -304,6 +308,21 @@ def create_context(protocol): wrap_called = self.context_with_verification.wrap_socket.call_args assert wrap_called.kwargs["server_hostname"] == target_server + def test_send(self, mock_socket: MockSocketFixture): + target_server = object() + sip = SIPClient(target_server, 999) + sip.connect() + + mock_socket.mock.sendall = MagicMock() + + # Send a message and make sure it's queued up. + sip.send("abcd") + + # Make sure we called sendall on the socket. + mock_socket.mock.sendall.assert_called_once_with( + ("abcd" + SIPClient.TERMINATOR_CHAR).encode(SIPClient.DEFAULT_ENCODING) + ) + def test_read_message(self): target_server = object() sip = SIPClient(target_server, 999) @@ -338,6 +357,14 @@ def test_read_message(self): conn.queue_data("no newline") with pytest.raises(IOError, match="No data read from socket."): sip.read_message() + + # IOError if we exceed the timeout, even if we're in the + # middle of reading a message. + with patch("api.sip.client.time") as mock_time: + mock_time.time.side_effect = [0, 10] + with pytest.raises(IOError, match="Timeout reading from socket."): + sip.read_message() + finally: # Un-mock the socket.socket function socket.socket = old_socket From 402a2c501c8cf6bb812dae05b2f760dc93e975cb Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 8 Nov 2023 10:24:09 -0400 Subject: [PATCH 157/262] Type hint base64 & worker pools (#1501) * Type hint base64 * Type hint worker pools. * Add some more tests. * Code review feedback. 
--- api/bibliotheca.py | 2 +- api/odl.py | 2 +- api/opds_for_distributors.py | 2 +- api/overdrive.py | 2 +- core/opds_import.py | 2 +- core/util/base64.py | 65 ++++++++++++++++ core/util/string_helpers.py | 66 +--------------- core/util/worker_pools.py | 103 ++++++++++++++----------- pyproject.toml | 3 + tests/api/saml/test_auth.py | 2 +- tests/core/util/test_base64.py | 85 ++++++++++++++++++++ tests/core/util/test_string_helpers.py | 70 +---------------- tests/core/util/test_worker_pools.py | 10 ++- tests/fixtures/api_controller.py | 2 +- 14 files changed, 231 insertions(+), 185 deletions(-) create mode 100644 core/util/base64.py create mode 100644 tests/core/util/test_base64.py diff --git a/api/bibliotheca.py b/api/bibliotheca.py index 984c7f39d3..fc11c706b9 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -76,9 +76,9 @@ from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor from core.scripts import RunCollectionMonitorScript from core.service.container import Services +from core.util import base64 from core.util.datetime_helpers import datetime_utc, strptime_utc, to_utc, utc_now from core.util.http import HTTP -from core.util.string_helpers import base64 from core.util.xmlparser import XMLParser, XMLProcessor diff --git a/api/odl.py b/api/odl.py index 36dd99ed44..22107dca86 100644 --- a/api/odl.py +++ b/api/odl.py @@ -68,9 +68,9 @@ OPDSXMLParser, ) from core.service.container import Services +from core.util import base64 from core.util.datetime_helpers import to_utc, utc_now from core.util.http import HTTP, BadResponseException -from core.util.string_helpers import base64 class ODLAPIConstants: diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 0bf4848ec8..04fcaea61a 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -30,9 +30,9 @@ get_one, ) from core.opds_import import OPDSImporter, OPDSImporterSettings, OPDSImportMonitor +from core.util import base64 from core.util.datetime_helpers import utc_now from core.util.http import HTTP -from core.util.string_helpers import base64 if TYPE_CHECKING: from requests import Response diff --git a/api/overdrive.py b/api/overdrive.py index aa06ae8879..36959e960d 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -82,10 +82,10 @@ from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor from core.scripts import InputScript, Script from core.service.container import Services +from core.util import base64 from core.util.datetime_helpers import strptime_utc, utc_now from core.util.http import HTTP, BadResponseException from core.util.log import LoggerMixin -from core.util.string_helpers import base64 class OverdriveConstants: diff --git a/core/opds_import.py b/core/opds_import.py index b85098398a..5274a6ef26 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -87,11 +87,11 @@ SAMLWAYFlessSetttings, ) from core.selftest import SelfTestResult +from core.util import base64 from core.util.datetime_helpers import datetime_utc, to_utc, utc_now from core.util.http import HTTP, BadResponseException from core.util.log import LoggerMixin from core.util.opds_writer import OPDSFeed, OPDSMessage -from core.util.string_helpers import base64 from core.util.xmlparser import XMLParser if TYPE_CHECKING: diff --git a/core/util/base64.py b/core/util/base64.py new file mode 100644 index 0000000000..3de2b6e6f4 --- /dev/null +++ b/core/util/base64.py @@ -0,0 +1,65 @@ +from __future__ import annotations + +import base64 as stdlib_base64 
+import sys +from functools import wraps +from typing import Callable, TypeVar + +# TODO: Remove this when we drop support for Python 3.9 +if sys.version_info >= (3, 10): + from typing import Concatenate, ParamSpec +else: + from typing_extensions import Concatenate, ParamSpec + +P = ParamSpec("P") +T = TypeVar("T") + + +_ENCODING = "utf8" + + +def _ensure_bytes(s: str | bytes, encoding: str) -> bytes: + if isinstance(s, bytes): + return s + return s.encode(encoding) + + +def _ensure_string(s: str | bytes, encoding: str) -> str: + if isinstance(s, bytes): + return s.decode(encoding) + return s + + +def _wrap_func_bytes_string( + func: Callable[Concatenate[bytes, P], bytes | str], encoding: str +) -> Callable[Concatenate[str | bytes, P], str]: + """ + Wrap a function, ensuring that the first input parameter is + a bytes object, encoding it if necessary and that the returned + object is a string, decoding if necessary. + """ + + @wraps(func) + def wrapped(s: str | bytes, /, *args: P.args, **kwargs: P.kwargs) -> str: + s = _ensure_bytes(s, encoding) + value = func(s, *args, **kwargs) + return _ensure_string(value, encoding) + + return wrapped + + +b64encode = _wrap_func_bytes_string(stdlib_base64.b64encode, _ENCODING) +b64decode = _wrap_func_bytes_string(stdlib_base64.b64decode, _ENCODING) +standard_b64encode = _wrap_func_bytes_string( + stdlib_base64.standard_b64encode, _ENCODING +) +standard_b64decode = _wrap_func_bytes_string( + stdlib_base64.standard_b64decode, _ENCODING +) +urlsafe_b64encode = _wrap_func_bytes_string(stdlib_base64.urlsafe_b64encode, _ENCODING) +urlsafe_b64decode = _wrap_func_bytes_string(stdlib_base64.urlsafe_b64decode, _ENCODING) + +# encodestring and decodestring are deprecated in base64 +# and we should use these instead: +encodebytes = _wrap_func_bytes_string(stdlib_base64.encodebytes, _ENCODING) +decodebytes = _wrap_func_bytes_string(stdlib_base64.decodebytes, _ENCODING) diff --git a/core/util/string_helpers.py b/core/util/string_helpers.py index 2348ec202d..3ef012f248 100644 --- a/core/util/string_helpers.py +++ b/core/util/string_helpers.py @@ -1,75 +1,11 @@ # Helper functions and objects regarding strings -- especially stuff # that lets us negotiate the distinction between Unicode and # bytestrings. - -import base64 as stdlib_base64 import binascii import os -def wrap_func_bytes_unicode(func): - """ - Wrap a function, ensuring that the first input parameter is - a bytes object, encoding it if necessary and that the returned - object is a string, decoding if necessary. - """ - - def wrapped(self, s, *args, **kwargs): - s = self._ensure_bytes(s) - value = func(s, *args, **kwargs) - return self._ensure_unicode(value) - - return wrapped - - -class UnicodeAwareBase64: - """Simulate the interface of the base64 module, but make it look as - though base64-encoding and -decoding works on Unicode strings. - - Behind the scenes, Unicode strings are encoded to a particular - encoding, then base64-encoded or -decoded, then decoded from that - encoding. - - Since we get Unicode strings out of the database, this lets us - base64-encode and -decode strings based on those strings, without - worrying about encoding to bytes and then decoding. 
- """ - - def __init__(self, encoding): - self.encoding = encoding - - def _ensure_bytes(self, s): - if isinstance(s, bytes): - return s - return s.encode(self.encoding) - - def _ensure_unicode(self, s): - if isinstance(s, bytes): - return s.decode(self.encoding) - return s - - # Wrap most of the base64 module API so that Unicode is handled - # transparently. - b64encode = wrap_func_bytes_unicode(stdlib_base64.b64encode) - b64decode = wrap_func_bytes_unicode(stdlib_base64.b64decode) - standard_b64encode = wrap_func_bytes_unicode(stdlib_base64.standard_b64encode) - standard_b64decode = wrap_func_bytes_unicode(stdlib_base64.standard_b64decode) - urlsafe_b64encode = wrap_func_bytes_unicode(stdlib_base64.urlsafe_b64encode) - urlsafe_b64decode = wrap_func_bytes_unicode(stdlib_base64.urlsafe_b64decode) - - # encodestring and decodestring are deprecated in base64 - # and we should use these instead: - encodebytes = wrap_func_bytes_unicode(stdlib_base64.encodebytes) - decodebytes = wrap_func_bytes_unicode(stdlib_base64.decodebytes) - - -# If you're okay with a Unicode strings being converted to/from UTF-8 -# when you try to encode/decode them, you can use this object instead of -# the standard 'base64' module. -base64 = UnicodeAwareBase64("utf8") - - -def random_string(size): +def random_string(size: int) -> str: """Generate a random string of binary, encoded as hex digits. :param: Size of binary string in bytes. diff --git a/core/util/worker_pools.py b/core/util/worker_pools.py index 7d02b57a43..97ad1fe2b2 100644 --- a/core/util/worker_pools.py +++ b/core/util/worker_pools.py @@ -1,6 +1,19 @@ -import logging +from __future__ import annotations + +import sys from queue import Queue from threading import Thread +from types import TracebackType +from typing import Any, Callable, Literal, Optional, Type + +from sqlalchemy.orm import Session + +from core.util.log import LoggerMixin + +if sys.version_info >= (3, 11): + from typing import Self +else: + from typing_extensions import Self # Much of the work in this file is based on # https://github.com/shazow/workerpool, with @@ -10,24 +23,19 @@ # (or instead of) multithreading. 
-class Worker(Thread): +class Worker(Thread, LoggerMixin): """A Thread that performs jobs""" @classmethod - def factory(cls, worker_pool): + def factory(cls, worker_pool: Pool) -> Self: return cls(worker_pool) - def __init__(self, jobs): + def __init__(self, jobs: Pool): super().__init__() self.daemon = True self.jobs = jobs - self._log = logging.getLogger(self.name) - - @property - def log(self): - return self._log - def run(self): + def run(self) -> None: while True: try: self.do_job() @@ -37,7 +45,7 @@ def run(self): finally: self.jobs.task_done() - def do_job(self, *args, **kwargs): + def do_job(self, *args: Any, **kwargs: Any) -> None: job = self.jobs.get() if callable(job): job(*args, **kwargs) @@ -52,24 +60,24 @@ class DatabaseWorker(Worker): """A worker Thread that performs jobs with a database session""" @classmethod - def factory(cls, worker_pool, _db): + def factory(cls, worker_pool: Pool, _db: Session) -> Self: # type: ignore[override] return cls(worker_pool, _db) - def __init__(self, jobs, _db): + def __init__(self, jobs: Pool, _db: Session): super().__init__(jobs) self._db = _db - def do_job(self): + def do_job(self) -> None: super().do_job(self._db) -class Pool: +class Pool(LoggerMixin): """A pool of Worker threads and a job queue to keep them busy.""" - log = logging.getLogger(__name__) - - def __init__(self, size, worker_factory=None): - self.jobs = Queue() + def __init__( + self, size: int, worker_factory: Callable[..., Worker] | None = None + ) -> None: + self.jobs: Queue[Job] = Queue() self.size = size self.workers = list() @@ -85,18 +93,18 @@ def __init__(self, size, worker_factory=None): w.start() @property - def success_rate(self): + def success_rate(self) -> float: if self.job_total <= 0 or self.error_count <= 0: return float(1) return self.error_count / float(self.job_total) - def create_worker(self): + def create_worker(self) -> Worker: return self.worker_factory(self) - def inc_error(self): + def inc_error(self) -> None: self.error_count += 1 - def restart(self): + def restart(self) -> Self: for w in self.workers: if not w.is_alive(): w.start() @@ -104,24 +112,29 @@ def restart(self): __enter__ = restart - def __exit__(self, type, value, traceback): + def __exit__( + self, + type: Optional[Type[BaseException]], + value: Optional[BaseException], + traceback: Optional[TracebackType], + ) -> Literal[False]: self.join() - if type: - self.log.error("Error with %r: %r", self, value, exc_info=traceback) + if value is not None: + self.log.error("Error with %r: %r", self, value, exc_info=value) raise value - return + return False - def get(self): + def get(self) -> Job: return self.jobs.get() - def put(self, job): + def put(self, job: Job) -> None: self.job_total += 1 return self.jobs.put(job) - def task_done(self): + def task_done(self) -> None: return self.jobs.task_done() - def join(self): + def join(self) -> None: self.jobs.join() self.log.info( "%d/%d job errors occurred. 
%.2f%% success rate.", @@ -134,13 +147,20 @@ def join(self): class DatabasePool(Pool): """A pool of DatabaseWorker threads and a job queue to keep them busy.""" - def __init__(self, size, session_factory, worker_factory=None): + def __init__( + self, + size: int, + session_factory: Callable[[], Session], + worker_factory: Callable[..., DatabaseWorker] | None = None, + ): self.session_factory = session_factory - self.worker_factory = worker_factory or DatabaseWorker.factory + self.worker_factory: Callable[..., DatabaseWorker] = ( + worker_factory or DatabaseWorker.factory + ) super().__init__(size, worker_factory=self.worker_factory) - def create_worker(self): + def create_worker(self) -> DatabaseWorker: worker_session = self.session_factory() return self.worker_factory(self, worker_session) @@ -150,17 +170,17 @@ class Job: For use with Worker. """ - def rollback(self, *args, **kwargs): + def rollback(self, *args: Any, **kwargs: Any) -> None: """Cleans up the task if it errors""" - def finalize(self, *args, **kwargs): + def finalize(self, *args: Any, **kwargs: Any) -> None: """Finalizes the task if it is successful""" - def do_run(self): + def do_run(self, *args: Any, **kwargs: Any) -> None: """Does the work""" raise NotImplementedError() - def run(self, *args, **kwargs): + def run(self, *args: Any, **kwargs: Any) -> None: try: self.do_run(*args, **kwargs) except Exception: @@ -171,11 +191,8 @@ def run(self, *args, **kwargs): class DatabaseJob(Job): - def rollback(self, _db): + def rollback(self, _db: Session) -> None: _db.rollback() - def finalize(self, _db): + def finalize(self, _db: Session) -> None: _db.commit() - - def do_run(self): - raise NotImplementedError() diff --git a/pyproject.toml b/pyproject.toml index 02490c5d11..ffc7598b6e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -97,9 +97,12 @@ module = [ "core.service.*", "core.settings.*", "core.util.authentication_for_opds", + "core.util.base64", "core.util.cache", "core.util.notifications", "core.util.problem_detail", + "core.util.string_helpers", + "core.util.worker_pools", "core.util.xmlparser", "tests.fixtures.authenticator", "tests.migration.*", diff --git a/tests/api/saml/test_auth.py b/tests/api/saml/test_auth.py index 928a565743..25a71779ef 100644 --- a/tests/api/saml/test_auth.py +++ b/tests/api/saml/test_auth.py @@ -34,8 +34,8 @@ from api.saml.metadata.parser import SAMLSubjectParser from core.python_expression_dsl.evaluator import DSLEvaluationVisitor, DSLEvaluator from core.python_expression_dsl.parser import DSLParser +from core.util import base64 from core.util.datetime_helpers import datetime_utc -from core.util.string_helpers import base64 from tests.api.saml import saml_strings from tests.fixtures.api_controller import ControllerFixture diff --git a/tests/core/util/test_base64.py b/tests/core/util/test_base64.py new file mode 100644 index 0000000000..d976d8e390 --- /dev/null +++ b/tests/core/util/test_base64.py @@ -0,0 +1,85 @@ +import base64 as stdlib_base64 + +import pytest + +from core.util import base64 + + +@pytest.mark.parametrize( + "encode, decode", + [ + ("b64encode", "b64decode"), + ("standard_b64encode", "standard_b64decode"), + ("urlsafe_b64encode", "urlsafe_b64decode"), + ("encodebytes", "decodebytes"), + ], +) +def test_encoding(encode: str, decode: str) -> None: + string = "םולש" + + encoded_bytes = string.encode("utf8") + encode_method = getattr(base64, encode) + decode_method = getattr(base64, decode) + + # Test a round-trip. 
Base64-encoding a string and
+    # then decoding it should give the original string.
+    encoded = encode_method(string)
+    decoded = decode_method(encoded)
+    assert string == decoded
+
+    # Test encoding on its own. Encoding with our wrapped base64 functions and then
+    # converting to a UTF-8 encoded byte string should give the same result as running
+    # the binary representation of the string through the default base64 module.
+    base_encode = getattr(stdlib_base64, encode)
+    base_encoded = base_encode(encoded_bytes)
+    assert base_encoded == encoded.encode("utf8")
+
+    # If you pass a bytes object to a wrapped base64 method, it's no problem.
+    # You still get a string back.
+    assert encoded == encode_method(encoded_bytes)
+    assert decoded == decode_method(base_encoded)
+
+
+@pytest.mark.parametrize(
+    "func",
+    [
+        "b64encode",
+        "b64decode",
+        "standard_b64encode",
+        "standard_b64decode",
+        "urlsafe_b64encode",
+        "urlsafe_b64decode",
+        "encodebytes",
+        "decodebytes",
+    ],
+)
+def test_base64_wraps_stdlib(func):
+    original_func = getattr(stdlib_base64, func)
+    wrapped_func = getattr(base64, func)
+    assert original_func is not wrapped_func
+    assert original_func is wrapped_func.__wrapped__
+
+
+def test__wrap_func_bytes_string() -> None:
+    # Test that the input is always encoded to bytes and the output is always decoded to a string.
+    func_called_with = None
+
+    def func1(s: bytes) -> bytes:
+        nonlocal func_called_with
+        func_called_with = s
+        return s
+
+    wrapped = base64._wrap_func_bytes_string(func1, "utf8")
+    assert wrapped("abc") == "abc"
+    assert func_called_with == b"abc"
+    assert wrapped(b"abc") == "abc"
+    assert func_called_with == b"abc"
+
+    # Test that we can wrap a function that returns a string.
+    def func2(s: bytes) -> str:
+        nonlocal func_called_with
+        func_called_with = s
+        return s.decode("utf8")
+
+    wrapped = base64._wrap_func_bytes_string(func2, "utf8")
+    assert wrapped("abc") == "abc"
diff --git a/tests/core/util/test_string_helpers.py b/tests/core/util/test_string_helpers.py
index 2b4f652c76..584d66a729 100644
--- a/tests/core/util/test_string_helpers.py
+++ b/tests/core/util/test_string_helpers.py
@@ -1,76 +1,8 @@
 # Test the helper objects in util.string.
-import base64 as stdlib_base64
 import re

-import pytest
-
-from core.util.string_helpers import UnicodeAwareBase64, base64, random_string
-
-
-class TestUnicodeAwareBase64:
-    def test_encoding(self):
-        string = "םולש"
-
-        # Run the same tests against two different encodings that can
-        # handle Hebrew characters.
-        self._test_encoder(string, UnicodeAwareBase64("utf8"))
-        self._test_encoder(string, UnicodeAwareBase64("iso-8859-8"))
-
-        # If UnicodeAwareBase64 is given a string it can't encode in
-        # its chosen encoding, an exception is the result.
-        shift_jis = UnicodeAwareBase64("shift-jis")
-        pytest.raises(UnicodeEncodeError, shift_jis.b64encode, string)
-
-    def _test_encoder(self, string, base64):
-        # Create a binary version of the string in the encoder's
-        # encoding, for use in comparisons.
-        binary = string.encode(base64.encoding)
-
-        # Test all supported methods of the base64 API.
-        for encode, decode in [
-            ("b64encode", "b64decode"),
-            ("standard_b64encode", "standard_b64decode"),
-            ("urlsafe_b64encode", "urlsafe_b64decode"),
-            ("encodebytes", "decodebytes"),
-        ]:
-            encode_method = getattr(base64, encode)
-            decode_method = getattr(base64, decode)
-
-            # Test a round-trip. Base64-encoding a Unicode string and
-            # then decoding it should give the original string.
- encoded = encode_method(string) - decoded = decode_method(encoded) - assert string == decoded - - # Test encoding on its own. Encoding with a - # UnicodeAwareBase64 and then converting to ASCII should - # give the same result as running the binary - # representation of the string through the default bas64 - # module. - base_encode = getattr(stdlib_base64, encode) - base_encoded = base_encode(binary) - assert base_encoded == encoded.encode("ascii") - - # If you pass in a bytes object to a UnicodeAwareBase64 - # method, it's no problem. You get a Unicode string back. - assert encoded == encode_method(binary) - assert decoded == decode_method(base_encoded) - - def test_default_is_base64(self): - # If you import "base64" from util.string, you get a - # UnicodeAwareBase64 object that encodes as UTF-8 by default. - assert isinstance(base64, UnicodeAwareBase64) - assert "utf8" == base64.encoding - snowman = "☃" - snowman_utf8 = snowman.encode("utf8") - as_base64 = base64.b64encode(snowman) - assert "4piD" == as_base64 - - # This is a Unicode representation of the string you'd get if - # you encoded the snowman as UTF-8, then used the standard - # library to base64-encode the bytestring. - assert b"4piD" == stdlib_base64.b64encode(snowman_utf8) +from core.util.string_helpers import random_string class TestRandomString: diff --git a/tests/core/util/test_worker_pools.py b/tests/core/util/test_worker_pools.py index 5c7740c81e..d1f85ad02e 100644 --- a/tests/core/util/test_worker_pools.py +++ b/tests/core/util/test_worker_pools.py @@ -1,7 +1,14 @@ import threading +from queue import Queue from core.model import Identifier, SessionManager -from core.util.worker_pools import DatabaseJob, DatabasePool, Pool, Queue, Worker +from core.util.worker_pools import ( + DatabaseJob, + DatabasePool, + DatabaseWorker, + Pool, + Worker, +) from tests.fixtures.database import DatabaseTransactionFixture @@ -73,6 +80,7 @@ def test_workers_are_created_with_sessions(self, db: DatabaseTransactionFixture) pool = DatabasePool(2, session_factory) try: for worker in pool.workers: + assert isinstance(worker, DatabaseWorker) assert worker._db assert bind == worker._db.connection() finally: diff --git a/tests/fixtures/api_controller.py b/tests/fixtures/api_controller.py index 43c5779fe2..cffdf7175a 100644 --- a/tests/fixtures/api_controller.py +++ b/tests/fixtures/api_controller.py @@ -35,7 +35,7 @@ IntegrationConfiguration, IntegrationLibraryConfiguration, ) -from core.util.string_helpers import base64 +from core.util import base64 from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.database import DatabaseTransactionFixture From 4c3af5076d853fe208abc04dcd87887086041940 Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 9 Nov 2023 10:56:04 +0530 Subject: [PATCH 158/262] Install the tzdata package so the TZ env var is respected (#1504) --- docker/Dockerfile.baseimage | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/Dockerfile.baseimage b/docker/Dockerfile.baseimage index 2e8423db01..ed4163a62e 100644 --- a/docker/Dockerfile.baseimage +++ b/docker/Dockerfile.baseimage @@ -36,6 +36,7 @@ RUN install_clean \ # needed for xmlsec libxmlsec1-dev \ libxmlsec1-openssl \ + tzdata \ pkg-config && \ curl -sSL https://install.python-poetry.org | POETRY_HOME="/opt/poetry" python3 - --yes --version $POETRY_VERSION && \ ln -s /opt/poetry/bin/poetry /usr/local/bin/poetry && \ From 34641cc905f0b49d2e9f54dc272894ace40a34a0 Mon Sep 17 00:00:00 2001 From: 
"dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 9 Nov 2023 13:06:29 +0000 Subject: [PATCH 159/262] Bump pyinstrument from 4.6.0 to 4.6.1 (#1506) --- poetry.lock | 122 ++++++++++++++++++++++++++-------------------------- 1 file changed, 61 insertions(+), 61 deletions(-) diff --git a/poetry.lock b/poetry.lock index e56b424f42..d988046fd2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3007,71 +3007,71 @@ files = [ [[package]] name = "pyinstrument" -version = "4.6.0" +version = "4.6.1" description = "Call stack profiler for Python. Shows you why your code is slow!" optional = false python-versions = ">=3.7" files = [ - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:679b5397e3e6c0d6f56df50ba8c683543df4f1f7c1df2e2eb728e275bde2c85b"}, - {file = "pyinstrument-4.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:18479ffa0c922695ba2befab29521b62bfe75debef48d818cea46262cee48a1e"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:daba103955d0d0b37b8bc20a4e8cc6477e839ce5984478fcf3f7cee8318e9636"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d93451e9c7650629b0bc12caa7390f81d1a15835c07f7dc170e953d4684ed1e7"}, - {file = "pyinstrument-4.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01009a7b58a6f11bf5560c23848ea2881acac974b0841fe5d365ef154baabd6f"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:288ea44da6333dacc77b4ba2149dba3dc1e9fbbebd3d5dc51a66c20839d80ef3"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecc106213146dd90659a1483047b3a1c2e174fb190c0e109234e524a4651e377"}, - {file = "pyinstrument-4.6.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:5cd8ab30c8dcd1511e9b3b98f601f17f2c5c9df1d28f8298d215c63d68919bdc"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win32.whl", hash = "sha256:40e3656e6ace5a140880bd980a25f6a356c094c36e28ed1bf935d7349a78b1b6"}, - {file = "pyinstrument-4.6.0-cp310-cp310-win_amd64.whl", hash = "sha256:d9623fc3fde47ae90ad5014737e37034b4abc3fbfb455b7b56cc095f9037d5af"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:beaaa3b647b3a4cbd34b71eacaa31e3eb90e1bf53e15ada3ac7e9df09d737239"}, - {file = "pyinstrument-4.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0c69ab570609ac93b5f4ab2e5ccbf8add4f69a962b06307eea66ba65b5ad9d38"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5992748a74ec7ff445e4b56b5e316673c34b6cdbd3755111f7c023d8a141f001"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bb1ba76c4e912cae159ab9729c7b31bb6d7fe8ed1f0fafce74484a4bb159c240"}, - {file = "pyinstrument-4.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:674868ebc3663b01d7d059a6f5cdeff6f18b49e217617720a5d645a6b55ead03"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:514a0ced357ff400988f599b0294d05e3b68468f9ab876f204bf12765f7fdb1b"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:8ccd1f5b4ad35c734dcf2d08d80b5b37205b4e84aa71fe76f95e43bd30c5eef9"}, - {file = "pyinstrument-4.6.0-cp311-cp311-musllinux_1_1_x86_64.whl", 
hash = "sha256:611c6cd33f42f19e46d99eeef3b84a47d33fe34cdb0ce6e3635d2ee5038706a3"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win32.whl", hash = "sha256:d20b5cf79bca1b3d425a7362457621741393b1d5ce2d920583541b947bc8a368"}, - {file = "pyinstrument-4.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:ecd8cf03b04dc1b7f151896228993c6aa0fa897cdd517ea127465bc1c826c5b5"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:3d4bed520c0f689a75bca4951f6b7fbad96851e8461086c98e03eb726f8a412a"}, - {file = "pyinstrument-4.6.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b74745f1d22133da8d4a38dd0c78c02c00154a5b7683bdd5df56a7c7705a979b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b6ab698400e8401597e39c4816efa247f2b98c9b4e59e3ec25d534ae6887bd93"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:de1a36a083b324dafe5e2880e5e04267a1983beb027f12c3dc361ddbe3acf9af"}, - {file = "pyinstrument-4.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8adc4f87d4289c1f04f19451b5133b8e307bd9b08c364c48e007ba663fefbf1b"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:08fbc75d3615be6259b7af0c173c7bc48acb6e7bd758678d54eb411ba2903052"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:d86fea6ce117bcff642e24208eb573c00d78b4c2934eb9bd5f915751980cc9bd"}, - {file = "pyinstrument-4.6.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23a3b21373e0c8bf0d00dda79989fcab0bb1d30094f7b210d40d2226fe20e141"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win32.whl", hash = "sha256:a498c82d93621c5cf736e4660142ac0c3bbcb7b059bcbd4278a6364037128656"}, - {file = "pyinstrument-4.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:9116154446b9999f6524e9db29310aee6476a5a471c276928f2b46b6655a2dcc"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:704c6d38abef8fca2e1085756c9574ea180f7ac866aab6943b483152c2828c2a"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbebdc11d4fc6f3123c046d84db88c7f605d53247e3f357314d0c5775d1beaf4"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c7a7bae4cce5f8d084153857cedbce29ca8274c9924884d0461a5db48619c5d"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03289b10715e261a5c33b267d0a430d1b408f929922fde0a9fd311835c60351b"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7f83544ff9abfacdf64b39498ca3dcd454956e44aedb5f67626b7212291c9160"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:40640f02fe7865540e8a1e51bf7f9d2403e3364c3b7edfdb9dae5eb5596811da"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f3719464888d7303e1081996bc56ab75ef5cdf7ef69ccbb7b29f48eb37d8f8b9"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win32.whl", hash = "sha256:46e16de6bd3b74ef01b6457d862fee751515315edb5e9283205e45299a29ac49"}, - {file = "pyinstrument-4.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:9ded87ae11cb0a95a767c817908833ec0821fe0e81650968b201a031edf4bc15"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_universal2.whl", hash = 
"sha256:8bf16e459a868d9dbaacff4f0a0acd6ad78ce36f2aceabf21e9fd0c3b6aca0d4"}, - {file = "pyinstrument-4.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb83e445795431c3d867b298c0583ee27717bbc50e5120a4c98575c979ab3ab8"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29072b1be183e173d7b0f12caf29f8717d273afbf34df950f5fa0d98127cd3fb"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09502af2a383c59e5a0d3bebfab7e5845f79122348358e9e52b2b0187db84a44"}, - {file = "pyinstrument-4.6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a23c982eb9c4d2f8fe553dacb9bdc0991170a0998b94c84f75c2a052e8af4c74"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f7a38ef482f2151393e729c5582191e4ab05f0ed1fa56b16c2377ff3129107af"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e983e16c2fdfb752387133380859c3414e119e41c14f39f5f869f29dcf6e995c"}, - {file = "pyinstrument-4.6.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d00c87e5cea48a562d67f0436999463b7989cff2e4c196b0e8ba06d515f191a9"}, - {file = "pyinstrument-4.6.0-cp38-cp38-win32.whl", hash = "sha256:a24c95cabf2ca5d79b62dbc8ff17749768b8aafd777841352f59f4ffd6688782"}, - {file = "pyinstrument-4.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:f3d88b66dbbcdc6e4c57bd8574ad9d096cd23285eee0f4a5cf74f0e0df6aa190"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2bcfec45cdbb9edf6d5853debac4a792de589e621be07a71dc76acb36e144a3a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e790515a22844bbccaa388c7715b037c45a8d0155c4a6f2990659998a8920501"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93a30e0d93633a28d4adcf7d7e2d158d6331809b95c2c4a155da17ea1e43eaa3"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa554eb8ef1c54849dbf480965b073f39b39b517e466ce241808a00398f9742a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e289898c644cbbb61d931bbcb6505e2a279ad1122612c9098bfb0958ebf5764"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:20ce0f1612a019888a6b94fa7f1e7862842f0b5219282e3354d5b35aceb363f6"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4935f3cdb9062fceac65c50de76f07e05cf630bd3a9c663fedc9e88b5efe7d7c"}, - {file = "pyinstrument-4.6.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dc9c4577ef4b06ae1592c920d0a4f0f0db587a16f530c629ad93e125bc79ebb7"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win32.whl", hash = "sha256:3ec6b04d8cfb34aec48de7fa77aeb919e8e7e19909740ab7a5553339f6f4c53a"}, - {file = "pyinstrument-4.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a6d2e5c15f989629fac41536ec2ca1fe81359fadf4dadf2ff24fe96b389f6df"}, - {file = "pyinstrument-4.6.0.tar.gz", hash = "sha256:3e509e879c853dbc5fdc1757f0cfdbf8bee899c80f53d504a7df28898f0fa8ed"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73476e4bc6e467ac1b2c3c0dd1f0b71c9061d4de14626676adfdfbb14aa342b4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = 
"sha256:4d1da8efd974cf9df52ee03edaee2d3875105ddd00de35aa542760f7c612bdf7"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:507be1ee2f2b0c9fba74d622a272640dd6d1b0c9ec3388b2cdeb97ad1e77125f"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:95cee6de08eb45754ef4f602ce52b640d1c535d934a6a8733a974daa095def37"}, + {file = "pyinstrument-4.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7873e8cec92321251fdf894a72b3c78f4c5c20afdd1fef0baf9042ec843bb04"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a242f6cac40bc83e1f3002b6b53681846dfba007f366971db0bf21e02dbb1903"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:97c9660cdb4bd2a43cf4f3ab52cffd22f3ac9a748d913b750178fb34e5e39e64"}, + {file = "pyinstrument-4.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e304cd0723e2b18ada5e63c187abf6d777949454c734f5974d64a0865859f0f4"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win32.whl", hash = "sha256:cee21a2d78187dd8a80f72f5d0f1ddb767b2d9800f8bb4d94b6d11f217c22cdb"}, + {file = "pyinstrument-4.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:2000712f71d693fed2f8a1c1638d37b7919124f367b37976d07128d49f1445eb"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a366c6f3dfb11f1739bdc1dee75a01c1563ad0bf4047071e5e77598087df457f"}, + {file = "pyinstrument-4.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c6be327be65d934796558aa9cb0f75ce62ebd207d49ad1854610c97b0579ad47"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e160d9c5d20d3e4ef82269e4e8b246ff09bdf37af5fb8cb8ccca97936d95ad6"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ffbf56605ef21c2fcb60de2fa74ff81f417d8be0c5002a407e414d6ef6dee43"}, + {file = "pyinstrument-4.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c92cc4924596d6e8f30a16182bbe90893b1572d847ae12652f72b34a9a17c24a"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:f4b48a94d938cae981f6948d9ec603bab2087b178d2095d042d5a48aabaecaab"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:e7a386392275bdef4a1849712dc5b74f0023483fca14ef93d0ca27d453548982"}, + {file = "pyinstrument-4.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:871b131b83e9b1122f2325061c68ed1e861eebcb568c934d2fb193652f077f77"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win32.whl", hash = "sha256:8d8515156dd91f5652d13b5fcc87e634f8fe1c07b68d1d0840348cdd50bf5ace"}, + {file = "pyinstrument-4.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb868fbe089036e9f32525a249f4c78b8dc46967612393f204b8234f439c9cc4"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:a18cd234cce4f230f1733807f17a134e64a1f1acabf74a14d27f583cf2b183df"}, + {file = "pyinstrument-4.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:574cfca69150be4ce4461fb224712fbc0722a49b0dc02fa204d02807adf6b5a0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e02cf505e932eb8ccf561b7527550a67ec14fcae1fe0e25319b09c9c166e914"}, + {file = 
"pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:832fb2acef9d53701c1ab546564c45fb70a8770c816374f8dd11420d399103c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13cb57e9607545623ebe462345b3d0c4caee0125d2d02267043ece8aca8f4ea0"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9be89e7419bcfe8dd6abb0d959d6d9c439c613a4a873514c43d16b48dae697c9"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:476785cfbc44e8e1b1ad447398aa3deae81a8df4d37eb2d8bbb0c404eff979cd"}, + {file = "pyinstrument-4.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e9cebd90128a3d2fee36d3ccb665c1b9dce75261061b2046203e45c4a8012d54"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win32.whl", hash = "sha256:1d0b76683df2ad5c40eff73607dc5c13828c92fbca36aff1ddf869a3c5a55fa6"}, + {file = "pyinstrument-4.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:c4b7af1d9d6a523cfbfedebcb69202242d5bd0cb89c4e094cc73d5d6e38279bd"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79ae152f8c6a680a188fb3be5e0f360ac05db5bbf410169a6c40851dfaebcce9"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07cad2745964c174c65aa75f1bf68a4394d1b4d28f33894837cfd315d1e836f0"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cb81f66f7f94045d723069cf317453d42375de9ff3c69089cf6466b078ac1db4"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ab30ae75969da99e9a529e21ff497c18fdf958e822753db4ae7ed1e67094040"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f36cb5b644762fb3c86289324bbef17e95f91cd710603ac19444a47f638e8e96"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8b45075d9dbbc977dbc7007fb22bb0054c6990fbe91bf48dd80c0b96c6307ba7"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:475ac31477f6302e092463896d6a2055f3e6abcd293bad16ff94fc9185308a88"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win32.whl", hash = "sha256:29172ab3d8609fdf821c3f2562dc61e14f1a8ff5306607c32ca743582d3a760e"}, + {file = "pyinstrument-4.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:bd176f297c99035127b264369d2bb97a65255f65f8d4e843836baf55ebb3cee4"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:23e9b4526978432e9999021da9a545992cf2ac3df5ee82db7beb6908fc4c978c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2dbcaccc9f456ef95557ec501caeb292119c24446d768cb4fb43578b0f3d572c"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2097f63c66c2bc9678c826b9ff0c25acde3ed455590d9dcac21220673fe74fbf"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:205ac2e76bd65d61b9611a9ce03d5f6393e34ec5b41dd38808f25d54e6b3e067"}, + {file = "pyinstrument-4.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f414ddf1161976a40fc0a333000e6a4ad612719eac0b8c9bb73f47153187148"}, + {file = 
"pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:65e62ebfa2cd8fb57eda90006f4505ac4c70da00fc2f05b6d8337d776ea76d41"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d96309df4df10be7b4885797c5f69bb3a89414680ebaec0722d8156fde5268c3"}, + {file = "pyinstrument-4.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f3d1ad3bc8ebb4db925afa706aa865c4bfb40d52509f143491ac0df2440ee5d2"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win32.whl", hash = "sha256:dc37cb988c8854eb42bda2e438aaf553536566657d157c4473cc8aad5692a779"}, + {file = "pyinstrument-4.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:2cd4ce750c34a0318fc2d6c727cc255e9658d12a5cf3f2d0473f1c27157bdaeb"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6ca95b21f022e995e062b371d1f42d901452bcbedd2c02f036de677119503355"}, + {file = "pyinstrument-4.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ac1e1d7e1f1b64054c4eb04eb4869a7a5eef2261440e73943cc1b1bc3c828c18"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0711845e953fce6ab781221aacffa2a66dbc3289f8343e5babd7b2ea34da6c90"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b7d28582017de35cb64eb4e4fa603e753095108ca03745f5d17295970ee631f"}, + {file = "pyinstrument-4.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7be57db08bd366a37db3aa3a6187941ee21196e8b14975db337ddc7d1490649d"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9a0ac0f56860398d2628ce389826ce83fb3a557d0c9a2351e8a2eac6eb869983"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a9045186ff13bc826fef16be53736a85029aae3c6adfe52e666cad00d7ca623b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6c4c56b6eab9004e92ad8a48bb54913fdd71fc8a748ae42a27b9e26041646f8b"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win32.whl", hash = "sha256:37e989c44b51839d0c97466fa2b623638b9470d56d79e329f359f0e8fa6d83db"}, + {file = "pyinstrument-4.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:5494c5a84fee4309d7d973366ca6b8b9f8ba1d6b254e93b7c506264ef74f2cef"}, + {file = "pyinstrument-4.6.1.tar.gz", hash = "sha256:f4731b27121350f5a983d358d2272fe3df2f538aed058f57217eef7801a89288"}, ] [package.extras] From 8b5671361e2779f47f2fde87b72a9cc578247678 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 9 Nov 2023 10:10:38 -0400 Subject: [PATCH 160/262] Bugfix make sure patron profile doesn't authenticate twice (PP-701) (#1505) --- api/controller.py | 9 +++------ tests/api/test_controller_profile.py | 22 +++++++++++++--------- 2 files changed, 16 insertions(+), 15 deletions(-) diff --git a/api/controller.py b/api/controller.py index fa5b5ea7e3..4c854fa913 100644 --- a/api/controller.py +++ b/api/controller.py @@ -2203,18 +2203,15 @@ def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFe class ProfileController(CirculationManagerController): """Implement the User Profile Management Protocol.""" - @property - def _controller(self): + def _controller(self, patron): """Instantiate a CoreProfileController that actually does the work.""" - # TODO: Probably better to use request_patron and check for - # None here. 
- patron = self.authenticated_patron_from_request() storage = CirculationPatronProfileStorage(patron, flask.url_for) return CoreProfileController(storage) def protocol(self): """Handle a UPMP request.""" - controller = self._controller + patron = flask.request.patron + controller = self._controller(patron) if flask.request.method == "GET": result = controller.get() else: diff --git a/tests/api/test_controller_profile.py b/tests/api/test_controller_profile.py index 348dadffbf..fd9119bea7 100644 --- a/tests/api/test_controller_profile.py +++ b/tests/api/test_controller_profile.py @@ -44,7 +44,9 @@ def test_controller_uses_circulation_patron_profile_storage( "/", method="GET", headers=profile_fixture.auth ): assert isinstance( - profile_fixture.manager.profiles._controller.storage, + profile_fixture.manager.profiles._controller( + profile_fixture.other_patron + ).storage, CirculationPatronProfileStorage, ) @@ -59,7 +61,7 @@ def test_get(self, profile_fixture: ProfileFixture): assert "200 OK" == response.status data = json.loads(response.get_data(as_text=True)) settings = data["settings"] - assert True == settings[ProfileStorage.SYNCHRONIZE_ANNOTATIONS] + assert settings[ProfileStorage.SYNCHRONIZE_ANNOTATIONS] is True def test_put(self, profile_fixture: ProfileFixture): """Verify that a patron can modify their own profile.""" @@ -78,7 +80,7 @@ def test_put(self, profile_fixture: ProfileFixture): request_patron = ( profile_fixture.controller.authenticated_patron_from_request() ) - assert None == request_patron.synchronize_annotations + assert request_patron.synchronize_annotations is None # This means we can't create annotations for them. pytest.raises( @@ -90,17 +92,17 @@ def test_put(self, profile_fixture: ProfileFixture): ) # But by sending a PUT request... - response = profile_fixture.manager.profiles.protocol() + profile_fixture.manager.profiles.protocol() # ...we can change synchronize_annotations to True. - assert True == request_patron.synchronize_annotations + assert request_patron.synchronize_annotations is True # The other patron is unaffected. - assert False == profile_fixture.other_patron.synchronize_annotations + assert profile_fixture.other_patron.synchronize_annotations is False # type: ignore[unreachable] # Now we can create an annotation for the patron who enabled # annotation sync. - annotation = Annotation.get_one_or_create( + Annotation.get_one_or_create( # type: ignore[unreachable] profile_fixture.db.session, patron=request_patron, identifier=identifier ) assert 1 == len(request_patron.annotations) @@ -115,11 +117,12 @@ def test_put(self, profile_fixture: ProfileFixture): content_type=ProfileController.MEDIA_TYPE, data=json.dumps(payload), ): - response = profile_fixture.manager.profiles.protocol() + profile_fixture.controller.authenticated_patron_from_request() + profile_fixture.manager.profiles.protocol() # ...the annotation goes away. 
profile_fixture.db.session.commit() - assert False == request_patron.synchronize_annotations + assert request_patron.synchronize_annotations is False assert 0 == len(request_patron.annotations) def test_problemdetail_on_error(self, profile_fixture: ProfileFixture): @@ -132,6 +135,7 @@ def test_problemdetail_on_error(self, profile_fixture: ProfileFixture): headers=profile_fixture.auth, content_type="text/plain", ): + profile_fixture.controller.authenticated_patron_from_request() response = profile_fixture.manager.profiles.protocol() assert isinstance(response, ProblemDetail) assert 415 == response.status_code From 223aca85415d76bd95fec6694234359da8e534bb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 10 Nov 2023 00:57:32 +0000 Subject: [PATCH 161/262] Bump types-pillow from 10.1.0.0 to 10.1.0.1 (#1507) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d988046fd2..e13a6dfcd4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4140,13 +4140,13 @@ referencing = "*" [[package]] name = "types-pillow" -version = "10.1.0.0" +version = "10.1.0.1" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.7" files = [ - {file = "types-Pillow-10.1.0.0.tar.gz", hash = "sha256:0f5e7cf010ed226800cb5821e87781e5d0e81257d948a9459baa74a8c8b7d822"}, - {file = "types_Pillow-10.1.0.0-py3-none-any.whl", hash = "sha256:f97f596b6a39ddfd26da3eb67421062193e10732d2310f33898d36f9694331b5"}, + {file = "types-Pillow-10.1.0.1.tar.gz", hash = "sha256:2ab92b1fea760315a3608394f26de8c63b6335a67cfc6ffefb3fe492b6ae58e5"}, + {file = "types_Pillow-10.1.0.1-py3-none-any.whl", hash = "sha256:5e81c55f9e66c19b32fbae7a8b562f1fdbf76814a952e3154f8d6c183516bbdb"}, ] [[package]] From 40cd0253d064e2f5bf2247659ec864a2e680cdde Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Fri, 10 Nov 2023 16:03:35 +0530 Subject: [PATCH 162/262] Changed the notification scripts frequency to run every hour (#1510) --- docker/services/cron/cron.d/circulation | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index 6e72199837..7a8a40f6e0 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -106,8 +106,8 @@ HOME=/var/www/circulation # Notifications # -10 3 * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 -15 */2 * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 +10 * * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 +15 * * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 0 1 * * * root core/bin/run patron_activity_sync_notifications >> /var/log/cron.log 2>&1 # Audiobook playtimes From 9b97fbdab6da7ec322f3549f53a062fc6421ebe0 Mon Sep 17 00:00:00 2001 From: dbernstein Date: Wed, 15 Nov 2023 09:53:25 -0800 Subject: [PATCH 163/262] Update Admin UI to 1.11.0 (#1514) --- api/admin/config.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/admin/config.py b/api/admin/config.py index e9cce39b10..957b0c3394 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -17,7 +17,7 @@ class OperationalMode(str, Enum): class Configuration(LoggerMixin): APP_NAME = "Palace Collection Manager" PACKAGE_NAME = "@thepalaceproject/circulation-admin" - PACKAGE_VERSION = "1.10.0" + PACKAGE_VERSION = "1.11.0" STATIC_ASSETS = { "admin_js": 
"circulation-admin.js", From d663582a8fb513346ccc9c3181d5929725158397 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 10:03:24 -0800 Subject: [PATCH 164/262] Bump boto3 from 1.28.52 to 1.29.0 (#1513) Bumps [boto3](https://github.com/boto/boto3) from 1.28.52 to 1.29.0. - [Release notes](https://github.com/boto/boto3/releases) - [Changelog](https://github.com/boto/boto3/blob/develop/CHANGELOG.rst) - [Commits](https://github.com/boto/boto3/compare/1.28.52...1.29.0) --- updated-dependencies: - dependency-name: boto3 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 846 ++++++++++++++++++++++++++-------------------------- 1 file changed, 426 insertions(+), 420 deletions(-) diff --git a/poetry.lock b/poetry.lock index e13a6dfcd4..a3f59fc086 100644 --- a/poetry.lock +++ b/poetry.lock @@ -137,427 +137,433 @@ files = [ [[package]] name = "boto3" -version = "1.28.52" +version = "1.29.0" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.28.52-py3-none-any.whl", hash = "sha256:1d36db102517d62c6968b3b0636303241f56859d12dd071def4882fc6e030b20"}, - {file = "boto3-1.28.52.tar.gz", hash = "sha256:a34fc153cb2f6fb2f79a764286c967392e8aae9412381d943bddc576c4f7631a"}, + {file = "boto3-1.29.0-py3-none-any.whl", hash = "sha256:91c72fa4848eda9311c273db667946bd9d953285ae8d54b7bbad541b74adc254"}, + {file = "boto3-1.29.0.tar.gz", hash = "sha256:3e90ea2faa3e9892b9140f857911f9ef0013192a106f50d0ec7b71e8d1afc90a"}, ] [package.dependencies] -botocore = ">=1.31.52,<1.32.0" +botocore = ">=1.32.0,<1.33.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.6.0,<0.7.0" +s3transfer = ">=0.7.0,<0.8.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.28.52" -description = "Type annotations for boto3 1.28.52 generated with mypy-boto3-builder 7.19.0" +version = "1.29.0" +description = "Type annotations for boto3 1.29.0 generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "boto3-stubs-1.28.52.tar.gz", hash = "sha256:12d7e5865aeec52e1f73b935b1c6a42e61325538fc2cb83a87a83e41e9485241"}, - {file = "boto3_stubs-1.28.52-py3-none-any.whl", hash = "sha256:3ea81a225e062f3bcb205467891086ea031519697ad54622e61251b52609b8d6"}, + {file = "boto3-stubs-1.29.0.tar.gz", hash = "sha256:897cb22cbf7971809cac10470121ac194a5cc57d5fb3d8bfec09e07b3cb7646b"}, + {file = "boto3_stubs-1.29.0-py3-none-any.whl", hash = "sha256:e0ffd497ebd63b5d66b7eeef3192201be5453e8e5f449c864dd23877cf18fe3e"}, ] [package.dependencies] -boto3 = {version = "1.28.52", optional = true, markers = "extra == \"boto3\""} -botocore = {version = "1.31.52", optional = true, markers = "extra == \"boto3\""} +boto3 = {version = "1.29.0", optional = true, markers = "extra == \"boto3\""} +botocore = {version = "1.32.0", optional = true, markers = "extra == \"boto3\""} botocore-stubs = "*" -mypy-boto3-cloudformation = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-dynamodb = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-ec2 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-lambda = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == 
\"essential\""} -mypy-boto3-logs = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"logs\""} -mypy-boto3-rds = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-s3 = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} -mypy-boto3-sqs = {version = ">=1.28.0,<1.29.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-cloudformation = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-dynamodb = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-ec2 = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-lambda = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-logs = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"logs\""} +mypy-boto3-rds = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-s3 = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} +mypy-boto3-sqs = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} types-s3transfer = "*" typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)"] -account = ["mypy-boto3-account (>=1.28.0,<1.29.0)"] -acm = ["mypy-boto3-acm (>=1.28.0,<1.29.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.28.0,<1.29.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.28.0,<1.29.0)", "mypy-boto3-account (>=1.28.0,<1.29.0)", "mypy-boto3-acm (>=1.28.0,<1.29.0)", "mypy-boto3-acm-pca (>=1.28.0,<1.29.0)", "mypy-boto3-alexaforbusiness (>=1.28.0,<1.29.0)", "mypy-boto3-amp (>=1.28.0,<1.29.0)", "mypy-boto3-amplify (>=1.28.0,<1.29.0)", "mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)", "mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)", "mypy-boto3-apigateway (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)", "mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)", "mypy-boto3-appconfig (>=1.28.0,<1.29.0)", "mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)", "mypy-boto3-appfabric (>=1.28.0,<1.29.0)", "mypy-boto3-appflow (>=1.28.0,<1.29.0)", "mypy-boto3-appintegrations (>=1.28.0,<1.29.0)", "mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-application-insights (>=1.28.0,<1.29.0)", "mypy-boto3-applicationcostprofiler (>=1.28.0,<1.29.0)", "mypy-boto3-appmesh (>=1.28.0,<1.29.0)", "mypy-boto3-apprunner (>=1.28.0,<1.29.0)", "mypy-boto3-appstream (>=1.28.0,<1.29.0)", "mypy-boto3-appsync (>=1.28.0,<1.29.0)", "mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)", "mypy-boto3-athena (>=1.28.0,<1.29.0)", "mypy-boto3-auditmanager (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling (>=1.28.0,<1.29.0)", "mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)", "mypy-boto3-backup (>=1.28.0,<1.29.0)", "mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)", "mypy-boto3-backupstorage (>=1.28.0,<1.29.0)", "mypy-boto3-batch (>=1.28.0,<1.29.0)", "mypy-boto3-billingconductor (>=1.28.0,<1.29.0)", "mypy-boto3-braket (>=1.28.0,<1.29.0)", "mypy-boto3-budgets (>=1.28.0,<1.29.0)", "mypy-boto3-ce (>=1.28.0,<1.29.0)", "mypy-boto3-chime (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)", 
"mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)", "mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)", "mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)", "mypy-boto3-cloud9 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)", "mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)", "mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-cloudfront (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)", "mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)", "mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)", "mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)", "mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)", "mypy-boto3-codeartifact (>=1.28.0,<1.29.0)", "mypy-boto3-codebuild (>=1.28.0,<1.29.0)", "mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)", "mypy-boto3-codecommit (>=1.28.0,<1.29.0)", "mypy-boto3-codedeploy (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)", "mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)", "mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)", "mypy-boto3-codepipeline (>=1.28.0,<1.29.0)", "mypy-boto3-codestar (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)", "mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)", "mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)", "mypy-boto3-comprehend (>=1.28.0,<1.29.0)", "mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)", "mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)", "mypy-boto3-config (>=1.28.0,<1.29.0)", "mypy-boto3-connect (>=1.28.0,<1.29.0)", "mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)", "mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)", "mypy-boto3-connectcases (>=1.28.0,<1.29.0)", "mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)", "mypy-boto3-controltower (>=1.28.0,<1.29.0)", "mypy-boto3-cur (>=1.28.0,<1.29.0)", "mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)", "mypy-boto3-databrew (>=1.28.0,<1.29.0)", "mypy-boto3-dataexchange (>=1.28.0,<1.29.0)", "mypy-boto3-datapipeline (>=1.28.0,<1.29.0)", "mypy-boto3-datasync (>=1.28.0,<1.29.0)", "mypy-boto3-dax (>=1.28.0,<1.29.0)", "mypy-boto3-detective (>=1.28.0,<1.29.0)", "mypy-boto3-devicefarm (>=1.28.0,<1.29.0)", "mypy-boto3-devops-guru (>=1.28.0,<1.29.0)", "mypy-boto3-directconnect (>=1.28.0,<1.29.0)", "mypy-boto3-discovery (>=1.28.0,<1.29.0)", "mypy-boto3-dlm (>=1.28.0,<1.29.0)", "mypy-boto3-dms (>=1.28.0,<1.29.0)", "mypy-boto3-docdb (>=1.28.0,<1.29.0)", "mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)", "mypy-boto3-drs (>=1.28.0,<1.29.0)", "mypy-boto3-ds (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)", "mypy-boto3-ebs (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)", "mypy-boto3-ecr (>=1.28.0,<1.29.0)", "mypy-boto3-ecr-public (>=1.28.0,<1.29.0)", "mypy-boto3-ecs (>=1.28.0,<1.29.0)", "mypy-boto3-efs (>=1.28.0,<1.29.0)", "mypy-boto3-eks (>=1.28.0,<1.29.0)", "mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)", "mypy-boto3-elasticache (>=1.28.0,<1.29.0)", "mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)", "mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)", "mypy-boto3-elb (>=1.28.0,<1.29.0)", "mypy-boto3-elbv2 (>=1.28.0,<1.29.0)", "mypy-boto3-emr (>=1.28.0,<1.29.0)", "mypy-boto3-emr-containers (>=1.28.0,<1.29.0)", "mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-entityresolution (>=1.28.0,<1.29.0)", 
"mypy-boto3-es (>=1.28.0,<1.29.0)", "mypy-boto3-events (>=1.28.0,<1.29.0)", "mypy-boto3-evidently (>=1.28.0,<1.29.0)", "mypy-boto3-finspace (>=1.28.0,<1.29.0)", "mypy-boto3-finspace-data (>=1.28.0,<1.29.0)", "mypy-boto3-firehose (>=1.28.0,<1.29.0)", "mypy-boto3-fis (>=1.28.0,<1.29.0)", "mypy-boto3-fms (>=1.28.0,<1.29.0)", "mypy-boto3-forecast (>=1.28.0,<1.29.0)", "mypy-boto3-forecastquery (>=1.28.0,<1.29.0)", "mypy-boto3-frauddetector (>=1.28.0,<1.29.0)", "mypy-boto3-fsx (>=1.28.0,<1.29.0)", "mypy-boto3-gamelift (>=1.28.0,<1.29.0)", "mypy-boto3-gamesparks (>=1.28.0,<1.29.0)", "mypy-boto3-glacier (>=1.28.0,<1.29.0)", "mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)", "mypy-boto3-glue (>=1.28.0,<1.29.0)", "mypy-boto3-grafana (>=1.28.0,<1.29.0)", "mypy-boto3-greengrass (>=1.28.0,<1.29.0)", "mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)", "mypy-boto3-groundstation (>=1.28.0,<1.29.0)", "mypy-boto3-guardduty (>=1.28.0,<1.29.0)", "mypy-boto3-health (>=1.28.0,<1.29.0)", "mypy-boto3-healthlake (>=1.28.0,<1.29.0)", "mypy-boto3-honeycode (>=1.28.0,<1.29.0)", "mypy-boto3-iam (>=1.28.0,<1.29.0)", "mypy-boto3-identitystore (>=1.28.0,<1.29.0)", "mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)", "mypy-boto3-importexport (>=1.28.0,<1.29.0)", "mypy-boto3-inspector (>=1.28.0,<1.29.0)", "mypy-boto3-inspector2 (>=1.28.0,<1.29.0)", "mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)", "mypy-boto3-iot (>=1.28.0,<1.29.0)", "mypy-boto3-iot-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)", "mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)", "mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)", "mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents (>=1.28.0,<1.29.0)", "mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)", "mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)", "mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)", "mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)", "mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)", "mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)", "mypy-boto3-iotwireless (>=1.28.0,<1.29.0)", "mypy-boto3-ivs (>=1.28.0,<1.29.0)", "mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)", "mypy-boto3-ivschat (>=1.28.0,<1.29.0)", "mypy-boto3-kafka (>=1.28.0,<1.29.0)", "mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-kendra (>=1.28.0,<1.29.0)", "mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)", "mypy-boto3-keyspaces (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)", "mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)", "mypy-boto3-kms (>=1.28.0,<1.29.0)", "mypy-boto3-lakeformation (>=1.28.0,<1.29.0)", "mypy-boto3-lambda (>=1.28.0,<1.29.0)", "mypy-boto3-lex-models (>=1.28.0,<1.29.0)", "mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)", "mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)", "mypy-boto3-lightsail (>=1.28.0,<1.29.0)", "mypy-boto3-location (>=1.28.0,<1.29.0)", "mypy-boto3-logs (>=1.28.0,<1.29.0)", 
"mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)", "mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)", "mypy-boto3-m2 (>=1.28.0,<1.29.0)", "mypy-boto3-machinelearning (>=1.28.0,<1.29.0)", "mypy-boto3-macie (>=1.28.0,<1.29.0)", "mypy-boto3-macie2 (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)", "mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)", "mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)", "mypy-boto3-mediaconvert (>=1.28.0,<1.29.0)", "mypy-boto3-medialive (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)", "mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore (>=1.28.0,<1.29.0)", "mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)", "mypy-boto3-mediatailor (>=1.28.0,<1.29.0)", "mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)", "mypy-boto3-memorydb (>=1.28.0,<1.29.0)", "mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)", "mypy-boto3-mgh (>=1.28.0,<1.29.0)", "mypy-boto3-mgn (>=1.28.0,<1.29.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhub-config (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)", "mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)", "mypy-boto3-mobile (>=1.28.0,<1.29.0)", "mypy-boto3-mq (>=1.28.0,<1.29.0)", "mypy-boto3-mturk (>=1.28.0,<1.29.0)", "mypy-boto3-mwaa (>=1.28.0,<1.29.0)", "mypy-boto3-neptune (>=1.28.0,<1.29.0)", "mypy-boto3-neptunedata (>=1.28.0,<1.29.0)", "mypy-boto3-network-firewall (>=1.28.0,<1.29.0)", "mypy-boto3-networkmanager (>=1.28.0,<1.29.0)", "mypy-boto3-nimble (>=1.28.0,<1.29.0)", "mypy-boto3-oam (>=1.28.0,<1.29.0)", "mypy-boto3-omics (>=1.28.0,<1.29.0)", "mypy-boto3-opensearch (>=1.28.0,<1.29.0)", "mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)", "mypy-boto3-opsworks (>=1.28.0,<1.29.0)", "mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)", "mypy-boto3-organizations (>=1.28.0,<1.29.0)", "mypy-boto3-osis (>=1.28.0,<1.29.0)", "mypy-boto3-outposts (>=1.28.0,<1.29.0)", "mypy-boto3-panorama (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)", "mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)", "mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)", "mypy-boto3-personalize (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-events (>=1.28.0,<1.29.0)", "mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-pi (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)", "mypy-boto3-pipes (>=1.28.0,<1.29.0)", "mypy-boto3-polly (>=1.28.0,<1.29.0)", "mypy-boto3-pricing (>=1.28.0,<1.29.0)", "mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)", "mypy-boto3-proton (>=1.28.0,<1.29.0)", "mypy-boto3-qldb (>=1.28.0,<1.29.0)", "mypy-boto3-qldb-session (>=1.28.0,<1.29.0)", "mypy-boto3-quicksight (>=1.28.0,<1.29.0)", "mypy-boto3-ram (>=1.28.0,<1.29.0)", "mypy-boto3-rbin (>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-rds-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-data (>=1.28.0,<1.29.0)", "mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)", "mypy-boto3-rekognition (>=1.28.0,<1.29.0)", "mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)", 
"mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)", "mypy-boto3-resource-groups (>=1.28.0,<1.29.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)", "mypy-boto3-robomaker (>=1.28.0,<1.29.0)", "mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)", "mypy-boto3-route53 (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-control-config (>=1.28.0,<1.29.0)", "mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)", "mypy-boto3-route53domains (>=1.28.0,<1.29.0)", "mypy-boto3-route53resolver (>=1.28.0,<1.29.0)", "mypy-boto3-rum (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-s3control (>=1.28.0,<1.29.0)", "mypy-boto3-s3outposts (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)", "mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)", "mypy-boto3-savingsplans (>=1.28.0,<1.29.0)", "mypy-boto3-scheduler (>=1.28.0,<1.29.0)", "mypy-boto3-schemas (>=1.28.0,<1.29.0)", "mypy-boto3-sdb (>=1.28.0,<1.29.0)", "mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)", "mypy-boto3-securityhub (>=1.28.0,<1.29.0)", "mypy-boto3-securitylake (>=1.28.0,<1.29.0)", "mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)", "mypy-boto3-service-quotas (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)", "mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)", "mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)", "mypy-boto3-ses (>=1.28.0,<1.29.0)", "mypy-boto3-sesv2 (>=1.28.0,<1.29.0)", "mypy-boto3-shield (>=1.28.0,<1.29.0)", "mypy-boto3-signer (>=1.28.0,<1.29.0)", "mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)", "mypy-boto3-sms (>=1.28.0,<1.29.0)", "mypy-boto3-sms-voice (>=1.28.0,<1.29.0)", "mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)", "mypy-boto3-snowball (>=1.28.0,<1.29.0)", "mypy-boto3-sns (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)", "mypy-boto3-ssm (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)", "mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)", "mypy-boto3-sso (>=1.28.0,<1.29.0)", "mypy-boto3-sso-admin (>=1.28.0,<1.29.0)", "mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)", "mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)", "mypy-boto3-storagegateway (>=1.28.0,<1.29.0)", "mypy-boto3-sts (>=1.28.0,<1.29.0)", "mypy-boto3-support (>=1.28.0,<1.29.0)", "mypy-boto3-support-app (>=1.28.0,<1.29.0)", "mypy-boto3-swf (>=1.28.0,<1.29.0)", "mypy-boto3-synthetics (>=1.28.0,<1.29.0)", "mypy-boto3-textract (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-query (>=1.28.0,<1.29.0)", "mypy-boto3-timestream-write (>=1.28.0,<1.29.0)", "mypy-boto3-tnb (>=1.28.0,<1.29.0)", "mypy-boto3-transcribe (>=1.28.0,<1.29.0)", "mypy-boto3-transfer (>=1.28.0,<1.29.0)", "mypy-boto3-translate (>=1.28.0,<1.29.0)", "mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)", "mypy-boto3-voice-id (>=1.28.0,<1.29.0)", "mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)", "mypy-boto3-waf (>=1.28.0,<1.29.0)", "mypy-boto3-waf-regional (>=1.28.0,<1.29.0)", "mypy-boto3-wafv2 (>=1.28.0,<1.29.0)", "mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)", "mypy-boto3-wisdom (>=1.28.0,<1.29.0)", "mypy-boto3-workdocs (>=1.28.0,<1.29.0)", "mypy-boto3-worklink (>=1.28.0,<1.29.0)", "mypy-boto3-workmail (>=1.28.0,<1.29.0)", "mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)", 
"mypy-boto3-workspaces (>=1.28.0,<1.29.0)", "mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)", "mypy-boto3-xray (>=1.28.0,<1.29.0)"] -amp = ["mypy-boto3-amp (>=1.28.0,<1.29.0)"] -amplify = ["mypy-boto3-amplify (>=1.28.0,<1.29.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.28.0,<1.29.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.28.0,<1.29.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.28.0,<1.29.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.28.0,<1.29.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.28.0,<1.29.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.28.0,<1.29.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.28.0,<1.29.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.28.0,<1.29.0)"] -appflow = ["mypy-boto3-appflow (>=1.28.0,<1.29.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.28.0,<1.29.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.28.0,<1.29.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.28.0,<1.29.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.28.0,<1.29.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.28.0,<1.29.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.28.0,<1.29.0)"] -appstream = ["mypy-boto3-appstream (>=1.28.0,<1.29.0)"] -appsync = ["mypy-boto3-appsync (>=1.28.0,<1.29.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.28.0,<1.29.0)"] -athena = ["mypy-boto3-athena (>=1.28.0,<1.29.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.28.0,<1.29.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.28.0,<1.29.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.28.0,<1.29.0)"] -backup = ["mypy-boto3-backup (>=1.28.0,<1.29.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.28.0,<1.29.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.28.0,<1.29.0)"] -batch = ["mypy-boto3-batch (>=1.28.0,<1.29.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.28.0,<1.29.0)"] -boto3 = ["boto3 (==1.28.52)", "botocore (==1.31.52)"] -braket = ["mypy-boto3-braket (>=1.28.0,<1.29.0)"] -budgets = ["mypy-boto3-budgets (>=1.28.0,<1.29.0)"] -ce = ["mypy-boto3-ce (>=1.28.0,<1.29.0)"] -chime = ["mypy-boto3-chime (>=1.28.0,<1.29.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.28.0,<1.29.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.28.0,<1.29.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.28.0,<1.29.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.28.0,<1.29.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.28.0,<1.29.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.28.0,<1.29.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.28.0,<1.29.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.28.0,<1.29.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.28.0,<1.29.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.28.0,<1.29.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.28.0,<1.29.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.28.0,<1.29.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.28.0,<1.29.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.28.0,<1.29.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.28.0,<1.29.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.28.0,<1.29.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.28.0,<1.29.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.28.0,<1.29.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.28.0,<1.29.0)"] -codecatalyst = 
["mypy-boto3-codecatalyst (>=1.28.0,<1.29.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.28.0,<1.29.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.28.0,<1.29.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.28.0,<1.29.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.28.0,<1.29.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.28.0,<1.29.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.28.0,<1.29.0)"] -codestar = ["mypy-boto3-codestar (>=1.28.0,<1.29.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.28.0,<1.29.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.28.0,<1.29.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.28.0,<1.29.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.28.0,<1.29.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.28.0,<1.29.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.28.0,<1.29.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.28.0,<1.29.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.28.0,<1.29.0)"] -config = ["mypy-boto3-config (>=1.28.0,<1.29.0)"] -connect = ["mypy-boto3-connect (>=1.28.0,<1.29.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.28.0,<1.29.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.28.0,<1.29.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.28.0,<1.29.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.28.0,<1.29.0)"] -controltower = ["mypy-boto3-controltower (>=1.28.0,<1.29.0)"] -cur = ["mypy-boto3-cur (>=1.28.0,<1.29.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.28.0,<1.29.0)"] -databrew = ["mypy-boto3-databrew (>=1.28.0,<1.29.0)"] -dataexchange = ["mypy-boto3-dataexchange (>=1.28.0,<1.29.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.28.0,<1.29.0)"] -datasync = ["mypy-boto3-datasync (>=1.28.0,<1.29.0)"] -dax = ["mypy-boto3-dax (>=1.28.0,<1.29.0)"] -detective = ["mypy-boto3-detective (>=1.28.0,<1.29.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.28.0,<1.29.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.28.0,<1.29.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.28.0,<1.29.0)"] -discovery = ["mypy-boto3-discovery (>=1.28.0,<1.29.0)"] -dlm = ["mypy-boto3-dlm (>=1.28.0,<1.29.0)"] -dms = ["mypy-boto3-dms (>=1.28.0,<1.29.0)"] -docdb = ["mypy-boto3-docdb (>=1.28.0,<1.29.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.28.0,<1.29.0)"] -drs = ["mypy-boto3-drs (>=1.28.0,<1.29.0)"] -ds = ["mypy-boto3-ds (>=1.28.0,<1.29.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.28.0,<1.29.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.28.0,<1.29.0)"] -ebs = ["mypy-boto3-ebs (>=1.28.0,<1.29.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.28.0,<1.29.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.28.0,<1.29.0)"] -ecr = ["mypy-boto3-ecr (>=1.28.0,<1.29.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.28.0,<1.29.0)"] -ecs = ["mypy-boto3-ecs (>=1.28.0,<1.29.0)"] -efs = ["mypy-boto3-efs (>=1.28.0,<1.29.0)"] -eks = ["mypy-boto3-eks (>=1.28.0,<1.29.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.28.0,<1.29.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.28.0,<1.29.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.28.0,<1.29.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.28.0,<1.29.0)"] -elb = ["mypy-boto3-elb (>=1.28.0,<1.29.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.28.0,<1.29.0)"] -emr = ["mypy-boto3-emr (>=1.28.0,<1.29.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.28.0,<1.29.0)"] -emr-serverless = 
["mypy-boto3-emr-serverless (>=1.28.0,<1.29.0)"] -entityresolution = ["mypy-boto3-entityresolution (>=1.28.0,<1.29.0)"] -es = ["mypy-boto3-es (>=1.28.0,<1.29.0)"] -essential = ["mypy-boto3-cloudformation (>=1.28.0,<1.29.0)", "mypy-boto3-dynamodb (>=1.28.0,<1.29.0)", "mypy-boto3-ec2 (>=1.28.0,<1.29.0)", "mypy-boto3-lambda (>=1.28.0,<1.29.0)", "mypy-boto3-rds (>=1.28.0,<1.29.0)", "mypy-boto3-s3 (>=1.28.0,<1.29.0)", "mypy-boto3-sqs (>=1.28.0,<1.29.0)"] -events = ["mypy-boto3-events (>=1.28.0,<1.29.0)"] -evidently = ["mypy-boto3-evidently (>=1.28.0,<1.29.0)"] -finspace = ["mypy-boto3-finspace (>=1.28.0,<1.29.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.28.0,<1.29.0)"] -firehose = ["mypy-boto3-firehose (>=1.28.0,<1.29.0)"] -fis = ["mypy-boto3-fis (>=1.28.0,<1.29.0)"] -fms = ["mypy-boto3-fms (>=1.28.0,<1.29.0)"] -forecast = ["mypy-boto3-forecast (>=1.28.0,<1.29.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.28.0,<1.29.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.28.0,<1.29.0)"] -fsx = ["mypy-boto3-fsx (>=1.28.0,<1.29.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.28.0,<1.29.0)"] -gamesparks = ["mypy-boto3-gamesparks (>=1.28.0,<1.29.0)"] -glacier = ["mypy-boto3-glacier (>=1.28.0,<1.29.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.28.0,<1.29.0)"] -glue = ["mypy-boto3-glue (>=1.28.0,<1.29.0)"] -grafana = ["mypy-boto3-grafana (>=1.28.0,<1.29.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.28.0,<1.29.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.28.0,<1.29.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.28.0,<1.29.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.28.0,<1.29.0)"] -health = ["mypy-boto3-health (>=1.28.0,<1.29.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.28.0,<1.29.0)"] -honeycode = ["mypy-boto3-honeycode (>=1.28.0,<1.29.0)"] -iam = ["mypy-boto3-iam (>=1.28.0,<1.29.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.28.0,<1.29.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.28.0,<1.29.0)"] -importexport = ["mypy-boto3-importexport (>=1.28.0,<1.29.0)"] -inspector = ["mypy-boto3-inspector (>=1.28.0,<1.29.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.28.0,<1.29.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.28.0,<1.29.0)"] -iot = ["mypy-boto3-iot (>=1.28.0,<1.29.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.28.0,<1.29.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.28.0,<1.29.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.28.0,<1.29.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.28.0,<1.29.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.28.0,<1.29.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.28.0,<1.29.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.28.0,<1.29.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.28.0,<1.29.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.28.0,<1.29.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.28.0,<1.29.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.28.0,<1.29.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.28.0,<1.29.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.28.0,<1.29.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.28.0,<1.29.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.28.0,<1.29.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.28.0,<1.29.0)"] -ivs = ["mypy-boto3-ivs (>=1.28.0,<1.29.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.28.0,<1.29.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.28.0,<1.29.0)"] -kafka = ["mypy-boto3-kafka (>=1.28.0,<1.29.0)"] -kafkaconnect = 
["mypy-boto3-kafkaconnect (>=1.28.0,<1.29.0)"] -kendra = ["mypy-boto3-kendra (>=1.28.0,<1.29.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.28.0,<1.29.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.28.0,<1.29.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.28.0,<1.29.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.28.0,<1.29.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.28.0,<1.29.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.28.0,<1.29.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.28.0,<1.29.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.28.0,<1.29.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.28.0,<1.29.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.28.0,<1.29.0)"] -kms = ["mypy-boto3-kms (>=1.28.0,<1.29.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.28.0,<1.29.0)"] -lambda = ["mypy-boto3-lambda (>=1.28.0,<1.29.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.28.0,<1.29.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.28.0,<1.29.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.28.0,<1.29.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.28.0,<1.29.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.28.0,<1.29.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.28.0,<1.29.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.28.0,<1.29.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.28.0,<1.29.0)"] -location = ["mypy-boto3-location (>=1.28.0,<1.29.0)"] -logs = ["mypy-boto3-logs (>=1.28.0,<1.29.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.28.0,<1.29.0)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.28.0,<1.29.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.28.0,<1.29.0)"] -m2 = ["mypy-boto3-m2 (>=1.28.0,<1.29.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.28.0,<1.29.0)"] -macie = ["mypy-boto3-macie (>=1.28.0,<1.29.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.28.0,<1.29.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.28.0,<1.29.0)"] -managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.28.0,<1.29.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.28.0,<1.29.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.28.0,<1.29.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.28.0,<1.29.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.28.0,<1.29.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.28.0,<1.29.0)"] -medialive = ["mypy-boto3-medialive (>=1.28.0,<1.29.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.28.0,<1.29.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.28.0,<1.29.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.28.0,<1.29.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.28.0,<1.29.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.28.0,<1.29.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.28.0,<1.29.0)"] -medical-imaging = ["mypy-boto3-medical-imaging (>=1.28.0,<1.29.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.28.0,<1.29.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.28.0,<1.29.0)"] -mgh = ["mypy-boto3-mgh (>=1.28.0,<1.29.0)"] -mgn = ["mypy-boto3-mgn (>=1.28.0,<1.29.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.28.0,<1.29.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config 
(>=1.28.0,<1.29.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.28.0,<1.29.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.28.0,<1.29.0)"] -mobile = ["mypy-boto3-mobile (>=1.28.0,<1.29.0)"] -mq = ["mypy-boto3-mq (>=1.28.0,<1.29.0)"] -mturk = ["mypy-boto3-mturk (>=1.28.0,<1.29.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.28.0,<1.29.0)"] -neptune = ["mypy-boto3-neptune (>=1.28.0,<1.29.0)"] -neptunedata = ["mypy-boto3-neptunedata (>=1.28.0,<1.29.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.28.0,<1.29.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.28.0,<1.29.0)"] -nimble = ["mypy-boto3-nimble (>=1.28.0,<1.29.0)"] -oam = ["mypy-boto3-oam (>=1.28.0,<1.29.0)"] -omics = ["mypy-boto3-omics (>=1.28.0,<1.29.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.28.0,<1.29.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.28.0,<1.29.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.28.0,<1.29.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.28.0,<1.29.0)"] -organizations = ["mypy-boto3-organizations (>=1.28.0,<1.29.0)"] -osis = ["mypy-boto3-osis (>=1.28.0,<1.29.0)"] -outposts = ["mypy-boto3-outposts (>=1.28.0,<1.29.0)"] -panorama = ["mypy-boto3-panorama (>=1.28.0,<1.29.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.28.0,<1.29.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.28.0,<1.29.0)"] -pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.28.0,<1.29.0)"] -personalize = ["mypy-boto3-personalize (>=1.28.0,<1.29.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.28.0,<1.29.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.28.0,<1.29.0)"] -pi = ["mypy-boto3-pi (>=1.28.0,<1.29.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.28.0,<1.29.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.28.0,<1.29.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.28.0,<1.29.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.28.0,<1.29.0)"] -pipes = ["mypy-boto3-pipes (>=1.28.0,<1.29.0)"] -polly = ["mypy-boto3-polly (>=1.28.0,<1.29.0)"] -pricing = ["mypy-boto3-pricing (>=1.28.0,<1.29.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.28.0,<1.29.0)"] -proton = ["mypy-boto3-proton (>=1.28.0,<1.29.0)"] -qldb = ["mypy-boto3-qldb (>=1.28.0,<1.29.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.28.0,<1.29.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.28.0,<1.29.0)"] -ram = ["mypy-boto3-ram (>=1.28.0,<1.29.0)"] -rbin = ["mypy-boto3-rbin (>=1.28.0,<1.29.0)"] -rds = ["mypy-boto3-rds (>=1.28.0,<1.29.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.28.0,<1.29.0)"] -redshift = ["mypy-boto3-redshift (>=1.28.0,<1.29.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.28.0,<1.29.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.28.0,<1.29.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.28.0,<1.29.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.28.0,<1.29.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.28.0,<1.29.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.28.0,<1.29.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.28.0,<1.29.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.28.0,<1.29.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.28.0,<1.29.0)"] -route53 = ["mypy-boto3-route53 (>=1.28.0,<1.29.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.28.0,<1.29.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config 
(>=1.28.0,<1.29.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.28.0,<1.29.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.28.0,<1.29.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.28.0,<1.29.0)"] -rum = ["mypy-boto3-rum (>=1.28.0,<1.29.0)"] -s3 = ["mypy-boto3-s3 (>=1.28.0,<1.29.0)"] -s3control = ["mypy-boto3-s3control (>=1.28.0,<1.29.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.28.0,<1.29.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.28.0,<1.29.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.28.0,<1.29.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.28.0,<1.29.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.28.0,<1.29.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.28.0,<1.29.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.28.0,<1.29.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.28.0,<1.29.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.28.0,<1.29.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.28.0,<1.29.0)"] -schemas = ["mypy-boto3-schemas (>=1.28.0,<1.29.0)"] -sdb = ["mypy-boto3-sdb (>=1.28.0,<1.29.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.28.0,<1.29.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.28.0,<1.29.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.28.0,<1.29.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.28.0,<1.29.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.28.0,<1.29.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.28.0,<1.29.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.28.0,<1.29.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.28.0,<1.29.0)"] -ses = ["mypy-boto3-ses (>=1.28.0,<1.29.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.28.0,<1.29.0)"] -shield = ["mypy-boto3-shield (>=1.28.0,<1.29.0)"] -signer = ["mypy-boto3-signer (>=1.28.0,<1.29.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.28.0,<1.29.0)"] -sms = ["mypy-boto3-sms (>=1.28.0,<1.29.0)"] -sms-voice = ["mypy-boto3-sms-voice (>=1.28.0,<1.29.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.28.0,<1.29.0)"] -snowball = ["mypy-boto3-snowball (>=1.28.0,<1.29.0)"] -sns = ["mypy-boto3-sns (>=1.28.0,<1.29.0)"] -sqs = ["mypy-boto3-sqs (>=1.28.0,<1.29.0)"] -ssm = ["mypy-boto3-ssm (>=1.28.0,<1.29.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.28.0,<1.29.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.28.0,<1.29.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.28.0,<1.29.0)"] -sso = ["mypy-boto3-sso (>=1.28.0,<1.29.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.28.0,<1.29.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.28.0,<1.29.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.28.0,<1.29.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.28.0,<1.29.0)"] -sts = ["mypy-boto3-sts (>=1.28.0,<1.29.0)"] -support = ["mypy-boto3-support (>=1.28.0,<1.29.0)"] -support-app = ["mypy-boto3-support-app (>=1.28.0,<1.29.0)"] -swf = ["mypy-boto3-swf (>=1.28.0,<1.29.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.28.0,<1.29.0)"] -textract = ["mypy-boto3-textract (>=1.28.0,<1.29.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.28.0,<1.29.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.28.0,<1.29.0)"] -tnb = ["mypy-boto3-tnb (>=1.28.0,<1.29.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.28.0,<1.29.0)"] -transfer = ["mypy-boto3-transfer (>=1.28.0,<1.29.0)"] -translate = ["mypy-boto3-translate (>=1.28.0,<1.29.0)"] 
-verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.28.0,<1.29.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.28.0,<1.29.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.28.0,<1.29.0)"] -waf = ["mypy-boto3-waf (>=1.28.0,<1.29.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.28.0,<1.29.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.28.0,<1.29.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.28.0,<1.29.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.28.0,<1.29.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.28.0,<1.29.0)"] -worklink = ["mypy-boto3-worklink (>=1.28.0,<1.29.0)"] -workmail = ["mypy-boto3-workmail (>=1.28.0,<1.29.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.28.0,<1.29.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.28.0,<1.29.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.28.0,<1.29.0)"] -xray = ["mypy-boto3-xray (>=1.28.0,<1.29.0)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.29.0,<1.30.0)"] +account = ["mypy-boto3-account (>=1.29.0,<1.30.0)"] +acm = ["mypy-boto3-acm (>=1.29.0,<1.30.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.29.0,<1.30.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.29.0,<1.30.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.29.0,<1.30.0)", "mypy-boto3-account (>=1.29.0,<1.30.0)", "mypy-boto3-acm (>=1.29.0,<1.30.0)", "mypy-boto3-acm-pca (>=1.29.0,<1.30.0)", "mypy-boto3-alexaforbusiness (>=1.29.0,<1.30.0)", "mypy-boto3-amp (>=1.29.0,<1.30.0)", "mypy-boto3-amplify (>=1.29.0,<1.30.0)", "mypy-boto3-amplifybackend (>=1.29.0,<1.30.0)", "mypy-boto3-amplifyuibuilder (>=1.29.0,<1.30.0)", "mypy-boto3-apigateway (>=1.29.0,<1.30.0)", "mypy-boto3-apigatewaymanagementapi (>=1.29.0,<1.30.0)", "mypy-boto3-apigatewayv2 (>=1.29.0,<1.30.0)", "mypy-boto3-appconfig (>=1.29.0,<1.30.0)", "mypy-boto3-appconfigdata (>=1.29.0,<1.30.0)", "mypy-boto3-appfabric (>=1.29.0,<1.30.0)", "mypy-boto3-appflow (>=1.29.0,<1.30.0)", "mypy-boto3-appintegrations (>=1.29.0,<1.30.0)", "mypy-boto3-application-autoscaling (>=1.29.0,<1.30.0)", "mypy-boto3-application-insights (>=1.29.0,<1.30.0)", "mypy-boto3-applicationcostprofiler (>=1.29.0,<1.30.0)", "mypy-boto3-appmesh (>=1.29.0,<1.30.0)", "mypy-boto3-apprunner (>=1.29.0,<1.30.0)", "mypy-boto3-appstream (>=1.29.0,<1.30.0)", "mypy-boto3-appsync (>=1.29.0,<1.30.0)", "mypy-boto3-arc-zonal-shift (>=1.29.0,<1.30.0)", "mypy-boto3-athena (>=1.29.0,<1.30.0)", "mypy-boto3-auditmanager (>=1.29.0,<1.30.0)", "mypy-boto3-autoscaling (>=1.29.0,<1.30.0)", "mypy-boto3-autoscaling-plans (>=1.29.0,<1.30.0)", "mypy-boto3-backup (>=1.29.0,<1.30.0)", "mypy-boto3-backup-gateway (>=1.29.0,<1.30.0)", "mypy-boto3-backupstorage (>=1.29.0,<1.30.0)", "mypy-boto3-batch (>=1.29.0,<1.30.0)", "mypy-boto3-bedrock (>=1.29.0,<1.30.0)", "mypy-boto3-bedrock-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-billingconductor (>=1.29.0,<1.30.0)", "mypy-boto3-braket (>=1.29.0,<1.30.0)", "mypy-boto3-budgets (>=1.29.0,<1.30.0)", "mypy-boto3-ce (>=1.29.0,<1.30.0)", "mypy-boto3-chime (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-identity (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-meetings (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-messaging (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-voice (>=1.29.0,<1.30.0)", "mypy-boto3-cleanrooms (>=1.29.0,<1.30.0)", "mypy-boto3-cloud9 (>=1.29.0,<1.30.0)", "mypy-boto3-cloudcontrol (>=1.29.0,<1.30.0)", "mypy-boto3-clouddirectory (>=1.29.0,<1.30.0)", "mypy-boto3-cloudformation (>=1.29.0,<1.30.0)", "mypy-boto3-cloudfront (>=1.29.0,<1.30.0)", "mypy-boto3-cloudhsm 
(>=1.29.0,<1.30.0)", "mypy-boto3-cloudhsmv2 (>=1.29.0,<1.30.0)", "mypy-boto3-cloudsearch (>=1.29.0,<1.30.0)", "mypy-boto3-cloudsearchdomain (>=1.29.0,<1.30.0)", "mypy-boto3-cloudtrail (>=1.29.0,<1.30.0)", "mypy-boto3-cloudtrail-data (>=1.29.0,<1.30.0)", "mypy-boto3-cloudwatch (>=1.29.0,<1.30.0)", "mypy-boto3-codeartifact (>=1.29.0,<1.30.0)", "mypy-boto3-codebuild (>=1.29.0,<1.30.0)", "mypy-boto3-codecatalyst (>=1.29.0,<1.30.0)", "mypy-boto3-codecommit (>=1.29.0,<1.30.0)", "mypy-boto3-codedeploy (>=1.29.0,<1.30.0)", "mypy-boto3-codeguru-reviewer (>=1.29.0,<1.30.0)", "mypy-boto3-codeguru-security (>=1.29.0,<1.30.0)", "mypy-boto3-codeguruprofiler (>=1.29.0,<1.30.0)", "mypy-boto3-codepipeline (>=1.29.0,<1.30.0)", "mypy-boto3-codestar (>=1.29.0,<1.30.0)", "mypy-boto3-codestar-connections (>=1.29.0,<1.30.0)", "mypy-boto3-codestar-notifications (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-identity (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-idp (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-sync (>=1.29.0,<1.30.0)", "mypy-boto3-comprehend (>=1.29.0,<1.30.0)", "mypy-boto3-comprehendmedical (>=1.29.0,<1.30.0)", "mypy-boto3-compute-optimizer (>=1.29.0,<1.30.0)", "mypy-boto3-config (>=1.29.0,<1.30.0)", "mypy-boto3-connect (>=1.29.0,<1.30.0)", "mypy-boto3-connect-contact-lens (>=1.29.0,<1.30.0)", "mypy-boto3-connectcampaigns (>=1.29.0,<1.30.0)", "mypy-boto3-connectcases (>=1.29.0,<1.30.0)", "mypy-boto3-connectparticipant (>=1.29.0,<1.30.0)", "mypy-boto3-controltower (>=1.29.0,<1.30.0)", "mypy-boto3-cur (>=1.29.0,<1.30.0)", "mypy-boto3-customer-profiles (>=1.29.0,<1.30.0)", "mypy-boto3-databrew (>=1.29.0,<1.30.0)", "mypy-boto3-dataexchange (>=1.29.0,<1.30.0)", "mypy-boto3-datapipeline (>=1.29.0,<1.30.0)", "mypy-boto3-datasync (>=1.29.0,<1.30.0)", "mypy-boto3-datazone (>=1.29.0,<1.30.0)", "mypy-boto3-dax (>=1.29.0,<1.30.0)", "mypy-boto3-detective (>=1.29.0,<1.30.0)", "mypy-boto3-devicefarm (>=1.29.0,<1.30.0)", "mypy-boto3-devops-guru (>=1.29.0,<1.30.0)", "mypy-boto3-directconnect (>=1.29.0,<1.30.0)", "mypy-boto3-discovery (>=1.29.0,<1.30.0)", "mypy-boto3-dlm (>=1.29.0,<1.30.0)", "mypy-boto3-dms (>=1.29.0,<1.30.0)", "mypy-boto3-docdb (>=1.29.0,<1.30.0)", "mypy-boto3-docdb-elastic (>=1.29.0,<1.30.0)", "mypy-boto3-drs (>=1.29.0,<1.30.0)", "mypy-boto3-ds (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodb (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodbstreams (>=1.29.0,<1.30.0)", "mypy-boto3-ebs (>=1.29.0,<1.30.0)", "mypy-boto3-ec2 (>=1.29.0,<1.30.0)", "mypy-boto3-ec2-instance-connect (>=1.29.0,<1.30.0)", "mypy-boto3-ecr (>=1.29.0,<1.30.0)", "mypy-boto3-ecr-public (>=1.29.0,<1.30.0)", "mypy-boto3-ecs (>=1.29.0,<1.30.0)", "mypy-boto3-efs (>=1.29.0,<1.30.0)", "mypy-boto3-eks (>=1.29.0,<1.30.0)", "mypy-boto3-elastic-inference (>=1.29.0,<1.30.0)", "mypy-boto3-elasticache (>=1.29.0,<1.30.0)", "mypy-boto3-elasticbeanstalk (>=1.29.0,<1.30.0)", "mypy-boto3-elastictranscoder (>=1.29.0,<1.30.0)", "mypy-boto3-elb (>=1.29.0,<1.30.0)", "mypy-boto3-elbv2 (>=1.29.0,<1.30.0)", "mypy-boto3-emr (>=1.29.0,<1.30.0)", "mypy-boto3-emr-containers (>=1.29.0,<1.30.0)", "mypy-boto3-emr-serverless (>=1.29.0,<1.30.0)", "mypy-boto3-entityresolution (>=1.29.0,<1.30.0)", "mypy-boto3-es (>=1.29.0,<1.30.0)", "mypy-boto3-events (>=1.29.0,<1.30.0)", "mypy-boto3-evidently (>=1.29.0,<1.30.0)", "mypy-boto3-finspace (>=1.29.0,<1.30.0)", "mypy-boto3-finspace-data (>=1.29.0,<1.30.0)", "mypy-boto3-firehose (>=1.29.0,<1.30.0)", "mypy-boto3-fis (>=1.29.0,<1.30.0)", "mypy-boto3-fms (>=1.29.0,<1.30.0)", "mypy-boto3-forecast (>=1.29.0,<1.30.0)", "mypy-boto3-forecastquery 
(>=1.29.0,<1.30.0)", "mypy-boto3-frauddetector (>=1.29.0,<1.30.0)", "mypy-boto3-fsx (>=1.29.0,<1.30.0)", "mypy-boto3-gamelift (>=1.29.0,<1.30.0)", "mypy-boto3-glacier (>=1.29.0,<1.30.0)", "mypy-boto3-globalaccelerator (>=1.29.0,<1.30.0)", "mypy-boto3-glue (>=1.29.0,<1.30.0)", "mypy-boto3-grafana (>=1.29.0,<1.30.0)", "mypy-boto3-greengrass (>=1.29.0,<1.30.0)", "mypy-boto3-greengrassv2 (>=1.29.0,<1.30.0)", "mypy-boto3-groundstation (>=1.29.0,<1.30.0)", "mypy-boto3-guardduty (>=1.29.0,<1.30.0)", "mypy-boto3-health (>=1.29.0,<1.30.0)", "mypy-boto3-healthlake (>=1.29.0,<1.30.0)", "mypy-boto3-honeycode (>=1.29.0,<1.30.0)", "mypy-boto3-iam (>=1.29.0,<1.30.0)", "mypy-boto3-identitystore (>=1.29.0,<1.30.0)", "mypy-boto3-imagebuilder (>=1.29.0,<1.30.0)", "mypy-boto3-importexport (>=1.29.0,<1.30.0)", "mypy-boto3-inspector (>=1.29.0,<1.30.0)", "mypy-boto3-inspector2 (>=1.29.0,<1.30.0)", "mypy-boto3-internetmonitor (>=1.29.0,<1.30.0)", "mypy-boto3-iot (>=1.29.0,<1.30.0)", "mypy-boto3-iot-data (>=1.29.0,<1.30.0)", "mypy-boto3-iot-jobs-data (>=1.29.0,<1.30.0)", "mypy-boto3-iot-roborunner (>=1.29.0,<1.30.0)", "mypy-boto3-iot1click-devices (>=1.29.0,<1.30.0)", "mypy-boto3-iot1click-projects (>=1.29.0,<1.30.0)", "mypy-boto3-iotanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-iotdeviceadvisor (>=1.29.0,<1.30.0)", "mypy-boto3-iotevents (>=1.29.0,<1.30.0)", "mypy-boto3-iotevents-data (>=1.29.0,<1.30.0)", "mypy-boto3-iotfleethub (>=1.29.0,<1.30.0)", "mypy-boto3-iotfleetwise (>=1.29.0,<1.30.0)", "mypy-boto3-iotsecuretunneling (>=1.29.0,<1.30.0)", "mypy-boto3-iotsitewise (>=1.29.0,<1.30.0)", "mypy-boto3-iotthingsgraph (>=1.29.0,<1.30.0)", "mypy-boto3-iottwinmaker (>=1.29.0,<1.30.0)", "mypy-boto3-iotwireless (>=1.29.0,<1.30.0)", "mypy-boto3-ivs (>=1.29.0,<1.30.0)", "mypy-boto3-ivs-realtime (>=1.29.0,<1.30.0)", "mypy-boto3-ivschat (>=1.29.0,<1.30.0)", "mypy-boto3-kafka (>=1.29.0,<1.30.0)", "mypy-boto3-kafkaconnect (>=1.29.0,<1.30.0)", "mypy-boto3-kendra (>=1.29.0,<1.30.0)", "mypy-boto3-kendra-ranking (>=1.29.0,<1.30.0)", "mypy-boto3-keyspaces (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-archived-media (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-media (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-signaling (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisvideo (>=1.29.0,<1.30.0)", "mypy-boto3-kms (>=1.29.0,<1.30.0)", "mypy-boto3-lakeformation (>=1.29.0,<1.30.0)", "mypy-boto3-lambda (>=1.29.0,<1.30.0)", "mypy-boto3-launch-wizard (>=1.29.0,<1.30.0)", "mypy-boto3-lex-models (>=1.29.0,<1.30.0)", "mypy-boto3-lex-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-lexv2-models (>=1.29.0,<1.30.0)", "mypy-boto3-lexv2-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.29.0,<1.30.0)", "mypy-boto3-lightsail (>=1.29.0,<1.30.0)", "mypy-boto3-location (>=1.29.0,<1.30.0)", "mypy-boto3-logs (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutequipment (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutmetrics (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutvision (>=1.29.0,<1.30.0)", "mypy-boto3-m2 (>=1.29.0,<1.30.0)", "mypy-boto3-machinelearning (>=1.29.0,<1.30.0)", "mypy-boto3-macie (>=1.29.0,<1.30.0)", "mypy-boto3-macie2 (>=1.29.0,<1.30.0)", "mypy-boto3-managedblockchain (>=1.29.0,<1.30.0)", "mypy-boto3-managedblockchain-query 
(>=1.29.0,<1.30.0)", "mypy-boto3-marketplace-catalog (>=1.29.0,<1.30.0)", "mypy-boto3-marketplace-entitlement (>=1.29.0,<1.30.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-mediaconnect (>=1.29.0,<1.30.0)", "mypy-boto3-mediaconvert (>=1.29.0,<1.30.0)", "mypy-boto3-medialive (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackage (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackage-vod (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackagev2 (>=1.29.0,<1.30.0)", "mypy-boto3-mediastore (>=1.29.0,<1.30.0)", "mypy-boto3-mediastore-data (>=1.29.0,<1.30.0)", "mypy-boto3-mediatailor (>=1.29.0,<1.30.0)", "mypy-boto3-medical-imaging (>=1.29.0,<1.30.0)", "mypy-boto3-memorydb (>=1.29.0,<1.30.0)", "mypy-boto3-meteringmarketplace (>=1.29.0,<1.30.0)", "mypy-boto3-mgh (>=1.29.0,<1.30.0)", "mypy-boto3-mgn (>=1.29.0,<1.30.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhub-config (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhuborchestrator (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhubstrategy (>=1.29.0,<1.30.0)", "mypy-boto3-mobile (>=1.29.0,<1.30.0)", "mypy-boto3-mq (>=1.29.0,<1.30.0)", "mypy-boto3-mturk (>=1.29.0,<1.30.0)", "mypy-boto3-mwaa (>=1.29.0,<1.30.0)", "mypy-boto3-neptune (>=1.29.0,<1.30.0)", "mypy-boto3-neptunedata (>=1.29.0,<1.30.0)", "mypy-boto3-network-firewall (>=1.29.0,<1.30.0)", "mypy-boto3-networkmanager (>=1.29.0,<1.30.0)", "mypy-boto3-nimble (>=1.29.0,<1.30.0)", "mypy-boto3-oam (>=1.29.0,<1.30.0)", "mypy-boto3-omics (>=1.29.0,<1.30.0)", "mypy-boto3-opensearch (>=1.29.0,<1.30.0)", "mypy-boto3-opensearchserverless (>=1.29.0,<1.30.0)", "mypy-boto3-opsworks (>=1.29.0,<1.30.0)", "mypy-boto3-opsworkscm (>=1.29.0,<1.30.0)", "mypy-boto3-organizations (>=1.29.0,<1.30.0)", "mypy-boto3-osis (>=1.29.0,<1.30.0)", "mypy-boto3-outposts (>=1.29.0,<1.30.0)", "mypy-boto3-panorama (>=1.29.0,<1.30.0)", "mypy-boto3-payment-cryptography (>=1.29.0,<1.30.0)", "mypy-boto3-payment-cryptography-data (>=1.29.0,<1.30.0)", "mypy-boto3-pca-connector-ad (>=1.29.0,<1.30.0)", "mypy-boto3-personalize (>=1.29.0,<1.30.0)", "mypy-boto3-personalize-events (>=1.29.0,<1.30.0)", "mypy-boto3-personalize-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-pi (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-email (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-sms-voice (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.29.0,<1.30.0)", "mypy-boto3-pipes (>=1.29.0,<1.30.0)", "mypy-boto3-polly (>=1.29.0,<1.30.0)", "mypy-boto3-pricing (>=1.29.0,<1.30.0)", "mypy-boto3-privatenetworks (>=1.29.0,<1.30.0)", "mypy-boto3-proton (>=1.29.0,<1.30.0)", "mypy-boto3-qldb (>=1.29.0,<1.30.0)", "mypy-boto3-qldb-session (>=1.29.0,<1.30.0)", "mypy-boto3-quicksight (>=1.29.0,<1.30.0)", "mypy-boto3-ram (>=1.29.0,<1.30.0)", "mypy-boto3-rbin (>=1.29.0,<1.30.0)", "mypy-boto3-rds (>=1.29.0,<1.30.0)", "mypy-boto3-rds-data (>=1.29.0,<1.30.0)", "mypy-boto3-redshift (>=1.29.0,<1.30.0)", "mypy-boto3-redshift-data (>=1.29.0,<1.30.0)", "mypy-boto3-redshift-serverless (>=1.29.0,<1.30.0)", "mypy-boto3-rekognition (>=1.29.0,<1.30.0)", "mypy-boto3-resiliencehub (>=1.29.0,<1.30.0)", "mypy-boto3-resource-explorer-2 (>=1.29.0,<1.30.0)", "mypy-boto3-resource-groups (>=1.29.0,<1.30.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.29.0,<1.30.0)", "mypy-boto3-robomaker (>=1.29.0,<1.30.0)", "mypy-boto3-rolesanywhere (>=1.29.0,<1.30.0)", "mypy-boto3-route53 (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-cluster (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-control-config (>=1.29.0,<1.30.0)", 
"mypy-boto3-route53-recovery-readiness (>=1.29.0,<1.30.0)", "mypy-boto3-route53domains (>=1.29.0,<1.30.0)", "mypy-boto3-route53resolver (>=1.29.0,<1.30.0)", "mypy-boto3-rum (>=1.29.0,<1.30.0)", "mypy-boto3-s3 (>=1.29.0,<1.30.0)", "mypy-boto3-s3control (>=1.29.0,<1.30.0)", "mypy-boto3-s3outposts (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-edge (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-geospatial (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-metrics (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-savingsplans (>=1.29.0,<1.30.0)", "mypy-boto3-scheduler (>=1.29.0,<1.30.0)", "mypy-boto3-schemas (>=1.29.0,<1.30.0)", "mypy-boto3-sdb (>=1.29.0,<1.30.0)", "mypy-boto3-secretsmanager (>=1.29.0,<1.30.0)", "mypy-boto3-securityhub (>=1.29.0,<1.30.0)", "mypy-boto3-securitylake (>=1.29.0,<1.30.0)", "mypy-boto3-serverlessrepo (>=1.29.0,<1.30.0)", "mypy-boto3-service-quotas (>=1.29.0,<1.30.0)", "mypy-boto3-servicecatalog (>=1.29.0,<1.30.0)", "mypy-boto3-servicecatalog-appregistry (>=1.29.0,<1.30.0)", "mypy-boto3-servicediscovery (>=1.29.0,<1.30.0)", "mypy-boto3-ses (>=1.29.0,<1.30.0)", "mypy-boto3-sesv2 (>=1.29.0,<1.30.0)", "mypy-boto3-shield (>=1.29.0,<1.30.0)", "mypy-boto3-signer (>=1.29.0,<1.30.0)", "mypy-boto3-simspaceweaver (>=1.29.0,<1.30.0)", "mypy-boto3-sms (>=1.29.0,<1.30.0)", "mypy-boto3-sms-voice (>=1.29.0,<1.30.0)", "mypy-boto3-snow-device-management (>=1.29.0,<1.30.0)", "mypy-boto3-snowball (>=1.29.0,<1.30.0)", "mypy-boto3-sns (>=1.29.0,<1.30.0)", "mypy-boto3-sqs (>=1.29.0,<1.30.0)", "mypy-boto3-ssm (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-contacts (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-incidents (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-sap (>=1.29.0,<1.30.0)", "mypy-boto3-sso (>=1.29.0,<1.30.0)", "mypy-boto3-sso-admin (>=1.29.0,<1.30.0)", "mypy-boto3-sso-oidc (>=1.29.0,<1.30.0)", "mypy-boto3-stepfunctions (>=1.29.0,<1.30.0)", "mypy-boto3-storagegateway (>=1.29.0,<1.30.0)", "mypy-boto3-sts (>=1.29.0,<1.30.0)", "mypy-boto3-support (>=1.29.0,<1.30.0)", "mypy-boto3-support-app (>=1.29.0,<1.30.0)", "mypy-boto3-swf (>=1.29.0,<1.30.0)", "mypy-boto3-synthetics (>=1.29.0,<1.30.0)", "mypy-boto3-textract (>=1.29.0,<1.30.0)", "mypy-boto3-timestream-query (>=1.29.0,<1.30.0)", "mypy-boto3-timestream-write (>=1.29.0,<1.30.0)", "mypy-boto3-tnb (>=1.29.0,<1.30.0)", "mypy-boto3-transcribe (>=1.29.0,<1.30.0)", "mypy-boto3-transfer (>=1.29.0,<1.30.0)", "mypy-boto3-translate (>=1.29.0,<1.30.0)", "mypy-boto3-verifiedpermissions (>=1.29.0,<1.30.0)", "mypy-boto3-voice-id (>=1.29.0,<1.30.0)", "mypy-boto3-vpc-lattice (>=1.29.0,<1.30.0)", "mypy-boto3-waf (>=1.29.0,<1.30.0)", "mypy-boto3-waf-regional (>=1.29.0,<1.30.0)", "mypy-boto3-wafv2 (>=1.29.0,<1.30.0)", "mypy-boto3-wellarchitected (>=1.29.0,<1.30.0)", "mypy-boto3-wisdom (>=1.29.0,<1.30.0)", "mypy-boto3-workdocs (>=1.29.0,<1.30.0)", "mypy-boto3-worklink (>=1.29.0,<1.30.0)", "mypy-boto3-workmail (>=1.29.0,<1.30.0)", "mypy-boto3-workmailmessageflow (>=1.29.0,<1.30.0)", "mypy-boto3-workspaces (>=1.29.0,<1.30.0)", "mypy-boto3-workspaces-web (>=1.29.0,<1.30.0)", "mypy-boto3-xray (>=1.29.0,<1.30.0)"] +amp = ["mypy-boto3-amp (>=1.29.0,<1.30.0)"] +amplify = ["mypy-boto3-amplify (>=1.29.0,<1.30.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.29.0,<1.30.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.29.0,<1.30.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.29.0,<1.30.0)"] 
+apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.29.0,<1.30.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.29.0,<1.30.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.29.0,<1.30.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.29.0,<1.30.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.29.0,<1.30.0)"] +appflow = ["mypy-boto3-appflow (>=1.29.0,<1.30.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.29.0,<1.30.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.29.0,<1.30.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.29.0,<1.30.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.29.0,<1.30.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.29.0,<1.30.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.29.0,<1.30.0)"] +appstream = ["mypy-boto3-appstream (>=1.29.0,<1.30.0)"] +appsync = ["mypy-boto3-appsync (>=1.29.0,<1.30.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.29.0,<1.30.0)"] +athena = ["mypy-boto3-athena (>=1.29.0,<1.30.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.29.0,<1.30.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.29.0,<1.30.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.29.0,<1.30.0)"] +backup = ["mypy-boto3-backup (>=1.29.0,<1.30.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.29.0,<1.30.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.29.0,<1.30.0)"] +batch = ["mypy-boto3-batch (>=1.29.0,<1.30.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.29.0,<1.30.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.29.0,<1.30.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.29.0,<1.30.0)"] +boto3 = ["boto3 (==1.29.0)", "botocore (==1.32.0)"] +braket = ["mypy-boto3-braket (>=1.29.0,<1.30.0)"] +budgets = ["mypy-boto3-budgets (>=1.29.0,<1.30.0)"] +ce = ["mypy-boto3-ce (>=1.29.0,<1.30.0)"] +chime = ["mypy-boto3-chime (>=1.29.0,<1.30.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.29.0,<1.30.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.29.0,<1.30.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.29.0,<1.30.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.29.0,<1.30.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.29.0,<1.30.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.29.0,<1.30.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.29.0,<1.30.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.29.0,<1.30.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.29.0,<1.30.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.29.0,<1.30.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.29.0,<1.30.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.29.0,<1.30.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.29.0,<1.30.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.29.0,<1.30.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.29.0,<1.30.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.29.0,<1.30.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.29.0,<1.30.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.29.0,<1.30.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.29.0,<1.30.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.29.0,<1.30.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.29.0,<1.30.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.29.0,<1.30.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.29.0,<1.30.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.29.0,<1.30.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.29.0,<1.30.0)"] 
+codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.29.0,<1.30.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.29.0,<1.30.0)"] +codestar = ["mypy-boto3-codestar (>=1.29.0,<1.30.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.29.0,<1.30.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.29.0,<1.30.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.29.0,<1.30.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.29.0,<1.30.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.29.0,<1.30.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.29.0,<1.30.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.29.0,<1.30.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.29.0,<1.30.0)"] +config = ["mypy-boto3-config (>=1.29.0,<1.30.0)"] +connect = ["mypy-boto3-connect (>=1.29.0,<1.30.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.29.0,<1.30.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.29.0,<1.30.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.29.0,<1.30.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.29.0,<1.30.0)"] +controltower = ["mypy-boto3-controltower (>=1.29.0,<1.30.0)"] +cur = ["mypy-boto3-cur (>=1.29.0,<1.30.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.29.0,<1.30.0)"] +databrew = ["mypy-boto3-databrew (>=1.29.0,<1.30.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.29.0,<1.30.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.29.0,<1.30.0)"] +datasync = ["mypy-boto3-datasync (>=1.29.0,<1.30.0)"] +datazone = ["mypy-boto3-datazone (>=1.29.0,<1.30.0)"] +dax = ["mypy-boto3-dax (>=1.29.0,<1.30.0)"] +detective = ["mypy-boto3-detective (>=1.29.0,<1.30.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.29.0,<1.30.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.29.0,<1.30.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.29.0,<1.30.0)"] +discovery = ["mypy-boto3-discovery (>=1.29.0,<1.30.0)"] +dlm = ["mypy-boto3-dlm (>=1.29.0,<1.30.0)"] +dms = ["mypy-boto3-dms (>=1.29.0,<1.30.0)"] +docdb = ["mypy-boto3-docdb (>=1.29.0,<1.30.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.29.0,<1.30.0)"] +drs = ["mypy-boto3-drs (>=1.29.0,<1.30.0)"] +ds = ["mypy-boto3-ds (>=1.29.0,<1.30.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.29.0,<1.30.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.29.0,<1.30.0)"] +ebs = ["mypy-boto3-ebs (>=1.29.0,<1.30.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.29.0,<1.30.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.29.0,<1.30.0)"] +ecr = ["mypy-boto3-ecr (>=1.29.0,<1.30.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.29.0,<1.30.0)"] +ecs = ["mypy-boto3-ecs (>=1.29.0,<1.30.0)"] +efs = ["mypy-boto3-efs (>=1.29.0,<1.30.0)"] +eks = ["mypy-boto3-eks (>=1.29.0,<1.30.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.29.0,<1.30.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.29.0,<1.30.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.29.0,<1.30.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.29.0,<1.30.0)"] +elb = ["mypy-boto3-elb (>=1.29.0,<1.30.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.29.0,<1.30.0)"] +emr = ["mypy-boto3-emr (>=1.29.0,<1.30.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.29.0,<1.30.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.29.0,<1.30.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.29.0,<1.30.0)"] +es = ["mypy-boto3-es (>=1.29.0,<1.30.0)"] +essential = ["mypy-boto3-cloudformation (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodb (>=1.29.0,<1.30.0)", 
"mypy-boto3-ec2 (>=1.29.0,<1.30.0)", "mypy-boto3-lambda (>=1.29.0,<1.30.0)", "mypy-boto3-rds (>=1.29.0,<1.30.0)", "mypy-boto3-s3 (>=1.29.0,<1.30.0)", "mypy-boto3-sqs (>=1.29.0,<1.30.0)"] +events = ["mypy-boto3-events (>=1.29.0,<1.30.0)"] +evidently = ["mypy-boto3-evidently (>=1.29.0,<1.30.0)"] +finspace = ["mypy-boto3-finspace (>=1.29.0,<1.30.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.29.0,<1.30.0)"] +firehose = ["mypy-boto3-firehose (>=1.29.0,<1.30.0)"] +fis = ["mypy-boto3-fis (>=1.29.0,<1.30.0)"] +fms = ["mypy-boto3-fms (>=1.29.0,<1.30.0)"] +forecast = ["mypy-boto3-forecast (>=1.29.0,<1.30.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.29.0,<1.30.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.29.0,<1.30.0)"] +fsx = ["mypy-boto3-fsx (>=1.29.0,<1.30.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.29.0,<1.30.0)"] +glacier = ["mypy-boto3-glacier (>=1.29.0,<1.30.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.29.0,<1.30.0)"] +glue = ["mypy-boto3-glue (>=1.29.0,<1.30.0)"] +grafana = ["mypy-boto3-grafana (>=1.29.0,<1.30.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.29.0,<1.30.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.29.0,<1.30.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.29.0,<1.30.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.29.0,<1.30.0)"] +health = ["mypy-boto3-health (>=1.29.0,<1.30.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.29.0,<1.30.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.29.0,<1.30.0)"] +iam = ["mypy-boto3-iam (>=1.29.0,<1.30.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.29.0,<1.30.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.29.0,<1.30.0)"] +importexport = ["mypy-boto3-importexport (>=1.29.0,<1.30.0)"] +inspector = ["mypy-boto3-inspector (>=1.29.0,<1.30.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.29.0,<1.30.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.29.0,<1.30.0)"] +iot = ["mypy-boto3-iot (>=1.29.0,<1.30.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.29.0,<1.30.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.29.0,<1.30.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.29.0,<1.30.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.29.0,<1.30.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.29.0,<1.30.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.29.0,<1.30.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.29.0,<1.30.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.29.0,<1.30.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.29.0,<1.30.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.29.0,<1.30.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.29.0,<1.30.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.29.0,<1.30.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.29.0,<1.30.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.29.0,<1.30.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.29.0,<1.30.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.29.0,<1.30.0)"] +ivs = ["mypy-boto3-ivs (>=1.29.0,<1.30.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.29.0,<1.30.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.29.0,<1.30.0)"] +kafka = ["mypy-boto3-kafka (>=1.29.0,<1.30.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.29.0,<1.30.0)"] +kendra = ["mypy-boto3-kendra (>=1.29.0,<1.30.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.29.0,<1.30.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.29.0,<1.30.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.29.0,<1.30.0)"] +kinesis-video-archived-media = 
["mypy-boto3-kinesis-video-archived-media (>=1.29.0,<1.30.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.29.0,<1.30.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.29.0,<1.30.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.29.0,<1.30.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.29.0,<1.30.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.29.0,<1.30.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.29.0,<1.30.0)"] +kms = ["mypy-boto3-kms (>=1.29.0,<1.30.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.29.0,<1.30.0)"] +lambda = ["mypy-boto3-lambda (>=1.29.0,<1.30.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.29.0,<1.30.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.29.0,<1.30.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.29.0,<1.30.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.29.0,<1.30.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.29.0,<1.30.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.29.0,<1.30.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.29.0,<1.30.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.29.0,<1.30.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.29.0,<1.30.0)"] +location = ["mypy-boto3-location (>=1.29.0,<1.30.0)"] +logs = ["mypy-boto3-logs (>=1.29.0,<1.30.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.29.0,<1.30.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.29.0,<1.30.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.29.0,<1.30.0)"] +m2 = ["mypy-boto3-m2 (>=1.29.0,<1.30.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.29.0,<1.30.0)"] +macie = ["mypy-boto3-macie (>=1.29.0,<1.30.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.29.0,<1.30.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.29.0,<1.30.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.29.0,<1.30.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.29.0,<1.30.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.29.0,<1.30.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.29.0,<1.30.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.29.0,<1.30.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.29.0,<1.30.0)"] +medialive = ["mypy-boto3-medialive (>=1.29.0,<1.30.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.29.0,<1.30.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.29.0,<1.30.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.29.0,<1.30.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.29.0,<1.30.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.29.0,<1.30.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.29.0,<1.30.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.29.0,<1.30.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.29.0,<1.30.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.29.0,<1.30.0)"] +mgh = ["mypy-boto3-mgh (>=1.29.0,<1.30.0)"] +mgn = ["mypy-boto3-mgn (>=1.29.0,<1.30.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.29.0,<1.30.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.29.0,<1.30.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.29.0,<1.30.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.29.0,<1.30.0)"] +mobile = ["mypy-boto3-mobile (>=1.29.0,<1.30.0)"] +mq = ["mypy-boto3-mq 
(>=1.29.0,<1.30.0)"] +mturk = ["mypy-boto3-mturk (>=1.29.0,<1.30.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.29.0,<1.30.0)"] +neptune = ["mypy-boto3-neptune (>=1.29.0,<1.30.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.29.0,<1.30.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.29.0,<1.30.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.29.0,<1.30.0)"] +nimble = ["mypy-boto3-nimble (>=1.29.0,<1.30.0)"] +oam = ["mypy-boto3-oam (>=1.29.0,<1.30.0)"] +omics = ["mypy-boto3-omics (>=1.29.0,<1.30.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.29.0,<1.30.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.29.0,<1.30.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.29.0,<1.30.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.29.0,<1.30.0)"] +organizations = ["mypy-boto3-organizations (>=1.29.0,<1.30.0)"] +osis = ["mypy-boto3-osis (>=1.29.0,<1.30.0)"] +outposts = ["mypy-boto3-outposts (>=1.29.0,<1.30.0)"] +panorama = ["mypy-boto3-panorama (>=1.29.0,<1.30.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.29.0,<1.30.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.29.0,<1.30.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.29.0,<1.30.0)"] +personalize = ["mypy-boto3-personalize (>=1.29.0,<1.30.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.29.0,<1.30.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.29.0,<1.30.0)"] +pi = ["mypy-boto3-pi (>=1.29.0,<1.30.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.29.0,<1.30.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.29.0,<1.30.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.29.0,<1.30.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.29.0,<1.30.0)"] +pipes = ["mypy-boto3-pipes (>=1.29.0,<1.30.0)"] +polly = ["mypy-boto3-polly (>=1.29.0,<1.30.0)"] +pricing = ["mypy-boto3-pricing (>=1.29.0,<1.30.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.29.0,<1.30.0)"] +proton = ["mypy-boto3-proton (>=1.29.0,<1.30.0)"] +qldb = ["mypy-boto3-qldb (>=1.29.0,<1.30.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.29.0,<1.30.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.29.0,<1.30.0)"] +ram = ["mypy-boto3-ram (>=1.29.0,<1.30.0)"] +rbin = ["mypy-boto3-rbin (>=1.29.0,<1.30.0)"] +rds = ["mypy-boto3-rds (>=1.29.0,<1.30.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.29.0,<1.30.0)"] +redshift = ["mypy-boto3-redshift (>=1.29.0,<1.30.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.29.0,<1.30.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.29.0,<1.30.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.29.0,<1.30.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.29.0,<1.30.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.29.0,<1.30.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.29.0,<1.30.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.29.0,<1.30.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.29.0,<1.30.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.29.0,<1.30.0)"] +route53 = ["mypy-boto3-route53 (>=1.29.0,<1.30.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.29.0,<1.30.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.29.0,<1.30.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.29.0,<1.30.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.29.0,<1.30.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.29.0,<1.30.0)"] +rum = 
["mypy-boto3-rum (>=1.29.0,<1.30.0)"] +s3 = ["mypy-boto3-s3 (>=1.29.0,<1.30.0)"] +s3control = ["mypy-boto3-s3control (>=1.29.0,<1.30.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.29.0,<1.30.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.29.0,<1.30.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.29.0,<1.30.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.29.0,<1.30.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.29.0,<1.30.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.29.0,<1.30.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.29.0,<1.30.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.29.0,<1.30.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.29.0,<1.30.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.29.0,<1.30.0)"] +schemas = ["mypy-boto3-schemas (>=1.29.0,<1.30.0)"] +sdb = ["mypy-boto3-sdb (>=1.29.0,<1.30.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.29.0,<1.30.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.29.0,<1.30.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.29.0,<1.30.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.29.0,<1.30.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.29.0,<1.30.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.29.0,<1.30.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.29.0,<1.30.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.29.0,<1.30.0)"] +ses = ["mypy-boto3-ses (>=1.29.0,<1.30.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.29.0,<1.30.0)"] +shield = ["mypy-boto3-shield (>=1.29.0,<1.30.0)"] +signer = ["mypy-boto3-signer (>=1.29.0,<1.30.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.29.0,<1.30.0)"] +sms = ["mypy-boto3-sms (>=1.29.0,<1.30.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.29.0,<1.30.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.29.0,<1.30.0)"] +snowball = ["mypy-boto3-snowball (>=1.29.0,<1.30.0)"] +sns = ["mypy-boto3-sns (>=1.29.0,<1.30.0)"] +sqs = ["mypy-boto3-sqs (>=1.29.0,<1.30.0)"] +ssm = ["mypy-boto3-ssm (>=1.29.0,<1.30.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.29.0,<1.30.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.29.0,<1.30.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.29.0,<1.30.0)"] +sso = ["mypy-boto3-sso (>=1.29.0,<1.30.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.29.0,<1.30.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.29.0,<1.30.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.29.0,<1.30.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.29.0,<1.30.0)"] +sts = ["mypy-boto3-sts (>=1.29.0,<1.30.0)"] +support = ["mypy-boto3-support (>=1.29.0,<1.30.0)"] +support-app = ["mypy-boto3-support-app (>=1.29.0,<1.30.0)"] +swf = ["mypy-boto3-swf (>=1.29.0,<1.30.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.29.0,<1.30.0)"] +textract = ["mypy-boto3-textract (>=1.29.0,<1.30.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.29.0,<1.30.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.29.0,<1.30.0)"] +tnb = ["mypy-boto3-tnb (>=1.29.0,<1.30.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.29.0,<1.30.0)"] +transfer = ["mypy-boto3-transfer (>=1.29.0,<1.30.0)"] +translate = ["mypy-boto3-translate (>=1.29.0,<1.30.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.29.0,<1.30.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.29.0,<1.30.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.29.0,<1.30.0)"] +waf = ["mypy-boto3-waf (>=1.29.0,<1.30.0)"] +waf-regional = 
["mypy-boto3-waf-regional (>=1.29.0,<1.30.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.29.0,<1.30.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.29.0,<1.30.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.29.0,<1.30.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.29.0,<1.30.0)"] +worklink = ["mypy-boto3-worklink (>=1.29.0,<1.30.0)"] +workmail = ["mypy-boto3-workmail (>=1.29.0,<1.30.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.29.0,<1.30.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.29.0,<1.30.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.29.0,<1.30.0)"] +xray = ["mypy-boto3-xray (>=1.29.0,<1.30.0)"] [[package]] name = "botocore" -version = "1.31.52" +version = "1.32.0" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.31.52-py3-none-any.whl", hash = "sha256:46b0a75a38521aa6a75fddccb1542e002930e609d4e13516f40fef170d32e515"}, - {file = "botocore-1.31.52.tar.gz", hash = "sha256:6d09881c5a8be34b497872ca3936f8757d886a6f42f2a8703411928189cfedc0"}, + {file = "botocore-1.32.0-py3-none-any.whl", hash = "sha256:9c1e143feb6a04235cec342d2acb31a0f44df3c89f309f839e03e38a75f3f44e"}, + {file = "botocore-1.32.0.tar.gz", hash = "sha256:95fe3357b9ddc4559941dbea0f0a6b8fc043305f013b7ae2a85dff0c3b36ee92"}, ] [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = ">=1.25.4,<1.27" +urllib3 = [ + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, +] [package.extras] -crt = ["awscrt (==0.16.26)"] +crt = ["awscrt (==0.19.12)"] [[package]] name = "botocore-stubs" @@ -1276,12 +1282,12 @@ files = [ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -1383,8 +1389,8 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -2355,13 +2361,13 @@ reports = ["lxml"] [[package]] name = "mypy-boto3-cloudformation" -version = "1.28.48" -description = "Type annotations for boto3.CloudFormation 1.28.48 service generated with mypy-boto3-builder 7.19.0" 
+version = "1.29.0" +description = "Type annotations for boto3.CloudFormation 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudformation-1.28.48.tar.gz", hash = "sha256:efbe4073397800824287c8d52a65383112862f903b16fd587b5113449652371b"}, - {file = "mypy_boto3_cloudformation-1.28.48-py3-none-any.whl", hash = "sha256:653e14414abc9fab8d29d693f138639a1325322fb6ba1b06ca90a6ccb11dfd94"}, + {file = "mypy-boto3-cloudformation-1.29.0.tar.gz", hash = "sha256:91b7202a439d31f7e6645f34ea810f1900f23214900fdf6de210a0704c14da70"}, + {file = "mypy_boto3_cloudformation-1.29.0-py3-none-any.whl", hash = "sha256:b719c35be8b4d5606e9b4fd66d4d0c0e3d5eaf9508a72099053c8e0640b652af"}, ] [package.dependencies] @@ -2369,13 +2375,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-dynamodb" -version = "1.28.36" -description = "Type annotations for boto3.DynamoDB 1.28.36 service generated with mypy-boto3-builder 7.18.0" +version = "1.29.0" +description = "Type annotations for boto3.DynamoDB 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-dynamodb-1.28.36.tar.gz", hash = "sha256:5fe1d336fdc8c58f345c9c1b4e4c1a2d164660531cf3a074d4598975fb2687de"}, - {file = "mypy_boto3_dynamodb-1.28.36-py3-none-any.whl", hash = "sha256:9a3b49385d17e421661ab8639fc09cc64a706198be20287f82d83511289294a3"}, + {file = "mypy-boto3-dynamodb-1.29.0.tar.gz", hash = "sha256:e9b0f1cf1d66d2cbc5d7177832dcd08d85cfa84983934aa361bfc3ca57e06edc"}, + {file = "mypy_boto3_dynamodb-1.29.0-py3-none-any.whl", hash = "sha256:a54d9bf0a9449423fa909586a6003e55ba1d64fc77107c228f4091020f83d134"}, ] [package.dependencies] @@ -2383,13 +2389,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-ec2" -version = "1.28.51" -description = "Type annotations for boto3.EC2 1.28.51 service generated with mypy-boto3-builder 7.19.0" +version = "1.29.0" +description = "Type annotations for boto3.EC2 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-ec2-1.28.51.tar.gz", hash = "sha256:e62204a712e89a14e0663d3d18813ac8072706684b0e89a1b04a7d01c10f9ebe"}, - {file = "mypy_boto3_ec2-1.28.51-py3-none-any.whl", hash = "sha256:f9162f00b144cf2fab5b8b56d7883674b1d4096c79a7226b592918eae17e6235"}, + {file = "mypy-boto3-ec2-1.29.0.tar.gz", hash = "sha256:1e636794205fee5069701a32919c03b40da1d031dbcd6bfca097c9001774dfc1"}, + {file = "mypy_boto3_ec2-1.29.0-py3-none-any.whl", hash = "sha256:9684add91b80889880ba1403f28e32d5f6dcf540f318de59913e0e9fa10965be"}, ] [package.dependencies] @@ -2397,13 +2403,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-lambda" -version = "1.28.36" -description = "Type annotations for boto3.Lambda 1.28.36 service generated with mypy-boto3-builder 7.18.0" +version = "1.29.0" +description = "Type annotations for boto3.Lambda 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-lambda-1.28.36.tar.gz", hash = "sha256:70498e6ff6bfd60b758553d27fadf691ba169572faca01c2bd457da0b48b9cff"}, - {file = "mypy_boto3_lambda-1.28.36-py3-none-any.whl", hash = "sha256:edb1f49279f7713929a70eaab00cf3d4ba65a10016db636805d022b2eaf14c84"}, + {file = 
"mypy-boto3-lambda-1.29.0.tar.gz", hash = "sha256:e4537261f7b675b1c165a7dc04d4b661f2f338a45e57bd2bee92d9a41a9cd407"}, + {file = "mypy_boto3_lambda-1.29.0-py3-none-any.whl", hash = "sha256:cc3f4dee77181feb2a1ec90f72258a32bdc75f83d01b3c637ca791073279d3e5"}, ] [package.dependencies] @@ -2411,13 +2417,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-logs" -version = "1.28.52" -description = "Type annotations for boto3.CloudWatchLogs 1.28.52 service generated with mypy-boto3-builder 7.19.0" +version = "1.29.0" +description = "Type annotations for boto3.CloudWatchLogs 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-logs-1.28.52.tar.gz", hash = "sha256:b51e9b97961223bfe2314ce16887bbd229857b3960a61d372480d6d688168b7e"}, - {file = "mypy_boto3_logs-1.28.52-py3-none-any.whl", hash = "sha256:d180d3ece8aeb349ae504fa9eddb1afb0d9574a237e9d728c6502a8b8e2d9147"}, + {file = "mypy-boto3-logs-1.29.0.tar.gz", hash = "sha256:c30f51dafad4578d74930f3dc7fa0e0cba7250546a0388ed614f8226e6180ffd"}, + {file = "mypy_boto3_logs-1.29.0-py3-none-any.whl", hash = "sha256:0f74e18b773e099d99050337a67127a1dd80441f810286d2bf1acdfc5c70a1ea"}, ] [package.dependencies] @@ -2425,13 +2431,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-rds" -version = "1.28.41" -description = "Type annotations for boto3.RDS 1.28.41 service generated with mypy-boto3-builder 7.18.2" +version = "1.29.0" +description = "Type annotations for boto3.RDS 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-rds-1.28.41.tar.gz", hash = "sha256:19b7d6a1e3e53ff7d03157052885ceead29861fba2d9cbddf701f6238159cb72"}, - {file = "mypy_boto3_rds-1.28.41-py3-none-any.whl", hash = "sha256:2935be3736147b726964e3cebcb918d0fc394e4012b9b7d3d0d35c12aa740ddf"}, + {file = "mypy-boto3-rds-1.29.0.tar.gz", hash = "sha256:2e7688620ec81a637fbb129ed4165592b118f255089de98013d3b95fb14bcf89"}, + {file = "mypy_boto3_rds-1.29.0-py3-none-any.whl", hash = "sha256:3cab2b07a29c06ad1f469bcd98a8796f23ae423f7f03a93d43b3a0cf4cb9877c"}, ] [package.dependencies] @@ -2439,13 +2445,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-s3" -version = "1.28.52" -description = "Type annotations for boto3.S3 1.28.52 service generated with mypy-boto3-builder 7.19.0" +version = "1.29.0" +description = "Type annotations for boto3.S3 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.28.52.tar.gz", hash = "sha256:179cb7542cc5ef656f1323ad51eb237afcba77d1e5ed07d21a013fe36effb8b2"}, - {file = "mypy_boto3_s3-1.28.52-py3-none-any.whl", hash = "sha256:a75cd5ff28f1cb5109dd50db94259436701208fa97c61b5a2cc0689e169b7cba"}, + {file = "mypy-boto3-s3-1.29.0.tar.gz", hash = "sha256:3c8473974e304aa512abbf6a47454d9834674e89db414545e2f0cb4fcdd227c9"}, + {file = "mypy_boto3_s3-1.29.0-py3-none-any.whl", hash = "sha256:f5040429b0c3814c6ec9c1a59256976186acb7376fd3b56c4e7e5d03272bb1a8"}, ] [package.dependencies] @@ -2453,13 +2459,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-sqs" -version = "1.28.36" -description = "Type annotations for boto3.SQS 1.28.36 service generated with mypy-boto3-builder 7.18.0" +version = 
"1.29.0" +description = "Type annotations for boto3.SQS 1.29.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-sqs-1.28.36.tar.gz", hash = "sha256:d9c159e020f0ef225a6d5850a3673e8b236327243ba5ffe0d13762ae4fdc0e21"}, - {file = "mypy_boto3_sqs-1.28.36-py3-none-any.whl", hash = "sha256:8457aa9f2a6da44e8543e547597773f67a04e517f6a398989117cf1fa3f70d6e"}, + {file = "mypy-boto3-sqs-1.29.0.tar.gz", hash = "sha256:0835256e3aabd27b2acf613c1b82a22b9de18412a0b07bd04d6d214c3f063906"}, + {file = "mypy_boto3_sqs-1.29.0-py3-none-any.whl", hash = "sha256:db88751bd7765f51c2b1f9061545ddb06639d301c3d981d3b3fa4b367f0ca8ea"}, ] [package.dependencies] @@ -3862,13 +3868,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.6.2" +version = "0.7.0" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.6.2-py3-none-any.whl", hash = "sha256:b014be3a8a2aab98cfe1abc7229cc5a9a0cf05eb9c1f2b86b230fd8df3f78084"}, - {file = "s3transfer-0.6.2.tar.gz", hash = "sha256:cab66d3380cca3e70939ef2255d01cd8aece6a4907a9528740f668c4b0611861"}, + {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"}, + {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"}, ] [package.dependencies] From 1f4023b63c501639b8a4ccc5b51dbd88d4cfaec4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 10:05:39 -0800 Subject: [PATCH 165/262] Bump types-psycopg2 from 2.9.21.15 to 2.9.21.16 (#1515) Bumps [types-psycopg2](https://github.com/python/typeshed) from 2.9.21.15 to 2.9.21.16. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-psycopg2 dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index a3f59fc086..9a48a95dbc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4157,13 +4157,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.15" +version = "2.9.21.16" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.7" files = [ - {file = "types-psycopg2-2.9.21.15.tar.gz", hash = "sha256:cf99b62ab32cd4ef412fc3c4da1c29ca5a130847dff06d709b84a523802406f0"}, - {file = "types_psycopg2-2.9.21.15-py3-none-any.whl", hash = "sha256:cc80479def02e4dd1ef21649d82f04426c73bc0693bcc0a8b5223c7c168472af"}, + {file = "types-psycopg2-2.9.21.16.tar.gz", hash = "sha256:44a3ae748173bb637cff31654d6bd12de9ad0c7ad73afe737df6152830ed82ed"}, + {file = "types_psycopg2-2.9.21.16-py3-none-any.whl", hash = "sha256:e2f24b651239ccfda320ab3457099af035cf37962c36c9fa26a4dc65991aebed"}, ] [[package]] From 3c59596e2e77f2941abf6be8c0649ebe75bd4907 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 10:13:56 -0800 Subject: [PATCH 166/262] Bump mypy from 1.6.1 to 1.7.0 (#1511) Bumps [mypy](https://github.com/python/mypy) from 1.6.1 to 1.7.0. 
- [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.6.1...v1.7.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 57 +++++++++++++++++++++++++++-------------------------- 1 file changed, 29 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9a48a95dbc..d413092615 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2315,38 +2315,38 @@ files = [ [[package]] name = "mypy" -version = "1.6.1" +version = "1.7.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e5012e5cc2ac628177eaac0e83d622b2dd499e28253d4107a08ecc59ede3fc2c"}, - {file = "mypy-1.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d8fbb68711905f8912e5af474ca8b78d077447d8f3918997fecbf26943ff3cbb"}, - {file = "mypy-1.6.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a1ad938fee7d2d96ca666c77b7c494c3c5bd88dff792220e1afbebb2925b5e"}, - {file = "mypy-1.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b96ae2c1279d1065413965c607712006205a9ac541895004a1e0d4f281f2ff9f"}, - {file = "mypy-1.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:40b1844d2e8b232ed92e50a4bd11c48d2daa351f9deee6c194b83bf03e418b0c"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:81af8adaa5e3099469e7623436881eff6b3b06db5ef75e6f5b6d4871263547e5"}, - {file = "mypy-1.6.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8c223fa57cb154c7eab5156856c231c3f5eace1e0bed9b32a24696b7ba3c3245"}, - {file = "mypy-1.6.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8032e00ce71c3ceb93eeba63963b864bf635a18f6c0c12da6c13c450eedb183"}, - {file = "mypy-1.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4c46b51de523817a0045b150ed11b56f9fff55f12b9edd0f3ed35b15a2809de0"}, - {file = "mypy-1.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:19f905bcfd9e167159b3d63ecd8cb5e696151c3e59a1742e79bc3bcb540c42c7"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:82e469518d3e9a321912955cc702d418773a2fd1e91c651280a1bda10622f02f"}, - {file = "mypy-1.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d4473c22cc296425bbbce7e9429588e76e05bc7342da359d6520b6427bf76660"}, - {file = "mypy-1.6.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59a0d7d24dfb26729e0a068639a6ce3500e31d6655df8557156c51c1cb874ce7"}, - {file = "mypy-1.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cfd13d47b29ed3bbaafaff7d8b21e90d827631afda134836962011acb5904b71"}, - {file = "mypy-1.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:eb4f18589d196a4cbe5290b435d135dee96567e07c2b2d43b5c4621b6501531a"}, - {file = "mypy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:41697773aa0bf53ff917aa077e2cde7aa50254f28750f9b88884acea38a16169"}, - {file = "mypy-1.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7274b0c57737bd3476d2229c6389b2ec9eefeb090bbaf77777e9d6b1b5a9d143"}, - {file = "mypy-1.6.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbaf4662e498c8c2e352da5f5bca5ab29d378895fa2d980630656178bd607c46"}, - {file = "mypy-1.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:bb8ccb4724f7d8601938571bf3f24da0da791fe2db7be3d9e79849cb64e0ae85"}, - {file = "mypy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:68351911e85145f582b5aa6cd9ad666c8958bcae897a1bfda8f4940472463c45"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:49ae115da099dcc0922a7a895c1eec82c1518109ea5c162ed50e3b3594c71208"}, - {file = "mypy-1.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8b27958f8c76bed8edaa63da0739d76e4e9ad4ed325c814f9b3851425582a3cd"}, - {file = "mypy-1.6.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:925cd6a3b7b55dfba252b7c4561892311c5358c6b5a601847015a1ad4eb7d332"}, - {file = "mypy-1.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f57e6b6927a49550da3d122f0cb983d400f843a8a82e65b3b380d3d7259468f"}, - {file = "mypy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a43ef1c8ddfdb9575691720b6352761f3f53d85f1b57d7745701041053deff30"}, - {file = "mypy-1.6.1-py3-none-any.whl", hash = "sha256:4cbe68ef919c28ea561165206a2dcb68591c50f3bcf777932323bc208d949cf1"}, - {file = "mypy-1.6.1.tar.gz", hash = "sha256:4d01c00d09a0be62a4ca3f933e315455bde83f37f892ba4b08ce92f3cf44bcc1"}, + {file = "mypy-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5da84d7bf257fd8f66b4f759a904fd2c5a765f70d8b52dde62b521972a0a2357"}, + {file = "mypy-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a3637c03f4025f6405737570d6cbfa4f1400eb3c649317634d273687a09ffc2f"}, + {file = "mypy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b633f188fc5ae1b6edca39dae566974d7ef4e9aaaae00bc36efe1f855e5173ac"}, + {file = "mypy-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed9a3997b90c6f891138e3f83fb8f475c74db4ccaa942a1c7bf99e83a989a1"}, + {file = "mypy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fe46e96ae319df21359c8db77e1aecac8e5949da4773c0274c0ef3d8d1268a9"}, + {file = "mypy-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:df67fbeb666ee8828f675fee724cc2cbd2e4828cc3df56703e02fe6a421b7401"}, + {file = "mypy-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a79cdc12a02eb526d808a32a934c6fe6df07b05f3573d210e41808020aed8b5d"}, + {file = "mypy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f65f385a6f43211effe8c682e8ec3f55d79391f70a201575def73d08db68ead1"}, + {file = "mypy-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e81ffd120ee24959b449b647c4b2fbfcf8acf3465e082b8d58fd6c4c2b27e46"}, + {file = "mypy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29386804c3577c83d76520abf18cfcd7d68264c7e431c5907d250ab502658ee"}, + {file = "mypy-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c076c174e2c7ef8ab416c4e252d94c08cd4980a10967754f91571070bf5fbe"}, + {file = "mypy-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cb8d5f6d0fcd9e708bb190b224089e45902cacef6f6915481806b0c77f7786d"}, + {file = "mypy-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93e76c2256aa50d9c82a88e2f569232e9862c9982095f6d54e13509f01222fc"}, + {file = "mypy-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cddee95dea7990e2215576fae95f6b78a8c12f4c089d7e4367564704e99118d3"}, + {file = "mypy-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d01921dbd691c4061a3e2ecdbfbfad029410c5c2b1ee88946bf45c62c6c91210"}, + {file = "mypy-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:185cff9b9a7fec1f9f7d8352dff8a4c713b2e3eea9c6c4b5ff7f0edf46b91e41"}, + {file = "mypy-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:7a7b1e399c47b18feb6f8ad4a3eef3813e28c1e871ea7d4ea5d444b2ac03c418"}, + {file = "mypy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9fe455ad58a20ec68599139ed1113b21f977b536a91b42bef3ffed5cce7391"}, + {file = "mypy-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d0fa29919d2e720c8dbaf07d5578f93d7b313c3e9954c8ec05b6d83da592e5d9"}, + {file = "mypy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b53655a295c1ed1af9e96b462a736bf083adba7b314ae775563e3fb4e6795f5"}, + {file = "mypy-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1b06b4b109e342f7dccc9efda965fc3970a604db70f8560ddfdee7ef19afb05"}, + {file = "mypy-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf7a2f0a6907f231d5e41adba1a82d7d88cf1f61a70335889412dec99feeb0f8"}, + {file = "mypy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551d4a0cdcbd1d2cccdcc7cb516bb4ae888794929f5b040bb51aae1846062901"}, + {file = "mypy-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55d28d7963bef00c330cb6461db80b0b72afe2f3c4e2963c99517cf06454e665"}, + {file = "mypy-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:870bd1ffc8a5862e593185a4c169804f2744112b4a7c55b93eb50f48e7a77010"}, + {file = "mypy-1.7.0-py3-none-any.whl", hash = "sha256:96650d9a4c651bc2a4991cf46f100973f656d69edc7faf91844e87fe627f7e96"}, + {file = "mypy-1.7.0.tar.gz", hash = "sha256:1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc"}, ] [package.dependencies] @@ -2357,6 +2357,7 @@ typing-extensions = ">=4.1.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] +mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] From 59de0c9cd34eb833c8aab00f838398731f8a26d9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 15 Nov 2023 10:41:11 -0800 Subject: [PATCH 167/262] Bump types-pillow from 10.1.0.1 to 10.1.0.2 (#1512) Bumps [types-pillow](https://github.com/python/typeshed) from 10.1.0.1 to 10.1.0.2. - [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-pillow dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d413092615..f3c2e2accf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4147,13 +4147,13 @@ referencing = "*" [[package]] name = "types-pillow" -version = "10.1.0.1" +version = "10.1.0.2" description = "Typing stubs for Pillow" optional = false python-versions = ">=3.7" files = [ - {file = "types-Pillow-10.1.0.1.tar.gz", hash = "sha256:2ab92b1fea760315a3608394f26de8c63b6335a67cfc6ffefb3fe492b6ae58e5"}, - {file = "types_Pillow-10.1.0.1-py3-none-any.whl", hash = "sha256:5e81c55f9e66c19b32fbae7a8b562f1fdbf76814a952e3154f8d6c183516bbdb"}, + {file = "types-Pillow-10.1.0.2.tar.gz", hash = "sha256:525c1c5ee67b0ac1721c40d2bc618226ef2123c347e527e14e05b920721a13b9"}, + {file = "types_Pillow-10.1.0.2-py3-none-any.whl", hash = "sha256:131078ffa547bf9a201d39ffcdc65633e108148085f4f1b07d4647fcfec6e923"}, ] [[package]] From e0f7512c9662c87380a4135d9e781183eb0ae7ed Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 17 Nov 2023 13:25:06 +0000 Subject: [PATCH 168/262] Bump boto3 from 1.29.0 to 1.29.1 (#1517) --- poetry.lock | 28 ++++++++++++++-------------- 1 file changed, 14 insertions(+), 14 deletions(-) diff --git a/poetry.lock b/poetry.lock index f3c2e2accf..44d056ebfd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -137,17 +137,17 @@ files = [ [[package]] name = "boto3" -version = "1.29.0" +version = "1.29.1" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.29.0-py3-none-any.whl", hash = "sha256:91c72fa4848eda9311c273db667946bd9d953285ae8d54b7bbad541b74adc254"}, - {file = "boto3-1.29.0.tar.gz", hash = "sha256:3e90ea2faa3e9892b9140f857911f9ef0013192a106f50d0ec7b71e8d1afc90a"}, + {file = "boto3-1.29.1-py3-none-any.whl", hash = "sha256:192695305fa65012d21f78ee852b91cb56dd571e84d51fb71f756302bf19d23f"}, + {file = "boto3-1.29.1.tar.gz", hash = "sha256:20285ebf4e98b2905a88aeb162b4f77ff908b2e3e31038b3223e593789290aa3"}, ] [package.dependencies] -botocore = ">=1.32.0,<1.33.0" +botocore = ">=1.32.1,<1.33.0" jmespath = ">=0.7.1,<2.0.0" s3transfer = ">=0.7.0,<0.8.0" @@ -156,18 +156,18 @@ crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.29.0" -description = "Type annotations for boto3 1.29.0 generated with mypy-boto3-builder 7.20.3" +version = "1.29.1" +description = "Type annotations for boto3 1.29.1 generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "boto3-stubs-1.29.0.tar.gz", hash = "sha256:897cb22cbf7971809cac10470121ac194a5cc57d5fb3d8bfec09e07b3cb7646b"}, - {file = "boto3_stubs-1.29.0-py3-none-any.whl", hash = "sha256:e0ffd497ebd63b5d66b7eeef3192201be5453e8e5f449c864dd23877cf18fe3e"}, + {file = "boto3-stubs-1.29.1.tar.gz", hash = "sha256:6dfe6af913b1a4a43bc8d62c98bf98ec2f1ad3474fabeacc7d9a521c5bd689da"}, + {file = "boto3_stubs-1.29.1-py3-none-any.whl", hash = "sha256:3a923b7895f06b079f48cf316d69c3b61e02ca67fdc73c07f864ab09c448723d"}, ] [package.dependencies] -boto3 = {version = "1.29.0", optional = true, markers = "extra == \"boto3\""} -botocore = {version = "1.32.0", optional = true, markers = "extra == \"boto3\""} +boto3 = {version = "1.29.1", optional = true, markers = "extra == \"boto3\""} +botocore = {version = "1.32.1", optional = true, markers = "extra == 
\"boto3\""} botocore-stubs = "*" mypy-boto3-cloudformation = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} mypy-boto3-dynamodb = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} @@ -218,7 +218,7 @@ batch = ["mypy-boto3-batch (>=1.29.0,<1.30.0)"] bedrock = ["mypy-boto3-bedrock (>=1.29.0,<1.30.0)"] bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.29.0,<1.30.0)"] billingconductor = ["mypy-boto3-billingconductor (>=1.29.0,<1.30.0)"] -boto3 = ["boto3 (==1.29.0)", "botocore (==1.32.0)"] +boto3 = ["boto3 (==1.29.1)", "botocore (==1.32.1)"] braket = ["mypy-boto3-braket (>=1.29.0,<1.30.0)"] budgets = ["mypy-boto3-budgets (>=1.29.0,<1.30.0)"] ce = ["mypy-boto3-ce (>=1.29.0,<1.30.0)"] @@ -545,13 +545,13 @@ xray = ["mypy-boto3-xray (>=1.29.0,<1.30.0)"] [[package]] name = "botocore" -version = "1.32.0" +version = "1.32.1" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.32.0-py3-none-any.whl", hash = "sha256:9c1e143feb6a04235cec342d2acb31a0f44df3c89f309f839e03e38a75f3f44e"}, - {file = "botocore-1.32.0.tar.gz", hash = "sha256:95fe3357b9ddc4559941dbea0f0a6b8fc043305f013b7ae2a85dff0c3b36ee92"}, + {file = "botocore-1.32.1-py3-none-any.whl", hash = "sha256:1d9c0ff3eb7828a8bd8c5c7f12cd9d8c05c6fe4c616ef963fdaab538a0da3809"}, + {file = "botocore-1.32.1.tar.gz", hash = "sha256:fcf3cc2913afba8e5f7ebcc15e8f6bfae844ab64bf983bf5a6fe3bb54cce239d"}, ] [package.dependencies] From fa447b74d8e9363bd1c414f98dd537b47e8ee2fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 17 Nov 2023 13:25:40 +0000 Subject: [PATCH 169/262] Bump pyfakefs from 5.3.0 to 5.3.1 (#1518) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 44d056ebfd..f8216b3f52 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3003,13 +3003,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyfakefs" -version = "5.3.0" +version = "5.3.1" description = "pyfakefs implements a fake file system that mocks the Python file system modules." optional = false python-versions = ">=3.7" files = [ - {file = "pyfakefs-5.3.0-py3-none-any.whl", hash = "sha256:33c1f891078c727beec465e75cb314120635e2298456493cc2cc0539e2130cbb"}, - {file = "pyfakefs-5.3.0.tar.gz", hash = "sha256:e3e35f65ce55ee8ecc5e243d55cfdbb5d0aa24938f6e04e19f0fab062f255020"}, + {file = "pyfakefs-5.3.1-py3-none-any.whl", hash = "sha256:dbe268b70da64f1506baf7d7a2a2248b96b56d28d61a68859272b5fdc321c39e"}, + {file = "pyfakefs-5.3.1.tar.gz", hash = "sha256:dd1fb374039fadccf35d3f3df7aa5d239482e0650dcd240e053d3b9e78740918"}, ] [[package]] From a006f7dee77476569846761ba687813216df9b25 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 17 Nov 2023 09:26:43 -0400 Subject: [PATCH 170/262] Instead of raising an exception, we should return a problem detail. 
(#1508) --- api/sip/__init__.py | 27 +++++-- tests/api/sip/test_authentication_provider.py | 74 ++++++++++++------- 2 files changed, 69 insertions(+), 32 deletions(-) diff --git a/api/sip/__init__.py b/api/sip/__init__.py index e42bb96b21..9d99a3ed3a 100644 --- a/api/sip/__init__.py +++ b/api/sip/__init__.py @@ -1,6 +1,8 @@ +from __future__ import annotations + import json from datetime import datetime -from typing import Callable, Optional, Type, Union +from typing import Any, Callable, Dict, Optional, Type, Union from pydantic import Field, PositiveInt @@ -10,6 +12,7 @@ BasicAuthProviderLibrarySettings, BasicAuthProviderSettings, ) +from api.problem_details import INVALID_CREDENTIALS from api.sip.client import Sip2Encoding, SIPClient from api.sip.dialect import Dialect as Sip2Dialect from core.analytics import Analytics @@ -21,6 +24,7 @@ from core.model import Patron from core.util import MoneyUtility from core.util.http import RemoteIntegrationException +from core.util.problem_detail import ProblemDetail, ProblemError class SIP2Settings(BasicAuthProviderSettings): @@ -287,7 +291,9 @@ def settings_class(cls) -> Type[SIP2Settings]: def library_settings_class(cls) -> Type[SIP2LibrarySettings]: return SIP2LibrarySettings - def patron_information(self, username, password): + def patron_information( + self, username: str | None, password: str | None + ) -> Dict[str, Any] | ProblemDetail: try: sip = self.client sip.connect() @@ -298,11 +304,15 @@ def patron_information(self, username, password): return info except OSError as e: - raise RemoteIntegrationException(self.server or "unknown server", str(e)) + server_name = self.server or "unknown server" + self.log.warning(f"SIP2 error ({server_name}): {str(e)}", exc_info=e) + return INVALID_CREDENTIALS.detailed( + f"Error contacting authentication server ({server_name}). Please try again later." + ) def remote_patron_lookup( self, patron_or_patrondata: Union[PatronData, Patron] - ) -> Optional[PatronData]: + ) -> PatronData | None | ProblemDetail: info = self.patron_information( patron_or_patrondata.authorization_identifier, None ) @@ -310,7 +320,7 @@ def remote_patron_lookup( def remote_authenticate( self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + ) -> PatronData | None | ProblemDetail: """Authenticate a patron with the SIP2 server. :param username: The patron's username/barcode/card @@ -371,11 +381,16 @@ def raw_patron_information(): ("Raw test patron information"), raw_patron_information ) - def info_to_patrondata(self, info, validate_password=True) -> Optional[PatronData]: + def info_to_patrondata( + self, info: Dict[str, Any] | ProblemDetail, validate_password: bool = True + ) -> Optional[PatronData] | ProblemDetail: """Convert the SIP-specific dictionary obtained from SIPClient.patron_information() to an abstract, authenticator-independent PatronData object. """ + if isinstance(info, ProblemDetail): + return info + if info.get("valid_patron", "N") == "N": # The patron could not be identified as a patron of this # library. Don't return any data. 
diff --git a/tests/api/sip/test_authentication_provider.py b/tests/api/sip/test_authentication_provider.py index 7173089339..f32755784a 100644 --- a/tests/api/sip/test_authentication_provider.py +++ b/tests/api/sip/test_authentication_provider.py @@ -8,11 +8,12 @@ from api.authentication.base import PatronData from api.authentication.basic import BasicAuthProviderLibrarySettings, Keyboards +from api.problem_details import INVALID_CREDENTIALS from api.sip import SIP2AuthenticationProvider, SIP2LibrarySettings, SIP2Settings from api.sip.client import Constants, Sip2Encoding, SIPClient from api.sip.dialect import Dialect from core.config import CannotLoadConfiguration -from core.util.http import RemoteIntegrationException +from core.util.problem_detail import ProblemDetail from tests.fixtures.database import DatabaseTransactionFixture @@ -400,7 +401,7 @@ def test_encoding( assert patrondata.external_type is None assert PatronData.NO_VALUE == patrondata.block_reason - def test_ioerror_during_connect_becomes_remoteintegrationexception( + def test_ioerror_during_connect_becomes_problemdetail( self, create_provider: Callable[..., SIP2AuthenticationProvider], create_settings: Callable[..., SIP2Settings], @@ -418,20 +419,26 @@ def connect(self): ) provider = create_provider(client=CannotConnect, settings=settings) - with pytest.raises(RemoteIntegrationException) as excinfo: - provider.remote_authenticate( - "username", - "password", - ) - assert "Error accessing unknown server: Doom!" in str(excinfo.value) + response = provider.remote_authenticate( + "username", + "password", + ) - def test_ioerror_during_send_becomes_remoteintegrationexception( + assert isinstance(response, ProblemDetail) + assert response.status_code == INVALID_CREDENTIALS.status_code + assert response.uri == INVALID_CREDENTIALS.uri + assert ( + response.detail + == "Error contacting authentication server (unknown server). Please try again later." + ) + + def test_ioerror_during_send_becomes_problemdetail( self, create_provider: Callable[..., SIP2AuthenticationProvider], create_settings: Callable[..., SIP2Settings], ): """If there's an IOError communicating with the server, - it becomes a RemoteIntegrationException. + it becomes a ProblemDetail to be sent to the client. """ class CannotSend(MockSIPClient): @@ -443,12 +450,18 @@ def do_send(self, data): ) provider = create_provider(client=CannotSend, settings=settings) - with pytest.raises(RemoteIntegrationException) as excinfo: - provider.remote_authenticate( - "username", - "password", - ) - assert "Error accessing server.local: Doom!" in str(excinfo.value) + response = provider.remote_authenticate( + "username", + "password", + ) + + assert isinstance(response, ProblemDetail) + assert response.status_code == INVALID_CREDENTIALS.status_code + assert response.uri == INVALID_CREDENTIALS.uri + assert ( + response.detail + == "Error contacting authentication server (server.local). Please try again later." + ) def test_parse_date(self): parse = SIP2AuthenticationProvider.parse_date @@ -492,6 +505,20 @@ def patron_information(self, identifier, password): assert client.patron_information == "1234" assert client.password is None + def test_info_to_patrondata_problemdetail( + self, + create_provider: Callable[..., SIP2AuthenticationProvider], + create_settings: Callable[..., SIP2Settings], + ): + # If we get a ProblemDetail we just return it. 
+ settings = create_settings( + url="server.local", + ) + provider = create_provider(settings=settings) + problem_detail = ProblemDetail("foo") + patron = provider.info_to_patrondata(problem_detail) + assert patron is problem_detail + def test_info_to_patrondata_validate_password( self, create_provider: Callable[..., SIP2AuthenticationProvider], @@ -508,8 +535,7 @@ def test_info_to_patrondata_validate_password( TestSIP2AuthenticationProvider.sierra_valid_login ) patron = provider.info_to_patrondata(info) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "foo@example.com" == patron.email_address assert "LE CARRÉ, JOHN" == patron.personal_name @@ -541,8 +567,7 @@ def test_info_to_patrondata_no_validate_password( TestSIP2AuthenticationProvider.sierra_valid_login ) patron = provider.info_to_patrondata(info, validate_password=False) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "foo@example.com" == patron.email_address assert "LE CARRÉ, JOHN" == patron.personal_name @@ -556,8 +581,7 @@ def test_info_to_patrondata_no_validate_password( TestSIP2AuthenticationProvider.sierra_invalid_login ) patron = provider.info_to_patrondata(info, validate_password=False) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "foo@example.com" == patron.email_address assert "SHELDON, ALICE" == patron.personal_name @@ -590,8 +614,7 @@ def test_patron_block_setting( TestSIP2AuthenticationProvider.evergreen_expired_card ) patron = provider.info_to_patrondata(info) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "863716" == patron.permanent_id assert "Booth Expired Test" == patron.personal_name @@ -625,8 +648,7 @@ def test_patron_block_setting_with_fines( ) info["fee_limit"] = "10.0" patron = provider.info_to_patrondata(info) - assert patron is not None - assert patron.__class__ == PatronData + assert isinstance(patron, PatronData) assert "12345" == patron.authorization_identifier assert "863718" == patron.permanent_id assert "Booth Excessive Fines Test" == patron.personal_name From 8cbe9398de4fddf4625db238da3bdfc6cb87407f Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 17 Nov 2023 09:27:02 -0400 Subject: [PATCH 171/262] Fix parameters for enki call (PP-620) (#1509) * Fix parameters for enki call. * Fix test fixture. 
--- api/enki.py | 2 +- tests/api/mockapi/enki.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/enki.py b/api/enki.py index 9f663230f6..a3b192828f 100644 --- a/api/enki.py +++ b/api/enki.py @@ -217,7 +217,7 @@ def request( headers = dict(extra_headers) if extra_headers else {} try: response = self._request( - method, url, headers=headers, data=data, params=params, **kwargs + url, method, headers=headers, data=data, params=params, **kwargs ) except RequestTimedOut as e: if not retry_on_timeout: diff --git a/tests/api/mockapi/enki.py b/tests/api/mockapi/enki.py index e73477677c..14c7caf8b9 100644 --- a/tests/api/mockapi/enki.py +++ b/tests/api/mockapi/enki.py @@ -40,7 +40,7 @@ def __init__( def queue_response(self, status_code, headers={}, content=None): self.responses.insert(0, MockRequestsResponse(status_code, headers, content)) - def _request(self, method, url, headers, data, params, **kwargs): + def _request(self, url, method, headers, data, params, **kwargs): """Override EnkiAPI._request to pull responses from a queue instead of making real HTTP requests """ From f3b9c9b1b4ae2a2e28ed4f074710e839b1bd88cf Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 17 Nov 2023 09:58:54 -0400 Subject: [PATCH 172/262] Remove external integrations from collection integrations (PP-503) (#1494) This PR changes the data model for our collections in a number of ways with the goal of removing all reliance on external integrations from our collections. All the database columns and tables "removed" by this PR have just been renamed on the Python side, but left in place for now. Once this goes into a release, #1498 can be merged to remove the now unused tables and columns. The major changes: The external_integration_id column has been removed. Collections no longer have a name column. Previously both integrations and collections had names, which meant that they could get out of sync. Now collection names are driven by the name of their integration. A migration ensures that existing integrations that have gotten out of sync are updated to have their collection's name. The collections_libraries table has been removed. Collections could previously be related to libraries in two ways: through the collections_libraries table, and via their integration_library_configuration. This situation was confusing, and meant that these could get out of sync. The collection table no longer uses its external_account_id column. This was previously duplicated between the table and the settings; it now lives only in the settings. As with the others, the column is still present, but it is renamed and unused and will be dropped once this is rolled out everywhere. The settings column gets a GIN index, to make it easier to query the jsonb column. Some of the properties on the collection class have been moved out to the specific license provider API that used them. These were on the collection model because previously some providers didn't have their own API classes. There are still some of these that could be moved into a more suitable location, but that can happen in a follow-up PR. The collection controller and collection self test controller have been updated to use the integration settings controller base classes. This lets us keep all the logic for updating these settings isolated there. The biggest consequence of this change is that we now pass around the integration ID rather than the collection ID when referencing collections and collection self tests.
This is transparent to the admin UI, since it just sends back the IDs given to it when it queries for the collection settings. This makes the collection controllers act like the other controllers that modify integrations, rather then using different IDs in the collection controller. The collection self test controller removes the special case that used to exist for OPDS importer collections, since they now have their own API class. This PR removes the self test functionality for the OPDS 1.x importer class, since we don't have any OPDS 1.x collections in our production CMs. Here is a ticket to add self tests for the collection classes missing them (OPDS 2, OPDS 2 + ODL), but this is the situation we've been in for awhile, so no impact from this PR. This PR will make it easier to add these self tests by just implementing the self test interface in the api classes. There is also a DB migration to apply these database changes and an associated migration test. All the major files touched by this PR are now fully type hinted, and added to the strict mypy group in pyproject.toml. --- ..._remove_collection_external_integration.py | 271 ++++++++ api/admin/controller/__init__.py | 2 +- api/admin/controller/collection_self_tests.py | 100 ++- api/admin/controller/collection_settings.py | 443 ++++--------- api/admin/controller/integration_settings.py | 46 +- .../patron_auth_service_self_tests.py | 102 +-- api/admin/controller/self_tests.py | 127 +++- api/admin/controller/settings.py | 48 +- api/admin/problem_details.py | 9 - api/axis.py | 4 +- api/bibliotheca.py | 7 +- api/circulation.py | 16 +- api/controller.py | 32 +- api/integration/registry/license_providers.py | 7 +- api/local_analytics_exporter.py | 8 +- api/monitor.py | 7 +- api/odl.py | 4 +- api/odl2.py | 2 +- api/opds_for_distributors.py | 7 +- api/overdrive.py | 187 +++--- api/selftest.py | 13 +- core/integration/base.py | 77 ++- core/lane.py | 34 +- core/migration/util.py | 13 + core/model/__init__.py | 9 +- core/model/collection.py | 345 +++------- core/model/configuration.py | 47 +- core/model/integration.py | 88 ++- core/model/library.py | 24 +- core/model/listeners.py | 2 - core/opds2_import.py | 41 +- core/opds_import.py | 152 +---- core/scripts.py | 31 +- core/selftest.py | 3 - pyproject.toml | 3 + .../controller/test_collection_self_tests.py | 279 ++++---- .../controller/test_collection_settings.py | 58 -- .../api/admin/controller/test_collections.py | 624 ++++++++---------- .../api/admin/controller/test_custom_lists.py | 12 +- tests/api/admin/controller/test_lanes.py | 2 +- .../api/admin/controller/test_patron_auth.py | 1 - .../admin/controller/test_search_services.py | 2 +- tests/api/admin/controller/test_settings.py | 6 +- tests/api/mockapi/axis.py | 24 +- tests/api/mockapi/bibliotheca.py | 24 +- tests/api/mockapi/enki.py | 8 +- tests/api/mockapi/opds_for_distributors.py | 28 +- tests/api/mockapi/overdrive.py | 39 +- tests/api/test_bibliotheca.py | 6 - tests/api/test_circulationapi.py | 4 +- tests/api/test_controller_base.py | 2 +- tests/api/test_controller_loan.py | 13 +- tests/api/test_controller_multilib.py | 16 +- tests/api/test_controller_odl_notify.py | 22 +- tests/api/test_controller_playtime_entries.py | 2 +- tests/api/test_controller_scopedsession.py | 12 +- tests/api/test_enki.py | 6 +- tests/api/test_lanes.py | 15 +- tests/api/test_opds_for_distributors.py | 10 - tests/api/test_overdrive.py | 91 +-- tests/api/test_selftest.py | 4 + tests/core/models/test_collection.py | 233 ++----- 
tests/core/models/test_configuration.py | 13 - .../models/test_integration_configuration.py | 24 +- tests/core/models/test_library.py | 2 +- tests/core/models/test_listeners.py | 29 - tests/core/models/test_work.py | 6 +- tests/core/test_external_search.py | 7 +- tests/core/test_lane.py | 35 +- tests/core/test_opds2_import.py | 31 +- tests/core/test_opds_import.py | 468 ++++--------- tests/core/test_opds_validate.py | 28 +- tests/core/test_scripts.py | 5 +- tests/core/test_selftest.py | 2 +- tests/fixtures/database.py | 35 +- tests/fixtures/odl.py | 23 +- tests/migration/test_20231101_2d72d6876c52.py | 237 +++++++ 77 files changed, 2188 insertions(+), 2611 deletions(-) create mode 100644 alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py delete mode 100644 tests/api/admin/controller/test_collection_settings.py create mode 100644 tests/migration/test_20231101_2d72d6876c52.py diff --git a/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py new file mode 100644 index 0000000000..2401ebb2f0 --- /dev/null +++ b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py @@ -0,0 +1,271 @@ +"""Remove collection external integration. + +Revision ID: 2d72d6876c52 +Revises: cc084e35e037 +Create Date: 2023-11-01 22:42:06.754873+00:00 + +""" + +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from alembic import op +from api.integration.registry.license_providers import LicenseProvidersRegistry +from core.migration.util import migration_logger +from core.model import json_serializer + +# revision identifiers, used by Alembic. +revision = "2d72d6876c52" +down_revision = "cc084e35e037" +branch_labels = None +depends_on = None + + +log = migration_logger(revision) + + +def upgrade() -> None: + conn = op.get_bind() + + # Our collection names have gotten out of sync with the integration names. The collection names + # are what are being displayed to users, so before we stop using the collection name, we need + # to update the integration name to match the collection name. + # For now, we leave the collection name column in place, but we make it nullable and remove the + # unique constraint. + rows = conn.execute( + "SELECT c.id as collection_id, ic.id as integration_id, ic.name as integration_name, " + "c.name as collection_name from collections c JOIN integration_configurations ic " + "ON c.integration_configuration_id = ic.id WHERE c.name != ic.name" + ).all() + + for row in rows: + log.info( + f"Updating name for collection {row.collection_id} from {row.integration_name} to {row.collection_name}." + ) + conn.execute( + "UPDATE integration_configurations SET name = (%s) WHERE id = (%s)", + (row.collection_name, row.integration_id), + ) + + op.alter_column("collections", "name", existing_type=sa.VARCHAR(), nullable=True) + op.drop_index("ix_collections_name", table_name="collections") + + # We have moved the setting for the TOKEN_AUTH integration from an external integration + # to a new JSONB column on the integration_configurations table (context). We need to move + # the data into the new column as part of this migration. + # The context column is not nullable, so we need to set a default value for the existing + # rows. We will use an empty JSON object. We create the column as nullable, set the default + # value, then make it non-nullable. 
+ op.add_column( + "integration_configurations", + sa.Column("context", postgresql.JSONB(astext_type=sa.Text()), nullable=True), + ) + + conn.execute("UPDATE integration_configurations SET context = '{}'") + + rows = conn.execute( + "SELECT c.id, cs.value FROM collections c " + "JOIN externalintegrations ei ON c.external_integration_id = ei.id " + "JOIN configurationsettings cs ON ei.id = cs.external_integration_id " + "WHERE key='token_auth_endpoint' and value <> ''" + ).all() + + for row in rows: + context = json_serializer({"token_auth_endpoint": row.value}) + log.info(f"Updating context for collection {row.id} to {context}.") + conn.execute( + "UPDATE integration_configurations SET context = (%s) " + "FROM collections " + "WHERE integration_configurations.id = collections.integration_configuration_id " + "and collections.id = (%s)", + (context, row.id), + ) + + op.alter_column("integration_configurations", "context", nullable=False) + + # We have moved the data that was in external_account_id into the settings column of the + # integration, so we need to make sure that it gets moved as part of this migration. We + # also make sure that the new settings are valid for the integration before saving them + # to the database. + rows = conn.execute( + "SELECT ic.id as integration_id, ic.settings, ic.protocol, ic.goal, c.external_account_id FROM collections c " + "JOIN integration_configurations ic ON c.integration_configuration_id = ic.id" + ).all() + + registry = LicenseProvidersRegistry() + for row in rows: + if row.external_account_id is None: + continue + settings_dict = row.settings.copy() + settings_dict["external_account_id"] = row.external_account_id + impl_class = registry.get(row.protocol) + if impl_class is None: + raise RuntimeError( + f"Could not find implementation for protocol {row.protocol}" + ) + settings_obj = impl_class.settings_class()(**settings_dict) + new_settings_dict = settings_obj.dict() + if row.settings != new_settings_dict: + new_settings = json_serializer(new_settings_dict) + log.info( + f"Updating settings for integration {row.integration_id} from {row.settings} to {new_settings}." + ) + conn.execute( + "UPDATE integration_configurations SET settings = (%s) WHERE id = (%s)", + (new_settings, row.integration_id), + ) + + # Because collections now rely on integration_configurations, they can no longer + # have a null value for integration_configuration_id. This should already be true + # of our existing collections. We also drop our foreign key constraint, and recreate + # it with the correct ondelete behavior. + op.alter_column( + "collections", + "integration_configuration_id", + existing_type=sa.INTEGER(), + nullable=False, + ) + op.drop_constraint( + "collections_integration_configuration_id_fkey", + "collections", + type_="foreignkey", + ) + op.create_foreign_key( + "collections_integration_configuration_id_fkey", + "collections", + "integration_configurations", + ["integration_configuration_id"], + ["id"], + ) + + # The data that was in the collections_libraries table is now tracked by + # integration_library_configurations, we keep the data in the collections_libraries + # table for now, but we remove the foreign key constraints and indexes. 
+ op.alter_column( + "collections_libraries", + "collection_id", + existing_type=sa.INTEGER(), + nullable=True, + ) + op.alter_column( + "collections_libraries", "library_id", existing_type=sa.INTEGER(), nullable=True + ) + op.drop_index( + "ix_collections_libraries_collection_id", table_name="collections_libraries" + ) + op.drop_index( + "ix_collections_libraries_library_id", table_name="collections_libraries" + ) + op.drop_constraint( + "collections_libraries_collection_id_fkey", + "collections_libraries", + type_="foreignkey", + ) + op.drop_constraint( + "collections_libraries_library_id_fkey", + "collections_libraries", + type_="foreignkey", + ) + + # Collections have now been migrated entirely to use integration_configurations. We keep this column + # for now, but we remove the foreign key constraint and index. + op.drop_index("ix_collections_external_integration_id", table_name="collections") + op.drop_constraint( + "collections_external_integration_id_fkey", "collections", type_="foreignkey" + ) + + # We create a new index on the settings column of integration_configurations. This + # will allow us to quickly find integrations that have a specific setting. + op.create_index( + "ix_integration_configurations_settings_dict", + "integration_configurations", + ["settings"], + unique=False, + postgresql_using="gin", + ) + + +def downgrade() -> None: + op.drop_index( + "ix_integration_configurations_settings_dict", + table_name="integration_configurations", + postgresql_using="gin", + ) + + op.create_foreign_key( + "collections_external_integration_id_fkey", + "collections", + "externalintegrations", + ["external_integration_id"], + ["id"], + ) + op.create_index( + "ix_collections_external_integration_id", + "collections", + ["external_integration_id"], + unique=False, + ) + + op.create_foreign_key( + "collections_libraries_collection_id_fkey", + "collections_libraries", + "collections", + ["collection_id"], + ["id"], + ) + op.create_foreign_key( + "collections_libraries_library_id_fkey", + "collections_libraries", + "libraries", + ["library_id"], + ["id"], + ) + op.create_index( + "ix_collections_libraries_library_id", + "collections_libraries", + ["library_id"], + unique=False, + ) + op.create_index( + "ix_collections_libraries_collection_id", + "collections_libraries", + ["collection_id"], + unique=False, + ) + op.alter_column( + "collections_libraries", + "library_id", + existing_type=sa.INTEGER(), + nullable=False, + ) + op.alter_column( + "collections_libraries", + "collection_id", + existing_type=sa.INTEGER(), + nullable=False, + ) + + op.drop_constraint( + "collections_integration_configuration_id_fkey", + "collections", + type_="foreignkey", + ) + op.create_foreign_key( + "collections_integration_configuration_id_fkey", + "collections", + "integration_configurations", + ["integration_configuration_id"], + ["id"], + ondelete="SET NULL", + ) + op.alter_column( + "collections", + "integration_configuration_id", + existing_type=sa.INTEGER(), + nullable=True, + ) + + op.drop_column("integration_configurations", "context") + + op.create_index("ix_collections_name", "collections", ["name"], unique=False) + op.alter_column("collections", "name", existing_type=sa.VARCHAR(), nullable=False) diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index 0579f5f545..5aaf9ee57f 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -83,7 +83,7 @@ def setup_admin_controllers(manager: CirculationManager): ) 
manager.admin_collection_settings_controller = CollectionSettingsController(manager) manager.admin_collection_self_tests_controller = CollectionSelfTestsController( - manager + manager._db ) manager.admin_sitewide_configuration_settings_controller = ( SitewideConfigurationSettingsController(manager) diff --git a/api/admin/controller/collection_self_tests.py b/api/admin/controller/collection_self_tests.py index 38ceb7c404..6b308421f2 100644 --- a/api/admin/controller/collection_self_tests.py +++ b/api/admin/controller/collection_self_tests.py @@ -1,63 +1,41 @@ -from flask_babel import lazy_gettext as _ +from __future__ import annotations -from api.admin.controller.self_tests import SelfTestsController -from api.admin.problem_details import * +from typing import Any, Dict, Optional + +from flask import Response +from sqlalchemy.orm import Session + +from api.admin.controller.self_tests import IntegrationSelfTestsController +from api.circulation import CirculationApiType from api.integration.registry.license_providers import LicenseProvidersRegistry -from api.selftest import HasCollectionSelfTests -from core.model import Collection -from core.opds_import import OPDSImporter, OPDSImportMonitor - - -class CollectionSelfTestsController(SelfTestsController): - def __init__(self, manager): - super().__init__(manager) - self.type = _("collection") - self.registry = LicenseProvidersRegistry() - self.protocols = self._get_collection_protocols(self.registry.integrations) - - def process_collection_self_tests(self, identifier): - return self._manage_self_tests(identifier) - - def look_up_by_id(self, identifier): - """Find the collection to display self test results or run self tests for; - display an error message if a collection with this ID turns out not to exist""" - - collection = Collection.by_id(self._db, identifier) - if not collection: - return NO_SUCH_COLLECTION - - self.protocol_class = self._find_protocol_class(collection) - return collection - - def get_info(self, collection): - """Compile information about this collection, including the results from the last time, if ever, - that the self tests were run.""" - - return dict( - id=collection.id, - name=collection.name, - protocol=collection.protocol, - parent_id=collection.parent_id, - settings=dict(external_account_id=collection.external_account_id), - ) - - def _find_protocol_class(self, collection): - """Figure out which protocol is providing books to this collection""" - return self.registry.get(collection.protocol) - - def run_tests(self, collection): - collection_protocol = collection.protocol or None - - if self.protocol_class: - value = None - if collection_protocol == OPDSImportMonitor.PROTOCOL: - self.protocol_class = OPDSImportMonitor - value, results = self.protocol_class.run_self_tests( - self._db, self.protocol_class, self._db, collection, OPDSImporter - ) - elif issubclass(self.protocol_class, HasCollectionSelfTests): - value, results = self.protocol_class.run_self_tests( - self._db, self.protocol_class, self._db, collection - ) - - return value +from core.integration.registry import IntegrationRegistry +from core.model import IntegrationConfiguration +from core.selftest import HasSelfTestsIntegrationConfiguration +from core.util.problem_detail import ProblemDetail + + +class CollectionSelfTestsController(IntegrationSelfTestsController[CirculationApiType]): + def __init__( + self, + db: Session, + registry: Optional[IntegrationRegistry[CirculationApiType]] = None, + ): + registry = registry or LicenseProvidersRegistry() + 
super().__init__(db, registry) + + def process_collection_self_tests( + self, identifier: Optional[int] + ) -> Response | ProblemDetail: + return self.process_self_tests(identifier) + + def run_self_tests( + self, integration: IntegrationConfiguration + ) -> Optional[Dict[str, Any]]: + protocol_class = self.get_protocol_class(integration) + if issubclass(protocol_class, HasSelfTestsIntegrationConfiguration): + test_result, _ = protocol_class.run_self_tests( + self.db, protocol_class, self.db, integration.collection + ) + return test_result + + return None diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index 6474d2ef4d..d1402f08fb 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -1,367 +1,174 @@ -import json -from typing import Any, Dict, List, Optional +from typing import Any, Dict, Optional, Union import flask from flask import Response -from flask_babel import lazy_gettext as _ -from api.admin.controller.settings import SettingsController +from api.admin.controller.base import AdminPermissionsControllerMixin +from api.admin.controller.integration_settings import IntegrationSettingsController +from api.admin.form_data import ProcessFormData from api.admin.problem_details import ( - CANNOT_CHANGE_PROTOCOL, CANNOT_DELETE_COLLECTION_WITH_CHILDREN, - COLLECTION_NAME_ALREADY_IN_USE, - INCOMPLETE_CONFIGURATION, MISSING_COLLECTION, MISSING_COLLECTION_NAME, MISSING_PARENT, + MISSING_SERVICE, NO_PROTOCOL_FOR_NEW_SERVICE, - NO_SUCH_LIBRARY, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, - PROTOCOL_DOES_NOT_SUPPORT_SETTINGS, UNKNOWN_PROTOCOL, ) +from api.circulation import CirculationApiType from api.integration.registry.license_providers import LicenseProvidersRegistry +from core.integration.base import HasChildIntegrationConfiguration +from core.integration.registry import IntegrationRegistry from core.model import ( Collection, - ConfigurationSetting, - Library, + IntegrationConfiguration, + IntegrationLibraryConfiguration, + create, get_one, - get_one_or_create, + json_serializer, + site_configuration_has_changed, ) -from core.model.admin import Admin -from core.model.integration import IntegrationConfiguration from core.util.problem_detail import ProblemDetail, ProblemError -class CollectionSettingsController(SettingsController): - def __init__(self, manager): - super().__init__(manager) - self.type = _("collection") - self.registry = LicenseProvidersRegistry() - - def _get_collection_protocols(self): - protocols = super()._get_collection_protocols(self.registry.integrations) - - # dedupe and only keep the latest SETTINGS - # this will allow child objects to overwrite - # parent settings with the same key - # This relies on the fact that child settings - # are added after parent settings as such - # `SETTINGS + .to_settings()` - for protocol in protocols: - if "settings" not in protocol: - continue - _found_settings = dict() - for ix, setting in enumerate(protocol["settings"]): - _key = setting["key"] - _found_settings[_key] = ix - _settings = [] - # Go through the dict items and only use the latest found settings - # for any given key - for _, v in _found_settings.items(): - _settings.append(protocol["settings"][v]) - protocol["settings"] = _settings +class CollectionSettingsController( + IntegrationSettingsController[CirculationApiType], AdminPermissionsControllerMixin +): + def default_registry(self) -> IntegrationRegistry[CirculationApiType]: + return LicenseProvidersRegistry() + + def 
configured_service_info( + self, service: IntegrationConfiguration + ) -> Optional[Dict[str, Any]]: + service_info = super().configured_service_info(service) + user = getattr(flask.request, "admin", None) + if service_info: + # Add 'marked_for_deletion' to the service info + service_info["marked_for_deletion"] = service.collection.marked_for_deletion + service_info["parent_id"] = service.collection.parent_id + if user and user.can_see_collection(service.collection): + return service_info + return None - return protocols + def configured_service_library_info( + self, library_configuration: IntegrationLibraryConfiguration + ) -> Optional[Dict[str, Any]]: + library_info = super().configured_service_library_info(library_configuration) + user = getattr(flask.request, "admin", None) + if library_info: + if user and user.is_librarian(library_configuration.library): + return library_info + return None - def process_collections(self): + def process_collections(self) -> Union[Response, ProblemDetail]: if flask.request.method == "GET": return self.process_get() else: return self.process_post() - # GET - def process_get(self): - collections_db = self._db.query(Collection).order_by(Collection.name).all() - ConfigurationSetting.cache_warm(self._db) - Collection.cache_warm(self._db, lambda: collections_db) - protocols = self._get_collection_protocols() - user = flask.request.admin - collections = [] - collection_object: Collection - for collection_object in collections_db: - if not user or not user.can_see_collection(collection_object): - continue - - collection_dict = self.collection_to_dict(collection_object) - if collection_object.integration_configuration: - libraries = self.load_libraries(collection_object, user) - collection_dict["libraries"] = libraries - collection_dict[ - "settings" - ] = collection_object.integration_configuration.settings_dict - self.load_settings(collection_object, collection_dict["settings"]) - collection_dict["self_test_results"] = self._get_prior_test_results( - collection_object - ) - collection_dict[ - "marked_for_deletion" - ] = collection_object.marked_for_deletion - - collections.append(collection_dict) - - return dict( - collections=collections, - protocols=protocols, + def process_get(self) -> Response: + return Response( + json_serializer( + { + "collections": self.configured_services, + "protocols": list(self.protocols.values()), + } + ), + status=200, + mimetype="application/json", ) - def collection_to_dict(self, collection_object): - return dict( - id=collection_object.id, - name=collection_object.name, - protocol=collection_object.protocol, - parent_id=collection_object.parent_id, - ) - - def load_libraries(self, collection_object: Collection, user: Admin) -> List[Dict]: - """Get a list of the libraries that 1) are associated with this collection - and 2) the user is affiliated with""" - - libraries = [] - integration: IntegrationConfiguration = ( - collection_object.integration_configuration - ) - if not integration: - return [] - for library in collection_object.libraries: - if not user or not user.is_librarian(library): - continue - library_info = dict(short_name=library.short_name) - # Find and update the library settings if they exist - for config in integration.library_configurations: - if library.id == config.library_id: - library_info.update(config.settings_dict) - break - libraries.append(library_info) - - return libraries - - def load_settings(self, collection_object, collection_settings): - """Compile the information about the collection that 
corresponds to the settings - externally imposed by the collection's protocol.""" - - settings = collection_settings - settings["external_account_id"] = collection_object.external_account_id - - def find_protocol_class(self, collection_object): - """Figure out which class this collection's protocol belongs to, from the list - of possible protocols defined in the registry""" - - return self.registry.get(collection_object.protocol) - - # POST - def process_post(self): + def process_post(self) -> Union[Response, ProblemDetail]: self.require_system_admin() - protocols = self._get_collection_protocols() - is_new = False - collection = None - - name = flask.request.form.get("name") - protocol_name = flask.request.form.get("protocol") - parent_id = flask.request.form.get("parent_id") - fields = {"name": name, "protocol": protocol_name} - id = flask.request.form.get("id") - if id: - collection = get_one(self._db, Collection, id=id) - fields["collection"] = collection - - error = self.validate_form_fields(is_new, protocols, **fields) - if error: - return error - - settings_class = self._get_settings_class( - self.registry, protocol_name, is_child=(parent_id is not None) - ) - if not settings_class: - return UNKNOWN_PROTOCOL - - if protocol_name and not collection: - collection, is_new = get_one_or_create(self._db, Collection, name=name) - if not is_new: - self._db.rollback() - return COLLECTION_NAME_ALREADY_IN_USE - collection.create_integration_configuration(protocol_name) - # Mirrors still use the external integration - # TODO: Remove the use of external integrations when Mirrors are migrated - # to use the integration configurations - collection.create_external_integration(protocol_name) - - collection.name = name - [protocol_dict] = [p for p in protocols if p.get("name") == protocol_name] - - valid = self.validate_parent(protocol_dict, collection) - if isinstance(valid, ProblemDetail): - self._db.rollback() - return valid - - settings = protocol_dict["settings"] - settings_error = self.process_settings(settings, collection) - if settings_error: - self._db.rollback() - return settings_error - - libraries_error = self.process_libraries(protocol_dict, collection) - if libraries_error: - return libraries_error - - if is_new: - return Response(str(collection.id), 201) - else: - return Response(str(collection.id), 200) + try: + form_data = flask.request.form + protocol = form_data.get("protocol", None, str) + id = form_data.get("id", None, int) + name = form_data.get("name", None, str) + parent_id = form_data.get("parent_id", None, int) + libraries_data = form_data.get("libraries", None, str) + + if protocol is None and id is None: + raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) + + if protocol is None or protocol not in self.registry: + self.log.warning( + f"Unknown patron authentication service protocol: {protocol}" + ) + raise ProblemError(UNKNOWN_PROTOCOL) - def validate_form_fields(self, is_new, protocols, **fields): - """Check that 1) the required fields aren't blank, 2) the protocol is on the - list of recognized protocols, 3) the collection (if there is one) is valid, and - 4) the URL is valid""" - if not fields.get("name"): - return MISSING_COLLECTION_NAME - if "collection" in fields: - if fields.get("collection"): - invalid_collection = self.validate_collection(**fields) - if invalid_collection: - return invalid_collection + if id is not None: + # Find an existing service to edit + integration = self.get_existing_service(id, name, protocol) + response_code = 200 else: - return 
MISSING_COLLECTION - if fields.get("protocol"): - if fields.get("protocol") not in [p.get("name") for p in protocols]: - return UNKNOWN_PROTOCOL - else: - return NO_PROTOCOL_FOR_NEW_SERVICE - - def validate_collection(self, **fields): - """The protocol of an existing collection cannot be changed, and - collections must have unique names.""" - if fields.get("protocol") != fields.get("collection").protocol: - return CANNOT_CHANGE_PROTOCOL - if fields.get("name") != fields.get("collection").name: - collection_with_name = get_one( - self._db, Collection, name=fields.get("name") - ) - if collection_with_name: - return COLLECTION_NAME_ALREADY_IN_USE - - def validate_parent(self, protocol, collection): - """Verify that the parent collection is set properly, then determine - the type of the settings that need to be validated: are they 1) settings for a - regular collection (e.g. client key and client secret for an Overdrive collection), - or 2) settings for a child collection (e.g. library ID for an Overdrive Advantage collection)? - """ - - parent_id = flask.request.form.get("parent_id") - if parent_id and not protocol.get("child_settings"): - return PROTOCOL_DOES_NOT_SUPPORT_PARENTS - if parent_id: - parent = get_one(self._db, Collection, id=parent_id) - if not parent: - return MISSING_PARENT - collection.parent = parent - else: - collection.parent = None + # Create a new service + if not name: + raise ProblemError(MISSING_COLLECTION_NAME) + integration = self.create_new_service(name, protocol) + # Make sure the service is associated with a collection + create(self._db, Collection, integration_configuration=integration) + response_code = 201 + + impl_cls = self.registry[protocol] + + # Validate and set parent collection + if parent_id is not None: + if issubclass(impl_cls, HasChildIntegrationConfiguration): + settings_class = impl_cls.child_settings_class() + parent_integration = get_one( + self._db, IntegrationConfiguration, id=parent_id + ) + if ( + parent_integration is None + or parent_integration.collection is None + ): + raise ProblemError(MISSING_PARENT) + integration.collection.parent = parent_integration.collection + else: + raise ProblemError(PROTOCOL_DOES_NOT_SUPPORT_PARENTS) + else: + settings_class = impl_cls.settings_class() - return True + # Update settings + validated_settings = ProcessFormData.get_settings(settings_class, form_data) + integration.settings_dict = validated_settings.dict() - def validate_external_account_id_setting(self, value, setting): - """Check that the user has submitted any required values for associating - this collection with an external account.""" - if not value and not setting.get("optional"): - # Roll back any changes to the collection that have already been made. - return INCOMPLETE_CONFIGURATION.detailed( - _( - "The collection configuration is missing a required setting: %(setting)s", - setting=setting.get("label"), + # Update library settings + if libraries_data: + self.process_libraries( + integration, libraries_data, impl_cls.library_settings_class() ) - ) - def process_settings( - self, settings: List[Dict[str, Any]], collection: Collection - ) -> Optional[ProblemDetail]: - """Process the settings for the given collection. + # Trigger a site configuration change + site_configuration_has_changed(self._db) - Go through the settings that the user has just submitted for this collection, - and check that each setting is valid and that no required settings are missing. 
If - the setting passes all of the validations, go ahead and set it for this collection. - """ - settings_class = self._get_settings_class( - self.registry, - collection.protocol, - is_child=(flask.request.form.get("parent_id") is not None), - ) - if isinstance(settings_class, ProblemDetail): - return settings_class - if settings_class is None: - return PROTOCOL_DOES_NOT_SUPPORT_SETTINGS - collection_settings = {} - for setting in settings: - key = setting["key"] - value = self._extract_form_setting_value(setting, flask.request.form) - if key == "external_account_id": - error = self.validate_external_account_id_setting(value, setting) - if error: - return error - collection.external_account_id = value - elif value is not None: - # Only if the key was present in the request should we add it - collection_settings[key] = value - else: - # Keep existing setting value, when present, if a value is not specified. - # This can help prevent accidental loss of settings due to some programming errors. - if key in collection.integration_configuration.settings_dict: - collection_settings[ - key - ] = collection.integration_configuration.settings_dict[key] - - # validate then apply - try: - validated_settings = settings_class(**collection_settings) - except ProblemError as ex: - return ex.problem_detail - collection.integration_configuration.settings_dict = validated_settings.dict() - return None - - def process_libraries(self, protocol, collection): - """Go through the libraries that the user is trying to associate with this collection; - check that each library actually exists, and that the library-related configuration settings - that the user has submitted are complete and valid. If the library passes all of the validations, - go ahead and associate it with this collection.""" + except ProblemError as e: + self._db.rollback() + return e.problem_detail - libraries = [] - protocol_class = self.registry.get(protocol["name"]) - if flask.request.form.get("libraries"): - libraries = json.loads(flask.request.form.get("libraries")) + return Response(str(integration.id), response_code) - for library_info in libraries: - library = get_one( - self._db, Library, short_name=library_info.get("short_name") - ) - if not library: - return NO_SUCH_LIBRARY.detailed( - _( - "You attempted to add the collection to %(library_short_name)s, but the library does not exist.", - library_short_name=library_info.get("short_name"), - ) - ) - if collection not in library.collections: - library.collections.append(collection) - result = self._set_configuration_library( - collection.integration_configuration, library_info, protocol_class - ) - if isinstance(result, ProblemDetail): - return result + def process_delete(self, service_id: int) -> Union[Response, ProblemDetail]: + self.require_system_admin() - short_names = [l.get("short_name") for l in libraries] - for library in collection.libraries: - if library.short_name not in short_names: - collection.disassociate_library(library) + integration = get_one( + self._db, + IntegrationConfiguration, + id=service_id, + goal=self.registry.goal, + ) + if not integration: + return MISSING_SERVICE - # DELETE - def process_delete(self, collection_id): - self.require_system_admin() - collection = get_one(self._db, Collection, id=collection_id) + collection = integration.collection if not collection: return MISSING_COLLECTION + if len(collection.children) > 0: return CANNOT_DELETE_COLLECTION_WITH_CHILDREN # Flag the collection to be deleted by script in the background. 
collection.marked_for_deletion = True - return Response(str(_("Deleted")), 200) + return Response("Deleted", 200) diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py index b491a508fe..b84fd1ac93 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -13,6 +13,7 @@ ) from api.controller import CirculationManager from core.integration.base import ( + HasChildIntegrationConfiguration, HasIntegrationConfiguration, HasLibraryIntegrationConfiguration, ) @@ -63,7 +64,7 @@ def default_registry(self) -> IntegrationRegistry[T]: @memoize(ttls=1800) def _cached_protocols(self) -> Dict[str, Dict[str, Any]]: """Cached result for integration implementations""" - protocols = {} + protocols = [] for name, api in self.registry: protocol = { "name": name, @@ -75,15 +76,37 @@ def _cached_protocols(self) -> Dict[str, Dict[str, Any]]: protocol[ "library_settings" ] = api.library_settings_class().configuration_form(self._db) + if issubclass(api, HasChildIntegrationConfiguration): + protocol[ + "child_settings" + ] = api.child_settings_class().configuration_form(self._db) protocol.update(api.protocol_details(self._db)) - protocols[name] = protocol - return protocols + protocols.append((name, protocol)) + protocols.sort(key=lambda x: x[0]) + return dict(protocols) @property def protocols(self) -> Dict[str, Dict[str, Any]]: """Use a property for implementations to allow expiring cached results""" return self._cached_protocols() + def configured_service_info( + self, service: IntegrationConfiguration + ) -> Optional[Dict[str, Any]]: + return { + "id": service.id, + "name": service.name, + "protocol": service.protocol, + "settings": service.settings_dict, + } + + def configured_service_library_info( + self, library_configuration: IntegrationLibraryConfiguration + ) -> Optional[Dict[str, Any]]: + library_info = {"short_name": library_configuration.library.short_name} + library_info.update(library_configuration.settings_dict) + return library_info + @property def configured_services(self) -> List[Dict[str, Any]]: """Return a list of all currently configured services for the controller's goal.""" @@ -99,20 +122,19 @@ def configured_services(self) -> List[Dict[str, Any]]: ) continue - service_info = { - "id": service.id, - "name": service.name, - "protocol": service.protocol, - "settings": service.settings_dict, - } + service_info = self.configured_service_info(service) + if service_info is None: + continue api = self.registry[service.protocol] if issubclass(api, HasLibraryIntegrationConfiguration): libraries = [] for library_settings in service.library_configurations: - library_info = {"short_name": library_settings.library.short_name} - library_info.update(library_settings.settings_dict) - libraries.append(library_info) + library_info = self.configured_service_library_info( + library_settings + ) + if library_info is not None: + libraries.append(library_info) service_info["libraries"] = libraries configured_services.append(service_info) diff --git a/api/admin/controller/patron_auth_service_self_tests.py b/api/admin/controller/patron_auth_service_self_tests.py index 6fae2a3d62..23d3b7cd49 100644 --- a/api/admin/controller/patron_auth_service_self_tests.py +++ b/api/admin/controller/patron_auth_service_self_tests.py @@ -2,57 +2,44 @@ from typing import Any, Dict, Optional, Type -import flask from flask import Response from sqlalchemy.orm import Session -from api.admin.problem_details import * -from 
api.authentication.base import AuthenticationProvider +from api.admin.controller.self_tests import IntegrationSelfTestsController +from api.admin.problem_details import FAILED_TO_RUN_SELF_TESTS +from api.authentication.base import AuthenticationProviderType from api.integration.registry.patron_auth import PatronAuthRegistry -from core.integration.goals import Goals from core.integration.registry import IntegrationRegistry -from core.model import get_one, json_serializer -from core.model.integration import ( - IntegrationConfiguration, - IntegrationLibraryConfiguration, -) +from core.model.integration import IntegrationConfiguration from core.util.problem_detail import ProblemDetail, ProblemError -class PatronAuthServiceSelfTestsController: +class PatronAuthServiceSelfTestsController( + IntegrationSelfTestsController[AuthenticationProviderType] +): def __init__( self, db: Session, - registry: Optional[IntegrationRegistry[AuthenticationProvider]] = None, + registry: Optional[IntegrationRegistry[AuthenticationProviderType]] = None, ): - self.db = db - self.registry = registry if registry else PatronAuthRegistry() + registry = registry or PatronAuthRegistry() + super().__init__(db, registry) def process_patron_auth_service_self_tests( self, identifier: Optional[int] ) -> Response | ProblemDetail: - if not identifier: - return MISSING_IDENTIFIER - try: - if flask.request.method == "GET": - return self.self_tests_process_get(identifier) - else: - return self.self_tests_process_post(identifier) - except ProblemError as e: - return e.problem_detail - - def self_tests_process_get(self, identifier: int) -> Response: - integration = self.look_up_by_id(identifier) - info = self.get_info(integration) - protocol_class = self.get_protocol_class(integration) + return self.process_self_tests(identifier) + def get_prior_test_results( + self, + protocol_class: Type[AuthenticationProviderType], + integration: IntegrationConfiguration, + ) -> Dict[str, Any]: # Find the first library associated with this service. library_configuration = self.get_library_configuration(integration) - if library_configuration is not None: - self_test_results = protocol_class.load_self_test_results(integration) - else: - self_test_results = dict( + if library_configuration is None: + return dict( exception=( "You must associate this service with at least one library " "before you can run self tests for it." 
@@ -60,58 +47,9 @@ def self_tests_process_get(self, identifier: int) -> Response: disabled=True, ) - info["self_test_results"] = ( - self_test_results if self_test_results else "No results yet" - ) - return Response( - json_serializer({"self_test_results": info}), - status=200, - mimetype="application/json", - ) - - def self_tests_process_post(self, identifier: int) -> Response: - integration = self.look_up_by_id(identifier) - self.run_tests(integration) - return Response("Successfully ran new self tests", 200) - - @staticmethod - def get_library_configuration( - integration: IntegrationConfiguration, - ) -> Optional[IntegrationLibraryConfiguration]: - if not integration.library_configurations: - return None - return integration.library_configurations[0] - - def get_protocol_class( - self, integration: IntegrationConfiguration - ) -> Type[AuthenticationProvider]: - if not integration.protocol or integration.protocol not in self.registry: - raise ProblemError(problem_detail=UNKNOWN_PROTOCOL) - return self.registry[integration.protocol] - - def look_up_by_id(self, identifier: int) -> IntegrationConfiguration: - service = get_one( - self.db, - IntegrationConfiguration, - id=identifier, - goal=Goals.PATRON_AUTH_GOAL, - ) - if not service: - raise (ProblemError(problem_detail=MISSING_SERVICE)) - return service - - @staticmethod - def get_info(patron_auth_service: IntegrationConfiguration): - info = dict( - id=patron_auth_service.id, - name=patron_auth_service.name, - protocol=patron_auth_service.protocol, - goal=patron_auth_service.goal, - settings=patron_auth_service.settings_dict, - ) - return info + return super().get_prior_test_results(protocol_class, integration) - def run_tests(self, integration: IntegrationConfiguration) -> Dict[str, Any]: + def run_self_tests(self, integration: IntegrationConfiguration) -> Dict[str, Any]: # If the auth service doesn't have at least one library associated with it, # we can't run self tests. 
library_configuration = self.get_library_configuration(integration) diff --git a/api/admin/controller/self_tests.py b/api/admin/controller/self_tests.py index 71e6f5836a..82705b9dcc 100644 --- a/api/admin/controller/self_tests.py +++ b/api/admin/controller/self_tests.py @@ -1,10 +1,31 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import Any, Dict, Generic, Optional, Type, TypeVar + import flask from flask import Response from flask_babel import lazy_gettext as _ +from sqlalchemy.orm import Session from api.admin.controller.settings import SettingsController -from api.admin.problem_details import FAILED_TO_RUN_SELF_TESTS, MISSING_IDENTIFIER -from core.util.problem_detail import ProblemDetail +from api.admin.problem_details import ( + FAILED_TO_RUN_SELF_TESTS, + MISSING_IDENTIFIER, + MISSING_SERVICE, + UNKNOWN_PROTOCOL, +) +from core.integration.base import HasIntegrationConfiguration +from core.integration.registry import IntegrationRegistry +from core.integration.settings import BaseSettings +from core.model import ( + IntegrationConfiguration, + IntegrationLibraryConfiguration, + get_one, + json_serializer, +) +from core.selftest import HasSelfTestsIntegrationConfiguration +from core.util.problem_detail import ProblemDetail, ProblemError class SelfTestsController(SettingsController): @@ -71,3 +92,105 @@ def self_tests_process_post(self, identifier): return FAILED_TO_RUN_SELF_TESTS.detailed( _("Failed to run self tests for this %(type)s.", type=self.type) ) + + +T = TypeVar("T", bound=HasIntegrationConfiguration[BaseSettings]) + + +class IntegrationSelfTestsController(Generic[T], ABC): + def __init__( + self, + db: Session, + registry: IntegrationRegistry[T], + ): + self.db = db + self.registry = registry + + @abstractmethod + def run_self_tests( + self, integration: IntegrationConfiguration + ) -> Optional[Dict[str, Any]]: + ... 
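For orientation, a concrete controller only hands the base class a session plus a registry and implements run_self_tests; lookup by id, GET/POST dispatch, and prior-results loading are inherited. A minimal sketch distilling the pattern used by the collection controller earlier in this patch (the class name is hypothetical):

    from typing import Any, Dict, Optional

    from sqlalchemy.orm import Session

    from api.admin.controller.self_tests import IntegrationSelfTestsController
    from api.circulation import CirculationApiType
    from api.integration.registry.license_providers import LicenseProvidersRegistry
    from core.model import IntegrationConfiguration
    from core.selftest import HasSelfTestsIntegrationConfiguration


    class ExampleSelfTestsController(
        IntegrationSelfTestsController[CirculationApiType]
    ):
        def __init__(self, db: Session):
            super().__init__(db, LicenseProvidersRegistry())

        def run_self_tests(
            self, integration: IntegrationConfiguration
        ) -> Optional[Dict[str, Any]]:
            # The registry maps integration.protocol to an implementation class;
            # only classes that opt into the self-test mixin can actually run.
            protocol_class = self.get_protocol_class(integration)
            if issubclass(protocol_class, HasSelfTestsIntegrationConfiguration):
                results, _ = protocol_class.run_self_tests(
                    self.db, protocol_class, self.db, integration.collection
                )
                return results
            return None
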
+ + def get_protocol_class(self, integration: IntegrationConfiguration) -> Type[T]: + if not integration.protocol or integration.protocol not in self.registry: + raise ProblemError(problem_detail=UNKNOWN_PROTOCOL) + return self.registry[integration.protocol] + + def look_up_by_id(self, identifier: int) -> IntegrationConfiguration: + service = get_one( + self.db, + IntegrationConfiguration, + id=identifier, + goal=self.registry.goal, + ) + if not service: + raise (ProblemError(problem_detail=MISSING_SERVICE)) + return service + + @staticmethod + def get_info(integration: IntegrationConfiguration) -> Dict[str, Any]: + info = dict( + id=integration.id, + name=integration.name, + protocol=integration.protocol, + goal=integration.goal, + settings=integration.settings_dict, + ) + return info + + @staticmethod + def get_library_configuration( + integration: IntegrationConfiguration, + ) -> Optional[IntegrationLibraryConfiguration]: + if not integration.library_configurations: + return None + return integration.library_configurations[0] + + def get_prior_test_results( + self, protocol_class: Type[T], integration: IntegrationConfiguration + ) -> Dict[str, Any]: + if issubclass(protocol_class, HasSelfTestsIntegrationConfiguration): + self_test_results = protocol_class.load_self_test_results(integration) # type: ignore[unreachable] + else: + self_test_results = dict( + exception=("Self tests are not supported for this integration."), + disabled=True, + ) + + return self_test_results + + def process_self_tests(self, identifier: Optional[int]) -> Response | ProblemDetail: + if not identifier: + return MISSING_IDENTIFIER + try: + if flask.request.method == "GET": + return self.self_tests_process_get(identifier) + else: + return self.self_tests_process_post(identifier) + except ProblemError as e: + return e.problem_detail + + def self_tests_process_get(self, identifier: int) -> Response: + integration = self.look_up_by_id(identifier) + info = self.get_info(integration) + protocol_class = self.get_protocol_class(integration) + + self_test_results = self.get_prior_test_results(protocol_class, integration) + + info["self_test_results"] = ( + self_test_results if self_test_results else "No results yet" + ) + return Response( + json_serializer({"self_test_results": info}), + status=200, + mimetype="application/json", + ) + + def self_tests_process_post(self, identifier: int) -> Response: + integration = self.look_up_by_id(identifier) + results = self.run_self_tests(integration) + if results is not None: + return Response("Successfully ran new self tests", 200) + else: + raise ProblemError(problem_detail=FAILED_TO_RUN_SELF_TESTS) diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index 277605c802..d4314b2d85 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -2,7 +2,7 @@ import json import logging -from typing import TYPE_CHECKING, Any, Dict, Optional, Type, cast +from typing import TYPE_CHECKING, Any, Dict, Optional, Type import flask from flask import Response @@ -24,7 +24,6 @@ ) from api.admin.validator import Validator from api.controller import CirculationManagerController -from api.integration.registry.license_providers import LicenseProvidersRegistry from core.external_search import ExternalSearchIndex from core.integration.base import ( HasChildIntegrationConfiguration, @@ -43,8 +42,6 @@ get_one, get_one_or_create, ) -from core.opds_import import OPDSImporter, OPDSImportMonitor -from core.selftest import BaseHasSelfTests from 
core.util.problem_detail import ProblemDetail if TYPE_CHECKING: @@ -307,14 +304,16 @@ def _set_configuration_library( library = get_one(self._db, Library, short_name=info_copy.pop("short_name")) if not library: raise RuntimeError("Could not find the configuration library") - config = None # Validate first validated_data = protocol_class.library_settings_class()(**info_copy) + # Attach the configuration - config = configuration.for_library(cast(int, library.id), create=True) - config.settings_dict = validated_data.dict() - return config + library_configuration = IntegrationLibraryConfiguration( + library=library, settings_dict=validated_data.dict() + ) + configuration.library_configurations.append(library_configuration) + return library_configuration def _set_integration_library(self, integration, library_info, protocol): library = get_one(self._db, Library, short_name=library_info.get("short_name")) @@ -407,24 +406,7 @@ def _get_prior_test_results(self, item, protocol_class=None, *extra_args): self_test_results = None try: - if self.type == "collection": - if not item.protocol or not len(item.protocol): - return None - - if not protocol_class: - registry = LicenseProvidersRegistry() - protocol_class = registry.get(item.protocol) - - if item.protocol == OPDSImportMonitor.PROTOCOL: - protocol_class = OPDSImportMonitor - extra_args = (OPDSImporter,) - - if issubclass(protocol_class, BaseHasSelfTests): - self_test_results = protocol_class.prior_test_results( - self._db, protocol_class, self._db, item, *extra_args - ) - - elif self.type == "search service": + if self.type == "search service": self_test_results = ExternalSearchIndex.prior_test_results( self._db, None, self._db, item ) @@ -432,20 +414,6 @@ def _get_prior_test_results(self, item, protocol_class=None, *extra_args): self_test_results = protocol_class.prior_test_results( self._db, *extra_args ) - elif self.type == "patron authentication service": - library = None - if len(item.libraries): - library = item.libraries[0] - self_test_results = protocol_class.prior_test_results( - self._db, None, library, item - ) - else: - self_test_results = dict( - exception=_( - "You must associate this service with at least one library before you can run self tests for it." - ), - disabled=True, - ) except Exception as e: # This is bad, but not so bad that we should short-circuit diff --git a/api/admin/problem_details.py b/api/admin/problem_details.py index 5d4c881a4b..081e2586ac 100644 --- a/api/admin/problem_details.py +++ b/api/admin/problem_details.py @@ -168,15 +168,6 @@ detail=_("You must identify the analytics service by its name."), ) -COLLECTION_NAME_ALREADY_IN_USE = pd( - "http://librarysimplified.org/terms/problem/collection-name-already-in-use", - status_code=400, - title=_("Collection name already in use"), - detail=_( - "The collection name must be unique, and there's already a collection with the specified name." 
- ), -) - CANNOT_DELETE_COLLECTION_WITH_CHILDREN = pd( "http://librarysimplified.org/terms/problem/cannot-delete-collection-with-children", status_code=400, diff --git a/api/axis.py b/api/axis.py index 04bd50f3e1..567be25189 100644 --- a/api/axis.py +++ b/api/axis.py @@ -116,7 +116,7 @@ class Axis360Settings(BaseSettings): password: str = FormField( form=ConfigurationFormItem(label=_("Password"), required=True) ) - external_account_id: Optional[str] = FormField( + external_account_id: str = FormField( form=ConfigurationFormItem( label=_("Library ID"), required=True, @@ -221,8 +221,8 @@ def __init__(self, _db: Session, collection: Collection) -> None: ) super().__init__(_db, collection) - self.library_id = collection.external_account_id or "" settings = self.settings + self.library_id = settings.external_account_id self.username = settings.username self.password = settings.password diff --git a/api/bibliotheca.py b/api/bibliotheca.py index fc11c706b9..a66df81e45 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -95,7 +95,7 @@ class BibliothecaSettings(BaseSettings): required=True, ) ) - external_account_id: Optional[str] = FormField( + external_account_id: str = FormField( form=ConfigurationFormItem( label=_("Library ID"), required=True, @@ -171,7 +171,7 @@ def __init__(self, _db, collection): self.version = self.DEFAULT_VERSION self.account_id = settings.username self.account_key = settings.password - self.library_id = collection.external_account_id + self.library_id = settings.external_account_id self.base_url = self.DEFAULT_BASE_URL if not self.account_id or not self.account_key or not self.library_id: @@ -339,9 +339,6 @@ def _simple_http_get(self, url, headers, *args, **kwargs): """This will be overridden in MockBibliothecaAPI.""" return Representation.simple_http_get(url, headers, *args, **kwargs) - def external_integration(self, _db): - return self.collection.external_integration - def _run_self_tests(self, _db): def _count_events(): now = utc_now() diff --git a/api/circulation.py b/api/circulation.py index 0ea18556f2..097f4f2ed4 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -325,7 +325,7 @@ class APIAwareFulfillmentInfo(FulfillmentInfo, ABC): def __init__( self, - api: BaseCirculationAPI[BaseSettings, BaseSettings], + api: CirculationApiType, data_source_name: Optional[str], identifier_type: Optional[str], identifier: Optional[str], @@ -625,10 +625,7 @@ def settings(self) -> SettingsType: return self.settings_load(self.integration_configuration()) def library_settings(self, library: Library | int) -> LibrarySettingsType | None: - library_id = library.id if isinstance(library, Library) else library - if library_id is None: - return None - libconfig = self.integration_configuration().for_library(library_id=library_id) + libconfig = self.integration_configuration().for_library(library) if libconfig is None: return None config = self.library_settings_load(libconfig) @@ -713,6 +710,9 @@ def update_availability(self, licensepool: LicensePool) -> None: ... +CirculationApiType = BaseCirculationAPI[BaseSettings, BaseSettings] + + class PatronActivityCirculationAPI( BaseCirculationAPI[SettingsType, LibrarySettingsType], ABC ): @@ -740,9 +740,7 @@ def __init__( db: Session, library: Library, analytics: Optional[Analytics] = None, - registry: Optional[ - IntegrationRegistry[BaseCirculationAPI[BaseSettings, BaseSettings]] - ] = None, + registry: Optional[IntegrationRegistry[CirculationApiType]] = None, ): """Constructor. 
@@ -806,7 +804,7 @@ def library(self) -> Optional[Library]: def api_for_license_pool( self, licensepool: LicensePool - ) -> Optional[BaseCirculationAPI[BaseSettings, BaseSettings]]: + ) -> Optional[CirculationApiType]: """Find the API to use for the given license pool.""" return self.api_for_collection.get(licensepool.collection.id) diff --git a/api/controller.py b/api/controller.py index 4c854fa913..d7a296faa3 100644 --- a/api/controller.py +++ b/api/controller.py @@ -82,6 +82,8 @@ DeliveryMechanism, Hold, Identifier, + IntegrationConfiguration, + IntegrationLibraryConfiguration, Library, LicensePool, LicensePoolDeliveryMechanism, @@ -652,13 +654,27 @@ def load_licensepools(self, library, identifier_type, identifier): """ _db = Session.object_session(library) pools = ( - _db.query(LicensePool) - .join(LicensePool.collection) - .join(LicensePool.identifier) - .join(Collection.libraries) - .filter(Identifier.type == identifier_type) - .filter(Identifier.identifier == identifier) - .filter(Library.id == library.id) + _db.scalars( + select(LicensePool) + .join(Collection, LicensePool.collection_id == Collection.id) + .join(Identifier, LicensePool.identifier_id == Identifier.id) + .join( + IntegrationConfiguration, + Collection.integration_configuration_id + == IntegrationConfiguration.id, + ) + .join( + IntegrationLibraryConfiguration, + IntegrationConfiguration.id + == IntegrationLibraryConfiguration.parent_id, + ) + .where( + Identifier.type == identifier_type, + Identifier.identifier == identifier, + IntegrationLibraryConfiguration.library_id == library.id, + ) + ) + .unique() .all() ) if not pools: @@ -973,7 +989,7 @@ def crawlable_collection_feed(self, collection_name): """Build or retrieve a crawlable acquisition feed for the requested collection. 
""" - collection = get_one(self._db, Collection, name=collection_name) + collection = Collection.by_name(self._db, collection_name) if not collection: return NO_SUCH_COLLECTION title = collection.name diff --git a/api/integration/registry/license_providers.py b/api/integration/registry/license_providers.py index 134ec494e1..0f47eb7033 100644 --- a/api/integration/registry/license_providers.py +++ b/api/integration/registry/license_providers.py @@ -6,13 +6,10 @@ from core.integration.registry import IntegrationRegistry if TYPE_CHECKING: - from api.circulation import BaseCirculationAPI # noqa: autoflake - from core.integration.settings import BaseSettings # noqa: autoflake + from api.circulation import CirculationApiType # noqa: autoflake -class LicenseProvidersRegistry( - IntegrationRegistry["BaseCirculationAPI[BaseSettings, BaseSettings]"] -): +class LicenseProvidersRegistry(IntegrationRegistry["CirculationApiType"]): def __init__(self) -> None: super().__init__(Goals.LICENSE_GOAL) diff --git a/api/local_analytics_exporter.py b/api/local_analytics_exporter.py index d5a608d038..e8ec83c61a 100644 --- a/api/local_analytics_exporter.py +++ b/api/local_analytics_exporter.py @@ -11,6 +11,7 @@ Edition, Genre, Identifier, + IntegrationConfiguration, Library, LicensePool, Work, @@ -112,7 +113,7 @@ def analytics_query(self, start, end, locations=None, library=None): Edition.imprint, Edition.language, CirculationEvent.location, - Collection.name.label("collection_name"), + IntegrationConfiguration.name.label("collection_name"), Library.short_name.label("library_short_name"), Library.name.label("library_name"), Edition.medium, @@ -130,6 +131,11 @@ def analytics_query(self, start, end, locations=None, library=None): .join(Work, Work.id == LicensePool.work_id) .join(Edition, Work.presentation_edition_id == Edition.id) .join(Collection, LicensePool.collection_id == Collection.id) + .join( + IntegrationConfiguration, + Collection.integration_configuration_id + == IntegrationConfiguration.id, + ) .join(DataSource, LicensePool.data_source_id == DataSource.id) .outerjoin(Library, CirculationEvent.library_id == Library.id) ) diff --git a/api/monitor.py b/api/monitor.py index cfe3619253..6e9df5075a 100644 --- a/api/monitor.py +++ b/api/monitor.py @@ -8,6 +8,7 @@ Collection, ExternalIntegration, Hold, + IntegrationConfiguration, LicensePool, Loan, ) @@ -36,7 +37,7 @@ def where_clause(self): """ source_of_truth = or_( LicensePool.open_access == True, - ExternalIntegration.protocol.in_(self.SOURCE_OF_TRUTH_PROTOCOLS), + IntegrationConfiguration.protocol.in_(self.SOURCE_OF_TRUTH_PROTOCOLS), ) source_of_truth_subquery = ( @@ -44,8 +45,8 @@ def where_clause(self): .join(self.MODEL_CLASS.license_pool) .join(LicensePool.collection) .join( - ExternalIntegration, - Collection.external_integration_id == ExternalIntegration.id, + IntegrationConfiguration, + Collection.integration_configuration_id == IntegrationConfiguration.id, ) .filter(source_of_truth) ) diff --git a/api/odl.py b/api/odl.py index 22107dca86..a7596f26df 100644 --- a/api/odl.py +++ b/api/odl.py @@ -12,7 +12,7 @@ from flask import url_for from flask_babel import lazy_gettext as _ from lxml.etree import Element -from pydantic import HttpUrl, PositiveInt +from pydantic import AnyHttpUrl, HttpUrl, PositiveInt from requests import Response from sqlalchemy.sql.expression import or_ from uritemplate import URITemplate @@ -79,7 +79,7 @@ class ODLAPIConstants: class ODLSettings(OPDSImporterSettings): - external_account_id: Optional[HttpUrl] = FormField( + 
external_account_id: AnyHttpUrl = FormField( form=ConfigurationFormItem( label=_("ODL feed URL"), required=True, diff --git a/api/odl2.py b/api/odl2.py index d5654bab64..60a18c03b3 100644 --- a/api/odl2.py +++ b/api/odl2.py @@ -37,7 +37,7 @@ from core.model.patron import Hold, Loan, Patron -class ODL2Settings(OPDS2ImporterSettings, ODLSettings): +class ODL2Settings(ODLSettings, OPDS2ImporterSettings): skipped_license_formats: List[str] = FormField( default=["text/html"], alias="odl2_skipped_license_formats", diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 04fcaea61a..7542b04abc 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -20,7 +20,6 @@ Collection, Credential, DeliveryMechanism, - ExternalIntegration, Hyperlink, Identifier, LicensePool, @@ -102,18 +101,14 @@ def label(cls) -> str: def __init__(self, _db: Session, collection: Collection): super().__init__(_db, collection) - self.external_integration_id = collection.external_integration.id settings = self.settings self.data_source_name = settings.data_source self.username = settings.username self.password = settings.password - self.feed_url = collection.external_account_id + self.feed_url = settings.external_account_id self.auth_url: Optional[str] = None - def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: - return get_one(_db, ExternalIntegration, id=self.external_integration_id) - def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: """Try to get a token.""" yield self.run_test("Negotiate a fulfillment token", self._get_token, _db) diff --git a/api/overdrive.py b/api/overdrive.py index 36959e960d..092c4dad85 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -9,7 +9,7 @@ import time import urllib.parse from threading import RLock -from typing import Any, Dict, List, Set, Tuple, Union, cast +from typing import Any, Dict, List, Set, Tuple, Union from urllib.parse import quote, urlsplit, urlunsplit import dateutil @@ -19,7 +19,7 @@ from flask_babel import lazy_gettext as _ from requests import Response from requests.structures import CaseInsensitiveDict -from sqlalchemy.exc import NoResultFound +from sqlalchemy import select from sqlalchemy.orm import Query, Session from sqlalchemy.orm.exc import StaleDataError @@ -40,7 +40,11 @@ from core.config import CannotLoadConfiguration, Configuration from core.connection_config import ConnectionSetting from core.coverage import BibliographicCoverageProvider -from core.integration.base import HasChildIntegrationConfiguration +from core.integration.base import ( + HasChildIntegrationConfiguration, + integration_settings_update, +) +from core.integration.goals import Goals from core.integration.settings import ( BaseSettings, ConfigurationFormItem, @@ -70,6 +74,7 @@ ExternalIntegration, Hyperlink, Identifier, + IntegrationConfiguration, LicensePool, LicensePoolDeliveryMechanism, Measurement, @@ -77,7 +82,6 @@ Patron, Representation, Subject, - get_one_or_create, ) from core.monitor import CollectionMonitor, IdentifierSweepMonitor, TimelineMonitor from core.scripts import InputScript, Script @@ -89,19 +93,6 @@ class OverdriveConstants: - OVERDRIVE_CLIENT_KEY = "overdrive_client_key" - OVERDRIVE_CLIENT_SECRET = "overdrive_client_secret" - OVERDRIVE_SERVER_NICKNAME = "overdrive_server_nickname" - OVERDRIVE_WEBSITE_ID = "overdrive_website_id" - - # Note that the library ID is not included here because it is not Overdrive-specific - OVERDRIVE_CONFIGURATION_KEYS = { - 
OVERDRIVE_CLIENT_KEY, - OVERDRIVE_CLIENT_SECRET, - OVERDRIVE_SERVER_NICKNAME, - OVERDRIVE_WEBSITE_ID, - } - PRODUCTION_SERVERS = "production" TESTING_SERVERS = "testing" @@ -207,10 +198,10 @@ class OverdriveChildSettings(BaseSettings): class OverdriveAPI( - PatronActivityCirculationAPI, + PatronActivityCirculationAPI[OverdriveSettings, OverdriveLibrarySettings], CirculationInternalFormatsMixin, HasCollectionSelfTests, - HasChildIntegrationConfiguration, + HasChildIntegrationConfiguration[OverdriveSettings, OverdriveChildSettings], OverdriveConstants, ): SET_DELIVERY_MECHANISM_AT = BaseCirculationAPI.FULFILL_STEP @@ -370,62 +361,40 @@ def __init__(self, _db, collection): % collection.protocol ) - _library_id = collection.external_account_id - if not _library_id: - raise ValueError( - "Collection %s must have an external account ID" % collection.id - ) - else: - self._library_id = _library_id - - self._db = _db - self._external_integration = collection.external_integration - if collection.id is None: - raise ValueError( - "Collection passed into OverdriveAPI must have an ID, but %s does not" - % collection.name - ) - self._collection_id = collection.id - - # Initialize configuration information. - self._integration_configuration_id = cast( - int, collection.integration_configuration.id - ) - self._configuration = OverdriveData() - if collection.parent: # This is an Overdrive Advantage account. - self.parent_library_id = collection.parent.external_account_id + parent_settings = self.settings_load( + collection.parent.integration_configuration + ) + self.parent_library_id = parent_settings.external_account_id # We're going to inherit all of the Overdrive credentials # from the parent (the main Overdrive account), except for the # library ID, which we already set. 
- parent_integration = collection.parent.integration_configuration - parent_config = self.settings_load(parent_integration) - for key in OverdriveConstants.OVERDRIVE_CONFIGURATION_KEYS: - parent_value = getattr(parent_config, key, None) - setattr(self._configuration, key, parent_value) + self._settings = self.settings_load( + collection.integration_configuration, + collection.parent.integration_configuration, + ) else: self.parent_library_id = None + self._settings = self.settings_load(collection.integration_configuration) - # Self settings should override parent settings where available - settings = collection.integration_configuration.settings_dict - for name, schema in self.settings_class().schema()["properties"].items(): - if name in settings or not hasattr(self._configuration, name): - setattr( - self._configuration, name, settings.get(name, schema.get("default")) - ) + self._library_id = self._settings.external_account_id + if not self._library_id: + raise ValueError( + "Collection %s must have an external account ID" % collection.id + ) - if not self._configuration.overdrive_client_key: + if not self._settings.overdrive_client_key: raise CannotLoadConfiguration("Overdrive client key is not configured") - if not self._configuration.overdrive_client_secret: + if not self._settings.overdrive_client_secret: raise CannotLoadConfiguration( "Overdrive client password/secret is not configured" ) - if not self._configuration.overdrive_website_id: + if not self._settings.overdrive_website_id: raise CannotLoadConfiguration("Overdrive website ID is not configured") - self._server_nickname = self._configuration.overdrive_server_nickname + self._server_nickname = self._settings.overdrive_server_nickname self._hosts = self._determine_hosts(server_nickname=self._server_nickname) @@ -439,9 +408,9 @@ def __init__(self, _db, collection): OverdriveBibliographicCoverageProvider(collection, api_class=self) ) - def configuration(self): - """Overdrive has a different implementation for configuration""" - return self._configuration + @property + def settings(self) -> OverdriveSettings: + return self._settings def _determine_hosts(self, *, server_nickname: str) -> Dict[str, str]: # Figure out which hostnames we'll be using when constructing @@ -451,9 +420,6 @@ def _determine_hosts(self, *, server_nickname: str) -> Dict[str, str]: return dict(self.HOSTS[server_nickname]) - def external_integration(self, db: Session) -> ExternalIntegration: - return self._external_integration - def endpoint(self, url: str, **kwargs) -> str: """Create the URL to an Overdrive API endpoint. 
@@ -494,10 +460,6 @@ def collection_token(self): self._collection_token = library["collectionToken"] return self._collection_token - @property - def collection(self) -> Optional[Collection]: - return Collection.by_id(self._db, id=self._collection_id) - @property def source(self): return DataSource.lookup(self._db, DataSource.OVERDRIVE) @@ -507,7 +469,7 @@ def ils_name(self, library): config = self.integration_configuration().for_library(library.id) if not config: return self.ILS_NAME_DEFAULT - return config.settings_dict.get(self.ILS_NAME_KEY, self.ILS_NAME_DEFAULT) + return self.library_settings_load(config).ils_name @property def advantage_library_id(self): @@ -820,25 +782,25 @@ def make_link_safe(cls, url: str) -> str: def _do_get(self, url: str, headers, **kwargs) -> Response: """This method is overridden in MockOverdriveAPI.""" url = self.endpoint(url) - kwargs["max_retry_count"] = int(self._configuration.max_retry_count) + kwargs["max_retry_count"] = self.settings.max_retry_count kwargs["timeout"] = 120 return HTTP.get_with_timeout(url, headers=headers, **kwargs) def _do_post(self, url: str, payload, headers, **kwargs) -> Response: """This method is overridden in MockOverdriveAPI.""" url = self.endpoint(url) - kwargs["max_retry_count"] = int(self._configuration.max_retry_count) + kwargs["max_retry_count"] = self.settings.max_retry_count kwargs["timeout"] = 120 return HTTP.post_with_timeout(url, payload, headers=headers, **kwargs) def website_id(self) -> bytes: - return self._configuration.overdrive_website_id.encode("utf-8") + return self.settings.overdrive_website_id.encode("utf-8") def client_key(self) -> bytes: - return self._configuration.overdrive_client_key.encode("utf-8") + return self.settings.overdrive_client_key.encode("utf-8") def client_secret(self) -> bytes: - return self._configuration.overdrive_client_secret.encode("utf-8") + return self.settings.overdrive_client_secret.encode("utf-8") def library_id(self) -> str: return self._library_id @@ -968,7 +930,7 @@ def scope_string(self, library): its own Patron Authentication. """ return "websiteid:{} authorizationname:{}".format( - self._configuration.overdrive_website_id, + self.settings.overdrive_website_id, self.ils_name(library), ) @@ -2851,37 +2813,53 @@ def to_collection(self, _db): collection, Overdrive Advantage collection) """ # First find the parent Collection. - try: - parent = ( - Collection.by_protocol(_db, ExternalIntegration.OVERDRIVE) - .filter(Collection.external_account_id == self.parent_library_id) - .one() + parent = _db.execute( + select(Collection) + .join(IntegrationConfiguration) + .where( + IntegrationConfiguration.protocol == ExternalIntegration.OVERDRIVE, + IntegrationConfiguration.goal == Goals.LICENSE_GOAL, + IntegrationConfiguration.settings_dict.contains( + {"external_account_id": self.parent_library_id} + ), ) - except NoResultFound as e: + ).scalar_one_or_none() + if parent is None: # Without the parent's credentials we can't access the child. raise ValueError( "Cannot create a Collection whose parent does not already exist." ) name = parent.name + " / " + self.name - child, is_new = get_one_or_create( - _db, - Collection, - parent_id=parent.id, - external_account_id=self.library_id, - create_method_kwargs=dict(name=name), - ) - if is_new: - # Make sure the child has its protocol set appropriately. 
- integration = child.create_external_integration( - ExternalIntegration.OVERDRIVE + child = _db.execute( + select(Collection) + .join(IntegrationConfiguration) + .where( + Collection.parent_id == parent.id, + IntegrationConfiguration.protocol == ExternalIntegration.OVERDRIVE, + IntegrationConfiguration.goal == Goals.LICENSE_GOAL, + IntegrationConfiguration.settings_dict.contains( + {"external_account_id" == self.library_id} + ), ) - configuration = child.create_integration_configuration( - ExternalIntegration.OVERDRIVE + ).scalar_one_or_none() + + if child is None: + # The child doesn't exist yet. Create it. + child, _ = Collection.by_name_and_protocol( + _db, name, ExternalIntegration.OVERDRIVE + ) + child.parent = parent + child_settings = OverdriveChildSettings.construct( + external_account_id=self.library_id ) + integration_settings_update( + OverdriveChildSettings, child.integration_configuration, child_settings + ) + else: + # Set or update the name of the collection to reflect the name of + # the library, just in case that name has changed. + child.integration_configuration.name = name - # Set or update the name of the collection to reflect the name of - # the library, just in case that name has changed. - child.name = name return parent, child @@ -2971,11 +2949,11 @@ def do_run(self, *args, **kwargs): query: Query = Collection.by_protocol( self._db, protocol=ExternalIntegration.OVERDRIVE ) - for c in query.filter(Collection.parent_id == None): - collection: Collection = c + for collection in query.filter(Collection.parent_id == None): api = self._create_overdrive_api(collection=collection) client_key = api.client_key().decode() client_secret = api.client_secret().decode() + library_id = api.library_id() try: library_token = api.collection_token @@ -2986,12 +2964,15 @@ def do_run(self, *args, **kwargs): Collection.parent_id == collection.id ) already_configured_aa_libraries = [ - e.external_account_id for e in existing_child_collections + OverdriveAPI.child_settings_load( + e.integration_configuration + ).external_account_id + for e in existing_child_collections ] self._data.append( [ collection.name, - collection.external_account_id, + library_id, client_key, client_secret, library_token, @@ -3003,7 +2984,7 @@ def do_run(self, *args, **kwargs): ) except Exception as e: logging.error( - f"Could not connect to collection {c.name}: reason: {str(e)}." + f"Could not connect to collection {collection.name}: reason: {str(e)}." ) file_path = parsed.output_file_path[0] diff --git a/api/selftest.py b/api/selftest.py index c3eb5e5bef..dfc3bbb2b6 100644 --- a/api/selftest.py +++ b/api/selftest.py @@ -1,6 +1,6 @@ from __future__ import annotations -from abc import ABC +from abc import ABC, abstractmethod from typing import Generator, Iterable, Optional, Tuple, Union from sqlalchemy.orm.session import Session @@ -120,7 +120,9 @@ class HasSelfTests(CoreHasSelfTests, HasPatronSelfTests): """Circulation specific self-tests, with the external integration paradigm""" -class HasCollectionSelfTests(HasSelfTestsIntegrationConfiguration, HasPatronSelfTests): +class HasCollectionSelfTests( + HasSelfTestsIntegrationConfiguration, HasPatronSelfTests, ABC +): """Extra tests to verify the integrity of imported collections of books. @@ -128,7 +130,14 @@ class HasCollectionSelfTests(HasSelfTestsIntegrationConfiguration, HasPatronSelf point to the Collection to be tested. """ + @property + @abstractmethod + def collection(self) -> Collection | None: + ... 
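The Overdrive changes above rely on child integrations (Advantage accounts) inheriting credentials from their parent collection, and the HasChildIntegrationConfiguration.settings_load override below implements that merge. A minimal sketch of the merge semantics with hypothetical stand-in values (the real code goes through the pydantic settings classes rather than raw dicts):

    # Hypothetical parent (main Overdrive account) and child (Advantage) settings.
    parent_settings = {
        "overdrive_client_key": "key",
        "overdrive_client_secret": "secret",
        "overdrive_website_id": "website",
        "external_account_id": "parent-library-id",
    }
    child_settings = {"external_account_id": "advantage-library-id"}

    # Child values win; everything not overridden is inherited from the parent.
    merged = dict(parent_settings)
    merged.update(child_settings)

    assert merged["external_account_id"] == "advantage-library-id"
    assert merged["overdrive_client_key"] == "key"
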
+ def integration(self, _db: Session) -> IntegrationConfiguration | None: + if not self.collection: + return None return self.collection.integration_configuration def _no_delivery_mechanisms_test(self): diff --git a/core/integration/base.py b/core/integration/base.py index db17e20ae6..bd01635f2f 100644 --- a/core/integration/base.py +++ b/core/integration/base.py @@ -1,7 +1,17 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import TYPE_CHECKING, Any, Dict, Generic, Mapping, Protocol, Type, TypeVar +from typing import ( + TYPE_CHECKING, + Any, + Dict, + Generic, + Mapping, + Optional, + Protocol, + Type, + TypeVar, +) from sqlalchemy.orm import Session from sqlalchemy.orm.attributes import Mapped, flag_modified @@ -21,7 +31,7 @@ class IntegrationConfigurationProtocol(Protocol): def integration_settings_load( settings_cls: Type[T], - integration: IntegrationConfigurationProtocol, + integration: IntegrationConfigurationProtocol | Dict[str, Any], ) -> T: """ Load the settings object for an integration from the database. @@ -31,12 +41,15 @@ def integration_settings_load( when round tripping from the database (such as enum) and construct() doesn't do that. :param settings_cls: The settings class that the settings should be loaded into. - :param integration: The integration to load the settings from. This should be a - SQLAlchemy model with a settings_dict JSONB column. + :param integration: The integration to load the settings from or a dict that should + be loaded into the model. If it is an integration, it should be a SQLAlchemy model + with a settings_dict JSONB column. :return: An instance of the settings class loaded with the settings from the database. """ - settings_dict = integration.settings_dict + settings_dict = ( + integration if isinstance(integration, dict) else integration.settings_dict + ) return settings_cls(**settings_dict) @@ -72,6 +85,7 @@ def integration_settings_update( SettingsType = TypeVar("SettingsType", bound=BaseSettings, covariant=True) LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) +ChildSettingsType = TypeVar("ChildSettingsType", bound=BaseSettings, covariant=True) class HasIntegrationConfiguration(Generic[SettingsType], ABC): @@ -167,9 +181,58 @@ def library_settings_update( ) -class HasChildIntegrationConfiguration(HasIntegrationConfiguration[SettingsType], ABC): +class HasChildIntegrationConfiguration( + Generic[SettingsType, ChildSettingsType], + HasIntegrationConfiguration[SettingsType], + ABC, +): @classmethod @abstractmethod - def child_settings_class(cls) -> Type[BaseSettings]: + def child_settings_class(cls) -> Type[ChildSettingsType]: """Get the child settings class""" ... + + @classmethod + def child_settings_load(cls, child: IntegrationConfiguration) -> ChildSettingsType: + """ + Load the child settings object for this integration from the database. + """ + return integration_settings_load(cls.child_settings_class(), child) + + @classmethod + def settings_load( + cls, + integration: IntegrationConfiguration, + parent: Optional[IntegrationConfiguration] = None, + ) -> SettingsType: + """ + Load the full settings object for this integration from the database. + + If a parent is provided, the child settings will be merged with the parent settings, with the child + settings taking precedence. 
+ """ + if parent is None: + return super().settings_load(integration) + else: + parent_settings = super().settings_load(parent) + child_settings = cls.child_settings_load(integration) + + merged_settings = parent_settings.dict() + merged_settings.update(child_settings.dict()) + return integration_settings_load(cls.settings_class(), merged_settings) + + @classmethod + def child_settings_update( + cls, + integration: IntegrationConfiguration, + new_settings: BaseSettings | Mapping[str, Any], + merge: bool = False, + ) -> None: + """ + Update the settings for this library integration in the database. + + See the documentation for `integration_settings_update` for more details. + """ + integration_settings_update( + cls.child_settings_class(), integration, new_settings, merge + ) diff --git a/core/lane.py b/core/lane.py index 59c0f9c651..7ddacb5af4 100644 --- a/core/lane.py +++ b/core/lane.py @@ -48,6 +48,7 @@ DataSource, Edition, Genre, + IntegrationConfiguration, Library, LicensePool, Work, @@ -57,11 +58,7 @@ tuple_to_numericrange, ) from core.model.before_flush_decorator import Listener -from core.model.configuration import ( - ConfigurationAttributeValue, - ConfigurationSetting, - ExternalIntegration, -) +from core.model.configuration import ConfigurationAttributeValue, ExternalIntegration from core.model.constants import EditionConstants from core.model.hybrid import hybrid_property from core.model.listeners import site_configuration_has_changed @@ -711,7 +708,7 @@ def modify_search_filter(self, filter): self.collection_name and self.collection_name != self.COLLECTION_NAME_ALL ): - collection = get_one(_db, Collection, name=self.collection_name) + collection = Collection.by_name(_db, self.collection_name) if collection: filter.collection_ids = [collection.id] @@ -2301,22 +2298,21 @@ def _restrict_query_for_no_hold_collections( # Modify the query to not show holds on collections # that don't allow it # This query looks like a prime candidate for some in-memory caching - restricted_collections = ( - _db.query(Collection.id) + restricted_collections = _db.execute( + select(Collection.id) .join( - ConfigurationSetting, - Collection.external_integration_id - == ConfigurationSetting.external_integration_id, + IntegrationConfiguration, + Collection.integration_configuration_id == IntegrationConfiguration.id, ) - .filter( - Collection.id.in_(self.collection_ids), - ConfigurationSetting.library_id == self.library_id, - ConfigurationSetting.key == ExternalIntegration.DISPLAY_RESERVES, - ConfigurationSetting.value == ConfigurationAttributeValue.NOVALUE.value, + .where( + IntegrationConfiguration.settings_dict.contains( + { + ExternalIntegration.DISPLAY_RESERVES: ConfigurationAttributeValue.NOVALUE.value + } + ) ) - .all() - ) - restricted_collection_ids = (r[0] for r in restricted_collections) + ).all() + restricted_collection_ids = (r.id for r in restricted_collections) # If a licensepool is from a collection that restricts holds # and has no available copies, then we don't want to see it diff --git a/core/migration/util.py b/core/migration/util.py index 0085ce6a93..1da519cd2b 100644 --- a/core/migration/util.py +++ b/core/migration/util.py @@ -1,5 +1,6 @@ from __future__ import annotations +import logging from typing import Any, List import sqlalchemy as sa @@ -64,3 +65,15 @@ def drop_enum(op: Any, enum_name: str, checkfirst: bool = True) -> None: Alembic migration helper function to drop an enum type. 
""" sa.Enum(name=enum_name).drop(op.get_bind(), checkfirst=checkfirst) + + +def migration_logger(revision: str) -> logging.Logger: + """ + Create a logger for a migration revision. + + This logger will be used to log messages during the migration. + """ + log = logging.getLogger(f"palace.migration.{revision}") + log.setLevel(logging.INFO) + log.disabled = False + return log diff --git a/core/model/__init__.py b/core/model/__init__.py index c05aaf8db7..67a4ab9966 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -11,7 +11,7 @@ from pydantic.json import pydantic_encoder from sqlalchemy import create_engine from sqlalchemy.engine import Connection -from sqlalchemy.exc import IntegrityError +from sqlalchemy.exc import DatabaseError, IntegrityError from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import Session, sessionmaker from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound @@ -54,6 +54,13 @@ def pg_advisory_lock( connection.execute(text(f"SELECT pg_advisory_lock({lock_id});")) try: yield + except IntegrityError: + # If there was an IntegrityError, and we are in a transaction, + # we need to roll it back before we are able to release the lock. + transaction = connection.get_transaction() + if transaction is not None: + transaction.rollback() + raise finally: # Close the lock connection.execute(text(f"SELECT pg_advisory_unlock({lock_id});")) diff --git a/core/model/collection.py b/core/model/collection.py index 1e32de6263..5c6c79feed 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -1,6 +1,5 @@ from __future__ import annotations -from abc import ABCMeta, abstractmethod from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, TypeVar from sqlalchemy import ( @@ -12,14 +11,15 @@ Unicode, UniqueConstraint, exists, + select, ) -from sqlalchemy.orm import Mapped, Query, backref, mapper, relationship -from sqlalchemy.orm.exc import NoResultFound +from sqlalchemy.ext.associationproxy import association_proxy +from sqlalchemy.orm import Mapped, Query, mapper, relationship from sqlalchemy.orm.session import Session from sqlalchemy.sql.expression import and_, or_ from core.integration.goals import Goals -from core.model import Base, create, get_one_or_create +from core.model import Base, create from core.model.configuration import ConfigurationSetting, ExternalIntegration from core.model.constants import EditionConstants from core.model.coverage import CoverageRecord @@ -28,10 +28,7 @@ from core.model.hassessioncache import HasSessionCache from core.model.hybrid import hybrid_property from core.model.identifier import Identifier -from core.model.integration import ( - IntegrationConfiguration, - IntegrationLibraryConfiguration, -) +from core.model.integration import IntegrationConfiguration from core.model.library import Library from core.model.licensing import LicensePool, LicensePoolDeliveryMechanism from core.model.work import Work @@ -51,34 +48,38 @@ class Collection(Base, HasSessionCache): __tablename__ = "collections" id = Column(Integer, primary_key=True, nullable=False) - name = Column(Unicode, unique=True, nullable=False, index=True) + # TODO: This should no longer be used. And will be removed in the next release. + # Collections store their configurations in integration configurations now. + # This is only left here in case there needs to be a rollback to the current + # release. 
+ _name_deprecated = Column("name", Unicode) DATA_SOURCE_NAME_SETTING = "data_source" - # For use in forms that edit Collections. - EXTERNAL_ACCOUNT_ID_KEY = "external_account_id" + # TODO: This should no longer be used. And will be removed in the next release. + # Collections store their configurations in integration configurations now. + # This is only left here in case there needs to be a rollback to the current + # release. + _external_account_id_deprecated = Column("external_account_id", Unicode) - # How does the provider of this collection distinguish it from - # other collections it provides? On the other side this is usually - # called a "library ID". - external_account_id = Column(Unicode, nullable=True) + # TODO: This should no longer be used. And will be removed in the next release. + # Collections store their configurations in integration configurations now. + # This is only left here in case there needs to be a rollback to the current + # release. + _external_integration_id_deprecated = Column("external_integration_id", Integer) # How do we connect to the provider of this collection? Any url, # authentication information, or additional configuration goes # into the external integration, as does the 'protocol', which # designates the integration technique we will use to actually get # the metadata and licenses. Each Collection has a distinct - # ExternalIntegration. - external_integration_id = Column( - Integer, ForeignKey("externalintegrations.id"), unique=True, index=True - ) - _external_integration: ExternalIntegration - + # integration configuration. integration_configuration_id = Column( Integer, - ForeignKey("integration_configurations.id", ondelete="SET NULL"), + ForeignKey("integration_configurations.id"), unique=True, index=True, + nullable=False, ) integration_configuration: Mapped[IntegrationConfiguration] = relationship( "IntegrationConfiguration", @@ -94,27 +95,26 @@ class Collection(Base, HasSessionCache): # secret as the Overdrive collection, but it has a distinct # external_account_id. parent_id = Column(Integer, ForeignKey("collections.id"), index=True) - # SQLAlchemy will create a Collection-typed field called "parent". - parent: Collection - - # When deleting a collection, this flag is set to True so that the deletion - # script can take care of deleting it in the background. This is - # useful for deleting large collections which can timeout when deleting. - marked_for_deletion = Column(Boolean, default=False) + parent: Collection = relationship( + "Collection", remote_side=[id], back_populates="children" + ) # A collection may have many child collections. For example, # An Overdrive collection may have many children corresponding # to Overdrive Advantage collections. children: Mapped[List[Collection]] = relationship( - "Collection", backref=backref("parent", remote_side=[id]), uselist=True + "Collection", back_populates="parent", uselist=True ) + # When deleting a collection, this flag is set to True so that the deletion + # script can take care of deleting it in the background. This is + # useful for deleting large collections which can timeout when deleting. + marked_for_deletion = Column(Boolean, default=False) + # A Collection can provide books to many Libraries. 
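The `libraries` relationship declared just below is replaced by an association proxy, so Collection-Library membership now lives on `IntegrationLibraryConfiguration` rows rather than the old `collections_libraries` join table. A rough usage sketch, with `collection` and `library` standing in for persisted model instances:

    # Appending a Library goes through the proxy's creator and builds the
    # backing IntegrationLibraryConfiguration row.
    collection.libraries.append(library)
    assert library in collection.libraries

    # Removing it (or clearing the list, as Collection.delete() now does)
    # drops that row again via the delete-orphan cascade on the parent
    # integration configuration.
    collection.libraries.remove(library)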
- libraries: Mapped[List[Library]] = relationship( - "Library", - secondary=lambda: collections_libraries, - backref="collections", - uselist=True, + # https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html#composite-association-proxies + libraries: Mapped[List[Library]] = association_proxy( + "integration_configuration", "libraries" ) # A Collection can include many LicensePools. @@ -164,7 +164,7 @@ def __repr__(self) -> str: return f'' def cache_key(self) -> Tuple[str | None, str | None]: - return self.name, self.external_integration.protocol + return self.name, self.integration_configuration.protocol @classmethod def by_name_and_protocol( @@ -198,25 +198,55 @@ def _by_name_and_protocol( """ name, protocol = cache_key - qu = cls.by_protocol(_db, protocol) - qu = qu.filter(Collection.name == name) - try: - collection = qu.one() + query = select(IntegrationConfiguration).where( + IntegrationConfiguration.name == name + ) + integration_or_none = _db.execute(query).scalar_one_or_none() + if integration_or_none is None: + integration, _ = create( + _db, + IntegrationConfiguration, + protocol=protocol, + goal=Goals.LICENSE_GOAL, + name=name, + ) + else: + integration = integration_or_none + + if integration.goal != Goals.LICENSE_GOAL: + raise ValueError( + f'Integration "{name}" does not have goal "{Goals.LICENSE_GOAL.name}".' + ) + if integration.protocol != protocol: + raise ValueError( + f'Integration "{name}" does not use protocol "{protocol}".' + ) + + if integration.collection is not None: + collection = integration.collection is_new = False - except NoResultFound as e: - # Make a new Collection. - collection, is_new = get_one_or_create(_db, Collection, name=name) - if not is_new and collection.protocol != protocol: - # The collection already exists, it just uses a different - # protocol than the one we asked about. - raise ValueError( - f'Collection "{name}" does not use protocol "{protocol}".' - ) - integration = collection.create_external_integration(protocol=protocol) - collection.external_integration.protocol = protocol - collection.create_integration_configuration(protocol) + else: + collection, _ = create( # type: ignore[unreachable] + _db, + Collection, + integration_configuration=integration, + ) + is_new = True + return collection, is_new + @classmethod + def by_name(cls, _db: Session, name: str) -> Collection | None: + """Find a Collection by name.""" + return _db.execute( + select(Collection) + .join(IntegrationConfiguration) + .where( + IntegrationConfiguration.name == name, + IntegrationConfiguration.goal == Goals.LICENSE_GOAL, + ) + ).scalar_one_or_none() + @classmethod def by_protocol(cls, _db: Session, protocol: str | None) -> Query[Collection]: """Query collections that get their licenses through the given protocol. @@ -241,35 +271,17 @@ def by_protocol(cls, _db: Session, protocol: str | None) -> Query[Collection]: return qu - @classmethod - def by_datasource( - cls, _db: Session, data_source: DataSource | str - ) -> Query[Collection]: - """Query collections that are associated with the given DataSource. - - Collections marked for deletion are not included. 
- """ - data_source_name = ( - data_source.name if isinstance(data_source, DataSource) else data_source - ) - - qu = ( - _db.query(cls) - .join( - IntegrationConfiguration, - cls.integration_configuration_id == IntegrationConfiguration.id, - ) - .filter( - IntegrationConfiguration.settings_dict[ - Collection.DATA_SOURCE_NAME_SETTING - ].astext - == data_source_name - ) - .filter(Collection.marked_for_deletion == False) - ) - return qu + @property + def name(self) -> str: + """What is the name of this collection?""" + if self.integration_configuration is None: + raise ValueError("Collection has no integration configuration.") + name = self.integration_configuration.name + if not name: + raise ValueError("Collection has no name.") + return name - @hybrid_property + @property def protocol(self) -> str: """What protocol do we need to use to get licenses for this collection? @@ -296,23 +308,6 @@ def protocol(self, new_protocol: str) -> None: for child in self.children: child.protocol = new_protocol - @hybrid_property - def primary_identifier_source(self) -> str | None: - """Identify if should try to use another identifier than """ - return self.integration_configuration.settings_dict.get( - ExternalIntegration.PRIMARY_IDENTIFIER_SOURCE - ) - - @primary_identifier_source.setter - def primary_identifier_source(self, new_primary_identifier_source: str) -> None: - """Modify the primary identifier source in use by this Collection.""" - self.integration_configuration.settings_dict = ( - self.integration_configuration.settings_dict.copy() - ) - self.integration_configuration.settings_dict[ - ExternalIntegration.PRIMARY_IDENTIFIER_SOURCE - ] = new_primary_identifier_source - # For collections that can control the duration of the loans they # create, the durations are stored in these settings and new loans are # expected to be created using these settings. For collections @@ -353,10 +348,8 @@ def default_loan_period_setting( collection has it for this number of days. """ key = self.loan_period_key(medium) - if library.id is None: - return None - config = self.integration_configuration.for_library(library.id) + config = self.integration_configuration.for_library(library) if config is None: return None @@ -413,78 +406,6 @@ def default_audience(self, new_value: str) -> None: """ self._set_settings(**{self.DEFAULT_AUDIENCE_KEY: str(new_value)}) - def create_external_integration(self, protocol: str) -> ExternalIntegration: - """Create an ExternalIntegration for this Collection. - - To be used immediately after creating a new Collection, - e.g. in by_name_and_protocol, from_metadata_identifier, and - various test methods that create mock Collections. - - If an external integration already exists, return it instead - of creating another one. - - :param protocol: The protocol known to be in use when getting - licenses for this collection. - """ - _db = Session.object_session(self) - goal = ExternalIntegration.LICENSE_GOAL - external_integration, is_new = get_one_or_create( - _db, - ExternalIntegration, - id=self.external_integration_id, - create_method_kwargs=dict(protocol=protocol, goal=goal), - ) - if external_integration.protocol != protocol: - raise ValueError( - "Located ExternalIntegration, but its protocol (%s) does not match desired protocol (%s)." 
- % (external_integration.protocol, protocol) - ) - self.external_integration_id = external_integration.id - return external_integration - - def create_integration_configuration( - self, protocol: str - ) -> IntegrationConfiguration: - _db = Session.object_session(self) - goal = Goals.LICENSE_GOAL - if self.integration_configuration_id: - integration = self.integration_configuration - else: - integration, is_new = create( - _db, - IntegrationConfiguration, - protocol=protocol, - goal=goal, - name=self.name, - ) - if integration.protocol != protocol: - raise ValueError( - "Located ExternalIntegration, but its protocol (%s) does not match desired protocol (%s)." - % (integration.protocol, protocol) - ) - self.integration_configuration_id = integration.id - # Immediately accessing the relationship fills out the data - return self.integration_configuration - - @property - def external_integration(self) -> ExternalIntegration: - """Find the external integration for this Collection, assuming - it already exists. - - This is generally a safe assumption since by_name_and_protocol and - from_metadata_identifier both create ExternalIntegrations for the - Collections they create. - """ - # We don't enforce this on the database level because it is - # legitimate for a newly created Collection to have no - # ExternalIntegration. But by the time it's being used for real, - # it needs to have one. - if not self.external_integration_id: - raise ValueError( - "No known external integration for collection %s" % self.name - ) - return self._external_integration - @hybrid_property def data_source(self) -> DataSource | None: """Find the data source associated with this Collection. @@ -545,47 +466,6 @@ def parents(self) -> Generator[Collection, None, None]: yield parent yield from parent.parents - def disassociate_library(self, library: Library) -> None: - """Disassociate a Library from this Collection and delete any relevant - ConfigurationSettings. - """ - if library is None or library not in self.libraries: - # No-op. 
- return - - _db = Session.object_session(self) - if self.external_integration_id: - qu = ( - _db.query(ConfigurationSetting) - .filter(ConfigurationSetting.library == library) - .filter( - ConfigurationSetting.external_integration - == self.external_integration - ) - ) - qu.delete() - else: - raise ValueError( - "No known external integration for collection %s" % self.name - ) - if self.integration_configuration_id: - qu = ( - _db.query(IntegrationLibraryConfiguration) - .filter(IntegrationLibraryConfiguration.library_id == library.id) - .filter( - IntegrationLibraryConfiguration.parent_id - == self.integration_configuration_id - ) - ) - qu.delete() - else: - raise ValueError( - "No known integration library configuration for collection %s" - % self.name - ) - - self.libraries.remove(library) - @property def pools_with_no_delivery_mechanisms(self) -> Query[LicensePool]: """Find all LicensePools in this Collection that have no delivery @@ -616,8 +496,6 @@ def explain(self, include_secrets: bool = False) -> List[str]: lines.append('Protocol: "%s"' % integration.protocol) for library in self.libraries: lines.append('Used by library: "%s"' % library.short_name) - if self.external_account_id: - lines.append('External account ID: "%s"' % self.external_account_id) for name in sorted(integration.settings_dict): value = integration.settings_dict[name] if ( @@ -723,8 +601,7 @@ def delete(self, search_index: ExternalSearchIndex | None = None) -> None: _db = Session.object_session(self) # Disassociate all libraries from this collection. - for library in self.libraries: - self.disassociate_library(library) + self.libraries.clear() # Delete all the license pools. This should be the only part # of the application where LicensePools are permanently @@ -742,29 +619,23 @@ def delete(self, search_index: ExternalSearchIndex | None = None) -> None: _db.delete(pool) - # Delete the ExternalIntegration associated with this - # Collection, assuming it wasn't deleted already. - if self.external_integration: - _db.delete(self.external_integration) - # Now delete the Collection itself. _db.delete(self) _db.commit() -collections_libraries: Table = Table( +# TODO: This should no longer be used. And will be removed in the next release. +# Collections store their configurations in integration configurations now. +# This is only left here in case there needs to be a rollback to the current +# release. 
+_collections_libraries_deprecated: Table = Table( "collections_libraries", Base.metadata, Column( "collection_id", Integer, - ForeignKey("collections.id"), - index=True, - nullable=False, - ), - Column( - "library_id", Integer, ForeignKey("libraries.id"), index=True, nullable=False ), + Column("library_id", Integer), UniqueConstraint("collection_id", "library_id"), ) @@ -830,19 +701,3 @@ class CollectionMissing(Exception): ), UniqueConstraint("collection_id", "customlist_id"), ) - - -class HasExternalIntegrationPerCollection(metaclass=ABCMeta): - """Interface allowing to get access to an external integration""" - - @abstractmethod - def collection_external_integration( - self, collection: Optional[Collection] - ) -> ExternalIntegration: - """Returns an external integration associated with the collection - - :param collection: Collection - - :return: External integration associated with the collection - """ - raise NotImplementedError() diff --git a/core/model/configuration.py b/core/model/configuration.py index c68bca2722..7cd424e9b0 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -3,9 +3,8 @@ # ExternalIntegration, ExternalIntegrationLink, ConfigurationSetting import json import logging -from abc import ABCMeta, abstractmethod from enum import Enum -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING, List from sqlalchemy import Column, ForeignKey, Index, Integer, Unicode from sqlalchemy.orm import Mapped, relationship @@ -42,10 +41,6 @@ class ExternalIntegration(Base): # to this are defined in the circulation manager. PATRON_AUTH_GOAL = "patron_auth" - # These integrations are associated with external services such - # as Overdrive which provide access to books. - LICENSE_GOAL = "licenses" - # These integrations are associated with external services such as # the metadata wrangler, which provide information about books, # but not the books themselves. @@ -181,14 +176,6 @@ class ExternalIntegration(Base): uselist=True, ) - # Any number of Collections may designate an ExternalIntegration - # as the source of their configuration - collections: Mapped[List[Collection]] = relationship( - "Collection", - backref="_external_integration", - foreign_keys="Collection.external_integration_id", - ) - libraries: Mapped[List[Library]] = relationship( "Library", back_populates="integrations", @@ -331,24 +318,6 @@ def password(self): def password(self, new_password): return self.set_setting(self.PASSWORD, new_password) - @hybrid_property - def custom_accept_header(self): - return self.setting(self.CUSTOM_ACCEPT_HEADER).value - - @custom_accept_header.setter - def custom_accept_header(self, new_custom_accept_header): - return self.set_setting(self.CUSTOM_ACCEPT_HEADER, new_custom_accept_header) - - @hybrid_property - def primary_identifier_source(self): - return self.setting(self.PRIMARY_IDENTIFIER_SOURCE).value - - @primary_identifier_source.setter - def primary_identifier_source(self, new_primary_identifier_source): - return self.set_setting( - self.PRIMARY_IDENTIFIER_SOURCE, new_primary_identifier_source - ) - def explain(self, library=None, include_secrets=False): """Create a series of human-readable strings to explain an ExternalIntegration's settings. 
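With the `custom_accept_header` and `primary_identifier_source` hybrid properties gone, those per-collection options are read from the typed importer settings rather than from ConfigurationSetting rows. A minimal sketch of the consumer side, assuming an OPDS collection and using the loader introduced in core/integration/base.py (the Accept header default is only a placeholder):

    from core.integration.base import integration_settings_load
    from core.opds_import import OPDSImporterSettings

    settings = integration_settings_load(
        OPDSImporterSettings, collection.integration_configuration
    )

    headers = {"Accept": "application/atom+xml"}
    if settings.custom_accept_header:
        # A collection may override the Accept header sent to the remote feed.
        headers["Accept"] = settings.custom_accept_header

    # When set, the importer prefers the configured identifier source over
    # the feed's own primary identifiers.
    prefer_secondary_identifier = settings.primary_identifier_source is not None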
@@ -679,20 +648,6 @@ def excluded_audio_data_sources(cls, _db): return value -class HasExternalIntegration(metaclass=ABCMeta): - """Interface allowing to get access to an external integration""" - - @abstractmethod - def external_integration(self, db: Session) -> Optional[ExternalIntegration]: - """Returns an external integration associated with this object - - :param db: Database session - - :return: External integration associated with this object - """ - raise NotImplementedError() - - class ConfigurationAttributeValue(Enum): """Enumeration of common configuration attribute values""" diff --git a/core/model/integration.py b/core/model/integration.py index ebac448cb6..893d07d55a 100644 --- a/core/model/integration.py +++ b/core/model/integration.py @@ -1,15 +1,17 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List, Literal, overload +from typing import TYPE_CHECKING, Any, Dict, List from sqlalchemy import Column from sqlalchemy import Enum as SQLAlchemyEnum -from sqlalchemy import ForeignKey, Integer, Unicode +from sqlalchemy import ForeignKey, Index, Integer, Unicode, select from sqlalchemy.dialects.postgresql import JSONB +from sqlalchemy.ext.associationproxy import association_proxy from sqlalchemy.orm import Mapped, Query, Session, relationship +from sqlalchemy.orm.attributes import flag_modified from core.integration.goals import Goals -from core.model import Base, get_one_or_create +from core.model import Base if TYPE_CHECKING: from core.model import Collection, Library @@ -49,6 +51,24 @@ class IntegrationConfiguration(Base): "settings", JSONB, nullable=False, default=dict ) + # Integration specific context data. Stored as json. This is used to + # store configuration data that is not user supplied for a particular + # integration. + context: Mapped[Dict[str, Any]] = Column(JSONB, nullable=False, default=dict) + + __table_args__ = ( + Index( + "ix_integration_configurations_settings_dict", + settings_dict, + postgresql_using="gin", + ), + ) + + def context_update(self, new_context: Dict[str, Any]) -> None: + """Update the context for this integration""" + self.context.update(new_context) + flag_modified(self, "context") + # Self test results, stored as json. self_test_results = Column(JSONB, nullable=False, default=dict) @@ -58,45 +78,44 @@ class IntegrationConfiguration(Base): "IntegrationLibraryConfiguration", back_populates="parent", uselist=True, - cascade="all, delete", + cascade="all, delete-orphan", passive_deletes=True, ) - collection: Mapped[Collection] = relationship("Collection", uselist=False) + collection: Mapped[Collection] = relationship( + "Collection", back_populates="integration_configuration", uselist=False + ) - @overload - def for_library( - self, library_id: int, create: Literal[True] - ) -> IntegrationLibraryConfiguration: - ... + # https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html#simplifying-association-objects + libraries: Mapped[List[Library]] = association_proxy( + "library_configurations", + "library", + creator=lambda library: IntegrationLibraryConfiguration(library=library), + ) - @overload def for_library( - self, library_id: int | None, create: bool = False + self, library: int | Library | None ) -> IntegrationLibraryConfiguration | None: - ... 
+ """Fetch the library configuration for a specific library""" + from core.model import Library - def for_library( - self, library_id: int | None, create: bool = False - ) -> IntegrationLibraryConfiguration | None: - """Fetch the library configuration specifically by library_id""" - if library_id is None: + if library is None: return None - for config in self.library_configurations: - if config.library_id == library_id: - return config - if create: - session = Session.object_session(self) - config, _ = get_one_or_create( - session, - IntegrationLibraryConfiguration, - parent_id=self.id, - library_id=library_id, + db = Session.object_session(self) + if isinstance(library, Library): + if library.id is None: + return None + library_id = library.id + else: + library_id = library + + return db.execute( + select(IntegrationLibraryConfiguration).where( + IntegrationLibraryConfiguration.library_id == library_id, + IntegrationLibraryConfiguration.parent_id == self.id, ) - session.refresh(self) - return config - return None + ).scalar_one_or_none() def __repr__(self) -> str: return f"" @@ -112,6 +131,10 @@ class IntegrationLibraryConfiguration(Base): It stores the configuration settings for each external integration in a single json row in the database. These settings are then serialized using Pydantic to a python object. + + This is a many-to-many relationship between IntegrationConfiguration and + Library. Implementing the Association Object pattern: + https://docs.sqlalchemy.org/en/14/orm/basic_relationships.html#association-object """ __tablename__ = "integration_library_configurations" @@ -128,8 +151,7 @@ class IntegrationLibraryConfiguration(Base): "IntegrationConfiguration", back_populates="library_configurations" ) - # The library this integration is associated with. This is optional - # and is only used for integrations that are specific to a library. + # The library this integration is associated with. 
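A usage note for the reworked `for_library` above: it now accepts either a `Library` instance or a bare id and returns the association row, which can then be handed to a typed library-settings loader, mirroring the `ils_name` change at the top of this patch. A sketch with placeholder objects, where `api` is assumed to be an OverdriveAPI instance:

    config = collection.integration_configuration.for_library(library)
    if config is not None:
        # Load the per-library settings object and read a field from it.
        ils_name = api.library_settings_load(config).ils_name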
library_id = Column( Integer, ForeignKey("libraries.id", ondelete="CASCADE"), diff --git a/core/model/library.py b/core/model/library.py index 9f66939124..9991101bbe 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -10,6 +10,7 @@ Generator, List, Optional, + Sequence, Tuple, Type, Union, @@ -26,6 +27,7 @@ Table, Unicode, UniqueConstraint, + select, ) from sqlalchemy.dialects.postgresql import JSONB from sqlalchemy.orm import Mapped, Query, relationship @@ -36,6 +38,7 @@ from core.entrypoint import EntryPoint from core.facets import FacetConstants from core.integration.base import integration_settings_load, integration_settings_update +from core.integration.goals import Goals from core.model import Base, get_one from core.model.announcements import Announcement from core.model.customlist import customlist_sharedlibrary @@ -189,8 +192,25 @@ class Library(Base, HasSessionCache): uselist=False, ) - # Typing specific - collections: List[Collection] + @property + def collections(self) -> Sequence[Collection]: + """Get the collections for this library""" + from core.model import ( + Collection, + IntegrationConfiguration, + IntegrationLibraryConfiguration, + ) + + _db = Session.object_session(self) + return _db.scalars( + select(Collection) + .join(IntegrationConfiguration) + .join(IntegrationLibraryConfiguration) + .where( + IntegrationConfiguration.goal == Goals.LICENSE_GOAL, + IntegrationLibraryConfiguration.library_id == self.id, + ) + ).all() # Cache of the libraries loaded settings object _settings: Optional[LibrarySettings] diff --git a/core/model/listeners.py b/core/model/listeners.py index 6148dc9999..889320a503 100644 --- a/core/model/listeners.py +++ b/core/model/listeners.py @@ -108,8 +108,6 @@ def _site_configuration_has_changed(_db, cooldown=1): # catch most that slip through the cracks. @event.listens_for(Collection.children, "append") @event.listens_for(Collection.children, "remove") -@event.listens_for(Collection.libraries, "append") -@event.listens_for(Collection.libraries, "remove") @event.listens_for(ExternalIntegration.settings, "append") @event.listens_for(ExternalIntegration.settings, "remove") @event.listens_for(Library.integrations, "append") diff --git a/core/opds2_import.py b/core/opds2_import.py index 418caa052d..3206118939 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -10,7 +10,6 @@ Dict, Iterable, List, - Literal, Optional, Tuple, Type, @@ -66,7 +65,6 @@ RightsStatus, Subject, ) -from core.model.configuration import ConfigurationSetting from core.model.constants import IdentifierType from core.opds_import import ( BaseOPDSAPI, @@ -194,16 +192,10 @@ def description(cls) -> str: def __init__(self, _db: Session, collection: Collection): super().__init__(_db, collection) - # TODO: This needs to be refactored to use IntegrationConfiguration, - # but it has been temporarily rolled back, since the IntegrationConfiguration - # code caused problems fulfilling TOKEN_AUTH books in production. - # This should be fixed as part of the work PP-313 to fully remove - # ExternalIntegrations from our collections code. 
- token_auth_configuration = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, collection.external_integration - ) - self.token_auth_configuration = ( - token_auth_configuration.value if token_auth_configuration else None + self.token_auth_configuration: str | None = ( + collection.integration_configuration.context.get( + ExternalIntegration.TOKEN_AUTH + ) ) @classmethod @@ -249,6 +241,12 @@ def fulfill_token_auth( ) return fulfillment + if not self.token_auth_configuration: + self.log.warning( + "No token auth configuration found, unable to fulfill via OPDS2 token auth." + ) + return fulfillment + token = self.get_authentication_token( patron, licensepool.data_source, self.token_auth_configuration ) @@ -308,11 +306,6 @@ def __init__( self._parser = parser self.ignored_identifier_types = self.settings.ignored_identifier_types - def assert_importable_content( - self, feed: str, feed_url: str, max_get_attempts: int = 5 - ) -> Literal[True]: - raise NotImplementedError("OPDS2Importer does not support this method") - def _is_identifier_allowed(self, identifier: Identifier) -> bool: """Check the identifier and return a boolean value indicating whether CM can import it. @@ -878,15 +871,6 @@ def _find_formats_in_non_open_access_acquisition_links( return formats - def external_integration(self, db: Session) -> ExternalIntegration: - """Return an external integration associated with this object. - :param db: Database session - :return: External integration associated with this object - """ - if self.collection is None: - raise ValueError("Collection is not set") - return self.collection.external_integration - @staticmethod def _get_publications( feed: opds2_ast.OPDS2Feed, @@ -1034,10 +1018,9 @@ def _parse_feed_links(self, links: list[core_ast.Link]) -> None: for link in links: if first_or_default(link.rels) == Hyperlink.TOKEN_AUTH: # Save the collection-wide token authentication endpoint - auth_setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, self.external_integration(self._db) + self.collection.integration_configuration.context_update( + {ExternalIntegration.TOKEN_AUTH: link.href} ) - auth_setting.value = link.href def extract_feed_data( self, feed: str | opds2_ast.OPDS2Feed, feed_url: str | None = None diff --git a/core/opds_import.py b/core/opds_import.py index 5274a6ef26..ce9f1da987 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -16,12 +16,12 @@ Generic, Iterable, List, - Literal, Optional, Sequence, Tuple, Type, TypeVar, + cast, overload, ) from urllib.parse import urljoin, urlparse @@ -32,15 +32,13 @@ from feedparser import FeedParserDict from flask_babel import lazy_gettext as _ from lxml import etree -from pydantic import HttpUrl +from pydantic import AnyHttpUrl from sqlalchemy.orm.session import Session from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo from api.circulation_exceptions import CurrentlyAvailable, FormatNotAvailable, NotOnHold from api.saml.credential import SAMLCredentialManager -from api.selftest import HasCollectionSelfTests from core.classifier import Classifier -from core.config import IntegrationException from core.connection_config import ConnectionSetting from core.coverage import CoverageFailure from core.integration.base import integration_settings_load @@ -78,7 +76,6 @@ Subject, get_one, ) -from core.model.configuration import HasExternalIntegration from core.model.formats import FormatPrioritiesSettings from core.monitor import CollectionMonitor 
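Tying the OPDS2 token-auth changes above together: the endpoint advertised by a feed is now persisted in the integration's `context` blob during import and read back at fulfillment time, with no ConfigurationSetting involved. A rough round-trip sketch, using a placeholder URL:

    from core.model import ExternalIntegration

    # Import side: _parse_feed_links() records the collection-wide endpoint.
    collection.integration_configuration.context_update(
        {ExternalIntegration.TOKEN_AUTH: "https://auth.example.com/token"}
    )

    # Fulfillment side: None here means the feed never advertised token auth,
    # in which case the importer API logs a warning and returns the
    # fulfillment unchanged.
    token_auth_url = collection.integration_configuration.context.get(
        ExternalIntegration.TOKEN_AUTH
    )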
from core.saml.wayfless import ( @@ -86,7 +83,6 @@ SAMLWAYFlessFulfillmentError, SAMLWAYFlessSetttings, ) -from core.selftest import SelfTestResult from core.util import base64 from core.util.datetime_helpers import datetime_utc, to_utc, utc_now from core.util.http import HTTP, BadResponseException @@ -118,7 +114,7 @@ class OPDSImporterSettings( ): _NO_DEFAULT_AUDIENCE = "" - external_account_id: Optional[HttpUrl] = FormField( + external_account_id: AnyHttpUrl = FormField( form=ConfigurationFormItem( label=_("URL"), required=True, @@ -138,10 +134,10 @@ class OPDSImporterSettings( "assume the books have this target audience." ), type=ConfigurationFormItemType.SELECT, - format="narrow", - options={_NO_DEFAULT_AUDIENCE: _("No default audience")}.update( - {audience: audience for audience in sorted(Classifier.AUDIENCES)} - ), + options={ + **{_NO_DEFAULT_AUDIENCE: _("No default audience")}, + **{audience: audience for audience in sorted(Classifier.AUDIENCES)}, + }, required=False, ), ) @@ -421,12 +417,6 @@ def extract_last_update_dates( def extract_next_links(self, feed: str | bytes) -> List[str]: ... - @abstractmethod - def assert_importable_content( - self, feed: str, feed_url: str, max_get_attempts: int = 5 - ) -> Literal[True]: - ... - @overload def parse_identifier(self, identifier: str) -> Identifier: ... @@ -705,92 +695,13 @@ def __init__( """ super().__init__(_db, collection, data_source_name) - self.primary_identifier_source = None - if collection: - self.primary_identifier_source = collection.primary_identifier_source + self.primary_identifier_source = self.settings.primary_identifier_source # In general, we are cautious when mirroring resources so that # we don't, e.g. accidentally get our IP banned from # gutenberg.org. self.http_get = http_get or Representation.cautious_http_get - def assert_importable_content( - self, feed: str, feed_url: str, max_get_attempts: int = 5 - ) -> Literal[True]: - """Raise an exception if the given feed contains nothing that can, - even theoretically, be turned into a LicensePool. - - By default, this means the feed must link to open-access content - that can actually be retrieved. - """ - metadata, failures = self.extract_feed_data(feed, feed_url) - get_attempts = 0 - - # Find an open-access link, and try to GET it just to make - # sure OPDS feed isn't hiding non-open-access stuff behind an - # open-access link. - # - # To avoid taking forever or antagonizing API providers, we'll - # give up after `max_get_attempts` failures. - for link in self._open_access_links(list(metadata.values())): - url = link.href - success = self._is_open_access_link(url, link.media_type) - if success: - return True - get_attempts += 1 - if get_attempts >= max_get_attempts: - error = ( - "Was unable to GET supposedly open-access content such as %s (tried %s times)" - % (url, get_attempts) - ) - explanation = "This might be an OPDS For Distributors feed, or it might require different authentication credentials." - raise IntegrationException(error, explanation) - - raise IntegrationException( - "No open-access links were found in the OPDS feed.", - "This might be an OPDS for Distributors feed.", - ) - - @classmethod - def _open_access_links( - cls, metadatas: List[Metadata] - ) -> Generator[LinkData, None, None]: - """Find all open-access links in a list of Metadata objects. - - :param metadatas: A list of Metadata objects. - :yield: A sequence of `LinkData` objects. 
- """ - for item in metadatas: - if not item.circulation: - continue - for link in item.circulation.links: - if link.rel == Hyperlink.OPEN_ACCESS_DOWNLOAD: - yield link - - def _is_open_access_link( - self, url: str, type: Optional[str] - ) -> str | Literal[False]: - """Is `url` really an open-access link? - - That is, can we make a normal GET request and get something - that looks like a book? - """ - headers = {} - if type: - headers["Accept"] = type - status, headers, body = self.http_get(url, headers=headers) - if status == 200 and len(body) > 1024 * 10: - # We could also check the media types, but this is good - # enough for now. - return "Found a book-like thing at %s" % url - self.log.error( - "Supposedly open-access link %s didn't give us a book. Status=%s, body length=%s", - url, - status, - len(body), - ) - return False - def extract_next_links(self, feed: str | bytes | FeedParserDict) -> List[str]: if isinstance(feed, (bytes, str)): parsed = feedparser.parse(feed) @@ -1737,9 +1648,7 @@ def extract_series(cls, series_tag: Element) -> Tuple[Optional[str], Optional[st return series_name, series_position -class OPDSImportMonitor( - CollectionMonitor, HasCollectionSelfTests, HasExternalIntegration -): +class OPDSImportMonitor(CollectionMonitor): """Periodically monitor a Collection's OPDS archive feed and import every title it mentions. """ @@ -1779,16 +1688,13 @@ def __init__( "Collection %s has no associated data source." % collection.name ) - self.external_integration_id = collection.external_integration.id - feed_url = self.opds_url(collection) - self.feed_url = "" if feed_url is None else feed_url - self.force_reimport = force_reimport self.importer = import_class(_db, collection=collection, **import_class_kwargs) settings = self.importer.settings self.username = settings.username self.password = settings.password + self.feed_url = settings.external_account_id self.custom_accept_header = settings.custom_accept_header self._max_retry_count = settings.max_retry_count @@ -1797,32 +1703,6 @@ def __init__( self._feed_base_url = f"{parsed_url.scheme}://{parsed_url.hostname}{(':' + str(parsed_url.port)) if parsed_url.port else ''}/" super().__init__(_db, collection) - def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: - return get_one(_db, ExternalIntegration, id=self.external_integration_id) - - def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: - """Retrieve the first page of the OPDS feed""" - first_page = self.run_test( - "Retrieve the first page of the OPDS feed (%s)" % self.feed_url, - self.follow_one_link, - self.feed_url, - ) - yield first_page - if not first_page.result: - return - - # We got a page, but does it have anything the importer can - # turn into a Work? - # - # By default, this means it must contain an open-access link. - next_links, content = first_page.result - yield self.run_test( - "Checking for importable content", - self.importer.assert_importable_content, - content, - self.feed_url, - ) - def _get( self, url: str, headers: Dict[str, str] ) -> Tuple[int, Dict[str, str], bytes]: @@ -1866,14 +1746,6 @@ def _update_headers(self, headers: Optional[Dict[str, str]]) -> Dict[str, str]: return headers - def opds_url(self, collection: Collection) -> Optional[str]: - """Returns the OPDS import URL for the given collection. - - By default, this URL is stored as the external account ID, but - subclasses may override this. 
- """ - return collection.external_account_id - def data_source(self, collection: Collection) -> Optional[DataSource]: """Returns the data source name for the given collection. @@ -2029,7 +1901,7 @@ def import_one_feed( # Because we are importing into a Collection, we will immediately # mark a book as presentation-ready if possible. imported_editions, pools, works, failures = self.importer.import_from_feed( - feed, feed_url=self.opds_url(self.collection) + feed, feed_url=self.feed_url ) # Create CoverageRecords for the successful imports. @@ -2053,7 +1925,7 @@ def import_one_feed( def _get_feeds(self) -> Iterable[Tuple[str, bytes]]: feeds = [] - queue = [self.feed_url] + queue = [cast(str, self.feed_url)] seen_links = set() # First, follow the feed's next links until we reach a page with diff --git a/core/scripts.py b/core/scripts.py index e5d4f4ab6f..23b018f5d9 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -11,7 +11,7 @@ from enum import Enum from typing import Generator, Optional, Type -from sqlalchemy import and_, exists, or_, tuple_ +from sqlalchemy import and_, exists, or_, select, tuple_ from sqlalchemy.orm import Query, Session, defer from sqlalchemy.orm.attributes import flag_modified from sqlalchemy.orm.exc import MultipleResultsFound, NoResultFound @@ -23,6 +23,7 @@ Filter, SearchIndexCoverageProvider, ) +from core.integration.goals import Goals from core.lane import Lane from core.metadata_layer import TimestampData from core.model import ( @@ -35,6 +36,7 @@ Edition, ExternalIntegration, Identifier, + IntegrationConfiguration, Library, LicensePool, LicensePoolDeliveryMechanism, @@ -1111,14 +1113,23 @@ def do_run(self, _db=None, cmd_args=None, output=sys.stdout): args = self.parse_command_line(_db, cmd_args=cmd_args) if args.name: name = args.name - collection = get_one(_db, Collection, name=name) + collection = Collection.by_name(_db, name) if collection: collections = [collection] else: output.write("Could not locate collection by name: %s" % name) collections = [] else: - collections = _db.query(Collection).order_by(Collection.name).all() + collections = ( + _db.execute( + select(Collection) + .join(IntegrationConfiguration) + .where(IntegrationConfiguration.goal == Goals.LICENSE_GOAL) + .order_by(IntegrationConfiguration.name) + ) + .scalars() + .all() + ) if not collections: output.write("No collections found.\n") for collection in collections: @@ -1243,7 +1254,7 @@ def do_run(self, _db=None, cmd_args=None, output=sys.stdout): protocol = None name = args.name protocol = args.protocol - collection = get_one(_db, Collection, Collection.name == name) + collection = Collection.by_name(_db, name) if not collection: if protocol: collection, is_new = Collection.by_name_and_protocol( @@ -1258,20 +1269,16 @@ def do_run(self, _db=None, cmd_args=None, output=sys.stdout): ) config = collection.integration_configuration settings = config.settings_dict.copy() - integration = collection.external_integration if protocol: config.protocol = protocol - integration.protocol = protocol if args.external_account_id: - collection.external_account_id = args.external_account_id - + settings["external_account_id"] = args.external_account_id if args.url: settings["url"] = args.url if args.username: settings["username"] = args.username if args.password: settings["password"] = args.password - self.apply_settings(args.setting, integration) if args.setting: for setting in args.setting: key, value = ConfigurationSettingScript._parse_setting(setting) @@ -1288,8 +1295,7 @@ def do_run(self, _db=None, 
cmd_args=None, output=sys.stdout): message += " I only know about: %s" % library_names raise ValueError(message) if collection not in library.collections: - library.collections.append(collection) - config.for_library(library.id, create=True) + collection.libraries.append(library) site_configuration_has_changed(_db) _db.commit() output.write("Configuration settings stored.\n") @@ -1934,7 +1940,8 @@ def look_up_collections(cls, _db, parsed, *args, **kwargs): """ parsed.collections = [] for name in parsed.collection_names: - collection = get_one(_db, Collection, name=name) + collection = Collection.by_name(_db, name) + if not collection: raise ValueError("Unknown collection: %s" % name) parsed.collections.append(collection) diff --git a/core/selftest.py b/core/selftest.py index a01c16c2c1..da7c946112 100644 --- a/core/selftest.py +++ b/core/selftest.py @@ -373,9 +373,6 @@ def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: class HasSelfTestsIntegrationConfiguration(BaseHasSelfTests, LoggerMixin, ABC): - # Typing specific - collection: Any - def store_self_test_results( self, _db: Session, value: Dict[str, Any], results: List[SelfTestResult] ) -> None: diff --git a/pyproject.toml b/pyproject.toml index ffc7598b6e..607e6604dd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,10 +67,13 @@ disallow_untyped_defs = true module = [ "api.admin.announcement_list_validator", "api.admin.config", + "api.admin.controller.collection_self_tests", + "api.admin.controller.collection_settings", "api.admin.controller.discovery_service_library_registrations", "api.admin.controller.discovery_services", "api.admin.controller.integration_settings", "api.admin.controller.library_settings", + "api.admin.controller.patron_auth_service_self_tests", "api.admin.controller.patron_auth_services", "api.admin.form_data", "api.admin.model.dashboard_statistics", diff --git a/tests/api/admin/controller/test_collection_self_tests.py b/tests/api/admin/controller/test_collection_self_tests.py index 8dd7e0a5f2..f48a3c12ab 100644 --- a/tests/api/admin/controller/test_collection_self_tests.py +++ b/tests/api/admin/controller/test_collection_self_tests.py @@ -1,174 +1,179 @@ -from flask_babel import lazy_gettext as _ - -from api.admin.problem_details import * -from api.axis import Axis360API +from unittest.mock import MagicMock + +import pytest +from _pytest.monkeypatch import MonkeyPatch + +from api.admin.controller.collection_self_tests import CollectionSelfTestsController +from api.admin.problem_details import ( + FAILED_TO_RUN_SELF_TESTS, + MISSING_IDENTIFIER, + MISSING_SERVICE, + UNKNOWN_PROTOCOL, +) +from api.integration.registry.license_providers import LicenseProvidersRegistry from api.selftest import HasCollectionSelfTests -from core.opds_import import OPDSImportMonitor +from core.selftest import HasSelfTestsIntegrationConfiguration +from core.util.problem_detail import ProblemDetail, ProblemError from tests.api.mockapi.axis import MockAxis360API -from tests.fixtures.api_admin import SettingsControllerFixture +from tests.fixtures.database import DatabaseTransactionFixture + + +@pytest.fixture +def controller(db: DatabaseTransactionFixture) -> CollectionSelfTestsController: + return CollectionSelfTestsController(db.session) class TestCollectionSelfTests: def test_collection_self_tests_with_no_identifier( - self, settings_ctrl_fixture: SettingsControllerFixture + self, controller: CollectionSelfTestsController ): - with settings_ctrl_fixture.request_context_with_admin("/"): - response = 
settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - None - ) - assert response.title == MISSING_IDENTIFIER.title - assert response.detail == MISSING_IDENTIFIER.detail - assert response.status_code == 400 + response = controller.process_collection_self_tests(None) + assert isinstance(response, ProblemDetail) + assert response.title == MISSING_IDENTIFIER.title + assert response.detail == MISSING_IDENTIFIER.detail + assert response.status_code == 400 def test_collection_self_tests_with_no_collection_found( - self, settings_ctrl_fixture: SettingsControllerFixture + self, controller: CollectionSelfTestsController ): - with settings_ctrl_fixture.request_context_with_admin("/"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - -1 - ) - assert response == NO_SUCH_COLLECTION - assert response.status_code == 404 + with pytest.raises(ProblemError) as excinfo: + controller.self_tests_process_get(-1) + assert excinfo.value.problem_detail == MISSING_SERVICE - def test_collection_self_tests_test_get( - self, settings_ctrl_fixture: SettingsControllerFixture + def test_collection_self_tests_with_unknown_protocol( + self, db: DatabaseTransactionFixture, controller: CollectionSelfTestsController ): - old_prior_test_results = HasCollectionSelfTests.prior_test_results - setattr( - HasCollectionSelfTests, - "prior_test_results", - settings_ctrl_fixture.mock_prior_test_results, + collection = db.collection(protocol="test") + assert collection.integration_configuration.id is not None + with pytest.raises(ProblemError) as excinfo: + controller.self_tests_process_get(collection.integration_configuration.id) + assert excinfo.value.problem_detail == UNKNOWN_PROTOCOL + + def test_collection_self_tests_with_unsupported_protocol( + self, db: DatabaseTransactionFixture, controller: CollectionSelfTestsController + ): + registry = LicenseProvidersRegistry() + registry.register(object, canonical="mock_api") # type: ignore[arg-type] + collection = db.collection(protocol="mock_api") + controller = CollectionSelfTestsController(db.session, registry) + assert collection.integration_configuration.id is not None + result = controller.self_tests_process_get( + collection.integration_configuration.id ) + + assert result.status_code == 200 + assert isinstance(result.json, dict) + assert result.json["self_test_results"]["self_test_results"] == { + "disabled": True, + "exception": "Self tests are not supported for this integration.", + } + + def test_collection_self_tests_test_get( + self, + db: DatabaseTransactionFixture, + controller: CollectionSelfTestsController, + monkeypatch: MonkeyPatch, + ): collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), + db.session, + db.default_library(), + ) + + self_test_results = dict( + duration=0.9, + start="2018-08-08T16:04:05Z", + end="2018-08-08T16:05:05Z", + results=[], + ) + mock = MagicMock(return_value=self_test_results) + monkeypatch.setattr( + HasSelfTestsIntegrationConfiguration, "load_self_test_results", mock ) # Make sure that HasSelfTest.prior_test_results() was called and that # it is in the response's collection object. 
- with settings_ctrl_fixture.request_context_with_admin("/"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - responseCollection = response.get("self_test_results") + assert collection.integration_configuration.id is not None + response = controller.self_tests_process_get( + collection.integration_configuration.id + ) - assert responseCollection.get("id") == collection.id - assert responseCollection.get("name") == collection.name - assert responseCollection.get("protocol") == collection.protocol - assert ( - responseCollection.get("self_test_results") - == settings_ctrl_fixture.self_test_results - ) + data = response.json + assert isinstance(data, dict) + test_results = data.get("self_test_results") + assert isinstance(test_results, dict) - setattr(HasCollectionSelfTests, "prior_test_results", old_prior_test_results) + assert test_results.get("id") == collection.integration_configuration.id + assert test_results.get("name") == collection.name + assert test_results.get("protocol") == collection.protocol + assert test_results.get("self_test_results") == self_test_results + assert mock.call_count == 1 def test_collection_self_tests_failed_post( - self, settings_ctrl_fixture: SettingsControllerFixture + self, + db: DatabaseTransactionFixture, + controller: CollectionSelfTestsController, + monkeypatch: MonkeyPatch, ): - # This makes HasSelfTests.run_self_tests return no values - old_run_self_tests = HasCollectionSelfTests.run_self_tests - setattr( - HasCollectionSelfTests, - "run_self_tests", - settings_ctrl_fixture.mock_failed_run_self_tests, + collection = MockAxis360API.mock_collection( + db.session, + db.default_library(), ) - collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), + # This makes HasSelfTests.run_self_tests return no values + self_test_results = (None, None) + mock = MagicMock(return_value=self_test_results) + monkeypatch.setattr( + HasSelfTestsIntegrationConfiguration, "run_self_tests", mock ) # Failed to run self tests - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) + assert collection.integration_configuration.id is not None - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.failed_run_self_tests_called_with - assert response.title == FAILED_TO_RUN_SELF_TESTS.title - assert response.detail == "Failed to run self tests for this collection." 
- assert response.status_code == 400 + with pytest.raises(ProblemError) as excinfo: + controller.self_tests_process_post(collection.integration_configuration.id) - setattr(HasCollectionSelfTests, "run_self_tests", old_run_self_tests) + assert excinfo.value.problem_detail == FAILED_TO_RUN_SELF_TESTS + + def test_collection_self_tests_run_self_tests_unsupported_collection( + self, + db: DatabaseTransactionFixture, + ): + registry = LicenseProvidersRegistry() + registry.register(object, canonical="mock_api") # type: ignore[arg-type] + collection = db.collection(protocol="mock_api") + controller = CollectionSelfTestsController(db.session, registry) + response = controller.run_self_tests(collection.integration_configuration) + assert response is None def test_collection_self_tests_post( - self, settings_ctrl_fixture: SettingsControllerFixture + self, + db: DatabaseTransactionFixture, ): - old_run_self_tests = HasCollectionSelfTests.run_self_tests - setattr( - HasCollectionSelfTests, - "run_self_tests", - settings_ctrl_fixture.mock_run_self_tests, - ) + mock = MagicMock() - collection = settings_ctrl_fixture.ctrl.db.collection() - # Successfully ran new self tests for the OPDSImportMonitor provider API - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.run_self_tests_called_with - assert response.response == _("Successfully ran new self tests") - assert response._status == "200 OK" - - # The provider API class and the collection should be passed to - # the run_self_tests method of the provider API class. - assert run_self_tests_args[1] == OPDSImportMonitor - assert run_self_tests_args[3] == collection + class MockApi(HasCollectionSelfTests): + def __new__(cls, *args, **kwargs): + nonlocal mock + return mock(*args, **kwargs) - collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), - ) - # Successfully ran new self tests - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.run_self_tests_called_with - assert response.response == _("Successfully ran new self tests") - assert response._status == "200 OK" - - # The provider API class and the collection should be passed to - # the run_self_tests method of the provider API class. 
- assert run_self_tests_args[1] == Axis360API - assert run_self_tests_args[3] == collection + @property + def collection(self) -> None: + return None - collection = MockAxis360API.mock_collection( - settings_ctrl_fixture.ctrl.db.session, - settings_ctrl_fixture.ctrl.db.default_library(), + registry = LicenseProvidersRegistry() + registry.register(MockApi, canonical="Foo") # type: ignore[arg-type] + + collection = db.collection(protocol="Foo") + controller = CollectionSelfTestsController(db.session, registry) + + assert collection.integration_configuration.id is not None + response = controller.self_tests_process_post( + collection.integration_configuration.id ) - collection.protocol = "Non existing protocol" - # clearing out previous call to mocked run_self_tests - settings_ctrl_fixture.run_self_tests_called_with = (None, None) - - # No protocol found so run_self_tests was not called - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - response = settings_ctrl_fixture.manager.admin_collection_self_tests_controller.process_collection_self_tests( - collection.id - ) - - ( - run_self_tests_args, - run_self_tests_kwargs, - ) = settings_ctrl_fixture.run_self_tests_called_with - assert response.title == FAILED_TO_RUN_SELF_TESTS.title - assert response.detail == "Failed to run self tests for this collection." - assert response.status_code == 400 - - # The method returns None but it was not called - assert run_self_tests_args == None - - setattr(HasCollectionSelfTests, "run_self_tests", old_run_self_tests) + + assert response.get_data(as_text=True) == "Successfully ran new self tests" + assert response.status_code == 200 + + mock.assert_called_once_with(db.session, collection) + mock()._run_self_tests.assert_called_once_with(db.session) + assert mock().store_self_test_results.call_count == 1 diff --git a/tests/api/admin/controller/test_collection_settings.py b/tests/api/admin/controller/test_collection_settings.py deleted file mode 100644 index c8f342f2af..0000000000 --- a/tests/api/admin/controller/test_collection_settings.py +++ /dev/null @@ -1,58 +0,0 @@ -from typing import Any -from unittest.mock import PropertyMock, create_autospec, patch - -from api.admin.controller.collection_settings import CollectionSettingsController -from api.controller import CirculationManager -from core.integration.goals import Goals -from core.integration.registry import IntegrationRegistry -from tests.fixtures.database import DatabaseTransactionFixture - - -class TestCollectionSettingsController: - def test_duplicate_protocol_settings(self, db: DatabaseTransactionFixture): - """Dedupe protocol settings using the last settings of the same value""" - manager = create_autospec(spec=CirculationManager) - manager._db = PropertyMock(return_value=db.session) - - class MockProviderAPI: - NAME = "NAME" - SETTINGS = [ - dict(key="k1", value="v1"), - dict(key="k2", value="v2"), # This should get overwritten - dict(key="k2", value="v3"), # Only this should remain - ] - - controller = CollectionSettingsController(manager) - with patch.object( - controller, "registry", IntegrationRegistry[Any](Goals.LICENSE_GOAL) - ) as registry: - registry.register(MockProviderAPI, canonical=MockProviderAPI.NAME) - protocols = controller._get_collection_protocols() - - k2_list = list(filter(lambda x: x["key"] == "k2", protocols[0]["settings"])) - assert len(k2_list) == 1 - assert k2_list[0]["value"] == "v3" - - class MockProviderAPIMulti: - NAME = "NAME" - SETTINGS = [ - dict(key="k1", value="v0"), # This should get 
overwritten - dict(key="k1", value="v1"), # Only this should remain - dict(key="k2", value="v1"), # This should get overwritten - dict(key="k2", value="v2"), # This should get overwritten - dict(key="k2", value="v4"), # Only this should remain - ] - - with patch.object( - controller, "registry", IntegrationRegistry[Any](Goals.LICENSE_GOAL) - ) as registry: - registry.register(MockProviderAPIMulti, canonical=MockProviderAPIMulti.NAME) - protocols = controller._get_collection_protocols() - - k2_list = list(filter(lambda x: x["key"] == "k2", protocols[0]["settings"])) - assert len(k2_list) == 1 - assert k2_list[0]["value"] == "v4" - - k1_list = list(filter(lambda x: x["key"] == "k1", protocols[0]["settings"])) - assert len(k1_list) == 1 - assert k1_list[0]["value"] == "v1" diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index 2444944542..a399673791 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -1,25 +1,26 @@ import json +from typing import Dict import flask import pytest +from flask import Response from werkzeug.datastructures import ImmutableMultiDict from api.admin.exceptions import AdminNotAuthorized from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, CANNOT_DELETE_COLLECTION_WITH_CHILDREN, - COLLECTION_NAME_ALREADY_IN_USE, INCOMPLETE_CONFIGURATION, - MISSING_COLLECTION, + INTEGRATION_NAME_ALREADY_IN_USE, MISSING_COLLECTION_NAME, MISSING_PARENT, + MISSING_SERVICE, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, UNKNOWN_PROTOCOL, ) from api.integration.registry.license_providers import LicenseProvidersRegistry -from api.selftest import HasCollectionSelfTests from core.model import ( Admin, AdminRole, @@ -28,87 +29,87 @@ create, get_one, ) -from core.selftest import HasSelfTests -from tests.fixtures.api_admin import SettingsControllerFixture +from core.util.problem_detail import ProblemDetail +from tests.fixtures.api_admin import AdminControllerFixture from tests.fixtures.database import DatabaseTransactionFixture class TestCollectionSettings: def test_collections_get_with_no_collections( - self, settings_ctrl_fixture: SettingsControllerFixture - ): + self, admin_ctrl_fixture: AdminControllerFixture + ) -> None: + db = admin_ctrl_fixture.ctrl.db # Delete any existing collections created by the test setup. 
- for collection in settings_ctrl_fixture.ctrl.db.session.query(Collection): - settings_ctrl_fixture.ctrl.db.session.delete(collection) + db.session.delete(db.default_collection()) - with settings_ctrl_fixture.request_context_with_admin("/"): + with admin_ctrl_fixture.request_context_with_admin("/"): response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) - assert response.get("collections") == [] + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + assert data.get("collections") == [] - names = {p.get("name") for p in response.get("protocols")} + names = {p.get("name") for p in data.get("protocols", {})} expected_names = {k for k, v in LicenseProvidersRegistry()} assert names == expected_names def test_collections_get_collections_with_multiple_collections( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - old_prior_test_results = HasSelfTests.prior_test_results - setattr( - HasCollectionSelfTests, - "prior_test_results", - settings_ctrl_fixture.mock_prior_test_results, - ) - session = settings_ctrl_fixture.ctrl.db.session + self, admin_ctrl_fixture: AdminControllerFixture + ) -> None: + session = admin_ctrl_fixture.ctrl.db.session + db = admin_ctrl_fixture.ctrl.db - [c1] = settings_ctrl_fixture.ctrl.db.default_library().collections + [c1] = db.default_library().collections - c2 = settings_ctrl_fixture.ctrl.db.collection( + c2 = db.collection( name="Collection 2", protocol=ExternalIntegration.OVERDRIVE, + external_account_id="1234", + settings=dict( + overdrive_client_secret="b", + overdrive_client_key="user", + overdrive_website_id="100", + ), ) - c2.external_account_id = "1234" - DatabaseTransactionFixture.set_settings( - c2.integration_configuration, - overdrive_client_secret="b", - overdrive_client_key="user", - overdrive_website_id="100", - ) - - c3 = settings_ctrl_fixture.ctrl.db.collection( + c3 = db.collection( name="Collection 3", protocol=ExternalIntegration.OVERDRIVE, + external_account_id="5678", ) - c3.external_account_id = "5678" c3.parent = c2 - l1 = settings_ctrl_fixture.ctrl.db.library(short_name="L1") - c3.libraries += [l1, settings_ctrl_fixture.ctrl.db.default_library()] + l1 = db.library(short_name="L1") + c3.libraries += [l1, db.default_library()] assert isinstance(l1.id, int) - l1_config = c3.integration_configuration.for_library(l1.id, create=True) + l1_config = c3.integration_configuration.for_library(l1.id) + assert l1_config is not None DatabaseTransactionFixture.set_settings(l1_config, ebook_loan_duration="14") # Commit the config changes session.commit() - l1_librarian, ignore = create( - settings_ctrl_fixture.ctrl.db.session, Admin, email="admin@l1.org" - ) + l1_librarian, ignore = create(session, Admin, email="admin@l1.org") l1_librarian.add_role(AdminRole.LIBRARIAN, l1) - with settings_ctrl_fixture.request_context_with_admin("/"): - controller = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller - ) + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + + with admin_ctrl_fixture.request_context_with_admin("/"): + controller = admin_ctrl_fixture.manager.admin_collection_settings_controller response = controller.process_collections() + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) # The system admin can see all collections. 
coll2, coll3, coll1 = sorted( - response.get("collections"), key=lambda c: c.get("name") + data.get("collections", []), key=lambda c: c.get("name", "") ) - assert c1.id == coll1.get("id") - assert c2.id == coll2.get("id") - assert c3.id == coll3.get("id") + assert c1.integration_configuration.id == coll1.get("id") + assert c2.integration_configuration.id == coll2.get("id") + assert c3.integration_configuration.id == coll3.get("id") assert c1.name == coll1.get("name") assert c2.name == coll2.get("name") @@ -118,23 +119,15 @@ def test_collections_get_collections_with_multiple_collections( assert c2.protocol == coll2.get("protocol") assert c3.protocol == coll3.get("protocol") - assert settings_ctrl_fixture.self_test_results == coll1.get( - "self_test_results" - ) - assert settings_ctrl_fixture.self_test_results == coll2.get( - "self_test_results" - ) - assert settings_ctrl_fixture.self_test_results == coll3.get( - "self_test_results" - ) - settings1 = coll1.get("settings", {}) settings2 = coll2.get("settings", {}) settings3 = coll3.get("settings", {}) - assert c1.external_account_id == settings1.get("external_account_id") - assert c2.external_account_id == settings2.get("external_account_id") - assert c3.external_account_id == settings3.get("external_account_id") + assert ( + settings1.get("external_account_id") == "http://opds.example.com/feed" + ) + assert settings2.get("external_account_id") == "1234" + assert settings3.get("external_account_id") == "5678" assert c2.integration_configuration.settings_dict[ "overdrive_client_secret" @@ -149,218 +142,179 @@ def test_collections_get_collections_with_multiple_collections( ) assert "L1" == coll3_l1.get("short_name") assert "14" == coll3_l1.get("ebook_loan_duration") - assert ( - settings_ctrl_fixture.ctrl.db.default_library().short_name - == coll3_default.get("short_name") - ) + assert db.default_library().short_name == coll3_default.get("short_name") - with settings_ctrl_fixture.request_context_with_admin("/", admin=l1_librarian): + with admin_ctrl_fixture.request_context_with_admin("/", admin=l1_librarian): # A librarian only sees collections associated with their library. 
response = controller.process_collections() - [coll3] = response.get("collections") - assert c3.id == coll3.get("id") + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + [coll3] = data.get("collections", []) + assert c3.integration_configuration.id == coll3.get("id") coll3_libraries = coll3.get("libraries") assert 1 == len(coll3_libraries) assert "L1" == coll3_libraries[0].get("short_name") assert "14" == coll3_libraries[0].get("ebook_loan_duration") - setattr(HasCollectionSelfTests, "prior_test_results", old_prior_test_results) - + @pytest.mark.parametrize( + "post_data,expected,detailed", + [ + pytest.param( + {"protocol": "Overdrive"}, + MISSING_COLLECTION_NAME, + False, + id="missing_name", + ), + pytest.param( + {"name": "collection"}, + NO_PROTOCOL_FOR_NEW_SERVICE, + False, + id="missing_protocol", + ), + pytest.param( + {"name": "collection", "protocol": "Unknown"}, + UNKNOWN_PROTOCOL, + False, + id="unknown_protocol", + ), + pytest.param( + {"id": "123456789", "name": "collection", "protocol": "Bibliotheca"}, + MISSING_SERVICE, + False, + id="missing_service", + ), + pytest.param( + {"name": "Collection 1", "protocol": "Bibliotheca"}, + INTEGRATION_NAME_ALREADY_IN_USE, + False, + id="name_in_use", + ), + pytest.param( + {"id": "", "name": "Collection 1", "protocol": "Bibliotheca"}, + CANNOT_CHANGE_PROTOCOL, + False, + id="change_protocol", + ), + pytest.param( + { + "name": "Collection 2", + "protocol": "Bibliotheca", + "parent_id": "1234", + }, + PROTOCOL_DOES_NOT_SUPPORT_PARENTS, + False, + id="protocol_does_not_support_parents", + ), + pytest.param( + {"name": "Collection 2", "protocol": "Overdrive", "parent_id": "1234"}, + MISSING_PARENT, + False, + id="missing_parent", + ), + pytest.param( + { + "name": "collection", + "protocol": "OPDS Import", + "external_account_id": "http://url.test", + "data_source": "test", + "libraries": json.dumps([{"short_name": "nosuchlibrary"}]), + }, + NO_SUCH_LIBRARY, + True, + id="no_such_library", + ), + pytest.param( + {"name": "collection", "protocol": "OPDS Import"}, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_opds", + ), + pytest.param( + { + "name": "collection", + "protocol": "Overdrive", + "external_account_id": "1234", + "overdrive_client_key": "user", + "overdrive_client_secret": "password", + }, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_overdrive", + ), + pytest.param( + { + "name": "collection", + "protocol": "Bibliotheca", + "external_account_id": "1234", + "password": "password", + }, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_bibliotheca", + ), + pytest.param( + { + "name": "collection", + "protocol": "Axis 360", + "username": "user", + "password": "password", + }, + INCOMPLETE_CONFIGURATION, + True, + id="incomplete_axis", + ), + ], + ) def test_collections_post_errors( - self, settings_ctrl_fixture: SettingsControllerFixture + self, + admin_ctrl_fixture: AdminControllerFixture, + post_data: Dict[str, str], + expected: ProblemDetail, + detailed: bool, ): - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("protocol", "Overdrive"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == MISSING_COLLECTION_NAME - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection"), - ] - ) 
- response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == NO_PROTOCOL_FOR_NEW_SERVICE - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection"), - ("protocol", "Unknown"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == UNKNOWN_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", "123456789"), - ("name", "collection"), - ("protocol", "Bibliotheca"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == MISSING_COLLECTION + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - collection = settings_ctrl_fixture.ctrl.db.collection( + collection = admin_ctrl_fixture.ctrl.db.collection( name="Collection 1", protocol=ExternalIntegration.OVERDRIVE ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Collection 1"), - ("protocol", "Bibliotheca"), - ] - ) + if "id" in post_data and post_data["id"] == "": + post_data["id"] = str(collection.integration_configuration.id) + + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): + flask.request.form = ImmutableMultiDict(post_data) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) - assert response == COLLECTION_NAME_ALREADY_IN_USE - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + if detailed: + assert isinstance(response, ProblemDetail) + assert response.status_code == expected.status_code + assert response.uri == expected.uri + else: + assert response == expected + + def test_collections_post_errors_no_permissions( + self, admin_ctrl_fixture: AdminControllerFixture + ): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), ("name", "Collection 1"), ("protocol", "Overdrive"), ] ) pytest.raises( AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections, + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections, ) - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(collection.id)), - ("name", "Collection 1"), - ("protocol", "Bibliotheca"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == CANNOT_CHANGE_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "Collection 2"), - ("protocol", "Bibliotheca"), - ("parent_id", "1234"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == PROTOCOL_DOES_NOT_SUPPORT_PARENTS - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = 
ImmutableMultiDict( - [ - ("name", "Collection 2"), - ("protocol", "Overdrive"), - ("parent_id", "1234"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response == MISSING_PARENT - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection"), - ("protocol", "OPDS Import"), - ("external_account_id", "http://url.test"), - ("data_source", "test"), - ("libraries", json.dumps([{"short_name": "nosuchlibrary"}])), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == NO_SUCH_LIBRARY.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "OPDS Import"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "Overdrive"), - ("external_account_id", "1234"), - ("overdrive_client_key", "user"), - ("overdrive_client_secret", "password"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "Bibliotheca"), - ("external_account_id", "1234"), - ("password", "password"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", "collection1"), - ("protocol", "Axis 360"), - ("username", "user"), - ("password", "password"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() - ) - assert response.uri == INCOMPLETE_CONFIGURATION.uri - - def test_collections_post_create( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - db = settings_ctrl_fixture.ctrl.db + def test_collections_post_create(self, admin_ctrl_fixture: AdminControllerFixture): + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + db = admin_ctrl_fixture.ctrl.db l1 = db.library( name="Library 1", short_name="L1", @@ -374,7 +328,7 @@ def test_collections_post_create( short_name="L3", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "New Collection"), @@ -395,18 +349,20 @@ def test_collections_post_create( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) + assert isinstance(response, Response) assert response.status_code == 201 # The collection was created and configured properly. 
- collection = get_one( - settings_ctrl_fixture.ctrl.db.session, Collection, name="New Collection" - ) + collection = Collection.by_name(db.session, name="New Collection") assert isinstance(collection, Collection) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) assert "New Collection" == collection.name - assert "acctid" == collection.external_account_id + assert ( + "acctid" + == collection.integration_configuration.settings_dict["external_account_id"] + ) assert ( "username" == collection.integration_configuration.settings_dict[ @@ -442,12 +398,12 @@ def test_collections_post_create( assert "l2_ils" == l2_settings.settings_dict["ils_name"] # This collection will be a child of the first collection. - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "Child Collection"), ("protocol", "Overdrive"), - ("parent_id", str(collection.id)), + ("parent_id", str(collection.integration_configuration.id)), ( "libraries", json.dumps([{"short_name": "L3", "ils_name": "l3_ils"}]), @@ -456,18 +412,20 @@ def test_collections_post_create( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) + assert isinstance(response, Response) assert response.status_code == 201 # The collection was created and configured properly. - child = get_one( - settings_ctrl_fixture.ctrl.db.session, Collection, name="Child Collection" - ) + child = Collection.by_name(db.session, name="Child Collection") assert isinstance(child, Collection) - assert child.id == int(response.response[0]) + assert child.integration_configuration.id == int(response.get_data()) assert "Child Collection" == child.name - assert "child-acctid" == child.external_account_id + assert ( + "child-acctid" + == child.integration_configuration.settings_dict["external_account_id"] + ) # The settings that are inherited from the parent weren't set. assert "username" not in child.integration_configuration.settings_dict @@ -481,23 +439,23 @@ def test_collections_post_create( assert l3_settings is not None assert "l3_ils" == l3_settings.settings_dict["ils_name"] - def test_collections_post_edit( - self, settings_ctrl_fixture: SettingsControllerFixture - ): + def test_collections_post_edit(self, admin_ctrl_fixture: AdminControllerFixture): # The collection exists. 
- collection = settings_ctrl_fixture.ctrl.db.collection( + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + db = admin_ctrl_fixture.ctrl.db + collection = db.collection( name="Collection 1", protocol=ExternalIntegration.OVERDRIVE ) - l1 = settings_ctrl_fixture.ctrl.db.library( + l1 = db.library( name="Library 1", short_name="L1", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.OVERDRIVE), ("external_account_id", "1234"), @@ -512,11 +470,12 @@ def test_collections_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The collection has been changed. assert "user2" == collection.integration_configuration.settings_dict.get( @@ -540,10 +499,10 @@ def test_collections_post_edit( assert l1_settings is not None assert "the_ils" == l1_settings.settings_dict.get("ils_name") - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.OVERDRIVE), ("external_account_id", "1234"), @@ -554,11 +513,12 @@ def test_collections_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The collection is the same. assert "user2" == collection.integration_configuration.settings_dict.get( @@ -573,42 +533,41 @@ def test_collections_post_edit( # have been deleted. 
assert collection.integration_configuration.library_configurations == [] - parent = settings_ctrl_fixture.ctrl.db.collection( - name="Parent", protocol=ExternalIntegration.OVERDRIVE - ) + parent = db.collection(name="Parent", protocol=ExternalIntegration.OVERDRIVE) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.OVERDRIVE), - ("parent_id", str(parent.id)), + ("parent_id", str(parent.integration_configuration.id)), ("external_account_id", "1234"), ("libraries", json.dumps([])), ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The collection now has a parent. assert parent == collection.parent - library = settings_ctrl_fixture.ctrl.db.default_library() - collection2 = settings_ctrl_fixture.ctrl.db.collection( + library = db.default_library() + collection2 = db.collection( name="Collection 2", protocol=ExternalIntegration.ODL ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection2.id)), + ("id", str(collection2.integration_configuration.id)), ("name", "Collection 2"), ("protocol", ExternalIntegration.ODL), - ("external_account_id", "1234"), + ("external_account_id", "http://test.com/feed"), ("username", "user"), ("password", "password"), ("data_source", "datasource"), @@ -628,11 +587,12 @@ def test_collections_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - settings_ctrl_fixture.ctrl.db.session.refresh(collection2) + admin_ctrl_fixture.ctrl.db.session.refresh(collection2) assert len(collection2.integration_configuration.library_configurations) == 1 # The library configuration value was correctly coerced to int assert ( @@ -642,35 +602,25 @@ def test_collections_post_edit( == 200 ) - def _base_collections_post_request(self, collection): - """A template for POST requests to the collections controller.""" - return [ - ("id", str(collection.id)), - ("name", "Collection 1"), - ("protocol", ExternalIntegration.AXIS_360), - ("external_account_id", "1234"), - ("username", "user2"), - ("password", "password"), - ("url", "http://axis.test/"), - ] - def test_collections_post_edit_library_specific_configuration( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): # The collection exists. 
- collection = settings_ctrl_fixture.ctrl.db.collection( + db = admin_ctrl_fixture.ctrl.db + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + collection = db.collection( name="Collection 1", protocol=ExternalIntegration.AXIS_360 ) - l1 = settings_ctrl_fixture.ctrl.db.library( + l1 = db.library( name="Library 1", short_name="L1", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.AXIS_360), ("external_account_id", "1234"), @@ -684,7 +634,7 @@ def test_collections_post_edit_library_specific_configuration( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 @@ -695,10 +645,10 @@ def test_collections_post_edit_library_specific_configuration( assert "14" == l1_settings.settings_dict.get("ebook_loan_duration") # Remove the connection between collection and library. - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ - ("id", str(collection.id)), + ("id", str(collection.integration_configuration.id)), ("name", "Collection 1"), ("protocol", ExternalIntegration.AXIS_360), ("external_account_id", "1234"), @@ -709,58 +659,58 @@ def test_collections_post_edit_library_specific_configuration( ] ) response = ( - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_collections() ) assert response.status_code == 200 + assert isinstance(response, Response) - assert collection.id == int(response.response[0]) + assert collection.integration_configuration.id == int(response.get_data()) # The settings associated with the collection+library were removed # when the connection between collection and library was deleted. 
assert isinstance(l1.id, int) - assert None == collection.integration_configuration.for_library(l1.id) + assert collection.integration_configuration.for_library(l1.id) is None assert [] == collection.libraries - def test_collection_delete(self, settings_ctrl_fixture: SettingsControllerFixture): - collection = settings_ctrl_fixture.ctrl.db.collection() - assert False == collection.marked_for_deletion + def test_collection_delete(self, admin_ctrl_fixture: AdminControllerFixture): + db = admin_ctrl_fixture.ctrl.db + collection = db.collection() + assert collection.marked_for_deletion is False - with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) + with admin_ctrl_fixture.request_context_with_admin("/", method="DELETE"): pytest.raises( AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_collection_settings_controller.process_delete, - collection.id, + admin_ctrl_fixture.manager.admin_collection_settings_controller.process_delete, + collection.integration_configuration.id, ) - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - response = settings_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( - collection.id + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + assert collection.integration_configuration.id is not None + response = admin_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( + collection.integration_configuration.id ) assert response.status_code == 200 + assert isinstance(response, Response) # The collection should still be available because it is not immediately deleted. # The collection will be deleted in the background by a script, but it is # now marked for deletion - fetchedCollection = get_one( - settings_ctrl_fixture.ctrl.db.session, Collection, id=collection.id - ) - assert collection == fetchedCollection - assert True == fetchedCollection.marked_for_deletion + fetched_collection = get_one(db.session, Collection, id=collection.id) + assert fetched_collection == collection + assert fetched_collection.marked_for_deletion is True def test_collection_delete_cant_delete_parent( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - parent = settings_ctrl_fixture.ctrl.db.collection( - protocol=ExternalIntegration.OVERDRIVE - ) - child = settings_ctrl_fixture.ctrl.db.collection( - protocol=ExternalIntegration.OVERDRIVE - ) + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + db = admin_ctrl_fixture.ctrl.db + parent = db.collection(protocol=ExternalIntegration.OVERDRIVE) + child = db.collection(protocol=ExternalIntegration.OVERDRIVE) child.parent = parent - with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): - response = settings_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( - parent.id + with admin_ctrl_fixture.request_context_with_admin("/", method="DELETE"): + assert parent.integration_configuration.id is not None + response = admin_ctrl_fixture.manager.admin_collection_settings_controller.process_delete( + parent.integration_configuration.id ) - assert CANNOT_DELETE_COLLECTION_WITH_CHILDREN == response + assert response == CANNOT_DELETE_COLLECTION_WITH_CHILDREN diff --git a/tests/api/admin/controller/test_custom_lists.py b/tests/api/admin/controller/test_custom_lists.py index 14ee6b90ab..e71ca13638 100644 --- a/tests/api/admin/controller/test_custom_lists.py +++ 
b/tests/api/admin/controller/test_custom_lists.py @@ -903,7 +903,7 @@ def _setup_share_locally(self, admin_librarian_fixture: AdminLibrarianFixture): shared_with = admin_librarian_fixture.ctrl.db.library("shared_with") primary_library = admin_librarian_fixture.ctrl.db.library("primary") collection1 = admin_librarian_fixture.ctrl.db.collection("c1") - primary_library.collections.append(collection1) + collection1.libraries.append(primary_library) data_source = DataSource.lookup( admin_librarian_fixture.ctrl.db.session, DataSource.LIBRARY_STAFF @@ -949,7 +949,7 @@ def test_share_locally_success( self, admin_librarian_fixture: AdminLibrarianFixture ): s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) response = self._share_locally( s.list, s.primary_library, admin_librarian_fixture ) @@ -970,11 +970,11 @@ def test_share_locally_with_invalid_entries( self, admin_librarian_fixture: AdminLibrarianFixture ): s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) # Second collection with work in list collection2 = admin_librarian_fixture.ctrl.db.collection() - s.primary_library.collections.append(collection2) + collection2.libraries.append(s.primary_library) w = admin_librarian_fixture.ctrl.db.work(collection=collection2) s.list.add_entry(w) @@ -987,7 +987,7 @@ def test_share_locally_with_invalid_entries( def test_share_locally_get(self, admin_librarian_fixture: AdminLibrarianFixture): """Does the GET method fetch shared lists""" s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) resp = self._share_locally(s.list, s.primary_library, admin_librarian_fixture) assert resp["successes"] == 1 @@ -1022,7 +1022,7 @@ def test_share_locally_get(self, admin_librarian_fixture: AdminLibrarianFixture) def test_share_locally_delete(self, admin_librarian_fixture: AdminLibrarianFixture): """Test the deleting of a lists shared status""" s = self._setup_share_locally(admin_librarian_fixture) - s.shared_with.collections.append(s.collection1) + s.collection1.libraries.append(s.shared_with) resp = self._share_locally(s.list, s.primary_library, admin_librarian_fixture) assert resp["successes"] == 1 diff --git a/tests/api/admin/controller/test_lanes.py b/tests/api/admin/controller/test_lanes.py index 6d54d245bf..577d5429e3 100644 --- a/tests/api/admin/controller/test_lanes.py +++ b/tests/api/admin/controller/test_lanes.py @@ -38,7 +38,7 @@ class TestLanesController: def test_lanes_get(self, alm_fixture: AdminLibraryManagerFixture): library = alm_fixture.ctrl.db.library() collection = alm_fixture.ctrl.db.collection() - library.collections += [collection] + collection.libraries.append(library) english = alm_fixture.ctrl.db.lane( "English", library=library, languages=["eng"] diff --git a/tests/api/admin/controller/test_patron_auth.py b/tests/api/admin/controller/test_patron_auth.py index b1925c04b4..ee375fb6e1 100644 --- a/tests/api/admin/controller/test_patron_auth.py +++ b/tests/api/admin/controller/test_patron_auth.py @@ -106,7 +106,6 @@ def test_patron_auth_services_get_with_no_services( protocols = response.get("protocols") assert isinstance(protocols, list) assert 7 == len(protocols) - assert SimpleAuthenticationProvider.__module__ == protocols[0].get("name") assert "settings" in protocols[0] assert "library_settings" in 
protocols[0] diff --git a/tests/api/admin/controller/test_search_services.py b/tests/api/admin/controller/test_search_services.py index 8b6fcebdf8..ebd470ea0a 100644 --- a/tests/api/admin/controller/test_search_services.py +++ b/tests/api/admin/controller/test_search_services.py @@ -135,7 +135,7 @@ def test_search_services_post_errors(self, settings_ctrl_fixture): session, ExternalIntegration, protocol="test", - goal=ExternalIntegration.LICENSE_GOAL, + goal="test", name="name", ) diff --git a/tests/api/admin/controller/test_settings.py b/tests/api/admin/controller/test_settings.py index 12da7a33dd..12f380ab6e 100644 --- a/tests/api/admin/controller/test_settings.py +++ b/tests/api/admin/controller/test_settings.py @@ -304,8 +304,8 @@ def settings_class(cls): == "Required field 'key' is missing." ) - config = _set_configuration_library( + result = _set_configuration_library( config, dict(short_name="short-name", key="key", value="value"), Protocol1 ) - assert config.library == library - assert config.settings_dict == dict(key="key", value="value") + assert result.library == library + assert result.settings_dict == dict(key="key", value="value") diff --git a/tests/api/mockapi/axis.py b/tests/api/mockapi/axis.py index 0b8456fe53..39adbbe62c 100644 --- a/tests/api/mockapi/axis.py +++ b/tests/api/mockapi/axis.py @@ -1,7 +1,7 @@ from sqlalchemy.orm import Session from api.axis import Axis360API -from core.model import Library, get_one_or_create +from core.model import Library from core.model.collection import Collection from core.model.configuration import ExternalIntegration from core.util.http import HTTP @@ -13,27 +13,17 @@ def mock_collection( cls, _db: Session, library: Library, name: str = "Test Axis 360 Collection" ) -> Collection: """Create a mock Axis 360 collection for use in tests.""" - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict( - external_account_id="c", - ), + collection, _ = Collection.by_name_and_protocol( + _db, name, ExternalIntegration.AXIS_360 ) - integration = collection.create_external_integration( - protocol=ExternalIntegration.AXIS_360 - ) - config = collection.create_integration_configuration( - ExternalIntegration.AXIS_360 - ) - config.settings_dict = { + collection.integration_configuration.settings_dict = { "username": "a", "password": "b", "url": "http://axis.test/", + "external_account_id": "c", } - config.for_library(library.id, create=True) - library.collections.append(collection) + if library not in collection.libraries: + collection.libraries.append(library) return collection def __init__(self, _db, collection, with_token=True, **kwargs): diff --git a/tests/api/mockapi/bibliotheca.py b/tests/api/mockapi/bibliotheca.py index 36486512da..bbeb73472f 100644 --- a/tests/api/mockapi/bibliotheca.py +++ b/tests/api/mockapi/bibliotheca.py @@ -3,7 +3,7 @@ from sqlalchemy.orm import Session from api.bibliotheca import BibliothecaAPI -from core.model import Library, get_one_or_create +from core.model import Library from core.model.collection import Collection from core.model.configuration import ExternalIntegration from core.util.http import HTTP @@ -16,26 +16,16 @@ def mock_collection( self, _db: Session, library: Library, name: str = "Test Bibliotheca Collection" ) -> Collection: """Create a mock Bibliotheca collection for use in tests.""" - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict( - external_account_id="c", - ), + collection, _ = 
Collection.by_name_and_protocol( + _db, name=name, protocol=ExternalIntegration.BIBLIOTHECA ) - integration = collection.create_external_integration( - protocol=ExternalIntegration.BIBLIOTHECA - ) - config = collection.create_integration_configuration( - ExternalIntegration.BIBLIOTHECA - ) - config.settings_dict = { + collection.integration_configuration.settings_dict = { "username": "a", "password": "b", + "external_account_id": "c", } - config.for_library(library.id, create=True) - library.collections.append(collection) + if library not in collection.libraries: + collection.libraries.append(library) return collection def __init__(self, _db, collection, *args, **kwargs): diff --git a/tests/api/mockapi/enki.py b/tests/api/mockapi/enki.py index 14c7caf8b9..64ddeb8585 100644 --- a/tests/api/mockapi/enki.py +++ b/tests/api/mockapi/enki.py @@ -24,14 +24,14 @@ def __init__( assert collection is not None collection.protocol = EnkiAPI.ENKI if collection not in library.collections: - library.collections.append(collection) + collection.libraries.append(library) # Set the "Enki library ID" variable between the default library # and this Enki collection. - assert library.id is not None + library_config = collection.integration_configuration.for_library(library) + assert library_config is not None DatabaseTransactionFixture.set_settings( - collection.integration_configuration.for_library(library.id, create=True), - **{self.ENKI_LIBRARY_ID_KEY: "c"} + library_config, **{self.ENKI_LIBRARY_ID_KEY: "c"} ) _db.commit() diff --git a/tests/api/mockapi/opds_for_distributors.py b/tests/api/mockapi/opds_for_distributors.py index 038bb5de05..d27417a17c 100644 --- a/tests/api/mockapi/opds_for_distributors.py +++ b/tests/api/mockapi/opds_for_distributors.py @@ -1,7 +1,7 @@ from sqlalchemy.orm import Session from api.opds_for_distributors import OPDSForDistributorsAPI -from core.model import Library, get_one_or_create +from core.model import Library from core.model.collection import Collection from core.util.http import HTTP from tests.core.mock import MockRequestsResponse @@ -20,25 +20,17 @@ def mock_collection( :param _db: Database session. :param name: A name for the collection. 
""" - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict( - external_account_id="http://opds", - ), + collection, _ = Collection.by_name_and_protocol( + _db, name=name, protocol=OPDSForDistributorsAPI.label() ) - integration = collection.create_external_integration( - protocol=OPDSForDistributorsAPI.label() + collection.integration_configuration.settings_dict = dict( + username="a", + password="b", + data_source="data_source", + external_account_id="http://opds", ) - config = collection.create_integration_configuration( - OPDSForDistributorsAPI.label() - ) - config.settings_dict = dict( - username="a", password="b", data_source="data_source" - ) - config.for_library(library.id, create=True) - library.collections.append(collection) + if library not in collection.libraries: + collection.libraries.append(library) return collection def __init__(self, _db, collection, *args, **kwargs): diff --git a/tests/api/mockapi/overdrive.py b/tests/api/mockapi/overdrive.py index e96c927eeb..b5bf23fff4 100644 --- a/tests/api/mockapi/overdrive.py +++ b/tests/api/mockapi/overdrive.py @@ -2,13 +2,12 @@ from sqlalchemy.orm import Session -from api.overdrive import OverdriveAPI, OverdriveConstants -from core.model import Library, get_one_or_create +from api.overdrive import OverdriveAPI, OverdriveLibrarySettings, OverdriveSettings +from core.model import Library from core.model.collection import Collection from core.model.configuration import ExternalIntegration from core.util.http import HTTP from tests.core.mock import MockRequestsResponse -from tests.fixtures.database import DatabaseTransactionFixture class MockOverdriveResponse: @@ -51,28 +50,24 @@ def mock_collection( ils_name: str = "e", ): """Create a mock Overdrive collection for use in tests.""" - collection, ignore = get_one_or_create( - _db, - Collection, - name=name, - create_method_kwargs=dict(external_account_id=library_id), + collection, _ = Collection.by_name_and_protocol( + _db, name=name, protocol=ExternalIntegration.OVERDRIVE ) - integration = collection.create_external_integration( - protocol=ExternalIntegration.OVERDRIVE + settings = OverdriveSettings( + external_account_id=library_id, + overdrive_website_id=website_id, + overdrive_client_key=client_key, + overdrive_client_secret=client_secret, ) - config = collection.create_integration_configuration( - ExternalIntegration.OVERDRIVE + OverdriveAPI.settings_update(collection.integration_configuration, settings) + if library not in collection.libraries: + collection.libraries.append(library) + library_settings = OverdriveLibrarySettings( + ils_name=ils_name, ) - config.settings_dict = { - OverdriveConstants.OVERDRIVE_CLIENT_KEY: client_key, - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: client_secret, - OverdriveConstants.OVERDRIVE_WEBSITE_ID: website_id, - } - library.collections.append(collection) - db = DatabaseTransactionFixture - assert library.id is not None - db.set_settings(config.for_library(library.id, create=True), ils_name=ils_name) - _db.refresh(config) + library_config = collection.integration_configuration.for_library(library.id) + assert library_config is not None + OverdriveAPI.library_settings_update(library_config, library_settings) return collection def queue_collection_token(self): diff --git a/tests/api/test_bibliotheca.py b/tests/api/test_bibliotheca.py index 55ee36a461..cf5a89108b 100644 --- a/tests/api/test_bibliotheca.py +++ b/tests/api/test_bibliotheca.py @@ -106,12 +106,6 @@ def bibliotheca_fixture( class 
TestBibliothecaAPI: - def test_external_integration(self, bibliotheca_fixture: BibliothecaAPITestFixture): - assert ( - bibliotheca_fixture.collection.external_integration - == bibliotheca_fixture.api.external_integration(object()) - ) - def test__run_self_tests( self, bibliotheca_fixture: BibliothecaAPITestFixture, diff --git a/tests/api/test_circulationapi.py b/tests/api/test_circulationapi.py index de1012ff0e..91f1d59c55 100644 --- a/tests/api/test_circulationapi.py +++ b/tests/api/test_circulationapi.py @@ -887,7 +887,9 @@ def test_borrow_hold_limit_reached( def test_fulfill_errors(self, circulation_api: CirculationAPIFixture): # Here's an open-access title. collection = circulation_api.db.collection( - protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS" + protocol=ExternalIntegration.OPDS_IMPORT, + data_source_name="OPDS", + external_account_id="http://url/", ) circulation_api.pool.open_access = True circulation_api.pool.collection = collection diff --git a/tests/api/test_controller_base.py b/tests/api/test_controller_base.py index 3e27c146da..6e20a57c10 100644 --- a/tests/api/test_controller_base.py +++ b/tests/api/test_controller_base.py @@ -274,7 +274,7 @@ def test_load_licensepools(self, circulation_fixture: CirculationControllerFixtu library = circulation_fixture.library [c1] = library.collections c2 = circulation_fixture.db.collection() - library.collections.append(c2) + c2.libraries.append(library) # Here's a Collection not affiliated with any Library. c3 = circulation_fixture.db.collection() diff --git a/tests/api/test_controller_loan.py b/tests/api/test_controller_loan.py index 7231409dd8..a46188a4a8 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/test_controller_loan.py @@ -1442,19 +1442,18 @@ def test_loan_duration_settings_impact_on_loans_and_borrow_response( data_source_name=collection_data_source_name, ) + collection.libraries.append(loan_fixture.db.default_library()) if collection_default_loan_period: - library_id = loan_fixture.db.default_library().id - assert isinstance(library_id, int) + lib_config = collection.integration_configuration.for_library( + loan_fixture.db.default_library() + ) + assert lib_config is not None DatabaseTransactionFixture.set_settings( - collection.integration_configuration.for_library( - library_id, create=True - ), + lib_config, collection.loan_period_key(), collection_default_loan_period, ) - loan_fixture.db.default_library().collections.append(collection) - def create_work_and_return_license_pool_and_loan_info(**kwargs): loan_start = kwargs.pop("loan_start", utc_now()) loan_end = kwargs.pop("loan_end", None) diff --git a/tests/api/test_controller_multilib.py b/tests/api/test_controller_multilib.py index 60488f1c51..d3c4acbf83 100644 --- a/tests/api/test_controller_multilib.py +++ b/tests/api/test_controller_multilib.py @@ -1,4 +1,4 @@ -from core.model import Collection, ExternalIntegration, get_one_or_create +from core.model import Collection, ExternalIntegration from core.opds_import import OPDSAPI from tests.fixtures.api_controller import ( CirculationControllerFixture, @@ -16,19 +16,15 @@ def make_default_libraries(_db): return [controller_fixture.db.library() for x in range(2)] def make_default_collection(_db, library): - collection, ignore = get_one_or_create( - controller_fixture.db.session, - Collection, - name=f"{controller_fixture.db.fresh_str()} (for multi-library test)", - ) - collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) - integration = 
collection.create_integration_configuration( - ExternalIntegration.OPDS_IMPORT + collection, _ = Collection.by_name_and_protocol( + _db, + f"{controller_fixture.db.fresh_str()} (for multi-library test)", + ExternalIntegration.OPDS_IMPORT, ) settings = OPDSAPI.settings_class()( external_account_id="http://url.com", data_source="OPDS" ) - OPDSAPI.settings_update(integration, settings) + OPDSAPI.settings_update(collection.integration_configuration, settings) library.collections.append(collection) return collection diff --git a/tests/api/test_controller_odl_notify.py b/tests/api/test_controller_odl_notify.py index 4f855c8f22..de510c1ce4 100644 --- a/tests/api/test_controller_odl_notify.py +++ b/tests/api/test_controller_odl_notify.py @@ -7,7 +7,7 @@ from api.odl import ODLAPI from api.odl2 import ODL2API from api.problem_details import INVALID_LOAN_FOR_ODL_NOTIFICATION, NO_ACTIVE_LOAN -from core.model import Collection, get_one_or_create +from core.model import Collection from tests.fixtures.api_controller import ControllerFixture from tests.fixtures.database import DatabaseTransactionFixture @@ -18,27 +18,17 @@ def __init__(self, db: DatabaseTransactionFixture): self.library = self.db.default_library() """Create a mock ODL collection to use in tests.""" - self.collection, ignore = get_one_or_create( - self.db.session, - Collection, - name="Test ODL Collection", - create_method_kwargs=dict( - external_account_id="http://odl", - ), + self.collection, _ = Collection.by_name_and_protocol( + self.db.session, "Test ODL Collection", ODLAPI.label() ) - integration = self.collection.create_external_integration( - protocol=self.integration_protocol() - ) - config = self.collection.create_integration_configuration( - self.integration_protocol() - ) - config.settings_dict = { + self.collection.integration_configuration.settings_dict = { "username": "a", "password": "b", "url": "http://metadata", + "external_integration_id": "http://odl", Collection.DATA_SOURCE_NAME_SETTING: "Feedbooks", } - self.library.collections.append(self.collection) + self.collection.libraries.append(self.library) self.work = self.db.work(with_license_pool=True, collection=self.collection) def setup(self, available, concurrency, left=None, expires=None): diff --git a/tests/api/test_controller_playtime_entries.py b/tests/api/test_controller_playtime_entries.py index 86a7f7b87d..36ab25ae8e 100644 --- a/tests/api/test_controller_playtime_entries.py +++ b/tests/api/test_controller_playtime_entries.py @@ -241,7 +241,7 @@ def test_api_validation(self, circulation_fixture: CirculationControllerFixture) assert response.detail == "Collection was not found in the Library." # Identifier not part of collection - library.collections.append(collection) + collection.libraries.append(library) response = circulation_fixture.manager.playtime_entries.track_playtimes( collection.id, identifier.type, identifier.identifier ) diff --git a/tests/api/test_controller_scopedsession.py b/tests/api/test_controller_scopedsession.py index f5addd55f4..66b7ef50b3 100644 --- a/tests/api/test_controller_scopedsession.py +++ b/tests/api/test_controller_scopedsession.py @@ -55,19 +55,15 @@ def make_default_collection(self, session: Session, library): """We need to create a test collection that uses the scoped session. 
""" - collection, ignore = create( + collection, _ = Collection.by_name_and_protocol( session, - Collection, - name=self.fresh_id() + " (collection for scoped session)", - ) - collection.create_external_integration(ExternalIntegration.OPDS_IMPORT) - integration = collection.create_integration_configuration( - ExternalIntegration.OPDS_IMPORT + self.fresh_id() + " (collection for scoped session)", + ExternalIntegration.OPDS_IMPORT, ) settings = OPDSAPI.settings_class()( external_account_id="http://url.com", data_source="OPDS" ) - OPDSAPI.settings_update(integration, settings) + OPDSAPI.settings_update(collection.integration_configuration, settings) library.collections.append(collection) return collection diff --git a/tests/api/test_enki.py b/tests/api/test_enki.py index 9cdbb3cbb0..811ec3e826 100644 --- a/tests/api/test_enki.py +++ b/tests/api/test_enki.py @@ -82,8 +82,12 @@ def test_enki_library_id(self, enki_test_fixture: EnkiTestFixure): assert other_library.id is not None config = enki_test_fixture.api.integration_configuration() assert config is not None + + config.libraries.append(other_library) + lib_config = config.for_library(other_library) + assert lib_config is not None DatabaseTransactionFixture.set_settings( - config.for_library(other_library.id, create=True), + lib_config, **{enki_test_fixture.api.ENKI_LIBRARY_ID_KEY: "other library id"}, ) db.session.commit() diff --git a/tests/api/test_lanes.py b/tests/api/test_lanes.py index b02782c42e..87822ded17 100644 --- a/tests/api/test_lanes.py +++ b/tests/api/test_lanes.py @@ -927,7 +927,7 @@ def test_init(self, db: DatabaseTransactionFixture): library = db.default_library() default_collection = db.default_collection() other_library_collection = db.collection() - library.collections.append(other_library_collection) + other_library_collection.libraries.append(library) # This collection is not associated with any library. unused_collection = db.collection() @@ -1111,12 +1111,13 @@ def test_constructor(self, db: DatabaseTransactionFixture): # The default library comes with a collection whose data # source is unspecified. Make another one whose data source _is_ # specified. + library = db.default_library() overdrive_collection = db.collection( "Test Overdrive Collection", protocol=ExternalIntegration.OVERDRIVE, data_source_name=DataSource.OVERDRIVE, ) - db.default_library().collections.append(overdrive_collection) + overdrive_collection.libraries.append(library) # Create another collection that is _not_ associated with this # library. It will not be used at all. @@ -1127,11 +1128,11 @@ def test_constructor(self, db: DatabaseTransactionFixture): ) # Pass in a JackpotFacets object - facets = JackpotFacets.default(db.default_library()) + facets = JackpotFacets.default(library) # The JackpotWorkList has no works of its own -- only its children # have works. - wl = JackpotWorkList(db.default_library(), facets) + wl = JackpotWorkList(library, facets) assert [] == wl.works(db.session) # Let's take a look at the children. @@ -1156,11 +1157,11 @@ def test_constructor(self, db: DatabaseTransactionFixture): # These worklists show ebooks and audiobooks from the two # collections associated with the default library. 
[ - default_ebooks, default_audio, - overdrive_ebooks, + default_ebooks, overdrive_audio, - ] = available_now + overdrive_ebooks, + ] = sorted(available_now, key=lambda x: x.display_name) assert ( "License source {OPDS} - Medium {Book} - Collection name {%s}" diff --git a/tests/api/test_opds_for_distributors.py b/tests/api/test_opds_for_distributors.py index 8d440b4b33..78be51e0b9 100644 --- a/tests/api/test_opds_for_distributors.py +++ b/tests/api/test_opds_for_distributors.py @@ -92,16 +92,6 @@ def opds_dist_api_fixture( class TestOPDSForDistributorsAPI: - def test_external_integration( - self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture - ): - assert ( - opds_dist_api_fixture.collection.external_integration - == opds_dist_api_fixture.api.external_integration( - opds_dist_api_fixture.db.session - ) - ) - def test__run_self_tests( self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture ): diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index 2d69ff793b..a9b18ab69e 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -8,7 +8,7 @@ import random from datetime import timedelta from typing import TYPE_CHECKING, Any, Dict -from unittest.mock import MagicMock, PropertyMock, create_autospec, patch +from unittest.mock import MagicMock, create_autospec, patch import pytest from requests import Response @@ -568,14 +568,6 @@ def availability_link_list(self, content): # this page) and a link to the next page. assert result == (["an availability queue"], "http://next-page/") - def test_external_integration(self, overdrive_api_fixture: OverdriveAPIFixture): - assert ( - overdrive_api_fixture.collection.external_integration - == overdrive_api_fixture.api.external_integration( - overdrive_api_fixture.db.session - ) - ) - def test_lock_in_format(self, overdrive_api_fixture: OverdriveAPIFixture): # Verify which formats do or don't need to be locked in before # fulfillment. 
@@ -2127,25 +2119,12 @@ def test_refresh_patron_access_token_is_fulfillment( patron = db.patron() patron.authorization_identifier = "barcode" credential = db.credential(patron=patron) - db.default_collection().integration_configuration.protocol = "Overdrive" - db.default_collection().external_account_id = 1 - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{ - OverdriveConstants.OVERDRIVE_CLIENT_KEY: "user", - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: "password", - OverdriveConstants.OVERDRIVE_WEBSITE_ID: "100", - }, - ) - db.default_collection().integration_configuration.for_library( - patron.library.id, create=True - ) # Mocked testing credentials encoded_auth = base64.b64encode(b"TestingKey:TestingSecret") # use a real Overdrive API - od_api = OverdriveAPI(db.session, db.default_collection()) + od_api = OverdriveAPI(db.session, overdrive_api_fixture.collection) od_api._server_nickname = OverdriveConstants.TESTING_SERVERS # but mock the request methods od_api._do_post = MagicMock() @@ -2172,18 +2151,8 @@ def test_cannot_fulfill_error_audiobook( db = overdrive_api_fixture.db patron = db.patron() patron.authorization_identifier = "barcode" - db.default_collection().integration_configuration.protocol = "Overdrive" - db.default_collection().external_account_id = 1 - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{ - OverdriveConstants.OVERDRIVE_CLIENT_KEY: "user", - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: "password", - OverdriveConstants.OVERDRIVE_WEBSITE_ID: "100", - }, - ) # use a real Overdrive API - od_api = OverdriveAPI(db.session, db.default_collection()) + od_api = OverdriveAPI(db.session, overdrive_api_fixture.collection) od_api._server_nickname = OverdriveConstants.TESTING_SERVERS od_api.get_loan = MagicMock(return_value={"isFormatLockedIn": True}) od_api.get_download_link = MagicMock(return_value=None) @@ -2207,21 +2176,8 @@ def test_no_drm_fulfillment(self, overdrive_api_fixture: OverdriveAPIFixture): patron = db.patron() work = db.work(with_license_pool=True) patron.authorization_identifier = "barcode" - db.default_collection().integration_configuration.protocol = "Overdrive" - db.default_collection().external_account_id = 1 - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - **{ - OverdriveConstants.OVERDRIVE_CLIENT_KEY: "user", - OverdriveConstants.OVERDRIVE_CLIENT_SECRET: "password", - OverdriveConstants.OVERDRIVE_WEBSITE_ID: "100", - }, - ) - db.default_collection().integration_configuration.for_library( - patron.library.id, create=True - ) - od_api = OverdriveAPI(db.session, db.default_collection()) + od_api = OverdriveAPI(db.session, overdrive_api_fixture.collection) od_api._server_nickname = OverdriveConstants.TESTING_SERVERS # Load the mock API data @@ -2297,7 +2253,8 @@ def token_post( pin = "patron_pin" # clear out any collections added before we add ours - library.collections = [] + for collection in library.collections: + collection.libraries = [] # Distinct credentials for the two OverDrive collections in which our # library has membership. 
@@ -3807,8 +3764,10 @@ def test_to_collection(self, overdrive_api_fixture: OverdriveAPIFixture): p, collection = account.to_collection(session) assert p == parent assert parent == collection.parent - assert collection.external_account_id == account.library_id - assert ExternalIntegration.LICENSE_GOAL == collection.external_integration.goal + assert ( + collection.integration_configuration.settings_dict["external_account_id"] + == account.library_id + ) assert ExternalIntegration.OVERDRIVE == collection.protocol assert Goals.LICENSE_GOAL == collection.integration_configuration.goal assert ExternalIntegration.OVERDRIVE == collection.protocol @@ -3950,19 +3909,25 @@ def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture client_secret = "cs" library_token = "lt" - parent: Collection = db.collection( + library = db.library() + parent: Collection = MockOverdriveAPI.mock_collection( + db.session, + library, name=parent_library_name, - protocol=ExternalIntegration.OVERDRIVE, - external_account_id=parent_od_library_id, + library_id=parent_od_library_id, + client_key=client_key, + client_secret=client_secret, ) - child1: Collection = db.collection( + child1: Collection = MockOverdriveAPI.mock_collection( + db.session, + library, name=child1_library_name, - protocol=ExternalIntegration.OVERDRIVE, - external_account_id=child1_advantage_library_id, + library_id=child1_advantage_library_id, ) child1.parent = parent - overdrive_api = MagicMock() - overdrive_api.get_advantage_accounts.return_value = [ + overdrive_api = MockOverdriveAPI(db.session, parent) + mock_get_advantage_accounts = MagicMock() + mock_get_advantage_accounts.return_value = [ OverdriveAdvantageAccount( parent_od_library_id, child1_advantage_library_id, @@ -3976,10 +3941,8 @@ def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture child2_token, ), ] - - overdrive_api.client_key.return_value = bytes(client_key, "utf-8") - overdrive_api.client_secret.return_value = bytes(client_secret, "utf-8") - type(overdrive_api).collection_token = PropertyMock(return_value=library_token) + overdrive_api.get_advantage_accounts = mock_get_advantage_accounts + overdrive_api._collection_token = library_token with patch( "api.overdrive.GenerateOverdriveAdvantageAccountList._create_overdrive_api" @@ -4034,6 +3997,4 @@ def test_generate_od_advantage_account_list(self, db: DatabaseTransactionFixture os.remove(output_file_path) assert last_index == 2 - overdrive_api.client_key.assert_called_once() - overdrive_api.client_secret.assert_called_once() overdrive_api.get_advantage_accounts.assert_called_once() diff --git a/tests/api/test_selftest.py b/tests/api/test_selftest.py index 18381ac08b..b7eb79f424 100644 --- a/tests/api/test_selftest.py +++ b/tests/api/test_selftest.py @@ -296,6 +296,10 @@ def _no_delivery_mechanisms_test(self): self._no_delivery_mechanisms_called = True return "1" + @property + def collection(self) -> None: + return None + mock = Mock() results = [x for x in mock._run_self_tests(MagicMock())] assert ["1"] == [x.result for x in results] diff --git a/tests/core/models/test_collection.py b/tests/core/models/test_collection.py index fb8810b9b5..a33602ab9e 100644 --- a/tests/core/models/test_collection.py +++ b/tests/core/models/test_collection.py @@ -1,9 +1,13 @@ import json +from unittest.mock import MagicMock import pytest +from sqlalchemy import select from core.config import Configuration -from core.model import create, get_one_or_create +from core.external_search import ExternalSearchIndex 
+from core.integration.goals import Goals +from core.model import get_one_or_create from core.model.circulationevent import CirculationEvent from core.model.collection import Collection from core.model.configuration import ConfigurationSetting, ExternalIntegration @@ -11,10 +15,7 @@ from core.model.customlist import CustomList from core.model.datasource import DataSource from core.model.edition import Edition -from core.model.integration import ( - IntegrationConfiguration, - IntegrationLibraryConfiguration, -) +from core.model.integration import IntegrationLibraryConfiguration from core.model.licensing import Hold, License, LicensePool, Loan from core.model.work import Work from tests.fixtures.database import DatabaseTransactionFixture @@ -48,10 +49,7 @@ def example_collection_fixture( class TestCollection: - def test_by_name_and_protocol( - self, example_collection_fixture: ExampleCollectionFixture - ): - db = example_collection_fixture.database_fixture + def test_by_name_and_protocol(self, db: DatabaseTransactionFixture): name = "A name" protocol = ExternalIntegration.OVERDRIVE key = (name, protocol) @@ -91,7 +89,22 @@ def test_by_name_and_protocol( Collection.by_name_and_protocol( db.session, name, ExternalIntegration.BIBLIOTHECA ) - assert 'Collection "A name" does not use protocol "Bibliotheca".' in str( + assert 'Integration "A name" does not use protocol "Bibliotheca".' in str( + excinfo.value + ) + + # You'll get an exception if you look up an existing integration + # but the goal doesn't match. + db.integration_configuration( + protocol=protocol, goal=Goals.DISCOVERY_GOAL, name="another name" + ) + + with pytest.raises(ValueError) as excinfo: + Collection.by_name_and_protocol( + db.session, "another name", ExternalIntegration.OVERDRIVE + ) + + assert 'Integration "another name" does not have goal "LICENSE_GOAL".' in str( excinfo.value ) @@ -119,25 +132,6 @@ def test_by_protocol(self, example_collection_fixture: ExampleCollectionFixture) c1.marked_for_deletion = True assert [test_collection] == Collection.by_protocol(db.session, overdrive).all() - def test_by_datasource(self, example_collection_fixture: ExampleCollectionFixture): - """Collections can be found by their associated DataSource""" - db = example_collection_fixture.database_fixture - c1 = db.collection(data_source_name=DataSource.GUTENBERG) - c2 = db.collection(data_source_name=DataSource.OVERDRIVE) - - # Using the DataSource name - assert {c1} == set( - Collection.by_datasource(db.session, DataSource.GUTENBERG).all() - ) - - # Using the DataSource itself - overdrive = DataSource.lookup(db.session, DataSource.OVERDRIVE) - assert {c2} == set(Collection.by_datasource(db.session, overdrive).all()) - - # A collection marked for deletion is filtered out. - c2.marked_for_deletion = True - assert 0 == Collection.by_datasource(db.session, overdrive).count() - def test_parents(self, example_collection_fixture: ExampleCollectionFixture): db = example_collection_fixture.database_fixture @@ -153,39 +147,6 @@ def test_parents(self, example_collection_fixture: ExampleCollectionFixture): c3.parent_id = c2.id assert [c2, c1] == list(c3.parents) - def test_create_external_integration( - self, example_collection_fixture: ExampleCollectionFixture - ): - # A newly created Collection has no associated ExternalIntegration. 
- db = example_collection_fixture.database_fixture - collection, ignore = get_one_or_create( - db.session, Collection, name=db.fresh_str() - ) - assert None == collection.external_integration_id - with pytest.raises(ValueError) as excinfo: - getattr(collection, "external_integration") - assert "No known external integration for collection" in str(excinfo.value) - - # We can create one with create_external_integration(). - overdrive = ExternalIntegration.OVERDRIVE - integration = collection.create_external_integration(protocol=overdrive) - assert integration.id == collection.external_integration_id - assert overdrive == integration.protocol - - # If we call create_external_integration() again we get the same - # ExternalIntegration as before. - integration2 = collection.create_external_integration(protocol=overdrive) - assert integration == integration2 - - # If we try to initialize an ExternalIntegration with a different - # protocol, we get an error. - with pytest.raises(ValueError) as excinfo: - collection.create_external_integration(protocol="blah") - assert ( - "Located ExternalIntegration, but its protocol (Overdrive) does not match desired protocol (blah)." - in str(excinfo.value) - ) - def test_get_protocol(self, db: DatabaseTransactionFixture): test_collection = db.collection() integration = test_collection.integration_configuration @@ -284,9 +245,7 @@ def test_default_loan_period( test_collection = example_collection_fixture.collection library = db.default_library() - library.collections.append(test_collection) - assert isinstance(library.id, int) - test_collection.integration_configuration.for_library(library.id, create=True) + test_collection.libraries.append(library) ebook = Edition.BOOK_MEDIUM audio = Edition.AUDIO_MEDIUM @@ -372,19 +331,20 @@ def test_explain(self, example_collection_fixture: ExampleCollectionFixture): about a Collection. """ db = example_collection_fixture.database_fixture - test_collection = example_collection_fixture.collection library = db.default_library() library.name = "The only library" library.short_name = "only one" - library.collections.append(test_collection) - test_collection.external_account_id = "id" + test_collection = example_collection_fixture.collection + test_collection.libraries.append(library) + test_collection.integration_configuration.settings_dict = { "url": "url", "username": "username", "password": "password", "setting": "value", + "external_account_id": "id", } data = test_collection.explain() @@ -392,7 +352,7 @@ def test_explain(self, example_collection_fixture: ExampleCollectionFixture): 'Name: "test collection"', 'Protocol: "Overdrive"', 'Used by library: "only one"', - 'External account ID: "id"', + 'Setting "external_account_id": "id"', 'Setting "setting": "value"', 'Setting "url": "url"', 'Setting "username": "username"', @@ -403,20 +363,21 @@ def test_explain(self, example_collection_fixture: ExampleCollectionFixture): # If the collection is the child of another collection, # its parent is mentioned. 
- child = Collection(name="Child", external_account_id="id2") + child = db.collection( + name="Child", + external_account_id="id2", + protocol=ExternalIntegration.OVERDRIVE, + ) child.parent = test_collection - - child.create_external_integration(protocol=ExternalIntegration.OVERDRIVE) - child.create_integration_configuration(protocol=ExternalIntegration.OVERDRIVE) data = child.explain() assert [ 'Name: "Child"', "Parent: test collection", 'Protocol: "Overdrive"', - 'External account ID: "id2"', + 'Setting "external_account_id": "id2"', ] == data - def test_disassociate_library( + def test_disassociate_libraries( self, example_collection_fixture: ExampleCollectionFixture ): db = example_collection_fixture.database_fixture @@ -428,60 +389,51 @@ def test_disassociate_library( other_library = db.library() collection.libraries.append(other_library) - # It has an ExternalIntegration, which has some settings. + # It has an integration, which has some settings. integration = collection.integration_configuration - DatabaseTransactionFixture.set_settings( - integration, **{"integration setting": "value2"} - ) - setting2 = integration.for_library(db.default_library().id) - DatabaseTransactionFixture.set_settings( - setting2, **{"default_library+integration setting": "value2"} - ) - setting3 = integration.for_library(other_library.id, create=True) - DatabaseTransactionFixture.set_settings( - setting3, **{"other_library+integration setting": "value3"} - ) + integration.settings_dict = {"key": "value"} + + # And it has some library-specific settings. + default_library_settings = integration.for_library(db.default_library()) + assert default_library_settings is not None + default_library_settings.settings_dict = {"a": "b"} + other_library_settings = integration.for_library(other_library) + assert other_library_settings is not None + other_library_settings.settings_dict = {"c": "d"} # Now, disassociate one of the libraries from the collection. - collection.disassociate_library(db.default_library()) + collection.libraries.remove(db.default_library()) # It's gone. assert db.default_library() not in collection.libraries assert collection not in db.default_library().collections - # Furthermore, ConfigurationSettings that configure that - # Library's relationship to this Collection's - # ExternalIntegration have been deleted. - all_settings = db.session.query(IntegrationConfiguration).all() - all_library_settings = db.session.query(IntegrationLibraryConfiguration).all() - assert setting2 not in all_library_settings + # The library-specific settings for that library have been deleted. + library_config_ids = [ + l.library_id + for l in db.session.execute( + select(IntegrationLibraryConfiguration.library_id) + ) + ] + assert db.default_library().id not in library_config_ids - # The other library is unaffected. + # But the library-specific settings for the other library are still there. assert other_library in collection.libraries - assert collection in other_library.collections - assert setting3 in all_library_settings + assert other_library.id in library_config_ids + assert collection.integration_configuration.library_configurations[ + 0 + ].settings_dict == {"c": "d"} - # As is the library-independent configuration of this Collection's - # ExternalIntegration. - assert integration in all_settings + # We now disassociate all libraries from the collection. + collection.libraries.clear() - # Calling disassociate_library again is a no-op. 
- collection.disassociate_library(db.default_library()) - assert db.default_library() not in collection.libraries + # All the library-specific settings have been deleted. + assert collection.integration_configuration.library_configurations == [] + assert collection.integration_configuration.libraries == [] + assert collection.libraries == [] - # If you somehow manage to call disassociate_library on a Collection - # that has no associated ExternalIntegration, an exception is raised. - collection.integration_configuration_id = None - with pytest.raises(ValueError) as excinfo: - collection.disassociate_library(other_library) - assert "No known integration library configuration for collection" in str( - excinfo.value - ) - - collection.external_integration_id = None - with pytest.raises(ValueError) as excinfo: - collection.disassociate_library(other_library) - assert "No known external integration for collection" in str(excinfo.value) + # The integration settings are still there. + assert collection.integration_configuration.settings_dict == {"key": "value"} def test_custom_lists(self, example_collection_fixture: ExampleCollectionFixture): db = example_collection_fixture.database_fixture @@ -611,17 +563,6 @@ def test_delete(self, example_collection_fixture: ExampleCollectionFixture): # It's associated with a library. assert db.default_library() in collection.libraries - # It has an ExternalIntegration, which has some settings. - integration = collection.external_integration - setting1 = integration.set_setting("integration setting", "value2") - setting2 = ConfigurationSetting.for_library_and_externalintegration( - db.session, - "library+integration setting", - db.default_library(), - integration, - ) - setting2.value = "value2" - # It's got a Work that has a LicensePool, which has a License, # which has a loan. work = db.work(with_license_pool=True) @@ -659,13 +600,7 @@ def test_delete(self, example_collection_fixture: ExampleCollectionFixture): # Finally, here's a mock ExternalSearchIndex so we can track when # Works are removed from the search index. - class MockExternalSearchIndex: - removed = [] - - def remove_work(self, work): - self.removed.append(work) - - index = MockExternalSearchIndex() + index = MagicMock(spec=ExternalSearchIndex) # delete() will not work on a collection that's not marked for # deletion. @@ -711,16 +646,7 @@ def remove_work(self, work): # Our search index was told to remove the first work (which no longer # has any LicensePools), but not the second. - assert [work] == index.removed - - # The collection ExternalIntegration and its settings have been deleted. - # The storage ExternalIntegration remains. - external_integrations = db.session.query(ExternalIntegration).all() - assert integration not in external_integrations - - settings = db.session.query(ConfigurationSetting).all() - for setting in (setting1, setting2): - assert setting not in settings + index.remove_work.assert_called_once_with(work) # If no search_index is passed into delete() (the default behavior), # we try to instantiate the normal ExternalSearchIndex object. Since @@ -733,22 +659,3 @@ def remove_work(self, work): # We've now deleted every LicensePool created for this test. assert 0 == db.session.query(LicensePool).count() assert [] == work2.license_pools - - -class TestCollectionForMetadataWrangler: - """Tests that requirements to the metadata wrangler's use of Collection - are being met by continued development on the Collection class. 
- - If any of these tests are failing, development will be required on the - metadata wrangler to meet the needs of the new Collection class. - """ - - def test_only_name_is_required( - self, example_collection_fixture: ExampleCollectionFixture - ): - """Test that only name is a required field on - the Collection class. - """ - db = example_collection_fixture.database_fixture - collection = create(db.session, Collection, name="banana")[0] - assert True == isinstance(collection, Collection) diff --git a/tests/core/models/test_configuration.py b/tests/core/models/test_configuration.py index 351ede607e..da04cfe462 100644 --- a/tests/core/models/test_configuration.py +++ b/tests/core/models/test_configuration.py @@ -604,16 +604,3 @@ def test_explain( # If we pass in True for include_secrets, we see the passwords. with_secrets = integration.explain(include_secrets=True) assert "password='somepass'" in with_secrets - - def test_custom_accept_header( - self, example_externalintegration_fixture: ExampleExternalIntegrationFixture - ): - db = example_externalintegration_fixture.database_fixture - - integration = db.external_integration("protocol", "goal") - # Must be empty if not set - assert integration.custom_accept_header == None - - # Must be the same value if set - integration.custom_accept_header = "custom header" - assert integration.custom_accept_header == "custom header" diff --git a/tests/core/models/test_integration_configuration.py b/tests/core/models/test_integration_configuration.py index 251487423e..4618159a39 100644 --- a/tests/core/models/test_integration_configuration.py +++ b/tests/core/models/test_integration_configuration.py @@ -1,5 +1,7 @@ +from unittest.mock import MagicMock + from core.integration.goals import Goals -from core.model import create +from core.model import Library, create from core.model.integration import IntegrationConfiguration from tests.fixtures.database import DatabaseTransactionFixture @@ -19,16 +21,18 @@ def test_for_library(seslf, db: DatabaseTransactionFixture): # No library ID provided assert config.for_library(None) is None + # Library has no ID + mock_library = MagicMock(spec=Library) + mock_library.id = None + assert config.for_library(mock_library) is None + # No library config exists assert config.for_library(library.id) is None - # This should create a new config - libconfig = config.for_library(library.id, create=True) - assert libconfig is not None - assert libconfig.library == library - assert libconfig.parent == config - assert libconfig.settings_dict == {} + config.libraries.append(library) + + # Library config exists + libconfig = config.for_library(library.id) - # The same config is returned henceforth - assert config.for_library(library.id) == libconfig - assert config.for_library(library.id, create=True) == libconfig + # The same config is returned for the same library + assert config.for_library(library) is libconfig diff --git a/tests/core/models/test_library.py b/tests/core/models/test_library.py index 49b98e6cc0..73b16d261e 100644 --- a/tests/core/models/test_library.py +++ b/tests/core/models/test_library.py @@ -134,7 +134,7 @@ def test_estimated_holdings_by_language(self, db: DatabaseTransactionFixture): # If we remove the default collection from the default library, # it loses all its works. 
- db.default_library().collections = [] + db.default_collection().libraries = [] estimate = library.estimated_holdings_by_language(include_open_access=False) assert dict() == estimate diff --git a/tests/core/models/test_listeners.py b/tests/core/models/test_listeners.py index 508aa87046..da6887c84a 100644 --- a/tests/core/models/test_listeners.py +++ b/tests/core/models/test_listeners.py @@ -189,35 +189,6 @@ def test_lane_change_updates_configuration( lane.add_genre("Science Fiction") data.mock.assert_was_called() - def test_configuration_relevant_collection_change_updates_configuration( - self, - example_site_configuration_changed_fixture: ExampleSiteConfigurationHasChangedFixture, - ): - """When you add a relevant item to a SQLAlchemy collection, such as - adding a Collection to library.collections, - site_configuration_has_changed is called. - """ - - data = example_site_configuration_changed_fixture - session = data.transaction.session - # Creating a collection calls the method via an 'after_insert' - # event on Collection. - library = data.transaction.default_library() - collection = data.transaction.collection() - session.commit() - data.mock.assert_was_called() - - # Adding the collection to the library calls the method via - # an 'append' event on Collection.libraries. - library.collections.append(collection) - session.commit() - data.mock.assert_was_called() - - # NOTE: test_work.py:TestWork.test_reindex_on_availability_change - # tests the circumstances under which a database change - # requires that a Work's entry in the search index be - # recreated. - def _set_property(object, value, property_name): setattr(object, property_name, value) diff --git a/tests/core/models/test_work.py b/tests/core/models/test_work.py index 58c26d7580..ae96d2b267 100644 --- a/tests/core/models/test_work.py +++ b/tests/core/models/test_work.py @@ -1002,7 +1002,7 @@ def test_to_search_document(self, db: DatabaseTransactionFixture): # for the same Work. collection1 = db.default_collection() collection2 = db.collection() - db.default_library().collections.append(collection2) + collection2.libraries.append(db.default_library()) pool2 = db.licensepool(edition=edition, collection=collection2) pool2.work_id = work.id pool2.licenses_available = 0 @@ -1714,8 +1714,8 @@ def test_active_license_pool_accounts_for_library( l2 = db.library() c1 = db.collection() c2 = db.collection() - l1.collections = [c1] - l2.collections = [c2] + c1.libraries = [l1] + c2.libraries = [l2] work: Work = db.work(presentation_edition=db.edition()) lp1: LicensePool = db.licensepool( work.presentation_edition, diff --git a/tests/core/test_external_search.py b/tests/core/test_external_search.py index 4dcc6e7b41..8c82273c42 100644 --- a/tests/core/test_external_search.py +++ b/tests/core/test_external_search.py @@ -1326,7 +1326,7 @@ def _populate_works( # Each work has one LicensePool associated with the default # collection. result.collection1 = transaction.default_collection() - result.collection1.name = "Collection 1 - ACB" + result.collection1.integration_configuration.name = "Collection 1 - ACB" [result.a1] = result.a.license_pools [result.b1] = result.b.license_pools [result.c1] = result.c.license_pools @@ -3428,7 +3428,8 @@ def test_constructor(self, filter_fixture: FilterFixture): # If the library has no collections, the collection filter # will filter everything out. 
- transaction.default_library().collections = [] + transaction.default_collection().libraries = [] + assert transaction.default_library().collections == [] library_filter = Filter(collections=transaction.default_library()) assert [] == library_filter.collection_ids @@ -3678,7 +3679,7 @@ def scoring_functions(self, filter): # library. library2 = transaction.library() collection2 = transaction.collection() - library2.collections.append(collection2) + collection2.libraries.append(library2) for_other_library = WorkList() for_other_library.initialize(library2) for_default_library.append_child(for_other_library) diff --git a/tests/core/test_lane.py b/tests/core/test_lane.py index 8b1b4fc1e5..6e588794bc 100644 --- a/tests/core/test_lane.py +++ b/tests/core/test_lane.py @@ -44,7 +44,7 @@ tuple_to_numericrange, ) from core.model.collection import Collection -from core.model.configuration import ConfigurationSetting, ExternalIntegration +from core.model.configuration import ConfigurationAttributeValue, ExternalIntegration from core.problem_details import INVALID_INPUT from core.util.datetime_helpers import utc_now from core.util.opds_writer import OPDSFeed @@ -304,7 +304,7 @@ def _configure_facets(library, enabled, default): library._settings = None def test_facet_groups(self, db: DatabaseTransactionFixture): - db.default_collection().data_source = DataSource.AMAZON + db.default_collection().data_source = DataSource.AMAZON # type: ignore[assignment] facets = Facets( db.default_library(), Facets.COLLECTION_FULL, @@ -2571,14 +2571,10 @@ def test_worklist_for_resultset_no_holds_allowed( w1.license_pools[0].licenses_available = 0 collection1: Collection = w1.license_pools[0].collection - cs1 = ConfigurationSetting( - library_id=db.default_library().id, - external_integration_id=collection1.external_integration_id, - key=ExternalIntegration.DISPLAY_RESERVES, - _value="no", - ) - db.session.add(cs1) - db.session.commit() + integration1 = collection1.integration_configuration + integration1.settings_dict = { + ExternalIntegration.DISPLAY_RESERVES: ConfigurationAttributeValue.NOVALUE.value + } class MockHit: def __init__(self, work_id, has_last_update=False): @@ -2612,7 +2608,7 @@ def __contains__(self, k): # Work1 now has 2 licensepools, one of which has availability alternate_collection = db.collection() - db.default_library().collections.append(alternate_collection) + alternate_collection.libraries.append(db.default_library()) alternate_w1_lp: LicensePool = db.licensepool( w1.presentation_edition, collection=alternate_collection ) @@ -2625,13 +2621,9 @@ def __contains__(self, k): assert [[w2], [w1]] == m(db.session, [[hit2], [hit1]]) # Now both collections are restricted and have no availability - cs2 = ConfigurationSetting( - library_id=db.default_library().id, - external_integration_id=alternate_collection.external_integration_id, - key=ExternalIntegration.DISPLAY_RESERVES, - _value="no", - ) - db.session.add(cs2) + alternate_collection.integration_configuration.settings_dict = { + ExternalIntegration.DISPLAY_RESERVES: ConfigurationAttributeValue.NOVALUE.value + } assert [[w2], []] == m(db.session, [[hit2], [hit1]]) # Both restricted but one has availability @@ -2863,14 +2855,17 @@ def test_works_from_database_end_to_end(self, db: DatabaseTransactionFixture): # A DatabaseBackedWorkList will only find books licensed # through one of its collections. 
+ db.default_collection().libraries = [] collection = db.collection() - db.default_library().collections = [collection] + collection.libraries.append(db.default_library()) + assert db.default_library().collections == [collection] wl.initialize(db.default_library()) assert 0 == wl.works_from_database(db.session).count() # If a DatabaseBackedWorkList has no collections, it has no # books. - db.default_library().collections = [] + collection.libraries = [] + assert db.default_library().collections == [] wl.initialize(db.default_library()) assert 0 == wl.works_from_database(db.session).count() diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index dc50b228d7..54b03dd196 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -10,7 +10,6 @@ from api.circulation import CirculationAPI, FulfillmentInfo from api.circulation_exceptions import CannotFulfill from core.model import ( - ConfigurationSetting, Contribution, Contributor, DataSource, @@ -96,9 +95,13 @@ def opds2_importer_fixture( ) -> TestOPDS2ImporterFixture: data = TestOPDS2ImporterFixture() data.transaction = db - data.collection = db.collection(protocol=OPDS2API.label()) + data.collection = db.collection( + protocol=OPDS2API.label(), + data_source_name="OPDS 2.0 Data Source", + external_account_id="http://opds2.example.org/feed", + ) data.library = db.default_library() - data.library.collections.append(data.collection) + data.collection.libraries.append(data.library) data.data_source = DataSource.lookup( db.session, "OPDS 2.0 Data Source", autocreate=True ) @@ -453,27 +456,25 @@ def test_auth_token_feed( imported_editions, pools, works, failures = data.importer.import_from_feed( content ) - setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, data.collection.external_integration + token_endpoint = data.collection.integration_configuration.context.get( + ExternalIntegration.TOKEN_AUTH ) # Did the token endpoint get stored correctly? 
- assert setting.value == "http://example.org/auth?userName={patron_id}" + assert token_endpoint == "http://example.org/auth?userName={patron_id}" class Opds2ApiFixture: def __init__(self, db: DatabaseTransactionFixture, mock_http: MagicMock): self.patron = db.patron() self.collection: Collection = db.collection( - protocol=ExternalIntegration.OPDS2_IMPORT, data_source_name="test" - ) - self.integration = self.collection.create_external_integration( - ExternalIntegration.OPDS2_IMPORT - ) - self.setting = ConfigurationSetting.for_externalintegration( - ExternalIntegration.TOKEN_AUTH, self.integration + protocol=ExternalIntegration.OPDS2_IMPORT, + data_source_name="test", + external_account_id="http://opds2.example.org/feed", ) - self.setting.value = "http://example.org/token?userName={patron_id}" + self.collection.integration_configuration.context = { + ExternalIntegration.TOKEN_AUTH: "http://example.org/token?userName={patron_id}" + } self.mock_response = MagicMock(spec=Response) self.mock_response.status_code = 200 @@ -525,7 +526,7 @@ def test_opds2_with_authentication_tokens( work = works[0] - api = CirculationAPI(db.session, db.default_library()) + api = CirculationAPI(db.session, opds2_importer_fixture.library) patron = db.patron() # Borrow the book from the library diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 171b644f06..72ff75c013 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -1,7 +1,8 @@ +from __future__ import annotations + import random from functools import partial from io import StringIO -from typing import Optional from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -18,10 +19,10 @@ SAMLNameIDFormat, SAMLSubject, ) -from core.config import IntegrationException from core.coverage import CoverageFailure -from core.metadata_layer import CirculationData, LinkData, Metadata +from core.metadata_layer import LinkData from core.model import ( + Collection, Contributor, CoverageRecord, DataSource, @@ -241,9 +242,12 @@ def test_use_dcterm_identifier_as_id_with_id_and_dcterms_identifier( opds_importer_fixture.db.session, ) - collection_to_test = db.default_collection() - collection_to_test.primary_identifier_source = ( - ExternalIntegration.DCTERMS_IDENTIFIER + collection_to_test = db.collection( + settings={ + "primary_identifier_source": ExternalIntegration.DCTERMS_IDENTIFIER, + }, + data_source_name="OPDS", + external_account_id="http://root.uri", ) importer = opds_importer_fixture.importer(collection=collection_to_test) @@ -289,7 +293,6 @@ def test_use_id_with_existing_dcterms_identifier( ) collection_to_test = db.default_collection() - collection_to_test.primary_identifier_source = None importer = opds_importer_fixture.importer(collection=collection_to_test) metadata, failures = importer.extract_feed_data( @@ -1366,188 +1369,6 @@ def test_update_work_for_edition_having_multiple_license_pools( assert lp.work == work assert lp2.work == work - def test_assert_importable_content(self, db: DatabaseTransactionFixture): - session = db.session - collection = db.collection( - protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS" - ) - - class Mock(OPDSImporter): - """An importer that may or may not be able to find - real open-access content. - """ - - # Set this variable to control whether any open-access links - # are "found" in the OPDS feed. 
- open_access_links: Optional[list] = None - - extract_feed_data_called_with = None - _is_open_access_link_called_with = [] - - def extract_feed_data(self, feed, feed_url): - # There's no need to return realistic metadata, - # since _open_access_links is also mocked. - self.extract_feed_data_called_with = (feed, feed_url) - return {"some": "metadata"}, {} - - def _open_access_links(self, metadatas): - self._open_access_links_called_with = metadatas - yield from self.open_access_links - - def _is_open_access_link(self, url, type): - self._is_open_access_link_called_with.append((url, type)) - return False - - class NoLinks(Mock): - "Simulate an OPDS feed that contains no open-access links." - open_access_links = [] - - # We won't be making any HTTP requests, even simulated ones. - do_get = MagicMock() - - # Here, there are no links at all. - importer = NoLinks(session, collection, do_get) - with pytest.raises(IntegrationException) as excinfo: - importer.assert_importable_content("feed", "url") - assert "No open-access links were found in the OPDS feed." in str(excinfo.value) - - # We extracted 'metadata' from the feed and URL. - assert ("feed", "url") == importer.extract_feed_data_called_with - - # But there were no open-access links in the 'metadata', - # so we had nothing to check. - assert [] == importer._is_open_access_link_called_with - - oa = Hyperlink.OPEN_ACCESS_DOWNLOAD - - class BadLinks(Mock): - """Simulate an OPDS feed that contains open-access links that - don't actually work, because _is_open_access always returns False - """ - - open_access_links = [ - LinkData(href="url1", rel=oa, media_type="text/html"), - LinkData(href="url2", rel=oa, media_type="application/json"), - LinkData( - href="I won't be tested", rel=oa, media_type="application/json" - ), - ] - - bad_links_importer = BadLinks(session, collection, do_get) - with pytest.raises(IntegrationException) as excinfo: - bad_links_importer.assert_importable_content( - "feed", "url", max_get_attempts=2 - ) - assert ( - "Was unable to GET supposedly open-access content such as url2 (tried 2 times)" - in str(excinfo.value) - ) - - # We called _is_open_access_link on the first and second links - # found in the 'metadata', but failed both times. - # - # We didn't bother with the third link because max_get_attempts was - # set to 2. - try1, try2 = bad_links_importer._is_open_access_link_called_with - assert ("url1", "text/html") == try1 - assert ("url2", "application/json") == try2 - - class GoodLink(Mock): - """Simulate an OPDS feed that contains two bad open-access links - and one good one. - """ - - _is_open_access_link_called_with = [] - open_access_links = [ - LinkData(href="bad", rel=oa, media_type="text/html"), - LinkData(href="good", rel=oa, media_type="application/json"), - LinkData(href="also bad", rel=oa, media_type="text/html"), - ] - - def _is_open_access_link(self, url, type): - self._is_open_access_link_called_with.append((url, type)) - if url == "bad": - return False - return "this is a book" - - good_link_importer = GoodLink(session, collection, do_get) - result = good_link_importer.assert_importable_content( - "feed", "url", max_get_attempts=5 - ) - assert True == result - - # The first link didn't work, but the second one did, - # so we didn't try the third one. 
- try1, try2 = good_link_importer._is_open_access_link_called_with - assert ("bad", "text/html") == try1 - assert ("good", "application/json") == try2 - - def test__open_access_links(self, db: DatabaseTransactionFixture): - session = db.session - - """Test our ability to find open-access links in Metadata objects.""" - m = OPDSImporter._open_access_links - - # No Metadata objects, no links. - assert [] == list(m([])) - - # This Metadata has no associated CirculationData and will be - # ignored. - no_circulation = Metadata(DataSource.GUTENBERG) - - # This CirculationData has no open-access links, so it will be - # ignored. - circulation = CirculationData(DataSource.GUTENBERG, db.identifier()) - no_open_access_links = Metadata(DataSource.GUTENBERG, circulation=circulation) - - # This has three links, but only the open-access links - # will be returned. - circulation = CirculationData(DataSource.GUTENBERG, db.identifier()) - oa = Hyperlink.OPEN_ACCESS_DOWNLOAD - for rel in [oa, Hyperlink.IMAGE, oa]: - circulation.links.append(LinkData(href=db.fresh_url(), rel=rel)) - two_open_access_links = Metadata(DataSource.GUTENBERG, circulation=circulation) - - oa_only = [x for x in circulation.links if x.rel == oa] - assert oa_only == list( - m([no_circulation, two_open_access_links, no_open_access_links]) - ) - - def test__is_open_access_link( - self, db: DatabaseTransactionFixture, opds_importer_fixture: OPDSImporterFixture - ): - session = db.session - http = DummyHTTPClient() - - # We only check that the response entity-body isn't tiny. 11 - # kilobytes of data is enough. - enough_content = "a" * (1024 * 11) - - # Set up an HTTP response that looks enough like a book - # to convince _is_open_access_link. - http.queue_response(200, content=enough_content) - monitor = opds_importer_fixture.importer(http_get=http.do_get) - - url = db.fresh_url() - type = "text/html" - assert "Found a book-like thing at %s" % url == monitor._is_open_access_link( - url, type - ) - - # We made a GET request to the appropriate URL. - assert url == http.requests.pop() - - # This HTTP response looks OK but it's not big enough to be - # any kind of book. - http.queue_response(200, content="not enough content") - monitor = opds_importer_fixture.importer(http_get=http.do_get) - assert False == monitor._is_open_access_link(url, None) - - # This HTTP response is clearly an error page. 
- http.queue_response(404, content=enough_content) - monitor = opds_importer_fixture.importer(http_get=http.do_get) - assert False == monitor._is_open_access_link(url, None) - def test_import_open_access_audiobook( self, opds_importer_fixture: OPDSImporterFixture ): @@ -1623,8 +1444,9 @@ def _wayfless_circulation_api( "OPDS collection with a WAYFless acquisition link", ExternalIntegration.OPDS_IMPORT, data_source_name="test", + external_account_id="http://wayfless.example.com/feed", ) - library.collections.append(collection) + collection.libraries.append(library) DatabaseTransactionFixture.set_settings( collection.integration_configuration, @@ -1776,6 +1598,25 @@ def test_combine_present_value_extends_dictionary(self): ) +class OPDSImportMonitorFixture: + def collection(self, feed_url: str | None = None) -> Collection: + feed_url = feed_url or "http://fake.opds/" + settings = {"external_account_id": feed_url, "data_source": "OPDS"} + return self.db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, settings=settings + ) + + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + + +@pytest.fixture() +def opds_import_monitor_fixture( + db: DatabaseTransactionFixture, +) -> OPDSImportMonitorFixture: + return OPDSImportMonitorFixture(db) + + class TestOPDSImportMonitor: def test_constructor(self, db: DatabaseTransactionFixture): session = db.session @@ -1786,49 +1627,45 @@ def test_constructor(self, db: DatabaseTransactionFixture): "OPDSImportMonitor can only be run in the context of a Collection." in str(excinfo.value) ) - - db.default_collection().integration_configuration.protocol = ( - ExternalIntegration.OVERDRIVE - ) + c1 = db.collection(protocol=ExternalIntegration.OVERDRIVE) with pytest.raises(ValueError) as excinfo: - OPDSImportMonitor(session, db.default_collection(), OPDSImporter) + OPDSImportMonitor(session, c1, OPDSImporter) assert ( - "Collection Default Collection is configured for protocol Overdrive, not OPDS Import." + f"Collection {c1.name} is configured for protocol Overdrive, not OPDS Import." in str(excinfo.value) ) - db.default_collection().integration_configuration.protocol = ( - ExternalIntegration.OPDS_IMPORT - ) - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, "data_source", None + c2 = db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, settings={"data_source": None} ) with pytest.raises(ValueError) as excinfo: - OPDSImportMonitor(session, db.default_collection(), OPDSImporter) - assert "Collection Default Collection has no associated data source." in str( + OPDSImportMonitor(session, c2, OPDSImporter) + assert f"Collection {c2.name} has no associated data source." 
in str( excinfo.value ) - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, "data_source", "OPDS" + c3 = db.collection( + protocol=ExternalIntegration.OPDS_IMPORT, + settings={ + "data_source": "OPDS", + "external_account_id": "https://opds.import.com/feed?size=100", + }, ) - db.default_collection().external_account_id = ( - "https://opds.import.com/feed?size=100" - ) - monitor = OPDSImportMonitor(session, db.default_collection(), OPDSImporter) + monitor = OPDSImportMonitor(session, c3, OPDSImporter) assert monitor._feed_base_url == "https://opds.import.com/" - def test_get(self, db: DatabaseTransactionFixture): + def test_get( + self, + db: DatabaseTransactionFixture, + ): session = db.session ## Test whether relative urls work - DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, "data_source", "OPDS" - ) - db.default_collection().external_account_id = ( - "https://opds.import.com:9999/feed" + collection = db.collection( + external_account_id="https://opds.import.com:9999/feed", + data_source_name="OPDS", ) - monitor = OPDSImportMonitor(session, db.default_collection(), OPDSImporter) + monitor = OPDSImportMonitor(session, collection, OPDSImporter) with patch("core.opds_import.HTTP.get_with_timeout") as mock_get: monitor._get("/absolute/path", {}) @@ -1842,89 +1679,25 @@ def test_get(self, db: DatabaseTransactionFixture): "https://opds.import.com:9999/relative/path", ) - def test_external_integration(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) - - monitor = OPDSImportMonitor( - session, - db.default_collection(), - import_class=OPDSImporter, - ) - assert ( - db.default_collection().external_integration - == monitor.external_integration(session) - ) - - def test__run_self_tests(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) - """Verify the self-tests of an OPDS collection.""" - - class MockImporter(OPDSImporter): - def assert_importable_content(self, content, url): - self.assert_importable_content_called_with = (content, url) - return "looks good" - - class Mock(OPDSImportMonitor): - follow_one_link_called_with = [] - - # First we will get the first page of the OPDS feed. - def follow_one_link(self, url): - self.follow_one_link_called_with.append(url) - return ([], "some content") - - feed_url = db.fresh_url() - db.default_collection().external_account_id = feed_url - monitor = Mock(session, db.default_collection(), import_class=MockImporter) - [first_page, found_content] = monitor._run_self_tests(session) - expect = "Retrieve the first page of the OPDS feed (%s)" % feed_url - assert expect == first_page.name - assert True == first_page.success - assert ([], "some content") == first_page.result - - # follow_one_link was called once. - [link] = monitor.follow_one_link_called_with - assert monitor.feed_url == link - - # Then, assert_importable_content was called on the importer. 
- assert "Checking for importable content" == found_content.name - assert True == found_content.success - assert ( - "some content", - feed_url, - ) == monitor.importer.assert_importable_content_called_with # type: ignore[attr-defined] - assert "looks good" == found_content.result - - def test_hook_methods(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) + def test_hook_methods(self, db: DatabaseTransactionFixture): """By default, the OPDS URL and data source used by the importer come from the collection configuration. """ + collection = db.collection( + external_account_id="http://url/", data_source_name="OPDS" + ) monitor = OPDSImportMonitor( - session, - db.default_collection(), + db.session, + collection, import_class=OPDSImporter, ) - assert db.default_collection().external_account_id == monitor.opds_url( - db.default_collection() - ) - assert db.default_collection().data_source == monitor.data_source( - db.default_collection() - ) + assert collection.data_source == monitor.data_source(collection) - def test_feed_contains_new_data(self, opds_importer_fixture: OPDSImporterFixture): + def test_feed_contains_new_data( + self, + opds_importer_fixture: OPDSImporterFixture, + ): data, db, session = ( opds_importer_fixture, opds_importer_fixture.db, @@ -1937,21 +1710,25 @@ class MockOPDSImportMonitor(OPDSImportMonitor): def _get(self, url, headers): return 200, {"content-type": AtomFeed.ATOM_TYPE}, feed + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name + ) monitor = OPDSImportMonitor( session, - db.default_collection(), + collection, import_class=OPDSImporter, ) timestamp = monitor.timestamp() # Nothing has been imported yet, so all data is new. - assert True == monitor.feed_contains_new_data(feed) - assert None == timestamp.start + assert monitor.feed_contains_new_data(feed) is True + assert timestamp.start is None # Now import the editions. monitor = MockOPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=OPDSImporter, ) monitor.run() @@ -1961,10 +1738,10 @@ def _get(self, url, headers): # The timestamp has been updated, although unlike most # Monitors the timestamp is purely informational. - assert timestamp.finish != None + assert timestamp.finish is not None editions = session.query(Edition).all() - data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) + data_source = DataSource.lookup(session, data_source_name) # If there are CoverageRecords that record work are after the updated # dates, there's nothing new. @@ -1972,7 +1749,7 @@ def _get(self, url, headers): editions[0], data_source, CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) record.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) @@ -1980,22 +1757,22 @@ def _get(self, url, headers): editions[1], data_source, CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) record2.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) - assert False == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is False # If the monitor is set up to force reimport, it doesn't # matter that there's nothing new--we act as though there is. 
monitor.force_reimport = True - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True monitor.force_reimport = False # If an entry was updated after the date given in that entry's # CoverageRecord, there's new data. record2.timestamp = datetime_utc(1970, 1, 1, 1, 1, 1) - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True # If a CoverageRecord is a transient failure, we try again # regardless of whether it's been updated. @@ -2003,16 +1780,16 @@ def _get(self, url, headers): r.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) r.exception = "Failure!" r.status = CoverageRecord.TRANSIENT_FAILURE - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True # If a CoverageRecord is a persistent failure, we don't try again... for r in [record, record2]: r.status = CoverageRecord.PERSISTENT_FAILURE - assert False == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is False # ...unless the feed updates. record.timestamp = datetime_utc(1970, 1, 1, 1, 1, 1) - assert True == monitor.feed_contains_new_data(feed) + assert monitor.feed_contains_new_data(feed) is True def test_follow_one_link(self, opds_importer_fixture: OPDSImporterFixture): data, db, session = ( @@ -2020,10 +1797,13 @@ def test_follow_one_link(self, opds_importer_fixture: OPDSImporterFixture): opds_importer_fixture.db, opds_importer_fixture.db.session, ) - + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name + ) monitor = OPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=OPDSImporter, ) feed = data.content_server_mini_feed @@ -2046,14 +1826,14 @@ def follow(): assert 2 == session.query(Edition).count() editions = session.query(Edition).all() - data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) + data_source = DataSource.lookup(session, data_source_name) for edition in editions: record, ignore = CoverageRecord.add_for( edition, data_source, CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) record.timestamp = datetime_utc(2016, 1, 1, 1, 1, 1) @@ -2092,14 +1872,17 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): opds_importer_fixture.db.session, ) # Check coverage records are created. 
- + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://root-url/index.xml", + data_source_name=data_source_name, + ) monitor = OPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=DoomedOPDSImporter, ) - db.default_collection().external_account_id = "http://root-url/index.xml" - data_source = DataSource.lookup(session, DataSource.OA_CONTENT_SERVER) + data_source = DataSource.lookup(session, data_source_name) feed = data.content_server_mini_feed @@ -2120,10 +1903,10 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): editions[0].primary_identifier, data_source, operation=CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) assert CoverageRecord.SUCCESS == record.status - assert None == record.exception + assert record.exception is None # The edition's primary identifier has some cover links whose # relative URL have been resolved relative to the Collection's @@ -2156,7 +1939,7 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): identifier, data_source, operation=CoverageRecord.IMPORT_OPERATION, - collection=db.default_collection(), + collection=collection, ) assert "Utter failure!" in failure.exception @@ -2164,13 +1947,7 @@ def test_import_one_feed(self, opds_importer_fixture: OPDSImporterFixture): # import_one_feed assert 2 == len(failures) - def test_run_once(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, - ) - + def test_run_once(self, db: DatabaseTransactionFixture): class MockOPDSImportMonitor(OPDSImportMonitor): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) @@ -2188,9 +1965,14 @@ def import_one_feed(self, feed): self.imports.append(feed) return [object(), object()], {"identifier": "Failure"} + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name + ) + monitor = MockOPDSImportMonitor( - session, - collection=db.default_collection(), + db.session, + collection=collection, import_class=OPDSImporter, ) @@ -2208,20 +1990,19 @@ def import_one_feed(self, feed): # The TimestampData returned by run_once does not include any # timing information; that's provided by run(). - assert None == progress.start - assert None == progress.finish + assert progress.start is None + assert progress.finish is None - def test_update_headers(self, opds_importer_fixture: OPDSImporterFixture): - data, db, session = ( - opds_importer_fixture, - opds_importer_fixture.db, - opds_importer_fixture.db.session, + def test_update_headers(self, db: DatabaseTransactionFixture): + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", data_source_name=data_source_name ) # Test the _update_headers helper method. monitor = OPDSImportMonitor( - session, - collection=db.default_collection(), + db.session, + collection=collection, import_class=OPDSImporter, ) @@ -2269,16 +2050,17 @@ def test_retry(self, opds_importer_fixture: OPDSImporterFixture): feed = data.content_server_mini_feed feed_url = "https://example.com/feed.opds" - # After we overrode the value of configuration setting we can instantiate OPDSImportMonitor. - # It'll load new "Max retry count"'s value from the database. 
- DatabaseTransactionFixture.set_settings( - db.default_collection().integration_configuration, - "connection_max_retry_count", - retry_count, + data_source_name = "OPDS" + collection = db.collection( + external_account_id="http://url/", + data_source_name=data_source_name, + settings={"connection_max_retry_count": retry_count}, ) + + # The importer takes its retry count from the collection settings. monitor = OPDSImportMonitor( session, - collection=db.default_collection(), + collection=collection, import_class=OPDSImporter, ) @@ -2307,7 +2089,9 @@ def __init__(self, db: DatabaseTransactionFixture): self.db = db self.session = db.session self.collection = db.collection( - protocol=OPDSAPI.label(), data_source_name="OPDS" + protocol=OPDSAPI.label(), + data_source_name="OPDS", + external_account_id="http://opds.example.com/feed", ) self.api = OPDSAPI(self.session, self.collection) diff --git a/tests/core/test_opds_validate.py b/tests/core/test_opds_validate.py index 68510b376b..a459cdebf0 100644 --- a/tests/core/test_opds_validate.py +++ b/tests/core/test_opds_validate.py @@ -19,11 +19,16 @@ def test_opds2_schema( db: DatabaseTransactionFixture, opds_files_fixture: OPDSFilesFixture, ): - db.default_collection().protocol = ExternalIntegration.OPDS2_IMPORT - db.default_collection().data_source = DataSource.FEEDBOOKS + collection = db.collection( + protocol=ExternalIntegration.OPDS2_IMPORT, + data_source_name=DataSource.FEEDBOOKS, + settings={ + "external_account_id": "http://example.com/feed", + }, + ) validator = OPDS2SchemaValidation( db.session, - collection=db.default_collection(), + collection=collection, import_class=OPDS2Importer, parser=RWPMManifestParser(OPDS2FeedParserFactory()), ) @@ -38,15 +43,18 @@ def test_odl2_schema( db: DatabaseTransactionFixture, opds_files_fixture: OPDSFilesFixture, ): - db.default_collection().integration_configuration.settings_dict = { - "username": "username", - "password": "password", - } - db.default_collection().protocol = ExternalIntegration.ODL2 - db.default_collection().data_source = DataSource.FEEDBOOKS + collection = db.collection( + protocol=ExternalIntegration.ODL2, + data_source_name=DataSource.FEEDBOOKS, + settings={ + "username": "username", + "password": "password", + "external_account_id": "http://example.com/feed", + }, + ) validator = ODL2SchemaValidation( db.session, - collection=db.default_collection(), + collection=collection, import_class=ODL2Importer, parser=RWPMManifestParser(ODLFeedParserFactory()), ) diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index d732068e6c..f41269c58b 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -1204,7 +1204,10 @@ def test_success(self, db: DatabaseTransactionFixture): collection = get_one(db.session, Collection) assert "New Collection" == collection.name assert "url" == collection.integration_configuration.settings_dict["url"] - assert "acctid" == collection.external_account_id + assert ( + "acctid" + == collection.integration_configuration.settings_dict["external_account_id"] + ) assert ( "username" == collection.integration_configuration.settings_dict["username"] ) diff --git a/tests/core/test_selftest.py b/tests/core/test_selftest.py index 910b0d04c5..a676cb1d63 100644 --- a/tests/core/test_selftest.py +++ b/tests/core/test_selftest.py @@ -38,7 +38,7 @@ def test_success_representation(self, db: DatabaseTransactionFixture): ) # A SelfTestResult may have an associated Collection. 
- db.default_collection().name = "CollectionA" + db.default_collection().integration_configuration.name = "CollectionA" result.collection = db.default_collection() assert ( "" diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index e1f3393cf0..0942f63850 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -8,7 +8,7 @@ import time import uuid from textwrap import dedent -from typing import Generator, Iterable, List, Optional, Tuple +from typing import Any, Dict, Generator, Iterable, List, Optional, Tuple import pytest import sqlalchemy @@ -169,10 +169,9 @@ def _make_default_library(self) -> Library: "Default Collection", protocol=ExternalIntegration.OPDS_IMPORT, data_source_name="OPDS", + external_account_id="http://opds.example.com/feed", ) - collection.integration_configuration.for_library(library.id, create=True) - if collection not in library.collections: - library.collections.append(collection) + collection.libraries.append(library) return library @staticmethod @@ -207,7 +206,7 @@ def transaction(self) -> Transaction: def session(self) -> Session: return self._session - def default_collection(self): + def default_collection(self) -> Collection: """A Collection that will only be created once throughout a given test. @@ -296,19 +295,20 @@ def collection( username=None, password=None, data_source_name=None, + settings: Dict[str, Any] | None = None, ) -> Collection: name = name or self.fresh_str() - collection, ignore = get_one_or_create(self.session, Collection, name=name) - collection.external_account_id = external_account_id - integration = collection.create_external_integration(protocol) - integration.goal = ExternalIntegration.LICENSE_GOAL - config = collection.create_integration_configuration(protocol) - config.goal = Goals.LICENSE_GOAL - config.settings_dict = { - "url": url, - "username": username, - "password": password, - } + collection, _ = Collection.by_name_and_protocol(self.session, name, protocol) + settings = settings or {} + if url: + settings["url"] = url + if username: + settings["username"] = username + if password: + settings["password"] = password + if external_account_id: + settings["external_account_id"] = external_account_id + collection.integration_configuration.settings_dict = settings if data_source_name: collection.data_source = data_source_name @@ -727,8 +727,7 @@ def integration_configuration( else: libraries = [] - for library in libraries: - integration.for_library(library.id, create=True) + integration.libraries.extend(libraries) integration.settings_dict = kwargs return integration diff --git a/tests/fixtures/odl.py b/tests/fixtures/odl.py index 420e342ae7..d2732d7b16 100644 --- a/tests/fixtures/odl.py +++ b/tests/fixtures/odl.py @@ -17,7 +17,6 @@ Patron, Representation, Work, - get_one_or_create, ) from core.model.configuration import ExternalIntegration from core.util.http import HTTP @@ -91,26 +90,18 @@ def library(self): def collection(self, library, api_class=ODLAPI): """Create a mock ODL collection to use in tests.""" integration_protocol = api_class.label() - collection, ignore = get_one_or_create( + collection, _ = Collection.by_name_and_protocol( self.db.session, - Collection, - name=f"Test {api_class.__name__} Collection", - create_method_kwargs=dict( - external_account_id="http://odl", - ), + f"Test {api_class.__name__} Collection", + integration_protocol, ) - integration = collection.create_external_integration( - protocol=integration_protocol - ) - config = 
collection.create_integration_configuration(integration_protocol) - config.settings_dict = { + collection.integration_configuration.settings_dict = { "username": "a", "password": "b", - "url": "http://metadata", + "external_account_id": "http://odl", Collection.DATA_SOURCE_NAME_SETTING: "Feedbooks", } - config.for_library(library.id, create=True) - library.collections.append(collection) + collection.libraries.append(library) return collection def work(self, collection): @@ -290,7 +281,7 @@ def collection( self, library: Library, api_class: Type[ODL2API] = ODL2API ) -> Collection: collection = super().collection(library, api_class) - collection.name = "Test ODL2 Collection" + collection.integration_configuration.name = "Test ODL2 Collection" collection.integration_configuration.protocol = ExternalIntegration.ODL2 return collection diff --git a/tests/migration/test_20231101_2d72d6876c52.py b/tests/migration/test_20231101_2d72d6876c52.py new file mode 100644 index 0000000000..72ab925e7a --- /dev/null +++ b/tests/migration/test_20231101_2d72d6876c52.py @@ -0,0 +1,237 @@ +from typing import Any, Dict, Optional + +import pytest +from pytest_alembic import MigrationContext +from sqlalchemy import inspect +from sqlalchemy.engine import Connection, Engine +from sqlalchemy.exc import IntegrityError + +from core.model import json_serializer +from tests.migration.conftest import ( + CreateConfigSetting, + CreateExternalIntegration, + CreateLibrary, +) + + +def create_integration_configuration( + connection: Connection, + name: str, + protocol: str, + goal: str, + settings: Optional[Dict[str, Any]] = None, +) -> int: + if settings is None: + settings = {} + + settings_str = json_serializer(settings) + + integration_configuration = connection.execute( + "INSERT INTO integration_configurations (name, protocol, goal, settings, self_test_results) " + "VALUES (%s, %s, %s, %s, '{}') returning id", + name, + protocol, + goal, + settings_str, + ).fetchone() + assert integration_configuration is not None + assert isinstance(integration_configuration.id, int) + return integration_configuration.id + + +def create_integration_library_configuration( + connection: Connection, + integration_id: int, + library_id: int, + settings: Optional[Dict[str, Any]] = None, +) -> None: + if settings is None: + settings = {} + + settings_str = json_serializer(settings) + connection.execute( + "INSERT INTO integration_library_configurations (parent_id, library_id, settings) " + "VALUES (%s, %s, %s)", + integration_id, + library_id, + settings_str, + ) + + +def create_collection_library( + connection: Connection, collection_id: int, library_id: int +) -> None: + connection.execute( + "INSERT INTO collections_libraries (collection_id, library_id) " + "VALUES (%s, %s)", + collection_id, + library_id, + ) + + +def create_collection( + connection: Connection, + name: str, + integration_configuration_id: int, + external_account_id: Optional[str] = None, + external_integration_id: Optional[int] = None, +) -> int: + collection = connection.execute( + "INSERT INTO collections " + "(name, external_account_id, integration_configuration_id, external_integration_id) VALUES " + "(%s, %s, %s, %s) " + "returning id", + name, + external_account_id, + integration_configuration_id, + external_integration_id, + ).fetchone() + assert collection is not None + assert isinstance(collection.id, int) + return collection.id + + +def column_exists(engine: Engine, table_name: str, column_name: str) -> bool: + inspector = inspect(engine) + columns = 
[column["name"] for column in inspector.get_columns(table_name)] + return column_name in columns + + +def test_migration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("2d72d6876c52") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + # Test setup, create all the data we need for the migration + library_1 = create_library(connection, "library_1") + library_2 = create_library(connection, "library_2") + + integration_1_settings = {"data_source": "integration_1"} + integration_1 = create_integration_configuration( + connection, + "integration_1", + "OPDS Import", + "LICENSE_GOAL", + settings=integration_1_settings, + ) + + integration_2_settings = { + "overdrive_website_id": "2", + "overdrive_client_key": "3", + "overdrive_client_secret": "4", + } + integration_2 = create_integration_configuration( + connection, + "collection_2", + "Overdrive", + "LICENSE_GOAL", + settings=integration_2_settings, + ) + + external_1 = create_external_integration(connection) + external_2 = create_external_integration(connection) + + create_config_setting( + connection, "token_auth_endpoint", "http://token.com/auth", external_1 + ) + + collection_1 = create_collection( + connection, "collection_1", integration_1, "http://test.com", external_1 + ) + collection_2 = create_collection( + connection, "collection_2", integration_2, "1", external_2 + ) + + create_integration_library_configuration(connection, integration_1, library_1) + create_integration_library_configuration(connection, integration_1, library_2) + create_collection_library(connection, collection_1, library_1) + create_collection_library(connection, collection_1, library_2) + + create_integration_library_configuration(connection, integration_2, library_2) + create_collection_library(connection, collection_2, library_2) + + # Test that the collections_libraries table has the correct forign key constraints + with pytest.raises(IntegrityError) as excinfo: + create_collection_library(connection, 99, 99) + assert "violates foreign key constraint" in str(excinfo.value) + + # Make sure we have the data we expect before we run the migration + integration_1_actual = connection.execute( + "select name, settings from integration_configurations where id = (%s)", + integration_1, + ).fetchone() + assert integration_1_actual is not None + assert integration_1_actual.name == "integration_1" + assert integration_1_actual.settings == integration_1_settings + assert ( + column_exists(alembic_engine, "integration_configurations", "context") + is False + ) + + integration_2_actual = connection.execute( + "select name, settings from integration_configurations where id = (%s)", + integration_2, + ).fetchone() + assert integration_2_actual is not None + assert integration_2_actual.name == "collection_2" + assert integration_2_actual.settings == integration_2_settings + assert ( + column_exists(alembic_engine, "integration_configurations", "context") + is False + ) + + # Run the migration + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as connection: + # Make sure the migration updated the integration name, added the context column, and updated the settings + # column to contain the external_account_id + integration_1_actual = connection.execute( + "select name, settings, context from integration_configurations where id = 
(%s)", + integration_1, + ).fetchone() + assert integration_1_actual is not None + assert integration_1_actual.name == "collection_1" + assert integration_1_actual.settings != integration_1_settings + assert integration_1_actual.settings == { + "data_source": "integration_1", + "external_account_id": "http://test.com", + } + assert integration_1_actual.context == { + "token_auth_endpoint": "http://token.com/auth" + } + + integration_2_actual = connection.execute( + "select name, settings, context from integration_configurations where id = (%s)", + integration_2, + ).fetchone() + assert integration_2_actual is not None + assert integration_2_actual.name == "collection_2" + assert integration_2_actual.settings != integration_2_settings + assert integration_2_actual.settings == { + "overdrive_website_id": "2", + "overdrive_client_key": "3", + "overdrive_client_secret": "4", + "external_account_id": "1", + } + assert integration_2_actual.context == {} + + # The foreign key constraints have been removed from the collections_libraries table + create_collection_library(connection, 99, 99) + + # If we try to run the migration, it will fail when it tries to add back the foreign key constraints + with pytest.raises(IntegrityError): + alembic_runner.migrate_down_one() + + # But if we remove the data that violates the foreign key constraints, the migration will run successfully + with alembic_engine.connect() as connection: + connection.execute( + "delete from collections_libraries where collection_id = 99 and library_id = 99" + ) + alembic_runner.migrate_down_one() From 9c730c9cc1d7612e8f9012f1c7e3f7d43855e3fa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Nov 2023 09:37:46 -0800 Subject: [PATCH 173/262] Bump certifi from 2023.7.22 to 2023.11.17 (#1521) Bumps [certifi](https://github.com/certifi/python-certifi) from 2023.7.22 to 2023.11.17. - [Commits](https://github.com/certifi/python-certifi/compare/2023.07.22...2023.11.17) --- updated-dependencies: - dependency-name: certifi dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index f8216b3f52..06da6e58a9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -613,13 +613,13 @@ files = [ [[package]] name = "certifi" -version = "2023.7.22" +version = "2023.11.17" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ - {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, - {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, + {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, + {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, ] [[package]] From e8118174bf091c1045d6135c09f684f39110739f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 21 Nov 2023 09:39:33 -0800 Subject: [PATCH 174/262] Bump types-jsonschema from 4.19.0.4 to 4.20.0.0 (#1520) Bumps [types-jsonschema](https://github.com/python/typeshed) from 4.19.0.4 to 4.20.0.0. 
- [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-jsonschema dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 06da6e58a9..baef1515c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4133,13 +4133,13 @@ Flask = ">=2.0.0" [[package]] name = "types-jsonschema" -version = "4.19.0.4" +version = "4.20.0.0" description = "Typing stubs for jsonschema" optional = false python-versions = ">=3.8" files = [ - {file = "types-jsonschema-4.19.0.4.tar.gz", hash = "sha256:994feb6632818259c4b5dbd733867824cb475029a6abc2c2b5201a2268b6e7d2"}, - {file = "types_jsonschema-4.19.0.4-py3-none-any.whl", hash = "sha256:b73c3f4ba3cd8108602d1198a438e2698d5eb6b9db206ed89a33e24729b0abe7"}, + {file = "types-jsonschema-4.20.0.0.tar.gz", hash = "sha256:0de1032d243f1d3dba8b745ad84efe8c1af71665a9deb1827636ac535dcb79c1"}, + {file = "types_jsonschema-4.20.0.0-py3-none-any.whl", hash = "sha256:e6d5df18aaca4412f0aae246a294761a92040e93d7bc840f002b7329a8b72d26"}, ] [package.dependencies] From 6334442753507548f375db291a5b4657e0be225e Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Wed, 22 Nov 2023 12:23:51 +0530 Subject: [PATCH 175/262] Added verbose logging for the customlist sharing workflow (#1522) This will help debug issues occuring in the production environment --- api/admin/controller/custom_lists.py | 5 +++++ api/base_controller.py | 3 ++- core/query/customlist.py | 19 +++++++++++++++---- 3 files changed, 22 insertions(+), 5 deletions(-) diff --git a/api/admin/controller/custom_lists.py b/api/admin/controller/custom_lists.py index a5f867e4ac..21a339a2ba 100644 --- a/api/admin/controller/custom_lists.py +++ b/api/admin/controller/custom_lists.py @@ -434,6 +434,7 @@ def share_locally_POST( ) -> Union[ProblemDetail, Dict[str, int]]: successes = [] failures = [] + self.log.info(f"Begin sharing customlist '{customlist.name}'") for library in self._db.query(Library).all(): # Do not share with self if library == customlist.library: @@ -441,6 +442,9 @@ def share_locally_POST( # Do not attempt to re-share if library in customlist.shared_locally_with_libraries: + self.log.info( + f"Customlist '{customlist.name}' is already shared with library '{library.name}'" + ) continue # Attempt to share the list @@ -454,6 +458,7 @@ def share_locally_POST( successes.append(library) self._db.commit() + self.log.info(f"Done sharing customlist {customlist.name}") return self.CustomListSharePostResponse( successes=len(successes), failures=len(failures) ).dict() diff --git a/api/base_controller.py b/api/base_controller.py index 62dd1a000c..de11511c4c 100644 --- a/api/base_controller.py +++ b/api/base_controller.py @@ -6,10 +6,11 @@ from api.circulation_exceptions import * from api.problem_details import * from core.model import Library, Patron +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail -class BaseCirculationManagerController: +class BaseCirculationManagerController(LoggerMixin): """Define minimal standards for a circulation manager controller, mainly around authentication. 
""" diff --git a/core/query/customlist.py b/core/query/customlist.py index c1ce20dab4..8a50bafd49 100644 --- a/core/query/customlist.py +++ b/core/query/customlist.py @@ -2,7 +2,6 @@ import datetime import json -import logging from typing import TYPE_CHECKING from api.admin.problem_details import ( @@ -14,20 +13,28 @@ from core.model.customlist import CustomList, CustomListEntry from core.model.library import Library from core.model.licensing import LicensePool +from core.util.log import LoggerMixin from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: from sqlalchemy.orm import Session -class CustomListQueries: +class CustomListQueries(LoggerMixin): @classmethod def share_locally_with_library( cls, _db, customlist: CustomList, library: Library ) -> ProblemDetail | bool: # All customlist collections must be present in the library + log = cls.logger() + log.info( + f"Attempting to share customlist '{customlist.name}' with library '{library.name}'." + ) for collection in customlist.collections: if collection not in library.collections: + log.info( + f"Unable to share: Collection '{collection.name}' is missing from the library." + ) return CUSTOMLIST_SOURCE_COLLECTION_MISSING # All entries must be valid for the library @@ -43,14 +50,18 @@ def share_locally_with_library( .first() ) if valid_license is None: + log.info(f"Unable to share: No license for work '{entry.work.title}'.") return CUSTOMLIST_ENTRY_NOT_VALID_FOR_LIBRARY customlist.shared_locally_with_libraries.append(library) + log.info( + f"Successfully shared '{customlist.name}' with library '{library.name}'." + ) return True @classmethod def populate_query_pages( - self, + cls, _db: Session, custom_list: CustomList, start_page: int = 1, @@ -67,7 +78,7 @@ def populate_query_pages( :param json_query: If provided, use this json query rather than that of the custom list """ - log = logging.getLogger("Auto Update Custom List") + log = cls.logger() search = ExternalSearchIndex(_db) if not custom_list.auto_update_query: From 58c489bbcdd1ca8db75298bdfbbde52eb910b56d Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 23 Nov 2023 14:55:29 +0530 Subject: [PATCH 176/262] PP-745 Removed patron synchronization checks for annotations (#1524) * Removed patron synchronization checks for annotations Now the CM will record annotations regardless of patron settings --- api/annotations.py | 3 --- api/problem_details.py | 7 ------- core/model/patron.py | 3 --- tests/api/test_annotations.py | 4 +++- tests/api/test_controller_profile.py | 11 +---------- tests/core/models/test_patron.py | 13 ------------- 6 files changed, 4 insertions(+), 37 deletions(-) diff --git a/api/annotations.py b/api/annotations.py index 86d01af32b..263ca286a2 100644 --- a/api/annotations.py +++ b/api/annotations.py @@ -149,9 +149,6 @@ def detail(cls, annotation, with_context=True): class AnnotationParser: @classmethod def parse(cls, _db, data, patron): - if patron.synchronize_annotations != True: - return PATRON_NOT_OPTED_IN_TO_ANNOTATION_SYNC - try: data = json.loads(data) if "id" in data and data["id"] is None: diff --git a/api/problem_details.py b/api/problem_details.py index 1860cf4b40..994824d1f1 100644 --- a/api/problem_details.py +++ b/api/problem_details.py @@ -271,13 +271,6 @@ detail=_("The analytics event must be a supported type."), ) -PATRON_NOT_OPTED_IN_TO_ANNOTATION_SYNC = pd( - "http://librarysimplified.org/terms/problem/opt-in-required", - status_code=403, - title=_("Patron must opt in."), - 
detail=_("The patron must opt in to synchronize annotations to a server."), -) - INVALID_ANNOTATION_MOTIVATION = pd( "http://librarysimplified.org/terms/problem/invalid-annotation-motivation", status_code=400, diff --git a/core/model/patron.py b/core/model/patron.py index 00869856fe..db815b0011 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -729,9 +729,6 @@ def get_one_or_create(self, _db, patron, *args, **kwargs): """Find or create an Annotation, but only if the patron has annotation sync turned on. """ - if not patron.synchronize_annotations: - raise ValueError("Patron has opted out of synchronizing annotations.") - return get_one_or_create(_db, Annotation, patron=patron, *args, **kwargs) def set_inactive(self): diff --git a/tests/api/test_annotations.py b/tests/api/test_annotations.py index eae2d7547c..a6445169f7 100644 --- a/tests/api/test_annotations.py +++ b/tests/api/test_annotations.py @@ -749,4 +749,6 @@ def test_parse_jsonld_with_patron_opt_out( data_json, annotation_parser_fixture.patron_value, ) - assert PATRON_NOT_OPTED_IN_TO_ANNOTATION_SYNC == annotation + + # We no longer respect the patron settings for sync + assert isinstance(annotation, Annotation) diff --git a/tests/api/test_controller_profile.py b/tests/api/test_controller_profile.py index fd9119bea7..7830dcbb30 100644 --- a/tests/api/test_controller_profile.py +++ b/tests/api/test_controller_profile.py @@ -82,16 +82,7 @@ def test_put(self, profile_fixture: ProfileFixture): ) assert request_patron.synchronize_annotations is None - # This means we can't create annotations for them. - pytest.raises( - ValueError, - Annotation.get_one_or_create, - profile_fixture.db.session, - patron=request_patron, - identifier=identifier, - ) - - # But by sending a PUT request... + # By sending a PUT request... profile_fixture.manager.profiles.protocol() # ...we can change synchronize_annotations to True. diff --git a/tests/core/models/test_patron.py b/tests/core/models/test_patron.py index d6653725c0..d2dcb69f1a 100644 --- a/tests/core/models/test_patron.py +++ b/tests/core/models/test_patron.py @@ -428,19 +428,6 @@ def test_set_synchronize_annotations(self, db: DatabaseTransactionFixture): db.session.commit() assert 0 == len(p1.annotations) - # Patron #1 can no longer use Annotation.get_one_or_create. - pytest.raises( - ValueError, - Annotation.get_one_or_create, - db.session, - patron=p1, - identifier=identifier, - motivation=Annotation.IDLING, - ) - - # Patron #2's annotation is unaffected. - assert 1 == len(p2.annotations) - # But patron #2 can use Annotation.get_one_or_create. 
i2, is_new = Annotation.get_one_or_create( db.session, From fb338a4d0c2ebbf8d5a1adc43639e310e510816b Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 23 Nov 2023 08:41:37 -0400 Subject: [PATCH 177/262] Make sure default audience can be selected (PP-752) (#1526) --- api/saml/configuration/model.py | 10 +++++----- core/integration/settings.py | 11 ++++++++--- core/opds_import.py | 8 +++----- 3 files changed, 16 insertions(+), 13 deletions(-) diff --git a/api/saml/configuration/model.py b/api/saml/configuration/model.py index d67239be7c..7d6b50b5e9 100644 --- a/api/saml/configuration/model.py +++ b/api/saml/configuration/model.py @@ -1,8 +1,7 @@ import html from datetime import datetime -from enum import Enum from threading import Lock -from typing import Any, Dict, List, Optional, Pattern, Union +from typing import Any, Dict, List, Optional, Pattern from flask_babel import lazy_gettext as _ from onelogin.saml2.settings import OneLogin_Saml2_Settings @@ -33,6 +32,7 @@ from core.integration.settings import ( ConfigurationFormItem, ConfigurationFormItemType, + ConfigurationFormOptionsType, FormField, SettingsValidationError, ) @@ -52,9 +52,9 @@ def __init__(self) -> None: """Initialize a new instance of FederatedIdentityProviderOptions class.""" self._mutex = Lock() self._last_updated_at = datetime.min - self._options: Dict[Union[Enum, str], str] = {} + self._options: ConfigurationFormOptionsType = {} - def __call__(self, db: Session) -> Dict[Union[Enum, str], str]: + def __call__(self, db: Session) -> ConfigurationFormOptionsType: """Get federated identity provider options.""" with self._mutex: if self._needs_refresh(db): @@ -76,7 +76,7 @@ def _needs_refresh(self, db: Session) -> bool: return needs_refresh @staticmethod - def _fetch(db: Session) -> Dict[Union[Enum, str], str]: + def _fetch(db: Session) -> ConfigurationFormOptionsType: """Fetch federated identity provider options.""" identity_providers = ( db.query( diff --git a/core/integration/settings.py b/core/integration/settings.py index ec4eeb8071..87d5392ad0 100644 --- a/core/integration/settings.py +++ b/core/integration/settings.py @@ -155,6 +155,9 @@ class ConfigurationFormItemType(Enum): IMAGE = "image" +ConfigurationFormOptionsType = Mapping[Union[Enum, str, None], str] + + @dataclass(frozen=True) class ConfigurationFormItem: """ @@ -182,9 +185,9 @@ class ConfigurationFormItem: # When the type is SELECT, LIST, or MENU, the options are used to populate the # field in the admin interface. This can either be a callable that returns a # dictionary of options or a dictionary of options. 
- options: Callable[[Session], Dict[Enum | str, str]] | Mapping[ - Enum | str, str - ] | None = None + options: Callable[ + [Session], ConfigurationFormOptionsType + ] | ConfigurationFormOptionsType | None = None # Required is usually determined by the Pydantic model, but can be overridden # here, in the case where a field would not be required in the model, but is @@ -198,6 +201,8 @@ class ConfigurationFormItem: @staticmethod def get_form_value(value: Any) -> Any: + if value is None: + return "" if isinstance(value, Enum): return value.value if isinstance(value, bool): diff --git a/core/opds_import.py b/core/opds_import.py index ce9f1da987..58cc283cdc 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -112,8 +112,6 @@ class OPDSImporterSettings( SAMLWAYFlessSetttings, FormatPrioritiesSettings, ): - _NO_DEFAULT_AUDIENCE = "" - external_account_id: AnyHttpUrl = FormField( form=ConfigurationFormItem( label=_("URL"), @@ -125,8 +123,8 @@ class OPDSImporterSettings( form=ConfigurationFormItem(label=_("Data source name"), required=True) ) - default_audience: str = FormField( - default=_NO_DEFAULT_AUDIENCE, + default_audience: Optional[str] = FormField( + None, form=ConfigurationFormItem( label=_("Default audience"), description=_( @@ -135,7 +133,7 @@ class OPDSImporterSettings( ), type=ConfigurationFormItemType.SELECT, options={ - **{_NO_DEFAULT_AUDIENCE: _("No default audience")}, + **{None: _("No default audience")}, **{audience: audience for audience in sorted(Classifier.AUDIENCES)}, }, required=False, From 4b5467047e6d5fd37c54e3d36746ca05b555deca Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Thu, 23 Nov 2023 21:23:34 -0500 Subject: [PATCH 178/262] Add ISBN, when available, to playback time reports. (PP-572) (#1516) --- core/jobs/playtime_entries.py | 62 +++++++- tests/core/jobs/test_playtime_entries.py | 173 +++++++++++++++++++++-- 2 files changed, 222 insertions(+), 13 deletions(-) diff --git a/core/jobs/playtime_entries.py b/core/jobs/playtime_entries.py index c9f72bc87f..4d4ed5d184 100644 --- a/core/jobs/playtime_entries.py +++ b/core/jobs/playtime_entries.py @@ -6,15 +6,17 @@ from collections import defaultdict from datetime import datetime, timedelta from tempfile import TemporaryFile -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Optional, cast import dateutil.parser import pytz +from sqlalchemy.orm import Session from sqlalchemy.sql.functions import sum from core.config import Configuration from core.model import get_one from core.model.edition import Edition +from core.model.identifier import Identifier, RecursiveEquivalencyCache from core.model.time_tracking import PlaytimeEntry, PlaytimeSummary from core.util.datetime_helpers import previous_months, utc_now from core.util.email import EmailManager @@ -154,7 +156,15 @@ def do_run(self): # Write the data as a CSV writer = csv.writer(temp) writer.writerow( - ["date", "urn", "collection", "library", "title", "total seconds"] + [ + "date", + "urn", + "isbn", + "collection", + "library", + "title", + "total seconds", + ] ) for ( @@ -164,15 +174,25 @@ def do_run(self): identifier_id, total, ) in self._fetch_report_records(start=start, until=until): - edition = None + edition: Optional[Edition] = None + identifier: Optional[Identifier] = None if identifier_id: edition = get_one( self._db, Edition, primary_identifier_id=identifier_id ) + # Use the identifier from the edition where available. + # Otherwise, we'll have to look it up. 
+ identifier = ( + edition.primary_identifier + if edition + else get_one(self._db, Identifier, id=identifier_id) + ) + isbn = self._isbn_for_identifier(identifier) title = edition and edition.title row = ( report_date_label, urn, + isbn, collection_name, library_name, title, @@ -218,3 +238,39 @@ def _fetch_report_records(self, start: datetime, until: datetime) -> Query: PlaytimeSummary.identifier_id, ) ) + + @staticmethod + def _isbn_for_identifier( + identifier: Optional[Identifier], + /, + *, + default_value: str = "", + ) -> str: + """Find the strongest ISBN match for the given identifier. + + :param identifier: The identifier to match. + :param default_value: The default value to return if the identifier is missing or a match is not found. + """ + if identifier is None: + return default_value + + if identifier.type == Identifier.ISBN: + return cast(str, identifier.identifier) + + # If our identifier is not an ISBN itself, we'll use our Recursive Equivalency + # mechanism to find the next best one that is, if available. + db = Session.object_session(identifier) + eq_subquery = db.query(RecursiveEquivalencyCache.identifier_id).filter( + RecursiveEquivalencyCache.parent_identifier_id == identifier.id + ) + equivalent_identifiers = ( + db.query(Identifier) + .filter(Identifier.id.in_(eq_subquery)) + .filter(Identifier.type == Identifier.ISBN) + ) + + isbn = next( + map(lambda id_: id_.identifier, equivalent_identifiers), + None, + ) + return isbn or default_value diff --git a/tests/core/jobs/test_playtime_entries.py b/tests/core/jobs/test_playtime_entries.py index b7bd95e8ab..4adedbbc2c 100644 --- a/tests/core/jobs/test_playtime_entries.py +++ b/tests/core/jobs/test_playtime_entries.py @@ -2,7 +2,7 @@ import re from datetime import datetime, timedelta -from typing import List +from typing import List, Optional from unittest.mock import MagicMock, call, patch import pytest @@ -11,13 +11,14 @@ from api.model.time_tracking import PlaytimeTimeEntry from core.config import Configuration +from core.equivalents_coverage import EquivalentIdentifiersCoverageProvider from core.jobs.playtime_entries import ( PlaytimeEntriesEmailReportsScript, PlaytimeEntriesSummationScript, ) from core.model import create from core.model.collection import Collection -from core.model.identifier import Identifier +from core.model.identifier import Equivalency, Identifier from core.model.library import Library from core.model.time_tracking import PlaytimeEntry, PlaytimeSummary from core.util.datetime_helpers import datetime_utc, previous_months, utc_now @@ -229,6 +230,28 @@ def test_do_run(self, db: DatabaseTransactionFixture): collection2 = db.collection() library2 = db.library() + isbn_ids: dict[str, Identifier] = { + "i1": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="080442957X" + ), + "i2": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="9788175257665" + ), + } + identifier.equivalencies = [ + Equivalency( + input_id=identifier.id, output_id=isbn_ids["i1"].id, strength=0.5 + ), + Equivalency( + input_id=isbn_ids["i1"].id, output_id=isbn_ids["i2"].id, strength=1 + ), + ] + strongest_isbn = isbn_ids["i2"].identifier + no_isbn = "" + + # We're using the RecursiveEquivalencyCache, so must refresh it. 
+ EquivalentIdentifiersCoverageProvider(db.session).run() + playtime(db.session, identifier, collection, library, date3m(3), 1) playtime(db.session, identifier, collection, library, date3m(31), 2) playtime( @@ -249,7 +272,7 @@ def test_do_run(self, db: DatabaseTransactionFixture): reporting_name = "test cm" - # Horrible unbracketted syntax for python 3.8 + # Horrible unbracketed syntax for python 3.8 with patch("core.jobs.playtime_entries.csv.writer") as writer, patch( "core.jobs.playtime_entries.EmailManager" ) as email, patch( @@ -259,8 +282,10 @@ def test_do_run(self, db: DatabaseTransactionFixture): Configuration.REPORTING_NAME_ENVIRONMENT_VARIABLE: reporting_name, }, ): + # Act PlaytimeEntriesEmailReportsScript(db.session).run() + # Assert assert ( writer().writerow.call_count == 6 ) # 1 header, 5 identifier,collection,library entries @@ -271,18 +296,65 @@ def test_do_run(self, db: DatabaseTransactionFixture): call_args = writer().writerow.call_args_list assert call_args == [ call( - ["date", "urn", "collection", "library", "title", "total seconds"] + [ + "date", + "urn", + "isbn", + "collection", + "library", + "title", + "total seconds", + ] ), # Header - call((column1, identifier.urn, collection2.name, library2.name, None, 300)), - call((column1, identifier.urn, collection2.name, library.name, None, 100)), - call((column1, identifier.urn, collection.name, library2.name, None, 200)), call( - (column1, identifier.urn, collection.name, library.name, None, 3) + ( + column1, + identifier.urn, + strongest_isbn, + collection2.name, + library2.name, + None, + 300, + ) + ), + call( + ( + column1, + identifier.urn, + strongest_isbn, + collection2.name, + library.name, + None, + 100, + ) + ), + call( + ( + column1, + identifier.urn, + strongest_isbn, + collection.name, + library2.name, + None, + 200, + ) + ), + call( + ( + column1, + identifier.urn, + strongest_isbn, + collection.name, + library.name, + None, + 3, + ) ), # Identifier without edition call( ( column1, identifier2.urn, + no_isbn, collection.name, library.name, edition.title, @@ -305,7 +377,7 @@ def test_no_reporting_email(self, db: DatabaseTransactionFixture): identifier = db.identifier() collection = db.default_collection() library = db.default_library() - entry = playtime(db.session, identifier, collection, library, date3m(20), 1) + _ = playtime(db.session, identifier, collection, library, date3m(20), 1) with patch("core.jobs.playtime_entries.os.environ", new={}): script = PlaytimeEntriesEmailReportsScript(db.session) @@ -314,7 +386,88 @@ def test_no_reporting_email(self, db: DatabaseTransactionFixture): assert script._log.error.call_count == 1 assert script._log.warning.call_count == 1 - assert "date,urn,collection," in script._log.warning.call_args[0][0] + assert "date,urn,isbn,collection," in script._log.warning.call_args[0][0] + + @pytest.mark.parametrize( + "id_key, equivalents, default_value, expected_isbn", + [ + # If the identifier is an ISBN, we will not use an equivalency. + [ + "i1", + (("g1", "g2", 1), ("g2", "i1", 1), ("g1", "i2", 0.5)), + "", + "080442957X", + ], + [ + "i2", + (("g1", "g2", 1), ("g2", "i1", 0.5), ("g1", "i2", 1)), + "", + "9788175257665", + ], + ["i1", (("i1", "i2", 200),), "", "080442957X"], + ["i2", (("i2", "i1", 200),), "", "9788175257665"], + # If identifier is not an ISBN, but has an equivalency that is, use the strongest match. 
+ [ + "g2", + (("g1", "g2", 1), ("g2", "i1", 1), ("g1", "i2", 0.5)), + "", + "080442957X", + ], + [ + "g2", + (("g1", "g2", 1), ("g2", "i1", 0.5), ("g1", "i2", 1)), + "", + "9788175257665", + ], + # If we don't find an equivalent ISBN identifier, then we'll use the default. + ["g2", (), "default value", "default value"], + ["g1", (("g1", "g2", 1),), "default value", "default value"], + # If identifier is None, expect default value. + [None, (), "default value", "default value"], + ], + ) + def test__isbn_for_identifier( + self, + db: DatabaseTransactionFixture, + id_key: str | None, + equivalents: tuple[tuple[str, str, int | float]], + default_value: str, + expected_isbn: str, + ): + ids: dict[str, Identifier] = { + "i1": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="080442957X" + ), + "i2": db.identifier( + identifier_type=Identifier.ISBN, foreign_id="9788175257665" + ), + "g1": db.identifier(identifier_type=Identifier.GUTENBERG_ID), + "g2": db.identifier(identifier_type=Identifier.GUTENBERG_ID), + } + equivalencies = [ + Equivalency( + input_id=ids[equivalent[0]].id, + output_id=ids[equivalent[1]].id, + strength=equivalent[2], + ) + for equivalent in equivalents + ] + test_identifier: Optional[Identifier] = ( + ids[id_key] if id_key is not None else None + ) + if test_identifier is not None: + test_identifier.equivalencies = equivalencies + + # We're using the RecursiveEquivalencyCache, so must refresh it. + EquivalentIdentifiersCoverageProvider(db.session).run() + + # Act + result = PlaytimeEntriesEmailReportsScript._isbn_for_identifier( + test_identifier, + default_value=default_value, + ) + # Assert + assert result == expected_isbn @pytest.mark.parametrize( "current_utc_time, start_arg, expected_start, until_arg, expected_until", From f81b7d5413011a99b77fcedbf7c03fdc194d3bc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 13:42:58 +0000 Subject: [PATCH 179/262] Bump types-flask-cors from 4.0.0.1 to 4.0.0.2 (#1528) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index baef1515c4..9f737354c4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4119,13 +4119,13 @@ files = [ [[package]] name = "types-flask-cors" -version = "4.0.0.1" +version = "4.0.0.2" description = "Typing stubs for Flask-Cors" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "types-Flask-Cors-4.0.0.1.tar.gz", hash = "sha256:68dbf195dec2a21932690751f6e7669f9eb7def6b15bfaaf507f96cc04bef5ec"}, - {file = "types_Flask_Cors-4.0.0.1-py3-none-any.whl", hash = "sha256:2846ceaceaf6dbbc023442ff85a15c9c1dbdf3c65fe3ed4b331f5c6b58f81c21"}, + {file = "types-Flask-Cors-4.0.0.2.tar.gz", hash = "sha256:7cf2f4305188334aea0f5eb1af15da16e1e8cfd2a1e69a3e2fab0719207b8f33"}, + {file = "types_Flask_Cors-4.0.0.2-py3-none-any.whl", hash = "sha256:2e870bc0234e6b771113960dd671f7ed99c00b1208a51285200e2b469913b475"}, ] [package.dependencies] From 02fd8fdce5e185b5e929e6d151096a0eca484e97 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 13:43:14 +0000 Subject: [PATCH 180/262] Bump types-psycopg2 from 2.9.21.16 to 2.9.21.17 (#1529) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9f737354c4..fae2af1290 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4158,13 +4158,13 @@ files = [ [[package]] name = "types-psycopg2" 
-version = "2.9.21.16" +version = "2.9.21.17" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.7" files = [ - {file = "types-psycopg2-2.9.21.16.tar.gz", hash = "sha256:44a3ae748173bb637cff31654d6bd12de9ad0c7ad73afe737df6152830ed82ed"}, - {file = "types_psycopg2-2.9.21.16-py3-none-any.whl", hash = "sha256:e2f24b651239ccfda320ab3457099af035cf37962c36c9fa26a4dc65991aebed"}, + {file = "types-psycopg2-2.9.21.17.tar.gz", hash = "sha256:dc84bef202eae39e59961c1174f4d3845f06f62d2704733b1be710c3be0c3b29"}, + {file = "types_psycopg2-2.9.21.17-py3-none-any.whl", hash = "sha256:635a5ee8fd11ec6a056494ca6533ab66475d967df9a4f8d4f8209887ba61ad2c"}, ] [[package]] From bc2ed3a060179f4ca5f6f293b8ae25bb97523105 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 13:43:36 +0000 Subject: [PATCH 181/262] Bump mypy from 1.7.0 to 1.7.1 (#1527) --- poetry.lock | 56 ++++++++++++++++++++++++++--------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index fae2af1290..3f9fd2d096 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2315,38 +2315,38 @@ files = [ [[package]] name = "mypy" -version = "1.7.0" +version = "1.7.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5da84d7bf257fd8f66b4f759a904fd2c5a765f70d8b52dde62b521972a0a2357"}, - {file = "mypy-1.7.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a3637c03f4025f6405737570d6cbfa4f1400eb3c649317634d273687a09ffc2f"}, - {file = "mypy-1.7.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b633f188fc5ae1b6edca39dae566974d7ef4e9aaaae00bc36efe1f855e5173ac"}, - {file = "mypy-1.7.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed9a3997b90c6f891138e3f83fb8f475c74db4ccaa942a1c7bf99e83a989a1"}, - {file = "mypy-1.7.0-cp310-cp310-win_amd64.whl", hash = "sha256:1fe46e96ae319df21359c8db77e1aecac8e5949da4773c0274c0ef3d8d1268a9"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:df67fbeb666ee8828f675fee724cc2cbd2e4828cc3df56703e02fe6a421b7401"}, - {file = "mypy-1.7.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a79cdc12a02eb526d808a32a934c6fe6df07b05f3573d210e41808020aed8b5d"}, - {file = "mypy-1.7.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f65f385a6f43211effe8c682e8ec3f55d79391f70a201575def73d08db68ead1"}, - {file = "mypy-1.7.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e81ffd120ee24959b449b647c4b2fbfcf8acf3465e082b8d58fd6c4c2b27e46"}, - {file = "mypy-1.7.0-cp311-cp311-win_amd64.whl", hash = "sha256:f29386804c3577c83d76520abf18cfcd7d68264c7e431c5907d250ab502658ee"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:87c076c174e2c7ef8ab416c4e252d94c08cd4980a10967754f91571070bf5fbe"}, - {file = "mypy-1.7.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6cb8d5f6d0fcd9e708bb190b224089e45902cacef6f6915481806b0c77f7786d"}, - {file = "mypy-1.7.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d93e76c2256aa50d9c82a88e2f569232e9862c9982095f6d54e13509f01222fc"}, - {file = "mypy-1.7.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cddee95dea7990e2215576fae95f6b78a8c12f4c089d7e4367564704e99118d3"}, - {file = "mypy-1.7.0-cp312-cp312-win_amd64.whl", hash = "sha256:d01921dbd691c4061a3e2ecdbfbfad029410c5c2b1ee88946bf45c62c6c91210"}, - 
{file = "mypy-1.7.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:185cff9b9a7fec1f9f7d8352dff8a4c713b2e3eea9c6c4b5ff7f0edf46b91e41"}, - {file = "mypy-1.7.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7b1e399c47b18feb6f8ad4a3eef3813e28c1e871ea7d4ea5d444b2ac03c418"}, - {file = "mypy-1.7.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc9fe455ad58a20ec68599139ed1113b21f977b536a91b42bef3ffed5cce7391"}, - {file = "mypy-1.7.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d0fa29919d2e720c8dbaf07d5578f93d7b313c3e9954c8ec05b6d83da592e5d9"}, - {file = "mypy-1.7.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b53655a295c1ed1af9e96b462a736bf083adba7b314ae775563e3fb4e6795f5"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c1b06b4b109e342f7dccc9efda965fc3970a604db70f8560ddfdee7ef19afb05"}, - {file = "mypy-1.7.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:bf7a2f0a6907f231d5e41adba1a82d7d88cf1f61a70335889412dec99feeb0f8"}, - {file = "mypy-1.7.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:551d4a0cdcbd1d2cccdcc7cb516bb4ae888794929f5b040bb51aae1846062901"}, - {file = "mypy-1.7.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:55d28d7963bef00c330cb6461db80b0b72afe2f3c4e2963c99517cf06454e665"}, - {file = "mypy-1.7.0-cp39-cp39-win_amd64.whl", hash = "sha256:870bd1ffc8a5862e593185a4c169804f2744112b4a7c55b93eb50f48e7a77010"}, - {file = "mypy-1.7.0-py3-none-any.whl", hash = "sha256:96650d9a4c651bc2a4991cf46f100973f656d69edc7faf91844e87fe627f7e96"}, - {file = "mypy-1.7.0.tar.gz", hash = "sha256:1e280b5697202efa698372d2f39e9a6713a0395a756b1c6bd48995f8d72690dc"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, + {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, + {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, + {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, + {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, + {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, + {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, + {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, + {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, + {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, + {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, + {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, + {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, + {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, + {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, + {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, + {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, + {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, + {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, + {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, + {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, + {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, + {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, ] [package.dependencies] From 295bdfa44acc44999981d580b03d9225747a33b7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 27 Nov 2023 16:48:53 +0000 Subject: [PATCH 182/262] Bump bcrypt from 4.0.1 to 4.1.0 (#1531) --- poetry.lock | 41 ++++++++++++++++++----------------------- 1 file changed, 18 insertions(+), 23 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3f9fd2d096..e3d6fb4f4b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -74,32 +74,27 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bcrypt" -version = "4.0.1" +version = "4.1.0" description = "Modern password hashing for your software and your servers" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "bcrypt-4.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:b1023030aec778185a6c16cf70f359cbb6e0c289fd564a7cfa29e727a1c38f8f"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:08d2947c490093a11416df18043c27abe3921558d2c03e2076ccb28a116cb6d0"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0eaa47d4661c326bfc9d08d16debbc4edf78778e6aaba29c1bc7ce67214d4410"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae88eca3024bb34bb3430f964beab71226e761f51b912de5133470b649d82344"}, - {file = 
"bcrypt-4.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:a522427293d77e1c29e303fc282e2d71864579527a04ddcfda6d4f8396c6c36a"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fbdaec13c5105f0c4e5c52614d04f0bca5f5af007910daa8b6b12095edaa67b3"}, - {file = "bcrypt-4.0.1-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ca3204d00d3cb2dfed07f2d74a25f12fc12f73e606fcaa6975d1f7ae69cacbb2"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:089098effa1bc35dc055366740a067a2fc76987e8ec75349eb9484061c54f535"}, - {file = "bcrypt-4.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e9a51bbfe7e9802b5f3508687758b564069ba937748ad7b9e890086290d2f79e"}, - {file = "bcrypt-4.0.1-cp36-abi3-win32.whl", hash = "sha256:2caffdae059e06ac23fce178d31b4a702f2a3264c20bfb5ff541b338194d8fab"}, - {file = "bcrypt-4.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:8a68f4341daf7522fe8d73874de8906f3a339048ba406be6ddc1b3ccb16fc0d9"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf4fa8b2ca74381bb5442c089350f09a3f17797829d958fad058d6e44d9eb83c"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:67a97e1c405b24f19d08890e7ae0c4f7ce1e56a712a016746c8b2d7732d65d4b"}, - {file = "bcrypt-4.0.1-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b3b85202d95dd568efcb35b53936c5e3b3600c7cdcc6115ba461df3a8e89f38d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cbb03eec97496166b704ed663a53680ab57c5084b2fc98ef23291987b525cb7d"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:5ad4d32a28b80c5fa6671ccfb43676e8c1cc232887759d1cd7b6f56ea4355215"}, - {file = "bcrypt-4.0.1-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b57adba8a1444faf784394de3436233728a1ecaeb6e07e8c22c8848f179b893c"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:705b2cea8a9ed3d55b4491887ceadb0106acf7c6387699fca771af56b1cdeeda"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:2b3ac11cf45161628f1f3733263e63194f22664bf4d0c0f3ab34099c02134665"}, - {file = "bcrypt-4.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3100851841186c25f127731b9fa11909ab7b1df6fc4b9f8353f4f1fd952fbf71"}, - {file = "bcrypt-4.0.1.tar.gz", hash = "sha256:27d375903ac8261cfe4047f6709d16f7d18d39b1ec92aaf72af989552a650ebd"}, + {file = "bcrypt-4.1.0-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:a601e52d0318142d1de84ab213ae062a10168c1acd721a2125bcf97d8646809b"}, + {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e8e085b188827087bcda882a2ca14843164cde43d83aca02a67b94ed68b8d1f"}, + {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e86108bd26137c5acb86fdf5696a30433c7e9e6a81e3aef6c3746cb9ac535a8"}, + {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d9151b2098bf5598954a5d731c66c4e84321d3f4b9f167d4b73084df6d3958d"}, + {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:30be7a77166a97f85ec2a94100e9841ea97c38ca5a93335111fe96cd485ba250"}, + {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b5dea889347e80dbd86442b989128e12812b181a40ae4db496388ad36a8fe2b7"}, + {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = 
"sha256:544a13c2555027d1042d249ba0e3a302cba105224420f06d20e61179207a7e02"}, + {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:adb15ff096c9cfdb1b152a5c032f1d4f7390eabd98fd27b0d789c536ef9e7b40"}, + {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81458aac2577582e22d9d2682a457992827472ba5010e9e77431317dfe804c98"}, + {file = "bcrypt-4.1.0-cp37-abi3-win32.whl", hash = "sha256:69740306830e26479a15e3686027aae67b2250e2a973b3f303bcabc1eb224f77"}, + {file = "bcrypt-4.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:15b9865d3fb52d30c8301f13ab074006dbacc219539470f93c13fd720cdc1558"}, + {file = "bcrypt-4.1.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4d9330d165e0079a32e998d0e1dfab00ada7062070b759ab793c3331ab7e9d78"}, + {file = "bcrypt-4.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d068ba10dfaf5ea7260cf703109f77350043b7efe58d6d6e4027694e8144989f"}, + {file = "bcrypt-4.1.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e16655bfe3077223d8b8e00c81a6d21b78b9b47a20b57a052e26c8ec4cdc7613"}, + {file = "bcrypt-4.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4417aa6c0701b8c20136d845198555f61e23d1ee64a8c33a13a9f2d6b6ed531c"}, + {file = "bcrypt-4.1.0.tar.gz", hash = "sha256:8807681e040e89ee3201249a7849342748e643b4bb2749dfc8dd1b34f6baa349"}, ] [package.extras] From e9e155b3e2cc89d5c0f3081586b88ca963da7bd1 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 27 Nov 2023 16:26:04 -0400 Subject: [PATCH 183/262] Properly track code coverage for scripts (#1532) --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 607e6604dd..c49b01b628 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,7 +13,7 @@ exclude_also = [ [tool.coverage.run] branch = true relative_files = true -source = ["api", "core"] +source = ["api", "core", "scripts"] [tool.isort] known_first_party = ["api", "core", "customlists"] From 481ac8c27169e5af6d0a9f48f80b0c7e5a805b3e Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 27 Nov 2023 18:35:47 -0400 Subject: [PATCH 184/262] Library short name being saved into library settings (PP-754) (#1525) --- ...remove_short_name_from_library_settings.py | 47 +++++++++++++++++++ api/admin/controller/integration_settings.py | 40 +++++++++++++++- .../api/admin/controller/test_patron_auth.py | 22 ++++++++- 3 files changed, 106 insertions(+), 3 deletions(-) create mode 100644 alembic/versions/20231122_e4b120a8d1d5_remove_short_name_from_library_settings.py diff --git a/alembic/versions/20231122_e4b120a8d1d5_remove_short_name_from_library_settings.py b/alembic/versions/20231122_e4b120a8d1d5_remove_short_name_from_library_settings.py new file mode 100644 index 0000000000..85aef3ecdd --- /dev/null +++ b/alembic/versions/20231122_e4b120a8d1d5_remove_short_name_from_library_settings.py @@ -0,0 +1,47 @@ +"""Remove short_name from library settings. + +Revision ID: e4b120a8d1d5 +Revises: 2d72d6876c52 +Create Date: 2023-11-22 16:28:55.759169+00:00 + +""" +from alembic import op +from core.migration.util import migration_logger +from core.model import json_serializer + +# revision identifiers, used by Alembic. +revision = "e4b120a8d1d5" +down_revision = "2d72d6876c52" +branch_labels = None +depends_on = None + + +log = migration_logger(revision) + + +def upgrade() -> None: + conn = op.get_bind() + + # Find all the library configurations that have a short_name key in their settings. 
+ rows = conn.execute( + "select parent_id, library_id, settings from integration_library_configurations where settings ? 'short_name'" + ).all() + + for row in rows: + settings = row.settings.copy() + short_name = settings.get("short_name") + del settings["short_name"] + log.info( + f"Removing short_name {short_name} from library configuration " + f"(parent:{row.parent_id}/library:{row.library_id}) {settings}" + ) + settings_json = json_serializer(settings) + conn.execute( + "update integration_library_configurations set settings = (%s) where parent_id = (%s) and library_id = (%s)", + (settings_json, row.parent_id, row.library_id), + ) + + +def downgrade() -> None: + # No need to do anything here. The key was never used. + pass diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py index b84fd1ac93..d0d9f13e5f 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -235,13 +235,49 @@ def get_changed_libraries( ) -> ChangedLibrariesTuple: """ Return a tuple of lists of libraries that have had their library settings - added, updated, or removed. + added, updated, or removed. No action is taken to add, update, or remove + the settings, this function just parses the submitted data and returns + the lists of libraries that need to be processed. + + :param service: The IntegrationConfiguration that the library settings should be + associated with. + :param libraries_data: A JSON string containing a list of dictionaries. + Each dictionary has a 'short_name' key that identifies which + library the settings are for, and then the rest of the dictionary is the + settings for that library. + + :return: A named tuple with three lists of libraries: + - new: A list of UpdatedLibrarySettingsTuple named tuples that contains the + IntegrationLibraryConfiguration and settings for each library with newly + added settings. + - updated: A list of UpdatedLibrarySettingsTuple named tuples that contains the + IntegrationLibraryConfiguration and settings for each library that had its + settings updated. + - removed: A list of IntegrationLibraryConfiguration objects for libraries that + had their settings removed. """ libraries = json.loads(libraries_data) existing_library_settings = { c.library.short_name: c for c in service.library_configurations } - submitted_library_settings = {l.get("short_name"): l for l in libraries} + + submitted_library_settings = {} + for library in libraries: + # Each library settings dictionary should have a 'short_name' key that identifies + # which library the settings are for. This key is removed from the dictionary as + # only the settings should be stored in the database. + short_name = library.get("short_name") + if short_name is None: + self.log.error( + f"Library settings missing short_name. Settings: {library}." + ) + raise ProblemError( + INVALID_INPUT.detailed( + "Invalid library settings, missing short_name." 
+ ) + ) + del library["short_name"] + submitted_library_settings[short_name] = library removed = [ existing_library_settings[library] diff --git a/tests/api/admin/controller/test_patron_auth.py b/tests/api/admin/controller/test_patron_auth.py index ee375fb6e1..6fbb8be250 100644 --- a/tests/api/admin/controller/test_patron_auth.py +++ b/tests/api/admin/controller/test_patron_auth.py @@ -36,6 +36,7 @@ from core.integration.goals import Goals from core.model import AdminRole, Library, get_one from core.model.integration import IntegrationConfiguration +from core.problem_details import INVALID_INPUT from core.util.problem_detail import ProblemDetail if TYPE_CHECKING: @@ -367,7 +368,7 @@ def test_patron_auth_services_post_missing_patron_auth_name( assert isinstance(response, ProblemDetail) assert response.uri == MISSING_PATRON_AUTH_NAME.uri - def test_patron_auth_services_post_missing_patron_auth_no_such_library( + def test_patron_auth_services_post_no_such_library( self, post_response: Callable[..., Response | ProblemDetail], common_args: list[tuple[str, str]], @@ -384,6 +385,24 @@ def test_patron_auth_services_post_missing_patron_auth_no_such_library( assert isinstance(response, ProblemDetail) assert response.uri == NO_SUCH_LIBRARY.uri + def test_patron_auth_services_post_missing_short_name( + self, + post_response: Callable[..., Response | ProblemDetail], + common_args: list[tuple[str, str]], + ): + form = ImmutableMultiDict( + [ + ("name", "testing auth name"), + ("protocol", SimpleAuthenticationProvider.__module__), + ("libraries", json.dumps([{}])), + ] + + common_args + ) + response = post_response(form) + assert isinstance(response, ProblemDetail) + assert response.uri == INVALID_INPUT.uri + assert response.detail == "Invalid library settings, missing short_name." + def test_patron_auth_services_post_missing_patron_auth_multiple_basic( self, post_response: Callable[..., Response | ProblemDetail], @@ -503,6 +522,7 @@ def test_patron_auth_services_post_create( assert settings.test_password == "pass" [library_config] = auth_service.library_configurations assert library_config.library == default_library + assert "short_name" not in library_config.settings_dict assert ( library_config.settings_dict["library_identifier_restriction_criteria"] == "^1234" From 73aab83a8c9792e00f22abd55a1f68befddf8fb3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 13:16:15 +0000 Subject: [PATCH 185/262] Bump cryptography from 41.0.5 to 41.0.6 (#1536) --- poetry.lock | 50 +++++++++++++++++++++++++------------------------- 1 file changed, 25 insertions(+), 25 deletions(-) diff --git a/poetry.lock b/poetry.lock index e3d6fb4f4b..52024d8e3c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. [[package]] name = "alembic" @@ -828,34 +828,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.5" +version = "41.0.6" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:da6a0ff8f1016ccc7477e6339e1d50ce5f59b88905585f77193ebd5068f1e797"}, - {file = "cryptography-41.0.5-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:b948e09fe5fb18517d99994184854ebd50b57248736fd4c720ad540560174ec5"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d38e6031e113b7421db1de0c1b1f7739564a88f1684c6b89234fbf6c11b75147"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e270c04f4d9b5671ebcc792b3ba5d4488bf7c42c3c241a3748e2599776f29696"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ec3b055ff8f1dce8e6ef28f626e0972981475173d7973d63f271b29c8a2897da"}, - {file = "cryptography-41.0.5-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:7d208c21e47940369accfc9e85f0de7693d9a5d843c2509b3846b2db170dfd20"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:8254962e6ba1f4d2090c44daf50a547cd5f0bf446dc658a8e5f8156cae0d8548"}, - {file = "cryptography-41.0.5-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:a48e74dad1fb349f3dc1d449ed88e0017d792997a7ad2ec9587ed17405667e6d"}, - {file = "cryptography-41.0.5-cp37-abi3-win32.whl", hash = "sha256:d3977f0e276f6f5bf245c403156673db103283266601405376f075c849a0b936"}, - {file = "cryptography-41.0.5-cp37-abi3-win_amd64.whl", hash = "sha256:73801ac9736741f220e20435f84ecec75ed70eda90f781a148f1bad546963d81"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3be3ca726e1572517d2bef99a818378bbcf7d7799d5372a46c79c29eb8d166c1"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e886098619d3815e0ad5790c973afeee2c0e6e04b4da90b88e6bd06e2a0b1b72"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:573eb7128cbca75f9157dcde974781209463ce56b5804983e11a1c462f0f4e88"}, - {file = "cryptography-41.0.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0c327cac00f082013c7c9fb6c46b7cc9fa3c288ca702c74773968173bda421bf"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:227ec057cd32a41c6651701abc0328135e472ed450f47c2766f23267b792a88e"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:22892cc830d8b2c89ea60148227631bb96a7da0c1b722f2aac8824b1b7c0b6b8"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5a70187954ba7292c7876734183e810b728b4f3965fbe571421cb2434d279179"}, - {file = "cryptography-41.0.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:88417bff20162f635f24f849ab182b092697922088b477a7abd6664ddd82291d"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c707f7afd813478e2019ae32a7c49cd932dd60ab2d2a93e796f68236b7e1fbf1"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:580afc7b7216deeb87a098ef0674d6ee34ab55993140838b14c9b83312b37b86"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1e91467c65fe64a82c689dc6cf58151158993b13eb7a7f3f4b7f395636723"}, - {file = "cryptography-41.0.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:0d2a6a598847c46e3e321a7aef8af1436f11c27f1254933746304ff014664d84"}, - {file = "cryptography-41.0.5.tar.gz", hash = 
"sha256:392cb88b597247177172e02da6b7a63deeff1937fa6fec3bbf902ebd75d97ec7"}, + {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:0f27acb55a4e77b9be8d550d762b0513ef3fc658cd3eb15110ebbcbd626db12c"}, + {file = "cryptography-41.0.6-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:ae236bb8760c1e55b7a39b6d4d32d2279bc6c7c8500b7d5a13b6fb9fc97be35b"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afda76d84b053923c27ede5edc1ed7d53e3c9f475ebaf63c68e69f1403c405a8"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da46e2b5df770070412c46f87bac0849b8d685c5f2679771de277a422c7d0b86"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ff369dd19e8fe0528b02e8df9f2aeb2479f89b1270d90f96a63500afe9af5cae"}, + {file = "cryptography-41.0.6-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b648fe2a45e426aaee684ddca2632f62ec4613ef362f4d681a9a6283d10e079d"}, + {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5daeb18e7886a358064a68dbcaf441c036cbdb7da52ae744e7b9207b04d3908c"}, + {file = "cryptography-41.0.6-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:068bc551698c234742c40049e46840843f3d98ad7ce265fd2bd4ec0d11306596"}, + {file = "cryptography-41.0.6-cp37-abi3-win32.whl", hash = "sha256:2132d5865eea673fe6712c2ed5fb4fa49dba10768bb4cc798345748380ee3660"}, + {file = "cryptography-41.0.6-cp37-abi3-win_amd64.whl", hash = "sha256:48783b7e2bef51224020efb61b42704207dde583d7e371ef8fc2a5fb6c0aabc7"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:8efb2af8d4ba9dbc9c9dd8f04d19a7abb5b49eab1f3694e7b5a16a5fc2856f5c"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c5a550dc7a3b50b116323e3d376241829fd326ac47bc195e04eb33a8170902a9"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:85abd057699b98fce40b41737afb234fef05c67e116f6f3650782c10862c43da"}, + {file = "cryptography-41.0.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f39812f70fc5c71a15aa3c97b2bbe213c3f2a460b79bd21c40d033bb34a9bf36"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:742ae5e9a2310e9dade7932f9576606836ed174da3c7d26bc3d3ab4bd49b9f65"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:35f3f288e83c3f6f10752467c48919a7a94b7d88cc00b0668372a0d2ad4f8ead"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4d03186af98b1c01a4eda396b137f29e4e3fb0173e30f885e27acec8823c1b09"}, + {file = "cryptography-41.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:b27a7fd4229abef715e064269d98a7e2909ebf92eb6912a9603c7e14c181928c"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:398ae1fc711b5eb78e977daa3cbf47cec20f2c08c5da129b7a296055fbb22aed"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7e00fb556bda398b99b0da289ce7053639d33b572847181d6483ad89835115f6"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:60e746b11b937911dc70d164060d28d273e31853bb359e2b2033c9e93e6f3c43"}, + {file = "cryptography-41.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3288acccef021e3c3c10d58933f44e8602cf04dba96d9796d70d537bb2f4bbc4"}, + {file = "cryptography-41.0.6.tar.gz", hash = 
"sha256:422e3e31d63743855e43e5a6fcc8b4acab860f560f9321b0ee6269cc7ed70cc3"}, ] [package.dependencies] From cb4f49f55005beeb5b2501520ec3aca178886be4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 13:16:40 +0000 Subject: [PATCH 186/262] Bump tox from 4.11.3 to 4.11.4 (#1534) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 52024d8e3c..d7fba89251 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4012,13 +4012,13 @@ files = [ [[package]] name = "tox" -version = "4.11.3" +version = "4.11.4" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.11.3-py3-none-any.whl", hash = "sha256:599af5e5bb0cad0148ac1558a0b66f8fff219ef88363483b8d92a81e4246f28f"}, - {file = "tox-4.11.3.tar.gz", hash = "sha256:5039f68276461fae6a9452a3b2c7295798f00a0e92edcd9a3b78ba1a73577951"}, + {file = "tox-4.11.4-py3-none-any.whl", hash = "sha256:2adb83d68f27116812b69aa36676a8d6a52249cb0d173649de0e7d0c2e3e7229"}, + {file = "tox-4.11.4.tar.gz", hash = "sha256:73a7240778fabf305aeb05ab8ea26e575e042ab5a18d71d0ed13e343a51d6ce1"}, ] [package.dependencies] From 18df5d5a7b06afd23a31273250cc90be11b092dc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 13:17:08 +0000 Subject: [PATCH 187/262] Bump types-psycopg2 from 2.9.21.17 to 2.9.21.18 (#1533) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d7fba89251..14cee444e8 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4153,13 +4153,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.17" +version = "2.9.21.18" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.7" files = [ - {file = "types-psycopg2-2.9.21.17.tar.gz", hash = "sha256:dc84bef202eae39e59961c1174f4d3845f06f62d2704733b1be710c3be0c3b29"}, - {file = "types_psycopg2-2.9.21.17-py3-none-any.whl", hash = "sha256:635a5ee8fd11ec6a056494ca6533ab66475d967df9a4f8d4f8209887ba61ad2c"}, + {file = "types-psycopg2-2.9.21.18.tar.gz", hash = "sha256:5082c61e8e400ac9eda06ec2e0f11a9885c575339bc1bf9b61754021fb259de7"}, + {file = "types_psycopg2-2.9.21.18-py3-none-any.whl", hash = "sha256:66b8a882a878003f963a3a004dff328d7d34bfe1802c8bc625d24db79206130b"}, ] [[package]] From a99a74f93b096288a272135a9bb5b778f78cf9d1 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 29 Nov 2023 10:04:35 -0400 Subject: [PATCH 188/262] Remove old columns from collection table (#1498) Clean up leftover columns from #1494, now that it has gone into a release. --- ...21f500_remove_collection_unused_columns.py | 52 +++++++++++++++++++ core/model/collection.py | 35 ------------- 2 files changed, 52 insertions(+), 35 deletions(-) create mode 100644 alembic/versions/20231103_382d7921f500_remove_collection_unused_columns.py diff --git a/alembic/versions/20231103_382d7921f500_remove_collection_unused_columns.py b/alembic/versions/20231103_382d7921f500_remove_collection_unused_columns.py new file mode 100644 index 0000000000..385d613487 --- /dev/null +++ b/alembic/versions/20231103_382d7921f500_remove_collection_unused_columns.py @@ -0,0 +1,52 @@ +"""Remove collection unused columns. 
+ +Revision ID: 382d7921f500 +Revises: e4b120a8d1d5 +Create Date: 2023-11-03 00:09:10.761425+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "382d7921f500" +down_revision = "e4b120a8d1d5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.drop_table("collections_libraries") + op.drop_column("collections", "external_integration_id") + op.drop_column("collections", "name") + op.drop_column("collections", "external_account_id") + + +def downgrade() -> None: + op.add_column( + "collections", + sa.Column( + "external_account_id", sa.VARCHAR(), autoincrement=False, nullable=True + ), + ) + op.add_column( + "collections", + sa.Column("name", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.add_column( + "collections", + sa.Column( + "external_integration_id", sa.INTEGER(), autoincrement=False, nullable=True + ), + ) + op.create_table( + "collections_libraries", + sa.Column("collection_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.UniqueConstraint( + "collection_id", + "library_id", + name="collections_libraries_collection_id_library_id_key", + ), + ) diff --git a/core/model/collection.py b/core/model/collection.py index 5c6c79feed..e0efae6699 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -8,7 +8,6 @@ ForeignKey, Integer, Table, - Unicode, UniqueConstraint, exists, select, @@ -48,26 +47,8 @@ class Collection(Base, HasSessionCache): __tablename__ = "collections" id = Column(Integer, primary_key=True, nullable=False) - # TODO: This should no longer be used. And will be removed in the next release. - # Collections store their configurations in integration configurations now. - # This is only left here in case there needs to be a rollback to the current - # release. - _name_deprecated = Column("name", Unicode) - DATA_SOURCE_NAME_SETTING = "data_source" - # TODO: This should no longer be used. And will be removed in the next release. - # Collections store their configurations in integration configurations now. - # This is only left here in case there needs to be a rollback to the current - # release. - _external_account_id_deprecated = Column("external_account_id", Unicode) - - # TODO: This should no longer be used. And will be removed in the next release. - # Collections store their configurations in integration configurations now. - # This is only left here in case there needs to be a rollback to the current - # release. - _external_integration_id_deprecated = Column("external_integration_id", Integer) - # How do we connect to the provider of this collection? Any url, # authentication information, or additional configuration goes # into the external integration, as does the 'protocol', which @@ -624,22 +605,6 @@ def delete(self, search_index: ExternalSearchIndex | None = None) -> None: _db.commit() -# TODO: This should no longer be used. And will be removed in the next release. -# Collections store their configurations in integration configurations now. -# This is only left here in case there needs to be a rollback to the current -# release. 
-_collections_libraries_deprecated: Table = Table( - "collections_libraries", - Base.metadata, - Column( - "collection_id", - Integer, - ), - Column("library_id", Integer), - UniqueConstraint("collection_id", "library_id"), -) - - collections_identifiers: Table = Table( "collections_identifiers", Base.metadata, From c34943b7e887ed5a2a07746980718eaf6cf5f93e Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Wed, 29 Nov 2023 11:44:38 -0500 Subject: [PATCH 189/262] Count all eligible licenses (PP-527) (#1535) * Active loans are already accounted for. Because `License.checkouts_available` already accounts for active loans, we were effectively reducing the number of concurrent loans available. --- core/model/licensing.py | 24 ++++++++++-------------- tests/core/models/test_licensing.py | 14 +++++++------- 2 files changed, 17 insertions(+), 21 deletions(-) diff --git a/core/model/licensing.py b/core/model/licensing.py index 31b2f8fb73..1e463dda32 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -146,6 +146,15 @@ class License(Base, LicenseFunctions): __table_args__ = (UniqueConstraint("identifier", "license_pool_id"),) + @property + def is_available_for_borrowing(self) -> bool: + """Can this license currently be used to borrow a book?""" + return ( + not self.is_inactive + and self.checkouts_available is not None + and self.checkouts_available > 0 + ) + def loan_to(self, patron: Patron, **kwargs) -> Tuple[Loan, bool]: loan, is_new = self.license_pool.loan_to(patron, **kwargs) loan.license = self @@ -1081,21 +1090,8 @@ def best_available_license(self) -> License | None: offer that model. """ best: Optional[License] = None - now = utc_now() - - for license in self.licenses: - if license.is_inactive: - continue - - active_loan_count = len( - [l for l in license.loans if not l.end or l.end > now] - ) - checkouts_available = ( - license.checkouts_available if license.checkouts_available else 0 - ) - if active_loan_count >= checkouts_available: - continue + for license in (l for l in self.licenses if l.is_available_for_borrowing): if ( not best or (license.is_time_limited and not best.is_time_limited) diff --git a/tests/core/models/test_licensing.py b/tests/core/models/test_licensing.py index be80c6d4d7..c152bbdc14 100644 --- a/tests/core/models/test_licensing.py +++ b/tests/core/models/test_licensing.py @@ -481,32 +481,32 @@ def test_best_available_license(self, licenses: TestLicenseFixture): # First, we use the time-limited license that's expiring first. assert time_limited_2 == licenses.pool.best_available_license() - time_limited_2.loan_to(licenses.db.patron()) + time_limited_2.checkout() # When that's not available, we use the next time-limited license. assert licenses.time_limited == licenses.pool.best_available_license() - licenses.time_limited.loan_to(licenses.db.patron()) + licenses.time_limited.checkout() # The time-and-loan-limited license also counts as time-limited for this. assert licenses.time_and_loan_limited == licenses.pool.best_available_license() - licenses.time_and_loan_limited.loan_to(licenses.db.patron()) + licenses.time_and_loan_limited.checkout() # Next is the perpetual license. assert licenses.perpetual == licenses.pool.best_available_license() - licenses.perpetual.loan_to(licenses.db.patron()) + licenses.perpetual.checkout() # Then the loan-limited license with the most remaining checkouts. 
assert licenses.loan_limited == licenses.pool.best_available_license() - licenses.loan_limited.loan_to(licenses.db.patron()) + licenses.loan_limited.checkout() # That license allows 2 concurrent checkouts, so it's still the # best license until it's checked out again. assert licenses.loan_limited == licenses.pool.best_available_license() - licenses.loan_limited.loan_to(licenses.db.patron()) + licenses.loan_limited.checkout() # There's one more loan-limited license. assert loan_limited_2 == licenses.pool.best_available_license() - loan_limited_2.loan_to(licenses.db.patron()) + loan_limited_2.checkout() # Now all licenses are either loaned out or expired. assert None == licenses.pool.best_available_license() From fb336c2b920c37a5d5262d9db49d0fe86c1508d1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 18:00:56 +0000 Subject: [PATCH 190/262] Bump bcrypt from 4.1.0 to 4.1.1 (#1537) --- poetry.lock | 34 +++++++++++++++++----------------- 1 file changed, 17 insertions(+), 17 deletions(-) diff --git a/poetry.lock b/poetry.lock index 14cee444e8..3cb47f28f5 100644 --- a/poetry.lock +++ b/poetry.lock @@ -74,27 +74,27 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "bcrypt" -version = "4.1.0" +version = "4.1.1" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.0-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:a601e52d0318142d1de84ab213ae062a10168c1acd721a2125bcf97d8646809b"}, - {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e8e085b188827087bcda882a2ca14843164cde43d83aca02a67b94ed68b8d1f"}, - {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e86108bd26137c5acb86fdf5696a30433c7e9e6a81e3aef6c3746cb9ac535a8"}, - {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d9151b2098bf5598954a5d731c66c4e84321d3f4b9f167d4b73084df6d3958d"}, - {file = "bcrypt-4.1.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:30be7a77166a97f85ec2a94100e9841ea97c38ca5a93335111fe96cd485ba250"}, - {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b5dea889347e80dbd86442b989128e12812b181a40ae4db496388ad36a8fe2b7"}, - {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:544a13c2555027d1042d249ba0e3a302cba105224420f06d20e61179207a7e02"}, - {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:adb15ff096c9cfdb1b152a5c032f1d4f7390eabd98fd27b0d789c536ef9e7b40"}, - {file = "bcrypt-4.1.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81458aac2577582e22d9d2682a457992827472ba5010e9e77431317dfe804c98"}, - {file = "bcrypt-4.1.0-cp37-abi3-win32.whl", hash = "sha256:69740306830e26479a15e3686027aae67b2250e2a973b3f303bcabc1eb224f77"}, - {file = "bcrypt-4.1.0-cp37-abi3-win_amd64.whl", hash = "sha256:15b9865d3fb52d30c8301f13ab074006dbacc219539470f93c13fd720cdc1558"}, - {file = "bcrypt-4.1.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4d9330d165e0079a32e998d0e1dfab00ada7062070b759ab793c3331ab7e9d78"}, - {file = "bcrypt-4.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d068ba10dfaf5ea7260cf703109f77350043b7efe58d6d6e4027694e8144989f"}, - {file = "bcrypt-4.1.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:e16655bfe3077223d8b8e00c81a6d21b78b9b47a20b57a052e26c8ec4cdc7613"}, - {file = 
"bcrypt-4.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:4417aa6c0701b8c20136d845198555f61e23d1ee64a8c33a13a9f2d6b6ed531c"}, - {file = "bcrypt-4.1.0.tar.gz", hash = "sha256:8807681e040e89ee3201249a7849342748e643b4bb2749dfc8dd1b34f6baa349"}, + {file = "bcrypt-4.1.1-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:2e197534c884336f9020c1f3a8efbaab0aa96fc798068cb2da9c671818b7fbb0"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d573885b637815a7f3a3cd5f87724d7d0822da64b0ab0aa7f7c78bae534e86dc"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab33473f973e8058d1b2df8d6e095d237c49fbf7a02b527541a86a5d1dc4444"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fb931cd004a7ad36a89789caf18a54c20287ec1cd62161265344b9c4554fdb2e"}, + {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:12f40f78dcba4aa7d1354d35acf45fae9488862a4fb695c7eeda5ace6aae273f"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2ade10e8613a3b8446214846d3ddbd56cfe9205a7d64742f0b75458c868f7492"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f33b385c3e80b5a26b3a5e148e6165f873c1c202423570fdf45fe34e00e5f3e5"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:755b9d27abcab678e0b8fb4d0abdebeea1f68dd1183b3f518bad8d31fa77d8be"}, + {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a7a7b8a87e51e5e8ca85b9fdaf3a5dc7aaf123365a09be7a27883d54b9a0c403"}, + {file = "bcrypt-4.1.1-cp37-abi3-win32.whl", hash = "sha256:3d6c4e0d6963c52f8142cdea428e875042e7ce8c84812d8e5507bd1e42534e07"}, + {file = "bcrypt-4.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:14d41933510717f98aac63378b7956bbe548986e435df173c841d7f2bd0b2de7"}, + {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24c2ebd287b5b11016f31d506ca1052d068c3f9dc817160628504690376ff050"}, + {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:476aa8e8aca554260159d4c7a97d6be529c8e177dbc1d443cb6b471e24e82c74"}, + {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12611c4b0a8b1c461646228344784a1089bc0c49975680a2f54f516e71e9b79e"}, + {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6450538a0fc32fb7ce4c6d511448c54c4ff7640b2ed81badf9898dcb9e5b737"}, + {file = "bcrypt-4.1.1.tar.gz", hash = "sha256:df37f5418d4f1cdcff845f60e747a015389fa4e63703c918330865e06ad80007"}, ] [package.extras] From 13fac2cbae8585380b7ed7e199dd10d599fc3c41 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 29 Nov 2023 18:01:35 +0000 Subject: [PATCH 191/262] Bump boto3 from 1.29.1 to 1.33.2 (#1538) --- poetry.lock | 855 ++++++++++++++++++++++++++-------------------------- 1 file changed, 434 insertions(+), 421 deletions(-) diff --git a/poetry.lock b/poetry.lock index 3cb47f28f5..edbbf04906 100644 --- a/poetry.lock +++ b/poetry.lock @@ -132,421 +132,434 @@ files = [ [[package]] name = "boto3" -version = "1.29.1" +version = "1.33.2" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.29.1-py3-none-any.whl", hash = "sha256:192695305fa65012d21f78ee852b91cb56dd571e84d51fb71f756302bf19d23f"}, - {file = "boto3-1.29.1.tar.gz", hash = "sha256:20285ebf4e98b2905a88aeb162b4f77ff908b2e3e31038b3223e593789290aa3"}, + {file = 
"boto3-1.33.2-py3-none-any.whl", hash = "sha256:fc7c0dd5fa74ae0d57e11747695bdba4ad164e62dee35db15b43762c392fbd92"}, + {file = "boto3-1.33.2.tar.gz", hash = "sha256:70626598dd6698d6da8f2854a1ae5010f175572e2a465b2aa86685c745c1013c"}, ] [package.dependencies] -botocore = ">=1.32.1,<1.33.0" +botocore = ">=1.33.2,<1.34.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.7.0,<0.8.0" +s3transfer = ">=0.8.0,<0.9.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.29.1" -description = "Type annotations for boto3 1.29.1 generated with mypy-boto3-builder 7.20.3" +version = "1.33.2" +description = "Type annotations for boto3 1.33.2 generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "boto3-stubs-1.29.1.tar.gz", hash = "sha256:6dfe6af913b1a4a43bc8d62c98bf98ec2f1ad3474fabeacc7d9a521c5bd689da"}, - {file = "boto3_stubs-1.29.1-py3-none-any.whl", hash = "sha256:3a923b7895f06b079f48cf316d69c3b61e02ca67fdc73c07f864ab09c448723d"}, + {file = "boto3-stubs-1.33.2.tar.gz", hash = "sha256:2e05ca8000f1feefe2b2efa3bef82367f97979ca2f47c39d7e036a2a399407bb"}, + {file = "boto3_stubs-1.33.2-py3-none-any.whl", hash = "sha256:53c42cb2336694edbdf1a666636442d6b2dab7e9ac7c4707e35e0074f6638b40"}, ] [package.dependencies] -boto3 = {version = "1.29.1", optional = true, markers = "extra == \"boto3\""} -botocore = {version = "1.32.1", optional = true, markers = "extra == \"boto3\""} +boto3 = {version = "1.33.2", optional = true, markers = "extra == \"boto3\""} +botocore = {version = "1.33.2", optional = true, markers = "extra == \"boto3\""} botocore-stubs = "*" -mypy-boto3-cloudformation = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-dynamodb = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-ec2 = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-lambda = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-logs = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"logs\""} -mypy-boto3-rds = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-s3 = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} -mypy-boto3-sqs = {version = ">=1.29.0,<1.30.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-cloudformation = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-dynamodb = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-ec2 = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-lambda = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-logs = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"logs\""} +mypy-boto3-rds = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-s3 = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} +mypy-boto3-sqs = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} types-s3transfer = "*" typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.29.0,<1.30.0)"] -account = 
["mypy-boto3-account (>=1.29.0,<1.30.0)"] -acm = ["mypy-boto3-acm (>=1.29.0,<1.30.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.29.0,<1.30.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.29.0,<1.30.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.29.0,<1.30.0)", "mypy-boto3-account (>=1.29.0,<1.30.0)", "mypy-boto3-acm (>=1.29.0,<1.30.0)", "mypy-boto3-acm-pca (>=1.29.0,<1.30.0)", "mypy-boto3-alexaforbusiness (>=1.29.0,<1.30.0)", "mypy-boto3-amp (>=1.29.0,<1.30.0)", "mypy-boto3-amplify (>=1.29.0,<1.30.0)", "mypy-boto3-amplifybackend (>=1.29.0,<1.30.0)", "mypy-boto3-amplifyuibuilder (>=1.29.0,<1.30.0)", "mypy-boto3-apigateway (>=1.29.0,<1.30.0)", "mypy-boto3-apigatewaymanagementapi (>=1.29.0,<1.30.0)", "mypy-boto3-apigatewayv2 (>=1.29.0,<1.30.0)", "mypy-boto3-appconfig (>=1.29.0,<1.30.0)", "mypy-boto3-appconfigdata (>=1.29.0,<1.30.0)", "mypy-boto3-appfabric (>=1.29.0,<1.30.0)", "mypy-boto3-appflow (>=1.29.0,<1.30.0)", "mypy-boto3-appintegrations (>=1.29.0,<1.30.0)", "mypy-boto3-application-autoscaling (>=1.29.0,<1.30.0)", "mypy-boto3-application-insights (>=1.29.0,<1.30.0)", "mypy-boto3-applicationcostprofiler (>=1.29.0,<1.30.0)", "mypy-boto3-appmesh (>=1.29.0,<1.30.0)", "mypy-boto3-apprunner (>=1.29.0,<1.30.0)", "mypy-boto3-appstream (>=1.29.0,<1.30.0)", "mypy-boto3-appsync (>=1.29.0,<1.30.0)", "mypy-boto3-arc-zonal-shift (>=1.29.0,<1.30.0)", "mypy-boto3-athena (>=1.29.0,<1.30.0)", "mypy-boto3-auditmanager (>=1.29.0,<1.30.0)", "mypy-boto3-autoscaling (>=1.29.0,<1.30.0)", "mypy-boto3-autoscaling-plans (>=1.29.0,<1.30.0)", "mypy-boto3-backup (>=1.29.0,<1.30.0)", "mypy-boto3-backup-gateway (>=1.29.0,<1.30.0)", "mypy-boto3-backupstorage (>=1.29.0,<1.30.0)", "mypy-boto3-batch (>=1.29.0,<1.30.0)", "mypy-boto3-bedrock (>=1.29.0,<1.30.0)", "mypy-boto3-bedrock-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-billingconductor (>=1.29.0,<1.30.0)", "mypy-boto3-braket (>=1.29.0,<1.30.0)", "mypy-boto3-budgets (>=1.29.0,<1.30.0)", "mypy-boto3-ce (>=1.29.0,<1.30.0)", "mypy-boto3-chime (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-identity (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-meetings (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-messaging (>=1.29.0,<1.30.0)", "mypy-boto3-chime-sdk-voice (>=1.29.0,<1.30.0)", "mypy-boto3-cleanrooms (>=1.29.0,<1.30.0)", "mypy-boto3-cloud9 (>=1.29.0,<1.30.0)", "mypy-boto3-cloudcontrol (>=1.29.0,<1.30.0)", "mypy-boto3-clouddirectory (>=1.29.0,<1.30.0)", "mypy-boto3-cloudformation (>=1.29.0,<1.30.0)", "mypy-boto3-cloudfront (>=1.29.0,<1.30.0)", "mypy-boto3-cloudhsm (>=1.29.0,<1.30.0)", "mypy-boto3-cloudhsmv2 (>=1.29.0,<1.30.0)", "mypy-boto3-cloudsearch (>=1.29.0,<1.30.0)", "mypy-boto3-cloudsearchdomain (>=1.29.0,<1.30.0)", "mypy-boto3-cloudtrail (>=1.29.0,<1.30.0)", "mypy-boto3-cloudtrail-data (>=1.29.0,<1.30.0)", "mypy-boto3-cloudwatch (>=1.29.0,<1.30.0)", "mypy-boto3-codeartifact (>=1.29.0,<1.30.0)", "mypy-boto3-codebuild (>=1.29.0,<1.30.0)", "mypy-boto3-codecatalyst (>=1.29.0,<1.30.0)", "mypy-boto3-codecommit (>=1.29.0,<1.30.0)", "mypy-boto3-codedeploy (>=1.29.0,<1.30.0)", "mypy-boto3-codeguru-reviewer (>=1.29.0,<1.30.0)", "mypy-boto3-codeguru-security (>=1.29.0,<1.30.0)", "mypy-boto3-codeguruprofiler (>=1.29.0,<1.30.0)", "mypy-boto3-codepipeline (>=1.29.0,<1.30.0)", "mypy-boto3-codestar (>=1.29.0,<1.30.0)", "mypy-boto3-codestar-connections (>=1.29.0,<1.30.0)", "mypy-boto3-codestar-notifications (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-identity (>=1.29.0,<1.30.0)", "mypy-boto3-cognito-idp (>=1.29.0,<1.30.0)", 
"mypy-boto3-cognito-sync (>=1.29.0,<1.30.0)", "mypy-boto3-comprehend (>=1.29.0,<1.30.0)", "mypy-boto3-comprehendmedical (>=1.29.0,<1.30.0)", "mypy-boto3-compute-optimizer (>=1.29.0,<1.30.0)", "mypy-boto3-config (>=1.29.0,<1.30.0)", "mypy-boto3-connect (>=1.29.0,<1.30.0)", "mypy-boto3-connect-contact-lens (>=1.29.0,<1.30.0)", "mypy-boto3-connectcampaigns (>=1.29.0,<1.30.0)", "mypy-boto3-connectcases (>=1.29.0,<1.30.0)", "mypy-boto3-connectparticipant (>=1.29.0,<1.30.0)", "mypy-boto3-controltower (>=1.29.0,<1.30.0)", "mypy-boto3-cur (>=1.29.0,<1.30.0)", "mypy-boto3-customer-profiles (>=1.29.0,<1.30.0)", "mypy-boto3-databrew (>=1.29.0,<1.30.0)", "mypy-boto3-dataexchange (>=1.29.0,<1.30.0)", "mypy-boto3-datapipeline (>=1.29.0,<1.30.0)", "mypy-boto3-datasync (>=1.29.0,<1.30.0)", "mypy-boto3-datazone (>=1.29.0,<1.30.0)", "mypy-boto3-dax (>=1.29.0,<1.30.0)", "mypy-boto3-detective (>=1.29.0,<1.30.0)", "mypy-boto3-devicefarm (>=1.29.0,<1.30.0)", "mypy-boto3-devops-guru (>=1.29.0,<1.30.0)", "mypy-boto3-directconnect (>=1.29.0,<1.30.0)", "mypy-boto3-discovery (>=1.29.0,<1.30.0)", "mypy-boto3-dlm (>=1.29.0,<1.30.0)", "mypy-boto3-dms (>=1.29.0,<1.30.0)", "mypy-boto3-docdb (>=1.29.0,<1.30.0)", "mypy-boto3-docdb-elastic (>=1.29.0,<1.30.0)", "mypy-boto3-drs (>=1.29.0,<1.30.0)", "mypy-boto3-ds (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodb (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodbstreams (>=1.29.0,<1.30.0)", "mypy-boto3-ebs (>=1.29.0,<1.30.0)", "mypy-boto3-ec2 (>=1.29.0,<1.30.0)", "mypy-boto3-ec2-instance-connect (>=1.29.0,<1.30.0)", "mypy-boto3-ecr (>=1.29.0,<1.30.0)", "mypy-boto3-ecr-public (>=1.29.0,<1.30.0)", "mypy-boto3-ecs (>=1.29.0,<1.30.0)", "mypy-boto3-efs (>=1.29.0,<1.30.0)", "mypy-boto3-eks (>=1.29.0,<1.30.0)", "mypy-boto3-elastic-inference (>=1.29.0,<1.30.0)", "mypy-boto3-elasticache (>=1.29.0,<1.30.0)", "mypy-boto3-elasticbeanstalk (>=1.29.0,<1.30.0)", "mypy-boto3-elastictranscoder (>=1.29.0,<1.30.0)", "mypy-boto3-elb (>=1.29.0,<1.30.0)", "mypy-boto3-elbv2 (>=1.29.0,<1.30.0)", "mypy-boto3-emr (>=1.29.0,<1.30.0)", "mypy-boto3-emr-containers (>=1.29.0,<1.30.0)", "mypy-boto3-emr-serverless (>=1.29.0,<1.30.0)", "mypy-boto3-entityresolution (>=1.29.0,<1.30.0)", "mypy-boto3-es (>=1.29.0,<1.30.0)", "mypy-boto3-events (>=1.29.0,<1.30.0)", "mypy-boto3-evidently (>=1.29.0,<1.30.0)", "mypy-boto3-finspace (>=1.29.0,<1.30.0)", "mypy-boto3-finspace-data (>=1.29.0,<1.30.0)", "mypy-boto3-firehose (>=1.29.0,<1.30.0)", "mypy-boto3-fis (>=1.29.0,<1.30.0)", "mypy-boto3-fms (>=1.29.0,<1.30.0)", "mypy-boto3-forecast (>=1.29.0,<1.30.0)", "mypy-boto3-forecastquery (>=1.29.0,<1.30.0)", "mypy-boto3-frauddetector (>=1.29.0,<1.30.0)", "mypy-boto3-fsx (>=1.29.0,<1.30.0)", "mypy-boto3-gamelift (>=1.29.0,<1.30.0)", "mypy-boto3-glacier (>=1.29.0,<1.30.0)", "mypy-boto3-globalaccelerator (>=1.29.0,<1.30.0)", "mypy-boto3-glue (>=1.29.0,<1.30.0)", "mypy-boto3-grafana (>=1.29.0,<1.30.0)", "mypy-boto3-greengrass (>=1.29.0,<1.30.0)", "mypy-boto3-greengrassv2 (>=1.29.0,<1.30.0)", "mypy-boto3-groundstation (>=1.29.0,<1.30.0)", "mypy-boto3-guardduty (>=1.29.0,<1.30.0)", "mypy-boto3-health (>=1.29.0,<1.30.0)", "mypy-boto3-healthlake (>=1.29.0,<1.30.0)", "mypy-boto3-honeycode (>=1.29.0,<1.30.0)", "mypy-boto3-iam (>=1.29.0,<1.30.0)", "mypy-boto3-identitystore (>=1.29.0,<1.30.0)", "mypy-boto3-imagebuilder (>=1.29.0,<1.30.0)", "mypy-boto3-importexport (>=1.29.0,<1.30.0)", "mypy-boto3-inspector (>=1.29.0,<1.30.0)", "mypy-boto3-inspector2 (>=1.29.0,<1.30.0)", "mypy-boto3-internetmonitor (>=1.29.0,<1.30.0)", "mypy-boto3-iot (>=1.29.0,<1.30.0)", 
"mypy-boto3-iot-data (>=1.29.0,<1.30.0)", "mypy-boto3-iot-jobs-data (>=1.29.0,<1.30.0)", "mypy-boto3-iot-roborunner (>=1.29.0,<1.30.0)", "mypy-boto3-iot1click-devices (>=1.29.0,<1.30.0)", "mypy-boto3-iot1click-projects (>=1.29.0,<1.30.0)", "mypy-boto3-iotanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-iotdeviceadvisor (>=1.29.0,<1.30.0)", "mypy-boto3-iotevents (>=1.29.0,<1.30.0)", "mypy-boto3-iotevents-data (>=1.29.0,<1.30.0)", "mypy-boto3-iotfleethub (>=1.29.0,<1.30.0)", "mypy-boto3-iotfleetwise (>=1.29.0,<1.30.0)", "mypy-boto3-iotsecuretunneling (>=1.29.0,<1.30.0)", "mypy-boto3-iotsitewise (>=1.29.0,<1.30.0)", "mypy-boto3-iotthingsgraph (>=1.29.0,<1.30.0)", "mypy-boto3-iottwinmaker (>=1.29.0,<1.30.0)", "mypy-boto3-iotwireless (>=1.29.0,<1.30.0)", "mypy-boto3-ivs (>=1.29.0,<1.30.0)", "mypy-boto3-ivs-realtime (>=1.29.0,<1.30.0)", "mypy-boto3-ivschat (>=1.29.0,<1.30.0)", "mypy-boto3-kafka (>=1.29.0,<1.30.0)", "mypy-boto3-kafkaconnect (>=1.29.0,<1.30.0)", "mypy-boto3-kendra (>=1.29.0,<1.30.0)", "mypy-boto3-kendra-ranking (>=1.29.0,<1.30.0)", "mypy-boto3-keyspaces (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-archived-media (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-media (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-signaling (>=1.29.0,<1.30.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.29.0,<1.30.0)", "mypy-boto3-kinesisvideo (>=1.29.0,<1.30.0)", "mypy-boto3-kms (>=1.29.0,<1.30.0)", "mypy-boto3-lakeformation (>=1.29.0,<1.30.0)", "mypy-boto3-lambda (>=1.29.0,<1.30.0)", "mypy-boto3-launch-wizard (>=1.29.0,<1.30.0)", "mypy-boto3-lex-models (>=1.29.0,<1.30.0)", "mypy-boto3-lex-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-lexv2-models (>=1.29.0,<1.30.0)", "mypy-boto3-lexv2-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.29.0,<1.30.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.29.0,<1.30.0)", "mypy-boto3-lightsail (>=1.29.0,<1.30.0)", "mypy-boto3-location (>=1.29.0,<1.30.0)", "mypy-boto3-logs (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutequipment (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutmetrics (>=1.29.0,<1.30.0)", "mypy-boto3-lookoutvision (>=1.29.0,<1.30.0)", "mypy-boto3-m2 (>=1.29.0,<1.30.0)", "mypy-boto3-machinelearning (>=1.29.0,<1.30.0)", "mypy-boto3-macie (>=1.29.0,<1.30.0)", "mypy-boto3-macie2 (>=1.29.0,<1.30.0)", "mypy-boto3-managedblockchain (>=1.29.0,<1.30.0)", "mypy-boto3-managedblockchain-query (>=1.29.0,<1.30.0)", "mypy-boto3-marketplace-catalog (>=1.29.0,<1.30.0)", "mypy-boto3-marketplace-entitlement (>=1.29.0,<1.30.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.29.0,<1.30.0)", "mypy-boto3-mediaconnect (>=1.29.0,<1.30.0)", "mypy-boto3-mediaconvert (>=1.29.0,<1.30.0)", "mypy-boto3-medialive (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackage (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackage-vod (>=1.29.0,<1.30.0)", "mypy-boto3-mediapackagev2 (>=1.29.0,<1.30.0)", "mypy-boto3-mediastore (>=1.29.0,<1.30.0)", "mypy-boto3-mediastore-data (>=1.29.0,<1.30.0)", "mypy-boto3-mediatailor (>=1.29.0,<1.30.0)", "mypy-boto3-medical-imaging (>=1.29.0,<1.30.0)", "mypy-boto3-memorydb (>=1.29.0,<1.30.0)", "mypy-boto3-meteringmarketplace (>=1.29.0,<1.30.0)", "mypy-boto3-mgh (>=1.29.0,<1.30.0)", "mypy-boto3-mgn (>=1.29.0,<1.30.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhub-config (>=1.29.0,<1.30.0)", 
"mypy-boto3-migrationhuborchestrator (>=1.29.0,<1.30.0)", "mypy-boto3-migrationhubstrategy (>=1.29.0,<1.30.0)", "mypy-boto3-mobile (>=1.29.0,<1.30.0)", "mypy-boto3-mq (>=1.29.0,<1.30.0)", "mypy-boto3-mturk (>=1.29.0,<1.30.0)", "mypy-boto3-mwaa (>=1.29.0,<1.30.0)", "mypy-boto3-neptune (>=1.29.0,<1.30.0)", "mypy-boto3-neptunedata (>=1.29.0,<1.30.0)", "mypy-boto3-network-firewall (>=1.29.0,<1.30.0)", "mypy-boto3-networkmanager (>=1.29.0,<1.30.0)", "mypy-boto3-nimble (>=1.29.0,<1.30.0)", "mypy-boto3-oam (>=1.29.0,<1.30.0)", "mypy-boto3-omics (>=1.29.0,<1.30.0)", "mypy-boto3-opensearch (>=1.29.0,<1.30.0)", "mypy-boto3-opensearchserverless (>=1.29.0,<1.30.0)", "mypy-boto3-opsworks (>=1.29.0,<1.30.0)", "mypy-boto3-opsworkscm (>=1.29.0,<1.30.0)", "mypy-boto3-organizations (>=1.29.0,<1.30.0)", "mypy-boto3-osis (>=1.29.0,<1.30.0)", "mypy-boto3-outposts (>=1.29.0,<1.30.0)", "mypy-boto3-panorama (>=1.29.0,<1.30.0)", "mypy-boto3-payment-cryptography (>=1.29.0,<1.30.0)", "mypy-boto3-payment-cryptography-data (>=1.29.0,<1.30.0)", "mypy-boto3-pca-connector-ad (>=1.29.0,<1.30.0)", "mypy-boto3-personalize (>=1.29.0,<1.30.0)", "mypy-boto3-personalize-events (>=1.29.0,<1.30.0)", "mypy-boto3-personalize-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-pi (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-email (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-sms-voice (>=1.29.0,<1.30.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.29.0,<1.30.0)", "mypy-boto3-pipes (>=1.29.0,<1.30.0)", "mypy-boto3-polly (>=1.29.0,<1.30.0)", "mypy-boto3-pricing (>=1.29.0,<1.30.0)", "mypy-boto3-privatenetworks (>=1.29.0,<1.30.0)", "mypy-boto3-proton (>=1.29.0,<1.30.0)", "mypy-boto3-qldb (>=1.29.0,<1.30.0)", "mypy-boto3-qldb-session (>=1.29.0,<1.30.0)", "mypy-boto3-quicksight (>=1.29.0,<1.30.0)", "mypy-boto3-ram (>=1.29.0,<1.30.0)", "mypy-boto3-rbin (>=1.29.0,<1.30.0)", "mypy-boto3-rds (>=1.29.0,<1.30.0)", "mypy-boto3-rds-data (>=1.29.0,<1.30.0)", "mypy-boto3-redshift (>=1.29.0,<1.30.0)", "mypy-boto3-redshift-data (>=1.29.0,<1.30.0)", "mypy-boto3-redshift-serverless (>=1.29.0,<1.30.0)", "mypy-boto3-rekognition (>=1.29.0,<1.30.0)", "mypy-boto3-resiliencehub (>=1.29.0,<1.30.0)", "mypy-boto3-resource-explorer-2 (>=1.29.0,<1.30.0)", "mypy-boto3-resource-groups (>=1.29.0,<1.30.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.29.0,<1.30.0)", "mypy-boto3-robomaker (>=1.29.0,<1.30.0)", "mypy-boto3-rolesanywhere (>=1.29.0,<1.30.0)", "mypy-boto3-route53 (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-cluster (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-control-config (>=1.29.0,<1.30.0)", "mypy-boto3-route53-recovery-readiness (>=1.29.0,<1.30.0)", "mypy-boto3-route53domains (>=1.29.0,<1.30.0)", "mypy-boto3-route53resolver (>=1.29.0,<1.30.0)", "mypy-boto3-rum (>=1.29.0,<1.30.0)", "mypy-boto3-s3 (>=1.29.0,<1.30.0)", "mypy-boto3-s3control (>=1.29.0,<1.30.0)", "mypy-boto3-s3outposts (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-edge (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-geospatial (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-metrics (>=1.29.0,<1.30.0)", "mypy-boto3-sagemaker-runtime (>=1.29.0,<1.30.0)", "mypy-boto3-savingsplans (>=1.29.0,<1.30.0)", "mypy-boto3-scheduler (>=1.29.0,<1.30.0)", "mypy-boto3-schemas (>=1.29.0,<1.30.0)", "mypy-boto3-sdb (>=1.29.0,<1.30.0)", "mypy-boto3-secretsmanager (>=1.29.0,<1.30.0)", "mypy-boto3-securityhub (>=1.29.0,<1.30.0)", 
"mypy-boto3-securitylake (>=1.29.0,<1.30.0)", "mypy-boto3-serverlessrepo (>=1.29.0,<1.30.0)", "mypy-boto3-service-quotas (>=1.29.0,<1.30.0)", "mypy-boto3-servicecatalog (>=1.29.0,<1.30.0)", "mypy-boto3-servicecatalog-appregistry (>=1.29.0,<1.30.0)", "mypy-boto3-servicediscovery (>=1.29.0,<1.30.0)", "mypy-boto3-ses (>=1.29.0,<1.30.0)", "mypy-boto3-sesv2 (>=1.29.0,<1.30.0)", "mypy-boto3-shield (>=1.29.0,<1.30.0)", "mypy-boto3-signer (>=1.29.0,<1.30.0)", "mypy-boto3-simspaceweaver (>=1.29.0,<1.30.0)", "mypy-boto3-sms (>=1.29.0,<1.30.0)", "mypy-boto3-sms-voice (>=1.29.0,<1.30.0)", "mypy-boto3-snow-device-management (>=1.29.0,<1.30.0)", "mypy-boto3-snowball (>=1.29.0,<1.30.0)", "mypy-boto3-sns (>=1.29.0,<1.30.0)", "mypy-boto3-sqs (>=1.29.0,<1.30.0)", "mypy-boto3-ssm (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-contacts (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-incidents (>=1.29.0,<1.30.0)", "mypy-boto3-ssm-sap (>=1.29.0,<1.30.0)", "mypy-boto3-sso (>=1.29.0,<1.30.0)", "mypy-boto3-sso-admin (>=1.29.0,<1.30.0)", "mypy-boto3-sso-oidc (>=1.29.0,<1.30.0)", "mypy-boto3-stepfunctions (>=1.29.0,<1.30.0)", "mypy-boto3-storagegateway (>=1.29.0,<1.30.0)", "mypy-boto3-sts (>=1.29.0,<1.30.0)", "mypy-boto3-support (>=1.29.0,<1.30.0)", "mypy-boto3-support-app (>=1.29.0,<1.30.0)", "mypy-boto3-swf (>=1.29.0,<1.30.0)", "mypy-boto3-synthetics (>=1.29.0,<1.30.0)", "mypy-boto3-textract (>=1.29.0,<1.30.0)", "mypy-boto3-timestream-query (>=1.29.0,<1.30.0)", "mypy-boto3-timestream-write (>=1.29.0,<1.30.0)", "mypy-boto3-tnb (>=1.29.0,<1.30.0)", "mypy-boto3-transcribe (>=1.29.0,<1.30.0)", "mypy-boto3-transfer (>=1.29.0,<1.30.0)", "mypy-boto3-translate (>=1.29.0,<1.30.0)", "mypy-boto3-verifiedpermissions (>=1.29.0,<1.30.0)", "mypy-boto3-voice-id (>=1.29.0,<1.30.0)", "mypy-boto3-vpc-lattice (>=1.29.0,<1.30.0)", "mypy-boto3-waf (>=1.29.0,<1.30.0)", "mypy-boto3-waf-regional (>=1.29.0,<1.30.0)", "mypy-boto3-wafv2 (>=1.29.0,<1.30.0)", "mypy-boto3-wellarchitected (>=1.29.0,<1.30.0)", "mypy-boto3-wisdom (>=1.29.0,<1.30.0)", "mypy-boto3-workdocs (>=1.29.0,<1.30.0)", "mypy-boto3-worklink (>=1.29.0,<1.30.0)", "mypy-boto3-workmail (>=1.29.0,<1.30.0)", "mypy-boto3-workmailmessageflow (>=1.29.0,<1.30.0)", "mypy-boto3-workspaces (>=1.29.0,<1.30.0)", "mypy-boto3-workspaces-web (>=1.29.0,<1.30.0)", "mypy-boto3-xray (>=1.29.0,<1.30.0)"] -amp = ["mypy-boto3-amp (>=1.29.0,<1.30.0)"] -amplify = ["mypy-boto3-amplify (>=1.29.0,<1.30.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.29.0,<1.30.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.29.0,<1.30.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.29.0,<1.30.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.29.0,<1.30.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.29.0,<1.30.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.29.0,<1.30.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.29.0,<1.30.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.29.0,<1.30.0)"] -appflow = ["mypy-boto3-appflow (>=1.29.0,<1.30.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.29.0,<1.30.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.29.0,<1.30.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.29.0,<1.30.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.29.0,<1.30.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.29.0,<1.30.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.29.0,<1.30.0)"] -appstream = ["mypy-boto3-appstream (>=1.29.0,<1.30.0)"] -appsync = ["mypy-boto3-appsync (>=1.29.0,<1.30.0)"] -arc-zonal-shift = 
["mypy-boto3-arc-zonal-shift (>=1.29.0,<1.30.0)"] -athena = ["mypy-boto3-athena (>=1.29.0,<1.30.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.29.0,<1.30.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.29.0,<1.30.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.29.0,<1.30.0)"] -backup = ["mypy-boto3-backup (>=1.29.0,<1.30.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.29.0,<1.30.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.29.0,<1.30.0)"] -batch = ["mypy-boto3-batch (>=1.29.0,<1.30.0)"] -bedrock = ["mypy-boto3-bedrock (>=1.29.0,<1.30.0)"] -bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.29.0,<1.30.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.29.0,<1.30.0)"] -boto3 = ["boto3 (==1.29.1)", "botocore (==1.32.1)"] -braket = ["mypy-boto3-braket (>=1.29.0,<1.30.0)"] -budgets = ["mypy-boto3-budgets (>=1.29.0,<1.30.0)"] -ce = ["mypy-boto3-ce (>=1.29.0,<1.30.0)"] -chime = ["mypy-boto3-chime (>=1.29.0,<1.30.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.29.0,<1.30.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.29.0,<1.30.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.29.0,<1.30.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.29.0,<1.30.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.29.0,<1.30.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.29.0,<1.30.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.29.0,<1.30.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.29.0,<1.30.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.29.0,<1.30.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.29.0,<1.30.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.29.0,<1.30.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.29.0,<1.30.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.29.0,<1.30.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.29.0,<1.30.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.29.0,<1.30.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.29.0,<1.30.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.29.0,<1.30.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.29.0,<1.30.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.29.0,<1.30.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.29.0,<1.30.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.29.0,<1.30.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.29.0,<1.30.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.29.0,<1.30.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.29.0,<1.30.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.29.0,<1.30.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.29.0,<1.30.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.29.0,<1.30.0)"] -codestar = ["mypy-boto3-codestar (>=1.29.0,<1.30.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.29.0,<1.30.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.29.0,<1.30.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.29.0,<1.30.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.29.0,<1.30.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.29.0,<1.30.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.29.0,<1.30.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.29.0,<1.30.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.29.0,<1.30.0)"] -config = ["mypy-boto3-config (>=1.29.0,<1.30.0)"] -connect = ["mypy-boto3-connect (>=1.29.0,<1.30.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.29.0,<1.30.0)"] 
-connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.29.0,<1.30.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.29.0,<1.30.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.29.0,<1.30.0)"] -controltower = ["mypy-boto3-controltower (>=1.29.0,<1.30.0)"] -cur = ["mypy-boto3-cur (>=1.29.0,<1.30.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.29.0,<1.30.0)"] -databrew = ["mypy-boto3-databrew (>=1.29.0,<1.30.0)"] -dataexchange = ["mypy-boto3-dataexchange (>=1.29.0,<1.30.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.29.0,<1.30.0)"] -datasync = ["mypy-boto3-datasync (>=1.29.0,<1.30.0)"] -datazone = ["mypy-boto3-datazone (>=1.29.0,<1.30.0)"] -dax = ["mypy-boto3-dax (>=1.29.0,<1.30.0)"] -detective = ["mypy-boto3-detective (>=1.29.0,<1.30.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.29.0,<1.30.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.29.0,<1.30.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.29.0,<1.30.0)"] -discovery = ["mypy-boto3-discovery (>=1.29.0,<1.30.0)"] -dlm = ["mypy-boto3-dlm (>=1.29.0,<1.30.0)"] -dms = ["mypy-boto3-dms (>=1.29.0,<1.30.0)"] -docdb = ["mypy-boto3-docdb (>=1.29.0,<1.30.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.29.0,<1.30.0)"] -drs = ["mypy-boto3-drs (>=1.29.0,<1.30.0)"] -ds = ["mypy-boto3-ds (>=1.29.0,<1.30.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.29.0,<1.30.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.29.0,<1.30.0)"] -ebs = ["mypy-boto3-ebs (>=1.29.0,<1.30.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.29.0,<1.30.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.29.0,<1.30.0)"] -ecr = ["mypy-boto3-ecr (>=1.29.0,<1.30.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.29.0,<1.30.0)"] -ecs = ["mypy-boto3-ecs (>=1.29.0,<1.30.0)"] -efs = ["mypy-boto3-efs (>=1.29.0,<1.30.0)"] -eks = ["mypy-boto3-eks (>=1.29.0,<1.30.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.29.0,<1.30.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.29.0,<1.30.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.29.0,<1.30.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.29.0,<1.30.0)"] -elb = ["mypy-boto3-elb (>=1.29.0,<1.30.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.29.0,<1.30.0)"] -emr = ["mypy-boto3-emr (>=1.29.0,<1.30.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.29.0,<1.30.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.29.0,<1.30.0)"] -entityresolution = ["mypy-boto3-entityresolution (>=1.29.0,<1.30.0)"] -es = ["mypy-boto3-es (>=1.29.0,<1.30.0)"] -essential = ["mypy-boto3-cloudformation (>=1.29.0,<1.30.0)", "mypy-boto3-dynamodb (>=1.29.0,<1.30.0)", "mypy-boto3-ec2 (>=1.29.0,<1.30.0)", "mypy-boto3-lambda (>=1.29.0,<1.30.0)", "mypy-boto3-rds (>=1.29.0,<1.30.0)", "mypy-boto3-s3 (>=1.29.0,<1.30.0)", "mypy-boto3-sqs (>=1.29.0,<1.30.0)"] -events = ["mypy-boto3-events (>=1.29.0,<1.30.0)"] -evidently = ["mypy-boto3-evidently (>=1.29.0,<1.30.0)"] -finspace = ["mypy-boto3-finspace (>=1.29.0,<1.30.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.29.0,<1.30.0)"] -firehose = ["mypy-boto3-firehose (>=1.29.0,<1.30.0)"] -fis = ["mypy-boto3-fis (>=1.29.0,<1.30.0)"] -fms = ["mypy-boto3-fms (>=1.29.0,<1.30.0)"] -forecast = ["mypy-boto3-forecast (>=1.29.0,<1.30.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.29.0,<1.30.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.29.0,<1.30.0)"] -fsx = ["mypy-boto3-fsx (>=1.29.0,<1.30.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.29.0,<1.30.0)"] -glacier = ["mypy-boto3-glacier (>=1.29.0,<1.30.0)"] -globalaccelerator = 
["mypy-boto3-globalaccelerator (>=1.29.0,<1.30.0)"] -glue = ["mypy-boto3-glue (>=1.29.0,<1.30.0)"] -grafana = ["mypy-boto3-grafana (>=1.29.0,<1.30.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.29.0,<1.30.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.29.0,<1.30.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.29.0,<1.30.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.29.0,<1.30.0)"] -health = ["mypy-boto3-health (>=1.29.0,<1.30.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.29.0,<1.30.0)"] -honeycode = ["mypy-boto3-honeycode (>=1.29.0,<1.30.0)"] -iam = ["mypy-boto3-iam (>=1.29.0,<1.30.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.29.0,<1.30.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.29.0,<1.30.0)"] -importexport = ["mypy-boto3-importexport (>=1.29.0,<1.30.0)"] -inspector = ["mypy-boto3-inspector (>=1.29.0,<1.30.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.29.0,<1.30.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.29.0,<1.30.0)"] -iot = ["mypy-boto3-iot (>=1.29.0,<1.30.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.29.0,<1.30.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.29.0,<1.30.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.29.0,<1.30.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.29.0,<1.30.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.29.0,<1.30.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.29.0,<1.30.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.29.0,<1.30.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.29.0,<1.30.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.29.0,<1.30.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.29.0,<1.30.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.29.0,<1.30.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.29.0,<1.30.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.29.0,<1.30.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.29.0,<1.30.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.29.0,<1.30.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.29.0,<1.30.0)"] -ivs = ["mypy-boto3-ivs (>=1.29.0,<1.30.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.29.0,<1.30.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.29.0,<1.30.0)"] -kafka = ["mypy-boto3-kafka (>=1.29.0,<1.30.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.29.0,<1.30.0)"] -kendra = ["mypy-boto3-kendra (>=1.29.0,<1.30.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.29.0,<1.30.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.29.0,<1.30.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.29.0,<1.30.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.29.0,<1.30.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.29.0,<1.30.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.29.0,<1.30.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.29.0,<1.30.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.29.0,<1.30.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.29.0,<1.30.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.29.0,<1.30.0)"] -kms = ["mypy-boto3-kms (>=1.29.0,<1.30.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.29.0,<1.30.0)"] -lambda = ["mypy-boto3-lambda (>=1.29.0,<1.30.0)"] -launch-wizard = ["mypy-boto3-launch-wizard (>=1.29.0,<1.30.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.29.0,<1.30.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.29.0,<1.30.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.29.0,<1.30.0)"] 
-lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.29.0,<1.30.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.29.0,<1.30.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.29.0,<1.30.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.29.0,<1.30.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.29.0,<1.30.0)"] -location = ["mypy-boto3-location (>=1.29.0,<1.30.0)"] -logs = ["mypy-boto3-logs (>=1.29.0,<1.30.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.29.0,<1.30.0)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.29.0,<1.30.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.29.0,<1.30.0)"] -m2 = ["mypy-boto3-m2 (>=1.29.0,<1.30.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.29.0,<1.30.0)"] -macie = ["mypy-boto3-macie (>=1.29.0,<1.30.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.29.0,<1.30.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.29.0,<1.30.0)"] -managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.29.0,<1.30.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.29.0,<1.30.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.29.0,<1.30.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.29.0,<1.30.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.29.0,<1.30.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.29.0,<1.30.0)"] -medialive = ["mypy-boto3-medialive (>=1.29.0,<1.30.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.29.0,<1.30.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.29.0,<1.30.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.29.0,<1.30.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.29.0,<1.30.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.29.0,<1.30.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.29.0,<1.30.0)"] -medical-imaging = ["mypy-boto3-medical-imaging (>=1.29.0,<1.30.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.29.0,<1.30.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.29.0,<1.30.0)"] -mgh = ["mypy-boto3-mgh (>=1.29.0,<1.30.0)"] -mgn = ["mypy-boto3-mgn (>=1.29.0,<1.30.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.29.0,<1.30.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.29.0,<1.30.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.29.0,<1.30.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.29.0,<1.30.0)"] -mobile = ["mypy-boto3-mobile (>=1.29.0,<1.30.0)"] -mq = ["mypy-boto3-mq (>=1.29.0,<1.30.0)"] -mturk = ["mypy-boto3-mturk (>=1.29.0,<1.30.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.29.0,<1.30.0)"] -neptune = ["mypy-boto3-neptune (>=1.29.0,<1.30.0)"] -neptunedata = ["mypy-boto3-neptunedata (>=1.29.0,<1.30.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.29.0,<1.30.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.29.0,<1.30.0)"] -nimble = ["mypy-boto3-nimble (>=1.29.0,<1.30.0)"] -oam = ["mypy-boto3-oam (>=1.29.0,<1.30.0)"] -omics = ["mypy-boto3-omics (>=1.29.0,<1.30.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.29.0,<1.30.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.29.0,<1.30.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.29.0,<1.30.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.29.0,<1.30.0)"] -organizations = ["mypy-boto3-organizations (>=1.29.0,<1.30.0)"] -osis = ["mypy-boto3-osis (>=1.29.0,<1.30.0)"] -outposts = ["mypy-boto3-outposts (>=1.29.0,<1.30.0)"] 
-panorama = ["mypy-boto3-panorama (>=1.29.0,<1.30.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.29.0,<1.30.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.29.0,<1.30.0)"] -pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.29.0,<1.30.0)"] -personalize = ["mypy-boto3-personalize (>=1.29.0,<1.30.0)"] -personalize-events = ["mypy-boto3-personalize-events (>=1.29.0,<1.30.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.29.0,<1.30.0)"] -pi = ["mypy-boto3-pi (>=1.29.0,<1.30.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.29.0,<1.30.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.29.0,<1.30.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.29.0,<1.30.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.29.0,<1.30.0)"] -pipes = ["mypy-boto3-pipes (>=1.29.0,<1.30.0)"] -polly = ["mypy-boto3-polly (>=1.29.0,<1.30.0)"] -pricing = ["mypy-boto3-pricing (>=1.29.0,<1.30.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.29.0,<1.30.0)"] -proton = ["mypy-boto3-proton (>=1.29.0,<1.30.0)"] -qldb = ["mypy-boto3-qldb (>=1.29.0,<1.30.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.29.0,<1.30.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.29.0,<1.30.0)"] -ram = ["mypy-boto3-ram (>=1.29.0,<1.30.0)"] -rbin = ["mypy-boto3-rbin (>=1.29.0,<1.30.0)"] -rds = ["mypy-boto3-rds (>=1.29.0,<1.30.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.29.0,<1.30.0)"] -redshift = ["mypy-boto3-redshift (>=1.29.0,<1.30.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.29.0,<1.30.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.29.0,<1.30.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.29.0,<1.30.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.29.0,<1.30.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.29.0,<1.30.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.29.0,<1.30.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.29.0,<1.30.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.29.0,<1.30.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.29.0,<1.30.0)"] -route53 = ["mypy-boto3-route53 (>=1.29.0,<1.30.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.29.0,<1.30.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.29.0,<1.30.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.29.0,<1.30.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.29.0,<1.30.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.29.0,<1.30.0)"] -rum = ["mypy-boto3-rum (>=1.29.0,<1.30.0)"] -s3 = ["mypy-boto3-s3 (>=1.29.0,<1.30.0)"] -s3control = ["mypy-boto3-s3control (>=1.29.0,<1.30.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.29.0,<1.30.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.29.0,<1.30.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.29.0,<1.30.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.29.0,<1.30.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.29.0,<1.30.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.29.0,<1.30.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.29.0,<1.30.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.29.0,<1.30.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.29.0,<1.30.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.29.0,<1.30.0)"] -schemas = ["mypy-boto3-schemas (>=1.29.0,<1.30.0)"] -sdb = ["mypy-boto3-sdb 
(>=1.29.0,<1.30.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.29.0,<1.30.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.29.0,<1.30.0)"] -securitylake = ["mypy-boto3-securitylake (>=1.29.0,<1.30.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.29.0,<1.30.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.29.0,<1.30.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.29.0,<1.30.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.29.0,<1.30.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.29.0,<1.30.0)"] -ses = ["mypy-boto3-ses (>=1.29.0,<1.30.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.29.0,<1.30.0)"] -shield = ["mypy-boto3-shield (>=1.29.0,<1.30.0)"] -signer = ["mypy-boto3-signer (>=1.29.0,<1.30.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.29.0,<1.30.0)"] -sms = ["mypy-boto3-sms (>=1.29.0,<1.30.0)"] -sms-voice = ["mypy-boto3-sms-voice (>=1.29.0,<1.30.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.29.0,<1.30.0)"] -snowball = ["mypy-boto3-snowball (>=1.29.0,<1.30.0)"] -sns = ["mypy-boto3-sns (>=1.29.0,<1.30.0)"] -sqs = ["mypy-boto3-sqs (>=1.29.0,<1.30.0)"] -ssm = ["mypy-boto3-ssm (>=1.29.0,<1.30.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.29.0,<1.30.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.29.0,<1.30.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.29.0,<1.30.0)"] -sso = ["mypy-boto3-sso (>=1.29.0,<1.30.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.29.0,<1.30.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.29.0,<1.30.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.29.0,<1.30.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.29.0,<1.30.0)"] -sts = ["mypy-boto3-sts (>=1.29.0,<1.30.0)"] -support = ["mypy-boto3-support (>=1.29.0,<1.30.0)"] -support-app = ["mypy-boto3-support-app (>=1.29.0,<1.30.0)"] -swf = ["mypy-boto3-swf (>=1.29.0,<1.30.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.29.0,<1.30.0)"] -textract = ["mypy-boto3-textract (>=1.29.0,<1.30.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.29.0,<1.30.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.29.0,<1.30.0)"] -tnb = ["mypy-boto3-tnb (>=1.29.0,<1.30.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.29.0,<1.30.0)"] -transfer = ["mypy-boto3-transfer (>=1.29.0,<1.30.0)"] -translate = ["mypy-boto3-translate (>=1.29.0,<1.30.0)"] -verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.29.0,<1.30.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.29.0,<1.30.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.29.0,<1.30.0)"] -waf = ["mypy-boto3-waf (>=1.29.0,<1.30.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.29.0,<1.30.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.29.0,<1.30.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.29.0,<1.30.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.29.0,<1.30.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.29.0,<1.30.0)"] -worklink = ["mypy-boto3-worklink (>=1.29.0,<1.30.0)"] -workmail = ["mypy-boto3-workmail (>=1.29.0,<1.30.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.29.0,<1.30.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.29.0,<1.30.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.29.0,<1.30.0)"] -xray = ["mypy-boto3-xray (>=1.29.0,<1.30.0)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)"] +account = ["mypy-boto3-account (>=1.33.0,<1.34.0)"] +acm = ["mypy-boto3-acm (>=1.33.0,<1.34.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.33.0,<1.34.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)"] +all = 
["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)", "mypy-boto3-account (>=1.33.0,<1.34.0)", "mypy-boto3-acm (>=1.33.0,<1.34.0)", "mypy-boto3-acm-pca (>=1.33.0,<1.34.0)", "mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-amp (>=1.33.0,<1.34.0)", "mypy-boto3-amplify (>=1.33.0,<1.34.0)", "mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)", "mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)", "mypy-boto3-apigateway (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)", "mypy-boto3-appconfig (>=1.33.0,<1.34.0)", "mypy-boto3-appconfigdata (>=1.33.0,<1.34.0)", "mypy-boto3-appfabric (>=1.33.0,<1.34.0)", "mypy-boto3-appflow (>=1.33.0,<1.34.0)", "mypy-boto3-appintegrations (>=1.33.0,<1.34.0)", "mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-application-insights (>=1.33.0,<1.34.0)", "mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-appmesh (>=1.33.0,<1.34.0)", "mypy-boto3-apprunner (>=1.33.0,<1.34.0)", "mypy-boto3-appstream (>=1.33.0,<1.34.0)", "mypy-boto3-appsync (>=1.33.0,<1.34.0)", "mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)", "mypy-boto3-athena (>=1.33.0,<1.34.0)", "mypy-boto3-auditmanager (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)", "mypy-boto3-b2bi (>=1.33.0,<1.34.0)", "mypy-boto3-backup (>=1.33.0,<1.34.0)", "mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)", "mypy-boto3-backupstorage (>=1.33.0,<1.34.0)", "mypy-boto3-batch (>=1.33.0,<1.34.0)", "mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-billingconductor (>=1.33.0,<1.34.0)", "mypy-boto3-braket (>=1.33.0,<1.34.0)", "mypy-boto3-budgets (>=1.33.0,<1.34.0)", "mypy-boto3-ce (>=1.33.0,<1.34.0)", "mypy-boto3-chime (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)", "mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)", "mypy-boto3-cloud9 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)", "mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)", "mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)", "mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)", "mypy-boto3-codeartifact (>=1.33.0,<1.34.0)", "mypy-boto3-codebuild (>=1.33.0,<1.34.0)", "mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)", "mypy-boto3-codecommit (>=1.33.0,<1.34.0)", "mypy-boto3-codedeploy (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)", "mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-codepipeline (>=1.33.0,<1.34.0)", "mypy-boto3-codestar (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-idp 
(>=1.33.0,<1.34.0)", "mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)", "mypy-boto3-comprehend (>=1.33.0,<1.34.0)", "mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)", "mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)", "mypy-boto3-config (>=1.33.0,<1.34.0)", "mypy-boto3-connect (>=1.33.0,<1.34.0)", "mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)", "mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)", "mypy-boto3-connectcases (>=1.33.0,<1.34.0)", "mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)", "mypy-boto3-controltower (>=1.33.0,<1.34.0)", "mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)", "mypy-boto3-cur (>=1.33.0,<1.34.0)", "mypy-boto3-customer-profiles (>=1.33.0,<1.34.0)", "mypy-boto3-databrew (>=1.33.0,<1.34.0)", "mypy-boto3-dataexchange (>=1.33.0,<1.34.0)", "mypy-boto3-datapipeline (>=1.33.0,<1.34.0)", "mypy-boto3-datasync (>=1.33.0,<1.34.0)", "mypy-boto3-datazone (>=1.33.0,<1.34.0)", "mypy-boto3-dax (>=1.33.0,<1.34.0)", "mypy-boto3-detective (>=1.33.0,<1.34.0)", "mypy-boto3-devicefarm (>=1.33.0,<1.34.0)", "mypy-boto3-devops-guru (>=1.33.0,<1.34.0)", "mypy-boto3-directconnect (>=1.33.0,<1.34.0)", "mypy-boto3-discovery (>=1.33.0,<1.34.0)", "mypy-boto3-dlm (>=1.33.0,<1.34.0)", "mypy-boto3-dms (>=1.33.0,<1.34.0)", "mypy-boto3-docdb (>=1.33.0,<1.34.0)", "mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)", "mypy-boto3-drs (>=1.33.0,<1.34.0)", "mypy-boto3-ds (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)", "mypy-boto3-ebs (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)", "mypy-boto3-ecr (>=1.33.0,<1.34.0)", "mypy-boto3-ecr-public (>=1.33.0,<1.34.0)", "mypy-boto3-ecs (>=1.33.0,<1.34.0)", "mypy-boto3-efs (>=1.33.0,<1.34.0)", "mypy-boto3-eks (>=1.33.0,<1.34.0)", "mypy-boto3-eks-auth (>=1.33.0,<1.34.0)", "mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)", "mypy-boto3-elasticache (>=1.33.0,<1.34.0)", "mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)", "mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)", "mypy-boto3-elb (>=1.33.0,<1.34.0)", "mypy-boto3-elbv2 (>=1.33.0,<1.34.0)", "mypy-boto3-emr (>=1.33.0,<1.34.0)", "mypy-boto3-emr-containers (>=1.33.0,<1.34.0)", "mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-entityresolution (>=1.33.0,<1.34.0)", "mypy-boto3-es (>=1.33.0,<1.34.0)", "mypy-boto3-events (>=1.33.0,<1.34.0)", "mypy-boto3-evidently (>=1.33.0,<1.34.0)", "mypy-boto3-finspace (>=1.33.0,<1.34.0)", "mypy-boto3-finspace-data (>=1.33.0,<1.34.0)", "mypy-boto3-firehose (>=1.33.0,<1.34.0)", "mypy-boto3-fis (>=1.33.0,<1.34.0)", "mypy-boto3-fms (>=1.33.0,<1.34.0)", "mypy-boto3-forecast (>=1.33.0,<1.34.0)", "mypy-boto3-forecastquery (>=1.33.0,<1.34.0)", "mypy-boto3-frauddetector (>=1.33.0,<1.34.0)", "mypy-boto3-freetier (>=1.33.0,<1.34.0)", "mypy-boto3-fsx (>=1.33.0,<1.34.0)", "mypy-boto3-gamelift (>=1.33.0,<1.34.0)", "mypy-boto3-glacier (>=1.33.0,<1.34.0)", "mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)", "mypy-boto3-glue (>=1.33.0,<1.34.0)", "mypy-boto3-grafana (>=1.33.0,<1.34.0)", "mypy-boto3-greengrass (>=1.33.0,<1.34.0)", "mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)", "mypy-boto3-groundstation (>=1.33.0,<1.34.0)", "mypy-boto3-guardduty (>=1.33.0,<1.34.0)", "mypy-boto3-health (>=1.33.0,<1.34.0)", "mypy-boto3-healthlake (>=1.33.0,<1.34.0)", "mypy-boto3-honeycode (>=1.33.0,<1.34.0)", "mypy-boto3-iam (>=1.33.0,<1.34.0)", "mypy-boto3-identitystore (>=1.33.0,<1.34.0)", "mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)", "mypy-boto3-importexport (>=1.33.0,<1.34.0)", 
"mypy-boto3-inspector (>=1.33.0,<1.34.0)", "mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)", "mypy-boto3-inspector2 (>=1.33.0,<1.34.0)", "mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)", "mypy-boto3-iot (>=1.33.0,<1.34.0)", "mypy-boto3-iot-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)", "mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents-data (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleethub (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)", "mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)", "mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)", "mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)", "mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)", "mypy-boto3-iotwireless (>=1.33.0,<1.34.0)", "mypy-boto3-ivs (>=1.33.0,<1.34.0)", "mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)", "mypy-boto3-ivschat (>=1.33.0,<1.34.0)", "mypy-boto3-kafka (>=1.33.0,<1.34.0)", "mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-kendra (>=1.33.0,<1.34.0)", "mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)", "mypy-boto3-keyspaces (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)", "mypy-boto3-kms (>=1.33.0,<1.34.0)", "mypy-boto3-lakeformation (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", "mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)", "mypy-boto3-lex-models (>=1.33.0,<1.34.0)", "mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-lightsail (>=1.33.0,<1.34.0)", "mypy-boto3-location (>=1.33.0,<1.34.0)", "mypy-boto3-logs (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)", "mypy-boto3-m2 (>=1.33.0,<1.34.0)", "mypy-boto3-machinelearning (>=1.33.0,<1.34.0)", "mypy-boto3-macie2 (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)", "mypy-boto3-medialive (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)", "mypy-boto3-mediatailor (>=1.33.0,<1.34.0)", "mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)", "mypy-boto3-memorydb (>=1.33.0,<1.34.0)", "mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)", "mypy-boto3-mgh (>=1.33.0,<1.34.0)", 
"mypy-boto3-mgn (>=1.33.0,<1.34.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)", "mypy-boto3-mobile (>=1.33.0,<1.34.0)", "mypy-boto3-mq (>=1.33.0,<1.34.0)", "mypy-boto3-mturk (>=1.33.0,<1.34.0)", "mypy-boto3-mwaa (>=1.33.0,<1.34.0)", "mypy-boto3-neptune (>=1.33.0,<1.34.0)", "mypy-boto3-neptunedata (>=1.33.0,<1.34.0)", "mypy-boto3-network-firewall (>=1.33.0,<1.34.0)", "mypy-boto3-networkmanager (>=1.33.0,<1.34.0)", "mypy-boto3-nimble (>=1.33.0,<1.34.0)", "mypy-boto3-oam (>=1.33.0,<1.34.0)", "mypy-boto3-omics (>=1.33.0,<1.34.0)", "mypy-boto3-opensearch (>=1.33.0,<1.34.0)", "mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)", "mypy-boto3-opsworks (>=1.33.0,<1.34.0)", "mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)", "mypy-boto3-organizations (>=1.33.0,<1.34.0)", "mypy-boto3-osis (>=1.33.0,<1.34.0)", "mypy-boto3-outposts (>=1.33.0,<1.34.0)", "mypy-boto3-panorama (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)", "mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)", "mypy-boto3-personalize (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-events (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-pi (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)", "mypy-boto3-pipes (>=1.33.0,<1.34.0)", "mypy-boto3-polly (>=1.33.0,<1.34.0)", "mypy-boto3-pricing (>=1.33.0,<1.34.0)", "mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)", "mypy-boto3-proton (>=1.33.0,<1.34.0)", "mypy-boto3-qbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-qconnect (>=1.33.0,<1.34.0)", "mypy-boto3-qldb (>=1.33.0,<1.34.0)", "mypy-boto3-qldb-session (>=1.33.0,<1.34.0)", "mypy-boto3-quicksight (>=1.33.0,<1.34.0)", "mypy-boto3-ram (>=1.33.0,<1.34.0)", "mypy-boto3-rbin (>=1.33.0,<1.34.0)", "mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-rds-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-rekognition (>=1.33.0,<1.34.0)", "mypy-boto3-repostspace (>=1.33.0,<1.34.0)", "mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)", "mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)", "mypy-boto3-resource-groups (>=1.33.0,<1.34.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)", "mypy-boto3-robomaker (>=1.33.0,<1.34.0)", "mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)", "mypy-boto3-route53 (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-readiness (>=1.33.0,<1.34.0)", "mypy-boto3-route53domains (>=1.33.0,<1.34.0)", "mypy-boto3-route53resolver (>=1.33.0,<1.34.0)", "mypy-boto3-rum (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-s3control (>=1.33.0,<1.34.0)", "mypy-boto3-s3outposts (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-runtime 
(>=1.33.0,<1.34.0)", "mypy-boto3-savingsplans (>=1.33.0,<1.34.0)", "mypy-boto3-scheduler (>=1.33.0,<1.34.0)", "mypy-boto3-schemas (>=1.33.0,<1.34.0)", "mypy-boto3-sdb (>=1.33.0,<1.34.0)", "mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)", "mypy-boto3-securityhub (>=1.33.0,<1.34.0)", "mypy-boto3-securitylake (>=1.33.0,<1.34.0)", "mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)", "mypy-boto3-service-quotas (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)", "mypy-boto3-servicediscovery (>=1.33.0,<1.34.0)", "mypy-boto3-ses (>=1.33.0,<1.34.0)", "mypy-boto3-sesv2 (>=1.33.0,<1.34.0)", "mypy-boto3-shield (>=1.33.0,<1.34.0)", "mypy-boto3-signer (>=1.33.0,<1.34.0)", "mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)", "mypy-boto3-sms (>=1.33.0,<1.34.0)", "mypy-boto3-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)", "mypy-boto3-snowball (>=1.33.0,<1.34.0)", "mypy-boto3-sns (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)", "mypy-boto3-ssm (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)", "mypy-boto3-sso (>=1.33.0,<1.34.0)", "mypy-boto3-sso-admin (>=1.33.0,<1.34.0)", "mypy-boto3-sso-oidc (>=1.33.0,<1.34.0)", "mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)", "mypy-boto3-storagegateway (>=1.33.0,<1.34.0)", "mypy-boto3-sts (>=1.33.0,<1.34.0)", "mypy-boto3-support (>=1.33.0,<1.34.0)", "mypy-boto3-support-app (>=1.33.0,<1.34.0)", "mypy-boto3-swf (>=1.33.0,<1.34.0)", "mypy-boto3-synthetics (>=1.33.0,<1.34.0)", "mypy-boto3-textract (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-query (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-write (>=1.33.0,<1.34.0)", "mypy-boto3-tnb (>=1.33.0,<1.34.0)", "mypy-boto3-transcribe (>=1.33.0,<1.34.0)", "mypy-boto3-transfer (>=1.33.0,<1.34.0)", "mypy-boto3-translate (>=1.33.0,<1.34.0)", "mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)", "mypy-boto3-voice-id (>=1.33.0,<1.34.0)", "mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)", "mypy-boto3-waf (>=1.33.0,<1.34.0)", "mypy-boto3-waf-regional (>=1.33.0,<1.34.0)", "mypy-boto3-wafv2 (>=1.33.0,<1.34.0)", "mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)", "mypy-boto3-wisdom (>=1.33.0,<1.34.0)", "mypy-boto3-workdocs (>=1.33.0,<1.34.0)", "mypy-boto3-worklink (>=1.33.0,<1.34.0)", "mypy-boto3-workmail (>=1.33.0,<1.34.0)", "mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)", "mypy-boto3-xray (>=1.33.0,<1.34.0)"] +amp = ["mypy-boto3-amp (>=1.33.0,<1.34.0)"] +amplify = ["mypy-boto3-amplify (>=1.33.0,<1.34.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.33.0,<1.34.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.33.0,<1.34.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.33.0,<1.34.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.33.0,<1.34.0)"] +appflow = ["mypy-boto3-appflow (>=1.33.0,<1.34.0)"] +appintegrations = ["mypy-boto3-appintegrations (>=1.33.0,<1.34.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)"] +application-insights = 
["mypy-boto3-application-insights (>=1.33.0,<1.34.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.33.0,<1.34.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.33.0,<1.34.0)"] +appstream = ["mypy-boto3-appstream (>=1.33.0,<1.34.0)"] +appsync = ["mypy-boto3-appsync (>=1.33.0,<1.34.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)"] +athena = ["mypy-boto3-athena (>=1.33.0,<1.34.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.33.0,<1.34.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.33.0,<1.34.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.33.0,<1.34.0)"] +backup = ["mypy-boto3-backup (>=1.33.0,<1.34.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.33.0,<1.34.0)"] +batch = ["mypy-boto3-batch (>=1.33.0,<1.34.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.33.0,<1.34.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.33.0,<1.34.0)"] +boto3 = ["boto3 (==1.33.2)", "botocore (==1.33.2)"] +braket = ["mypy-boto3-braket (>=1.33.0,<1.34.0)"] +budgets = ["mypy-boto3-budgets (>=1.33.0,<1.34.0)"] +ce = ["mypy-boto3-ce (>=1.33.0,<1.34.0)"] +chime = ["mypy-boto3-chime (>=1.33.0,<1.34.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.33.0,<1.34.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.33.0,<1.34.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.33.0,<1.34.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.33.0,<1.34.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.33.0,<1.34.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.33.0,<1.34.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.33.0,<1.34.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)"] +codeguru-security = ["mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.33.0,<1.34.0)"] +codestar = ["mypy-boto3-codestar (>=1.33.0,<1.34.0)"] +codestar-connections = 
["mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.33.0,<1.34.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.33.0,<1.34.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)"] +config = ["mypy-boto3-config (>=1.33.0,<1.34.0)"] +connect = ["mypy-boto3-connect (>=1.33.0,<1.34.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.33.0,<1.34.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)"] +controltower = ["mypy-boto3-controltower (>=1.33.0,<1.34.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)"] +cur = ["mypy-boto3-cur (>=1.33.0,<1.34.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.33.0,<1.34.0)"] +databrew = ["mypy-boto3-databrew (>=1.33.0,<1.34.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.33.0,<1.34.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.33.0,<1.34.0)"] +datasync = ["mypy-boto3-datasync (>=1.33.0,<1.34.0)"] +datazone = ["mypy-boto3-datazone (>=1.33.0,<1.34.0)"] +dax = ["mypy-boto3-dax (>=1.33.0,<1.34.0)"] +detective = ["mypy-boto3-detective (>=1.33.0,<1.34.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.33.0,<1.34.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.33.0,<1.34.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.33.0,<1.34.0)"] +discovery = ["mypy-boto3-discovery (>=1.33.0,<1.34.0)"] +dlm = ["mypy-boto3-dlm (>=1.33.0,<1.34.0)"] +dms = ["mypy-boto3-dms (>=1.33.0,<1.34.0)"] +docdb = ["mypy-boto3-docdb (>=1.33.0,<1.34.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)"] +drs = ["mypy-boto3-drs (>=1.33.0,<1.34.0)"] +ds = ["mypy-boto3-ds (>=1.33.0,<1.34.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.33.0,<1.34.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)"] +ebs = ["mypy-boto3-ebs (>=1.33.0,<1.34.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.33.0,<1.34.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)"] +ecr = ["mypy-boto3-ecr (>=1.33.0,<1.34.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.33.0,<1.34.0)"] +ecs = ["mypy-boto3-ecs (>=1.33.0,<1.34.0)"] +efs = ["mypy-boto3-efs (>=1.33.0,<1.34.0)"] +eks = ["mypy-boto3-eks (>=1.33.0,<1.34.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.33.0,<1.34.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.33.0,<1.34.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)"] +elb = ["mypy-boto3-elb (>=1.33.0,<1.34.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.33.0,<1.34.0)"] +emr = ["mypy-boto3-emr (>=1.33.0,<1.34.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.33.0,<1.34.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)"] +entityresolution = ["mypy-boto3-entityresolution (>=1.33.0,<1.34.0)"] +es = ["mypy-boto3-es (>=1.33.0,<1.34.0)"] +essential = ["mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", 
"mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)"] +events = ["mypy-boto3-events (>=1.33.0,<1.34.0)"] +evidently = ["mypy-boto3-evidently (>=1.33.0,<1.34.0)"] +finspace = ["mypy-boto3-finspace (>=1.33.0,<1.34.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.33.0,<1.34.0)"] +firehose = ["mypy-boto3-firehose (>=1.33.0,<1.34.0)"] +fis = ["mypy-boto3-fis (>=1.33.0,<1.34.0)"] +fms = ["mypy-boto3-fms (>=1.33.0,<1.34.0)"] +forecast = ["mypy-boto3-forecast (>=1.33.0,<1.34.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.33.0,<1.34.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.33.0,<1.34.0)"] +freetier = ["mypy-boto3-freetier (>=1.33.0,<1.34.0)"] +fsx = ["mypy-boto3-fsx (>=1.33.0,<1.34.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.33.0,<1.34.0)"] +glacier = ["mypy-boto3-glacier (>=1.33.0,<1.34.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)"] +glue = ["mypy-boto3-glue (>=1.33.0,<1.34.0)"] +grafana = ["mypy-boto3-grafana (>=1.33.0,<1.34.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.33.0,<1.34.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.33.0,<1.34.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.33.0,<1.34.0)"] +health = ["mypy-boto3-health (>=1.33.0,<1.34.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.33.0,<1.34.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.33.0,<1.34.0)"] +iam = ["mypy-boto3-iam (>=1.33.0,<1.34.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.33.0,<1.34.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)"] +importexport = ["mypy-boto3-importexport (>=1.33.0,<1.34.0)"] +inspector = ["mypy-boto3-inspector (>=1.33.0,<1.34.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.33.0,<1.34.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)"] +iot = ["mypy-boto3-iot (>=1.33.0,<1.34.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.33.0,<1.34.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.33.0,<1.34.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.33.0,<1.34.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.33.0,<1.34.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.33.0,<1.34.0)"] +ivs = ["mypy-boto3-ivs (>=1.33.0,<1.34.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.33.0,<1.34.0)"] +kafka = ["mypy-boto3-kafka (>=1.33.0,<1.34.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)"] +kendra = ["mypy-boto3-kendra (>=1.33.0,<1.34.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.33.0,<1.34.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.33.0,<1.34.0)"] 
+kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)"] +kms = ["mypy-boto3-kms (>=1.33.0,<1.34.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.33.0,<1.34.0)"] +lambda = ["mypy-boto3-lambda (>=1.33.0,<1.34.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.33.0,<1.34.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.33.0,<1.34.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.33.0,<1.34.0)"] +location = ["mypy-boto3-location (>=1.33.0,<1.34.0)"] +logs = ["mypy-boto3-logs (>=1.33.0,<1.34.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)"] +m2 = ["mypy-boto3-m2 (>=1.33.0,<1.34.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.33.0,<1.34.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.33.0,<1.34.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)"] +medialive = ["mypy-boto3-medialive (>=1.33.0,<1.34.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.33.0,<1.34.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.33.0,<1.34.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.33.0,<1.34.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.33.0,<1.34.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)"] +mgh = ["mypy-boto3-mgh (>=1.33.0,<1.34.0)"] +mgn = ["mypy-boto3-mgn (>=1.33.0,<1.34.0)"] +migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)"] +mobile = ["mypy-boto3-mobile (>=1.33.0,<1.34.0)"] +mq = ["mypy-boto3-mq 
(>=1.33.0,<1.34.0)"] +mturk = ["mypy-boto3-mturk (>=1.33.0,<1.34.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.33.0,<1.34.0)"] +neptune = ["mypy-boto3-neptune (>=1.33.0,<1.34.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.33.0,<1.34.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.33.0,<1.34.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.33.0,<1.34.0)"] +nimble = ["mypy-boto3-nimble (>=1.33.0,<1.34.0)"] +oam = ["mypy-boto3-oam (>=1.33.0,<1.34.0)"] +omics = ["mypy-boto3-omics (>=1.33.0,<1.34.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.33.0,<1.34.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.33.0,<1.34.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)"] +organizations = ["mypy-boto3-organizations (>=1.33.0,<1.34.0)"] +osis = ["mypy-boto3-osis (>=1.33.0,<1.34.0)"] +outposts = ["mypy-boto3-outposts (>=1.33.0,<1.34.0)"] +panorama = ["mypy-boto3-panorama (>=1.33.0,<1.34.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)"] +personalize = ["mypy-boto3-personalize (>=1.33.0,<1.34.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.33.0,<1.34.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)"] +pi = ["mypy-boto3-pi (>=1.33.0,<1.34.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.33.0,<1.34.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)"] +pipes = ["mypy-boto3-pipes (>=1.33.0,<1.34.0)"] +polly = ["mypy-boto3-polly (>=1.33.0,<1.34.0)"] +pricing = ["mypy-boto3-pricing (>=1.33.0,<1.34.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)"] +proton = ["mypy-boto3-proton (>=1.33.0,<1.34.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.33.0,<1.34.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.33.0,<1.34.0)"] +qldb = ["mypy-boto3-qldb (>=1.33.0,<1.34.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.33.0,<1.34.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.33.0,<1.34.0)"] +ram = ["mypy-boto3-ram (>=1.33.0,<1.34.0)"] +rbin = ["mypy-boto3-rbin (>=1.33.0,<1.34.0)"] +rds = ["mypy-boto3-rds (>=1.33.0,<1.34.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.33.0,<1.34.0)"] +redshift = ["mypy-boto3-redshift (>=1.33.0,<1.34.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.33.0,<1.34.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.33.0,<1.34.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.33.0,<1.34.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.33.0,<1.34.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)"] +robomaker = ["mypy-boto3-robomaker (>=1.33.0,<1.34.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)"] +route53 = ["mypy-boto3-route53 (>=1.33.0,<1.34.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness 
(>=1.33.0,<1.34.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.33.0,<1.34.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.33.0,<1.34.0)"] +rum = ["mypy-boto3-rum (>=1.33.0,<1.34.0)"] +s3 = ["mypy-boto3-s3 (>=1.33.0,<1.34.0)"] +s3control = ["mypy-boto3-s3control (>=1.33.0,<1.34.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.33.0,<1.34.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.33.0,<1.34.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.33.0,<1.34.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.33.0,<1.34.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.33.0,<1.34.0)"] +schemas = ["mypy-boto3-schemas (>=1.33.0,<1.34.0)"] +sdb = ["mypy-boto3-sdb (>=1.33.0,<1.34.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.33.0,<1.34.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.33.0,<1.34.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.33.0,<1.34.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.33.0,<1.34.0)"] +ses = ["mypy-boto3-ses (>=1.33.0,<1.34.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.33.0,<1.34.0)"] +shield = ["mypy-boto3-shield (>=1.33.0,<1.34.0)"] +signer = ["mypy-boto3-signer (>=1.33.0,<1.34.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)"] +sms = ["mypy-boto3-sms (>=1.33.0,<1.34.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.33.0,<1.34.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)"] +snowball = ["mypy-boto3-snowball (>=1.33.0,<1.34.0)"] +sns = ["mypy-boto3-sns (>=1.33.0,<1.34.0)"] +sqs = ["mypy-boto3-sqs (>=1.33.0,<1.34.0)"] +ssm = ["mypy-boto3-ssm (>=1.33.0,<1.34.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)"] +sso = ["mypy-boto3-sso (>=1.33.0,<1.34.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.33.0,<1.34.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.33.0,<1.34.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.33.0,<1.34.0)"] +sts = ["mypy-boto3-sts (>=1.33.0,<1.34.0)"] +support = ["mypy-boto3-support (>=1.33.0,<1.34.0)"] +support-app = ["mypy-boto3-support-app (>=1.33.0,<1.34.0)"] +swf = ["mypy-boto3-swf (>=1.33.0,<1.34.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.33.0,<1.34.0)"] +textract = ["mypy-boto3-textract (>=1.33.0,<1.34.0)"] +timestream-query = ["mypy-boto3-timestream-query (>=1.33.0,<1.34.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.33.0,<1.34.0)"] +tnb = ["mypy-boto3-tnb (>=1.33.0,<1.34.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.33.0,<1.34.0)"] +transfer = ["mypy-boto3-transfer (>=1.33.0,<1.34.0)"] +translate = ["mypy-boto3-translate (>=1.33.0,<1.34.0)"] +trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)"] +verifiedpermissions = 
["mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.33.0,<1.34.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)"] +waf = ["mypy-boto3-waf (>=1.33.0,<1.34.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.33.0,<1.34.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.33.0,<1.34.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.33.0,<1.34.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.33.0,<1.34.0)"] +worklink = ["mypy-boto3-worklink (>=1.33.0,<1.34.0)"] +workmail = ["mypy-boto3-workmail (>=1.33.0,<1.34.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.33.0,<1.34.0)"] +workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)"] +xray = ["mypy-boto3-xray (>=1.33.0,<1.34.0)"] [[package]] name = "botocore" -version = "1.32.1" +version = "1.33.2" description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.32.1-py3-none-any.whl", hash = "sha256:1d9c0ff3eb7828a8bd8c5c7f12cd9d8c05c6fe4c616ef963fdaab538a0da3809"}, - {file = "botocore-1.32.1.tar.gz", hash = "sha256:fcf3cc2913afba8e5f7ebcc15e8f6bfae844ab64bf983bf5a6fe3bb54cce239d"}, + {file = "botocore-1.33.2-py3-none-any.whl", hash = "sha256:5c46b7e8450efbf7ddc2a0016eee7225a5564583122e25a20ca92a29a105225c"}, + {file = "botocore-1.33.2.tar.gz", hash = "sha256:16a30faac6e6f17961c009defb74ab1a3508b8abc58fab98e7cf96af0d91ea84"}, ] [package.dependencies] @@ -558,7 +571,7 @@ urllib3 = [ ] [package.extras] -crt = ["awscrt (==0.19.12)"] +crt = ["awscrt (==0.19.17)"] [[package]] name = "botocore-stubs" @@ -2357,13 +2370,13 @@ reports = ["lxml"] [[package]] name = "mypy-boto3-cloudformation" -version = "1.29.0" -description = "Type annotations for boto3.CloudFormation 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.33.0" +description = "Type annotations for boto3.CloudFormation 1.33.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudformation-1.29.0.tar.gz", hash = "sha256:91b7202a439d31f7e6645f34ea810f1900f23214900fdf6de210a0704c14da70"}, - {file = "mypy_boto3_cloudformation-1.29.0-py3-none-any.whl", hash = "sha256:b719c35be8b4d5606e9b4fd66d4d0c0e3d5eaf9508a72099053c8e0640b652af"}, + {file = "mypy-boto3-cloudformation-1.33.0.tar.gz", hash = "sha256:e1c27b910ffce96ed0ae126e4c5755bdd90a56dc7548e543264334a35608df0a"}, + {file = "mypy_boto3_cloudformation-1.33.0-py3-none-any.whl", hash = "sha256:f56659fa8d804017075e3fb27ac8df17cd6c9ea3f686d79832bf5add51eb2965"}, ] [package.dependencies] @@ -2371,13 +2384,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-dynamodb" -version = "1.29.0" -description = "Type annotations for boto3.DynamoDB 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.33.0" +description = "Type annotations for boto3.DynamoDB 1.33.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-dynamodb-1.29.0.tar.gz", hash = "sha256:e9b0f1cf1d66d2cbc5d7177832dcd08d85cfa84983934aa361bfc3ca57e06edc"}, - {file = "mypy_boto3_dynamodb-1.29.0-py3-none-any.whl", hash = "sha256:a54d9bf0a9449423fa909586a6003e55ba1d64fc77107c228f4091020f83d134"}, + {file = 
"mypy-boto3-dynamodb-1.33.0.tar.gz", hash = "sha256:2cfe1089c89de61b1ec0e69a72ba3e6865a013ea0a37d318ab564983785d42f9"}, + {file = "mypy_boto3_dynamodb-1.33.0-py3-none-any.whl", hash = "sha256:619ea2cc311ced0ecb44b6e8d3bf3dd851fb7c53a34128b4ff6d6e6a11fdd41f"}, ] [package.dependencies] @@ -2385,13 +2398,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-ec2" -version = "1.29.0" -description = "Type annotations for boto3.EC2 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.33.0" +description = "Type annotations for boto3.EC2 1.33.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-ec2-1.29.0.tar.gz", hash = "sha256:1e636794205fee5069701a32919c03b40da1d031dbcd6bfca097c9001774dfc1"}, - {file = "mypy_boto3_ec2-1.29.0-py3-none-any.whl", hash = "sha256:9684add91b80889880ba1403f28e32d5f6dcf540f318de59913e0e9fa10965be"}, + {file = "mypy-boto3-ec2-1.33.0.tar.gz", hash = "sha256:07578937f974f6129a79d88f788e53a0a38d22e5a9c0c63d589f24e39b8fb90d"}, + {file = "mypy_boto3_ec2-1.33.0-py3-none-any.whl", hash = "sha256:ac24fe3a7d849735c84bf53abaa2ba809e39d2582e98bd5538adecb585fce9b0"}, ] [package.dependencies] @@ -2399,13 +2412,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-lambda" -version = "1.29.0" -description = "Type annotations for boto3.Lambda 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.33.0" +description = "Type annotations for boto3.Lambda 1.33.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-lambda-1.29.0.tar.gz", hash = "sha256:e4537261f7b675b1c165a7dc04d4b661f2f338a45e57bd2bee92d9a41a9cd407"}, - {file = "mypy_boto3_lambda-1.29.0-py3-none-any.whl", hash = "sha256:cc3f4dee77181feb2a1ec90f72258a32bdc75f83d01b3c637ca791073279d3e5"}, + {file = "mypy-boto3-lambda-1.33.0.tar.gz", hash = "sha256:beac0cb4b94f83a444242db16f601405bdfb6c15808c2c52720224d907e7af40"}, + {file = "mypy_boto3_lambda-1.33.0-py3-none-any.whl", hash = "sha256:10e0f04168f4064e89ba136218162003f1cb6826dcbfa95ca982d3cb792fc9f7"}, ] [package.dependencies] @@ -2413,13 +2426,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-logs" -version = "1.29.0" -description = "Type annotations for boto3.CloudWatchLogs 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.33.0" +description = "Type annotations for boto3.CloudWatchLogs 1.33.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-logs-1.29.0.tar.gz", hash = "sha256:c30f51dafad4578d74930f3dc7fa0e0cba7250546a0388ed614f8226e6180ffd"}, - {file = "mypy_boto3_logs-1.29.0-py3-none-any.whl", hash = "sha256:0f74e18b773e099d99050337a67127a1dd80441f810286d2bf1acdfc5c70a1ea"}, + {file = "mypy-boto3-logs-1.33.0.tar.gz", hash = "sha256:607520796f6caa69e5e921eb4899a370f6a7ed0c7ebdc1bf0396bec24af880f2"}, + {file = "mypy_boto3_logs-1.33.0-py3-none-any.whl", hash = "sha256:8ba1999f949f33541c0690054725a3d7d1e9084f7cc57fc69b58aabbbf5788b3"}, ] [package.dependencies] @@ -2427,13 +2440,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-rds" -version = "1.29.0" -description = "Type annotations for boto3.RDS 1.29.0 service generated with mypy-boto3-builder 
7.20.3" +version = "1.33.0" +description = "Type annotations for boto3.RDS 1.33.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-rds-1.29.0.tar.gz", hash = "sha256:2e7688620ec81a637fbb129ed4165592b118f255089de98013d3b95fb14bcf89"}, - {file = "mypy_boto3_rds-1.29.0-py3-none-any.whl", hash = "sha256:3cab2b07a29c06ad1f469bcd98a8796f23ae423f7f03a93d43b3a0cf4cb9877c"}, + {file = "mypy-boto3-rds-1.33.0.tar.gz", hash = "sha256:2a50e40aa473b34e6651672a4233873b5fafbe42218c33af27a8ecf8571e9169"}, + {file = "mypy_boto3_rds-1.33.0-py3-none-any.whl", hash = "sha256:dda63ea8b2358e549a73001ad7b56c3b4ce3da90f00b520aa37e3ada99e4ab69"}, ] [package.dependencies] @@ -2441,13 +2454,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-s3" -version = "1.29.0" -description = "Type annotations for boto3.S3 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.33.2" +description = "Type annotations for boto3.S3 1.33.2 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.29.0.tar.gz", hash = "sha256:3c8473974e304aa512abbf6a47454d9834674e89db414545e2f0cb4fcdd227c9"}, - {file = "mypy_boto3_s3-1.29.0-py3-none-any.whl", hash = "sha256:f5040429b0c3814c6ec9c1a59256976186acb7376fd3b56c4e7e5d03272bb1a8"}, + {file = "mypy-boto3-s3-1.33.2.tar.gz", hash = "sha256:f54a3ad3288f4e4719ebada3dde68c320507b0fc451d59bc68af7e6ab15cbdad"}, + {file = "mypy_boto3_s3-1.33.2-py3-none-any.whl", hash = "sha256:9d463df6def30de31a467d49ab92ff7795d46709d56eff6f52216a08bac27918"}, ] [package.dependencies] @@ -2455,13 +2468,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-sqs" -version = "1.29.0" -description = "Type annotations for boto3.SQS 1.29.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.33.0" +description = "Type annotations for boto3.SQS 1.33.0 service generated with mypy-boto3-builder 7.20.3" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-sqs-1.29.0.tar.gz", hash = "sha256:0835256e3aabd27b2acf613c1b82a22b9de18412a0b07bd04d6d214c3f063906"}, - {file = "mypy_boto3_sqs-1.29.0-py3-none-any.whl", hash = "sha256:db88751bd7765f51c2b1f9061545ddb06639d301c3d981d3b3fa4b367f0ca8ea"}, + {file = "mypy-boto3-sqs-1.33.0.tar.gz", hash = "sha256:81f4838e81cbb0c088a10e287922fdf6a3f317cbab6647993ab9dbd567c0e8fb"}, + {file = "mypy_boto3_sqs-1.33.0-py3-none-any.whl", hash = "sha256:81f71d5f461e5e670d2ca93df92c93efdd7c29be33eabf8475df5f071e638583"}, ] [package.dependencies] @@ -3864,20 +3877,20 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.7.0" +version = "0.8.1" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.7.0-py3-none-any.whl", hash = "sha256:10d6923c6359175f264811ef4bf6161a3156ce8e350e705396a7557d6293c33a"}, - {file = "s3transfer-0.7.0.tar.gz", hash = "sha256:fd3889a66f5fe17299fe75b82eae6cf722554edca744ca5d5fe308b104883d2e"}, + {file = "s3transfer-0.8.1-py3-none-any.whl", hash = "sha256:d1c52af7bceca1650d0f27728b29bb4925184aead7b55bccacf893b79a108604"}, + {file = "s3transfer-0.8.1.tar.gz", hash = "sha256:e6cafd5643fc7b44fddfba1e5b521005675b0e07533ddad958a3554bc87d7330"}, ] [package.dependencies] -botocore = ">=1.12.36,<2.0a.0" +botocore = ">=1.33.2,<2.0a.0" [package.extras] -crt = ["botocore[crt] 
(>=1.20.29,<2.0a.0)"] +crt = ["botocore[crt] (>=1.33.2,<2.0a.0)"] [[package]] name = "sgmllib3k" From 87d76280683ded95176feb93986251bfb7b537e9 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 29 Nov 2023 15:38:50 -0400 Subject: [PATCH 192/262] Refactor integration controllers, to reduce copy / paste. (#1539) --- api/admin/controller/collection_settings.py | 37 ++++------------- api/admin/controller/discovery_services.py | 29 +------------ api/admin/controller/integration_settings.py | 41 ++++++++++++++++++- api/admin/controller/patron_auth_services.py | 26 +----------- api/admin/problem_details.py | 15 ++----- .../api/admin/controller/test_collections.py | 4 +- .../api/admin/controller/test_patron_auth.py | 4 +- 7 files changed, 60 insertions(+), 96 deletions(-) diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index d1402f08fb..c366c144eb 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -9,12 +9,9 @@ from api.admin.problem_details import ( CANNOT_DELETE_COLLECTION_WITH_CHILDREN, MISSING_COLLECTION, - MISSING_COLLECTION_NAME, MISSING_PARENT, MISSING_SERVICE, - NO_PROTOCOL_FOR_NEW_SERVICE, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, - UNKNOWN_PROTOCOL, ) from api.circulation import CirculationApiType from api.integration.registry.license_providers import LicenseProvidersRegistry @@ -79,37 +76,19 @@ def process_get(self) -> Response: mimetype="application/json", ) + def create_new_service(self, name: str, protocol: str) -> IntegrationConfiguration: + service = super().create_new_service(name, protocol) + # Make sure the new service is associated with a collection + create(self._db, Collection, integration_configuration=service) + return service + def process_post(self) -> Union[Response, ProblemDetail]: self.require_system_admin() try: form_data = flask.request.form - protocol = form_data.get("protocol", None, str) - id = form_data.get("id", None, int) - name = form_data.get("name", None, str) + libraries_data = self.get_libraries_data(form_data) parent_id = form_data.get("parent_id", None, int) - libraries_data = form_data.get("libraries", None, str) - - if protocol is None and id is None: - raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) - - if protocol is None or protocol not in self.registry: - self.log.warning( - f"Unknown patron authentication service protocol: {protocol}" - ) - raise ProblemError(UNKNOWN_PROTOCOL) - - if id is not None: - # Find an existing service to edit - integration = self.get_existing_service(id, name, protocol) - response_code = 200 - else: - # Create a new service - if not name: - raise ProblemError(MISSING_COLLECTION_NAME) - integration = self.create_new_service(name, protocol) - # Make sure the service is associated with a collection - create(self._db, Collection, integration_configuration=integration) - response_code = 201 + integration, protocol, response_code = self.get_service(form_data) impl_cls = self.registry[protocol] diff --git a/api/admin/controller/discovery_services.py b/api/admin/controller/discovery_services.py index ad2b9b3eda..30ffce8e6b 100644 --- a/api/admin/controller/discovery_services.py +++ b/api/admin/controller/discovery_services.py @@ -7,12 +7,7 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.controller.integration_settings import IntegrationSettingsController from api.admin.form_data import ProcessFormData -from api.admin.problem_details import ( - INCOMPLETE_CONFIGURATION, - 
INTEGRATION_URL_ALREADY_IN_USE, - NO_PROTOCOL_FOR_NEW_SERVICE, - UNKNOWN_PROTOCOL, -) +from api.admin.problem_details import INTEGRATION_URL_ALREADY_IN_USE from api.discovery.opds_registration import OpdsRegistrationService from api.integration.registry.discovery import DiscoveryRegistry from core.model import ( @@ -68,27 +63,7 @@ def set_up_default_registry(self) -> None: def process_post(self) -> Union[Response, ProblemDetail]: try: form_data = flask.request.form - protocol = form_data.get("protocol", None, str) - id = form_data.get("id", None, int) - name = form_data.get("name", None, str) - - if protocol is None and id is None: - raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) - - if protocol is None or protocol not in self.registry: - self.log.warning(f"Unknown service protocol: {protocol}") - raise ProblemError(UNKNOWN_PROTOCOL) - - if id is not None: - # Find an existing service to edit - service = self.get_existing_service(id, name, protocol) - response_code = 200 - else: - # Create a new service - if name is None: - raise ProblemError(INCOMPLETE_CONFIGURATION) - service = self.create_new_service(name, protocol) - response_code = 201 + service, protocol, response_code = self.get_service(form_data) impl_cls = self.registry[protocol] settings_class = impl_cls.settings_class() diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py index d0d9f13e5f..8cb6e68a4f 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -1,15 +1,21 @@ +from __future__ import annotations + import json from abc import ABC, abstractmethod -from typing import Any, Dict, Generic, List, NamedTuple, Optional, Type, TypeVar +from typing import Any, Dict, Generic, List, NamedTuple, Optional, Tuple, Type, TypeVar import flask from flask import Response +from werkzeug.datastructures import ImmutableMultiDict from api.admin.problem_details import ( CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, MISSING_SERVICE, + MISSING_SERVICE_NAME, + NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, + UNKNOWN_PROTOCOL, ) from api.controller import CirculationManager from core.integration.base import ( @@ -196,6 +202,39 @@ def create_new_service(self, name: str, protocol: str) -> IntegrationConfigurati ) return new_service + def get_libraries_data( + self, form_data: ImmutableMultiDict[str, str] + ) -> Optional[str]: + libraries_data = form_data.get("libraries", None, str) + return libraries_data + + def get_service( + self, form_data: ImmutableMultiDict[str, str] + ) -> Tuple[IntegrationConfiguration, str, int]: + protocol = form_data.get("protocol", None, str) + _id = form_data.get("id", None, int) + name = form_data.get("name", None, str) + + if protocol is None and _id is None: + raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) + + if protocol is None or protocol not in self.registry: + self.log.warning(f"Unknown service protocol: {protocol}") + raise ProblemError(UNKNOWN_PROTOCOL) + + if _id is not None: + # Find an existing service to edit + service = self.get_existing_service(_id, name, protocol) + response_code = 200 + else: + # Create a new service + if name is None: + raise ProblemError(MISSING_SERVICE_NAME) + service = self.create_new_service(name, protocol) + response_code = 201 + + return service, protocol, response_code + def get_library(self, short_name: str) -> Library: """ Get a library by its short name. 
diff --git a/api/admin/controller/patron_auth_services.py b/api/admin/controller/patron_auth_services.py index ff3ae352f5..6974a0f70c 100644 --- a/api/admin/controller/patron_auth_services.py +++ b/api/admin/controller/patron_auth_services.py @@ -62,30 +62,8 @@ def process_get(self) -> Response: def process_post(self) -> Union[Response, ProblemDetail]: try: form_data = flask.request.form - protocol = form_data.get("protocol", None, str) - id = form_data.get("id", None, int) - name = form_data.get("name", None, str) - libraries_data = form_data.get("libraries", None, str) - - if protocol is None and id is None: - raise ProblemError(NO_PROTOCOL_FOR_NEW_SERVICE) - - if protocol is None or protocol not in self.registry: - self.log.warning( - f"Unknown patron authentication service protocol: {protocol}" - ) - raise ProblemError(UNKNOWN_PROTOCOL) - - if id is not None: - # Find an existing service to edit - auth_service = self.get_existing_service(id, name, protocol) - response_code = 200 - else: - # Create a new service - if name is None: - raise ProblemError(MISSING_PATRON_AUTH_NAME) - auth_service = self.create_new_service(name, protocol) - response_code = 201 + libraries_data = self.get_libraries_data(form_data) + auth_service, protocol, response_code = self.get_service(form_data) # Update settings impl_cls = self.registry[protocol] diff --git a/api/admin/problem_details.py b/api/admin/problem_details.py index 081e2586ac..cca2bd0ce0 100644 --- a/api/admin/problem_details.py +++ b/api/admin/problem_details.py @@ -154,13 +154,6 @@ detail=_("The specified collection does not exist."), ) -MISSING_COLLECTION_NAME = pd( - "http://librarysimplified.org/terms/problem/missing-collection-name", - status_code=400, - title=_("Missing collection name."), - detail=_("You must identify the collection by its name."), -) - MISSING_ANALYTICS_NAME = pd( "http://librarysimplified.org/terms/problem/missing-analytics-name", status_code=400, @@ -200,11 +193,11 @@ detail=_("A protocol can't be changed once it has been set."), ) -MISSING_PATRON_AUTH_NAME = pd( - "http://librarysimplified.org/terms/problem/missing-patron-auth-name", +MISSING_SERVICE_NAME = pd( + "http://librarysimplified.org/terms/problem/missing-service-name", status_code=400, - title=_("Missing patron auth service name."), - detail=_("You must identify the patron auth service by its name."), + title=_("Missing service name."), + detail=_("You must identify the service by its name."), ) PROTOCOL_DOES_NOT_SUPPORT_PARENTS = pd( diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index a399673791..fd793796f5 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -12,9 +12,9 @@ CANNOT_DELETE_COLLECTION_WITH_CHILDREN, INCOMPLETE_CONFIGURATION, INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_COLLECTION_NAME, MISSING_PARENT, MISSING_SERVICE, + MISSING_SERVICE_NAME, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, PROTOCOL_DOES_NOT_SUPPORT_PARENTS, @@ -164,7 +164,7 @@ def test_collections_get_collections_with_multiple_collections( [ pytest.param( {"protocol": "Overdrive"}, - MISSING_COLLECTION_NAME, + MISSING_SERVICE_NAME, False, id="missing_name", ), diff --git a/tests/api/admin/controller/test_patron_auth.py b/tests/api/admin/controller/test_patron_auth.py index 6fbb8be250..e4785bcbc6 100644 --- a/tests/api/admin/controller/test_patron_auth.py +++ b/tests/api/admin/controller/test_patron_auth.py @@ -17,8 +17,8 @@ INTEGRATION_NAME_ALREADY_IN_USE, 
INVALID_CONFIGURATION_OPTION, INVALID_LIBRARY_IDENTIFIER_RESTRICTION_REGULAR_EXPRESSION, - MISSING_PATRON_AUTH_NAME, MISSING_SERVICE, + MISSING_SERVICE_NAME, MULTIPLE_BASIC_AUTH_SERVICES, NO_PROTOCOL_FOR_NEW_SERVICE, NO_SUCH_LIBRARY, @@ -366,7 +366,7 @@ def test_patron_auth_services_post_missing_patron_auth_name( ) response = post_response(form) assert isinstance(response, ProblemDetail) - assert response.uri == MISSING_PATRON_AUTH_NAME.uri + assert response == MISSING_SERVICE_NAME def test_patron_auth_services_post_no_such_library( self, From 9babdac555d0fbbff0434058132d53f18034c873 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 29 Nov 2023 17:04:30 -0400 Subject: [PATCH 193/262] Convert MARC Generator to use Integration Configuration (PP-498) (#1523) --- ...ff8595_migrate_license_integrations_to_.py | 38 +- ...f58829fc1a_add_discovery_service_tables.py | 38 +- ...1_1e46a5bc33b5_migrate_catalog_services.py | 102 +++++ api/admin/controller/catalog_services.py | 203 ++++---- api/controller.py | 23 +- api/integration/registry/catalog_services.py | 9 + api/marc.py | 83 ++-- core/integration/goals.py | 1 + core/lane.py | 2 +- core/marc.py | 342 ++++++++------ .../migration/migrate_external_integration.py | 32 +- core/model/cachedfeed.py | 11 +- core/model/configuration.py | 7 - core/model/library.py | 2 +- pyproject.toml | 4 + scripts.py | 100 ++-- .../admin/controller/test_catalog_services.py | 432 +++++++++--------- tests/api/test_controller_marc.py | 20 +- tests/api/test_marc.py | 408 ++++++++++------- tests/api/test_scripts.py | 109 +++-- tests/core/test_marc.py | 131 +++--- tests/fixtures/database.py | 8 +- tests/migration/test_20231121_1e46a5bc33b5.py | 181 ++++++++ 23 files changed, 1388 insertions(+), 898 deletions(-) create mode 100644 alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py create mode 100644 api/integration/registry/catalog_services.py create mode 100644 tests/migration/test_20231121_1e46a5bc33b5.py diff --git a/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py b/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py index ea99802e99..7c9158ef45 100644 --- a/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py +++ b/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py @@ -13,12 +13,13 @@ from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import BaseSettings from core.migration.migrate_external_integration import ( - _migrate_external_integration, _migrate_library_settings, + _validate_and_load_settings, get_configuration_settings, get_integrations, get_library_for_integration, ) +from core.model import json_serializer # revision identifiers, used by Alembic. revision = "0af587ff8595" @@ -30,6 +31,41 @@ LICENSE_GOAL = "LICENSE_GOAL" +# This function is copied from core/migration/migrate_external_integration.py +# because the integration_configurations table has changed and this migration +# needs a copy of the function that references the old version of the table. +# +# It was copied here, because this old version can be deleted whenever this +# migration is deleted, so it makes sense to keep them together. +def _migrate_external_integration( + connection, + integration, + protocol_class, + goal, + settings_dict, + self_test_results, + name=None, +): + # Load and validate the settings before storing them in the database. 
+ settings_class = protocol_class.settings_class() + settings_obj = _validate_and_load_settings(settings_class, settings_dict) + integration_configuration = connection.execute( + "insert into integration_configurations " + "(protocol, goal, name, settings, self_test_results) " + "values (%s, %s, %s, %s, %s)" + "returning id", + ( + integration.protocol, + goal, + name or integration.name, + json_serializer(settings_obj.dict()), + self_test_results, + ), + ).fetchone() + assert integration_configuration is not None + return integration_configuration[0] + + def upgrade() -> None: registry = LicenseProvidersRegistry() diff --git a/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py b/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py index 70b14f1dcc..d851b9ea16 100644 --- a/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py +++ b/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py @@ -10,12 +10,13 @@ from alembic import op from api.discovery.opds_registration import OpdsRegistrationService from core.migration.migrate_external_integration import ( - _migrate_external_integration, + _validate_and_load_settings, get_configuration_settings, get_integrations, get_library_for_integration, ) from core.migration.util import drop_enum, pg_update_enum +from core.model import json_serializer # revision identifiers, used by Alembic. revision = "0df58829fc1a" @@ -31,6 +32,41 @@ new_goals_enum = old_goals_enum + ["DISCOVERY_GOAL"] +# This function is copied from core/migration/migrate_external_integration.py +# because the integration_configurations table has changed and this migration +# needs a copy of the function that references the old version of the table. +# +# It was copied here, because this old version can be deleted whenever this +# migration is deleted, so it makes sense to keep them together. +def _migrate_external_integration( + connection, + integration, + protocol_class, + goal, + settings_dict, + self_test_results, + name=None, +): + # Load and validate the settings before storing them in the database. + settings_class = protocol_class.settings_class() + settings_obj = _validate_and_load_settings(settings_class, settings_dict) + integration_configuration = connection.execute( + "insert into integration_configurations " + "(protocol, goal, name, settings, self_test_results) " + "values (%s, %s, %s, %s, %s)" + "returning id", + ( + integration.protocol, + goal, + name or integration.name, + json_serializer(settings_obj.dict()), + self_test_results, + ), + ).fetchone() + assert integration_configuration is not None + return integration_configuration[0] + + def upgrade() -> None: op.create_table( "discovery_service_registrations", diff --git a/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py b/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py new file mode 100644 index 0000000000..0bc74f360d --- /dev/null +++ b/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py @@ -0,0 +1,102 @@ +"""Migrate catalog services. 
+ +Revision ID: 1e46a5bc33b5 +Revises: 382d7921f500 +Create Date: 2023-11-21 17:48:04.535064+00:00 + +""" +from typing import Optional + +from alembic import op +from core.marc import MARCExporter +from core.migration.migrate_external_integration import ( + _migrate_external_integration, + _migrate_library_settings, + get_configuration_settings, + get_integrations, + get_library_for_integration, +) +from core.migration.util import pg_update_enum + +# revision identifiers, used by Alembic. +revision = "1e46a5bc33b5" +down_revision = "382d7921f500" +branch_labels = None +depends_on = None + +CATALOG_GOAL = "CATALOG_GOAL" +old_goals_enum = ["PATRON_AUTH_GOAL", "LICENSE_GOAL", "DISCOVERY_GOAL"] +new_goals_enum = old_goals_enum + [CATALOG_GOAL] + + +def upgrade() -> None: + # Add the new enum value to our goals enum + pg_update_enum( + op, + "integration_configurations", + "goal", + "goals", + old_goals_enum, + new_goals_enum, + ) + + # Migrate the existing catalog services to integration configurations + connection = op.get_bind() + integrations = get_integrations(connection, "ils_catalog") + for integration in integrations: + _id, protocol, name = integration + + if protocol != "MARC Export": + raise RuntimeError(f"Unknown catalog service '{protocol}'") + + ( + settings_dict, + libraries_settings, + self_test_result, + ) = get_configuration_settings(connection, integration) + + # We moved the setting for update_frequency from the library settings to the integration settings. + update_frequency: Optional[str] = None + for library_id, library_settings in libraries_settings.items(): + if "marc_update_frequency" in library_settings: + frequency = library_settings["marc_update_frequency"] + del library_settings["marc_update_frequency"] + if update_frequency is not None and update_frequency != frequency: + raise RuntimeError( + f"Found different update frequencies for different libraries ({update_frequency}/{frequency})." 
+ ) + update_frequency = frequency + + if update_frequency is not None: + settings_dict["marc_update_frequency"] = update_frequency + + integration_configuration_id = _migrate_external_integration( + connection, + integration.name, + MARCExporter.__name__, + MARCExporter, + CATALOG_GOAL, + settings_dict, + self_test_result, + ) + + integration_libraries = get_library_for_integration(connection, _id) + for library in integration_libraries: + _migrate_library_settings( + connection, + integration_configuration_id, + library.library_id, + libraries_settings[library.library_id], + MARCExporter, + ) + + +def downgrade() -> None: + pg_update_enum( + op, + "integration_configurations", + "goal", + "goals", + new_goals_enum, + old_goals_enum, + ) diff --git a/api/admin/controller/catalog_services.py b/api/admin/controller/catalog_services.py index 294a8358c6..7428aabbfb 100644 --- a/api/admin/controller/catalog_services.py +++ b/api/admin/controller/catalog_services.py @@ -1,29 +1,36 @@ +from typing import List, Type, Union + import flask from flask import Response -from flask_babel import lazy_gettext as _ - -from api.admin.controller.settings import SettingsController -from api.admin.problem_details import ( - CANNOT_CHANGE_PROTOCOL, - INTEGRATION_NAME_ALREADY_IN_USE, - MISSING_SERVICE, - MULTIPLE_SERVICES_FOR_LIBRARY, - UNKNOWN_PROTOCOL, + +from api.admin.controller.base import AdminPermissionsControllerMixin +from api.admin.controller.integration_settings import ( + IntegrationSettingsController, + UpdatedLibrarySettingsTuple, ) +from api.admin.form_data import ProcessFormData +from api.admin.problem_details import MULTIPLE_SERVICES_FOR_LIBRARY +from api.integration.registry.catalog_services import CatalogServicesRegistry +from core.integration.goals import Goals +from core.integration.settings import BaseSettings from core.marc import MARCExporter -from core.model import ExternalIntegration, get_one -from core.util.problem_detail import ProblemDetail +from core.model import ( + IntegrationConfiguration, + IntegrationLibraryConfiguration, + json_serializer, + site_configuration_has_changed, +) +from core.util.problem_detail import ProblemDetail, ProblemError -class CatalogServicesController(SettingsController): - def __init__(self, manager): - super().__init__(manager) - service_apis = [MARCExporter] - self.protocols = self._get_integration_protocols( - service_apis, protocol_name_attr="NAME" - ) +class CatalogServicesController( + IntegrationSettingsController[MARCExporter], + AdminPermissionsControllerMixin, +): + def default_registry(self) -> CatalogServicesRegistry: + return CatalogServicesRegistry() - def process_catalog_services(self): + def process_catalog_services(self) -> Union[Response, ProblemDetail]: self.require_system_admin() if flask.request.method == "GET": @@ -31,104 +38,76 @@ def process_catalog_services(self): else: return self.process_post() - def process_get(self): - services = self._get_integration_info( - ExternalIntegration.CATALOG_GOAL, self.protocols - ) - return dict( - catalog_services=services, - protocols=self.protocols, + def process_get(self) -> Response: + return Response( + json_serializer( + { + "catalog_services": self.configured_services, + "protocols": list(self.protocols.values()), + } + ), + status=200, + mimetype="application/json", ) - def process_post(self): - protocol = flask.request.form.get("protocol") - is_new = False - error = self.validate_form_fields(protocol) - if error: - return error - - id = flask.request.form.get("id") - if id: - # Find an 
existing service to edit - service = get_one( - self._db, - ExternalIntegration, - id=id, - goal=ExternalIntegration.CATALOG_GOAL, + def library_integration_validation( + self, integration: IntegrationLibraryConfiguration + ) -> None: + """Check that the library didn't end up with multiple MARC integrations.""" + + library = integration.library + integrations = ( + self._db.query(IntegrationConfiguration) + .join(IntegrationLibraryConfiguration) + .filter( + IntegrationLibraryConfiguration.library_id == library.id, + IntegrationConfiguration.goal == Goals.CATALOG_GOAL, ) - if not service: - return MISSING_SERVICE - if protocol != service.protocol: - return CANNOT_CHANGE_PROTOCOL - else: - # Create a new service - service, is_new = self._create_integration( - self.protocols, - protocol, - ExternalIntegration.CATALOG_GOAL, + .count() + ) + if integrations > 1: + raise ProblemError( + MULTIPLE_SERVICES_FOR_LIBRARY.detailed( + f"You tried to add a MARC export service to {library.short_name}, but it already has one." + ) ) - if isinstance(service, ProblemDetail): - return service - name = self.get_name(service) - if isinstance(name, ProblemDetail): + def process_updated_libraries( + self, + libraries: List[UpdatedLibrarySettingsTuple], + settings_class: Type[BaseSettings], + ) -> None: + super().process_updated_libraries(libraries, settings_class) + for integration, _ in libraries: + self.library_integration_validation(integration) + + def process_post(self) -> Union[Response, ProblemDetail]: + try: + form_data = flask.request.form + libraries_data = self.get_libraries_data(form_data) + catalog_service, protocol, response_code = self.get_service(form_data) + + # Update settings + impl_cls = self.registry[protocol] + settings_class = impl_cls.settings_class() + validated_settings = ProcessFormData.get_settings(settings_class, form_data) + catalog_service.settings_dict = validated_settings.dict() + + # Update library settings + if libraries_data: + self.process_libraries( + catalog_service, libraries_data, impl_cls.library_settings_class() + ) + + # Trigger a site configuration change + site_configuration_has_changed(self._db) + + except ProblemError as e: self._db.rollback() - return name - elif name: - service.name = name - - [protocol] = [p for p in self.protocols if p.get("name") == protocol] - - result = self._set_integration_settings_and_libraries(service, protocol) - if isinstance(result, ProblemDetail): - return result + return e.problem_detail - library_error = self.check_libraries(service) - if library_error: - self._db.rollback() - return library_error + return Response(str(catalog_service.id), response_code) - if is_new: - return Response(str(service.id), 201) - else: - return Response(str(service.id), 200) - - def validate_form_fields(self, protocol): - """Verify that the protocol which the user has selected is in the list - of recognized protocol options.""" - - if protocol and protocol not in [p.get("name") for p in self.protocols]: - return UNKNOWN_PROTOCOL - - def get_name(self, service): - """Check that there isn't already a service with this name""" - - name = flask.request.form.get("name") - if name: - if service.name != name: - service_with_name = get_one(self._db, ExternalIntegration, name=name) - if service_with_name: - return INTEGRATION_NAME_ALREADY_IN_USE - return name - - def check_libraries(self, service): - """Check that no library ended up with multiple MARC export integrations.""" - - for library in service.libraries: - marc_export_count = 0 - for integration in 
library.integrations: - if ( - integration.goal == ExternalIntegration.CATALOG_GOAL - and integration.protocol == ExternalIntegration.MARC_EXPORT - ): - marc_export_count += 1 - if marc_export_count > 1: - return MULTIPLE_SERVICES_FOR_LIBRARY.detailed( - _( - "You tried to add a MARC export service to %(library)s, but it already has one.", - library=library.short_name, - ) - ) - - def process_delete(self, service_id): - return self._delete_integration(service_id, ExternalIntegration.CATALOG_GOAL) + def process_delete(self, service_id: int) -> Response: + self.require_system_admin() + return self.delete_service(service_id) diff --git a/api/controller.py b/api/controller.py index d7a296faa3..25d00fbdf2 100644 --- a/api/controller.py +++ b/api/controller.py @@ -69,6 +69,7 @@ ) from core.feed.navigation import NavigationFeed from core.feed.opds import NavigationFacets +from core.integration.goals import Goals from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList from core.marc import MARCExporter from core.metadata_layer import ContributorData @@ -1272,19 +1273,29 @@ def download_page(self): body = "
<h2>Download MARC files for %s</h2>
" % library.name time_format = "%B %-d, %Y" - # Check if a MARC exporter is configured so we can show a + # Check if a MARC exporter is configured, so we can show a # message if it's not. - exporter = None - try: - exporter = MARCExporter.from_config(library) - except CannotLoadConfiguration as e: + integration_query = ( + select(IntegrationLibraryConfiguration) + .join(IntegrationConfiguration) + .where( + IntegrationConfiguration.goal == Goals.CATALOG_GOAL, + IntegrationConfiguration.protocol == MARCExporter.__name__, + IntegrationLibraryConfiguration.library == library, + ) + ) + + session = Session.object_session(library) + integration = session.execute(integration_query).one_or_none() + + if not integration: body += ( "
<p>
" + _("No MARC exporter is currently configured for this library.") + "
</p>
" ) - if len(library.cachedmarcfiles) < 1 and exporter: + if len(library.cachedmarcfiles) < 1 and integration: body += "
<p>
" + _("MARC files aren't ready to download yet.") + "
</p>
" files_by_lane = defaultdict(dict) diff --git a/api/integration/registry/catalog_services.py b/api/integration/registry/catalog_services.py new file mode 100644 index 0000000000..f23627dbdb --- /dev/null +++ b/api/integration/registry/catalog_services.py @@ -0,0 +1,9 @@ +from core.integration.goals import Goals +from core.integration.registry import IntegrationRegistry +from core.marc import MARCExporter + + +class CatalogServicesRegistry(IntegrationRegistry[MARCExporter]): + def __init__(self) -> None: + super().__init__(Goals.CATALOG_GOAL) + self.register(MARCExporter) diff --git a/api/marc.py b/api/marc.py index 1ad5e8c549..54ed824f10 100644 --- a/api/marc.py +++ b/api/marc.py @@ -1,18 +1,28 @@ +from __future__ import annotations + import urllib.error import urllib.parse import urllib.request -from pymarc import Field, Subfield +from pymarc import Field, Record, Subfield from sqlalchemy import select from core.config import Configuration -from core.marc import Annotator, MARCExporter -from core.model import ConfigurationSetting, Session +from core.marc import Annotator, MarcExporterLibrarySettings +from core.model import ( + ConfigurationSetting, + Edition, + Identifier, + Library, + LicensePool, + Session, + Work, +) from core.model.discovery_service_registration import DiscoveryServiceRegistration class LibraryAnnotator(Annotator): - def __init__(self, library): + def __init__(self, library: Library) -> None: super().__init__() self.library = library _db = Session.object_session(library) @@ -20,55 +30,46 @@ def __init__(self, library): _db, Configuration.BASE_URL_KEY ).value - def value(self, key, integration): - _db = Session.object_session(integration) - return ConfigurationSetting.for_library_and_externalintegration( - _db, key, self.library, integration - ).value - def annotate_work_record( self, - work, - active_license_pool, - edition, - identifier, - record, - integration=None, - updated=None, - ): + work: Work, + active_license_pool: LicensePool, + edition: Edition, + identifier: Identifier, + record: Record, + settings: MarcExporterLibrarySettings | None, + ) -> None: super().annotate_work_record( - work, active_license_pool, edition, identifier, record, integration, updated + work, active_license_pool, edition, identifier, record, settings ) - if integration: - marc_org = self.value(MARCExporter.MARC_ORGANIZATION_CODE, integration) - include_summary = ( - self.value(MARCExporter.INCLUDE_SUMMARY, integration) == "true" - ) - include_genres = ( - self.value(MARCExporter.INCLUDE_SIMPLIFIED_GENRES, integration) - == "true" - ) + if settings is None: + return - if marc_org: - self.add_marc_organization_code(record, marc_org) + if settings.organization_code: + self.add_marc_organization_code(record, settings.organization_code) - if include_summary: - self.add_summary(record, work) + if settings.include_summary: + self.add_summary(record, work) - if include_genres: - self.add_simplified_genres(record, work) + if settings.include_genres: + self.add_simplified_genres(record, work) - self.add_web_client_urls(record, self.library, identifier, integration) + self.add_web_client_urls(record, self.library, identifier, settings) - def add_web_client_urls(self, record, library, identifier, integration=None): + def add_web_client_urls( + self, + record: Record, + library: Library, + identifier: Identifier, + exporter_settings: MarcExporterLibrarySettings, + ) -> None: _db = Session.object_session(library) settings = [] - if integration: - marc_setting = 
self.value(MARCExporter.WEB_CLIENT_URL, integration) - if marc_setting: - settings.append(marc_setting) + marc_setting = exporter_settings.web_client_url + if marc_setting: + settings.append(marc_setting) settings += [ s.web_client @@ -81,7 +82,7 @@ def add_web_client_urls(self, record, library, identifier, integration=None): ] qualified_identifier = urllib.parse.quote( - identifier.type + "/" + identifier.identifier, safe="" + f"{identifier.type}/{identifier.identifier}", safe="" ) for web_client_base_url in settings: diff --git a/core/integration/goals.py b/core/integration/goals.py index b7326f2ce8..99db3d2d64 100644 --- a/core/integration/goals.py +++ b/core/integration/goals.py @@ -9,3 +9,4 @@ class Goals(Enum): PATRON_AUTH_GOAL = "patron_auth" LICENSE_GOAL = "licenses" DISCOVERY_GOAL = "discovery" + CATALOG_GOAL = "catalog" diff --git a/core/lane.py b/core/lane.py index 7ddacb5af4..ee0f389052 100644 --- a/core/lane.py +++ b/core/lane.py @@ -2719,7 +2719,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): # A Lane may have many CachedMARCFiles. cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship( "CachedMARCFile", - backref="lane", + back_populates="lane", cascade="all, delete-orphan", ) diff --git a/core/marc.py b/core/marc.py index 928c65e698..0cf262f8be 100644 --- a/core/marc.py +++ b/core/marc.py @@ -1,21 +1,31 @@ +from __future__ import annotations + import re +from datetime import datetime from io import BytesIO -from typing import Optional +from typing import Callable, Mapping, Optional, Tuple -from flask_babel import lazy_gettext as _ +from pydantic import NonNegativeInt from pymarc import Field, Record, Subfield from sqlalchemy.orm.session import Session from core.classifier import Classifier -from core.config import CannotLoadConfiguration -from core.external_search import ExternalSearchIndex, SortKeyPagination -from core.lane import BaseFacets, Lane +from core.external_search import ExternalSearchIndex, Filter, SortKeyPagination +from core.integration.base import HasLibraryIntegrationConfiguration +from core.integration.settings import ( + BaseSettings, + ConfigurationFormItem, + ConfigurationFormItemType, + FormField, +) +from core.lane import BaseFacets, Lane, WorkList from core.model import ( CachedMARCFile, DeliveryMechanism, Edition, - ExternalIntegration, Identifier, + Library, + LicensePool, Representation, Work, get_one_or_create, @@ -23,16 +33,17 @@ from core.service.storage.s3 import MultipartS3ContextManager, S3Service from core.util import LanguageCodes from core.util.datetime_helpers import utc_now +from core.util.log import LoggerMixin -class Annotator: +class Annotator(LoggerMixin): """The Annotator knows how to add information about a Work to a MARC record.""" marc_cache_field = Work.marc_record.name # From https://www.loc.gov/standards/valuelist/marctarget.html - AUDIENCE_TERMS = { + AUDIENCE_TERMS: Mapping[str, str] = { Classifier.AUDIENCE_CHILDREN: "Juvenile", Classifier.AUDIENCE_YOUNG_ADULT: "Adolescent", Classifier.AUDIENCE_ADULTS_ONLY: "Adult", @@ -42,7 +53,7 @@ class Annotator: # TODO: Add remaining formats. Maybe there's a better place to # store this so it's easier to keep up-to-date. # There doesn't seem to be any particular vocabulary for this. 
- FORMAT_TERMS = { + FORMAT_TERMS: Mapping[Tuple[Optional[str], Optional[str]], str] = { (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM): "EPUB eBook", ( Representation.EPUB_MEDIA_TYPE, @@ -54,22 +65,21 @@ class Annotator: def annotate_work_record( self, - work, - active_license_pool, - edition, - identifier, - record, - integration=None, - updated=None, - ): + work: Work, + active_license_pool: LicensePool, + edition: Edition, + identifier: Identifier, + record: Record, + settings: MarcExporterLibrarySettings | None, + ) -> None: """Add metadata from this work to a MARC record. - :work: The Work whose record is being annotated. - :active_license_pool: Of all the LicensePools associated with this + :param work: The Work whose record is being annotated. + :param active_license_pool: Of all the LicensePools associated with this Work, the client has expressed interest in this one. - :edition: The Edition to use when associating bibliographic + :param edition: The Edition to use when associating bibliographic metadata with this entry. - :identifier: Of all the Identifiers associated with this + :param identifier: Of all the Identifiers associated with this Work, the client has expressed interest in this one. :param record: A MARCRecord object to be annotated. """ @@ -77,7 +87,7 @@ def annotate_work_record( self.add_formats(record, active_license_pool) @classmethod - def leader(cls, work): + def leader(cls, work: Work) -> str: # The record length is automatically updated once fields are added. initial_record_length = "00000" @@ -98,7 +108,9 @@ def leader(cls, work): return leader @classmethod - def add_control_fields(cls, record, identifier, pool, edition): + def add_control_fields( + cls, record: Record, identifier: Identifier, pool: LicensePool, edition: Edition + ) -> None: # Unique identifier for this record. record.add_field(Field(tag="001", data=identifier.urn)) @@ -145,11 +157,11 @@ def add_control_fields(cls, record, identifier, pool, edition): record.add_field(Field(tag="008", data=data)) @classmethod - def add_marc_organization_code(cls, record, marc_org): + def add_marc_organization_code(cls, record: Record, marc_org: str) -> None: record.add_field(Field(tag="003", data=marc_org)) @classmethod - def add_isbn(cls, record, identifier): + def add_isbn(cls, record: Record, identifier: Identifier) -> None: # Add the ISBN if we have one. isbn = None if identifier.type == Identifier.ISBN: @@ -176,18 +188,33 @@ def add_isbn(cls, record, identifier): ) @classmethod - def add_title(cls, record, edition): + def add_title(cls, record: Record, edition: Edition) -> None: # Non-filing characters are used to indicate when the beginning of a title # should not be used in sorting. This code tries to identify them by comparing # the title and the sort_title. non_filing_characters = 0 - if edition.title != edition.sort_title and ("," in edition.sort_title): + if ( + edition.title != edition.sort_title + and edition.sort_title is not None + and ("," in edition.sort_title) + ): stemmed = edition.sort_title[: edition.sort_title.rindex(",")] - non_filing_characters = edition.title.index(stemmed) - # MARC only supports up to 9 non-filing characters, but if we got more - # something is probably wrong anyway. 
- if non_filing_characters > 9: - non_filing_characters = 0 + if edition.title is None: + cls.logger().warning( + "Edition %s has a sort title, but no title.", edition.id + ) + non_filing_characters = 0 + else: + non_filing_characters = edition.title.index(stemmed) + # MARC only supports up to 9 non-filing characters, but if we got more + # something is probably wrong anyway. + if non_filing_characters > 9: + cls.logger().warning( + "Edition %s has %s non-filing characters, but MARC only supports up to 9.", + edition.id, + non_filing_characters, + ) + non_filing_characters = 0 subfields = [Subfield("a", str(edition.title or ""))] if edition.subtitle: @@ -203,13 +230,11 @@ def add_title(cls, record, edition): ) @classmethod - def add_contributors(cls, record, edition): + def add_contributors(cls, record: Record, edition: Edition) -> None: """Create contributor fields for this edition. TODO: Use canonical names from LoC. """ - contibutor_fields = [] - # If there's one author, use the 100 field. if edition.sort_author and len(edition.contributions) == 1: record.add_field( @@ -237,7 +262,7 @@ def add_contributors(cls, record, edition): ) @classmethod - def add_publisher(cls, record, edition): + def add_publisher(cls, record: Record, edition: Edition) -> None: if edition.publisher: publication_date = edition.issued or edition.published year = "" @@ -256,7 +281,7 @@ def add_publisher(cls, record, edition): ) @classmethod - def add_distributor(cls, record, pool): + def add_distributor(cls, record: Record, pool: LicensePool) -> None: # Distributor record.add_field( Field( @@ -267,7 +292,7 @@ def add_distributor(cls, record, pool): ) @classmethod - def add_physical_description(cls, record, edition): + def add_physical_description(cls, record: Record, edition: Edition) -> None: # These 3xx fields are for a physical description of the item. 
if edition.medium == Edition.BOOK_MEDIUM: record.add_field( @@ -376,8 +401,9 @@ def add_physical_description(cls, record, edition): ) @classmethod - def add_audience(cls, record, work): - audience = cls.AUDIENCE_TERMS.get(work.audience, "General") + def add_audience(cls, record: Record, work: Work) -> None: + work_audience = work.audience or Classifier.AUDIENCE_ADULT + audience = cls.AUDIENCE_TERMS.get(work_audience, "General") record.add_field( Field( tag="385", @@ -390,7 +416,7 @@ def add_audience(cls, record, work): ) @classmethod - def add_series(cls, record, edition): + def add_series(cls, record: Record, edition: Edition) -> None: if edition.series: subfields = [Subfield("a", str(edition.series))] if edition.series_position: @@ -404,7 +430,7 @@ def add_series(cls, record, edition): ) @classmethod - def add_system_details(cls, record): + def add_system_details(cls, record: Record) -> None: record.add_field( Field( tag="538", @@ -414,10 +440,8 @@ def add_system_details(cls, record): ) @classmethod - def add_formats(cls, record, pool): - formats = [] + def add_formats(cls, record: Record, pool: LicensePool) -> None: for lpdm in pool.delivery_mechanisms: - format = None dm = lpdm.delivery_mechanism format = cls.FORMAT_TERMS.get((dm.content_type, dm.drm_scheme)) if format: @@ -432,7 +456,7 @@ def add_formats(cls, record, pool): ) @classmethod - def add_summary(cls, record, work): + def add_summary(cls, record: Record, work: Work) -> None: summary = work.summary_text if summary: stripped = re.sub("<[^>]+?>", " ", summary) @@ -445,9 +469,8 @@ def add_summary(cls, record, work): ) @classmethod - def add_simplified_genres(cls, record, work): + def add_simplified_genres(cls, record: Record, work: Work) -> None: """Create subject fields for this work.""" - genres = [] genres = work.genres for genre in genres: @@ -463,7 +486,7 @@ def add_simplified_genres(cls, record, work): ) @classmethod - def add_ebooks_subject(cls, record): + def add_ebooks_subject(cls, record: Record) -> None: # This is a general subject that can be added to all records. record.add_field( Field( @@ -481,118 +504,127 @@ class MARCExporterFacets(BaseFacets): it only works updated since a certain time. """ - def __init__(self, start_time): + def __init__(self, start_time: Optional[datetime]): self.start_time = start_time - def modify_search_filter(self, filter): + def modify_search_filter(self, filter: Filter) -> None: filter.order = self.SORT_ORDER_TO_OPENSEARCH_FIELD_NAME[self.ORDER_LAST_UPDATE] filter.order_ascending = True filter.updated_after = self.start_time -class MARCExporter: - """Turn a work into a record for a MARC file.""" - - NAME = ExternalIntegration.MARC_EXPORT - - DESCRIPTION = _( - "Export metadata into MARC files that can be imported into an ILS manually." - ) - +class MarcExporterSettings(BaseSettings): # This setting (in days) controls how often MARC files should be # automatically updated. Since the crontab in docker isn't easily # configurable, we can run a script daily but check this to decide # whether to do anything. 
- UPDATE_FREQUENCY = "marc_update_frequency" - DEFAULT_UPDATE_FREQUENCY = 30 + update_frequency: NonNegativeInt = FormField( + 30, + form=ConfigurationFormItem( + label="Update frequency (in days)", + type=ConfigurationFormItemType.NUMBER, + required=True, + ), + alias="marc_update_frequency", + ) + +class MarcExporterLibrarySettings(BaseSettings): # MARC organization codes are assigned by the # Library of Congress and can be found here: # http://www.loc.gov/marc/organizations/org-search.php - MARC_ORGANIZATION_CODE = "marc_organization_code" + organization_code: Optional[str] = FormField( + None, + form=ConfigurationFormItem( + label="The MARC organization code for this library (003 field).", + description="MARC organization codes are assigned by the Library of Congress.", + type=ConfigurationFormItemType.TEXT, + ), + alias="marc_organization_code", + ) + + web_client_url: Optional[str] = FormField( + None, + form=ConfigurationFormItem( + label="The base URL for the web catalog for this library, for the 856 field.", + description="If using a library registry that provides a web catalog, this can be left blank.", + type=ConfigurationFormItemType.TEXT, + ), + alias="marc_web_client_url", + ) + + include_summary: bool = FormField( + False, + form=ConfigurationFormItem( + label="Include summaries in MARC records (520 field)", + type=ConfigurationFormItemType.SELECT, + options={"false": "Do not include summaries", "true": "Include summaries"}, + ), + ) + + include_genres: bool = FormField( + False, + form=ConfigurationFormItem( + label="Include Palace Collection Manager genres in MARC records (650 fields)", + type=ConfigurationFormItemType.SELECT, + options={ + "false": "Do not include Palace Collection Manager genres", + "true": "Include Palace Collection Manager genres", + }, + ), + alias="include_simplified_genres", + ) - WEB_CLIENT_URL = "marc_web_client_url" - INCLUDE_SUMMARY = "include_summary" - INCLUDE_SIMPLIFIED_GENRES = "include_simplified_genres" + +class MARCExporter( + HasLibraryIntegrationConfiguration[ + MarcExporterSettings, MarcExporterLibrarySettings + ] +): + """Turn a work into a record for a MARC file.""" # The minimum size each piece of a multipart upload should be MINIMUM_UPLOAD_BATCH_SIZE_BYTES = 5 * 1024 * 1024 # 5MB - LIBRARY_SETTINGS = [ - { - "key": UPDATE_FREQUENCY, - "label": _("Update frequency (in days)"), - "description": _( - "The circulation manager will wait this number of days between generating MARC files." - ), - "type": "number", - "default": DEFAULT_UPDATE_FREQUENCY, - }, - { - "key": MARC_ORGANIZATION_CODE, - "label": _("The MARC organization code for this library (003 field)."), - "description": _( - "MARC organization codes are assigned by the Library of Congress." - ), - }, - { - "key": WEB_CLIENT_URL, - "label": _( - "The base URL for the web catalog for this library, for the 856 field." - ), - "description": _( - "If using a library registry that provides a web catalog, this can be left blank." 
- ), - }, - { - "key": INCLUDE_SUMMARY, - "label": _("Include summaries in MARC records (520 field)"), - "type": "select", - "options": [ - {"key": "false", "label": _("Do not include summaries")}, - {"key": "true", "label": _("Include summaries")}, - ], - "default": "false", - }, - { - "key": INCLUDE_SIMPLIFIED_GENRES, - "label": _( - "Include Palace Collection Manager genres in MARC records (650 fields)" - ), - "type": "select", - "options": [ - { - "key": "false", - "label": _("Do not include Palace Collection Manager genres"), - }, - {"key": "true", "label": _("Include Palace Collection Manager genres")}, - ], - "default": "false", - }, - ] + def __init__( + self, + _db: Session, + library: Library, + settings: MarcExporterSettings, + library_settings: MarcExporterLibrarySettings, + ): + self._db = _db + self.library = library + self.settings = settings + self.library_settings = library_settings @classmethod - def from_config(cls, library): - _db = Session.object_session(library) - integration = ExternalIntegration.lookup( - _db, - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - library=library, + def label(cls) -> str: + return "MARC Export" + + @classmethod + def description(cls) -> str: + return ( + "Export metadata into MARC files that can be imported into an ILS manually." ) - if not integration: - raise CannotLoadConfiguration( - "No MARC export service is configured for this library" - ) - return cls(_db, library, integration) - def __init__(self, _db, library, integration): - self._db = _db - self.library = library - self.integration = integration + @classmethod + def settings_class(cls) -> type[MarcExporterSettings]: + return MarcExporterSettings @classmethod - def create_record(cls, work, annotator, force_create=False, integration=None): + def library_settings_class(cls) -> type[MarcExporterLibrarySettings]: + return MarcExporterLibrarySettings + + @classmethod + def create_record( + cls, + work: Work, + annotator: Annotator | Callable[[], Annotator], + settings: MarcExporterSettings | None = None, + library_settings: MarcExporterLibrarySettings | None = None, + force_create: bool = False, + ) -> Optional[Record]: """Build a complete MARC record for a given work.""" if callable(annotator): annotator = annotator() @@ -617,8 +649,8 @@ def create_record(cls, work, annotator, force_create=False, integration=None): annotator.add_isbn(record, identifier) # TODO: The 240 and 130 fields are for translated works, so they can be grouped even - # though they have different titles. We do not group editions of the same work in - # different languages, so we can't use those yet. + # though they have different titles. We do not group editions of the same work in + # different languages, so we can't use those yet. annotator.add_title(record, edition) annotator.add_contributors(record, edition) @@ -634,14 +666,20 @@ def create_record(cls, work, annotator, force_create=False, integration=None): # Add additional fields that should not be cached. 
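# A minimal sketch (illustrative, not taken from the patch itself) of the new
# entry points end to end. "session", "library", "work" and "integration" are
# assumed to exist; "integration" stands for the library's
# IntegrationLibraryConfiguration for the MARCExporter protocol with
# Goals.CATALOG_GOAL, as queried in scripts.CacheMARCFiles.settings().
from api.marc import LibraryAnnotator
from core.marc import MARCExporter

library_settings = MARCExporter.library_settings_load(integration)
settings = MARCExporter.settings_load(integration.parent)
exporter = MARCExporter(session, library, settings, library_settings)
record = MARCExporter.create_record(
    work,
    LibraryAnnotator(library),
    settings=settings,
    library_settings=library_settings,
)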
annotator.annotate_work_record( - work, pool, edition, identifier, record, integration + work, pool, edition, identifier, record, settings=library_settings ) return record - def _file_key(self, library, lane, end_time, start_time=None): + def _file_key( + self, + library: Library, + lane: Lane | WorkList, + end_time: datetime, + start_time: Optional[datetime] = None, + ) -> str: """The path to the hosted MARC file for the given library, lane, and date range.""" - root = library.short_name + root = str(library.short_name) if start_time: time_part = str(start_time) + "-" + str(end_time) else: @@ -651,14 +689,14 @@ def _file_key(self, library, lane, end_time, start_time=None): def records( self, - lane, - annotator, + lane: Lane | WorkList, + annotator: Annotator | Callable[[], Annotator], storage_service: Optional[S3Service], - start_time=None, - force_refresh=False, - search_engine=None, - query_batch_size=500, - ): + start_time: Optional[datetime] = None, + force_refresh: bool = False, + search_engine: Optional[ExternalSearchIndex] = None, + query_batch_size: int = 500, + ) -> None: """ Create and export a MARC file for the books in a lane. @@ -704,7 +742,11 @@ def records( # Create a record for each work and add it to the # MARC file in progress. record = self.create_record( - work, annotator, force_refresh, self.integration + work, + annotator, + self.settings, + self.library_settings, + force_refresh, ) if record: record_bytes = record.as_marc() @@ -746,8 +788,8 @@ def records( else: representation.mirror_exception = str(upload.exception) - def _upload_batch(self, output: BytesIO, upload: MultipartS3ContextManager): - "Upload a batch of MARC records as one part of a multi-part upload." + def _upload_batch(self, output: BytesIO, upload: MultipartS3ContextManager) -> None: + """Upload a batch of MARC records as one part of a multi-part upload.""" content = output.getvalue() if content: upload.upload_part(content) diff --git a/core/migration/migrate_external_integration.py b/core/migration/migrate_external_integration.py index 56e82a5a76..a05c9a435e 100644 --- a/core/migration/migrate_external_integration.py +++ b/core/migration/migrate_external_integration.py @@ -1,9 +1,13 @@ import json from collections import defaultdict -from typing import Dict, Tuple, Type, TypeVar +from typing import Any, Dict, Optional, Tuple, Type, TypeVar from sqlalchemy.engine import Connection, CursorResult, Row +from core.integration.base import ( + HasIntegrationConfiguration, + HasLibraryIntegrationConfiguration, +) from core.integration.settings import ( BaseSettings, ConfigurationFormItemType, @@ -43,7 +47,7 @@ def _validate_and_load_settings( def get_configuration_settings( connection: Connection, integration: Row, -) -> Tuple[Dict, Dict, str]: +) -> Tuple[Dict[str, str], Dict[str, Dict[str, str]], str]: settings = connection.execute( "select cs.library_id, cs.key, cs.value from configurationsettings cs " "where cs.external_integration_id = (%s)", @@ -68,31 +72,33 @@ def get_configuration_settings( def _migrate_external_integration( connection: Connection, - integration: Row, - protocol_class: Type, + name: str, + protocol: str, + protocol_class: Type[HasIntegrationConfiguration[BaseSettings]], goal: str, - settings_dict: Dict, + settings_dict: Dict[str, Any], self_test_results: str, - name=None, + context: Optional[Dict[str, Any]] = None, ) -> int: # Load and validate the settings before storing them in the database. 
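# A minimal sketch (illustrative, not taken from the patch itself) of what that
# validation achieves for the MARC exporter: the legacy configuration key is the
# alias on the new field, so assuming the alias is honoured during validation the
# old value lands on the new attribute before being JSON-serialized.
from core.marc import MarcExporterSettings

settings_obj = MarcExporterSettings(marc_update_frequency=7)
assert settings_obj.update_frequency == 7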
settings_class = protocol_class.settings_class() settings_obj = _validate_and_load_settings(settings_class, settings_dict) integration_configuration = connection.execute( "insert into integration_configurations " - "(protocol, goal, name, settings, self_test_results) " - "values (%s, %s, %s, %s, %s)" + "(protocol, goal, name, settings, context, self_test_results) " + "values (%s, %s, %s, %s, %s, %s)" "returning id", ( - integration.protocol, + protocol, goal, - name or integration.name, + name, json_serializer(settings_obj.dict()), + json_serializer(context or {}), self_test_results, ), ).fetchone() assert integration_configuration is not None - return integration_configuration[0] + return integration_configuration[0] # type: ignore[no-any-return] def _migrate_library_settings( @@ -100,7 +106,9 @@ def _migrate_library_settings( integration_id: int, library_id: int, library_settings: Dict[str, str], - protocol_class: Type, + protocol_class: Type[ + HasLibraryIntegrationConfiguration[BaseSettings, BaseSettings] + ], ) -> None: library_settings_class = protocol_class.library_settings_class() library_settings_obj = _validate_and_load_settings( diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py index a6603caa8f..e21571de1f 100644 --- a/core/model/cachedfeed.py +++ b/core/model/cachedfeed.py @@ -9,7 +9,8 @@ from core.model import Base if TYPE_CHECKING: - from core.model import Representation + from core.lane import Lane + from core.model import Library, Representation class CachedMARCFile(Base): @@ -21,8 +22,16 @@ class CachedMARCFile(Base): # Every MARC file is associated with a library and a lane. If the # lane is null, the file is for the top-level WorkList. library_id = Column(Integer, ForeignKey("libraries.id"), nullable=False, index=True) + library: Mapped[Library] = relationship( + "Library", + back_populates="cachedmarcfiles", + ) lane_id = Column(Integer, ForeignKey("lanes.id"), nullable=True, index=True) + lane: Mapped[Lane] = relationship( + "Lane", + back_populates="cachedmarcfiles", + ) # The representation for this file stores the URL where it was mirrored. representation_id = Column( diff --git a/core/model/configuration.py b/core/model/configuration.py index 7cd424e9b0..73cc741a66 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -62,10 +62,6 @@ class ExternalIntegration(Base): # collect logs of server-side events. LOGGING_GOAL = "logging" - # These integrations are associated with external services that - # a library uses to manage its catalog. - CATALOG_GOAL = "ils_catalog" - # Supported protocols for ExternalIntegrations with LICENSE_GOAL. OPDS_IMPORT = "OPDS Import" OPDS2_IMPORT = "OPDS 2.0 Import" @@ -120,9 +116,6 @@ class ExternalIntegration(Base): # Integrations with ANALYTICS_GOAL GOOGLE_ANALYTICS = "Google Analytics" - # Integrations with CATALOG_GOAL - MARC_EXPORT = "MARC Export" - # Keys for common configuration settings # If there is a special URL to use for access to this API, diff --git a/core/model/library.py b/core/model/library.py index 9991101bbe..a2a0e8cc02 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -114,7 +114,7 @@ class Library(Base, HasSessionCache): # A Library may have many CachedMARCFiles. 
cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship( "CachedMARCFile", - backref="library", + back_populates="library", cascade="all, delete-orphan", ) diff --git a/pyproject.toml b/pyproject.toml index c49b01b628..a3e2fb4ffa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -67,6 +67,7 @@ disallow_untyped_defs = true module = [ "api.admin.announcement_list_validator", "api.admin.config", + "api.admin.controller.catalog_services", "api.admin.controller.collection_self_tests", "api.admin.controller.collection_settings", "api.admin.controller.discovery_service_library_registrations", @@ -84,11 +85,14 @@ module = [ "api.enki", "api.integration.*", "api.lcp.hash", + "api.marc", "api.odl", "api.odl2", "api.opds_for_distributors", "core.feed.*", "core.integration.*", + "core.marc", + "core.migration.*", "core.model.announcements", "core.model.collection", "core.model.hassessioncache", diff --git a/scripts.py b/scripts.py index 1fa5e82576..d1ef3aa4dc 100644 --- a/scripts.py +++ b/scripts.py @@ -5,10 +5,11 @@ import time from datetime import timedelta from pathlib import Path -from typing import Optional +from typing import Any, Optional, Sequence, Tuple, Union -from sqlalchemy import inspect +from sqlalchemy import inspect, select from sqlalchemy.engine import Connection +from sqlalchemy.exc import NoResultFound from sqlalchemy.orm import Session from alembic import command, config @@ -30,8 +31,9 @@ ) from api.overdrive import OverdriveAPI from core.external_search import ExternalSearchIndex -from core.lane import Lane -from core.marc import MARCExporter +from core.integration.goals import Goals +from core.lane import Lane, WorkList +from core.marc import MARCExporter, MarcExporterLibrarySettings, MarcExporterSettings from core.model import ( LOCK_ID_DB_INIT, CachedMARCFile, @@ -40,9 +42,11 @@ Contribution, DataSource, Edition, - ExternalIntegration, Hold, Identifier, + IntegrationConfiguration, + IntegrationLibraryConfiguration, + Library, LicensePool, Loan, Patron, @@ -150,7 +154,7 @@ class CacheMARCFiles(LaneSweeperScript): name = "Cache MARC files" @classmethod - def arg_parser(cls, _db): + def arg_parser(cls, _db: Session) -> argparse.ArgumentParser: # type: ignore[override] parser = LaneSweeperScript.arg_parser(_db) parser.add_argument( "--max-depth", @@ -166,25 +170,50 @@ def arg_parser(cls, _db): ) return parser - def __init__(self, _db=None, cmd_args=None, *args, **kwargs): + def __init__( + self, + _db: Optional[Session] = None, + cmd_args: Optional[Sequence[str]] = None, + *args: Any, + **kwargs: Any, + ) -> None: super().__init__(_db, *args, **kwargs) self.parse_args(cmd_args) - def parse_args(self, cmd_args=None): + def parse_args( + self, cmd_args: Optional[Sequence[str]] = None + ) -> argparse.Namespace: parser = self.arg_parser(self._db) parsed = parser.parse_args(cmd_args) self.max_depth = parsed.max_depth self.force = parsed.force return parsed - def should_process_library(self, library): - integration = ExternalIntegration.lookup( - self._db, - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - library, + def settings( + self, library: Library + ) -> Tuple[MarcExporterSettings, MarcExporterLibrarySettings]: + integration_query = ( + select(IntegrationLibraryConfiguration) + .join(IntegrationConfiguration) + .where( + IntegrationConfiguration.goal == Goals.CATALOG_GOAL, + IntegrationConfiguration.protocol == MARCExporter.__name__, + IntegrationLibraryConfiguration.library == library, + ) ) - return integration is not None + integration = 
self._db.execute(integration_query).scalar_one() + + library_settings = MARCExporter.library_settings_load(integration) + settings = MARCExporter.settings_load(integration.parent) + + return settings, library_settings + + def should_process_library(self, library: Library) -> bool: + try: + self.settings(library) + return True + except NoResultFound: + return False def process_library(self, library): if self.should_process_library(library): @@ -199,7 +228,9 @@ def should_process_lane(self, lane): return False return True - def process_lane(self, lane, exporter=None): + def process_lane( + self, lane: Union[Lane, WorkList], exporter: Optional[MARCExporter] = None + ) -> None: # Generate a MARC file for this lane, if one has not been generated recently enough. if isinstance(lane, Lane): library = lane.library @@ -207,26 +238,24 @@ def process_lane(self, lane, exporter=None): library = lane.get_library(self._db) annotator = MARCLibraryAnnotator(library) - exporter = exporter or MARCExporter.from_config(library) - - update_frequency = ConfigurationSetting.for_library_and_externalintegration( - self._db, MARCExporter.UPDATE_FREQUENCY, library, exporter.integration - ).int_value - if update_frequency is None: - update_frequency = MARCExporter.DEFAULT_UPDATE_FREQUENCY - - last_update = None - files_q = ( - self._db.query(CachedMARCFile) - .filter(CachedMARCFile.library == library) - .filter( + + if exporter is None: + settings, library_settings = self.settings(library) + exporter = MARCExporter(self._db, library, settings, library_settings) + + update_frequency = exporter.settings.update_frequency + + last_updated_file = self._db.execute( + select(CachedMARCFile.end_time) + .where( + CachedMARCFile.library == library, CachedMARCFile.lane == (lane if isinstance(lane, Lane) else None), ) .order_by(CachedMARCFile.end_time.desc()) - ) + ).first() + + last_update = last_updated_file.end_time if last_updated_file else None - if files_q.count() > 0: - last_update = files_q.first().end_time if ( not self.force and last_update @@ -245,17 +274,14 @@ def process_lane(self, lane, exporter=None): return # First update the file with ALL the records. - records = exporter.records(lane, annotator, storage_service) + exporter.records(lane, annotator, storage_service) # Then create a new file with changes since the last update. - start_time = None if last_update: # Allow one day of overlap to ensure we don't miss anything due to script timing. 
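# A minimal sketch (illustrative, not taken from the patch itself) of where this
# start_time ends up: MARCExporter.records() presumably hands it to
# MARCExporterFacets, which restricts the search to works updated after that
# point. "search_filter" is a stand-in for the search Filter being modified.
from core.marc import MARCExporterFacets

facets = MARCExporterFacets(start_time=start_time)
facets.modify_search_filter(search_filter)  # sets updated_after = start_time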
start_time = last_update - timedelta(days=1) - records = exporter.records( - lane, annotator, storage_service, start_time=start_time - ) + exporter.records(lane, annotator, storage_service, start_time=start_time) class AdobeAccountIDResetScript(PatronInputScript): diff --git a/tests/api/admin/controller/test_catalog_services.py b/tests/api/admin/controller/test_catalog_services.py index 6a6038ad91..19ebd756bf 100644 --- a/tests/api/admin/controller/test_catalog_services.py +++ b/tests/api/admin/controller/test_catalog_services.py @@ -1,7 +1,10 @@ import json +from contextlib import nullcontext +from typing import Dict, Optional, Type import flask import pytest +from flask import Response from werkzeug.datastructures import ImmutableMultiDict from api.admin.exceptions import AdminNotAuthorized @@ -9,201 +12,206 @@ CANNOT_CHANGE_PROTOCOL, INTEGRATION_NAME_ALREADY_IN_USE, MISSING_SERVICE, + MISSING_SERVICE_NAME, MULTIPLE_SERVICES_FOR_LIBRARY, + NO_PROTOCOL_FOR_NEW_SERVICE, UNKNOWN_PROTOCOL, ) -from core.marc import MARCExporter -from core.model import ( - AdminRole, - ConfigurationSetting, - ExternalIntegration, - create, - get_one, -) -from tests.fixtures.api_admin import SettingsControllerFixture +from api.integration.registry.catalog_services import CatalogServicesRegistry +from core.integration.goals import Goals +from core.marc import MARCExporter, MarcExporterLibrarySettings +from core.model import AdminRole, IntegrationConfiguration, get_one +from core.util.problem_detail import ProblemDetail +from tests.fixtures.api_admin import AdminControllerFixture class TestCatalogServicesController: def test_catalog_services_get_with_no_services( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - with settings_ctrl_fixture.request_context_with_admin("/"): + with admin_ctrl_fixture.request_context_with_admin("/"): + pytest.raises( + AdminNotAuthorized, + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services, + ) + + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) - assert response.get("catalog_services") == [] - protocols = response.get("protocols") + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + assert data.get("catalog_services") == [] + protocols = data.get("protocols") + assert isinstance(protocols, list) assert 1 == len(protocols) - assert MARCExporter.NAME == protocols[0].get("name") - assert "settings" in protocols[0] - assert "library_settings" in protocols[0] - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) - settings_ctrl_fixture.ctrl.db.session.flush() - pytest.raises( - AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services, + assert protocols[0].get("name") == CatalogServicesRegistry().get_protocol( + MARCExporter ) + assert "settings" in protocols[0] + assert "library_settings" in protocols[0] def test_catalog_services_get_with_marc_exporter( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - integration, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.MARC_EXPORT, - goal=ExternalIntegration.CATALOG_GOAL, 
+ db = admin_ctrl_fixture.ctrl.db + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + library_settings = MarcExporterLibrarySettings( + include_summary=True, include_genres=True, organization_code="US-MaBoDPL" + ) + + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol is not None + integration = db.integration_configuration( + protocol, + Goals.CATALOG_GOAL, name="name", ) - integration.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] - ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - MARCExporter.MARC_ORGANIZATION_CODE, - settings_ctrl_fixture.ctrl.db.default_library(), - integration, - ).value = "US-MaBoDPL" - ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - MARCExporter.INCLUDE_SUMMARY, - settings_ctrl_fixture.ctrl.db.default_library(), - integration, - ).value = "false" - ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - MARCExporter.INCLUDE_SIMPLIFIED_GENRES, - settings_ctrl_fixture.ctrl.db.default_library(), - integration, - ).value = "true" - with settings_ctrl_fixture.request_context_with_admin("/"): + integration.libraries += [db.default_library()] + library_settings_integration = integration.for_library(db.default_library()) + MARCExporter.library_settings_update( + library_settings_integration, library_settings + ) + + with admin_ctrl_fixture.request_context_with_admin("/"): response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) - [service] = response.get("catalog_services") + assert isinstance(response, Response) + assert response.status_code == 200 + data = response.json + assert isinstance(data, dict) + services = data.get("catalog_services") + assert isinstance(services, list) + assert len(services) == 1 + service = services[0] assert integration.id == service.get("id") assert integration.name == service.get("name") assert integration.protocol == service.get("protocol") [library] = service.get("libraries") assert ( - settings_ctrl_fixture.ctrl.db.default_library().short_name + admin_ctrl_fixture.ctrl.db.default_library().short_name == library.get("short_name") ) - assert "US-MaBoDPL" == library.get(MARCExporter.MARC_ORGANIZATION_CODE) - assert "false" == library.get(MARCExporter.INCLUDE_SUMMARY) - assert "true" == library.get(MARCExporter.INCLUDE_SIMPLIFIED_GENRES) + assert "US-MaBoDPL" == library.get("organization_code") + assert library.get("include_summary") is True + assert library.get("include_genres") is True + @pytest.mark.parametrize( + "post_data,expected,admin,raises", + [ + pytest.param({}, None, False, AdminNotAuthorized, id="not admin"), + pytest.param({}, NO_PROTOCOL_FOR_NEW_SERVICE, True, None, id="no protocol"), + pytest.param( + {"protocol": "Unknown"}, + UNKNOWN_PROTOCOL, + True, + None, + id="unknown protocol", + ), + pytest.param( + {"protocol": "MARCExporter", "id": "123"}, + MISSING_SERVICE, + True, + None, + id="unknown id", + ), + pytest.param( + {"protocol": "MARCExporter", "id": ""}, + CANNOT_CHANGE_PROTOCOL, + True, + None, + id="cannot change protocol", + ), + pytest.param( + {"protocol": "MARCExporter"}, + MISSING_SERVICE_NAME, + True, + None, + id="no name", + ), + pytest.param( + {"protocol": "MARCExporter", "name": "existing integration"}, + INTEGRATION_NAME_ALREADY_IN_USE, + True, + None, + 
id="name already in use", + ), + pytest.param( + { + "protocol": "MARCExporter", + "name": "new name", + "libraries": json.dumps([{"short_name": "default"}]), + }, + MULTIPLE_SERVICES_FOR_LIBRARY, + True, + None, + id="multiple services for library", + ), + ], + ) def test_catalog_services_post_errors( - self, settings_ctrl_fixture: SettingsControllerFixture + self, + admin_ctrl_fixture: AdminControllerFixture, + post_data: Dict[str, str], + expected: Optional[ProblemDetail], + admin: bool, + raises: Optional[Type[Exception]], ): - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("protocol", "Unknown"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == UNKNOWN_PROTOCOL - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", "123"), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == MISSING_SERVICE - - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol="fake protocol", - goal=ExternalIntegration.CATALOG_GOAL, - name="name", - ) - - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("id", str(service.id)), - ("protocol", ExternalIntegration.MARC_EXPORT), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == CANNOT_CHANGE_PROTOCOL + if admin: + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - flask.request.form = ImmutableMultiDict( - [ - ("name", str(service.name)), - ("protocol", ExternalIntegration.MARC_EXPORT), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response == INTEGRATION_NAME_ALREADY_IN_USE + context_manager = pytest.raises(raises) if raises is not None else nullcontext() - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ExternalIntegration.MARC_EXPORT, - goal=ExternalIntegration.CATALOG_GOAL, + db = admin_ctrl_fixture.ctrl.db + service = db.integration_configuration( + "fake protocol", + Goals.CATALOG_GOAL, + name="existing integration", ) + service.libraries += [db.default_library()] - # This should be the last test to check since rolling back database - # changes in the test can cause it to crash. 
- service.libraries += [settings_ctrl_fixture.ctrl.db.default_library()] - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + if post_data.get("id") == "": + post_data["id"] = str(service.id) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): - ME = MARCExporter - flask.request.form = ImmutableMultiDict( - [ - ("name", "new name"), - ("protocol", ME.NAME), - ( - "libraries", - json.dumps( - [ - { - "short_name": settings_ctrl_fixture.ctrl.db.default_library().short_name, - ME.INCLUDE_SUMMARY: "false", - ME.INCLUDE_SIMPLIFIED_GENRES: "true", - } - ] - ), - ), - ] - ) - response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() - ) - assert response.uri == MULTIPLE_SERVICES_FOR_LIBRARY.uri + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): + flask.request.form = ImmutableMultiDict(post_data) + with context_manager: + response = ( + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + ) + assert isinstance(response, ProblemDetail) + assert isinstance(expected, ProblemDetail) + assert response.uri == expected.uri + assert response.status_code == expected.status_code + assert response.title == expected.title def test_catalog_services_post_create( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - ME = MARCExporter + db = admin_ctrl_fixture.ctrl.db + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol is not None + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "exporter name"), - ("protocol", ME.NAME), + ("protocol", protocol), ( "libraries", json.dumps( [ { - "short_name": settings_ctrl_fixture.ctrl.db.default_library().short_name, - ME.INCLUDE_SUMMARY: "false", - ME.INCLUDE_SIMPLIFIED_GENRES: "true", + "short_name": db.default_library().short_name, + "include_summary": "false", + "include_genres": "true", } ] ), @@ -211,67 +219,55 @@ def test_catalog_services_post_create( ] ) response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) + assert isinstance(response, Response) assert response.status_code == 201 service = get_one( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - goal=ExternalIntegration.CATALOG_GOAL, + db.session, + IntegrationConfiguration, + goal=Goals.CATALOG_GOAL, ) - assert isinstance(service, ExternalIntegration) + assert isinstance(service, IntegrationConfiguration) - assert service.id == int(response.get_data()) - assert ME.NAME == service.protocol - assert "exporter name" == service.name - assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries - assert ( - "false" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SUMMARY, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) - assert ( - "true" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SIMPLIFIED_GENRES, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) + assert int(response.get_data()) == service.id + assert 
service.protocol == protocol + assert service.name == "exporter name" + assert service.libraries == [db.default_library()] + + settings = MARCExporter.library_settings_load(service.library_configurations[0]) + assert settings.include_summary is False + assert settings.include_genres is True def test_catalog_services_post_edit( - self, settings_ctrl_fixture: SettingsControllerFixture + self, admin_ctrl_fixture: AdminControllerFixture ): - ME = MARCExporter + db = admin_ctrl_fixture.ctrl.db + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol is not None + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ME.NAME, - goal=ExternalIntegration.CATALOG_GOAL, + service = db.integration_configuration( + protocol, + Goals.CATALOG_GOAL, name="name", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="POST"): + with admin_ctrl_fixture.request_context_with_admin("/", method="POST"): flask.request.form = ImmutableMultiDict( [ ("name", "exporter name"), ("id", str(service.id)), - ("protocol", ME.NAME), + ("protocol", protocol), ( "libraries", json.dumps( [ { - "short_name": settings_ctrl_fixture.ctrl.db.default_library().short_name, - ME.INCLUDE_SUMMARY: "false", - ME.INCLUDE_SIMPLIFIED_GENRES: "true", + "short_name": db.default_library().short_name, + "include_summary": "true", + "include_genres": "false", } ] ), @@ -279,60 +275,44 @@ def test_catalog_services_post_edit( ] ) response = ( - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_catalog_services() ) + assert isinstance(response, Response) assert response.status_code == 200 - assert service.id == int(response.get_data()) - assert ME.NAME == service.protocol - assert "exporter name" == service.name - assert [settings_ctrl_fixture.ctrl.db.default_library()] == service.libraries - assert ( - "false" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SUMMARY, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) - assert ( - "true" - == ConfigurationSetting.for_library_and_externalintegration( - settings_ctrl_fixture.ctrl.db.session, - ME.INCLUDE_SIMPLIFIED_GENRES, - settings_ctrl_fixture.ctrl.db.default_library(), - service, - ).value - ) + assert int(response.get_data()) == service.id + assert service.protocol == protocol + assert service.name == "exporter name" + assert service.libraries == [db.default_library()] - def test_catalog_services_delete( - self, settings_ctrl_fixture: SettingsControllerFixture - ): - ME = MARCExporter - service, ignore = create( - settings_ctrl_fixture.ctrl.db.session, - ExternalIntegration, - protocol=ME.NAME, - goal=ExternalIntegration.CATALOG_GOAL, + settings = MARCExporter.library_settings_load(service.library_configurations[0]) + assert settings.include_summary is True + assert settings.include_genres is False + + def test_catalog_services_delete(self, admin_ctrl_fixture: AdminControllerFixture): + db = admin_ctrl_fixture.ctrl.db + protocol = CatalogServicesRegistry().get_protocol(MARCExporter) + assert protocol is not None + + service = db.integration_configuration( + protocol, + Goals.CATALOG_GOAL, name="name", ) - with settings_ctrl_fixture.request_context_with_admin("/", method="DELETE"): - settings_ctrl_fixture.admin.remove_role(AdminRole.SYSTEM_ADMIN) 
+ with admin_ctrl_fixture.request_context_with_admin("/", method="DELETE"): pytest.raises( AdminNotAuthorized, - settings_ctrl_fixture.manager.admin_catalog_services_controller.process_delete, + admin_ctrl_fixture.manager.admin_catalog_services_controller.process_delete, service.id, ) - settings_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) - response = settings_ctrl_fixture.manager.admin_catalog_services_controller.process_delete( + admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) + response = admin_ctrl_fixture.manager.admin_catalog_services_controller.process_delete( service.id ) + assert isinstance(response, Response) assert response.status_code == 200 - none_service = get_one( - settings_ctrl_fixture.ctrl.db.session, ExternalIntegration, id=service.id - ) + none_service = get_one(db.session, IntegrationConfiguration, id=service.id) assert none_service is None diff --git a/tests/api/test_controller_marc.py b/tests/api/test_controller_marc.py index 59652cd134..629da47382 100644 --- a/tests/api/test_controller_marc.py +++ b/tests/api/test_controller_marc.py @@ -1,6 +1,8 @@ import datetime -from core.model import CachedMARCFile, ExternalIntegration, Representation, create +from core.integration.goals import Goals +from core.marc import MARCExporter +from core.model import CachedMARCFile, Representation, create from core.util.datetime_helpers import utc_now from tests.fixtures.api_controller import CirculationControllerFixture @@ -17,10 +19,10 @@ def test_download_page_with_exporter_and_files( library = db.default_library() lane = db.lane(display_name="Test Lane") - exporter = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], + db.integration_configuration( + MARCExporter.__name__, + Goals.CATALOG_GOAL, + libraries=[library], ) rep1, ignore = create( @@ -111,10 +113,10 @@ def test_download_page_with_exporter_but_no_files( library = db.default_library() - exporter = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], + db.integration_configuration( + MARCExporter.__name__, + Goals.CATALOG_GOAL, + libraries=[library], ) with circulation_fixture.request_context_with_library("/"): diff --git a/tests/api/test_marc.py b/tests/api/test_marc.py index b11dbacf06..6559bb2f0b 100644 --- a/tests/api/test_marc.py +++ b/tests/api/test_marc.py @@ -1,13 +1,16 @@ +import functools import urllib.error import urllib.parse import urllib.request +from unittest.mock import create_autospec +import pytest from pymarc import Record from api.marc import LibraryAnnotator from core.config import Configuration -from core.marc import MARCExporter -from core.model import ConfigurationSetting, ExternalIntegration, create +from core.marc import MarcExporterLibrarySettings +from core.model import ConfigurationSetting, create from core.model.discovery_service_registration import DiscoveryServiceRegistration from tests.fixtures.database import ( DatabaseTransactionFixture, @@ -15,146 +18,50 @@ ) -class TestLibraryAnnotator: - def test_annotate_work_record(self, db: DatabaseTransactionFixture): +class LibraryAnnotatorFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.library = db.default_library() + # Mock class to verify that the correct methods # are called by annotate_work_record. 
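# Note (illustrative aside, not taken from the patch itself): the hand-written
# MockAnnotator subclass below is superseded by unittest.mock.create_autospec
# mocks; because autospec mirrors each real method's signature, assertions such
# as assert_called_once_with(record, work) also fail on calls made with the
# wrong arguments, not just on missing calls.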
- class MockAnnotator(LibraryAnnotator): - called_with = dict() - - def add_marc_organization_code(self, record, marc_org): - self.called_with["add_marc_organization_code"] = [record, marc_org] - - def add_summary(self, record, work): - self.called_with["add_summary"] = [record, work] - - def add_simplified_genres(self, record, work): - self.called_with["add_simplified_genres"] = [record, work] - - def add_web_client_urls(self, record, library, identifier, integration): - self.called_with["add_web_client_urls"] = [ - record, - library, - identifier, - integration, - ] - - # Also check that the parent class annotate_work_record is called. - def add_distributor(self, record, pool): - self.called_with["add_distributor"] = [record, pool] - - def add_formats(self, record, pool): - self.called_with["add_formats"] = [record, pool] - - annotator = MockAnnotator(db.default_library()) - record = Record() - work = db.work(with_license_pool=True) - pool = work.license_pools[0] - edition = pool.presentation_edition - identifier = pool.identifier - - integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], + self.mock_annotator = LibraryAnnotator(self.library) + self.mock_add_marc_organization_code = create_autospec( + spec=self.mock_annotator.add_marc_organization_code ) - - annotator.annotate_work_record( - work, pool, edition, identifier, record, integration + self.mock_annotator.add_marc_organization_code = ( + self.mock_add_marc_organization_code + ) + self.mock_add_summary = create_autospec(spec=self.mock_annotator.add_summary) + self.mock_annotator.add_summary = self.mock_add_summary + self.mock_add_simplified_genres = create_autospec( + spec=self.mock_annotator.add_simplified_genres + ) + self.mock_annotator.add_simplified_genres = self.mock_add_simplified_genres + self.mock_add_web_client_urls = create_autospec( + spec=self.mock_annotator.add_web_client_urls + ) + self.mock_annotator.add_web_client_urls = self.mock_add_web_client_urls + self.mock_add_distributor = create_autospec( + spec=self.mock_annotator.add_distributor + ) + self.mock_annotator.add_distributor = self.mock_add_distributor + self.mock_add_formats = create_autospec(spec=self.mock_annotator.add_formats) + self.mock_annotator.add_formats = self.mock_add_formats + + self.record = Record() + self.work = db.work(with_license_pool=True) + self.pool = self.work.license_pools[0] + self.edition = self.pool.presentation_edition + self.identifier = self.pool.identifier + + self.mock_annotate_work_record = functools.partial( + self.mock_annotator.annotate_work_record, + work=self.work, + active_license_pool=self.pool, + edition=self.edition, + identifier=self.identifier, + record=self.record, ) - - # If there are no settings, the only methods called will be add_web_client_urls - # and the parent class methods. - assert "add_marc_organization_code" not in annotator.called_with - assert "add_summary" not in annotator.called_with - assert "add_simplified_genres" not in annotator.called_with - assert [ - record, - db.default_library(), - identifier, - integration, - ] == annotator.called_with.get("add_web_client_urls") - assert [record, pool] == annotator.called_with.get("add_distributor") - assert [record, pool] == annotator.called_with.get("add_formats") - - # If settings are false, the methods still won't be called. 
- ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.INCLUDE_SUMMARY, db.default_library(), integration - ).value = "false" - - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.INCLUDE_SIMPLIFIED_GENRES, - db.default_library(), - integration, - ).value = "false" - - annotator = MockAnnotator(db.default_library()) - annotator.annotate_work_record( - work, pool, edition, identifier, record, integration - ) - - assert "add_marc_organization_code" not in annotator.called_with - assert "add_summary" not in annotator.called_with - assert "add_simplified_genres" not in annotator.called_with - assert [ - record, - db.default_library(), - identifier, - integration, - ] == annotator.called_with.get("add_web_client_urls") - assert [record, pool] == annotator.called_with.get("add_distributor") - assert [record, pool] == annotator.called_with.get("add_formats") - - # Once the include settings are true and the marc organization code is set, - # all methods are called. - ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.INCLUDE_SUMMARY, db.default_library(), integration - ).value = "true" - - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.INCLUDE_SIMPLIFIED_GENRES, - db.default_library(), - integration, - ).value = "true" - - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.MARC_ORGANIZATION_CODE, - db.default_library(), - integration, - ).value = "marc org" - - annotator = MockAnnotator(db.default_library()) - annotator.annotate_work_record( - work, pool, edition, identifier, record, integration - ) - - assert [record, "marc org"] == annotator.called_with.get( - "add_marc_organization_code" - ) - assert [record, work] == annotator.called_with.get("add_summary") - assert [record, work] == annotator.called_with.get("add_simplified_genres") - assert [ - record, - db.default_library(), - identifier, - integration, - ] == annotator.called_with.get("add_web_client_urls") - assert [record, pool] == annotator.called_with.get("add_distributor") - assert [record, pool] == annotator.called_with.get("add_formats") - - def test_add_web_client_urls( - self, - db: DatabaseTransactionFixture, - create_integration_configuration: IntegrationConfigurationFixture, - ): - # Web client URLs can come from either the MARC export integration or - # a library registry integration. 
- - identifier = db.identifier(foreign_id="identifier") - lib_short_name = db.default_library().short_name # The URL for a work is constructed as: # - //works/ @@ -164,42 +71,154 @@ def test_add_web_client_urls( client_url_template = "{client_base}/book/{work_link}" qualified_identifier = urllib.parse.quote( - identifier.type + "/" + identifier.identifier, safe="" + self.identifier.type + "/" + self.identifier.identifier, safe="" ) cm_base_url = "http://test-circulation-manager" expected_work_link = work_link_template.format( - cm_base=cm_base_url, lib=lib_short_name, qid=qualified_identifier + cm_base=cm_base_url, lib=self.library.short_name, qid=qualified_identifier ) encoded_work_link = urllib.parse.quote(expected_work_link, safe="") - client_base_1 = "http://web_catalog" - client_base_2 = "http://another_web_catalog" - expected_client_url_1 = client_url_template.format( - client_base=client_base_1, work_link=encoded_work_link + self.client_base_1 = "http://web_catalog" + self.client_base_2 = "http://another_web_catalog" + self.expected_client_url_1 = client_url_template.format( + client_base=self.client_base_1, work_link=encoded_work_link ) - expected_client_url_2 = client_url_template.format( - client_base=client_base_2, work_link=encoded_work_link + self.expected_client_url_2 = client_url_template.format( + client_base=self.client_base_2, work_link=encoded_work_link ) # A few checks to ensure that our setup is useful. - assert lib_short_name is not None - assert len(lib_short_name) > 0 - assert client_base_1 != client_base_2 - assert expected_client_url_1 != expected_client_url_2 - assert expected_client_url_1.startswith(client_base_1) - assert expected_client_url_2.startswith(client_base_2) + assert self.library.short_name is not None + assert len(self.library.short_name) > 0 + assert self.client_base_1 != self.client_base_2 + assert self.expected_client_url_1 != self.expected_client_url_2 + assert self.expected_client_url_1.startswith(self.client_base_1) + assert self.expected_client_url_2.startswith(self.client_base_2) ConfigurationSetting.sitewide( db.session, Configuration.BASE_URL_KEY ).value = cm_base_url - annotator = LibraryAnnotator(db.default_library()) + self.annotator = LibraryAnnotator(self.library) + + self.add_web_client_urls = functools.partial( + self.annotator.add_web_client_urls, + record=self.record, + library=self.library, + identifier=self.identifier, + ) + + +@pytest.fixture +def library_annotator_fixture( + db: DatabaseTransactionFixture, +) -> LibraryAnnotatorFixture: + return LibraryAnnotatorFixture(db) + + +class TestLibraryAnnotator: + @pytest.mark.parametrize( + "settings", + [ + pytest.param(MarcExporterLibrarySettings(), id="defaults"), + pytest.param( + MarcExporterLibrarySettings(include_summary=False), id="summary_false" + ), + pytest.param( + MarcExporterLibrarySettings(include_genres=False), id="genres_false" + ), + pytest.param( + MarcExporterLibrarySettings( + include_summary=False, include_genres=False + ), + id="summary_and_genres_false", + ), + ], + ) + def test_annotate_work_record_default_settings( + self, + library_annotator_fixture: LibraryAnnotatorFixture, + settings: MarcExporterLibrarySettings, + ) -> None: + library_annotator_fixture.mock_annotate_work_record(settings=settings) + + # If there are no settings, or the settings are false, the only methods called will be add_web_client_urls + # and the parent class methods. 
+ library_annotator_fixture.mock_add_marc_organization_code.assert_not_called() + library_annotator_fixture.mock_add_summary.assert_not_called() + library_annotator_fixture.mock_add_simplified_genres.assert_not_called() + library_annotator_fixture.mock_add_web_client_urls.assert_called_once_with( + library_annotator_fixture.record, + library_annotator_fixture.library, + library_annotator_fixture.identifier, + settings, + ) + library_annotator_fixture.mock_add_distributor.assert_called_once_with( + library_annotator_fixture.record, library_annotator_fixture.pool + ) + library_annotator_fixture.mock_add_formats.assert_called_once_with( + library_annotator_fixture.record, library_annotator_fixture.pool + ) + + def test_annotate_work_record_settings( + self, library_annotator_fixture: LibraryAnnotatorFixture + ) -> None: + # Once the include settings are true and the marc organization code is set, + # all methods are called. + settings = MarcExporterLibrarySettings( + include_summary=True, + include_genres=True, + organization_code="marc org", + web_client_url="http://web_catalog", + ) + + library_annotator_fixture.mock_annotate_work_record(settings=settings) + + library_annotator_fixture.mock_add_marc_organization_code.assert_called_once_with( + library_annotator_fixture.record, settings.organization_code + ) + + library_annotator_fixture.mock_add_summary.assert_called_once_with( + library_annotator_fixture.record, library_annotator_fixture.work + ) + + library_annotator_fixture.mock_add_simplified_genres.assert_called_once_with( + library_annotator_fixture.record, library_annotator_fixture.work + ) + + library_annotator_fixture.mock_add_web_client_urls.assert_called_once_with( + library_annotator_fixture.record, + library_annotator_fixture.library, + library_annotator_fixture.identifier, + settings, + ) + + library_annotator_fixture.mock_add_distributor.assert_called_once_with( + library_annotator_fixture.record, library_annotator_fixture.pool + ) + + library_annotator_fixture.mock_add_formats.assert_called_once_with( + library_annotator_fixture.record, library_annotator_fixture.pool + ) + + def test_add_web_client_urls_none( + self, library_annotator_fixture: LibraryAnnotatorFixture + ): + settings = MarcExporterLibrarySettings() # If no web catalog URLs are set for the library, nothing will be changed. - record = Record() - annotator.add_web_client_urls(record, db.default_library(), identifier) - assert [] == record.get_fields("856") + library_annotator_fixture.add_web_client_urls(exporter_settings=settings) + assert [] == library_annotator_fixture.record.get_fields("856") + + def test_add_web_client_urls_from_library_registry( + self, + db: DatabaseTransactionFixture, + create_integration_configuration: IntegrationConfigurationFixture, + library_annotator_fixture: LibraryAnnotatorFixture, + ): + settings = MarcExporterLibrarySettings() # Add a URL from a library registry. 
registry = create_integration_configuration.discovery_service() @@ -208,33 +227,68 @@ def test_add_web_client_urls( DiscoveryServiceRegistration, library=db.default_library(), integration=registry, - web_client=client_base_1, + web_client=library_annotator_fixture.client_base_1, + ) + + library_annotator_fixture.add_web_client_urls(exporter_settings=settings) + [field] = library_annotator_fixture.record.get_fields("856") + assert field.indicators == ["4", "0"] + assert ( + field.get_subfields("u")[0] + == library_annotator_fixture.expected_client_url_1 ) - record = Record() - annotator.add_web_client_urls(record, db.default_library(), identifier) - [field] = record.get_fields("856") - assert ["4", "0"] == field.indicators - assert expected_client_url_1 == field.get_subfields("u")[0] + def test_add_web_client_urls_from_configuration( + self, library_annotator_fixture: LibraryAnnotatorFixture + ): + # Add a manually configured URL on a MARC export integration. + settings = MarcExporterLibrarySettings( + web_client_url=library_annotator_fixture.client_base_2 + ) + library_annotator_fixture.add_web_client_urls(exporter_settings=settings) + [field] = library_annotator_fixture.record.get_fields("856") + assert field.indicators == ["4", "0"] + assert ( + field.get_subfields("u")[0] + == library_annotator_fixture.expected_client_url_2 + ) + + def test_add_web_client_urls_from_both( + self, + db: DatabaseTransactionFixture, + create_integration_configuration: IntegrationConfigurationFixture, + library_annotator_fixture: LibraryAnnotatorFixture, + ): + # Add a URL from a library registry. + registry = create_integration_configuration.discovery_service() + create( + db.session, + DiscoveryServiceRegistration, + library=db.default_library(), + integration=registry, + web_client=library_annotator_fixture.client_base_1, + ) # Add a manually configured URL on a MARC export integration. - integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, - libraries=[db.default_library()], + settings = MarcExporterLibrarySettings( + web_client_url=library_annotator_fixture.client_base_2 ) - ConfigurationSetting.for_library_and_externalintegration( - db.session, MARCExporter.WEB_CLIENT_URL, db.default_library(), integration - ).value = client_base_2 + library_annotator_fixture.add_web_client_urls(exporter_settings=settings) - record = Record() - annotator.add_web_client_urls( - record, db.default_library(), identifier, integration + fields = library_annotator_fixture.record.get_fields("856") + assert len(fields) == 2 + + # The manually configured URL should be first. 
+ [field_1, field_2] = fields + assert field_1.indicators == ["4", "0"] + assert ( + field_1.get_subfields("u")[0] + == library_annotator_fixture.expected_client_url_2 ) - [field1, field2] = record.get_fields("856") - assert ["4", "0"] == field1.indicators - assert expected_client_url_2 == field1.get_subfields("u")[0] - assert ["4", "0"] == field2.indicators - assert expected_client_url_1 == field2.get_subfields("u")[0] + assert field_2.indicators == ["4", "0"] + assert ( + field_2.get_subfields("u")[0] + == library_annotator_fixture.expected_client_url_1 + ) diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index f66b0ecc10..e881eae21a 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -16,15 +16,15 @@ from api.marc import LibraryAnnotator as MARCLibraryAnnotator from api.novelist import NoveListAPI from core.external_search import ExternalSearchIndex +from core.integration.goals import Goals from core.lane import WorkList -from core.marc import MARCExporter +from core.marc import MARCExporter, MarcExporterSettings from core.model import ( LOCK_ID_DB_INIT, CachedMARCFile, ConfigurationSetting, Credential, DataSource, - ExternalIntegration, SessionManager, create, ) @@ -115,15 +115,25 @@ class TestCacheMARCFilesFixture: def __init__(self, db: DatabaseTransactionFixture): self.db = db self.lane = db.lane(genres=["Science Fiction"]) - self.integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, ExternalIntegration.CATALOG_GOAL - ) + self.integration = self.integration() + + self.mock_settings = MagicMock() + self.mock_library_settings = MagicMock() - self.exporter = MARCExporter(None, None, self.integration) + self.exporter = MARCExporter( + MagicMock(), MagicMock(), self.mock_settings, self.mock_library_settings + ) self.mock_records = MagicMock() self.mock_services = MagicMock() self.exporter.records = self.mock_records + def integration(self): + return self.db.integration_configuration( + protocol=MARCExporter.__name__, + goal=Goals.CATALOG_GOAL, + libraries=[self.db.default_library()], + ) + def script(self, cmd_args: Optional[list[str]] = None) -> CacheMARCFiles: cmd_args = cmd_args or [] return CacheMARCFiles( @@ -157,13 +167,13 @@ class TestCacheMARCFiles: def test_should_process_library(self, lane_script_fixture: LaneScriptFixture): db = lane_script_fixture.db script = CacheMARCFiles(db.session, cmd_args=[]) - assert False == script.should_process_library(db.default_library()) - integration = db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, + assert script.should_process_library(db.default_library()) is False + db.integration_configuration( + protocol=MARCExporter.__name__, + goal=Goals.CATALOG_GOAL, libraries=[db.default_library()], ) - assert True == script.should_process_library(db.default_library()) + assert script.should_process_library(db.default_library()) is True def test_should_process_lane(self, lane_script_fixture: LaneScriptFixture): db = lane_script_fixture.db @@ -179,18 +189,18 @@ def test_should_process_lane(self, lane_script_fixture: LaneScriptFixture): script = CacheMARCFiles(db.session, cmd_args=[]) script.max_depth = 1 - assert True == script.should_process_lane(parent) - assert True == script.should_process_lane(child) - assert False == script.should_process_lane(grandchild) - assert True == script.should_process_lane(wl) - assert False == script.should_process_lane(empty) + assert script.should_process_lane(parent) is True + assert 
script.should_process_lane(child) is True + assert script.should_process_lane(grandchild) is False + assert script.should_process_lane(wl) is True + assert script.should_process_lane(empty) is False script.max_depth = 0 - assert True == script.should_process_lane(parent) - assert False == script.should_process_lane(child) - assert False == script.should_process_lane(grandchild) - assert True == script.should_process_lane(wl) - assert False == script.should_process_lane(empty) + assert script.should_process_lane(parent) is True + assert script.should_process_lane(child) is False + assert script.should_process_lane(grandchild) is False + assert script.should_process_lane(wl) is True + assert script.should_process_lane(empty) is False def test_process_lane_never_run(self, cache_marc_files: TestCacheMARCFilesFixture): script = cache_marc_files.script() @@ -207,16 +217,11 @@ def test_process_lane_cached_update( # If we have a cached file already, and it's old enough, the script will # run the exporter twice, first to update that file and second to create # a file with changes since that first file was originally created. - db = cache_marc_files.db now = utc_now() last_week = now - datetime.timedelta(days=7) cache_marc_files.create_cached_file(last_week) - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.UPDATE_FREQUENCY, - db.default_library(), - cache_marc_files.integration, - ).value = 3 + settings = MarcExporterSettings(update_frequency=3) + cache_marc_files.exporter.settings = settings script = cache_marc_files.script() script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) @@ -236,16 +241,11 @@ def test_process_lane_cached_recent( self, cache_marc_files: TestCacheMARCFilesFixture ): # If we already have a recent cached file, the script won't do anything. - db = cache_marc_files.db now = utc_now() yesterday = now - datetime.timedelta(days=1) cache_marc_files.create_cached_file(yesterday) - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.UPDATE_FREQUENCY, - db.default_library(), - cache_marc_files.integration, - ).value = 3 + settings = MarcExporterSettings(update_frequency=3) + cache_marc_files.exporter.settings = settings script = cache_marc_files.script() script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) @@ -255,17 +255,12 @@ def test_process_lane_cached_recent_force( self, cache_marc_files: TestCacheMARCFilesFixture ): # But we can force it to run anyway. - db = cache_marc_files.db now = utc_now() yesterday = now - datetime.timedelta(days=1) last_week = now - datetime.timedelta(days=7) cache_marc_files.create_cached_file(yesterday) - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.UPDATE_FREQUENCY, - db.default_library(), - cache_marc_files.integration, - ).value = 3 + settings = MarcExporterSettings(update_frequency=3) + cache_marc_files.exporter.settings = settings script = cache_marc_files.script(cmd_args=["--force"]) script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) @@ -290,17 +285,12 @@ def test_process_lane_cached_frequency_zero( ): # The update frequency can also be 0, in which case it will always run. # If we already have a recent cached file, the script won't do anything. 
- db = cache_marc_files.db now = utc_now() yesterday = now - datetime.timedelta(days=1) last_week = now - datetime.timedelta(days=7) cache_marc_files.create_cached_file(yesterday) - ConfigurationSetting.for_library_and_externalintegration( - db.session, - MARCExporter.UPDATE_FREQUENCY, - db.default_library(), - cache_marc_files.integration, - ).value = 0 + settings = MarcExporterSettings(update_frequency=0) + cache_marc_files.exporter.settings = settings script = cache_marc_files.script() script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) @@ -320,6 +310,27 @@ def test_process_lane_cached_frequency_zero( > last_week ) + def test_process_lane_creates_exporter( + self, cache_marc_files: TestCacheMARCFilesFixture + ): + # If the exporter doesn't exist, the script will create it. + script = cache_marc_files.script() + script.settings = MagicMock( + return_value=( + cache_marc_files.mock_settings, + cache_marc_files.mock_library_settings, + ) + ) + with patch("scripts.MARCExporter") as exporter: + script.process_lane(cache_marc_files.lane) + + exporter.assert_called_once_with( + cache_marc_files.db.session, + cache_marc_files.lane.library, + cache_marc_files.mock_settings, + cache_marc_files.mock_library_settings, + ) + class TestInstanceInitializationScript: # These are some basic tests for the instance initialization script. It is tested diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py index cdb5fe9bce..bf54a5856c 100644 --- a/tests/core/test_marc.py +++ b/tests/core/test_marc.py @@ -1,6 +1,7 @@ from __future__ import annotations import datetime +import functools from typing import TYPE_CHECKING from unittest.mock import MagicMock from urllib.parse import quote @@ -9,8 +10,8 @@ from freezegun import freeze_time from pymarc import MARCReader, Record -from core.config import CannotLoadConfiguration from core.external_search import Filter +from core.integration.goals import Goals from core.lane import WorkList from core.marc import Annotator, MARCExporter, MARCExporterFacets from core.model import ( @@ -19,9 +20,9 @@ DataSource, DeliveryMechanism, Edition, - ExternalIntegration, Genre, Identifier, + IntegrationConfiguration, LicensePoolDeliveryMechanism, Representation, RightsStatus, @@ -38,28 +39,21 @@ class TestAnnotator: - def test_annotate_work_record(self, db: DatabaseTransactionFixture): - session = db.session - + def test_annotate_work_record(self, db: DatabaseTransactionFixture) -> None: # Verify that annotate_work_record adds the distributor and formats. 
- class MockAnnotator(Annotator): - add_distributor_called_with = None - add_formats_called_with = None - - def add_distributor(self, record, pool): - self.add_distributor_called_with = [record, pool] - - def add_formats(self, record, pool): - self.add_formats_called_with = [record, pool] + annotator = Annotator() + annotator.add_distributor = MagicMock() + annotator.add_formats = MagicMock() - annotator = MockAnnotator() record = Record() work = db.work(with_license_pool=True) pool = work.license_pools[0] - annotator.annotate_work_record(work, pool, None, None, record) - assert [record, pool] == annotator.add_distributor_called_with - assert [record, pool] == annotator.add_formats_called_with + annotator.annotate_work_record( + work, pool, MagicMock(), MagicMock(), record, MagicMock() + ) + annotator.add_distributor.assert_called_once_with(record, pool) + annotator.add_formats.assert_called_once_with(record, pool) def test_leader(self, db: DatabaseTransactionFixture): work = db.work(with_license_pool=True) @@ -479,7 +473,12 @@ def __init__(self, db: DatabaseTransactionFixture): self.integration = self._integration(db) self.now = utc_now() - self.exporter = MARCExporter.from_config(db.default_library()) + self.library = db.default_library() + self.settings = MagicMock() + self.library_settings = MagicMock() + self.exporter = MARCExporter( + self.db.session, self.library, self.settings, self.library_settings + ) self.annotator = Annotator() self.w1 = db.work(genre="Mystery", with_open_access_download=True) self.w2 = db.work(genre="Mystery", with_open_access_download=True) @@ -488,10 +487,10 @@ def __init__(self, db: DatabaseTransactionFixture): self.search_engine.mock_query_works([self.w1, self.w2]) @staticmethod - def _integration(db: DatabaseTransactionFixture): - return db.external_integration( - ExternalIntegration.MARC_EXPORT, - ExternalIntegration.CATALOG_GOAL, + def _integration(db: DatabaseTransactionFixture) -> IntegrationConfiguration: + return db.integration_configuration( + MARCExporter.__name__, + Goals.CATALOG_GOAL, libraries=[db.default_library()], ) @@ -506,31 +505,28 @@ def marc_exporter_fixture( class TestMARCExporter: - def test_from_config(self, db: DatabaseTransactionFixture): - pytest.raises( - CannotLoadConfiguration, MARCExporter.from_config, db.default_library() - ) - - integration = MarcExporterFixture._integration(db) - exporter = MARCExporter.from_config(db.default_library()) - assert integration == exporter.integration - assert db.default_library() == exporter.library - - other_library = db.library() - pytest.raises(CannotLoadConfiguration, MARCExporter.from_config, other_library) - - def test_create_record(self, db: DatabaseTransactionFixture): + def test_create_record( + self, db: DatabaseTransactionFixture, marc_exporter_fixture: MarcExporterFixture + ): work = db.work( with_license_pool=True, title="old title", authors=["old author"], data_source_name=DataSource.OVERDRIVE, ) - annotator = Annotator() + + create_record = functools.partial( + MARCExporter.create_record, + work=work, + annotator=marc_exporter_fixture.annotator, + settings=marc_exporter_fixture.settings, + library_settings=marc_exporter_fixture.library_settings, + ) # The record isn't cached yet, so a new record is created and cached. 
- assert None == work.marc_record - record = MARCExporter.create_record(work, annotator) + assert work.marc_record is None + record = create_record() + assert record is not None [title_field] = record.get_fields("245") assert "old title" == title_field.get_subfields("a")[0] [author_field] = record.get_fields("100") @@ -538,7 +534,8 @@ def test_create_record(self, db: DatabaseTransactionFixture): [distributor_field] = record.get_fields("264") assert DataSource.OVERDRIVE == distributor_field.get_subfields("b")[0] cached = work.marc_record - assert "old title" in cached + assert cached is not None + assert "old title" in cached # type: ignore[unreachable] assert "author, old" in cached # The distributor isn't part of the cached record. assert DataSource.OVERDRIVE not in cached @@ -551,7 +548,7 @@ def test_create_record(self, db: DatabaseTransactionFixture): # Now that the record is cached, creating a record will # use the cache. Distributor will be updated since it's # not part of the cached record. - record = MARCExporter.create_record(work, annotator) + record = create_record() [title_field] = record.get_fields("245") assert "old title" == title_field.get_subfields("a")[0] [author_field] = record.get_fields("100") @@ -560,7 +557,7 @@ def test_create_record(self, db: DatabaseTransactionFixture): assert DataSource.BIBLIOTHECA == distributor_field.get_subfields("b")[0] # But we can force an update to the cached record. - record = MARCExporter.create_record(work, annotator, force_create=True) + record = create_record(force_create=True) [title_field] = record.get_fields("245") assert "new title" == title_field.get_subfields("a")[0] [author_field] = record.get_fields("100") @@ -573,23 +570,21 @@ def test_create_record(self, db: DatabaseTransactionFixture): assert "new title" in cached assert "author, new" in cached - # If we pass in an integration, it's passed along to the annotator. - integration = MarcExporterFixture._integration(db) - - class MockAnnotator(Annotator): - integration = None - - def annotate_work_record( - self, work, pool, edition, identifier, record, integration - ): - self.integration = integration - - annotator = MockAnnotator() - record = MARCExporter.create_record(work, annotator, integration=integration) - assert integration == annotator.integration + # The settings we pass in get passed along to the annotator. + marc_exporter_fixture.annotator.annotate_work_record = MagicMock() + create_record(force_create=True) + assert marc_exporter_fixture.annotator.annotate_work_record.call_count == 1 + assert ( + marc_exporter_fixture.annotator.annotate_work_record.call_args.kwargs[ + "settings" + ] + == marc_exporter_fixture.library_settings + ) @freeze_time("2020-01-01 00:00:00") - def test_create_record_roundtrip(self, db: DatabaseTransactionFixture): + def test_create_record_roundtrip( + self, db: DatabaseTransactionFixture, marc_exporter_fixture: MarcExporterFixture + ): # Create a marc record from a work with special characters # in both the title and author name and round-trip it to # the DB and back again to make sure we are creating records @@ -599,21 +594,29 @@ def test_create_record_roundtrip(self, db: DatabaseTransactionFixture): # a timestamp when it was created and we need the created # records to match. 
- annotator = Annotator() - # Creates a new record and saves it to the database work = db.work( title="Little Mimi\u2019s First Counting Lesson", authors=["Lagerlo\xf6f, Selma Ottiliana Lovisa,"], with_license_pool=True, ) - record = MARCExporter.create_record(work, annotator) - loaded_record = MARCExporter.create_record(work, annotator) + create_record = functools.partial( + MARCExporter.create_record, + work=work, + annotator=marc_exporter_fixture.annotator, + settings=marc_exporter_fixture.settings, + library_settings=marc_exporter_fixture.library_settings, + ) + record = create_record() + loaded_record = create_record() + assert record is not None + assert loaded_record is not None assert record.as_marc() == loaded_record.as_marc() - # Loads a existing record from the DB + # Loads an existing record from the DB new_work = get_one(db.session, Work, id=work.id) - new_record = MARCExporter.create_record(new_work, annotator) + new_record = create_record(work=new_work) + assert new_record is not None assert record.as_marc() == new_record.as_marc() @pytest.mark.parametrize("object_type", ["lane", "worklist"]) diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 0942f63850..20206bc275 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -722,11 +722,13 @@ def integration_configuration( goal=goal, name=(name or random_string(16)), ) - if libraries and not isinstance(libraries, list): - libraries = [libraries] - else: + + if libraries is None: libraries = [] + if not isinstance(libraries, list): + libraries = [libraries] + integration.libraries.extend(libraries) integration.settings_dict = kwargs diff --git a/tests/migration/test_20231121_1e46a5bc33b5.py b/tests/migration/test_20231121_1e46a5bc33b5.py new file mode 100644 index 0000000000..f58ab1718e --- /dev/null +++ b/tests/migration/test_20231121_1e46a5bc33b5.py @@ -0,0 +1,181 @@ +import pytest +from pytest_alembic import MigrationContext +from sqlalchemy.engine import Engine + +from api.integration.registry.catalog_services import CatalogServicesRegistry +from core.integration.base import integration_settings_load +from core.marc import MARCExporter, MarcExporterLibrarySettings, MarcExporterSettings +from tests.migration.conftest import ( + CreateConfigSetting, + CreateExternalIntegration, + CreateLibrary, +) + + +def test_migration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("1e46a5bc33b5") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as conn: + lib_1_id = create_library(conn, "Test Library 1") + lib_2_id = create_library(conn, "Test Library 2") + ext_id = create_external_integration( + conn, + protocol="MARC Export", + goal="ils_catalog", + name="MARC Export Test", + ) + + create_config_setting( + conn, "marc_update_frequency", "8", ext_id, lib_1_id, associate_library=True + ) + create_config_setting( + conn, + "marc_organization_code", + "org1", + ext_id, + lib_1_id, + associate_library=True, + ) + create_config_setting( + conn, "include_summary", "true", ext_id, lib_1_id, associate_library=True + ) + + create_config_setting( + conn, + "marc_organization_code", + "org2", + ext_id, + lib_2_id, + associate_library=True, + ) + create_config_setting( + conn, + "marc_web_client_url", + "http://web.com", + ext_id, + lib_2_id, + associate_library=True, + ) + 
create_config_setting( + conn, + "include_simplified_genres", + "true", + ext_id, + lib_2_id, + associate_library=True, + ) + + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as conn: + rows = conn.execute( + "select id, protocol, goal, settings from integration_configurations where name='MARC Export Test'" + ).all() + assert len(rows) == 1 + + integration = rows[0] + + protocol_cls = CatalogServicesRegistry()[integration.protocol] + assert protocol_cls == MARCExporter + settings = integration_settings_load( + protocol_cls.settings_class(), integration.settings + ) + assert isinstance(settings, MarcExporterSettings) + assert settings.update_frequency == 8 + + rows = conn.execute( + "select library_id, settings from integration_library_configurations where parent_id = %s order by library_id", + integration.id, + ).all() + assert len(rows) == 2 + [library_1_integration, library_2_integration] = rows + + assert library_1_integration.library_id == lib_1_id + assert library_2_integration.library_id == lib_2_id + + library_1_settings = integration_settings_load( + protocol_cls.library_settings_class(), library_1_integration.settings + ) + assert isinstance(library_1_settings, MarcExporterLibrarySettings) + assert library_1_settings.organization_code == "org1" + assert library_1_settings.include_summary is True + + library_2_settings = integration_settings_load( + protocol_cls.library_settings_class(), library_2_integration.settings + ) + assert isinstance(library_2_settings, MarcExporterLibrarySettings) + assert library_2_settings.organization_code == "org2" + assert library_2_settings.web_client_url == "http://web.com" + assert library_2_settings.include_genres is True + + +def test_different_update_frequency( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("1e46a5bc33b5") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as conn: + lib_1_id = create_library(conn, "Test Library 1") + lib_2_id = create_library(conn, "Test Library 2") + ext_id = create_external_integration( + conn, + protocol="MARC Export", + goal="ils_catalog", + name="MARC Export Test", + ) + + create_config_setting( + conn, "marc_update_frequency", "8", ext_id, lib_1_id, associate_library=True + ) + + create_config_setting( + conn, + "marc_update_frequency", + "12", + ext_id, + lib_2_id, + associate_library=True, + ) + + with pytest.raises(RuntimeError) as excinfo: + alembic_runner.migrate_up_one() + + assert "Found different update frequencies for different libraries (8/12)." 
in str( + excinfo.value + ) + + +def test_unknown_protocol( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_external_integration: CreateExternalIntegration, + create_config_setting: CreateConfigSetting, +) -> None: + alembic_runner.migrate_down_to("1e46a5bc33b5") + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as conn: + ext_id = create_external_integration( + conn, + protocol="unknown", + goal="ils_catalog", + name="MARC Export Test", + ) + + with pytest.raises(RuntimeError) as excinfo: + alembic_runner.migrate_up_one() + + assert "Unknown catalog service" in str(excinfo.value) From 2af463c1a24c6b382fe90a142dcef91f7606923c Mon Sep 17 00:00:00 2001 From: RishiDiwanTT <90382027+RishiDiwanTT@users.noreply.github.com> Date: Thu, 30 Nov 2023 20:41:12 +0530 Subject: [PATCH 194/262] PP-750 Time tracking flag (#1530) * Added the should_track_playtime attribute to licensepools The migrations and migration tests have been added as well Any 'OPDS for Distributor' audiobooks should be enabled by default * Importing any Biblioboard feed will result in time tracked audiobooks * Feed items should contain the playtime links only if the pools allow for it * Added OPDS based palace:timeTracking parsing * OPDS2 importer types have been customized In order to import the custom timeTracking flag the manifest parser had to be customized with a feed that recognises the custom attribute in the metadata The type:ignore stubs are because of the lack of typing information from the ast.opds2 library * Default value for should_track_playtime is now False during the migration * Ignoring the time tracking flag for any non-audio medium * Removed old migration tests that have been run on production --- ...biblioboard_licensepools_time_tracking_.py | 57 +++++ ...60a578e_licensepools_time_tracking_flag.py | 30 +++ api/opds_for_distributors.py | 19 +- bin/opds2_import_monitor | 10 +- core/feed/annotator/loan_and_hold.py | 4 +- core/metadata_layer.py | 6 + core/model/licensing.py | 1 + core/opds2_import.py | 69 +++++- core/opds_import.py | 18 ++ .../api/feed/test_loan_and_hold_annotator.py | 1 + .../biblioboard_mini_feed.opds | 20 ++ tests/api/test_opds_for_distributors.py | 20 +- .../core/files/opds/content_server_mini.opds | 8 +- tests/core/files/opds2/feed.json | 8 +- tests/core/test_opds2_import.py | 16 +- tests/core/test_opds_import.py | 10 +- tests/migration/conftest.py | 145 ++++++++++- tests/migration/test_20230510_a9ed3f76d649.py | 231 ------------------ tests/migration/test_20230512_5a425ebe026c.py | 123 ---------- tests/migration/test_20230531_0af587ff8595.py | 142 ----------- tests/migration/test_20230711_3d380776c1bf.py | 75 ------ tests/migration/test_20230719_b3749bac3e55.py | 64 ----- tests/migration/test_20230905_2b672c6fb2b9.py | 167 ------------- tests/migration/test_20231124_1c14468b74ce.py | 95 +++++++ 24 files changed, 503 insertions(+), 836 deletions(-) create mode 100644 alembic/versions/20231124_1c14468b74ce_biblioboard_licensepools_time_tracking_.py create mode 100644 alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py delete mode 100644 tests/migration/test_20230510_a9ed3f76d649.py delete mode 100644 tests/migration/test_20230512_5a425ebe026c.py delete mode 100644 tests/migration/test_20230531_0af587ff8595.py delete mode 100644 tests/migration/test_20230711_3d380776c1bf.py delete mode 100644 tests/migration/test_20230719_b3749bac3e55.py delete mode 100644 tests/migration/test_20230905_2b672c6fb2b9.py 
create mode 100644 tests/migration/test_20231124_1c14468b74ce.py diff --git a/alembic/versions/20231124_1c14468b74ce_biblioboard_licensepools_time_tracking_.py b/alembic/versions/20231124_1c14468b74ce_biblioboard_licensepools_time_tracking_.py new file mode 100644 index 0000000000..e853aceb2b --- /dev/null +++ b/alembic/versions/20231124_1c14468b74ce_biblioboard_licensepools_time_tracking_.py @@ -0,0 +1,57 @@ +"""Biblioboard licensepools time tracking flag + +Revision ID: 1c14468b74ce +Revises: 6af9160a578e +Create Date: 2023-11-24 08:11:35.541207+00:00 + +""" +from alembic import op +from core.migration.util import migration_logger + +# revision identifiers, used by Alembic. +revision = "1c14468b74ce" +down_revision = "6af9160a578e" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + connection = op.get_bind() + log = migration_logger(revision) + + collections = connection.execute( + "select collections.id from integration_configurations \ + JOIN collections on collections.integration_configuration_id = integration_configurations.id \ + where integration_configurations.protocol = 'OPDS for Distributors'" + ).all() + + log.warning(f"Will update licensepools for collections: {collections}") + + collection_ids = [cid.id for cid in collections] + + if len(collection_ids) == 0: + log.info("No collections found to update!") + return + + pool_ids = connection.execute( + "select licensepools.id from licensepools \ + JOIN collections on collections.id = licensepools.collection_id \ + JOIN editions on editions.primary_identifier_id = licensepools.identifier_id \ + WHERE editions.medium = 'Audio' and licensepools.collection_id in %(collection_ids)s", + collection_ids=tuple(collection_ids), + ).all() + + pool_ids_list = [p.id for p in pool_ids] + # update licensepools + if len(pool_ids_list) == 0: + log.info("No licensepools to update!") + return + + connection.execute( + "UPDATE licensepools SET should_track_playtime=true WHERE id in %(ids)s returning id", + ids=tuple(pool_ids_list), + ).all() + + +def downgrade() -> None: + pass diff --git a/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py b/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py new file mode 100644 index 0000000000..9f7bd5a0b6 --- /dev/null +++ b/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py @@ -0,0 +1,30 @@ +"""Licensepools time tracking flag + +Revision ID: 6af9160a578e +Revises: 1e46a5bc33b5 +Create Date: 2023-11-24 08:08:12.636590+00:00 + +""" +import sqlalchemy as sa + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "6af9160a578e" +down_revision = "1e46a5bc33b5" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.add_column( + "licensepools", + sa.Column("should_track_playtime", sa.Boolean(), nullable=True, default=False), + ) + session = op.get_bind() + session.execute("UPDATE licensepools SET should_track_playtime=false") + op.alter_column("licensepools", "should_track_playtime", nullable=False) + + +def downgrade() -> None: + op.drop_column("licensepools", "should_track_playtime") diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 7542b04abc..8df2bd7f4c 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -14,8 +14,9 @@ LibraryAuthorizationFailedException, ) from api.selftest import HasCollectionSelfTests +from core.coverage import CoverageFailure from core.integration.settings import BaseSettings, ConfigurationFormItem, FormField -from core.metadata_layer import FormatData, TimestampData +from core.metadata_layer import FormatData, Metadata, TimestampData from core.model import ( Collection, Credential, @@ -28,6 +29,7 @@ Session, get_one, ) +from core.model.constants import EditionConstants from core.opds_import import OPDSImporter, OPDSImporterSettings, OPDSImportMonitor from core.util import base64 from core.util.datetime_helpers import utc_now @@ -422,6 +424,21 @@ def _add_format_data(cls, circulation: CirculationData) -> None: ) ) + def extract_feed_data( + self, feed: str | bytes, feed_url: str | None = None + ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: + metadatas, failures = super().extract_feed_data(feed, feed_url) + + # Force all audiobook licensepools to track playtime + for _, metadata in metadatas.items(): + if ( + metadata.medium == EditionConstants.AUDIO_MEDIUM + and metadata.circulation is not None + ): + metadata.circulation.should_track_playtime = True + + return metadatas, failures + class OPDSForDistributorsImportMonitor(OPDSImportMonitor): """Monitor an OPDS feed that requires or allows authentication, diff --git a/bin/opds2_import_monitor b/bin/opds2_import_monitor index 3223ba6cd0..57fe72cb51 100755 --- a/bin/opds2_import_monitor +++ b/bin/opds2_import_monitor @@ -7,17 +7,21 @@ bin_dir = os.path.split(__file__)[0] package_dir = os.path.join(bin_dir, "..") sys.path.append(os.path.abspath(package_dir)) -from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory from core.model import ExternalIntegration -from core.opds2_import import OPDS2Importer, OPDS2ImportMonitor, RWPMManifestParser +from core.opds2_import import ( + OPDS2Importer, + OPDS2ImportMonitor, + PalaceOPDS2FeedParserFactory, + RWPMManifestParser, +) from core.scripts import OPDSImportScript import_script = OPDSImportScript( importer_class=OPDS2Importer, monitor_class=OPDS2ImportMonitor, protocol=ExternalIntegration.OPDS2_IMPORT, - parser=RWPMManifestParser(OPDS2FeedParserFactory()), + parser=RWPMManifestParser(PalaceOPDS2FeedParserFactory()), ) import_script.run() diff --git a/core/feed/annotator/loan_and_hold.py b/core/feed/annotator/loan_and_hold.py index c395c59618..084aa8238f 100644 --- a/core/feed/annotator/loan_and_hold.py +++ b/core/feed/annotator/loan_and_hold.py @@ -4,7 +4,6 @@ from core.feed.annotator.circulation import LibraryAnnotator from core.feed.types import FeedData, Link, WorkEntry -from core.model.configuration import ExternalIntegration from core.model.constants import EditionConstants, LinkRelations from core.model.patron import Hold, Patron @@ -104,8 +103,7 @@ def 
annotate_work_entry( if ( edition.medium == EditionConstants.AUDIO_MEDIUM and active_license_pool - and active_license_pool.collection.protocol - == ExternalIntegration.OPDS_FOR_DISTRIBUTORS + and active_license_pool.should_track_playtime is True and work in self.active_loans_by_work ): entry.computed.other_links.append( diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 57cabc141c..51182a35f0 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -697,6 +697,7 @@ def __init__( links=None, licenses=None, last_checked=None, + should_track_playtime=False, ): """Constructor. @@ -747,6 +748,9 @@ def __init__( # instead of directly using the values that are given to CirculationData. self.licenses = licenses + # Whether the license should contain a playtime tracking link + self.should_track_playtime = should_track_playtime + @property def links(self): return self.__links @@ -877,6 +881,7 @@ def license_pool(self, _db, collection): license_pool.open_access = self.has_open_access_link license_pool.availability_time = self.last_checked license_pool.last_checked = self.last_checked + license_pool.should_track_playtime = self.should_track_playtime return license_pool, is_new @@ -969,6 +974,7 @@ def apply( # with the book reflect the formats in self.formats. old_lpdms = new_lpdms = [] if pool: + pool.should_track_playtime = self.should_track_playtime old_lpdms = list(pool.delivery_mechanisms) # Before setting and unsetting delivery mechanisms, which may diff --git a/core/model/licensing.py b/core/model/licensing.py index 1e463dda32..6c8b93c456 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -270,6 +270,7 @@ class LicensePool(Base): licenses_available: int = Column(Integer, default=0, index=True) licenses_reserved: int = Column(Integer, default=0) patrons_in_hold_queue = Column(Integer, default=0) + should_track_playtime = Column(Boolean, default=False, nullable=False) # This lets us cache the work of figuring out the best open access # link for this LicensePool. 
diff --git a/core/opds2_import.py b/core/opds2_import.py index 3206118939..516b4b59e8 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -22,8 +22,20 @@ from uritemplate import URITemplate from webpub_manifest_parser.core import ManifestParserFactory, ManifestParserResult from webpub_manifest_parser.core.analyzer import NodeFinder -from webpub_manifest_parser.core.ast import Link, Manifestlike +from webpub_manifest_parser.core.ast import ( + ArrayOfCollectionsProperty, + Link, + Manifestlike, +) +from webpub_manifest_parser.core.properties import BooleanProperty from webpub_manifest_parser.errors import BaseError +from webpub_manifest_parser.opds2 import ( + ManifestParser, + OPDS2CollectionRolesRegistry, + OPDS2FeedParserFactory, + OPDS2SemanticAnalyzer, + OPDS2SyntaxAnalyzer, +) from webpub_manifest_parser.opds2.registry import ( OPDS2LinkRelationsRegistry, OPDS2MediaTypesRegistry, @@ -135,6 +147,53 @@ def parse_manifest( return result +class PalaceOPDS2PresentationMetadata(opds2_ast.PresentationMetadata): # type: ignore[misc] + time_tracking = BooleanProperty( + "http://palaceproject.io/terms/timeTracking", False, default_value=False + ) + + +class PalaceOPDS2Publication(opds2_ast.OPDS2Publication): # type: ignore[misc] + metadata = opds2_ast.TypeProperty( + key="metadata", required=True, nested_type=PalaceOPDS2PresentationMetadata + ) + + +class PalaceOPDS2Feed(opds2_ast.OPDS2Feed): # type: ignore[misc] + publications = ArrayOfCollectionsProperty( + "publications", + required=False, + role=OPDS2CollectionRolesRegistry.PUBLICATIONS, + collection_type=PalaceOPDS2Publication, + ) + + +class PalaceOPDS2SyntaxAnalyzer(OPDS2SyntaxAnalyzer): # type: ignore[misc] + def _create_manifest(self) -> opds2_ast.OPDS2Feed: + return PalaceOPDS2Feed() + + +class PalaceOPDS2FeedParserFactory(OPDS2FeedParserFactory): # type: ignore[misc] + def create(self) -> ManifestParser: + """Create a new OPDS 2.0 parser. 
+ + :return: OPDS 2.0 parser + :rtype: Parser + """ + media_types_registry = OPDS2MediaTypesRegistry() + link_relations_registry = OPDS2LinkRelationsRegistry() + collection_roles_registry = OPDS2CollectionRolesRegistry() + syntax_analyzer = ( + PalaceOPDS2SyntaxAnalyzer() + ) # This is the only change from the base class + semantic_analyzer = OPDS2SemanticAnalyzer( + media_types_registry, link_relations_registry, collection_roles_registry + ) + parser = ManifestParser(syntax_analyzer, semantic_analyzer) + + return parser + + class OPDS2ImporterSettings(OPDSImporterSettings): custom_accept_header: str = FormField( default="{}, {};q=0.9, */*;q=0.1".format( @@ -764,6 +823,13 @@ def _extract_publication_metadata( ) # Audiobook duration duration = publication.metadata.duration + # Not all parsers support time_tracking + time_tracking = getattr(publication.metadata, "time_tracking", False) + if medium != Edition.AUDIO_MEDIUM and time_tracking is True: + time_tracking = False + self.log.warning( + f"Ignoring the time tracking flag for entry {publication.metadata.identifier}" + ) feed_self_url = first_or_default( feed.links.get_by_rel(OPDS2LinkRelationsRegistry.SELF.key) @@ -797,6 +863,7 @@ def _extract_publication_metadata( licenses_reserved=0, patrons_in_hold_queue=0, formats=[], + should_track_playtime=time_tracking, ) formats = self._find_formats_in_non_open_access_acquisition_links( diff --git a/core/opds_import.py b/core/opds_import.py index 58cc283cdc..c84ad345f5 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -104,6 +104,7 @@ class OPDSXMLParser(XMLParser): "schema": "http://schema.org/", "atom": "http://www.w3.org/2005/Atom", "drm": "http://librarysimplified.org/terms/drm", + "palace": "http://palaceproject.io/terms", } @@ -818,6 +819,19 @@ def extract_feed_data( combined_circ["data_source"] = self.data_source_name combined_circ["primary_identifier"] = identifier_obj + + combined_circ["should_track_playtime"] = xml_data_dict.get( + "should_track_playtime", False + ) + if ( + combined_circ["should_track_playtime"] + and xml_data_dict["medium"] != Edition.AUDIO_MEDIUM + ): + combined_circ["should_track_playtime"] = False + self.log.warning( + f"Ignoring the time tracking flag for entry {identifier_obj.identifier}" + ) + circulation = CirculationData(**combined_circ) self._add_format_data(circulation) @@ -1379,6 +1393,10 @@ def _detail_for_elementtree_entry( # This entry had an issued tag, but it was in a format we couldn't parse. 
pass + data["should_track_playtime"] = False + time_tracking_tag = parser._xpath(entry_tag, "palace:timeTracking") + if time_tracking_tag: + data["should_track_playtime"] = time_tracking_tag[0].text.lower() == "true" return data @classmethod diff --git a/tests/api/feed/test_loan_and_hold_annotator.py b/tests/api/feed/test_loan_and_hold_annotator.py index 79df7ed502..1af8c79ce3 100644 --- a/tests/api/feed/test_loan_and_hold_annotator.py +++ b/tests/api/feed/test_loan_and_hold_annotator.py @@ -211,6 +211,7 @@ def test_annotate_work_entry(self, db: DatabaseTransactionFixture): protocol=ExternalIntegration.OPDS_FOR_DISTRIBUTORS ) work = db.work(with_license_pool=True, collection=opds_for_distributors) + work.active_license_pool().should_track_playtime = True edition = work.presentation_edition edition.medium = EditionConstants.AUDIO_MEDIUM edition.primary_identifier = identifier diff --git a/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds b/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds index 5c32697957..ee945e1938 100644 --- a/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds +++ b/tests/api/files/opds_for_distributors/biblioboard_mini_feed.opds @@ -31,6 +31,26 @@ History remembers Guinevere’s sin, but it was Arthur who transgressed first.Fo 2016-01-01T00:00:00Z
+Camelot's Queen (Volume 2) Audiobook + + + + + +Nicole Evelina + +urn:uuid:04377e87-ab69-41c8-a2a4-812d55dc0953 + +History remembers Guinevere’s sin, but it was Arthur who transgressed first.Forced into a marriage she neither anticipated nor desired, Guinevere finds herself High Queen, ruling and fighting alongside Arthur as they try to subdue the Saxons, Irish and Picts who threaten Britain from every direction. Though her heart still longs for her lost love, Guinevere slowly grows to care for her husband as they join together to defeat their enemies. Meanwhile, within the walls of Camelot their closest allies plot against them. One schemes to make Guinevere his own, another seeks revenge for past transgressions, while a third fixes her eyes on the throne. When the unthinkable happens and Guinevere is feared dead, Arthur installs a new woman in her place, one who will poison his affections toward her, threatening Guinevere’s fragile sanity and eventually driving her into the arms of her champion. Amid this tension a new challenge arises for the king and queen of Camelot: finding the Holy Grail, a sacred relic that promises lasting unity. But peace, as they will soon learn, can be just as dangerous as war. As the court begins to turn on itself, it becomes clear that the quest that was to be Arthur’s lasting legacy may end in the burning fires of condemnation.This highly anticipated sequel to Daughter of Destiny proves there is much more to Guinevere’s story than her marriage and an affair. See the legend you think you know through her eyes and live the adventure of Camelot’s golden days yourself – but be prepared to suffer its downfall as well. + +Copyright held by content provider +Lawson Gartner Pubishing +audio/mpeg +04377e87-ab69-41c8-a2a4-812d55dc0953 +en +2016-01-01T00:00:00Z + + Southern Spirits (Volume 1) diff --git a/tests/api/test_opds_for_distributors.py b/tests/api/test_opds_for_distributors.py index 78be51e0b9..3d5dd064b3 100644 --- a/tests/api/test_opds_for_distributors.py +++ b/tests/api/test_opds_for_distributors.py @@ -574,7 +574,9 @@ def test_import(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): # Both works were created, since we can use their acquisition links # to give copies to patrons. - [camelot, southern] = sorted(imported_works, key=lambda x: x.title) + [camelot, camelot_audio, southern] = sorted( + imported_works, key=lambda x: x.title + ) # Each work has a license pool. 
[camelot_pool] = camelot.license_pools @@ -598,6 +600,22 @@ def test_import(self, opds_dist_api_fixture: OPDSForDistributorsAPIFixture): assert LicensePool.UNLIMITED_ACCESS == pool.licenses_owned assert LicensePool.UNLIMITED_ACCESS == pool.licenses_available assert (pool.work.last_update_time - now).total_seconds() <= 2 + assert pool.should_track_playtime == False + + # Audiobooks always track playtime + camelot_audio_pool = camelot_audio.license_pools[0] + assert camelot_audio_pool.should_track_playtime == True + [camelot_audio_acquisition_link] = [ + l + for l in camelot_audio_pool.identifier.links + if l.rel == Hyperlink.GENERIC_OPDS_ACQUISITION + and l.resource.representation.media_type + == Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE + ] + assert ( + "https://library.biblioboard.com/ext/api/media/04377e87-ab69-41c8-a2a4-812d55dc0953/assets/content.json" + == camelot_audio_acquisition_link.resource.representation.url + ) [camelot_acquisition_link] = [ l diff --git a/tests/core/files/opds/content_server_mini.opds b/tests/core/files/opds/content_server_mini.opds index 10bc4ad2e5..bf224cec67 100644 --- a/tests/core/files/opds/content_server_mini.opds +++ b/tests/core/files/opds/content_server_mini.opds @@ -1,10 +1,10 @@ - + http://localhost:5000/ Open-Access Content 2015-01-02T16:56:40Z - + urn:librarysimplified.org/terms/id/Gutenberg%20ID/10441 The Green Mouse @@ -31,7 +31,8 @@ en Project Gutenberg - + + true @@ -53,6 +54,7 @@ en Project Gutenberg + true diff --git a/tests/core/files/opds2/feed.json b/tests/core/files/opds2/feed.json index 918afa4fd1..5cbc4d5a47 100644 --- a/tests/core/files/opds2/feed.json +++ b/tests/core/files/opds2/feed.json @@ -12,12 +12,13 @@ "publications": [ { "metadata": { - "@type": "http://schema.org/Book", + "@type": "http://schema.org/Audiobook", "title": "Moby-Dick", "author": "Herman Melville", "identifier": "urn:isbn:978-3-16-148410-0", "duration": 100.2, "language": "en", + "http://palaceproject.io/terms/timeTracking": true, "publisher": { "name": "Test Publisher" }, @@ -36,12 +37,12 @@ { "rel": "self", "href": "http://example.org/publication.json", - "type": "application/opds-publication+json" + "type": "application/audiobook+json" }, { "rel": "http://opds-spec.org/acquisition/open-access", "href": "http://example.org/moby-dick.epub", - "type": "application/epub+zip" + "type": "application/audiobook+json" } ], "images": [ @@ -68,6 +69,7 @@ "@type": "http://schema.org/Book", "title": "Adventures of Huckleberry Finn", "description": "Adventures of Huckleberry Finn is a novel by Mark Twain, first published in the United Kingdom in December 1884 and in the United States in February 1885.", + "http://palaceproject.io/terms/timeTracking": true, "author": [ { "name": "Mark Twain" diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index 54b03dd196..bac2dd61d6 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -5,7 +5,6 @@ import pytest from _pytest.logging import LogCaptureFixture from requests import Response -from webpub_manifest_parser.opds2 import OPDS2FeedParserFactory from api.circulation import CirculationAPI, FulfillmentInfo from api.circulation_exceptions import CannotFulfill @@ -26,7 +25,12 @@ ) from core.model.collection import Collection from core.model.constants import IdentifierType -from core.opds2_import import OPDS2API, OPDS2Importer, RWPMManifestParser +from core.opds2_import import ( + OPDS2API, + OPDS2Importer, + PalaceOPDS2FeedParserFactory, + RWPMManifestParser, +) from 
tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.opds2_files import OPDS2FilesFixture @@ -107,7 +111,7 @@ def opds2_importer_fixture( ) data.collection.data_source = data.data_source data.importer = OPDS2Importer( - db.session, data.collection, RWPMManifestParser(OPDS2FeedParserFactory()) + db.session, data.collection, RWPMManifestParser(PalaceOPDS2FeedParserFactory()) ) return data @@ -170,7 +174,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( assert "Moby-Dick" == moby_dick_edition.title assert "eng" == moby_dick_edition.language assert "eng" == moby_dick_edition.language - assert EditionConstants.BOOK_MEDIUM == moby_dick_edition.medium + assert EditionConstants.AUDIO_MEDIUM == moby_dick_edition.medium assert "Herman Melville" == moby_dick_edition.author assert moby_dick_edition.duration == 100.2 @@ -263,6 +267,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( assert moby_dick_license_pool.open_access assert LicensePool.UNLIMITED_ACCESS == moby_dick_license_pool.licenses_owned assert LicensePool.UNLIMITED_ACCESS == moby_dick_license_pool.licenses_available + assert True == moby_dick_license_pool.should_track_playtime assert 1 == len(moby_dick_license_pool.delivery_mechanisms) [moby_dick_delivery_mechanism] = moby_dick_license_pool.delivery_mechanisms @@ -271,7 +276,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( == moby_dick_delivery_mechanism.delivery_mechanism.drm_scheme ) assert ( - MediaTypes.EPUB_MEDIA_TYPE + MediaTypes.AUDIOBOOK_MANIFEST_MEDIA_TYPE == moby_dick_delivery_mechanism.delivery_mechanism.content_type ) @@ -288,6 +293,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( LicensePool.UNLIMITED_ACCESS == huckleberry_finn_license_pool.licenses_available ) + assert False == huckleberry_finn_license_pool.should_track_playtime assert 2 == len(huckleberry_finn_license_pool.delivery_mechanisms) huckleberry_finn_delivery_mechanisms = ( diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 72ff75c013..11cc9f1f53 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -226,6 +226,9 @@ def test_extract_metadata(self, opds_importer_fixture: OPDSImporterFixture): assert data_source_name == c1._data_source assert data_source_name == c2._data_source + assert m1.circulation.should_track_playtime == True + assert m2.circulation.should_track_playtime == False + [[failure]] = list(failures.values()) assert isinstance(failure, CoverageFailure) assert ( @@ -797,7 +800,7 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): assert crow.license_pools[0].collection == db.default_collection() assert mouse.work is not None - assert mouse.medium == Edition.PERIODICAL_MEDIUM + assert mouse.medium == Edition.AUDIO_MEDIUM # Four links have been added to the identifier of the 'mouse' # edition. @@ -894,7 +897,10 @@ def test_import(self, opds_importer_fixture: OPDSImporterFixture): # Bonus: make sure that delivery mechanisms are set appropriately. 
[mech] = mouse_pool.delivery_mechanisms - assert Representation.EPUB_MEDIA_TYPE == mech.delivery_mechanism.content_type + assert ( + Representation.AUDIOBOOK_MANIFEST_MEDIA_TYPE + == mech.delivery_mechanism.content_type + ) assert DeliveryMechanism.NO_DRM == mech.delivery_mechanism.drm_scheme assert "http://www.gutenberg.org/ebooks/10441.epub.images" == mech.resource.url diff --git a/tests/migration/conftest.py b/tests/migration/conftest.py index 99bef013a2..ec48df49e0 100644 --- a/tests/migration/conftest.py +++ b/tests/migration/conftest.py @@ -4,7 +4,7 @@ import random import string from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Protocol, Union +from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Protocol, Union, cast import pytest import pytest_alembic @@ -150,9 +150,7 @@ class CreateCollection(Protocol): def __call__( self, connection: Connection, - name: Optional[str] = None, - external_integration_id: Optional[int] = None, - external_account_id: Optional[str] = None, + integration_configuration_id: Optional[int] = None, ) -> int: ... @@ -161,16 +159,11 @@ def __call__( def create_collection(random_name: RandomName) -> CreateCollection: def fixture( connection: Connection, - name: Optional[str] = None, - external_integration_id: Optional[int] = None, - external_account_id: Optional[str] = None, + integration_configuration_id: Optional[int] = None, ) -> int: - if name is None: - name = random_name() collection = connection.execute( - "INSERT INTO collections (name, external_account_id, external_integration_id) VALUES" - + "(%s, %s, %s) returning id", - (name, external_account_id, external_integration_id), + "INSERT INTO collections (integration_configuration_id) VALUES (%s) returning id", + integration_configuration_id, ).fetchone() assert collection is not None assert isinstance(collection.id, int) @@ -258,3 +251,131 @@ def fixture( return setting.id return fixture + + +class CreateIntegrationConfiguration(Protocol): + def __call__( + self, + connection: Connection, + name: str, + protocol: str, + goal: str, + settings: Optional[Dict[str, Any]] = None, + ) -> int: + ... + + +@pytest.fixture +def create_integration_configuration() -> CreateIntegrationConfiguration: + def fixture( + connection: Connection, + name: str, + protocol: str, + goal: str, + settings: Optional[Dict[str, Any]] = None, + ) -> int: + if settings is None: + settings = {} + + settings_str = json_serializer(settings) + + integration_configuration = connection.execute( + "INSERT INTO integration_configurations (name, protocol, goal, settings, self_test_results, context) " + "VALUES (%s, %s, %s, %s, '{}', '{}') returning id", + name, + protocol, + goal, + settings_str, + ).fetchone() + assert integration_configuration is not None + assert isinstance(integration_configuration.id, int) + return integration_configuration.id + + return fixture + + +class CreateEdition(Protocol): + def __call__( + self, + connection: Connection, + title: str, + medium: str, + primary_identifier_id: int, + ) -> int: + ... 
+ + +@pytest.fixture +def create_edition() -> CreateEdition: + def fixture( + connection: Connection, title: str, medium: str, primary_identifier_id: int + ) -> int: + edition = connection.execute( + "INSERT INTO editions (title, medium, primary_identifier_id) VALUES (%s, %s, %s) returning id", + title, + medium, + primary_identifier_id, + ).fetchone() + assert edition is not None + return cast(int, edition.id) + + return fixture + + +class CreateIdentifier(Protocol): + def __call__( + self, + connection: Connection, + identifier: str, + type: str, + ) -> int: + ... + + +@pytest.fixture +def create_identifier() -> CreateIdentifier: + def fixture( + connection: Connection, + identifier: str, + type: str, + ) -> int: + identifier_row = connection.execute( + "INSERT INTO identifiers (identifier, type) VALUES (%s, %s) returning id", + identifier, + type, + ).fetchone() + assert identifier_row is not None + return cast(int, identifier_row.id) + + return fixture + + +class CreateLicensePool(Protocol): + def __call__( + self, + connection: Connection, + collection_id: int, + identifier_id: Optional[int] = None, + should_track_playtime: Optional[bool] = False, + ) -> int: + ... + + +@pytest.fixture +def create_license_pool() -> CreateLicensePool: + def fixture( + connection: Connection, + collection_id: int, + identifier_id: Optional[int] = None, + should_track_playtime: Optional[bool] = False, + ) -> int: + licensepool = connection.execute( + "INSERT into licensepools (collection_id, identifier_id, should_track_playtime) VALUES (%(id)s, %(identifier_id)s, %(track)s) returning id", + id=collection_id, + identifier_id=identifier_id, + track=should_track_playtime, + ).fetchone() + assert licensepool is not None + return cast(int, licensepool.id) + + return fixture diff --git a/tests/migration/test_20230510_a9ed3f76d649.py b/tests/migration/test_20230510_a9ed3f76d649.py deleted file mode 100644 index e7bbb0cb7e..0000000000 --- a/tests/migration/test_20230510_a9ed3f76d649.py +++ /dev/null @@ -1,231 +0,0 @@ -from __future__ import annotations - -import json -from typing import TYPE_CHECKING - -from sqlalchemy import inspect - -if TYPE_CHECKING: - from pytest_alembic import MigrationContext - from sqlalchemy.engine import Engine - - from tests.migration.conftest import ( - CreateConfigSetting, - CreateExternalIntegration, - CreateLibrary, - ) - - -def assert_tables_exist(alembic_engine: Engine) -> None: - # We should have the tables for this migration - insp = inspect(alembic_engine) - assert "integration_configurations" in insp.get_table_names() - assert "integration_library_configurations" in insp.get_table_names() - assert "integration_errors" in insp.get_table_names() - - # We should have the enum defined in this migration - with alembic_engine.connect() as connection: - result = connection.execute("SELECT * FROM pg_type WHERE typname = 'goals'") - assert result.rowcount == 1 - result = connection.execute("SELECT * FROM pg_type WHERE typname = 'status'") - assert result.rowcount == 1 - - -def assert_tables_dont_exist(alembic_engine: Engine) -> None: - # We should not have the tables for this migration - insp = inspect(alembic_engine) - assert "integration_configurations" not in insp.get_table_names() - assert "integration_library_configurations" not in insp.get_table_names() - assert "integration_errors" not in insp.get_table_names() - - # We should not have the enum defined in this migration - with alembic_engine.connect() as connection: - result = connection.execute("SELECT * FROM pg_type WHERE 
typname = 'goals'") - assert result.rowcount == 0 - result = connection.execute("SELECT * FROM pg_type WHERE typname = 'status'") - assert result.rowcount == 0 - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, -) -> None: - # Migrate to just before our migration - alembic_runner.migrate_down_to("a9ed3f76d649") - assert_tables_exist(alembic_engine) - - # Migrate down past our migration, running the downgrade migration - alembic_runner.migrate_down_one() - assert_tables_dont_exist(alembic_engine) - - # Insert configuration settings for testing - with alembic_engine.connect() as connection: - # Set up two libraries - library = create_library(connection) - library2 = create_library(connection) - - # Set up four integrations - sip_integration = create_external_integration( - connection, "api.sip", "patron_auth", "Integration 1" - ) - millenium_integration = create_external_integration( - connection, "api.millenium_patron", "patron_auth", "Integration 2" - ) - simple_integration = create_external_integration( - connection, "api.simple_authentication", "patron_auth", "Integration 3" - ) - unrelated_integration = create_external_integration( - connection, "unrelated", "other_goal", "Integration 4" - ) - - # Add configuration settings for the sip integration - create_config_setting(connection, "setting1", "value1", sip_integration) - create_config_setting(connection, "url", "sip url", sip_integration) - create_config_setting( - connection, "institution_id", "institution", sip_integration - ) - create_config_setting( - connection, - "self_test_results", - json.dumps({"test": "test"}), - sip_integration, - ) - create_config_setting( - connection, "patron status block", "false", sip_integration - ) - create_config_setting( - connection, "identifier_barcode_format", "", sip_integration - ) - create_config_setting( - connection, "institution_id", "bar", sip_integration, library - ) - - # Add configuration settings for the millenium integration - create_config_setting(connection, "setting2", "value2", millenium_integration) - create_config_setting( - connection, "url", "https://url.com", millenium_integration - ) - create_config_setting( - connection, "verify_certificate", "false", millenium_integration - ) - create_config_setting( - connection, "use_post_requests", "true", millenium_integration - ) - create_config_setting( - connection, - "identifier_blacklist", - json.dumps(["a", "b", "c"]), - millenium_integration, - ) - create_config_setting( - connection, - "library_identifier_field", - "foo", - millenium_integration, - library, - ) - - # Add configuration settings for the simple integration - create_config_setting(connection, "test_identifier", "123", simple_integration) - create_config_setting(connection, "test_password", "456", simple_integration) - - # Associate the millenium integration with the library - connection.execute( - "INSERT INTO externalintegrations_libraries (library_id, externalintegration_id) VALUES (%s, %s)", - (library, millenium_integration), - ) - - # Associate the simple integration with library 2 - connection.execute( - "INSERT INTO externalintegrations_libraries (library_id, externalintegration_id) VALUES (%s, %s)", - (library2, simple_integration), - ) - - # Migrate back up, running our upgrade migration - alembic_runner.migrate_up_one() - assert_tables_exist(alembic_engine) - - # Check that the 
configuration settings were migrated correctly - with alembic_engine.connect() as connection: - # Check that we have the correct number of integrations - integrations = connection.execute( - "SELECT * FROM integration_configurations", - ) - assert integrations.rowcount == 3 - - # Check that the sip integration was migrated correctly - # The unknown setting 'setting1' was dropped, self test results were migrated, and the patron status block - # setting was renamed, based on the field alias. - sip_result = connection.execute( - "SELECT protocol, goal, settings, self_test_results FROM integration_configurations WHERE name = %s", - ("Integration 1",), - ).fetchone() - assert sip_result is not None - assert sip_result[0] == "api.sip" - assert sip_result[1] == "PATRON_AUTH_GOAL" - assert sip_result[2] == { - "patron_status_block": False, - "url": "sip url", - } - assert sip_result[3] == {"test": "test"} - - # Check that the millenium integration was migrated correctly - # The unknown setting 'setting2' was dropped, the list and bool values were serialized correctly, and - # the empty self test results were migrated as an empty dict. - millenium_result = connection.execute( - "SELECT protocol, goal, settings, self_test_results, id FROM integration_configurations WHERE name = %s", - ("Integration 2",), - ).fetchone() - assert millenium_result is not None - assert millenium_result[0] == "api.millenium_patron" - assert millenium_result[1] == "PATRON_AUTH_GOAL" - assert millenium_result[2] == { - "url": "https://url.com", - "verify_certificate": False, - "use_post_requests": True, - "identifier_blacklist": ["a", "b", "c"], - } - assert millenium_result[3] == {} - - # Check that the simple integration was migrated correctly - simple_result = connection.execute( - "SELECT protocol, goal, settings, self_test_results, id FROM integration_configurations WHERE name = %s", - ("Integration 3",), - ).fetchone() - assert simple_result is not None - assert simple_result[0] == "api.simple_authentication" - assert simple_result[1] == "PATRON_AUTH_GOAL" - assert simple_result[2] == { - "test_identifier": "123", - "test_password": "456", - } - assert simple_result[3] == {} - - # Check that we have the correct number of library integrations - # The SIP integration has library settings, but no association with a library, so no - # library integration was created for it. And the simple auth integration has a library - # association, but no library settings, so we do create a integration with no settings for it. 
- integrations = connection.execute( - "SELECT parent_id, library_id, settings FROM integration_library_configurations ORDER BY library_id asc", - ) - assert integrations.rowcount == 2 - - # Check that the millenium integration was migrated correctly - [ - millenium_library_integration, - simple_library_integration, - ] = integrations.fetchall() - assert millenium_library_integration is not None - assert millenium_library_integration[0] == millenium_result[4] - assert millenium_library_integration[1] == library - assert millenium_library_integration[2] == { - "library_identifier_field": "foo", - } - - assert simple_library_integration is not None - assert simple_library_integration[0] == simple_result[4] - assert simple_library_integration[1] == library2 - assert simple_library_integration[2] == {} diff --git a/tests/migration/test_20230512_5a425ebe026c.py b/tests/migration/test_20230512_5a425ebe026c.py deleted file mode 100644 index a664656887..0000000000 --- a/tests/migration/test_20230512_5a425ebe026c.py +++ /dev/null @@ -1,123 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING, Callable, Optional - -import pytest - -if TYPE_CHECKING: - from pytest_alembic import MigrationContext - from sqlalchemy.engine import Connection, Engine - - from tests.migration.conftest import CreateConfigSetting, CreateExternalIntegration - - -@pytest.fixture -def create_test_settings( - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, -) -> Callable[..., int]: - def fixture( - connection: Connection, - url: str, - post: Optional[str] = None, - set_post: bool = True, - ) -> int: - integration = create_external_integration( - connection, protocol="api.millenium_patron" - ) - create_config_setting( - connection, integration_id=integration, key="url", value=url - ) - if set_post: - create_config_setting( - connection, - integration_id=integration, - key="use_post_requests", - value=post, - ) - - return integration - - return fixture - - -def assert_setting(connection: Connection, integration_id: int, value: str) -> None: - result = connection.execute( - "SELECT cs.value FROM configurationsettings cs join externalintegrations ei ON cs.external_integration_id = ei.id WHERE ei.id=%(id)s and cs.key='use_post_requests'", - id=integration_id, - ) - row = result.fetchone() - assert row is not None - assert row.value == value - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_test_settings: Callable[..., int], -) -> None: - alembic_runner.migrate_down_to("5a425ebe026c") - - # Test down migration - with alembic_engine.connect() as connection: - integration = create_test_settings( - connection, "https://vlc.thepalaceproject.org" - ) - - alembic_runner.migrate_down_one() - - with alembic_engine.connect() as connection: - assert_setting(connection, integration, "false") - - # Test up migration - with alembic_engine.connect() as connection: - integration_dev = create_test_settings( - connection, "http://vlc.dev.palaceproject.io/api", "false" - ) - integration_staging = create_test_settings( - connection, "https://vlc.staging.palaceproject.io/PATRONAPI", "false" - ) - integration_local1 = create_test_settings( - connection, "localhost:6500/PATRONAPI", "false" - ) - integration_local2 = create_test_settings( - connection, "http://localhost:6500/api", "false" - ) - integration_prod = create_test_settings( - connection, "https://vlc.thepalaceproject.org/anything...", "false" - ) - integration_other = 
create_test_settings( - connection, "https://vendor.millenium.com/PATRONAPI", "false" - ) - integration_null = create_test_settings( - connection, "http://vlc.dev.palaceproject.io/api" - ) - integration_missing = create_test_settings( - connection, "http://vlc.dev.palaceproject.io/api", set_post=False - ) - - alembic_runner.migrate_up_one() - - with alembic_engine.connect() as connection: - assert_setting(connection, integration, "true") - assert_setting(connection, integration_dev, "true") - assert_setting(connection, integration_staging, "true") - assert_setting(connection, integration_local1, "true") - assert_setting(connection, integration_local2, "true") - assert_setting(connection, integration_prod, "true") - assert_setting(connection, integration_other, "false") - assert_setting(connection, integration_null, "true") - assert_setting(connection, integration_missing, "true") - - alembic_runner.migrate_down_one() - - with alembic_engine.connect() as connection: - assert_setting(connection, integration, "false") - assert_setting(connection, integration_dev, "false") - assert_setting(connection, integration_staging, "false") - assert_setting(connection, integration_local1, "false") - assert_setting(connection, integration_local2, "false") - assert_setting(connection, integration_prod, "false") - assert_setting(connection, integration_other, "false") - assert_setting(connection, integration_null, "false") - assert_setting(connection, integration_missing, "false") diff --git a/tests/migration/test_20230531_0af587ff8595.py b/tests/migration/test_20230531_0af587ff8595.py deleted file mode 100644 index 26451e2bca..0000000000 --- a/tests/migration/test_20230531_0af587ff8595.py +++ /dev/null @@ -1,142 +0,0 @@ -from dataclasses import dataclass -from typing import Any, Dict - -from pytest_alembic import MigrationContext -from sqlalchemy.engine import Connection, Engine - -from tests.migration.conftest import ( - CreateCollection, - CreateConfigSetting, - CreateExternalIntegration, - CreateLibrary, -) - - -@dataclass -class IntegrationConfiguration: - name: str - goal: str - id: int - settings: Dict[str, Any] - library_settings: Dict[int, Dict[str, Any]] - - -def query_integration_configurations( - connection: Connection, goal: str, name: str -) -> IntegrationConfiguration: - result = connection.execute( - "select id, name, protocol, goal, settings from integration_configurations where goal=%s and name=%s", - (goal, name), - ).fetchone() - assert result is not None - - library_results = connection.execute( - "select library_id, settings from integration_library_configurations where parent_id=%s", - result.id, - ).fetchall() - - library_settings = {lr.library_id: lr.settings for lr in library_results} - return IntegrationConfiguration( - result.name, result.goal, result.id, result.settings, library_settings - ) - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, - create_collection: CreateCollection, -) -> None: - """Test the migration of configurationsettings to integration_configurations for the licenses type goals""" - # alembic_runner.set_revision("a9ed3f76d649") - alembic_runner.migrate_down_to("a9ed3f76d649") - with alembic_engine.connect() as connection: - library_id = create_library(connection) - integration_id = create_external_integration( - connection, "Axis 360", "licenses", "Test B&T" - ) - 
create_config_setting(connection, "username", "username", integration_id) - create_config_setting(connection, "password", "password", integration_id) - create_config_setting(connection, "url", "http://url", integration_id) - create_config_setting( - connection, - "default_loan_duration", - "77", - integration_id, - library_id, - associate_library=True, - ) - create_collection(connection, "Test B&T", integration_id, "ExternalAccountID") - - # Fake value, never used - create_config_setting( - connection, "external_account_id", "external_account_id", integration_id - ) - - alembic_runner.migrate_up_to("0af587ff8595") - - with alembic_engine.connect() as connection: - configuration = query_integration_configurations( - connection, "LICENSE_GOAL", "Test B&T" - ) - - assert configuration.settings == { - "username": "username", - "password": "password", - "url": "http://url", - "external_account_id": "ExternalAccountID", - } - assert configuration.library_settings == { - library_id: {"default_loan_duration": 77} - } - - -def test_key_rename( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_external_integration: CreateExternalIntegration, - create_config_setting: CreateConfigSetting, - create_collection: CreateCollection, -) -> None: - alembic_runner.migrate_down_to("a9ed3f76d649") - with alembic_engine.connect() as connection: - integration_id = create_external_integration( - connection, "Overdrive", "licenses", "Test Overdrive" - ) - create_config_setting( - connection, "overdrive_website_id", "website", integration_id - ) - create_config_setting( - connection, "overdrive_client_key", "overdrive_client_key", integration_id - ) - create_config_setting( - connection, - "overdrive_client_secret", - "overdrive_client_secret", - integration_id, - ) - create_collection( - connection, "Test Overdrive", integration_id, "ExternalAccountID" - ) - - # Fake value, never used - create_config_setting( - connection, "external_account_id", "external_account_id", integration_id - ) - - alembic_runner.migrate_up_to("0af587ff8595") - - with alembic_engine.connect() as connection: - configuration = query_integration_configurations( - connection, "LICENSE_GOAL", "Test Overdrive" - ) - - assert configuration.settings == { - "overdrive_website_id": "website", - "overdrive_client_key": "overdrive_client_key", - "overdrive_client_secret": "overdrive_client_secret", - "external_account_id": "ExternalAccountID", - } diff --git a/tests/migration/test_20230711_3d380776c1bf.py b/tests/migration/test_20230711_3d380776c1bf.py deleted file mode 100644 index f90b3c9695..0000000000 --- a/tests/migration/test_20230711_3d380776c1bf.py +++ /dev/null @@ -1,75 +0,0 @@ -from __future__ import annotations - -import json -from typing import TYPE_CHECKING - -from pytest_alembic import MigrationContext -from sqlalchemy import inspect -from sqlalchemy.engine import Engine - -if TYPE_CHECKING: - from tests.migration.conftest import CreateConfigSetting, CreateLibrary - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_config_setting: CreateConfigSetting, - create_library: CreateLibrary, -) -> None: - alembic_runner.migrate_down_to("3d380776c1bf") - - # Test down migration - assert inspect(alembic_engine).has_table("announcements") - alembic_runner.migrate_down_one() - assert not inspect(alembic_engine).has_table("announcements") - - a1 = { - "content": "This is a test library announcement", - "id": "13ab12b8-2e86-449d-b58d-7f3a944d4093", - 
"start": "1990-07-01", - "finish": "1990-07-31", - } - a2 = { - "content": "This is another test library announcement", - "id": "23e0ff93-42f6-4333-8d74-4b162237bd5c", - "start": "2022-02-20", - "finish": "2022-02-21", - } - a3 = { - "content": "This is a test global announcement", - "id": "171208b0-d9bc-433f-a957-444fd32e2993", - "start": "2025-01-01", - "finish": "2025-01-02", - } - - # Test up migration - with alembic_engine.connect() as connection: - library = create_library(connection) - - # Create some library announcements - create_config_setting( - connection, "announcements", json.dumps([a1, a2]), library_id=library - ) - - # Create some global announcements - create_config_setting(connection, "global_announcements", json.dumps([a3])) - - # Run the migration - alembic_runner.migrate_up_one() - - # Make sure settings are migrated into table correctly - with alembic_engine.connect() as connection: - announcements = connection.execute( - "SELECT * FROM announcements order by start" - ).all() - assert len(announcements) == 3 - for actual, expected in zip(announcements, [a1, a2, a3]): - assert str(actual.id) == expected["id"] - assert actual.content == expected["content"] - assert str(actual.start) == expected["start"] - assert str(actual.finish) == expected["finish"] - - assert announcements[0].library_id == library - assert announcements[1].library_id == library - assert announcements[2].library_id is None diff --git a/tests/migration/test_20230719_b3749bac3e55.py b/tests/migration/test_20230719_b3749bac3e55.py deleted file mode 100644 index f25f860070..0000000000 --- a/tests/migration/test_20230719_b3749bac3e55.py +++ /dev/null @@ -1,64 +0,0 @@ -import json - -from pytest_alembic import MigrationContext -from sqlalchemy import inspect -from sqlalchemy.engine import Engine - -from tests.migration.conftest import CreateConfigSetting, CreateLibrary - - -def column_exists(engine: Engine, table_name: str, column_name: str) -> bool: - inspector = inspect(engine) - columns = [column["name"] for column in inspector.get_columns(table_name)] - return column_name in columns - - -def test_migration( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_config_setting: CreateConfigSetting, - create_library: CreateLibrary, -) -> None: - alembic_runner.migrate_down_to("b3749bac3e55") - - # Make sure settings column exists - assert column_exists(alembic_engine, "libraries", "settings_dict") - - # Test down migration, make sure settings column is dropped - alembic_runner.migrate_down_one() - assert not column_exists(alembic_engine, "libraries", "settings_dict") - - # Create a library with some configuration settings - with alembic_engine.connect() as connection: - library = create_library(connection) - create_config_setting( - connection, "website", "https://foo.bar", library_id=library - ) - create_config_setting( - connection, "help_web", "https://foo.bar/helpme", library_id=library - ) - create_config_setting( - connection, "logo", "https://foo.bar/logo.png", library_id=library - ) - create_config_setting(connection, "key-pair", "foo", library_id=library) - create_config_setting(connection, "foo", "foo", library_id=library) - create_config_setting( - connection, - "enabled_entry_points", - json.dumps(["xyz", "abc"]), - library_id=library, - ) - - # Run the up migration, and make sure settings column is added - alembic_runner.migrate_up_one() - assert column_exists(alembic_engine, "libraries", "settings_dict") - - # Make sure settings are migrated into table correctly - with 
alembic_engine.connect() as connection: - result = connection.execute("select settings_dict from libraries").fetchone() - assert result is not None - settings_dict = result.settings_dict - assert len(settings_dict) == 3 - assert settings_dict["website"] == "https://foo.bar" - assert settings_dict["help_web"] == "https://foo.bar/helpme" - assert settings_dict["enabled_entry_points"] == ["xyz", "abc"] diff --git a/tests/migration/test_20230905_2b672c6fb2b9.py b/tests/migration/test_20230905_2b672c6fb2b9.py deleted file mode 100644 index fa3e94a605..0000000000 --- a/tests/migration/test_20230905_2b672c6fb2b9.py +++ /dev/null @@ -1,167 +0,0 @@ -import json -from typing import Any, Dict - -import pytest -from pytest_alembic import MigrationContext -from sqlalchemy.engine import Connection, Engine - -from tests.migration.conftest import CreateLibrary - - -class CreateConfiguration: - def __call__( - self, - connection: Connection, - goal: str, - protocol: str, - name: str, - settings: Dict[str, Any], - ) -> int: - integration_configuration = connection.execute( - "INSERT INTO integration_configurations (goal, protocol, name, settings, self_test_results) VALUES (%s, %s, %s, %s, '{}') returning id", - goal, - protocol, - name, - json.dumps(settings), - ).fetchone() - assert integration_configuration is not None - assert isinstance(integration_configuration.id, int) - return integration_configuration.id - - -@pytest.fixture -def create_integration_configuration() -> CreateConfiguration: - return CreateConfiguration() - - -def fetch_config(connection: Connection, _id: int) -> Dict[str, Any]: - integration_config = connection.execute( - "SELECT settings FROM integration_configurations where id=%s", _id - ).fetchone() - assert integration_config is not None - assert isinstance(integration_config.settings, dict) - return integration_config.settings - - -def fetch_library_config( - connection: Connection, parent_id: int, library_id: int -) -> Dict[str, Any]: - integration_lib_config = connection.execute( - "SELECT parent_id, settings FROM integration_library_configurations where parent_id=%s and library_id=%s", - parent_id, - library_id, - ).fetchone() - assert integration_lib_config is not None - assert isinstance(integration_lib_config.settings, dict) - return integration_lib_config.settings - - -MIGRATION_UID = "2b672c6fb2b9" - - -def test_settings_coersion( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_library: CreateLibrary, - create_integration_configuration: CreateConfiguration, -) -> None: - alembic_runner.migrate_down_to(MIGRATION_UID) - alembic_runner.migrate_down_one() - - with alembic_engine.connect() as connection: - config_id = create_integration_configuration( - connection, - "LICENSE_GOAL", - "Axis 360", - "axis-test-1", - dict( - verify_certificate="true", - loan_limit="20", - default_reservation_period="12", - key="value", - ), - ) - - # Test 2 library configs, to the same parent - library_id = create_library(connection) - library_id2 = create_library(connection) - - library_settings = dict( - hold_limit="30", - max_retry_count="2", - ebook_loan_duration="10", - default_loan_duration="11", - unchanged="value", - ) - connection.execute( - "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", - library_id, - config_id, - json.dumps(library_settings), - ) - library_settings = dict( - hold_limit="31", - max_retry_count="3", - ebook_loan_duration="", - default_loan_duration="12", - unchanged="value1", - ) - 
connection.execute( - "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", - library_id2, - config_id, - json.dumps(library_settings), - ) - - other_config_settings = dict( - verify_certificate="true", - loan_limit="20", - default_reservation_period="12", - key="value", - ) - other_config_id = create_integration_configuration( - connection, "PATRON_AUTH_GOAL", "Other", "other-test", other_config_settings - ) - connection.execute( - "INSERT INTO integration_library_configurations (library_id, parent_id, settings) VALUES (%s, %s, %s)", - library_id2, - other_config_id, - json.dumps(other_config_settings), - ) - - alembic_runner.migrate_up_one() - - axis_config = fetch_config(connection, config_id) - assert axis_config["verify_certificate"] == True - assert axis_config["loan_limit"] == 20 - assert axis_config["default_reservation_period"] == 12 - # Unknown settings remain as-is - assert axis_config["key"] == "value" - - odl_config = fetch_library_config( - connection, parent_id=config_id, library_id=library_id - ) - assert odl_config["hold_limit"] == 30 - assert odl_config["max_retry_count"] == 2 - assert odl_config["ebook_loan_duration"] == 10 - assert odl_config["default_loan_duration"] == 11 - # Unknown settings remain as-is - assert odl_config["unchanged"] == "value" - - odl_config2 = fetch_library_config( - connection, parent_id=config_id, library_id=library_id2 - ) - assert odl_config2["hold_limit"] == 31 - assert odl_config2["max_retry_count"] == 3 - assert odl_config2["ebook_loan_duration"] is None - assert odl_config2["default_loan_duration"] == 12 - # Unknown settings remain as-is - assert odl_config2["unchanged"] == "value1" - - # Other integration is unchanged - other_config = fetch_config(connection, other_config_id) - assert other_config == other_config_settings - other_library_config = fetch_library_config( - connection, parent_id=other_config_id, library_id=library_id2 - ) - assert other_library_config == other_config_settings diff --git a/tests/migration/test_20231124_1c14468b74ce.py b/tests/migration/test_20231124_1c14468b74ce.py new file mode 100644 index 0000000000..d96137c66f --- /dev/null +++ b/tests/migration/test_20231124_1c14468b74ce.py @@ -0,0 +1,95 @@ +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from pytest_alembic import MigrationContext + from sqlalchemy.engine import Engine + + from tests.migration.conftest import ( + CreateCollection, + CreateEdition, + CreateIdentifier, + CreateIntegrationConfiguration, + CreateLicensePool, + ) + +MIGRATION_UID = "1c14468b74ce" + + +def test_migration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_collection: CreateCollection, + create_integration_configuration: CreateIntegrationConfiguration, + create_edition: CreateEdition, + create_identifier: CreateIdentifier, + create_license_pool: CreateLicensePool, +) -> None: + alembic_runner.migrate_up_to(MIGRATION_UID) + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + ic_id_incorrect_protocol = create_integration_configuration( + connection, + "configuration_badprotocol", + "OPDS 1.0", + "LICENSE_GOAL", + {}, + ) + collection_id_incorrect_protocol = create_collection( + connection, + integration_configuration_id=ic_id_incorrect_protocol, + ) + + ic_id1 = create_integration_configuration( + connection, "configuration1", "OPDS for Distributors", "LICENSE_GOAL", {} + ) + collection_id = create_collection( + connection, 
integration_configuration_id=ic_id1 + ) + + identifier_id1 = create_identifier(connection, "identifier-1", "type") + edition_id1 = create_edition(connection, "title", "Audio", identifier_id1) + lp1_id = create_license_pool( + connection, + collection_id, + identifier_id=identifier_id1, + should_track_playtime=False, + ) + + # Should not update because of incorrect medium + identifier_id2 = create_identifier(connection, "identifier-2", "type") + edition_id2 = create_edition(connection, "title", "Book", identifier_id2) + lp2_id = create_license_pool( + connection, + collection_id, + identifier_id=identifier_id2, + should_track_playtime=False, + ) + + # Should not update because of incorrect collection protocol + lp3_id = create_license_pool( + connection, + collection_id_incorrect_protocol, + identifier_id=identifier_id1, + should_track_playtime=False, + ) + + # Should update this one as well + identifier_id3 = create_identifier(connection, "identifier-3", "other-type") + edition_id3 = create_edition(connection, "title-1", "Audio", identifier_id3) + lp4_id = create_license_pool( + connection, + collection_id, + identifier_id=identifier_id3, + should_track_playtime=False, + ) + + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as connection: + should_track = connection.execute( + "select should_track_playtime from licensepools order by id" + ).all() + assert should_track == [(True,), (False,), (False,), (True,)] From 9752886d16aeab3d2f2402fc097942f6481d9a5e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 16:52:47 +0000 Subject: [PATCH 195/262] Bump types-psycopg2 from 2.9.21.18 to 2.9.21.19 (#1542) --- poetry.lock | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index edbbf04906..08cb8a6b91 100644 --- a/poetry.lock +++ b/poetry.lock @@ -79,6 +79,7 @@ description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ + {file = "bcrypt-4.1.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:196008d91201bbb1aa4e666fee5e610face25d532e433a560cabb33bfdff958b"}, {file = "bcrypt-4.1.1-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:2e197534c884336f9020c1f3a8efbaab0aa96fc798068cb2da9c671818b7fbb0"}, {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d573885b637815a7f3a3cd5f87724d7d0822da64b0ab0aa7f7c78bae534e86dc"}, {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab33473f973e8058d1b2df8d6e095d237c49fbf7a02b527541a86a5d1dc4444"}, @@ -4166,13 +4167,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.18" +version = "2.9.21.19" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.7" files = [ - {file = "types-psycopg2-2.9.21.18.tar.gz", hash = "sha256:5082c61e8e400ac9eda06ec2e0f11a9885c575339bc1bf9b61754021fb259de7"}, - {file = "types_psycopg2-2.9.21.18-py3-none-any.whl", hash = "sha256:66b8a882a878003f963a3a004dff328d7d34bfe1802c8bc625d24db79206130b"}, + {file = "types-psycopg2-2.9.21.19.tar.gz", hash = "sha256:ec3aae522dde9c41141597bc41123b4c955fb4093b1fc7ec6ee607795a0a088f"}, + {file = "types_psycopg2-2.9.21.19-py3-none-any.whl", hash = "sha256:8a4871df20c29e516622be8d66b91814c3262ff94112ff9e2f72a043d8fdf03c"}, ] [[package]] From fdac72f30295547045474f821e3ff9626c3f83b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" 
<49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 15:35:46 +0000 Subject: [PATCH 196/262] Bump boto3 from 1.33.2 to 1.33.5 (#1543) --- poetry.lock | 41 ++++++++++++++++++++++------------------- 1 file changed, 22 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index 08cb8a6b91..28196023df 100644 --- a/poetry.lock +++ b/poetry.lock @@ -133,37 +133,37 @@ files = [ [[package]] name = "boto3" -version = "1.33.2" +version = "1.33.5" description = "The AWS SDK for Python" optional = false python-versions = ">= 3.7" files = [ - {file = "boto3-1.33.2-py3-none-any.whl", hash = "sha256:fc7c0dd5fa74ae0d57e11747695bdba4ad164e62dee35db15b43762c392fbd92"}, - {file = "boto3-1.33.2.tar.gz", hash = "sha256:70626598dd6698d6da8f2854a1ae5010f175572e2a465b2aa86685c745c1013c"}, + {file = "boto3-1.33.5-py3-none-any.whl", hash = "sha256:fcc24f62a1f512dd9b4a7a8af6f5fbfb3d69842a92aa2e79c2ca551ac49a4757"}, + {file = "boto3-1.33.5.tar.gz", hash = "sha256:6a1d938bbf11518b1d17ca8186168f3ba2a0e8b2bf3c82cdd810ecb884627d2a"}, ] [package.dependencies] -botocore = ">=1.33.2,<1.34.0" +botocore = ">=1.33.5,<1.34.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.8.0,<0.9.0" +s3transfer = ">=0.8.2,<0.9.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.33.2" -description = "Type annotations for boto3 1.33.2 generated with mypy-boto3-builder 7.20.3" +version = "1.33.5" +description = "Type annotations for boto3 1.33.5 generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "boto3-stubs-1.33.2.tar.gz", hash = "sha256:2e05ca8000f1feefe2b2efa3bef82367f97979ca2f47c39d7e036a2a399407bb"}, - {file = "boto3_stubs-1.33.2-py3-none-any.whl", hash = "sha256:53c42cb2336694edbdf1a666636442d6b2dab7e9ac7c4707e35e0074f6638b40"}, + {file = "boto3-stubs-1.33.5.tar.gz", hash = "sha256:40d7a52e60d477822655938083be43a9097a405f1d748ce86f5233685e0cddcc"}, + {file = "boto3_stubs-1.33.5-py3-none-any.whl", hash = "sha256:4f19917a817f5530c5a05924ff009929218664c75140f47fd57e3ba6d477ab48"}, ] [package.dependencies] -boto3 = {version = "1.33.2", optional = true, markers = "extra == \"boto3\""} -botocore = {version = "1.33.2", optional = true, markers = "extra == \"boto3\""} +boto3 = {version = "1.33.5", optional = true, markers = "extra == \"boto3\""} +botocore = {version = "1.33.5", optional = true, markers = "extra == \"boto3\""} botocore-stubs = "*" mypy-boto3-cloudformation = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} mypy-boto3-dynamodb = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} @@ -182,7 +182,7 @@ account = ["mypy-boto3-account (>=1.33.0,<1.34.0)"] acm = ["mypy-boto3-acm (>=1.33.0,<1.34.0)"] acm-pca = ["mypy-boto3-acm-pca (>=1.33.0,<1.34.0)"] alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)", "mypy-boto3-account (>=1.33.0,<1.34.0)", "mypy-boto3-acm (>=1.33.0,<1.34.0)", "mypy-boto3-acm-pca (>=1.33.0,<1.34.0)", "mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-amp (>=1.33.0,<1.34.0)", "mypy-boto3-amplify (>=1.33.0,<1.34.0)", "mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)", "mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)", "mypy-boto3-apigateway (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)", "mypy-boto3-appconfig (>=1.33.0,<1.34.0)", "mypy-boto3-appconfigdata 
(>=1.33.0,<1.34.0)", "mypy-boto3-appfabric (>=1.33.0,<1.34.0)", "mypy-boto3-appflow (>=1.33.0,<1.34.0)", "mypy-boto3-appintegrations (>=1.33.0,<1.34.0)", "mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-application-insights (>=1.33.0,<1.34.0)", "mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-appmesh (>=1.33.0,<1.34.0)", "mypy-boto3-apprunner (>=1.33.0,<1.34.0)", "mypy-boto3-appstream (>=1.33.0,<1.34.0)", "mypy-boto3-appsync (>=1.33.0,<1.34.0)", "mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)", "mypy-boto3-athena (>=1.33.0,<1.34.0)", "mypy-boto3-auditmanager (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)", "mypy-boto3-b2bi (>=1.33.0,<1.34.0)", "mypy-boto3-backup (>=1.33.0,<1.34.0)", "mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)", "mypy-boto3-backupstorage (>=1.33.0,<1.34.0)", "mypy-boto3-batch (>=1.33.0,<1.34.0)", "mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-billingconductor (>=1.33.0,<1.34.0)", "mypy-boto3-braket (>=1.33.0,<1.34.0)", "mypy-boto3-budgets (>=1.33.0,<1.34.0)", "mypy-boto3-ce (>=1.33.0,<1.34.0)", "mypy-boto3-chime (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)", "mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)", "mypy-boto3-cloud9 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)", "mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)", "mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)", "mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)", "mypy-boto3-codeartifact (>=1.33.0,<1.34.0)", "mypy-boto3-codebuild (>=1.33.0,<1.34.0)", "mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)", "mypy-boto3-codecommit (>=1.33.0,<1.34.0)", "mypy-boto3-codedeploy (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)", "mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-codepipeline (>=1.33.0,<1.34.0)", "mypy-boto3-codestar (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-idp (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)", "mypy-boto3-comprehend (>=1.33.0,<1.34.0)", "mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)", "mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)", "mypy-boto3-config (>=1.33.0,<1.34.0)", "mypy-boto3-connect (>=1.33.0,<1.34.0)", "mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)", "mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)", "mypy-boto3-connectcases (>=1.33.0,<1.34.0)", "mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)", "mypy-boto3-controltower (>=1.33.0,<1.34.0)", "mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)", "mypy-boto3-cur 
(>=1.33.0,<1.34.0)", "mypy-boto3-customer-profiles (>=1.33.0,<1.34.0)", "mypy-boto3-databrew (>=1.33.0,<1.34.0)", "mypy-boto3-dataexchange (>=1.33.0,<1.34.0)", "mypy-boto3-datapipeline (>=1.33.0,<1.34.0)", "mypy-boto3-datasync (>=1.33.0,<1.34.0)", "mypy-boto3-datazone (>=1.33.0,<1.34.0)", "mypy-boto3-dax (>=1.33.0,<1.34.0)", "mypy-boto3-detective (>=1.33.0,<1.34.0)", "mypy-boto3-devicefarm (>=1.33.0,<1.34.0)", "mypy-boto3-devops-guru (>=1.33.0,<1.34.0)", "mypy-boto3-directconnect (>=1.33.0,<1.34.0)", "mypy-boto3-discovery (>=1.33.0,<1.34.0)", "mypy-boto3-dlm (>=1.33.0,<1.34.0)", "mypy-boto3-dms (>=1.33.0,<1.34.0)", "mypy-boto3-docdb (>=1.33.0,<1.34.0)", "mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)", "mypy-boto3-drs (>=1.33.0,<1.34.0)", "mypy-boto3-ds (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)", "mypy-boto3-ebs (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)", "mypy-boto3-ecr (>=1.33.0,<1.34.0)", "mypy-boto3-ecr-public (>=1.33.0,<1.34.0)", "mypy-boto3-ecs (>=1.33.0,<1.34.0)", "mypy-boto3-efs (>=1.33.0,<1.34.0)", "mypy-boto3-eks (>=1.33.0,<1.34.0)", "mypy-boto3-eks-auth (>=1.33.0,<1.34.0)", "mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)", "mypy-boto3-elasticache (>=1.33.0,<1.34.0)", "mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)", "mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)", "mypy-boto3-elb (>=1.33.0,<1.34.0)", "mypy-boto3-elbv2 (>=1.33.0,<1.34.0)", "mypy-boto3-emr (>=1.33.0,<1.34.0)", "mypy-boto3-emr-containers (>=1.33.0,<1.34.0)", "mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-entityresolution (>=1.33.0,<1.34.0)", "mypy-boto3-es (>=1.33.0,<1.34.0)", "mypy-boto3-events (>=1.33.0,<1.34.0)", "mypy-boto3-evidently (>=1.33.0,<1.34.0)", "mypy-boto3-finspace (>=1.33.0,<1.34.0)", "mypy-boto3-finspace-data (>=1.33.0,<1.34.0)", "mypy-boto3-firehose (>=1.33.0,<1.34.0)", "mypy-boto3-fis (>=1.33.0,<1.34.0)", "mypy-boto3-fms (>=1.33.0,<1.34.0)", "mypy-boto3-forecast (>=1.33.0,<1.34.0)", "mypy-boto3-forecastquery (>=1.33.0,<1.34.0)", "mypy-boto3-frauddetector (>=1.33.0,<1.34.0)", "mypy-boto3-freetier (>=1.33.0,<1.34.0)", "mypy-boto3-fsx (>=1.33.0,<1.34.0)", "mypy-boto3-gamelift (>=1.33.0,<1.34.0)", "mypy-boto3-glacier (>=1.33.0,<1.34.0)", "mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)", "mypy-boto3-glue (>=1.33.0,<1.34.0)", "mypy-boto3-grafana (>=1.33.0,<1.34.0)", "mypy-boto3-greengrass (>=1.33.0,<1.34.0)", "mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)", "mypy-boto3-groundstation (>=1.33.0,<1.34.0)", "mypy-boto3-guardduty (>=1.33.0,<1.34.0)", "mypy-boto3-health (>=1.33.0,<1.34.0)", "mypy-boto3-healthlake (>=1.33.0,<1.34.0)", "mypy-boto3-honeycode (>=1.33.0,<1.34.0)", "mypy-boto3-iam (>=1.33.0,<1.34.0)", "mypy-boto3-identitystore (>=1.33.0,<1.34.0)", "mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)", "mypy-boto3-importexport (>=1.33.0,<1.34.0)", "mypy-boto3-inspector (>=1.33.0,<1.34.0)", "mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)", "mypy-boto3-inspector2 (>=1.33.0,<1.34.0)", "mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)", "mypy-boto3-iot (>=1.33.0,<1.34.0)", "mypy-boto3-iot-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)", "mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents-data 
(>=1.33.0,<1.34.0)", "mypy-boto3-iotfleethub (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)", "mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)", "mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)", "mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)", "mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)", "mypy-boto3-iotwireless (>=1.33.0,<1.34.0)", "mypy-boto3-ivs (>=1.33.0,<1.34.0)", "mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)", "mypy-boto3-ivschat (>=1.33.0,<1.34.0)", "mypy-boto3-kafka (>=1.33.0,<1.34.0)", "mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-kendra (>=1.33.0,<1.34.0)", "mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)", "mypy-boto3-keyspaces (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)", "mypy-boto3-kms (>=1.33.0,<1.34.0)", "mypy-boto3-lakeformation (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", "mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)", "mypy-boto3-lex-models (>=1.33.0,<1.34.0)", "mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-lightsail (>=1.33.0,<1.34.0)", "mypy-boto3-location (>=1.33.0,<1.34.0)", "mypy-boto3-logs (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)", "mypy-boto3-m2 (>=1.33.0,<1.34.0)", "mypy-boto3-machinelearning (>=1.33.0,<1.34.0)", "mypy-boto3-macie2 (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)", "mypy-boto3-medialive (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)", "mypy-boto3-mediatailor (>=1.33.0,<1.34.0)", "mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)", "mypy-boto3-memorydb (>=1.33.0,<1.34.0)", "mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)", "mypy-boto3-mgh (>=1.33.0,<1.34.0)", "mypy-boto3-mgn (>=1.33.0,<1.34.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)", "mypy-boto3-mobile (>=1.33.0,<1.34.0)", "mypy-boto3-mq (>=1.33.0,<1.34.0)", "mypy-boto3-mturk (>=1.33.0,<1.34.0)", "mypy-boto3-mwaa (>=1.33.0,<1.34.0)", "mypy-boto3-neptune (>=1.33.0,<1.34.0)", "mypy-boto3-neptunedata (>=1.33.0,<1.34.0)", "mypy-boto3-network-firewall (>=1.33.0,<1.34.0)", "mypy-boto3-networkmanager (>=1.33.0,<1.34.0)", "mypy-boto3-nimble 
(>=1.33.0,<1.34.0)", "mypy-boto3-oam (>=1.33.0,<1.34.0)", "mypy-boto3-omics (>=1.33.0,<1.34.0)", "mypy-boto3-opensearch (>=1.33.0,<1.34.0)", "mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)", "mypy-boto3-opsworks (>=1.33.0,<1.34.0)", "mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)", "mypy-boto3-organizations (>=1.33.0,<1.34.0)", "mypy-boto3-osis (>=1.33.0,<1.34.0)", "mypy-boto3-outposts (>=1.33.0,<1.34.0)", "mypy-boto3-panorama (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)", "mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)", "mypy-boto3-personalize (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-events (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-pi (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)", "mypy-boto3-pipes (>=1.33.0,<1.34.0)", "mypy-boto3-polly (>=1.33.0,<1.34.0)", "mypy-boto3-pricing (>=1.33.0,<1.34.0)", "mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)", "mypy-boto3-proton (>=1.33.0,<1.34.0)", "mypy-boto3-qbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-qconnect (>=1.33.0,<1.34.0)", "mypy-boto3-qldb (>=1.33.0,<1.34.0)", "mypy-boto3-qldb-session (>=1.33.0,<1.34.0)", "mypy-boto3-quicksight (>=1.33.0,<1.34.0)", "mypy-boto3-ram (>=1.33.0,<1.34.0)", "mypy-boto3-rbin (>=1.33.0,<1.34.0)", "mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-rds-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-rekognition (>=1.33.0,<1.34.0)", "mypy-boto3-repostspace (>=1.33.0,<1.34.0)", "mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)", "mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)", "mypy-boto3-resource-groups (>=1.33.0,<1.34.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)", "mypy-boto3-robomaker (>=1.33.0,<1.34.0)", "mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)", "mypy-boto3-route53 (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-readiness (>=1.33.0,<1.34.0)", "mypy-boto3-route53domains (>=1.33.0,<1.34.0)", "mypy-boto3-route53resolver (>=1.33.0,<1.34.0)", "mypy-boto3-rum (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-s3control (>=1.33.0,<1.34.0)", "mypy-boto3-s3outposts (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-savingsplans (>=1.33.0,<1.34.0)", "mypy-boto3-scheduler (>=1.33.0,<1.34.0)", "mypy-boto3-schemas (>=1.33.0,<1.34.0)", "mypy-boto3-sdb (>=1.33.0,<1.34.0)", "mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)", "mypy-boto3-securityhub (>=1.33.0,<1.34.0)", "mypy-boto3-securitylake (>=1.33.0,<1.34.0)", "mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)", "mypy-boto3-service-quotas (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)", "mypy-boto3-servicediscovery (>=1.33.0,<1.34.0)", "mypy-boto3-ses (>=1.33.0,<1.34.0)", 
"mypy-boto3-sesv2 (>=1.33.0,<1.34.0)", "mypy-boto3-shield (>=1.33.0,<1.34.0)", "mypy-boto3-signer (>=1.33.0,<1.34.0)", "mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)", "mypy-boto3-sms (>=1.33.0,<1.34.0)", "mypy-boto3-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)", "mypy-boto3-snowball (>=1.33.0,<1.34.0)", "mypy-boto3-sns (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)", "mypy-boto3-ssm (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)", "mypy-boto3-sso (>=1.33.0,<1.34.0)", "mypy-boto3-sso-admin (>=1.33.0,<1.34.0)", "mypy-boto3-sso-oidc (>=1.33.0,<1.34.0)", "mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)", "mypy-boto3-storagegateway (>=1.33.0,<1.34.0)", "mypy-boto3-sts (>=1.33.0,<1.34.0)", "mypy-boto3-support (>=1.33.0,<1.34.0)", "mypy-boto3-support-app (>=1.33.0,<1.34.0)", "mypy-boto3-swf (>=1.33.0,<1.34.0)", "mypy-boto3-synthetics (>=1.33.0,<1.34.0)", "mypy-boto3-textract (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-query (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-write (>=1.33.0,<1.34.0)", "mypy-boto3-tnb (>=1.33.0,<1.34.0)", "mypy-boto3-transcribe (>=1.33.0,<1.34.0)", "mypy-boto3-transfer (>=1.33.0,<1.34.0)", "mypy-boto3-translate (>=1.33.0,<1.34.0)", "mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)", "mypy-boto3-voice-id (>=1.33.0,<1.34.0)", "mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)", "mypy-boto3-waf (>=1.33.0,<1.34.0)", "mypy-boto3-waf-regional (>=1.33.0,<1.34.0)", "mypy-boto3-wafv2 (>=1.33.0,<1.34.0)", "mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)", "mypy-boto3-wisdom (>=1.33.0,<1.34.0)", "mypy-boto3-workdocs (>=1.33.0,<1.34.0)", "mypy-boto3-worklink (>=1.33.0,<1.34.0)", "mypy-boto3-workmail (>=1.33.0,<1.34.0)", "mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)", "mypy-boto3-xray (>=1.33.0,<1.34.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)", "mypy-boto3-account (>=1.33.0,<1.34.0)", "mypy-boto3-acm (>=1.33.0,<1.34.0)", "mypy-boto3-acm-pca (>=1.33.0,<1.34.0)", "mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-amp (>=1.33.0,<1.34.0)", "mypy-boto3-amplify (>=1.33.0,<1.34.0)", "mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)", "mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)", "mypy-boto3-apigateway (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)", "mypy-boto3-appconfig (>=1.33.0,<1.34.0)", "mypy-boto3-appconfigdata (>=1.33.0,<1.34.0)", "mypy-boto3-appfabric (>=1.33.0,<1.34.0)", "mypy-boto3-appflow (>=1.33.0,<1.34.0)", "mypy-boto3-appintegrations (>=1.33.0,<1.34.0)", "mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-application-insights (>=1.33.0,<1.34.0)", "mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-appmesh (>=1.33.0,<1.34.0)", "mypy-boto3-apprunner (>=1.33.0,<1.34.0)", "mypy-boto3-appstream (>=1.33.0,<1.34.0)", "mypy-boto3-appsync (>=1.33.0,<1.34.0)", "mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)", "mypy-boto3-athena (>=1.33.0,<1.34.0)", "mypy-boto3-auditmanager (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)", "mypy-boto3-b2bi (>=1.33.0,<1.34.0)", "mypy-boto3-backup (>=1.33.0,<1.34.0)", "mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)", 
"mypy-boto3-backupstorage (>=1.33.0,<1.34.0)", "mypy-boto3-batch (>=1.33.0,<1.34.0)", "mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-billingconductor (>=1.33.0,<1.34.0)", "mypy-boto3-braket (>=1.33.0,<1.34.0)", "mypy-boto3-budgets (>=1.33.0,<1.34.0)", "mypy-boto3-ce (>=1.33.0,<1.34.0)", "mypy-boto3-chime (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)", "mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)", "mypy-boto3-cleanroomsml (>=1.33.0,<1.34.0)", "mypy-boto3-cloud9 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)", "mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)", "mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)", "mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)", "mypy-boto3-codeartifact (>=1.33.0,<1.34.0)", "mypy-boto3-codebuild (>=1.33.0,<1.34.0)", "mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)", "mypy-boto3-codecommit (>=1.33.0,<1.34.0)", "mypy-boto3-codedeploy (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)", "mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-codepipeline (>=1.33.0,<1.34.0)", "mypy-boto3-codestar (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-idp (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)", "mypy-boto3-comprehend (>=1.33.0,<1.34.0)", "mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)", "mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)", "mypy-boto3-config (>=1.33.0,<1.34.0)", "mypy-boto3-connect (>=1.33.0,<1.34.0)", "mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)", "mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)", "mypy-boto3-connectcases (>=1.33.0,<1.34.0)", "mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)", "mypy-boto3-controltower (>=1.33.0,<1.34.0)", "mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)", "mypy-boto3-cur (>=1.33.0,<1.34.0)", "mypy-boto3-customer-profiles (>=1.33.0,<1.34.0)", "mypy-boto3-databrew (>=1.33.0,<1.34.0)", "mypy-boto3-dataexchange (>=1.33.0,<1.34.0)", "mypy-boto3-datapipeline (>=1.33.0,<1.34.0)", "mypy-boto3-datasync (>=1.33.0,<1.34.0)", "mypy-boto3-datazone (>=1.33.0,<1.34.0)", "mypy-boto3-dax (>=1.33.0,<1.34.0)", "mypy-boto3-detective (>=1.33.0,<1.34.0)", "mypy-boto3-devicefarm (>=1.33.0,<1.34.0)", "mypy-boto3-devops-guru (>=1.33.0,<1.34.0)", "mypy-boto3-directconnect (>=1.33.0,<1.34.0)", "mypy-boto3-discovery (>=1.33.0,<1.34.0)", "mypy-boto3-dlm (>=1.33.0,<1.34.0)", "mypy-boto3-dms (>=1.33.0,<1.34.0)", "mypy-boto3-docdb (>=1.33.0,<1.34.0)", "mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)", "mypy-boto3-drs (>=1.33.0,<1.34.0)", "mypy-boto3-ds (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb 
(>=1.33.0,<1.34.0)", "mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)", "mypy-boto3-ebs (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)", "mypy-boto3-ecr (>=1.33.0,<1.34.0)", "mypy-boto3-ecr-public (>=1.33.0,<1.34.0)", "mypy-boto3-ecs (>=1.33.0,<1.34.0)", "mypy-boto3-efs (>=1.33.0,<1.34.0)", "mypy-boto3-eks (>=1.33.0,<1.34.0)", "mypy-boto3-eks-auth (>=1.33.0,<1.34.0)", "mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)", "mypy-boto3-elasticache (>=1.33.0,<1.34.0)", "mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)", "mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)", "mypy-boto3-elb (>=1.33.0,<1.34.0)", "mypy-boto3-elbv2 (>=1.33.0,<1.34.0)", "mypy-boto3-emr (>=1.33.0,<1.34.0)", "mypy-boto3-emr-containers (>=1.33.0,<1.34.0)", "mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-entityresolution (>=1.33.0,<1.34.0)", "mypy-boto3-es (>=1.33.0,<1.34.0)", "mypy-boto3-events (>=1.33.0,<1.34.0)", "mypy-boto3-evidently (>=1.33.0,<1.34.0)", "mypy-boto3-finspace (>=1.33.0,<1.34.0)", "mypy-boto3-finspace-data (>=1.33.0,<1.34.0)", "mypy-boto3-firehose (>=1.33.0,<1.34.0)", "mypy-boto3-fis (>=1.33.0,<1.34.0)", "mypy-boto3-fms (>=1.33.0,<1.34.0)", "mypy-boto3-forecast (>=1.33.0,<1.34.0)", "mypy-boto3-forecastquery (>=1.33.0,<1.34.0)", "mypy-boto3-frauddetector (>=1.33.0,<1.34.0)", "mypy-boto3-freetier (>=1.33.0,<1.34.0)", "mypy-boto3-fsx (>=1.33.0,<1.34.0)", "mypy-boto3-gamelift (>=1.33.0,<1.34.0)", "mypy-boto3-glacier (>=1.33.0,<1.34.0)", "mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)", "mypy-boto3-glue (>=1.33.0,<1.34.0)", "mypy-boto3-grafana (>=1.33.0,<1.34.0)", "mypy-boto3-greengrass (>=1.33.0,<1.34.0)", "mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)", "mypy-boto3-groundstation (>=1.33.0,<1.34.0)", "mypy-boto3-guardduty (>=1.33.0,<1.34.0)", "mypy-boto3-health (>=1.33.0,<1.34.0)", "mypy-boto3-healthlake (>=1.33.0,<1.34.0)", "mypy-boto3-honeycode (>=1.33.0,<1.34.0)", "mypy-boto3-iam (>=1.33.0,<1.34.0)", "mypy-boto3-identitystore (>=1.33.0,<1.34.0)", "mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)", "mypy-boto3-importexport (>=1.33.0,<1.34.0)", "mypy-boto3-inspector (>=1.33.0,<1.34.0)", "mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)", "mypy-boto3-inspector2 (>=1.33.0,<1.34.0)", "mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)", "mypy-boto3-iot (>=1.33.0,<1.34.0)", "mypy-boto3-iot-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)", "mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents-data (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleethub (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)", "mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)", "mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)", "mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)", "mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)", "mypy-boto3-iotwireless (>=1.33.0,<1.34.0)", "mypy-boto3-ivs (>=1.33.0,<1.34.0)", "mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)", "mypy-boto3-ivschat (>=1.33.0,<1.34.0)", "mypy-boto3-kafka (>=1.33.0,<1.34.0)", "mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-kendra (>=1.33.0,<1.34.0)", "mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)", "mypy-boto3-keyspaces (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)", 
"mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)", "mypy-boto3-kms (>=1.33.0,<1.34.0)", "mypy-boto3-lakeformation (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", "mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)", "mypy-boto3-lex-models (>=1.33.0,<1.34.0)", "mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-lightsail (>=1.33.0,<1.34.0)", "mypy-boto3-location (>=1.33.0,<1.34.0)", "mypy-boto3-logs (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)", "mypy-boto3-m2 (>=1.33.0,<1.34.0)", "mypy-boto3-machinelearning (>=1.33.0,<1.34.0)", "mypy-boto3-macie2 (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-agreement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-deployment (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)", "mypy-boto3-medialive (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)", "mypy-boto3-mediatailor (>=1.33.0,<1.34.0)", "mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)", "mypy-boto3-memorydb (>=1.33.0,<1.34.0)", "mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)", "mypy-boto3-mgh (>=1.33.0,<1.34.0)", "mypy-boto3-mgn (>=1.33.0,<1.34.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)", "mypy-boto3-mobile (>=1.33.0,<1.34.0)", "mypy-boto3-mq (>=1.33.0,<1.34.0)", "mypy-boto3-mturk (>=1.33.0,<1.34.0)", "mypy-boto3-mwaa (>=1.33.0,<1.34.0)", "mypy-boto3-neptune (>=1.33.0,<1.34.0)", "mypy-boto3-neptunedata (>=1.33.0,<1.34.0)", "mypy-boto3-network-firewall (>=1.33.0,<1.34.0)", "mypy-boto3-networkmanager (>=1.33.0,<1.34.0)", "mypy-boto3-nimble (>=1.33.0,<1.34.0)", "mypy-boto3-oam (>=1.33.0,<1.34.0)", "mypy-boto3-omics (>=1.33.0,<1.34.0)", "mypy-boto3-opensearch (>=1.33.0,<1.34.0)", "mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)", "mypy-boto3-opsworks (>=1.33.0,<1.34.0)", "mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)", "mypy-boto3-organizations (>=1.33.0,<1.34.0)", "mypy-boto3-osis (>=1.33.0,<1.34.0)", "mypy-boto3-outposts (>=1.33.0,<1.34.0)", "mypy-boto3-panorama (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)", "mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)", "mypy-boto3-personalize (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-events 
(>=1.33.0,<1.34.0)", "mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-pi (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)", "mypy-boto3-pipes (>=1.33.0,<1.34.0)", "mypy-boto3-polly (>=1.33.0,<1.34.0)", "mypy-boto3-pricing (>=1.33.0,<1.34.0)", "mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)", "mypy-boto3-proton (>=1.33.0,<1.34.0)", "mypy-boto3-qbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-qconnect (>=1.33.0,<1.34.0)", "mypy-boto3-qldb (>=1.33.0,<1.34.0)", "mypy-boto3-qldb-session (>=1.33.0,<1.34.0)", "mypy-boto3-quicksight (>=1.33.0,<1.34.0)", "mypy-boto3-ram (>=1.33.0,<1.34.0)", "mypy-boto3-rbin (>=1.33.0,<1.34.0)", "mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-rds-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-rekognition (>=1.33.0,<1.34.0)", "mypy-boto3-repostspace (>=1.33.0,<1.34.0)", "mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)", "mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)", "mypy-boto3-resource-groups (>=1.33.0,<1.34.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)", "mypy-boto3-robomaker (>=1.33.0,<1.34.0)", "mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)", "mypy-boto3-route53 (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-readiness (>=1.33.0,<1.34.0)", "mypy-boto3-route53domains (>=1.33.0,<1.34.0)", "mypy-boto3-route53resolver (>=1.33.0,<1.34.0)", "mypy-boto3-rum (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-s3control (>=1.33.0,<1.34.0)", "mypy-boto3-s3outposts (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-savingsplans (>=1.33.0,<1.34.0)", "mypy-boto3-scheduler (>=1.33.0,<1.34.0)", "mypy-boto3-schemas (>=1.33.0,<1.34.0)", "mypy-boto3-sdb (>=1.33.0,<1.34.0)", "mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)", "mypy-boto3-securityhub (>=1.33.0,<1.34.0)", "mypy-boto3-securitylake (>=1.33.0,<1.34.0)", "mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)", "mypy-boto3-service-quotas (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)", "mypy-boto3-servicediscovery (>=1.33.0,<1.34.0)", "mypy-boto3-ses (>=1.33.0,<1.34.0)", "mypy-boto3-sesv2 (>=1.33.0,<1.34.0)", "mypy-boto3-shield (>=1.33.0,<1.34.0)", "mypy-boto3-signer (>=1.33.0,<1.34.0)", "mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)", "mypy-boto3-sms (>=1.33.0,<1.34.0)", "mypy-boto3-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)", "mypy-boto3-snowball (>=1.33.0,<1.34.0)", "mypy-boto3-sns (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)", "mypy-boto3-ssm (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)", "mypy-boto3-sso (>=1.33.0,<1.34.0)", "mypy-boto3-sso-admin (>=1.33.0,<1.34.0)", "mypy-boto3-sso-oidc 
(>=1.33.0,<1.34.0)", "mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)", "mypy-boto3-storagegateway (>=1.33.0,<1.34.0)", "mypy-boto3-sts (>=1.33.0,<1.34.0)", "mypy-boto3-support (>=1.33.0,<1.34.0)", "mypy-boto3-support-app (>=1.33.0,<1.34.0)", "mypy-boto3-swf (>=1.33.0,<1.34.0)", "mypy-boto3-synthetics (>=1.33.0,<1.34.0)", "mypy-boto3-textract (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-query (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-write (>=1.33.0,<1.34.0)", "mypy-boto3-tnb (>=1.33.0,<1.34.0)", "mypy-boto3-transcribe (>=1.33.0,<1.34.0)", "mypy-boto3-transfer (>=1.33.0,<1.34.0)", "mypy-boto3-translate (>=1.33.0,<1.34.0)", "mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)", "mypy-boto3-voice-id (>=1.33.0,<1.34.0)", "mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)", "mypy-boto3-waf (>=1.33.0,<1.34.0)", "mypy-boto3-waf-regional (>=1.33.0,<1.34.0)", "mypy-boto3-wafv2 (>=1.33.0,<1.34.0)", "mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)", "mypy-boto3-wisdom (>=1.33.0,<1.34.0)", "mypy-boto3-workdocs (>=1.33.0,<1.34.0)", "mypy-boto3-worklink (>=1.33.0,<1.34.0)", "mypy-boto3-workmail (>=1.33.0,<1.34.0)", "mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)", "mypy-boto3-xray (>=1.33.0,<1.34.0)"] amp = ["mypy-boto3-amp (>=1.33.0,<1.34.0)"] amplify = ["mypy-boto3-amplify (>=1.33.0,<1.34.0)"] amplifybackend = ["mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)"] @@ -218,7 +218,7 @@ bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)"] bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)"] bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)"] billingconductor = ["mypy-boto3-billingconductor (>=1.33.0,<1.34.0)"] -boto3 = ["boto3 (==1.33.2)", "botocore (==1.33.2)"] +boto3 = ["boto3 (==1.33.5)", "botocore (==1.33.5)"] braket = ["mypy-boto3-braket (>=1.33.0,<1.34.0)"] budgets = ["mypy-boto3-budgets (>=1.33.0,<1.34.0)"] ce = ["mypy-boto3-ce (>=1.33.0,<1.34.0)"] @@ -229,6 +229,7 @@ chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)"] chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)"] chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)"] cleanrooms = ["mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.33.0,<1.34.0)"] cloud9 = ["mypy-boto3-cloud9 (>=1.33.0,<1.34.0)"] cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)"] clouddirectory = ["mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)"] @@ -397,7 +398,9 @@ machinelearning = ["mypy-boto3-machinelearning (>=1.33.0,<1.34.0)"] macie2 = ["mypy-boto3-macie2 (>=1.33.0,<1.34.0)"] managedblockchain = ["mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)"] managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.33.0,<1.34.0)"] marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.33.0,<1.34.0)"] marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)"] marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)"] mediaconnect = ["mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)"] @@ -554,13 +557,13 @@ xray = ["mypy-boto3-xray (>=1.33.0,<1.34.0)"] [[package]] name = "botocore" -version = "1.33.2" +version = "1.33.5" 
description = "Low-level, data-driven core of boto 3." optional = false python-versions = ">= 3.7" files = [ - {file = "botocore-1.33.2-py3-none-any.whl", hash = "sha256:5c46b7e8450efbf7ddc2a0016eee7225a5564583122e25a20ca92a29a105225c"}, - {file = "botocore-1.33.2.tar.gz", hash = "sha256:16a30faac6e6f17961c009defb74ab1a3508b8abc58fab98e7cf96af0d91ea84"}, + {file = "botocore-1.33.5-py3-none-any.whl", hash = "sha256:c165207fb33e8352191d6a2770bce9f9bf01c62f5149824c4295d7f49bf96746"}, + {file = "botocore-1.33.5.tar.gz", hash = "sha256:aa4a5c7cf78a403280e50daba8966479e23577b4a5c20165f71fab7a9b405e99"}, ] [package.dependencies] @@ -3878,13 +3881,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.8.1" +version = "0.8.2" description = "An Amazon S3 Transfer Manager" optional = false python-versions = ">= 3.7" files = [ - {file = "s3transfer-0.8.1-py3-none-any.whl", hash = "sha256:d1c52af7bceca1650d0f27728b29bb4925184aead7b55bccacf893b79a108604"}, - {file = "s3transfer-0.8.1.tar.gz", hash = "sha256:e6cafd5643fc7b44fddfba1e5b521005675b0e07533ddad958a3554bc87d7330"}, + {file = "s3transfer-0.8.2-py3-none-any.whl", hash = "sha256:c9e56cbe88b28d8e197cf841f1f0c130f246595e77ae5b5a05b69fe7cb83de76"}, + {file = "s3transfer-0.8.2.tar.gz", hash = "sha256:368ac6876a9e9ed91f6bc86581e319be08188dc60d50e0d56308ed5765446283"}, ] [package.dependencies] From 8adaeec878d7aac4ffe22b315166e72c39e12b7f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 18:29:27 +0000 Subject: [PATCH 197/262] Bump alembic from 1.12.1 to 1.13.0 (#1545) --- poetry.lock | 40 ++++++++++++++++++++++++++++++++++------ 1 file changed, 34 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index 28196023df..7e57f40b71 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,25 +2,25 @@ [[package]] name = "alembic" -version = "1.12.1" +version = "1.13.0" description = "A database migration tool for SQLAlchemy." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "alembic-1.12.1-py3-none-any.whl", hash = "sha256:47d52e3dfb03666ed945becb723d6482e52190917fdb47071440cfdba05d92cb"}, - {file = "alembic-1.12.1.tar.gz", hash = "sha256:bca5877e9678b454706347bc10b97cb7d67f300320fa5c3a94423e8266e2823f"}, + {file = "alembic-1.13.0-py3-none-any.whl", hash = "sha256:a23974ea301c3ee52705db809c7413cecd165290c6679b9998dd6c74342ca23a"}, + {file = "alembic-1.13.0.tar.gz", hash = "sha256:ab4b3b94d2e1e5f81e34be8a9b7b7575fc9dd5398fccb0bef351ec9b14872623"}, ] [package.dependencies] +"backports.zoneinfo" = {version = "*", optional = true, markers = "python_version < \"3.9\" and extra == \"tz\""} importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} importlib-resources = {version = "*", markers = "python_version < \"3.9\""} Mako = "*" -python-dateutil = {version = "*", optional = true, markers = "extra == \"tz\""} SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" [package.extras] -tz = ["python-dateutil"] +tz = ["backports.zoneinfo"] [[package]] name = "attrs" @@ -72,6 +72,34 @@ pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] +[[package]] +name = "backports-zoneinfo" +version = "0.2.1" +description = "Backport of the standard library zoneinfo module" +optional = false +python-versions = ">=3.6" +files = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = 
"sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, +] + +[package.extras] +tzdata = ["tzdata"] + [[package]] name = "bcrypt" version = "4.1.1" From 487c97fea75392d92b1db20238c2e1ccdaf5efc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Dec 2023 18:29:51 +0000 Subject: [PATCH 198/262] Bump pyfakefs from 5.3.1 to 5.3.2 (#1544) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 7e57f40b71..ce7bbccd16 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3043,13 +3043,13 @@ email = ["email-validator (>=1.0.3)"] [[package]] name = "pyfakefs" -version = "5.3.1" +version = "5.3.2" description = "pyfakefs implements a fake file system that mocks the Python file system modules." optional = false python-versions = ">=3.7" files = [ - {file = "pyfakefs-5.3.1-py3-none-any.whl", hash = "sha256:dbe268b70da64f1506baf7d7a2a2248b96b56d28d61a68859272b5fdc321c39e"}, - {file = "pyfakefs-5.3.1.tar.gz", hash = "sha256:dd1fb374039fadccf35d3f3df7aa5d239482e0650dcd240e053d3b9e78740918"}, + {file = "pyfakefs-5.3.2-py3-none-any.whl", hash = "sha256:5a62194cfa24542a3c9080b66ce65d78b2e977957edfd3cd6fe98e8349bcca32"}, + {file = "pyfakefs-5.3.2.tar.gz", hash = "sha256:a83776a3c1046d4d103f2f530029aa6cdff5f0386dffd59c15ee16926135493c"}, ] [[package]] From 14aac2423fac99272b4715af94a4c3a701868471 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 1 Dec 2023 16:05:27 -0400 Subject: [PATCH 199/262] Add collection name and distributor facets to the crawlable feed. (#1546) --- api/controller.py | 8 ++- api/lanes.py | 10 +++- tests/api/test_controller_crawlfeed.py | 12 +++- tests/api/test_lanes.py | 79 +++++++++++++++++++++++--- 4 files changed, 94 insertions(+), 15 deletions(-) diff --git a/api/controller.py b/api/controller.py index 25d00fbdf2..bf3a2dbdaf 100644 --- a/api/controller.py +++ b/api/controller.py @@ -1047,11 +1047,13 @@ def _crawlable_feed( if isinstance(search_engine, ProblemDetail): return search_engine - annotator = annotator or self.manager.annotator(worklist) - # A crawlable feed has only one possible set of Facets, # so library settings are irrelevant. 
- facets = CrawlableFacets.default(None) + facets = self.manager.load_facets_from_request( + worklist=worklist, + base_class=CrawlableFacets, + ) + annotator = annotator or self.manager.annotator(worklist, facets=facets) return feed_class.page( _db=self._db, diff --git a/api/lanes.py b/api/lanes.py index 2827469049..c3a8b74680 100644 --- a/api/lanes.py +++ b/api/lanes.py @@ -1361,7 +1361,15 @@ class CrawlableFacets(Facets): @classmethod def available_facets(cls, config, facet_group_name): - return [cls.SETTINGS[facet_group_name]] + facets = [cls.SETTINGS[facet_group_name]] + + if ( + facet_group_name == Facets.DISTRIBUTOR_FACETS_GROUP_NAME + or facet_group_name == Facets.COLLECTION_NAME_FACETS_GROUP_NAME + ) and config is not None: + facets.extend(config.enabled_facets(facet_group_name)) + + return facets @classmethod def default_facet(cls, config, facet_group_name): diff --git a/tests/api/test_controller_crawlfeed.py b/tests/api/test_controller_crawlfeed.py index 03b7994b44..a868ac55ef 100644 --- a/tests/api/test_controller_crawlfeed.py +++ b/tests/api/test_controller_crawlfeed.py @@ -242,7 +242,7 @@ def works(self, _db, facets, pagination, *args, **kwargs): # Good pagination data -> feed_class.page() is called. sort_key = ["sort", "pagination", "key"] - with circulation_fixture.app.test_request_context( + with circulation_fixture.request_context_with_library( "/?size=23&key=%s" % json.dumps(sort_key) ): response = circulation_fixture.manager.opds_feeds._crawlable_feed( @@ -288,7 +288,7 @@ def works(self, _db, facets, pagination, *args, **kwargs): # If a custom Annotator is passed in to _crawlable_feed, it's # propagated to the page() call. mock_annotator = object() - with circulation_fixture.app.test_request_context("/"): + with circulation_fixture.request_context_with_library("/"): response = circulation_fixture.manager.opds_feeds._crawlable_feed( annotator=mock_annotator, **in_kwargs ) @@ -306,3 +306,11 @@ def works(self, _db, facets, pagination, *args, **kwargs): # There is one entry with the expected title. [entry] = feed["entries"] assert entry["title"] == work.title + + # The feed has the expected facet groups. 
+ facet_groups = { + l["facetgroup"] + for l in feed["feed"]["links"] + if l["rel"] == "http://opds-spec.org/facet" + } + assert facet_groups == {"Collection Name", "Distributor"} diff --git a/tests/api/test_lanes.py b/tests/api/test_lanes.py index 87822ded17..9032b1d8ca 100644 --- a/tests/api/test_lanes.py +++ b/tests/api/test_lanes.py @@ -1,4 +1,5 @@ from collections import Counter +from typing import List from unittest.mock import MagicMock, patch import pytest @@ -31,7 +32,14 @@ from core.external_search import Filter from core.lane import DefaultSortOrderFacets, Facets, FeaturedFacets, Lane, WorkList from core.metadata_layer import ContributorData, Metadata -from core.model import Contributor, DataSource, Edition, ExternalIntegration, create +from core.model import ( + Contributor, + DataSource, + Edition, + ExternalIntegration, + Library, + create, +) from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture from tests.fixtures.search import ExternalSearchFixtureFake @@ -908,14 +916,67 @@ def test_overview_facets(self, lane_fixture: LaneFixture): class TestCrawlableFacets: def test_default(self, db: DatabaseTransactionFixture): facets = CrawlableFacets.default(db.default_library()) - assert CrawlableFacets.COLLECTION_FULL == facets.collection - assert CrawlableFacets.AVAILABLE_ALL == facets.availability - assert CrawlableFacets.ORDER_LAST_UPDATE == facets.order - assert False == facets.order_ascending - - # There's only one enabled value for each facet group. - for group in facets.enabled_facets: - assert 1 == len(group) + assert facets.collection == CrawlableFacets.COLLECTION_FULL + assert facets.availability == CrawlableFacets.AVAILABLE_ALL + assert facets.order == CrawlableFacets.ORDER_LAST_UPDATE + assert facets.order_ascending is False + + [ + order, + availability, + collection, + distributor, + collectionName, + ] = facets.enabled_facets + + # The default facets are the only ones enabled. + for facet in [order, availability, collection]: + assert len(facet) == 1 + + # Except for distributor and collectionName, which have the default + # and data for each collection in the library. 
+ for facet in [distributor, collectionName]: + assert len(facet) == 1 + len(db.default_library().collections) + + @pytest.mark.parametrize( + "group_name, expected", + [ + (Facets.ORDER_FACET_GROUP_NAME, Facets.ORDER_LAST_UPDATE), + (Facets.AVAILABILITY_FACET_GROUP_NAME, Facets.AVAILABLE_ALL), + (Facets.COLLECTION_FACET_GROUP_NAME, Facets.COLLECTION_FULL), + (Facets.DISTRIBUTOR_FACETS_GROUP_NAME, Facets.DISTRIBUTOR_ALL), + (Facets.COLLECTION_NAME_FACETS_GROUP_NAME, Facets.COLLECTION_NAME_ALL), + ], + ) + def test_available_none(self, group_name: str, expected: List[str]) -> None: + assert CrawlableFacets.available_facets(None, group_name) == [expected] + + @pytest.mark.parametrize( + "group_name, expected", + [ + (Facets.ORDER_FACET_GROUP_NAME, [Facets.ORDER_LAST_UPDATE]), + (Facets.AVAILABILITY_FACET_GROUP_NAME, [Facets.AVAILABLE_ALL]), + (Facets.COLLECTION_FACET_GROUP_NAME, [Facets.COLLECTION_FULL]), + (Facets.DISTRIBUTOR_FACETS_GROUP_NAME, [Facets.DISTRIBUTOR_ALL, "foo"]), + ( + Facets.COLLECTION_NAME_FACETS_GROUP_NAME, + [Facets.COLLECTION_NAME_ALL, "foo"], + ), + ], + ) + def test_available(self, group_name: str, expected: List[str]): + mock = MagicMock(spec=Library) + mock.enabled_facets = MagicMock(return_value=["foo"]) + + assert CrawlableFacets.available_facets(mock, group_name) == expected + + if group_name in [ + Facets.DISTRIBUTOR_FACETS_GROUP_NAME, + Facets.COLLECTION_NAME_FACETS_GROUP_NAME, + ]: + assert mock.enabled_facets.call_count == 1 + else: + assert mock.enabled_facets.call_count == 0 class TestCrawlableCollectionBasedLane: From cfd6ebc770598e8dc3602888a1071acc6ca1f336 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 4 Dec 2023 11:11:20 -0400 Subject: [PATCH 200/262] Prune old migrations. (#1540) --- .../versions/20230213_6f96516c7a7b_initial.py | 46 ---- ...e32b5649_remove_admin_credential_column.py | 24 -- ...0230412_dac99ae0c6fd_integration_status.py | 61 ----- ...9f2ae7_rename_elasticsearch_integration.py | 26 -- ...7_5dcbc92c20b2_update_sirsi_auth_config.py | 55 ----- ...remove_import_coverage_records_without_.py | 24 -- ...3f76d649_add_integration_configurations.py | 222 ------------------ ...5ebe026c_migrate_millenium_apis_to_post.py | 131 ----------- .../20230525_0a1c9c3f5dd2_revert_pr_980.py | 109 --------- ...ff8595_migrate_license_integrations_to_.py | 138 ----------- ..._b883671b7bc5_add_the_license_type_goal.py | 75 ------ ..._opds_for_distributors_unlimited_access.py | 58 ----- ...f9c6bded6_remove_adobe_vendor_id_tables.py | 112 --------- ...6_04bbd03bf9f1_migrate_library_key_pair.py | 69 ------ ...30706_c471f553249b_migrate_library_logo.py | 66 ------ ...0711_3d380776c1bf_migrate_announcements.py | 87 ------- ...9_b3749bac3e55_migrate_library_settings.py | 57 ----- ..._2f1a51aa0ee8_remove_integration_client.py | 122 ---------- ...92c8e0c89f8_audiobook_playtime_tracking.py | 116 --------- ...f58829fc1a_add_discovery_service_tables.py | 187 --------------- ...1f_remove_self_hosted_from_licensepools.py | 31 --- ...c6fb2b9_type_coerce_collection_settings.py | 111 --------- ...a80073d5_remove_externalintegrationlink.py | 90 ------- ...8f391d_loan_and_hold_notification_times.py | 34 --- ...da_cleanup_google_anaytics_integrations.py | 7 +- tests/migration/conftest.py | 24 +- 26 files changed, 14 insertions(+), 2068 deletions(-) delete mode 100644 alembic/versions/20230213_6f96516c7a7b_initial.py delete mode 100644 alembic/versions/20230220_0c2fe32b5649_remove_admin_credential_column.py delete mode 100644 
alembic/versions/20230412_dac99ae0c6fd_integration_status.py delete mode 100644 alembic/versions/20230424_3ee5b99f2ae7_rename_elasticsearch_integration.py delete mode 100644 alembic/versions/20230427_5dcbc92c20b2_update_sirsi_auth_config.py delete mode 100644 alembic/versions/20230501_f9985f6b7767_remove_import_coverage_records_without_.py delete mode 100644 alembic/versions/20230510_a9ed3f76d649_add_integration_configurations.py delete mode 100644 alembic/versions/20230512_5a425ebe026c_migrate_millenium_apis_to_post.py delete mode 100644 alembic/versions/20230525_0a1c9c3f5dd2_revert_pr_980.py delete mode 100644 alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py delete mode 100644 alembic/versions/20230531_b883671b7bc5_add_the_license_type_goal.py delete mode 100644 alembic/versions/20230606_28717fc6e50f_opds_for_distributors_unlimited_access.py delete mode 100644 alembic/versions/20230628_f08f9c6bded6_remove_adobe_vendor_id_tables.py delete mode 100644 alembic/versions/20230706_04bbd03bf9f1_migrate_library_key_pair.py delete mode 100644 alembic/versions/20230706_c471f553249b_migrate_library_logo.py delete mode 100644 alembic/versions/20230711_3d380776c1bf_migrate_announcements.py delete mode 100644 alembic/versions/20230719_b3749bac3e55_migrate_library_settings.py delete mode 100644 alembic/versions/20230726_2f1a51aa0ee8_remove_integration_client.py delete mode 100644 alembic/versions/20230728_892c8e0c89f8_audiobook_playtime_tracking.py delete mode 100644 alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py delete mode 100644 alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py delete mode 100644 alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py delete mode 100644 alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py delete mode 100644 alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py diff --git a/alembic/versions/20230213_6f96516c7a7b_initial.py b/alembic/versions/20230213_6f96516c7a7b_initial.py deleted file mode 100644 index fd9aaca19d..0000000000 --- a/alembic/versions/20230213_6f96516c7a7b_initial.py +++ /dev/null @@ -1,46 +0,0 @@ -"""initial - -Revision ID: 6f96516c7a7b -Revises: -Create Date: 2022-10-06 06:50:45.512958+00:00 - -""" - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "6f96516c7a7b" -down_revision = None -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # Remove some tables that are hanging around in some instances - # These have been removed from code some time ago - op.execute( - "ALTER TABLE IF EXISTS libraryalias DROP CONSTRAINT IF EXISTS ix_libraryalias_language" - ) - op.execute( - "ALTER TABLE IF EXISTS libraryalias DROP CONSTRAINT IF EXISTS ix_libraryalias_library_id" - ) - op.execute( - "ALTER TABLE IF EXISTS libraryalias DROP CONSTRAINT IF EXISTS ix_libraryalias_name" - ) - op.execute("DROP TABLE IF EXISTS libraryalias") - - op.execute( - "ALTER TABLE IF EXISTS complaints DROP CONSTRAINT IF EXISTS ix_complaints_license_pool_id" - ) - op.execute( - "ALTER TABLE IF EXISTS complaints DROP CONSTRAINT IF EXISTS ix_complaints_source" - ) - op.execute( - "ALTER TABLE IF EXISTS complaints DROP CONSTRAINT IF EXISTS ix_complaints_type" - ) - op.execute("DROP TABLE IF EXISTS complaints") - - -def downgrade() -> None: - # No need to re-add these tables, since they are long gone - ... 
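For context on what "pruning" a migration history like this typically involves: once the old revision files are deleted, the oldest surviving revision becomes the new base of the Alembic history, so its down_revision is re-pointed to None, and any test fixtures that referenced the removed revision ids (such as tests/migration/conftest.py in the diffstat above) are updated to match. The sketch below is a minimal, hypothetical illustration of that pattern, not code taken from this patch; the revision id, table name, and column are placeholders.

    """Example: oldest surviving revision after a prune (hypothetical).

    Revision ID: aaaaaaaaaaaa
    Revises:
    Create Date: 2023-12-04 00:00:00+00:00
    """
    import sqlalchemy as sa

    from alembic import op

    # revision identifiers, used by Alembic.
    revision = "aaaaaaaaaaaa"
    # Before the prune this pointed at a now-deleted revision; after the
    # prune it is the root of the migration history.
    down_revision = None
    branch_labels = None
    depends_on = None


    def upgrade() -> None:
        # The schema work done by the deleted revisions is assumed to already
        # exist in every live database, so the new base only carries changes
        # made after the prune point.
        op.add_column(
            "example_table", sa.Column("example_column", sa.Unicode(), nullable=True)
        )


    def downgrade() -> None:
        op.drop_column("example_table", "example_column")

Under this pattern, fresh deployments typically start from the new base (or from a schema created directly by the ORM), while existing deployments are expected to already be at or past the prune point before upgrading; the hunks that follow simply delete the superseded revision files.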
diff --git a/alembic/versions/20230220_0c2fe32b5649_remove_admin_credential_column.py b/alembic/versions/20230220_0c2fe32b5649_remove_admin_credential_column.py deleted file mode 100644 index bbd5804de5..0000000000 --- a/alembic/versions/20230220_0c2fe32b5649_remove_admin_credential_column.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Remove admin.credential column - -Revision ID: 0c2fe32b5649 -Revises: 6f96516c7a7b -Create Date: 2023-02-20 12:36:15.204519+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "0c2fe32b5649" -down_revision = "6f96516c7a7b" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_column("admins", "credential") - - -def downgrade() -> None: - op.add_column("admins", sa.Column("credential", sa.Unicode(), nullable=True)) diff --git a/alembic/versions/20230412_dac99ae0c6fd_integration_status.py b/alembic/versions/20230412_dac99ae0c6fd_integration_status.py deleted file mode 100644 index 54f58f54f0..0000000000 --- a/alembic/versions/20230412_dac99ae0c6fd_integration_status.py +++ /dev/null @@ -1,61 +0,0 @@ -"""Integration Status - -Revision ID: dac99ae0c6fd -Revises: 0c2fe32b5649 -Create Date: 2023-04-12 06:58:21.560292+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "dac99ae0c6fd" -down_revision = "0c2fe32b5649" -branch_labels = None -depends_on = None - -status_enum = sa.Enum("green", "red", name="external_integration_status") - - -def upgrade() -> None: - # ### commands auto generated by Alembic ### - op.create_table( - "externalintegrationerrors", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("time", sa.DateTime(), nullable=True), - sa.Column("error", sa.Unicode(), nullable=True), - sa.Column("external_integration_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["external_integration_id"], - ["externalintegrations.id"], - name="fk_error_externalintegrations_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id"), - ) - - status_enum.create(op.get_bind()) - op.add_column( - "externalintegrations", - sa.Column( - "status", - status_enum, - server_default="green", - nullable=True, - ), - ) - op.add_column( - "externalintegrations", - sa.Column("last_status_update", sa.DateTime(), nullable=True), - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic ### - op.drop_table("externalintegrationerrors") - op.drop_column("externalintegrations", "last_status_update") - op.drop_column("externalintegrations", "status") - status_enum.drop(op.get_bind()) - # ### end Alembic commands ### diff --git a/alembic/versions/20230424_3ee5b99f2ae7_rename_elasticsearch_integration.py b/alembic/versions/20230424_3ee5b99f2ae7_rename_elasticsearch_integration.py deleted file mode 100644 index 2fbaa47c11..0000000000 --- a/alembic/versions/20230424_3ee5b99f2ae7_rename_elasticsearch_integration.py +++ /dev/null @@ -1,26 +0,0 @@ -"""Rename elasticsearch integration - -Revision ID: 3ee5b99f2ae7 -Revises: dac99ae0c6fd -Create Date: 2023-04-24 06:24:45.721475+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "3ee5b99f2ae7" -down_revision = "dac99ae0c6fd" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.execute( - "UPDATE externalintegrations SET protocol='Opensearch' where protocol='Elasticsearch'" - ) - - -def downgrade() -> None: - op.execute( - "UPDATE externalintegrations SET protocol='Elasticsearch' where protocol='Opensearch'" - ) diff --git a/alembic/versions/20230427_5dcbc92c20b2_update_sirsi_auth_config.py b/alembic/versions/20230427_5dcbc92c20b2_update_sirsi_auth_config.py deleted file mode 100644 index 8b767a808b..0000000000 --- a/alembic/versions/20230427_5dcbc92c20b2_update_sirsi_auth_config.py +++ /dev/null @@ -1,55 +0,0 @@ -"""update sirsi auth config - -Revision ID: 5dcbc92c20b2 -Revises: 3ee5b99f2ae7 -Create Date: 2023-04-27 22:53:36.584426+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "5dcbc92c20b2" -down_revision = "3ee5b99f2ae7" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # Update the SirsiDynix auth config to use library_identifier_restriction - # instead of the old LIBRARY_PREFIX setting. - # This migration leaves the old LIBRARY_PREFIX setting in place, but unused - # in case we need to roll this back. We can clean up the old setting in a - # later migration. - connection = op.get_bind() - settings = connection.execute( - "select ei.id, cs.library_id, cs.value from externalintegrations as ei join " - "configurationsettings cs on ei.id = cs.external_integration_id " - "where ei.protocol = 'api.sirsidynix_authentication_provider' and " - "ei.goal = 'patron_auth' and cs.key = 'LIBRARY_PREFIX'" - ) - - for setting in settings: - connection.execute( - "UPDATE configurationsettings SET value = (%s) " - "WHERE external_integration_id = (%s) and library_id = (%s) " - "and key = 'library_identifier_restriction'", - (setting.value, setting.id, setting.library_id), - ) - connection.execute( - "UPDATE configurationsettings SET value = 'patronType' " - "WHERE external_integration_id = (%s) and library_id = (%s) " - "and key = 'library_identifier_field'", - (setting.id, setting.library_id), - ) - connection.execute( - "UPDATE configurationsettings SET value = 'prefix' " - "WHERE external_integration_id = (%s) and library_id = (%s) " - "and key = 'library_identifier_restriction_type'", - (setting.id, setting.library_id), - ) - - -def downgrade() -> None: - # These updated settings shouldn't cause any issues if left in place - # when downgrading so we leave them alone. - pass diff --git a/alembic/versions/20230501_f9985f6b7767_remove_import_coverage_records_without_.py b/alembic/versions/20230501_f9985f6b7767_remove_import_coverage_records_without_.py deleted file mode 100644 index 2e827d11b5..0000000000 --- a/alembic/versions/20230501_f9985f6b7767_remove_import_coverage_records_without_.py +++ /dev/null @@ -1,24 +0,0 @@ -"""Remove import coverage records without collections - -Revision ID: f9985f6b7767 -Revises: 5dcbc92c20b2 -Create Date: 2023-05-01 10:07:45.737475+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "f9985f6b7767" -down_revision = "5dcbc92c20b2" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.execute( - "DELETE FROM coveragerecords WHERE collection_id IS NULL AND operation='import'" - ) - - -def downgrade() -> None: - pass diff --git a/alembic/versions/20230510_a9ed3f76d649_add_integration_configurations.py b/alembic/versions/20230510_a9ed3f76d649_add_integration_configurations.py deleted file mode 100644 index e5545be1aa..0000000000 --- a/alembic/versions/20230510_a9ed3f76d649_add_integration_configurations.py +++ /dev/null @@ -1,222 +0,0 @@ -"""Add integration_configurations - -Revision ID: a9ed3f76d649 -Revises: 5a425ebe026c -Create Date: 2023-05-10 19:50:47.458800+00:00 - -""" -import json -from collections import defaultdict -from typing import Dict, Tuple, Type, TypeVar - -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from sqlalchemy.engine import Connection, Row - -from alembic import op -from api.authentication.base import AuthenticationProvider -from api.integration.registry.patron_auth import PatronAuthRegistry -from core.integration.settings import ( - BaseSettings, - ConfigurationFormItemType, - FormFieldInfo, -) -from core.model import json_serializer - -# revision identifiers, used by Alembic. -revision = "a9ed3f76d649" -down_revision = "5a425ebe026c" -branch_labels = None -depends_on = None - - -def _create_tables() -> None: - op.create_table( - "integration_configurations", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("protocol", sa.Unicode(), nullable=False), - sa.Column("goal", sa.Enum("PATRON_AUTH_GOAL", name="goals"), nullable=False), - sa.Column("name", sa.Unicode(), nullable=False), - sa.Column("settings", postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.Column( - "self_test_results", postgresql.JSONB(astext_type=sa.Text()), nullable=False - ), - sa.Column("status", sa.Enum("RED", "GREEN", name="status"), nullable=False), - sa.Column("last_status_update", sa.DateTime(), nullable=True), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint("name"), - ) - op.create_index( - op.f("ix_integration_configurations_goal"), - "integration_configurations", - ["goal"], - unique=False, - ) - op.create_table( - "integration_errors", - sa.Column("id", sa.Integer(), nullable=False), - sa.Column("time", sa.DateTime(), nullable=True), - sa.Column("error", sa.Unicode(), nullable=True), - sa.Column("integration_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["integration_id"], - ["integration_configurations.id"], - name="fk_integration_error_integration_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id"), - ) - op.create_table( - "integration_library_configurations", - sa.Column("parent_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("settings", postgresql.JSONB(astext_type=sa.Text()), nullable=False), - sa.ForeignKeyConstraint(["library_id"], ["libraries.id"], ondelete="CASCADE"), - sa.ForeignKeyConstraint( - ["parent_id"], ["integration_configurations.id"], ondelete="CASCADE" - ), - sa.PrimaryKeyConstraint("parent_id", "library_id"), - ) - - -T = TypeVar("T", bound=BaseSettings) - - -def _validate_and_load_settings( - settings_class: Type[T], settings_dict: Dict[str, str] -) -> T: - aliases = { - f.alias: f.name - for f in settings_class.__fields__.values() - if f.alias is not None - } - parsed_settings_dict = {} - for key, setting in settings_dict.items(): - if key in aliases: - key = aliases[key] - 
field = settings_class.__fields__.get(key) - if field is None or not isinstance(field.field_info, FormFieldInfo): - continue - config_item = field.field_info.form - if ( - config_item.type == ConfigurationFormItemType.LIST - or config_item.type == ConfigurationFormItemType.MENU - ): - parsed_settings_dict[key] = json.loads(setting) - else: - parsed_settings_dict[key] = setting - return settings_class(**parsed_settings_dict) - - -def _migrate_external_integration( - connection: Connection, - integration: Row, - protocol_class: Type[AuthenticationProvider], -) -> Tuple[int, Dict[str, Dict[str, str]]]: - settings = connection.execute( - "select cs.library_id, cs.key, cs.value from configurationsettings cs " - "where cs.external_integration_id = (%s)", - (integration.id,), - ) - settings_dict = {} - library_settings: Dict[str, Dict[str, str]] = defaultdict(dict) - self_test_results = json_serializer({}) - for setting in settings: - if not setting.value: - continue - if setting.key == "self_test_results": - self_test_results = setting.value - continue - if setting.library_id: - library_settings[setting.library_id][setting.key] = setting.value - else: - settings_dict[setting.key] = setting.value - - # Load and validate the settings before storing them in the database. - settings_class = protocol_class.settings_class() - settings_obj = _validate_and_load_settings(settings_class, settings_dict) - integration_configuration = connection.execute( - "insert into integration_configurations " - "(protocol, goal, name, settings, self_test_results, status) " - "values (%s, 'PATRON_AUTH_GOAL', %s, %s, %s, 'GREEN')" - "returning id", - ( - integration.protocol, - integration.name, - json_serializer(settings_obj.dict()), - self_test_results, - ), - ).fetchone() - assert integration_configuration is not None - return integration_configuration[0], library_settings - - -def _migrate_library_settings( - connection: Connection, - integration_id: int, - library_id: int, - library_settings: Dict[str, str], - protocol_class: Type[AuthenticationProvider], -) -> None: - library_settings_class = protocol_class.library_settings_class() - library_settings_obj = _validate_and_load_settings( - library_settings_class, library_settings - ) - connection.execute( - "insert into integration_library_configurations " - "(parent_id, library_id, settings) " - "values (%s, %s, %s)", - ( - integration_id, - library_id, - json_serializer(library_settings_obj.dict()), - ), - ) - - -def _migrate_settings() -> None: - connection = op.get_bind() - external_integrations = connection.execute( - "select ei.id, ei.protocol, ei.name from externalintegrations ei " - "where ei.goal = 'patron_auth'" - ) - - patron_auth_registry = PatronAuthRegistry() - for external_integration in external_integrations: - protocol_class = patron_auth_registry[external_integration.protocol] - integration_id, library_settings = _migrate_external_integration( - connection, external_integration, protocol_class - ) - external_integration_library = connection.execute( - "select library_id from externalintegrations_libraries where externalintegration_id = %s", - (external_integration.id,), - ) - for library in external_integration_library: - _migrate_library_settings( - connection, - integration_id, - library.library_id, - library_settings[library.library_id], - protocol_class, - ) - - -def upgrade() -> None: - # Add new tables for tracking integration configurations and errors. 
- _create_tables() - - # Migrate settings from the configurationsettings table into integration_configurations. - # We leave the existing settings in the table, but they will no longer be used. - _migrate_settings() - - -def downgrade() -> None: - op.drop_table("integration_library_configurations") - op.drop_table("integration_errors") - op.drop_index( - op.f("ix_integration_configurations_goal"), - table_name="integration_configurations", - ) - op.drop_table("integration_configurations") - sa.Enum(name="goals").drop(op.get_bind(), checkfirst=False) - sa.Enum(name="status").drop(op.get_bind(), checkfirst=False) diff --git a/alembic/versions/20230512_5a425ebe026c_migrate_millenium_apis_to_post.py b/alembic/versions/20230512_5a425ebe026c_migrate_millenium_apis_to_post.py deleted file mode 100644 index e85745be34..0000000000 --- a/alembic/versions/20230512_5a425ebe026c_migrate_millenium_apis_to_post.py +++ /dev/null @@ -1,131 +0,0 @@ -"""Migrate millenium APIs to POST - -Revision ID: 5a425ebe026c -Revises: f9985f6b7767 -Create Date: 2023-05-12 08:36:16.603825+00:00 - -""" -import logging -import re - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "5a425ebe026c" -down_revision = "f9985f6b7767" -branch_labels = None -depends_on = None - - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - -KEY = "use_post_requests" - - -def match_expression(url: str) -> bool: - expressions = [ - r"^https?://vlc\.(.*?\.)?palaceproject\.io", - r"^https?://vlc\.thepalaceproject\.org", - r"^(http://)?localhost", - ] - for expr in expressions: - match = re.match(expr, url) - if match is not None: - return True - - return False - - -def upgrade() -> None: - """Set 'use_post_requests' to 'true' for 'api.millenium' integrations. 
- But only those that are for the following urls - - vlc.thepalaceproject.org - - vlc.*.palaceproject.io - - localhost - """ - conn = op.get_bind() - # Find the relevant external integrations - result_ids = conn.execute( - "SELECT id FROM externalintegrations where protocol='api.millenium_patron'" - ) - - # Query to pull specific config values - config_query = "SELECT value from configurationsettings where external_integration_id={integration_id} and key='{key}'" - - # For each millenium integration found - for [integration_id] in result_ids or []: - # Pull the URL setting - config_results = conn.execute( - config_query.format(integration_id=integration_id, key="url") - ) - url_results = list(config_results) - if config_results and len(url_results) > 0: - url = url_results[0][0] - else: - log.info(f"No URL found for integration: {integration_id}") - continue - - # Check if it is something we want to change at all - if not match_expression(url): - log.info(f"Not an internal millenium implementation: {url}") - continue - - # Pull the post requests setting - config_results = conn.execute( - config_query.format(integration_id=integration_id, key=f"{KEY}") - ) - post_results = list(config_results) - # This setting may or may not exist - if config_results and len(post_results) > 0: - use_post = post_results[0][0] - if use_post is None: - use_post = "false" - else: - use_post = None - - # Make the changes - if use_post is None: - log.info(f"'{KEY}' setting does not exist for {url}, creating...") - conn.execute( - "INSERT INTO configurationsettings(external_integration_id, library_id, key, value)" - + f" VALUES ({integration_id}, NULL, '{KEY}', 'true')" - ) - elif use_post == "false": - log.info(f"'{KEY}' is disabled for {url}, enabling...") - conn.execute( - "UPDATE configurationsettings SET value='true'" - + f"WHERE external_integration_id={integration_id} and key='{KEY}'" - ) - else: - log.info(f"'{KEY}' for {url} is already {use_post}, ignoring...") - - -def downgrade() -> None: - """Set all internal millenium integrations to not use POST""" - conn = op.get_bind() - result_ids = conn.execute( - "SELECT id FROM externalintegrations where protocol='api.millenium_patron'" - ) - for [integration_id] in result_ids: - log.info(f"Forcing '{KEY}' to 'false' for {integration_id}") - conn.execute( - "UPDATE configurationsettings SET value='false'" - + f" WHERE external_integration_id={integration_id} AND key='{KEY}'" - ) - - -if __name__ == "__main__": - # Some testing code - assert match_expression("http://vlc.dev.palaceproject.io/api") == True - assert match_expression("https://vlc.staging.palaceproject.io/PATRONAPI") == True - assert match_expression("localhost:6500/PATRONAPI") == True - assert match_expression("http://localhost:6500/api") == True - assert match_expression("https://vlc.thepalaceproject.org/anything...") == True - assert match_expression("https://vendor.millenium.com/PATRONAPI") == False - - import sys - - log.addHandler(logging.StreamHandler(sys.stdout)) - log.info("Match expression tests passed!!") diff --git a/alembic/versions/20230525_0a1c9c3f5dd2_revert_pr_980.py b/alembic/versions/20230525_0a1c9c3f5dd2_revert_pr_980.py deleted file mode 100644 index 6e7d95bd6c..0000000000 --- a/alembic/versions/20230525_0a1c9c3f5dd2_revert_pr_980.py +++ /dev/null @@ -1,109 +0,0 @@ -"""revert pr 980 - -Revision ID: 0a1c9c3f5dd2 -Revises: a9ed3f76d649 -Create Date: 2023-05-25 19:07:04.474551+00:00 - -""" -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -from alembic import op 
- -# revision identifiers, used by Alembic. -revision = "0a1c9c3f5dd2" -down_revision = "a9ed3f76d649" -branch_labels = None -depends_on = None - - -ext_int_status_enum = sa.Enum("green", "red", name="external_integration_status") -int_status_enum = sa.Enum("GREEN", "RED", name="status") - - -def upgrade() -> None: - # Drop external integration errors tables - op.drop_table("externalintegrationerrors") - op.drop_column("externalintegrations", "last_status_update") - op.drop_column("externalintegrations", "status") - ext_int_status_enum.drop(op.get_bind()) - - # Drop integration errors tables - op.drop_table("integration_errors") - op.drop_column("integration_configurations", "status") - op.drop_column("integration_configurations", "last_status_update") - int_status_enum.drop(op.get_bind()) - - -def downgrade() -> None: - ext_int_status_enum.create(op.get_bind()) - op.add_column( - "externalintegrations", - sa.Column( - "status", - postgresql.ENUM("green", "red", name="external_integration_status"), - server_default=sa.text("'green'::external_integration_status"), - autoincrement=False, - nullable=True, - ), - ) - op.add_column( - "externalintegrations", - sa.Column( - "last_status_update", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - ) - op.create_table( - "externalintegrationerrors", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("error", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column( - "external_integration_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - sa.ForeignKeyConstraint( - ["external_integration_id"], - ["externalintegrations.id"], - name="fk_error_externalintegrations_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id", name="externalintegrationerrors_pkey"), - ) - - int_status_enum.create(op.get_bind()) - op.add_column( - "integration_configurations", - sa.Column( - "last_status_update", - postgresql.TIMESTAMP(), - autoincrement=False, - nullable=True, - ), - ) - op.add_column( - "integration_configurations", - sa.Column( - "status", - postgresql.ENUM("RED", "GREEN", name="status"), - autoincrement=False, - nullable=False, - server_default=sa.text("'GREEN'::status"), - ), - ) - op.create_table( - "integration_errors", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("time", postgresql.TIMESTAMP(), autoincrement=False, nullable=True), - sa.Column("error", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("integration_id", sa.INTEGER(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint( - ["integration_id"], - ["integration_configurations.id"], - name="fk_integration_error_integration_id", - ondelete="CASCADE", - ), - sa.PrimaryKeyConstraint("id", name="integration_errors_pkey"), - ) diff --git a/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py b/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py deleted file mode 100644 index 7c9158ef45..0000000000 --- a/alembic/versions/20230531_0af587ff8595_migrate_license_integrations_to_.py +++ /dev/null @@ -1,138 +0,0 @@ -"""Migrate license integrations to configuration settings - -Revision ID: 0af587ff8595 -Revises: b883671b7bc5 -Create Date: 2023-05-31 12:34:42.550703+00:00 - -""" - -from typing import Type - -from alembic import op -from api.integration.registry.license_providers import LicenseProvidersRegistry -from core.integration.base import 
HasLibraryIntegrationConfiguration -from core.integration.settings import BaseSettings -from core.migration.migrate_external_integration import ( - _migrate_library_settings, - _validate_and_load_settings, - get_configuration_settings, - get_integrations, - get_library_for_integration, -) -from core.model import json_serializer - -# revision identifiers, used by Alembic. -revision = "0af587ff8595" -down_revision = "b883671b7bc5" -branch_labels = None -depends_on = None - - -LICENSE_GOAL = "LICENSE_GOAL" - - -# This function is copied from core/migration/migrate_external_integration.py -# because the integration_configurations table has changed and this migration -# needs a copy of the function that references the old version of the table. -# -# It was copied here, because this old version can be deleted whenever this -# migration is deleted, so it makes sense to keep them together. -def _migrate_external_integration( - connection, - integration, - protocol_class, - goal, - settings_dict, - self_test_results, - name=None, -): - # Load and validate the settings before storing them in the database. - settings_class = protocol_class.settings_class() - settings_obj = _validate_and_load_settings(settings_class, settings_dict) - integration_configuration = connection.execute( - "insert into integration_configurations " - "(protocol, goal, name, settings, self_test_results) " - "values (%s, %s, %s, %s, %s)" - "returning id", - ( - integration.protocol, - goal, - name or integration.name, - json_serializer(settings_obj.dict()), - self_test_results, - ), - ).fetchone() - assert integration_configuration is not None - return integration_configuration[0] - - -def upgrade() -> None: - registry = LicenseProvidersRegistry() - - connection = op.get_bind() - - # Fetch all license type integrations - # The old enum had 'licenses', the new enum has 'LICENSE_GOAL' - integrations = get_integrations(connection, "licenses") - for integration in integrations: - _id, protocol, name = integration - - # Get the right API class for it - api_class = registry.get(protocol, None) - if not api_class: - raise RuntimeError(f"Could not find API class for '{protocol}'") - - # Create the settings and library settings dicts from the configurationsettings - settings_dict, library_settings, self_test_result = get_configuration_settings( - connection, integration - ) - - # License type integrations take their external_account_id data from the collection. 
- # The configurationsetting for it seems to be unused, so we take the value from the collection - collection = connection.execute( - "select id, external_account_id, name from collections where external_integration_id = %s", - integration.id, - ).fetchone() - if not collection: - raise RuntimeError( - f"Could not fetch collection for integration {integration}" - ) - settings_class: Type[BaseSettings] = api_class.settings_class() - if "external_account_id" in settings_class.__fields__: - settings_dict["external_account_id"] = collection.external_account_id - - # Write the configurationsettings into the integration_configurations table - integration_id = _migrate_external_integration( - connection, - integration, - api_class, - LICENSE_GOAL, - settings_dict, - self_test_result, - name=collection.name, - ) - - # Connect the collection to the settings - connection.execute( - "UPDATE collections SET integration_configuration_id=%s where id=%s", - (integration_id, collection.id), - ) - - # If we have library settings too, then write each one into it's own row - if issubclass(api_class, HasLibraryIntegrationConfiguration): - integration_libraries = get_library_for_integration(connection, _id) - for library in integration_libraries: - _migrate_library_settings( - connection, - integration_id, - library.library_id, - library_settings[library.library_id], - api_class, - ) - - -def downgrade() -> None: - connection = op.get_bind() - connection.execute( - "DELETE from integration_configurations where goal = %s", LICENSE_GOAL - ) diff --git a/alembic/versions/20230531_b883671b7bc5_add_the_license_type_goal.py b/alembic/versions/20230531_b883671b7bc5_add_the_license_type_goal.py deleted file mode 100644 index cc334b48f4..0000000000 --- a/alembic/versions/20230531_b883671b7bc5_add_the_license_type_goal.py +++ /dev/null @@ -1,75 +0,0 @@ -"""Add the license type goal - -Revision ID: b883671b7bc5 -Revises: 0a1c9c3f5dd2 -Create Date: 2023-05-31 10:50:32.045821+00:00 - -""" -import sqlalchemy as sa -from sqlalchemy.exc import ProgrammingError - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "b883671b7bc5" -down_revision = "0a1c9c3f5dd2" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # We need to use an autocommit blcok since the next migration is going to use - # the new enum value immediately, so we must ensure the value is commited - # before the next migration runs - # Additionally, since we are autocommiting this change we MUST ensure we - # assume the schemas may already exist while upgrading to this change. - # This happens incase the data migration in 0af587 fails and an automatic rollback occurs. 
- # In which case, due to the autocommit, these schema changes will not get rolled back - with op.get_context().autocommit_block(): - op.execute(f"ALTER TYPE goals ADD VALUE IF NOT EXISTS 'LICENSE_GOAL'") - - try: - op.add_column( - "collections", - sa.Column("integration_configuration_id", sa.Integer(), nullable=True), - ) - except ProgrammingError as ex: - if "DuplicateColumn" not in str(ex): - raise - - try: - op.create_index( - op.f("ix_collections_integration_configuration_id"), - "collections", - ["integration_configuration_id"], - unique=True, - ) - except ProgrammingError as ex: - if "DuplicateTable" not in str(ex): - raise - - try: - op.create_foreign_key( - None, - "collections", - "integration_configurations", - ["integration_configuration_id"], - ["id"], - ondelete="SET NULL", - ) - except ProgrammingError as ex: - if "DuplicateColumn" not in str(ex): - raise - - -def downgrade() -> None: - """There is no way to drop single values from an Enum from postgres""" - op.drop_constraint( - "collections_integration_configuration_id_fkey", - "collections", - type_="foreignkey", - ) - op.drop_index( - op.f("ix_collections_integration_configuration_id"), table_name="collections" - ) - op.drop_column("collections", "integration_configuration_id") diff --git a/alembic/versions/20230606_28717fc6e50f_opds_for_distributors_unlimited_access.py b/alembic/versions/20230606_28717fc6e50f_opds_for_distributors_unlimited_access.py deleted file mode 100644 index 1273c14ae8..0000000000 --- a/alembic/versions/20230606_28717fc6e50f_opds_for_distributors_unlimited_access.py +++ /dev/null @@ -1,58 +0,0 @@ -"""opds for distributors unlimited access - -Revision ID: 28717fc6e50f -Revises: 0af587ff8595 -Create Date: 2023-06-06 10:08:35.892018+00:00 - -""" -from alembic import op - -# revision identifiers, used by Alembic. -revision = "28717fc6e50f" -down_revision = "0af587ff8595" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - connection = op.get_bind() - connection.execute( - """ - UPDATE - licensepools AS lp - SET - licenses_owned = -1, - licenses_available = -1 - FROM - collections c, - externalintegrations e - WHERE - lp.licenses_owned = 1 - and lp.licenses_available = 1 - and lp.collection_id = c.id - and c.external_integration_id = e.id - and e.protocol = 'OPDS for Distributors' - """ - ) - - -def downgrade() -> None: - connection = op.get_bind() - connection.execute( - """ - UPDATE - licensepools AS lp - SET - licenses_owned = 1, - licenses_available = 1 - FROM - collections c, - externalintegrations e - WHERE - lp.licenses_owned = -1 - and lp.licenses_available = -1 - and lp.collection_id = c.id - and c.external_integration_id = e.id - and e.protocol = 'OPDS for Distributors' - """ - ) diff --git a/alembic/versions/20230628_f08f9c6bded6_remove_adobe_vendor_id_tables.py b/alembic/versions/20230628_f08f9c6bded6_remove_adobe_vendor_id_tables.py deleted file mode 100644 index ff44f87321..0000000000 --- a/alembic/versions/20230628_f08f9c6bded6_remove_adobe_vendor_id_tables.py +++ /dev/null @@ -1,112 +0,0 @@ -"""Remove adobe vendor id tables - -Revision ID: f08f9c6bded6 -Revises: 28717fc6e50f -Create Date: 2023-06-28 19:07:27.735625+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "f08f9c6bded6" -down_revision = "28717fc6e50f" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index( - "ix_drmdeviceidentifiers_credential_id", table_name="drmdeviceidentifiers" - ) - op.drop_index( - "ix_drmdeviceidentifiers_device_identifier", table_name="drmdeviceidentifiers" - ) - op.drop_table("drmdeviceidentifiers") - op.drop_index( - "ix_delegatedpatronidentifiers_library_uri", - table_name="delegatedpatronidentifiers", - ) - op.drop_index( - "ix_delegatedpatronidentifiers_patron_identifier", - table_name="delegatedpatronidentifiers", - ) - op.drop_index( - "ix_delegatedpatronidentifiers_type", table_name="delegatedpatronidentifiers" - ) - op.drop_table("delegatedpatronidentifiers") - - -def downgrade() -> None: - op.create_table( - "delegatedpatronidentifiers", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("type", sa.VARCHAR(length=255), autoincrement=False, nullable=True), - sa.Column( - "library_uri", sa.VARCHAR(length=255), autoincrement=False, nullable=True - ), - sa.Column( - "patron_identifier", - sa.VARCHAR(length=255), - autoincrement=False, - nullable=True, - ), - sa.Column( - "delegated_identifier", sa.VARCHAR(), autoincrement=False, nullable=True - ), - sa.PrimaryKeyConstraint("id", name="delegatedpatronidentifiers_pkey"), - sa.UniqueConstraint( - "type", - "library_uri", - "patron_identifier", - name="delegatedpatronidentifiers_type_library_uri_patron_identifi_key", - ), - ) - op.create_index( - "ix_delegatedpatronidentifiers_type", - "delegatedpatronidentifiers", - ["type"], - unique=False, - ) - op.create_index( - "ix_delegatedpatronidentifiers_patron_identifier", - "delegatedpatronidentifiers", - ["patron_identifier"], - unique=False, - ) - op.create_index( - "ix_delegatedpatronidentifiers_library_uri", - "delegatedpatronidentifiers", - ["library_uri"], - unique=False, - ) - op.create_table( - "drmdeviceidentifiers", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("credential_id", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "device_identifier", - sa.VARCHAR(length=255), - autoincrement=False, - nullable=True, - ), - sa.ForeignKeyConstraint( - ["credential_id"], - ["credentials.id"], - name="drmdeviceidentifiers_credential_id_fkey", - ), - sa.PrimaryKeyConstraint("id", name="drmdeviceidentifiers_pkey"), - ) - op.create_index( - "ix_drmdeviceidentifiers_device_identifier", - "drmdeviceidentifiers", - ["device_identifier"], - unique=False, - ) - op.create_index( - "ix_drmdeviceidentifiers_credential_id", - "drmdeviceidentifiers", - ["credential_id"], - unique=False, - ) diff --git a/alembic/versions/20230706_04bbd03bf9f1_migrate_library_key_pair.py b/alembic/versions/20230706_04bbd03bf9f1_migrate_library_key_pair.py deleted file mode 100644 index 458328b647..0000000000 --- a/alembic/versions/20230706_04bbd03bf9f1_migrate_library_key_pair.py +++ /dev/null @@ -1,69 +0,0 @@ -"""Migrate library key pair - -Revision ID: 04bbd03bf9f1 -Revises: f08f9c6bded6 -Create Date: 2023-07-06 14:40:17.970603+00:00 - -""" -import json -import logging - -import sqlalchemy as sa -from Crypto.PublicKey import RSA - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "04bbd03bf9f1" -down_revision = "f08f9c6bded6" -branch_labels = None -depends_on = None - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - - -def upgrade() -> None: - # Add the new columns as nullable, add the values, then make them non-nullable - op.add_column( - "libraries", - sa.Column("public_key", sa.Unicode(), nullable=True), - ) - op.add_column( - "libraries", - sa.Column("private_key", sa.LargeBinary(), nullable=True), - ) - - # Now we update the value stored for the key pair - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - for library in libraries: - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.library_id = (%s) and cs.key = 'key-pair' and cs.external_integration_id IS NULL", - (library.id,), - ).fetchone() - if setting and setting.value: - _, private_key_str = json.loads(setting.value) - private_key = RSA.import_key(private_key_str) - else: - log.info(f"Library {library.short_name} has no key pair, generating one...") - private_key = RSA.generate(2048) - - private_key_bytes = private_key.export_key("DER") - public_key_str = private_key.publickey().export_key("PEM").decode("utf-8") - - connection.execute( - "update libraries set public_key = (%s), private_key = (%s) where id = (%s)", - (public_key_str, private_key_bytes, library.id), - ) - - # Then we make the columns non-nullable - op.alter_column("libraries", "public_key", nullable=False) - op.alter_column("libraries", "private_key", nullable=False) - - -def downgrade() -> None: - op.drop_column("libraries", "private_key") - op.drop_column("libraries", "public_key") diff --git a/alembic/versions/20230706_c471f553249b_migrate_library_logo.py b/alembic/versions/20230706_c471f553249b_migrate_library_logo.py deleted file mode 100644 index b56d5a4f96..0000000000 --- a/alembic/versions/20230706_c471f553249b_migrate_library_logo.py +++ /dev/null @@ -1,66 +0,0 @@ -"""Migrate library logo - -Revision ID: c471f553249b -Revises: 04bbd03bf9f1 -Create Date: 2023-07-06 19:37:59.269231+00:00 - -""" -import logging - -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "c471f553249b" -down_revision = "04bbd03bf9f1" -branch_labels = None -depends_on = None - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - - -def upgrade() -> None: - op.create_table( - "libraries_logos", - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("content", sa.LargeBinary(), nullable=False), - sa.ForeignKeyConstraint( - ["library_id"], - ["libraries.id"], - ), - sa.PrimaryKeyConstraint("library_id"), - ) - - prefix = "data:image/png;base64," - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - - for library in libraries: - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.library_id = (%s) and cs.key = 'logo'", - (library.id,), - ).first() - if setting and setting.value: - log.info(f"Library {library.short_name} has a logo, migrating it.") - logo_str = setting.value - - # We stored the logo with a data:image prefix before, but we - # don't need that anymore, so we remove it here. 
- if logo_str.startswith(prefix): - logo_str = logo_str[len(prefix) :] - - logo_bytes = logo_str.encode("utf-8") - connection.execute( - "insert into libraries_logos (library_id, content) values (%s, %s)", - (library.id, logo_bytes), - ) - else: - log.info(f"Library {library.short_name} has no logo, skipping.") - - -def downgrade() -> None: - op.drop_table("libraries_logos") diff --git a/alembic/versions/20230711_3d380776c1bf_migrate_announcements.py b/alembic/versions/20230711_3d380776c1bf_migrate_announcements.py deleted file mode 100644 index ac04381a35..0000000000 --- a/alembic/versions/20230711_3d380776c1bf_migrate_announcements.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Migrate announcements - -Revision ID: 3d380776c1bf -Revises: c471f553249b -Create Date: 2023-07-11 17:22:56.596888+00:00 - -""" -import json -from typing import Optional - -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql -from sqlalchemy.engine import Connection, Row - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "3d380776c1bf" -down_revision = "c471f553249b" -branch_labels = None -depends_on = None - - -def create_announcement( - connection: Connection, setting: Optional[Row], library_id: Optional[int] = None -) -> None: - if setting and setting.value: - announcements = json.loads(setting.value) - for announcement in announcements: - connection.execute( - "insert into announcements (id, content, start, finish, library_id) values (%s, %s, %s, %s, %s)", - ( - announcement["id"], - announcement["content"], - announcement["start"], - announcement["finish"], - library_id, - ), - ) - - -def upgrade() -> None: - # Create table for announcements - op.create_table( - "announcements", - sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), - sa.Column("content", sa.Unicode(), nullable=False), - sa.Column("start", sa.Date(), nullable=False), - sa.Column("finish", sa.Date(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["library_id"], - ["libraries.id"], - ), - sa.PrimaryKeyConstraint("id"), - ) - op.create_index( - op.f("ix_announcements_library_id"), - "announcements", - ["library_id"], - unique=False, - ) - - # Migrate announcements from configuration settings - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - - # Migrate library announcements - for library in libraries: - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.library_id = (%s) and cs.key = 'announcements' and cs.external_integration_id IS NULL", - (library.id,), - ).fetchone() - create_announcement(connection, setting, library.id) - - # Migrate global announcements - setting = connection.execute( - "select cs.value from configurationsettings cs " - "where cs.key = 'global_announcements' and cs.library_id IS NULL and cs.external_integration_id IS NULL", - ).fetchone() - create_announcement(connection, setting) - - -def downgrade() -> None: - op.drop_index(op.f("ix_announcements_library_id"), table_name="announcements") - op.drop_table("announcements") diff --git a/alembic/versions/20230719_b3749bac3e55_migrate_library_settings.py b/alembic/versions/20230719_b3749bac3e55_migrate_library_settings.py deleted file mode 100644 index 09638fbeb6..0000000000 --- a/alembic/versions/20230719_b3749bac3e55_migrate_library_settings.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Migrate library settings - -Revision ID: b3749bac3e55 -Revises: 3d380776c1bf -Create Date: 
2023-07-19 16:13:14.831349+00:00 - -""" -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -from alembic import op -from core.configuration.library import LibrarySettings -from core.migration.migrate_external_integration import _validate_and_load_settings -from core.model import json_serializer - -# revision identifiers, used by Alembic. -revision = "b3749bac3e55" -down_revision = "3d380776c1bf" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.add_column( - "libraries", - sa.Column( - "settings_dict", postgresql.JSONB(astext_type=sa.Text()), nullable=True - ), - ) - - connection = op.get_bind() - libraries = connection.execute("select id, short_name from libraries") - for library in libraries: - configuration_settings = connection.execute( - "select key, value from configurationsettings " - "where library_id = (%s) and external_integration_id IS NULL", - (library.id,), - ) - settings_dict = {} - for key, value in configuration_settings: - if key in ["announcements", "logo", "key-pair"]: - continue - if not value: - continue - settings_dict[key] = value - - settings = _validate_and_load_settings(LibrarySettings, settings_dict) - connection.execute( - "update libraries set settings_dict = (%s) where id = (%s)", - (json_serializer(settings.dict()), library.id), - ) - - op.alter_column("libraries", "settings_dict", nullable=False) - - -def downgrade() -> None: - op.drop_column("libraries", "settings_dict") diff --git a/alembic/versions/20230726_2f1a51aa0ee8_remove_integration_client.py b/alembic/versions/20230726_2f1a51aa0ee8_remove_integration_client.py deleted file mode 100644 index f13772ba3b..0000000000 --- a/alembic/versions/20230726_2f1a51aa0ee8_remove_integration_client.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Remove integration client - -Revision ID: 2f1a51aa0ee8 -Revises: 892c8e0c89f8 -Create Date: 2023-07-26 13:34:02.924885+00:00 - -""" -import sqlalchemy as sa -from sqlalchemy.dialects import postgresql - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "2f1a51aa0ee8" -down_revision = "892c8e0c89f8" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index("ix_datasources_integration_client_id", table_name="datasources") - op.drop_constraint( - "datasources_integration_client_id_fkey", "datasources", type_="foreignkey" - ) - op.drop_column("datasources", "integration_client_id") - op.drop_index("ix_holds_integration_client_id", table_name="holds") - op.drop_constraint("holds_integration_client_id_fkey", "holds", type_="foreignkey") - op.drop_column("holds", "integration_client_id") - op.drop_index("ix_loans_integration_client_id", table_name="loans") - op.drop_constraint("loans_integration_client_id_fkey", "loans", type_="foreignkey") - op.drop_column("loans", "integration_client_id") - op.drop_index( - "ix_integrationclients_shared_secret", table_name="integrationclients" - ) - op.drop_table("integrationclients") - - -def downgrade() -> None: - op.create_table( - "integrationclients", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column("url", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("shared_secret", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.Column("enabled", sa.BOOLEAN(), autoincrement=False, nullable=True), - sa.Column( - "created", - postgresql.TIMESTAMP(timezone=True), - autoincrement=False, - nullable=True, - ), - sa.Column( - "last_accessed", - postgresql.TIMESTAMP(timezone=True), - autoincrement=False, - nullable=True, - ), - sa.PrimaryKeyConstraint("id", name="integrationclients_pkey"), - sa.UniqueConstraint("url", name="integrationclients_url_key"), - ) - op.create_index( - "ix_integrationclients_shared_secret", - "integrationclients", - ["shared_secret"], - unique=False, - ) - op.add_column( - "loans", - sa.Column( - "integration_client_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - ) - op.create_foreign_key( - "loans_integration_client_id_fkey", - "loans", - "integrationclients", - ["integration_client_id"], - ["id"], - ) - op.create_index( - "ix_loans_integration_client_id", - "loans", - ["integration_client_id"], - unique=False, - ) - op.add_column( - "holds", - sa.Column( - "integration_client_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - ) - op.create_foreign_key( - "holds_integration_client_id_fkey", - "holds", - "integrationclients", - ["integration_client_id"], - ["id"], - ) - op.create_index( - "ix_holds_integration_client_id", - "holds", - ["integration_client_id"], - unique=False, - ) - op.add_column( - "datasources", - sa.Column( - "integration_client_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - ) - op.create_foreign_key( - "datasources_integration_client_id_fkey", - "datasources", - "integrationclients", - ["integration_client_id"], - ["id"], - ) - op.create_index( - "ix_datasources_integration_client_id", - "datasources", - ["integration_client_id"], - unique=False, - ) diff --git a/alembic/versions/20230728_892c8e0c89f8_audiobook_playtime_tracking.py b/alembic/versions/20230728_892c8e0c89f8_audiobook_playtime_tracking.py deleted file mode 100644 index 0c7312b0f2..0000000000 --- a/alembic/versions/20230728_892c8e0c89f8_audiobook_playtime_tracking.py +++ /dev/null @@ -1,116 +0,0 @@ -"""Audiobook playtime tracking - -Revision ID: 892c8e0c89f8 -Revises: b3749bac3e55 -Create Date: 2023-07-28 07:20:24.625484+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "892c8e0c89f8" -down_revision = "b3749bac3e55" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic - please adjust! ### - op.create_table( - "playtime_entries", - sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column("identifier_id", sa.Integer(), nullable=False), - sa.Column("collection_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False), - sa.Column("total_seconds_played", sa.Integer(), nullable=False), - sa.Column("tracking_id", sa.String(length=64), nullable=False), - sa.Column("processed", sa.Boolean(), nullable=True), - sa.ForeignKeyConstraint( - ["collection_id"], - ["collections.id"], - onupdate="CASCADE", - ondelete="CASCADE", - ), - sa.ForeignKeyConstraint( - ["identifier_id"], - ["identifiers.id"], - onupdate="CASCADE", - ondelete="CASCADE", - ), - sa.ForeignKeyConstraint( - ["library_id"], ["libraries.id"], onupdate="CASCADE", ondelete="CASCADE" - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint( - "identifier_id", "collection_id", "library_id", "tracking_id" - ), - ) - op.create_table( - "playtime_summaries", - sa.Column("id", sa.Integer(), autoincrement=True, nullable=False), - sa.Column("identifier_id", sa.Integer(), nullable=True), - sa.Column("collection_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("identifier_str", sa.String(), nullable=False), - sa.Column("collection_name", sa.String(), nullable=False), - sa.Column("library_name", sa.String(), nullable=False), - sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False), - sa.Column("total_seconds_played", sa.Integer(), nullable=True), - sa.ForeignKeyConstraint( - ["collection_id"], - ["collections.id"], - onupdate="CASCADE", - ondelete="SET NULL", - ), - sa.ForeignKeyConstraint( - ["identifier_id"], - ["identifiers.id"], - onupdate="CASCADE", - ondelete="SET NULL", - ), - sa.ForeignKeyConstraint( - ["library_id"], ["libraries.id"], onupdate="CASCADE", ondelete="SET NULL" - ), - sa.PrimaryKeyConstraint("id"), - sa.UniqueConstraint( - "identifier_str", "collection_name", "library_name", "timestamp" - ), - ) - op.create_index( - op.f("ix_playtime_summaries_collection_id"), - "playtime_summaries", - ["collection_id"], - unique=False, - ) - op.create_index( - op.f("ix_playtime_summaries_identifier_id"), - "playtime_summaries", - ["identifier_id"], - unique=False, - ) - op.create_index( - op.f("ix_playtime_summaries_library_id"), - "playtime_summaries", - ["library_id"], - unique=False, - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic - please adjust! 
### - op.drop_index( - op.f("ix_playtime_summaries_library_id"), table_name="playtime_summaries" - ) - op.drop_index( - op.f("ix_playtime_summaries_identifier_id"), table_name="playtime_summaries" - ) - op.drop_index( - op.f("ix_playtime_summaries_collection_id"), table_name="playtime_summaries" - ) - op.drop_table("playtime_summaries") - op.drop_table("playtime_entries") - # ### end Alembic commands ### diff --git a/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py b/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py deleted file mode 100644 index d851b9ea16..0000000000 --- a/alembic/versions/20230810_0df58829fc1a_add_discovery_service_tables.py +++ /dev/null @@ -1,187 +0,0 @@ -"""Add discovery service tables - -Revision ID: 0df58829fc1a -Revises: 2f1a51aa0ee8 -Create Date: 2023-08-10 15:49:36.784169+00:00 - -""" -import sqlalchemy as sa - -from alembic import op -from api.discovery.opds_registration import OpdsRegistrationService -from core.migration.migrate_external_integration import ( - _validate_and_load_settings, - get_configuration_settings, - get_integrations, - get_library_for_integration, -) -from core.migration.util import drop_enum, pg_update_enum -from core.model import json_serializer - -# revision identifiers, used by Alembic. -revision = "0df58829fc1a" -down_revision = "2f1a51aa0ee8" -branch_labels = None -depends_on = None - -old_goals_enum = [ - "PATRON_AUTH_GOAL", - "LICENSE_GOAL", -] - -new_goals_enum = old_goals_enum + ["DISCOVERY_GOAL"] - - -# This function is copied from core/migration/migrate_external_integration.py -# because the integration_configurations table has changed and this migration -# needs a copy of the function that references the old version of the table. -# -# It was copied here, because this old version can be deleted whenever this -# migration is deleted, so it makes sense to keep them together. -def _migrate_external_integration( - connection, - integration, - protocol_class, - goal, - settings_dict, - self_test_results, - name=None, -): - # Load and validate the settings before storing them in the database. 
- settings_class = protocol_class.settings_class() - settings_obj = _validate_and_load_settings(settings_class, settings_dict) - integration_configuration = connection.execute( - "insert into integration_configurations " - "(protocol, goal, name, settings, self_test_results) " - "values (%s, %s, %s, %s, %s)" - "returning id", - ( - integration.protocol, - goal, - name or integration.name, - json_serializer(settings_obj.dict()), - self_test_results, - ), - ).fetchone() - assert integration_configuration is not None - return integration_configuration[0] - - -def upgrade() -> None: - op.create_table( - "discovery_service_registrations", - sa.Column( - "status", - sa.Enum("SUCCESS", "FAILURE", name="registrationstatus"), - nullable=False, - ), - sa.Column( - "stage", - sa.Enum("TESTING", "PRODUCTION", name="registrationstage"), - nullable=False, - ), - sa.Column("web_client", sa.Unicode(), nullable=True), - sa.Column("short_name", sa.Unicode(), nullable=True), - sa.Column("shared_secret", sa.Unicode(), nullable=True), - sa.Column("integration_id", sa.Integer(), nullable=False), - sa.Column("library_id", sa.Integer(), nullable=False), - sa.Column("vendor_id", sa.Unicode(), nullable=True), - sa.ForeignKeyConstraint( - ["integration_id"], ["integration_configurations.id"], ondelete="CASCADE" - ), - sa.ForeignKeyConstraint(["library_id"], ["libraries.id"], ondelete="CASCADE"), - sa.PrimaryKeyConstraint("integration_id", "library_id"), - ) - pg_update_enum( - op, - "integration_configurations", - "goal", - "goals", - old_goals_enum, - new_goals_enum, - ) - - # Migrate data - connection = op.get_bind() - external_integrations = get_integrations(connection, "discovery") - for external_integration in external_integrations: - # This should always be the case, but we want to make sure - assert external_integration.protocol == "OPDS Registration" - - # Create the settings and library settings dicts from the configurationsettings - settings_dict, library_settings, self_test_result = get_configuration_settings( - connection, external_integration - ) - - # Write the configurationsettings into the integration_configurations table - integration_configuration_id = _migrate_external_integration( - connection, - external_integration, - OpdsRegistrationService, - "DISCOVERY_GOAL", - settings_dict, - self_test_result, - ) - - # Get the libraries that are associated with this external integration - interation_libraries = get_library_for_integration( - connection, external_integration.id - ) - - vendor_id = settings_dict.get("vendor_id") - - # Write the library settings into the discovery_service_registrations table - for library in interation_libraries: - library_id = library.library_id - library_settings_dict = library_settings[library_id] - - status = library_settings_dict.get("library-registration-status") - if status is None: - status = "FAILURE" - else: - status = status.upper() - - stage = library_settings_dict.get("library-registration-stage") - if stage is None: - stage = "TESTING" - else: - stage = stage.upper() - - web_client = library_settings_dict.get("library-registration-web-client") - short_name = library_settings_dict.get("username") - shared_secret = library_settings_dict.get("password") - - connection.execute( - "insert into discovery_service_registrations " - "(status, stage, web_client, short_name, shared_secret, integration_id, library_id, vendor_id) " - "values (%s, %s, %s, %s, %s, %s, %s, %s)", - ( - status, - stage, - web_client, - short_name, - shared_secret, - integration_configuration_id, 
- library_id, - vendor_id, - ), - ) - - -def downgrade() -> None: - connection = op.get_bind() - connection.execute( - "DELETE from integration_configurations where goal = %s", "DISCOVERY_GOAL" - ) - - op.drop_table("discovery_service_registrations") - drop_enum(op, "registrationstatus") - drop_enum(op, "registrationstage") - pg_update_enum( - op, - "integration_configurations", - "goal", - "goals", - new_goals_enum, - old_goals_enum, - ) diff --git a/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py b/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py deleted file mode 100644 index 2b2d0406a8..0000000000 --- a/alembic/versions/20230831_1c566151741f_remove_self_hosted_from_licensepools.py +++ /dev/null @@ -1,31 +0,0 @@ -"""Remove self_hosted from licensepools - -Revision ID: 1c566151741f -Revises: 2b672c6fb2b9 -Create Date: 2023-08-31 16:13:54.935093+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "1c566151741f" -down_revision = "2b672c6fb2b9" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index("ix_licensepools_self_hosted", table_name="licensepools") - op.drop_column("licensepools", "self_hosted") - - -def downgrade() -> None: - op.add_column( - "licensepools", - sa.Column("self_hosted", sa.BOOLEAN(), autoincrement=False, nullable=False), - ) - op.create_index( - "ix_licensepools_self_hosted", "licensepools", ["self_hosted"], unique=False - ) diff --git a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py b/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py deleted file mode 100644 index 0f11714085..0000000000 --- a/alembic/versions/20230905_2b672c6fb2b9_type_coerce_collection_settings.py +++ /dev/null @@ -1,111 +0,0 @@ -"""Type coerce collection settings - -Revision ID: 2b672c6fb2b9 -Revises: 0df58829fc1a -Create Date: 2023-09-05 06:40:35.739869+00:00 - -""" -import json -import logging -from copy import deepcopy -from typing import Any, Dict, Optional, Tuple - -from pydantic import PositiveInt, ValidationError, parse_obj_as - -from alembic import op - -# revision identifiers, used by Alembic. -revision = "2b672c6fb2b9" -down_revision = "0df58829fc1a" -branch_labels = None -depends_on = None - - -log = logging.getLogger(f"palace.migration.{revision}") -log.setLevel(logging.INFO) -log.disabled = False - - -# All the settings types that have non-str types -ALL_SETTING_TYPES: Dict[str, Any] = { - "verify_certificate": Optional[bool], - "default_reservation_period": Optional[PositiveInt], - "loan_limit": Optional[PositiveInt], - "hold_limit": Optional[PositiveInt], - "max_retry_count": Optional[PositiveInt], - "ebook_loan_duration": Optional[PositiveInt], - "default_loan_duration": Optional[PositiveInt], -} - - -def _coerce_types(original_settings: Dict[str, Any]) -> Tuple[bool, Dict[str, Any]]: - """Coerce the types, in-place""" - modified = False - modified_settings = deepcopy(original_settings) - for setting_name, setting_type in ALL_SETTING_TYPES.items(): - if setting_name in original_settings: - # If the setting is an empty string, we set it to None - if original_settings[setting_name] == "": - setting = None - else: - setting = original_settings[setting_name] - - try: - modified = True - modified_settings[setting_name] = parse_obj_as(setting_type, setting) - except ValidationError as e: - log.error( - f"Error while parsing setting {setting_name}. Settings: {original_settings}." 
- ) - raise e - - return modified, modified_settings - - -def upgrade() -> None: - connection = op.get_bind() - # Fetch all integration settings with the 'licenses' goal - results = connection.execute( - "SELECT id, settings from integration_configurations where goal='LICENSE_GOAL';" - ).fetchall() - - # For each integration setting, we check id any of the non-str - # keys are present in the DB - # We then type-coerce that value - for settings_id, settings in results: - modified, updated_settings = _coerce_types(settings) - if modified: - log.info( - f"Updating settings for integration_configuration (id:{settings_id}). " - f"Original settings: {settings}. New settings: {updated_settings}." - ) - # If any of the values were modified, we update the DB - connection.execute( - "UPDATE integration_configurations SET settings=%s where id=%s", - json.dumps(updated_settings), - settings_id, - ) - - # Do the same for any Library settings - results = connection.execute( - "SELECT ilc.parent_id, ilc.library_id, ilc.settings from integration_library_configurations ilc " - "join integration_configurations ic on ilc.parent_id = ic.id where ic.goal='LICENSE_GOAL';" - ).fetchall() - - for parent_id, library_id, settings in results: - modified, updated_settings = _coerce_types(settings) - if modified: - log.info( - f"Updating settings for integration_library_configuration (parent_id:{parent_id}/library_id:{library_id}). " - f"Original settings: {settings}. New settings: {updated_settings}." - ) - connection.execute( - "UPDATE integration_library_configurations SET settings=%s where parent_id=%s and library_id=%s", - json.dumps(updated_settings), - parent_id, - library_id, - ) - - -def downgrade() -> None: - """There is no need to revert the types back to strings""" diff --git a/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py b/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py deleted file mode 100644 index 1bff2f4e0a..0000000000 --- a/alembic/versions/20230913_5d71a80073d5_remove_externalintegrationlink.py +++ /dev/null @@ -1,90 +0,0 @@ -"""Remove ExternalIntegrationLink. - -Revision ID: 5d71a80073d5 -Revises: 1c566151741f -Create Date: 2023-09-13 15:23:07.566404+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "5d71a80073d5" -down_revision = "1c566151741f" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - op.drop_index( - "ix_externalintegrationslinks_external_integration_id", - table_name="externalintegrationslinks", - ) - op.drop_index( - "ix_externalintegrationslinks_library_id", - table_name="externalintegrationslinks", - ) - op.drop_index( - "ix_externalintegrationslinks_other_integration_id", - table_name="externalintegrationslinks", - ) - op.drop_index( - "ix_externalintegrationslinks_purpose", table_name="externalintegrationslinks" - ) - op.drop_table("externalintegrationslinks") - - -def downgrade() -> None: - op.create_table( - "externalintegrationslinks", - sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), - sa.Column( - "external_integration_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=True), - sa.Column( - "other_integration_id", sa.INTEGER(), autoincrement=False, nullable=True - ), - sa.Column("purpose", sa.VARCHAR(), autoincrement=False, nullable=True), - sa.ForeignKeyConstraint( - ["external_integration_id"], - ["externalintegrations.id"], - name="externalintegrationslinks_external_integration_id_fkey", - ), - sa.ForeignKeyConstraint( - ["library_id"], - ["libraries.id"], - name="externalintegrationslinks_library_id_fkey", - ), - sa.ForeignKeyConstraint( - ["other_integration_id"], - ["externalintegrations.id"], - name="externalintegrationslinks_other_integration_id_fkey", - ), - sa.PrimaryKeyConstraint("id", name="externalintegrationslinks_pkey"), - ) - op.create_index( - "ix_externalintegrationslinks_purpose", - "externalintegrationslinks", - ["purpose"], - unique=False, - ) - op.create_index( - "ix_externalintegrationslinks_other_integration_id", - "externalintegrationslinks", - ["other_integration_id"], - unique=False, - ) - op.create_index( - "ix_externalintegrationslinks_library_id", - "externalintegrationslinks", - ["library_id"], - unique=False, - ) - op.create_index( - "ix_externalintegrationslinks_external_integration_id", - "externalintegrationslinks", - ["external_integration_id"], - unique=False, - ) diff --git a/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py b/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py deleted file mode 100644 index 4c2abc3cbb..0000000000 --- a/alembic/versions/20231016_21a65b8f391d_loan_and_hold_notification_times.py +++ /dev/null @@ -1,34 +0,0 @@ -"""Loan and hold notification times - -Revision ID: 21a65b8f391d -Revises: 5d71a80073d5 -Create Date: 2023-10-16 09:46:58.743018+00:00 - -""" -import sqlalchemy as sa - -from alembic import op - -# revision identifiers, used by Alembic. 
-revision = "21a65b8f391d" -down_revision = "5d71a80073d5" -branch_labels = None -depends_on = None - - -def upgrade() -> None: - # ### commands auto generated by Alembic ### - op.add_column( - "holds", sa.Column("patron_last_notified", sa.DateTime(), nullable=True) - ) - op.add_column( - "loans", sa.Column("patron_last_notified", sa.DateTime(), nullable=True) - ) - # ### end Alembic commands ### - - -def downgrade() -> None: - # ### commands auto generated by Alembic ### - op.drop_column("loans", "patron_last_notified") - op.drop_column("holds", "patron_last_notified") - # ### end Alembic commands ### diff --git a/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py b/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py index 13f071a200..cebc811c48 100644 --- a/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py +++ b/alembic/versions/20231019_0739d5558dda_cleanup_google_anaytics_integrations.py @@ -4,6 +4,11 @@ Revises: 21a65b8f391d Create Date: 2023-10-19 05:23:00.694886+00:00 +Note that this migration was changed for the v13.0.0 release, older migrations +were deleted from the repository history, and this was made the first migration +by changing the down_revision to None. + +See: https://alembic.sqlalchemy.org/en/latest/cookbook.html#building-an-up-to-date-database-from-scratch """ import sqlalchemy as sa @@ -11,7 +16,7 @@ # revision identifiers, used by Alembic. revision = "0739d5558dda" -down_revision = "21a65b8f391d" +down_revision = None branch_labels = None depends_on = None diff --git a/tests/migration/conftest.py b/tests/migration/conftest.py index ec48df49e0..ff26004cc8 100644 --- a/tests/migration/conftest.py +++ b/tests/migration/conftest.py @@ -9,7 +9,6 @@ import pytest import pytest_alembic from pytest_alembic.config import Config -from sqlalchemy import inspect from core.model import json_serializer from tests.fixtures.database import ApplicationFixture, DatabaseFixture @@ -112,26 +111,19 @@ def fixture( if short_name is None: short_name = random_name() - inspector = inspect(connection) - columns = [column["name"] for column in inspector.get_columns("libraries")] - args = { "name": name, "short_name": short_name, } - # See if we need to include public and private keys - if "public_key" in columns: - args["public_key"] = random_name() - args["private_key"] = random_name() - - # See if we need to include a settings dict - if "settings_dict" in columns: - settings_dict = { - "website": "http://library.com", - "help_web": "http://library.com/support", - } - args["settings_dict"] = json_serializer(settings_dict) + args["public_key"] = random_name() + args["private_key"] = random_name() + + settings_dict = { + "website": "http://library.com", + "help_web": "http://library.com/support", + } + args["settings_dict"] = json_serializer(settings_dict) keys = ",".join(args.keys()) values = ",".join([f"'{value}'" for value in args.values()]) From e4e8a09c799d67dc9c8e58718f154bd3bced3d3d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Dec 2023 21:22:24 +0000 Subject: [PATCH 201/262] Bump freezegun from 1.2.2 to 1.3.0 (#1550) --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index ce7bbccd16..4d88b79ca7 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1255,13 +1255,13 @@ flask = ["flask"] [[package]] name = "freezegun" -version = "1.2.2" +version = "1.3.0" 
description = "Let your Python tests travel through time" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "freezegun-1.2.2-py3-none-any.whl", hash = "sha256:ea1b963b993cb9ea195adbd893a48d573fda951b0da64f60883d7e988b606c9f"}, - {file = "freezegun-1.2.2.tar.gz", hash = "sha256:cd22d1ba06941384410cd967d8a99d5ae2442f57dfafeff2fda5de8dc5c05446"}, + {file = "freezegun-1.3.0-py3-none-any.whl", hash = "sha256:fc21de00b8e7f3dfb91414f876afe7f74b2f29ce9907bd81013bdbc7e8a632d0"}, + {file = "freezegun-1.3.0.tar.gz", hash = "sha256:56cc5dc34de38fe28a4d19bea65bbd3af6fd66020df8141b92bdb45c9be9f503"}, ] [package.dependencies] @@ -4552,4 +4552,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "c96cd038c5187aba1670c495d8dc0a03aae508ae60d76362782143168ce6a652" +content-hash = "6dd69fa4cb024f65fb063666ad8e175abf906ddd217b88b2b987b4b47530168a" diff --git a/pyproject.toml b/pyproject.toml index a3e2fb4ffa..1421c09539 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -252,7 +252,7 @@ tox-gh-actions = "^3.0" [tool.poetry.group.dev.dependencies] boto3-stubs = {version = "^1.28", extras = ["boto3", "essential", "logs", "s3"]} -freezegun = "~1.2.2" +freezegun = "~1.3.0" Jinja2 = "^3.1.2" mypy = "^1.4.1" psycopg2-binary = "~2.9.5" From 55062b92bb1bbb4eb490258f9c42737d040e4ebb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 5 Dec 2023 16:44:20 +0000 Subject: [PATCH 202/262] Bump freezegun from 1.3.0 to 1.3.1 (#1555) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4d88b79ca7..cedfee217f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1255,13 +1255,13 @@ flask = ["flask"] [[package]] name = "freezegun" -version = "1.3.0" +version = "1.3.1" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" files = [ - {file = "freezegun-1.3.0-py3-none-any.whl", hash = "sha256:fc21de00b8e7f3dfb91414f876afe7f74b2f29ce9907bd81013bdbc7e8a632d0"}, - {file = "freezegun-1.3.0.tar.gz", hash = "sha256:56cc5dc34de38fe28a4d19bea65bbd3af6fd66020df8141b92bdb45c9be9f503"}, + {file = "freezegun-1.3.1-py3-none-any.whl", hash = "sha256:065e77a12624d05531afa87ade12a0b9bdb53495c4573893252a055b545ce3ea"}, + {file = "freezegun-1.3.1.tar.gz", hash = "sha256:48984397b3b58ef5dfc645d6a304b0060f612bcecfdaaf45ce8aff0077a6cb6a"}, ] [package.dependencies] From a1f36438040ffc72c07865d3f2acea8eeb1397a6 Mon Sep 17 00:00:00 2001 From: dbernstein Date: Tue, 5 Dec 2023 09:13:26 -0800 Subject: [PATCH 203/262] =?UTF-8?q?Change=20the=20generate=5Fquicksight=5F?= =?UTF-8?q?url=20command=20params=20to=20library=5Fuuids=20fr=E2=80=A6=20(?= =?UTF-8?q?#1548)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Resolves: https://ebce-lyrasis.atlassian.net/browse/PP-783 --- api/admin/controller/quicksight.py | 22 +++++++++---------- api/admin/model/quicksight.py | 7 +++--- tests/api/admin/controller/test_quicksight.py | 18 ++++++++------- 3 files changed, 25 insertions(+), 22 deletions(-) diff --git a/api/admin/controller/quicksight.py b/api/admin/controller/quicksight.py index 509098d6f9..e7b6b99339 100644 --- a/api/admin/controller/quicksight.py +++ b/api/admin/controller/quicksight.py @@ -56,16 +56,16 @@ def generate_quicksight_url(self, dashboard_name) -> Dict: if admin.is_librarian(library): 
allowed_libraries.append(library) - if request_data.library_ids: - allowed_library_ids = list( - set(request_data.library_ids).intersection( - {l.id for l in allowed_libraries} + if request_data.library_uuids: + allowed_library_uuids = list( + set(map(str, request_data.library_uuids)).intersection( + {l.uuid for l in allowed_libraries} ) ) else: - allowed_library_ids = [l.id for l in allowed_libraries] + allowed_library_uuids = [l.uuid for l in allowed_libraries] - if not allowed_library_ids: + if not allowed_library_uuids: raise ProblemError( NOT_FOUND_ON_REMOTE.detailed( "No library was found for this Admin that matched the request." @@ -74,7 +74,7 @@ def generate_quicksight_url(self, dashboard_name) -> Dict: libraries = self._db.execute( select(Library.name) - .where(Library.id.in_(allowed_library_ids)) + .where(Library.uuid.in_(allowed_library_uuids)) .order_by(Library.name) ).all() @@ -96,19 +96,19 @@ def generate_quicksight_url(self, dashboard_name) -> Dict: ], ) except Exception as ex: - log.error(f"Error while fetching the Quisksight Embed url: {ex}") + log.error(f"Error while fetching the Quicksight Embed url: {ex}") raise ProblemError( INTERNAL_SERVER_ERROR.detailed( - "Error while fetching the Quisksight Embed url." + "Error while fetching the Quicksight Embed url." ) ) embed_url = response.get("EmbedUrl") if response.get("Status") // 100 != 2 or embed_url is None: - log.error(f"QuiskSight Embed url error response {response}") + log.error(f"Quicksight Embed url error response {response}") raise ProblemError( INTERNAL_SERVER_ERROR.detailed( - "Error while fetching the Quisksight Embed url." + "Error while fetching the Quicksight Embed url." ) ) diff --git a/api/admin/model/quicksight.py b/api/admin/model/quicksight.py index 752f889e37..a789adc4c8 100644 --- a/api/admin/model/quicksight.py +++ b/api/admin/model/quicksight.py @@ -1,4 +1,5 @@ from typing import List +from uuid import UUID from pydantic import Field, validator @@ -6,12 +7,12 @@ class QuicksightGenerateUrlRequest(CustomBaseModel): - library_ids: List[int] = Field( + library_uuids: List[UUID] = Field( description="The list of libraries to include in the dataset, an empty list is equivalent to all the libraries the user is allowed to access." 
) - @validator("library_ids", pre=True) - def parse_library_ids(cls, value): + @validator("library_uuids", pre=True) + def parse_library_uuids(cls, value) -> List[str]: return str_comma_list_validator(value) diff --git a/tests/api/admin/controller/test_quicksight.py b/tests/api/admin/controller/test_quicksight.py index 5240ae4941..d71dd61845 100644 --- a/tests/api/admin/controller/test_quicksight.py +++ b/tests/api/admin/controller/test_quicksight.py @@ -1,3 +1,4 @@ +import uuid from unittest import mock import pytest @@ -54,8 +55,9 @@ def test_generate_quicksight_url( ) generate_method.return_value = {"Status": 201, "EmbedUrl": "https://embed"} + random_uuid = str(uuid.uuid4()) with quicksight_fixture.request_context_with_admin( - f"/?library_ids={default.id},{library1.id},30000", + f"/?library_uuids={default.uuid},{library1.uuid},{random_uuid}", admin=system_admin, ) as ctx: response = ctrl.generate_quicksight_url("primary") @@ -86,7 +88,7 @@ def test_generate_quicksight_url( admin1.add_role(AdminRole.LIBRARY_MANAGER, library1) with quicksight_fixture.request_context_with_admin( - f"/?library_ids=1,{library1.id}", + f"/?library_uuids={default.uuid},{library1.uuid}", admin=admin1, ) as ctx: generate_method.reset_mock() @@ -129,7 +131,7 @@ def test_generate_quicksight_url_errors( mock_qs_arns.return_value = arns with quicksight_fixture.request_context_with_admin( - f"/?library_ids={library.id}", + f"/?library_uuids={library.uuid}", admin=admin, ) as ctx: with pytest.raises(ProblemError) as raised: @@ -148,7 +150,7 @@ def test_generate_quicksight_url_errors( ) with quicksight_fixture.request_context_with_admin( - f"/?library_ids={library_not_allowed.id}", + f"/?library_uuids={library_not_allowed.uuid}", admin=admin, ) as ctx: mock_qs_arns.return_value = arns @@ -160,7 +162,7 @@ def test_generate_quicksight_url_errors( ) with quicksight_fixture.request_context_with_admin( - f"/?library_ids={library.id}", + f"/?library_uuids={library.uuid}", admin=admin, ) as ctx: # Bad response from boto @@ -171,7 +173,7 @@ def test_generate_quicksight_url_errors( ctrl.generate_quicksight_url("primary") assert ( raised.value.problem_detail.detail - == "Error while fetching the Quisksight Embed url." + == "Error while fetching the Quicksight Embed url." ) # 200 status, but no url @@ -182,7 +184,7 @@ def test_generate_quicksight_url_errors( ctrl.generate_quicksight_url("primary") assert ( raised.value.problem_detail.detail - == "Error while fetching the Quisksight Embed url." + == "Error while fetching the Quicksight Embed url." ) # Boto threw an error @@ -193,7 +195,7 @@ def test_generate_quicksight_url_errors( ctrl.generate_quicksight_url("primary") assert ( raised.value.problem_detail.detail - == "Error while fetching the Quisksight Embed url." + == "Error while fetching the Quicksight Embed url." ) def test_get_dashboard_names(self, quicksight_fixture: QuickSightControllerFixture): From d1b51551c480d2bbaec111cd9a6bb052a6c50810 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 5 Dec 2023 13:46:28 -0400 Subject: [PATCH 204/262] Add the details of the hold notification to the body of the notification. 
(#1553) --- core/util/notifications.py | 8 +- tests/core/util/test_notifications.py | 119 ++++++++++++++------------ 2 files changed, 68 insertions(+), 59 deletions(-) diff --git a/core/util/notifications.py b/core/util/notifications.py index 4c11f9af74..cc8729e269 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -195,9 +195,11 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: loans_api = f"{url}/{hold.patron.library.short_name}/loans" work: Work = hold.work identifier: Identifier = hold.license_pool.identifier - title = f'Your hold on "{work.title}" is available!' + title = "Your hold is available!" + body = f'Your hold on "{work.title}" is available!' data = dict( title=title, + body=body, event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, loans_endpoint=loans_api, identifier=identifier.identifier, @@ -209,7 +211,9 @@ def send_holds_notifications(cls, holds: list[Hold]) -> list[str]: if hold.patron.authorization_identifier: data["authorization_identifier"] = hold.patron.authorization_identifier - resp = cls.send_messages(tokens, messaging.Notification(title=title), data) + resp = cls.send_messages( + tokens, messaging.Notification(title=title, body=body), data + ) if len(resp) > 0: # Atleast one notification succeeded hold.patron_last_notified = utc_now().date() diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index 07dd2af3f3..2d7b7fe6d2 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -1,7 +1,7 @@ import logging import re from datetime import datetime -from typing import Generator +from typing import Any, Generator from unittest import mock from unittest.mock import MagicMock @@ -14,7 +14,7 @@ from requests_mock import Mocker from core.config import Configuration -from core.model import create, get_one, get_one_or_create +from core.model import Hold, create, get_one, get_one_or_create from core.model.configuration import ConfigurationSetting from core.model.constants import NotificationConstants from core.model.devicetokens import DeviceToken, DeviceTokenTypes @@ -272,68 +272,73 @@ def test_holds_notification(self, push_notf_fixture: PushNotificationsFixture): work1: Work = db.work(with_license_pool=True) work2: Work = db.work(with_license_pool=True) p1 = work1.active_license_pool() + assert p1 is not None p2 = work2.active_license_pool() - if p1 and p2: # mypy complains if we don't do this - hold1, _ = p1.on_hold_to(patron1, position=0) - hold2, _ = p2.on_hold_to(patron2, position=0) - - with mock.patch("core.util.notifications.messaging") as messaging: + assert p2 is not None + hold1, _ = p1.on_hold_to(patron1, position=0) + hold2, _ = p2.on_hold_to(patron2, position=0) + + with mock.patch("core.util.notifications.messaging") as mock_messaging: + # Mock the notification method to return the kwargs passed to it + # so that we can make sure we are making the expected calls + mock_messaging.Notification.side_effect = lambda **kwargs: kwargs PushNotifications.send_holds_notifications([hold1, hold2]) assert ( hold1.patron_last_notified == hold2.patron_last_notified == utc_now().date() ) loans_api = "http://localhost/default/loans" - assert messaging.Message.call_count == 3 - assert messaging.Message.call_args_list == [ - mock.call( - token="test-token-1", - notification=messaging.Notification( - title=f'Your hold on "{work1.title}" is available!', - ), - data=dict( - title=f'Your hold on "{work1.title}" is available!', - 
event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, - loans_endpoint=loans_api, - external_identifier=hold1.patron.external_identifier, - authorization_identifier=hold1.patron.authorization_identifier, - identifier=hold1.license_pool.identifier.identifier, - type=hold1.license_pool.identifier.type, - library=hold1.patron.library.short_name, - ), - ), - mock.call( - token="test-token-2", - notification=messaging.Notification( - title=f'Your hold on "{work1.title}" is available!', - ), - data=dict( - title=f'Your hold on "{work1.title}" is available!', - event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, - loans_endpoint=loans_api, - external_identifier=hold1.patron.external_identifier, - authorization_identifier=hold1.patron.authorization_identifier, - identifier=hold1.license_pool.identifier.identifier, - type=hold1.license_pool.identifier.type, - library=hold1.patron.library.short_name, - ), - ), - mock.call( - token="test-token-3", - notification=messaging.Notification( - title=f'Your hold on "{work2.title}" is available!', - ), - data=dict( - title=f'Your hold on "{work2.title}" is available!', - event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, - loans_endpoint=loans_api, - external_identifier=hold2.patron.external_identifier, - identifier=hold2.license_pool.identifier.identifier, - type=hold2.license_pool.identifier.type, - library=hold2.patron.library.short_name, - ), - ), - ] + + def assert_message_call( + actual: Any, + token: str, + work: Work, + hold: Hold, + include_auth_id: bool = True, + ) -> None: + data = dict( + title="Your hold is available!", + body=f'Your hold on "{work.title}" is available!', + event_type=NotificationConstants.HOLD_AVAILABLE_TYPE, + loans_endpoint=loans_api, + identifier=hold.license_pool.identifier.identifier, + type=hold.license_pool.identifier.type, + library=hold.patron.library.short_name, + external_identifier=hold.patron.external_identifier, + ) + + if include_auth_id: + data["authorization_identifier"] = hold.patron.authorization_identifier + + notification = dict( + title=data["title"], + body=data["body"], + ) + + assert actual == mock.call( + token=token, + notification=notification, + data=data, + ) + + # We should have sent 3 messages, one for each token + assert mock_messaging.Message.call_count == 3 + + # We should have sent 2 notifications, one for each patron. + # Because patron1 has 2 tokens, they will get the same notification for + # each token. + assert mock_messaging.Notification.call_count == 2 + + [ + message_call1, + message_call2, + message_call3, + ] = mock_messaging.Message.call_args_list + assert_message_call(message_call1, "test-token-1", work1, hold1) + assert_message_call(message_call2, "test-token-2", work1, hold1) + assert_message_call( + message_call3, "test-token-3", work2, hold2, include_auth_id=False + ) def test_send_messages( self, From 40aca33e6a2938fd882e1b5a6acd6fd9bf98daa9 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Tue, 5 Dec 2023 15:55:03 -0400 Subject: [PATCH 205/262] MARC export by collection (PP-59) (#1547) MARC files should be generated and grouped by Collection, rather then lane and should be able to be enabled / disabled on a collection by collection basis. 
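
A rough sketch of the queries this per-collection model enables (illustrative only: the helper
names below are hypothetical and the session setup is assumed; the `export_marc_records` flag and
the `MarcFile` columns come from the migration and models added in this patch):

    from sqlalchemy import select

    from core.model import Collection, MarcFile


    def exportable_collections(session):
        # Collections that have opted in to MARC record generation.
        return session.execute(
            select(Collection).where(Collection.export_marc_records == True)
        ).scalars().all()


    def latest_full_export(session, library):
        # Most recent full export for a library; delta exports carry a non-null `since`.
        return session.execute(
            select(MarcFile)
            .where(MarcFile.library == library, MarcFile.since == None)
            .order_by(MarcFile.created.desc())
        ).scalars().first()

The download page added in api/controller_marc.py below follows the same pattern, joining
MarcFile to Collection and grouping results by collection name rather than by lane.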
--- ..._0039f3f12014_marc_export_by_collection.py | 62 ++ api/admin/controller/collection_settings.py | 10 + api/admin/controller/work_editor.py | 2 - api/axis.py | 4 +- api/bibliotheca.py | 4 +- api/circulation.py | 22 +- api/controller.py | 108 +-- api/controller_marc.py | 181 +++++ api/enki.py | 3 +- api/marc.py | 102 --- api/overdrive.py | 3 +- bin/marc_record_coverage | 12 - core/coverage.py | 14 - core/lane.py | 12 +- core/marc.py | 377 +++++----- core/model/__init__.py | 13 +- core/model/cachedfeed.py | 15 +- core/model/collection.py | 2 + core/model/coverage.py | 1 - core/model/library.py | 10 +- core/model/marcfile.py | 55 ++ core/model/resource.py | 12 +- core/model/work.py | 19 +- core/opds_import.py | 9 +- core/scripts.py | 3 - core/service/storage/s3.py | 13 +- core/util/uuid.py | 25 + docker/services/cron/cron.d/circulation | 3 - pyproject.toml | 2 + scripts.py | 182 +++-- tests/api/test_controller_marc.py | 379 ++++++---- tests/api/test_marc.py | 294 -------- tests/api/test_scripts.py | 559 ++++++++++----- tests/core/models/test_marcfile.py | 42 ++ tests/core/models/test_work.py | 18 +- tests/core/service/storage/test_s3.py | 26 +- tests/core/test_coverage.py | 20 - tests/core/test_marc.py | 673 ++++++++++-------- tests/core/test_opds_import.py | 2 +- tests/core/test_scripts.py | 4 +- tests/core/util/test_uuid.py | 40 ++ tests/fixtures/s3.py | 29 +- tests/migration/test_20231124_1c14468b74ce.py | 2 +- 43 files changed, 1824 insertions(+), 1544 deletions(-) create mode 100644 alembic/versions/20231128_0039f3f12014_marc_export_by_collection.py create mode 100644 api/controller_marc.py delete mode 100644 api/marc.py delete mode 100755 bin/marc_record_coverage create mode 100644 core/model/marcfile.py create mode 100644 core/util/uuid.py delete mode 100644 tests/api/test_marc.py create mode 100644 tests/core/models/test_marcfile.py create mode 100644 tests/core/util/test_uuid.py diff --git a/alembic/versions/20231128_0039f3f12014_marc_export_by_collection.py b/alembic/versions/20231128_0039f3f12014_marc_export_by_collection.py new file mode 100644 index 0000000000..9f1e1a0eb3 --- /dev/null +++ b/alembic/versions/20231128_0039f3f12014_marc_export_by_collection.py @@ -0,0 +1,62 @@ +"""MARC Export by collection. + +Revision ID: 0039f3f12014 +Revises: 1c14468b74ce +Create Date: 2023-11-28 20:19:55.520740+00:00 + +""" +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from alembic import op + +# revision identifiers, used by Alembic. 
+revision = "0039f3f12014" +down_revision = "1c14468b74ce" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + op.create_table( + "marcfiles", + sa.Column("id", postgresql.UUID(as_uuid=True), nullable=False), + sa.Column("library_id", sa.Integer(), nullable=True), + sa.Column("collection_id", sa.Integer(), nullable=True), + sa.Column("key", sa.Unicode(), nullable=False), + sa.Column("created", sa.DateTime(timezone=True), nullable=False), + sa.Column("since", sa.DateTime(timezone=True), nullable=True), + sa.ForeignKeyConstraint( + ["collection_id"], + ["collections.id"], + ondelete="SET NULL", + ), + sa.ForeignKeyConstraint( + ["library_id"], + ["libraries.id"], + ondelete="SET NULL", + ), + sa.PrimaryKeyConstraint("id"), + ) + op.create_index( + op.f("ix_marcfiles_collection_id"), "marcfiles", ["collection_id"], unique=False + ) + op.create_index( + op.f("ix_marcfiles_created"), "marcfiles", ["created"], unique=False + ) + op.create_index( + op.f("ix_marcfiles_library_id"), "marcfiles", ["library_id"], unique=False + ) + op.add_column( + "collections", sa.Column("export_marc_records", sa.Boolean(), nullable=True) + ) + op.execute("UPDATE collections SET export_marc_records = 'f'") + op.alter_column("collections", "export_marc_records", nullable=False) + + +def downgrade() -> None: + op.drop_column("collections", "export_marc_records") + op.drop_index(op.f("ix_marcfiles_library_id"), table_name="marcfiles") + op.drop_index(op.f("ix_marcfiles_created"), table_name="marcfiles") + op.drop_index(op.f("ix_marcfiles_collection_id"), table_name="marcfiles") + op.drop_table("marcfiles") diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index c366c144eb..60da10aec8 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -43,7 +43,11 @@ def configured_service_info( if service_info: # Add 'marked_for_deletion' to the service info service_info["marked_for_deletion"] = service.collection.marked_for_deletion + service_info["parent_id"] = service.collection.parent_id + service_info["settings"]["export_marc_records"] = str( + service.collection.export_marc_records + ).lower() if user and user.can_see_collection(service.collection): return service_info return None @@ -88,6 +92,9 @@ def process_post(self) -> Union[Response, ProblemDetail]: form_data = flask.request.form libraries_data = self.get_libraries_data(form_data) parent_id = form_data.get("parent_id", None, int) + export_marc_records = ( + form_data.get("export_marc_records", None, str) == "true" + ) integration, protocol, response_code = self.get_service(form_data) impl_cls = self.registry[protocol] @@ -110,6 +117,9 @@ def process_post(self) -> Union[Response, ProblemDetail]: else: settings_class = impl_cls.settings_class() + # Set export_marc_records flag on the collection + integration.collection.export_marc_records = export_marc_records + # Update settings validated_settings = ProcessFormData.get_settings(settings_class, form_data) integration.settings_dict = validated_settings.dict() diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index bc79a1bfe4..b79fdba327 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -339,7 +339,6 @@ def edit(self, identifier_type, identifier): # problem the user is trying to fix. 
policy = PresentationCalculationPolicy( classify=True, - regenerate_marc_record=True, update_search_index=True, calculate_quality=changed_rating, choose_summary=changed_summary, @@ -621,7 +620,6 @@ def edit_classifications(self, identifier_type, identifier): # Update presentation policy = PresentationCalculationPolicy( classify=True, - regenerate_marc_record=True, update_search_index=True, ) work.calculate_presentation(policy=policy) diff --git a/api/axis.py b/api/axis.py index 567be25189..faf8a50d4e 100644 --- a/api/axis.py +++ b/api/axis.py @@ -40,6 +40,7 @@ from api.circulation import ( APIAwareFulfillmentInfo, BaseCirculationAPI, + BaseCirculationApiSettings, BaseCirculationLoanSettings, CirculationInternalFormatsMixin, FulfillmentInfo, @@ -54,7 +55,6 @@ from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider, CoverageFailure from core.integration.settings import ( - BaseSettings, ConfigurationFormItem, ConfigurationFormItemType, FormField, @@ -109,7 +109,7 @@ class Axis360APIConstants: } -class Axis360Settings(BaseSettings): +class Axis360Settings(BaseCirculationApiSettings): username: str = FormField( form=ConfigurationFormItem(label=_("Username"), required=True) ) diff --git a/api/bibliotheca.py b/api/bibliotheca.py index a66df81e45..56b6f2e342 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -22,6 +22,7 @@ from api.circulation import ( BaseCirculationAPI, + BaseCirculationApiSettings, BaseCirculationLoanSettings, FulfillmentInfo, HoldInfo, @@ -35,7 +36,6 @@ from core.config import CannotLoadConfiguration from core.coverage import BibliographicCoverageProvider from core.integration.settings import ( - BaseSettings, ConfigurationFormItem, ConfigurationFormItemType, FormField, @@ -82,7 +82,7 @@ from core.util.xmlparser import XMLParser, XMLProcessor -class BibliothecaSettings(BaseSettings): +class BibliothecaSettings(BaseCirculationApiSettings): username: str = FormField( form=ConfigurationFormItem( label=_("Account ID"), diff --git a/api/circulation.py b/api/circulation.py index 097f4f2ed4..e96148f5cd 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -559,7 +559,21 @@ def internal_format(self, delivery_mechanism: LicensePoolDeliveryMechanism) -> s return internal_format -SettingsType = TypeVar("SettingsType", bound=BaseSettings, covariant=True) +class BaseCirculationApiSettings(BaseSettings): + _additional_form_fields = { + "export_marc_records": ConfigurationFormItem( + label="Generate MARC Records", + type=ConfigurationFormItemType.SELECT, + description="Generate MARC Records for this collection. This setting only applies if a MARC Exporter is configured.", + options={ + "false": "Do not generate MARC records", + "true": "Generate MARC records", + }, + ) + } + + +SettingsType = TypeVar("SettingsType", bound=BaseCirculationApiSettings, covariant=True) LibrarySettingsType = TypeVar("LibrarySettingsType", bound=BaseSettings, covariant=True) @@ -710,7 +724,7 @@ def update_availability(self, licensepool: LicensePool) -> None: ... 
-CirculationApiType = BaseCirculationAPI[BaseSettings, BaseSettings] +CirculationApiType = BaseCirculationAPI[BaseCirculationApiSettings, BaseSettings] class PatronActivityCirculationAPI( @@ -1446,7 +1460,9 @@ def patron_activity( class PatronActivityThread(Thread): def __init__( self, - api: PatronActivityCirculationAPI[BaseSettings, BaseSettings], + api: PatronActivityCirculationAPI[ + BaseCirculationApiSettings, BaseSettings + ], patron: Patron, pin: str, ) -> None: diff --git a/api/controller.py b/api/controller.py index bf3a2dbdaf..1bd1df4f9e 100644 --- a/api/controller.py +++ b/api/controller.py @@ -5,7 +5,6 @@ import logging import os import urllib.parse -from collections import defaultdict from time import mktime from typing import TYPE_CHECKING, Any from wsgiref.handlers import format_date_time @@ -30,6 +29,7 @@ from api.circulation import CirculationAPI from api.circulation_exceptions import * from api.config import CannotLoadConfiguration, Configuration +from api.controller_marc import MARCRecordController from api.custom_index import CustomIndexView from api.lanes import ( ContributorFacets, @@ -52,7 +52,6 @@ from api.odl2 import ODL2API from api.problem_details import * from api.saml.controller import SAMLController -from core.analytics import Analytics from core.app_server import ApplicationVersionController from core.app_server import URNLookupController as CoreURNLookupController from core.app_server import ( @@ -69,9 +68,7 @@ ) from core.feed.navigation import NavigationFeed from core.feed.opds import NavigationFacets -from core.integration.goals import Goals from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList -from core.marc import MARCExporter from core.metadata_layer import ContributorData from core.model import ( Annotation, @@ -215,10 +212,11 @@ class CirculationManager: def __init__( self, _db, - analytics: Analytics = Provide[Services.analytics.analytics], + services: Services = Provide[Services], ): self._db = _db - self.analytics = analytics + self.services = services + self.analytics = services.analytics.analytics() self.site_configuration_last_update = ( Configuration.site_configuration_last_update(self._db, timeout=0) ) @@ -406,7 +404,7 @@ def setup_one_time_controllers(self): """ self.index_controller = IndexController(self) self.opds_feeds = OPDSFeedController(self) - self.marc_records = MARCRecordController(self) + self.marc_records = MARCRecordController(self.services.storage.public()) self.loans = LoanController(self) self.annotations = AnnotationController(self) self.urn_lookup = URNLookupController(self) @@ -1261,102 +1259,6 @@ class FeedRequestParameters: problem: ProblemDetail | None = None -class MARCRecordController(CirculationManagerController): - DOWNLOAD_TEMPLATE = """ - - - -%(body)s - -""" - - def download_page(self): - library = flask.request.library - body = "
<h2>Download MARC files for %s</h2>" % library.name
-        time_format = "%B %-d, %Y"
-
-        # Check if a MARC exporter is configured, so we can show a
-        # message if it's not.
-        integration_query = (
-            select(IntegrationLibraryConfiguration)
-            .join(IntegrationConfiguration)
-            .where(
-                IntegrationConfiguration.goal == Goals.CATALOG_GOAL,
-                IntegrationConfiguration.protocol == MARCExporter.__name__,
-                IntegrationLibraryConfiguration.library == library,
-            )
-        )
-
-        session = Session.object_session(library)
-        integration = session.execute(integration_query).one_or_none()
-
-        if not integration:
-            body += (
-                "<p>"
-                + _("No MARC exporter is currently configured for this library.")
-                + "</p>"
-            )
-
-        if len(library.cachedmarcfiles) < 1 and integration:
-            body += "<p>" + _("MARC files aren't ready to download yet.") + "</p>"
-
-        files_by_lane = defaultdict(dict)
-        for file in library.cachedmarcfiles:
-            if file.start_time == None:
-                files_by_lane[file.lane]["full"] = file
-            else:
-                if not files_by_lane[file.lane].get("updates"):
-                    files_by_lane[file.lane]["updates"] = []
-                files_by_lane[file.lane]["updates"].append(file)
-
-        # TODO: By default the MARC script only caches one level of lanes,
-        # so sorting by priority is good enough.
-        lanes = sorted(
-            list(files_by_lane.keys()), key=lambda x: x.priority if x else -1
-        )
-
-        for lane in lanes:
-            files = files_by_lane[lane]
-            body += "<section>"
-            body += "<h3>%s</h3>" % (lane.display_name if lane else _("All Books"))
-            if files.get("full"):
-                file = files.get("full")
-                full_url = file.representation.mirror_url
-                full_label = _(
-                    "Full file - last updated %(update_time)s",
-                    update_time=file.end_time.strftime(time_format),
-                )
-                body += '<a href="{}">{}</a>'.format(
-                    files.get("full").representation.mirror_url,
-                    full_label,
-                )
-
-            if files.get("updates"):
-                body += "<h4>%s</h4>" % _("Update-only files")
-                body += "<ul>"
-                files.get("updates").sort(key=lambda x: x.end_time)
-                for update in files.get("updates"):
-                    update_url = update.representation.mirror_url
-                    update_label = _(
-                        "Updates from %(start_time)s to %(end_time)s",
-                        start_time=update.start_time.strftime(time_format),
-                        end_time=update.end_time.strftime(time_format),
-                    )
-                    body += '<li><a href="{}">{}</a></li>'.format(
-                        update_url,
-                        update_label,
-                    )
-                body += "</ul>"
-
-            body += "</section>"
-            body += "<br />
" - - html = self.DOWNLOAD_TEMPLATE % dict(body=body) - headers = dict() - headers["Content-Type"] = "text/html" - return Response(html, 200, headers) - - class LoanController(CirculationManagerController): def sync(self): """Sync the authenticated patron's loans and holds with all third-party diff --git a/api/controller_marc.py b/api/controller_marc.py new file mode 100644 index 0000000000..007c917ffc --- /dev/null +++ b/api/controller_marc.py @@ -0,0 +1,181 @@ +from __future__ import annotations + +from collections import defaultdict +from dataclasses import dataclass, field +from datetime import datetime +from typing import Dict, Optional + +import flask +from flask import Response +from sqlalchemy import select +from sqlalchemy.orm import Session + +from core.integration.goals import Goals +from core.marc import MARCExporter +from core.model import ( + Collection, + IntegrationConfiguration, + IntegrationLibraryConfiguration, + Library, + MarcFile, +) +from core.service.storage.s3 import S3Service + + +@dataclass +class MarcFileDeltaResult: + key: str + since: datetime + created: datetime + + +@dataclass +class MarcFileFullResult: + key: str + created: datetime + + +@dataclass +class MarcFileCollectionResult: + full: MarcFileFullResult | None = None + deltas: list[MarcFileDeltaResult] = field(default_factory=list) + + +class MARCRecordController: + DOWNLOAD_TEMPLATE = """ + + + +%(body)s + +""" + + def __init__(self, storage_service: Optional[S3Service]) -> None: + self.storage_service = storage_service + + @staticmethod + def library() -> Library: + return flask.request.library # type: ignore[no-any-return,attr-defined] + + @staticmethod + def has_integration(session: Session, library: Library) -> bool: + integration_query = ( + select(IntegrationLibraryConfiguration) + .join(IntegrationConfiguration) + .where( + IntegrationConfiguration.goal == Goals.CATALOG_GOAL, + IntegrationConfiguration.protocol == MARCExporter.__name__, + IntegrationLibraryConfiguration.library == library, + ) + ) + integration = session.execute(integration_query).one_or_none() + return integration is not None + + @staticmethod + def get_files( + session: Session, library: Library + ) -> Dict[str, MarcFileCollectionResult]: + marc_files = session.execute( + select( + IntegrationConfiguration.name, + MarcFile.key, + MarcFile.since, + MarcFile.created, + ) + .select_from(MarcFile) + .join(Collection) + .join(IntegrationConfiguration) + .join(IntegrationLibraryConfiguration) + .where( + MarcFile.library == library, + Collection.export_marc_records == True, + IntegrationLibraryConfiguration.library == library, + ) + .order_by( + IntegrationConfiguration.name, + MarcFile.created.desc(), + ) + ).all() + + files_by_collection: Dict[str, MarcFileCollectionResult] = defaultdict( + MarcFileCollectionResult + ) + for file_row in marc_files: + if file_row.since is None: + full_file_result = MarcFileFullResult( + key=file_row.key, + created=file_row.created, + ) + if files_by_collection[file_row.name].full is not None: + # We already have a newer full file, so skip this one. 
+ continue + files_by_collection[file_row.name].full = full_file_result + else: + delta_file_result = MarcFileDeltaResult( + key=file_row.key, + since=file_row.since, + created=file_row.created, + ) + files_by_collection[file_row.name].deltas.append(delta_file_result) + return files_by_collection + + def download_page_body(self, session: Session, library: Library) -> str: + time_format = "%B %-d, %Y" + + # Check if a MARC exporter is configured, so we can show a + # message if it's not. + integration = self.has_integration(session, library) + + if not integration: + return ( + "

" + + "No MARC exporter is currently configured for this library." + + "

" + ) + + if not self.storage_service: + return "

" + "No storage service is currently configured." + "

" + + # Get the MARC files for this library. + marc_files = self.get_files(session, library) + + if len(marc_files) == 0: + return "

" + "MARC files aren't ready to download yet." + "

" + + body = "" + for collection_name, files in marc_files.items(): + body += "
" + body += f"

{collection_name}

" + if files.full is not None: + file = files.full + full_url = self.storage_service.generate_url(file.key) + full_label = ( + f"Full file - last updated {file.created.strftime(time_format)}" + ) + body += f'{full_label}' + + if files.deltas: + body += f"

Update-only files

" + body += "
    " + for update in files.deltas: + update_url = self.storage_service.generate_url(update.key) + update_label = f"Updates from {update.since.strftime(time_format)} to {update.created.strftime(time_format)}" + body += f'
  • {update_label}
  • ' + body += "
" + + body += "
" + body += "
" + + return body + + def download_page(self) -> Response: + library = self.library() + body = "

Download MARC files for %s

" % library.name + + session = Session.object_session(library) + body += self.download_page_body(session, library) + + html = self.DOWNLOAD_TEMPLATE % dict(body=body) + headers = dict() + headers["Content-Type"] = "text/html" + return Response(html, 200, headers) diff --git a/api/enki.py b/api/enki.py index a3b192828f..baba060812 100644 --- a/api/enki.py +++ b/api/enki.py @@ -14,6 +14,7 @@ from api.circulation import ( BaseCirculationAPI, + BaseCirculationApiSettings, FulfillmentInfo, HoldInfo, LoanInfo, @@ -65,7 +66,7 @@ class EnkiConstants: PRODUCTION_BASE_URL = "https://enkilibrary.org/API/" -class EnkiSettings(BaseSettings): +class EnkiSettings(BaseCirculationApiSettings): url: HttpUrl = FormField( default=EnkiConstants.PRODUCTION_BASE_URL, form=ConfigurationFormItem( diff --git a/api/marc.py b/api/marc.py deleted file mode 100644 index 54ed824f10..0000000000 --- a/api/marc.py +++ /dev/null @@ -1,102 +0,0 @@ -from __future__ import annotations - -import urllib.error -import urllib.parse -import urllib.request - -from pymarc import Field, Record, Subfield -from sqlalchemy import select - -from core.config import Configuration -from core.marc import Annotator, MarcExporterLibrarySettings -from core.model import ( - ConfigurationSetting, - Edition, - Identifier, - Library, - LicensePool, - Session, - Work, -) -from core.model.discovery_service_registration import DiscoveryServiceRegistration - - -class LibraryAnnotator(Annotator): - def __init__(self, library: Library) -> None: - super().__init__() - self.library = library - _db = Session.object_session(library) - self.base_url = ConfigurationSetting.sitewide( - _db, Configuration.BASE_URL_KEY - ).value - - def annotate_work_record( - self, - work: Work, - active_license_pool: LicensePool, - edition: Edition, - identifier: Identifier, - record: Record, - settings: MarcExporterLibrarySettings | None, - ) -> None: - super().annotate_work_record( - work, active_license_pool, edition, identifier, record, settings - ) - - if settings is None: - return - - if settings.organization_code: - self.add_marc_organization_code(record, settings.organization_code) - - if settings.include_summary: - self.add_summary(record, work) - - if settings.include_genres: - self.add_simplified_genres(record, work) - - self.add_web_client_urls(record, self.library, identifier, settings) - - def add_web_client_urls( - self, - record: Record, - library: Library, - identifier: Identifier, - exporter_settings: MarcExporterLibrarySettings, - ) -> None: - _db = Session.object_session(library) - settings = [] - - marc_setting = exporter_settings.web_client_url - if marc_setting: - settings.append(marc_setting) - - settings += [ - s.web_client - for s in _db.execute( - select(DiscoveryServiceRegistration.web_client).where( - DiscoveryServiceRegistration.library == library, - DiscoveryServiceRegistration.web_client != None, - ) - ).all() - ] - - qualified_identifier = urllib.parse.quote( - f"{identifier.type}/{identifier.identifier}", safe="" - ) - - for web_client_base_url in settings: - link = "{}/{}/works/{}".format( - self.base_url, - library.short_name, - qualified_identifier, - ) - encoded_link = urllib.parse.quote(link, safe="") - url = f"{web_client_base_url}/book/{encoded_link}" - record.add_field( - Field( - tag="856", - indicators=["4", "0"], - subfields=[Subfield(code="u", value=url)], - ) - ) diff --git a/api/overdrive.py b/api/overdrive.py index 092c4dad85..9c988a7b56 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -25,6 +25,7 @@ from api.circulation 
import ( BaseCirculationAPI, + BaseCirculationApiSettings, BaseCirculationEbookLoanSettings, CirculationInternalFormatsMixin, DeliveryMechanismInfo, @@ -125,7 +126,7 @@ class OverdriveConstants: ILS_NAME_DEFAULT = "default" -class OverdriveSettings(ConnectionSetting): +class OverdriveSettings(ConnectionSetting, BaseCirculationApiSettings): """The basic Overdrive configuration""" external_account_id: Optional[str] = FormField( diff --git a/bin/marc_record_coverage b/bin/marc_record_coverage deleted file mode 100755 index 45a6c79920..0000000000 --- a/bin/marc_record_coverage +++ /dev/null @@ -1,12 +0,0 @@ -#!/usr/bin/env python -"""Make sure all presentation-ready works have up-to-date MARC records.""" -import os -import sys - -bin_dir = os.path.split(__file__)[0] -package_dir = os.path.join(bin_dir, "..") -sys.path.append(os.path.abspath(package_dir)) -from core.coverage import MARCRecordWorkCoverageProvider -from core.scripts import RunWorkCoverageProviderScript - -RunWorkCoverageProviderScript(MARCRecordWorkCoverageProvider).run() diff --git a/core/coverage.py b/core/coverage.py index 577dacfe6f..bdea56bbd1 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -1528,20 +1528,6 @@ class WorkPresentationProvider(PresentationReadyWorkCoverageProvider): DEFAULT_BATCH_SIZE = 100 -class MARCRecordWorkCoverageProvider(WorkPresentationProvider): - """Make sure all presentation-ready works have an up-to-date MARC - record. - """ - - SERVICE_NAME = "MARC Record Work Coverage Provider" - OPERATION = WorkCoverageRecord.GENERATE_MARC_OPERATION - DEFAULT_BATCH_SIZE = 1000 - - def process_item(self, work): - work.calculate_marc_record() - return work - - class WorkPresentationEditionCoverageProvider(WorkPresentationProvider): """Make sure each Work has an up-to-date presentation edition. diff --git a/core/lane.py b/core/lane.py index ee0f389052..3d252d52ca 100644 --- a/core/lane.py +++ b/core/lane.py @@ -4,7 +4,7 @@ import logging import time from collections import defaultdict -from typing import TYPE_CHECKING, Any, List, Optional +from typing import Any, List, Optional from urllib.parse import quote_plus from flask_babel import lazy_gettext as _ @@ -69,9 +69,6 @@ from core.util.opds_writer import OPDSFeed from core.util.problem_detail import ProblemDetail -if TYPE_CHECKING: - from core.model import CachedMARCFile # noqa: autoflake - class BaseFacets(FacetConstants): """Basic faceting class that doesn't modify a search filter at all. @@ -2716,13 +2713,6 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): # admin interface can see all the lanes, visible or not. _visible = Column("visible", Boolean, default=True, nullable=False) - # A Lane may have many CachedMARCFiles. 
- cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship( - "CachedMARCFile", - back_populates="lane", - cascade="all, delete-orphan", - ) - __table_args__ = (UniqueConstraint("parent_id", "display_name"),) def __init__(self, *args, **kwargs): diff --git a/core/marc.py b/core/marc.py index 0cf262f8be..efc8a3b04a 100644 --- a/core/marc.py +++ b/core/marc.py @@ -1,16 +1,20 @@ from __future__ import annotations import re +import urllib.parse from datetime import datetime from io import BytesIO -from typing import Callable, Mapping, Optional, Tuple +from typing import List, Mapping, Optional, Tuple +from uuid import UUID, uuid4 +import pytz from pydantic import NonNegativeInt from pymarc import Field, Record, Subfield +from sqlalchemy import select +from sqlalchemy.engine import ScalarResult from sqlalchemy.orm.session import Session from core.classifier import Classifier -from core.external_search import ExternalSearchIndex, Filter, SortKeyPagination from core.integration.base import HasLibraryIntegrationConfiguration from core.integration.settings import ( BaseSettings, @@ -18,30 +22,29 @@ ConfigurationFormItemType, FormField, ) -from core.lane import BaseFacets, Lane, WorkList from core.model import ( - CachedMARCFile, + Collection, DeliveryMechanism, Edition, Identifier, Library, LicensePool, + MarcFile, Representation, Work, - get_one_or_create, + create, ) -from core.service.storage.s3 import MultipartS3ContextManager, S3Service +from core.service.storage.s3 import S3Service from core.util import LanguageCodes from core.util.datetime_helpers import utc_now from core.util.log import LoggerMixin +from core.util.uuid import uuid_encode class Annotator(LoggerMixin): """The Annotator knows how to add information about a Work to a MARC record.""" - marc_cache_field = Work.marc_record.name - # From https://www.loc.gov/standards/valuelist/marctarget.html AUDIENCE_TERMS: Mapping[str, str] = { Classifier.AUDIENCE_CHILDREN: "Juvenile", @@ -63,17 +66,33 @@ class Annotator(LoggerMixin): (Representation.PDF_MEDIA_TYPE, DeliveryMechanism.ADOBE_DRM): "Adobe PDF eBook", } + def __init__( + self, + cm_url: str, + library_short_name: str, + web_client_urls: List[str], + organization_code: Optional[str], + include_summary: bool, + include_genres: bool, + ) -> None: + self.cm_url = cm_url + self.library_short_name = library_short_name + self.web_client_urls = web_client_urls + self.organization_code = organization_code + self.include_summary = include_summary + self.include_genres = include_genres + def annotate_work_record( self, + revised: bool, work: Work, active_license_pool: LicensePool, edition: Edition, identifier: Identifier, - record: Record, - settings: MarcExporterLibrarySettings | None, - ) -> None: + ) -> Record: """Add metadata from this work to a MARC record. + :param revised: Whether this record is being revised. :param work: The Work whose record is being annotated. :param active_license_pool: Of all the LicensePools associated with this Work, the client has expressed interest in this one. @@ -81,19 +100,56 @@ def annotate_work_record( metadata with this entry. :param identifier: Of all the Identifiers associated with this Work, the client has expressed interest in this one. - :param record: A MARCRecord object to be annotated. + + :return: A pymarc Record object. 
""" + record = Record(leader=self.leader(revised), force_utf8=True) + self.add_control_fields(record, identifier, active_license_pool, edition) + self.add_isbn(record, identifier) + + # TODO: The 240 and 130 fields are for translated works, so they can be grouped even + # though they have different titles. We do not group editions of the same work in + # different languages, so we can't use those yet. + + self.add_title(record, edition) + self.add_contributors(record, edition) + self.add_publisher(record, edition) + self.add_physical_description(record, edition) + self.add_audience(record, work) + self.add_series(record, edition) + self.add_system_details(record) + self.add_ebooks_subject(record) self.add_distributor(record, active_license_pool) self.add_formats(record, active_license_pool) + if self.organization_code: + self.add_marc_organization_code(record, self.organization_code) + + if self.include_summary: + self.add_summary(record, work) + + if self.include_genres: + self.add_genres(record, work) + + self.add_web_client_urls( + record, + identifier, + self.library_short_name, + self.cm_url, + self.web_client_urls, + ) + + return record + @classmethod - def leader(cls, work: Work) -> str: + def leader(cls, revised: bool) -> str: # The record length is automatically updated once fields are added. initial_record_length = "00000" - record_status = "n" # New record - if getattr(work, cls.marc_cache_field): + if revised: record_status = "c" # Corrected or revised + else: + record_status = "n" # New record # Distributors consistently seem to use type "a" - language material - for # ebooks, though there is also type "m" for computer files. @@ -469,7 +525,7 @@ def add_summary(cls, record: Record, work: Work) -> None: ) @classmethod - def add_simplified_genres(cls, record: Record, work: Work) -> None: + def add_genres(cls, record: Record, work: Work) -> None: """Create subject fields for this work.""" genres = work.genres @@ -498,19 +554,34 @@ def add_ebooks_subject(cls, record: Record) -> None: ) ) + @classmethod + def add_web_client_urls( + cls, + record: Record, + identifier: Identifier, + library_short_name: str, + cm_url: str, + web_client_urls: List[str], + ) -> None: + qualified_identifier = urllib.parse.quote( + f"{identifier.type}/{identifier.identifier}", safe="" + ) -class MARCExporterFacets(BaseFacets): - """A faceting object used to configure the search engine so that - it only works updated since a certain time. 
- """ - - def __init__(self, start_time: Optional[datetime]): - self.start_time = start_time - - def modify_search_filter(self, filter: Filter) -> None: - filter.order = self.SORT_ORDER_TO_OPENSEARCH_FIELD_NAME[self.ORDER_LAST_UPDATE] - filter.order_ascending = True - filter.updated_after = self.start_time + for web_client_base_url in web_client_urls: + link = "{}/{}/works/{}".format( + cm_url, + library_short_name, + qualified_identifier, + ) + encoded_link = urllib.parse.quote(link, safe="") + url = f"{web_client_base_url}/book/{encoded_link}" + record.add_field( + Field( + tag="856", + indicators=["4", "0"], + subfields=[Subfield(code="u", value=url)], + ) + ) class MarcExporterSettings(BaseSettings): @@ -579,7 +650,8 @@ class MarcExporterLibrarySettings(BaseSettings): class MARCExporter( HasLibraryIntegrationConfiguration[ MarcExporterSettings, MarcExporterLibrarySettings - ] + ], + LoggerMixin, ): """Turn a work into a record for a MARC file.""" @@ -589,14 +661,10 @@ class MARCExporter( def __init__( self, _db: Session, - library: Library, - settings: MarcExporterSettings, - library_settings: MarcExporterLibrarySettings, + storage_service: S3Service, ): self._db = _db - self.library = library - self.settings = settings - self.library_settings = library_settings + self.storage_service = storage_service @classmethod def label(cls) -> str: @@ -619,16 +687,11 @@ def library_settings_class(cls) -> type[MarcExporterLibrarySettings]: @classmethod def create_record( cls, + revised: bool, work: Work, - annotator: Annotator | Callable[[], Annotator], - settings: MarcExporterSettings | None = None, - library_settings: MarcExporterLibrarySettings | None = None, - force_create: bool = False, + annotator: Annotator, ) -> Optional[Record]: """Build a complete MARC record for a given work.""" - if callable(annotator): - annotator = annotator() - pool = work.active_license_pool() if not pool: return None @@ -636,161 +699,127 @@ def create_record( edition = pool.presentation_edition identifier = pool.identifier - _db = Session.object_session(work) - - record = None - existing_record = getattr(work, annotator.marc_cache_field) - if existing_record and not force_create: - record = Record(data=existing_record.encode("utf-8"), force_utf8=True) - - if not record: - record = Record(leader=annotator.leader(work), force_utf8=True) - annotator.add_control_fields(record, identifier, pool, edition) - annotator.add_isbn(record, identifier) - - # TODO: The 240 and 130 fields are for translated works, so they can be grouped even - # though they have different titles. We do not group editions of the same work in - # different languages, so we can't use those yet. - - annotator.add_title(record, edition) - annotator.add_contributors(record, edition) - annotator.add_publisher(record, edition) - annotator.add_physical_description(record, edition) - annotator.add_audience(record, work) - annotator.add_series(record, edition) - annotator.add_system_details(record) - annotator.add_ebooks_subject(record) - - data = record.as_marc() - setattr(work, annotator.marc_cache_field, data.decode("utf8")) - - # Add additional fields that should not be cached. 
- annotator.annotate_work_record( - work, pool, edition, identifier, record, settings=library_settings - ) - return record + return annotator.annotate_work_record(revised, work, pool, edition, identifier) + + @staticmethod + def _date_to_string(date: datetime) -> str: + return date.astimezone(pytz.UTC).strftime("%Y-%m-%d") def _file_key( self, + uuid: UUID, library: Library, - lane: Lane | WorkList, - end_time: datetime, - start_time: Optional[datetime] = None, + collection: Collection, + creation_time: datetime, + since_time: Optional[datetime] = None, ) -> str: - """The path to the hosted MARC file for the given library, lane, + """The path to the hosted MARC file for the given library, collection, and date range.""" - root = str(library.short_name) - if start_time: - time_part = str(start_time) + "-" + str(end_time) + root = "marc" + short_name = str(library.short_name) + creation = self._date_to_string(creation_time) + + if since_time: + file_type = f"delta.{self._date_to_string(since_time)}.{creation}" else: - time_part = str(end_time) - parts = [root, time_part, lane.display_name] - return "/".join(parts) + ".mrc" + file_type = f"full.{creation}" + + uuid_encoded = uuid_encode(uuid) + collection_name = collection.name.replace(" ", "_") + filename = f"{collection_name}.{file_type}.{uuid_encoded}.mrc" + parts = [root, short_name, filename] + return "/".join(parts) + + def query_works( + self, + collection: Collection, + since_time: Optional[datetime], + creation_time: datetime, + batch_size: int, + ) -> ScalarResult: + query = ( + select(Work) + .join(LicensePool) + .join(Collection) + .where( + Collection.id == collection.id, + Work.last_update_time <= creation_time, + ) + ) + + if since_time is not None: + query = query.where(Work.last_update_time >= since_time) + + return self._db.execute(query).unique().yield_per(batch_size).scalars() def records( self, - lane: Lane | WorkList, - annotator: Annotator | Callable[[], Annotator], - storage_service: Optional[S3Service], - start_time: Optional[datetime] = None, - force_refresh: bool = False, - search_engine: Optional[ExternalSearchIndex] = None, - query_batch_size: int = 500, + library: Library, + collection: Collection, + annotator: Annotator, + *, + creation_time: datetime, + since_time: Optional[datetime] = None, + batch_size: int = 500, ) -> None: """ - Create and export a MARC file for the books in a lane. - - :param lane: The Lane to export books from. - :param annotator: The Annotator to use when creating MARC records. - :param storage_service: The storage service integration to use for MARC files. - :param start_time: Only include records that were created or modified after this time. - :param force_refresh: Create new records even when cached records are available. - :param query_batch_size: Number of works to retrieve with a single Opensearch query. + Create and export a MARC file for the books in a collection. """ + uuid = uuid4() + key = self._file_key(uuid, library, collection, creation_time, since_time) - # We store the content, if it's not empty. If it's empty, we create a CachedMARCFile - # and Representation, but don't actually store it. - if storage_service is None: - raise Exception("No storage service is configured") - - search_engine = search_engine or ExternalSearchIndex(self._db) - - # End time is before we start the query, because if any records are changed - # during the processing we may not catch them, and they should be handled - # again on the next run. 
- end_time = utc_now() - - facets = MARCExporterFacets(start_time=start_time) - pagination = SortKeyPagination(size=query_batch_size) - - key = self._file_key(self.library, lane, end_time, start_time) - - with storage_service.multipart( + with self.storage_service.multipart( key, content_type=Representation.MARC_MEDIA_TYPE, ) as upload: this_batch = BytesIO() - while pagination is not None: - # Retrieve one 'page' of works from the search index. - works = lane.works( - self._db, - pagination=pagination, - facets=facets, - search_engine=search_engine, + + works = self.query_works(collection, since_time, creation_time, batch_size) + for work in works: + # Create a record for each work and add it to the MARC file in progress. + record = self.create_record( + since_time is not None, + work, + annotator, ) - for work in works: - # Create a record for each work and add it to the - # MARC file in progress. - record = self.create_record( - work, - annotator, - self.settings, - self.library_settings, - force_refresh, - ) - if record: - record_bytes = record.as_marc() - this_batch.write(record_bytes) - if ( - this_batch.getbuffer().nbytes - >= self.MINIMUM_UPLOAD_BATCH_SIZE_BYTES - ): - # We've reached or exceeded the upload threshold. - # Upload one part of the multipart document. - self._upload_batch(this_batch, upload) - this_batch = BytesIO() - pagination = pagination.next_page + if record: + record_bytes = record.as_marc() + this_batch.write(record_bytes) + if ( + this_batch.getbuffer().nbytes + >= self.MINIMUM_UPLOAD_BATCH_SIZE_BYTES + ): + # We've reached or exceeded the upload threshold. + # Upload one part of the multipart document. + upload.upload_part(this_batch.getvalue()) + this_batch.seek(0) + this_batch.truncate() # Upload the final part of the multi-document, if # necessary. - self._upload_batch(this_batch, upload) # type: ignore[unreachable] + if this_batch.getbuffer().nbytes > 0: + upload.upload_part(this_batch.getvalue()) - representation, ignore = get_one_or_create( - self._db, - Representation, - url=upload.url, - media_type=Representation.MARC_MEDIA_TYPE, - ) - representation.fetched_at = end_time - if not upload.exception: - cached, is_new = get_one_or_create( + if upload.complete: + create( self._db, - CachedMARCFile, - library=self.library, - lane=(lane if isinstance(lane, Lane) else None), - start_time=start_time, - create_method_kwargs=dict(representation=representation), + MarcFile, + id=uuid, + library=library, + collection=collection, + created=creation_time, + since=since_time, + key=key, ) - if not is_new: - cached.representation = representation - cached.end_time = end_time - representation.set_as_mirrored(upload.url) else: - representation.mirror_exception = str(upload.exception) - - def _upload_batch(self, output: BytesIO, upload: MultipartS3ContextManager) -> None: - """Upload a batch of MARC records as one part of a multi-part upload.""" - content = output.getvalue() - if content: - upload.upload_part(content) - output.close() + if upload.exception: + # Log the exception and move on to the next file. We will try again next script run. + self.log.error( + f"Failed to upload MARC file for {library.short_name}/{collection.name}: {upload.exception}", + exc_info=upload.exception, + ) + else: + # There were no records to upload. This is not an error, but we should log it. + self.log.info( + f"No MARC records to upload for {library.short_name}/{collection.name}." 
+ ) diff --git a/core/model/__init__.py b/core/model/__init__.py index 67a4ab9966..8e313232dc 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -211,13 +211,13 @@ class PresentationCalculationPolicy: def __init__( self, + *, choose_edition=True, set_edition_metadata=True, classify=True, choose_summary=True, calculate_quality=True, choose_cover=True, - regenerate_marc_record=False, update_search_index=False, verbose=True, equivalent_identifier_levels=DEFAULT_LEVELS, @@ -238,8 +238,6 @@ def __init__( quality of the Work? :param choose_cover: Should we reconsider which of the available cover images is the best? - :param regenerate_marc_record: Should we regenerate the MARC record - for this Work? :param update_search_index: Should we reindex this Work's entry in the search index? :param verbose: Should we print out information about the work we're @@ -272,11 +270,6 @@ def __init__( self.calculate_quality = calculate_quality self.choose_cover = choose_cover - # Regenerate MARC records, except that they will - # never be generated unless a MARC organization code is set - # in a sitewide configuration setting. - self.regenerate_marc_record = regenerate_marc_record - # Similarly for update_search_index. self.update_search_index = update_search_index @@ -292,7 +285,6 @@ def recalculate_everything(cls): everything, even when it doesn't seem necessary. """ return PresentationCalculationPolicy( - regenerate_marc_record=True, update_search_index=True, ) @@ -522,7 +514,7 @@ def _bulk_operation(self): SAMLFederation, ) from core.model.admin import Admin, AdminRole -from core.model.cachedfeed import CachedMARCFile +from core.model.cachedfeed import _CachedMARCFile_deprecated from core.model.circulationevent import CirculationEvent from core.model.classification import Classification, Genre, Subject from core.model.collection import ( @@ -561,6 +553,7 @@ def _bulk_operation(self): RightsStatus, ) from core.model.listeners import * +from core.model.marcfile import MarcFile from core.model.measurement import Measurement from core.model.patron import ( Annotation, diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py index e21571de1f..ec2525c834 100644 --- a/core/model/cachedfeed.py +++ b/core/model/cachedfeed.py @@ -1,4 +1,3 @@ -# Cached Marc Files from __future__ import annotations from typing import TYPE_CHECKING @@ -13,8 +12,12 @@ from core.model import Library, Representation -class CachedMARCFile(Base): - """A record that a MARC file has been created and cached for a particular lane.""" +class _CachedMARCFile_deprecated(Base): + """ + A record that a MARC file has been created and cached for a particular lane. + + This table is deprecated and will be removed in a future release. + """ __tablename__ = "cachedmarcfiles" id = Column(Integer, primary_key=True) @@ -24,22 +27,18 @@ class CachedMARCFile(Base): library_id = Column(Integer, ForeignKey("libraries.id"), nullable=False, index=True) library: Mapped[Library] = relationship( "Library", - back_populates="cachedmarcfiles", ) lane_id = Column(Integer, ForeignKey("lanes.id"), nullable=True, index=True) lane: Mapped[Lane] = relationship( "Lane", - back_populates="cachedmarcfiles", ) # The representation for this file stores the URL where it was mirrored. 
representation_id = Column( Integer, ForeignKey("representations.id"), nullable=False ) - representation: Mapped[Representation] = relationship( - "Representation", back_populates="marc_file" - ) + representation: Mapped[Representation] = relationship("Representation") start_time = Column(DateTime(timezone=True), nullable=True, index=True) end_time = Column(DateTime(timezone=True), nullable=True, index=True) diff --git a/core/model/collection.py b/core/model/collection.py index e0efae6699..7b4b5b7941 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -136,6 +136,8 @@ class Collection(Base, HasSessionCache): "CustomList", secondary=lambda: collections_customlists, backref="collections" ) + export_marc_records = Column(Boolean, default=False, nullable=False) + # Most data sources offer different catalogs to different # libraries. Data sources in this list offer the same catalog to # every library. diff --git a/core/model/coverage.py b/core/model/coverage.py index e1204fe9ea..cce96fcca9 100644 --- a/core/model/coverage.py +++ b/core/model/coverage.py @@ -614,7 +614,6 @@ class WorkCoverageRecord(Base, BaseCoverageRecord): CLASSIFY_OPERATION = "classify" SUMMARY_OPERATION = "summary" QUALITY_OPERATION = "quality" - GENERATE_MARC_OPERATION = "generate-marc" UPDATE_SEARCH_INDEX_OPERATION = "update-search-index" id = Column(Integer, primary_key=True) diff --git a/core/model/library.py b/core/model/library.py index a2a0e8cc02..28cdc2e087 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -50,9 +50,8 @@ if TYPE_CHECKING: from core.lane import Lane - from core.model import ( # noqa: autoflake + from core.model import ( AdminRole, - CachedMARCFile, CirculationEvent, Collection, ConfigurationSetting, @@ -111,13 +110,6 @@ class Library(Base, HasSessionCache): "AdminRole", back_populates="library", cascade="all, delete-orphan" ) - # A Library may have many CachedMARCFiles. - cachedmarcfiles: Mapped[List[CachedMARCFile]] = relationship( - "CachedMARCFile", - back_populates="library", - cascade="all, delete-orphan", - ) - # A Library may have many CustomLists. custom_lists: Mapped[List[CustomList]] = relationship( "CustomList", backref="library", uselist=True diff --git a/core/model/marcfile.py b/core/model/marcfile.py new file mode 100644 index 0000000000..2658670ad7 --- /dev/null +++ b/core/model/marcfile.py @@ -0,0 +1,55 @@ +from __future__ import annotations + +import uuid +from typing import TYPE_CHECKING + +from sqlalchemy import Column, DateTime, ForeignKey, Integer, Unicode +from sqlalchemy.dialects.postgresql import UUID +from sqlalchemy.orm import Mapped, relationship + +from core.model import Base + +if TYPE_CHECKING: + from core.model import Collection, Library + + +class MarcFile(Base): + """A record that a MARC file has been created and cached for a particular library and collection.""" + + __tablename__ = "marcfiles" + id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4) + + # The library should never be null in normal operation, but if a library is deleted, we don't want to lose the + # record of the MARC file, so we set the library to null. + # TODO: We need a job to clean up these records. 
+ library_id = Column( + Integer, + ForeignKey("libraries.id", ondelete="SET NULL"), + nullable=True, + index=True, + ) + library: Mapped[Library] = relationship( + "Library", + ) + + # The collection should never be null in normal operation, but similar to the library, if a collection is deleted, + # we don't want to lose the record of the MARC file, so we set the collection to null. + # TODO: We need a job to clean up these records. + collection_id = Column( + Integer, + ForeignKey("collections.id", ondelete="SET NULL"), + nullable=True, + index=True, + ) + collection: Mapped[Collection] = relationship( + "Collection", + ) + + # The key in s3 used to store the file. + key = Column(Unicode, nullable=False) + + # The creation date of the file. + created = Column(DateTime(timezone=True), nullable=False, index=True) + + # If the file is a delta, the date of the previous file. If the file is a full file, null. + since = Column(DateTime(timezone=True), nullable=True) diff --git a/core/model/resource.py b/core/model/resource.py index 4aef497ddf..99e6f87f72 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -10,7 +10,7 @@ import traceback from hashlib import md5 from io import BytesIO -from typing import TYPE_CHECKING, Dict, List, Tuple +from typing import Dict, List, Tuple from urllib.parse import quote, urlparse, urlsplit import requests @@ -42,9 +42,6 @@ from core.util.datetime_helpers import utc_now from core.util.http import HTTP -if TYPE_CHECKING: - from core.model import CachedMARCFile - class Resource(Base): """An external resource that may be mirrored locally. @@ -542,13 +539,6 @@ class Representation(Base, MediaTypes): # data root. local_content_path = Column(Unicode) - # A Representation may be a CachedMARCFile. - marc_file: Mapped[CachedMARCFile] = relationship( - "CachedMARCFile", - back_populates="representation", - cascade="all, delete-orphan", - ) - # At any given time, we will have a single representation for a # given URL and media type. __table_args__ = (UniqueConstraint("url", "media_type"),) diff --git a/core/model/work.py b/core/model/work.py index 8ce42a3964..0f3b97dfd8 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -212,12 +212,13 @@ class Work(Base): # A precalculated MARC record containing metadata about this # work that would be relevant to display in a library's public # catalog. - marc_record = Column(String, default=None) + # TODO: This field has been deprecated and will be removed in a future release. + _marc_record = Column("marc_record", String, default=None) # These fields are potentially large and can be deferred if you # don't need all the data in a Work. LARGE_FIELDS = [ - "marc_record", + "_marc_record", "summary_text", ] @@ -1017,9 +1018,6 @@ def calculate_presentation( # change it. 
self.last_update_time = utc_now() - if changed or policy.regenerate_marc_record: - self.calculate_marc_record() - if (changed or policy.update_search_index) and not exclude_search: self.external_index_needs_updating() @@ -1147,17 +1145,6 @@ def _ensure(s): l = [_ensure(s) for s in l] return "\n".join(l) - def calculate_marc_record(self): - from core.marc import Annotator, MARCExporter - - _db = Session.object_session(self) - record = MARCExporter.create_record( - self, annotator=Annotator, force_create=True - ) - WorkCoverageRecord.add_for( - self, operation=WorkCoverageRecord.GENERATE_MARC_OPERATION - ) - def active_license_pool(self, library: Library | None = None) -> LicensePool | None: # The active license pool is the one that *would* be # associated with a loan, were a loan to be issued right diff --git a/core/opds_import.py b/core/opds_import.py index c84ad345f5..e7d482fd2d 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -35,7 +35,13 @@ from pydantic import AnyHttpUrl from sqlalchemy.orm.session import Session -from api.circulation import BaseCirculationAPI, FulfillmentInfo, HoldInfo, LoanInfo +from api.circulation import ( + BaseCirculationAPI, + BaseCirculationApiSettings, + FulfillmentInfo, + HoldInfo, + LoanInfo, +) from api.circulation_exceptions import CurrentlyAvailable, FormatNotAvailable, NotOnHold from api.saml.credential import SAMLCredentialManager from core.classifier import Classifier @@ -112,6 +118,7 @@ class OPDSImporterSettings( ConnectionSetting, SAMLWAYFlessSetttings, FormatPrioritiesSettings, + BaseCirculationApiSettings, ): external_account_id: AnyHttpUrl = FormField( form=ConfigurationFormItem( diff --git a/core/scripts.py b/core/scripts.py index 23b018f5d9..f79677e942 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -1562,7 +1562,6 @@ def do_run(self): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_marc_record=True, update_search_index=True, verbose=True, ) @@ -1731,7 +1730,6 @@ class WorkClassificationScript(WorkPresentationScript): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_marc_record=False, update_search_index=False, ) @@ -1879,7 +1877,6 @@ class WorkOPDSScript(WorkPresentationScript): choose_summary=False, calculate_quality=False, choose_cover=False, - regenerate_marc_record=True, update_search_index=True, ) diff --git a/core/service/storage/s3.py b/core/service/storage/s3.py index 65cb5539ba..39273ac58c 100644 --- a/core/service/storage/s3.py +++ b/core/service/storage/s3.py @@ -1,7 +1,6 @@ from __future__ import annotations import dataclasses -import logging import sys from io import BytesIO from string import Formatter @@ -77,15 +76,13 @@ def __exit__( ) self._upload_abort() self._exception = exc_val - if isinstance(exc_val, (ClientError, BotoCoreError)): - return True - return False + return True def upload_part(self, content: bytes) -> None: if self.complete or self.exception or self.upload_id is None: raise RuntimeError("Upload already complete or aborted.") - logging.info( + self.log.info( f"Uploading part {self.part_number} of {self.key} to {self.bucket}" ) result = self.client.upload_part( @@ -100,7 +97,7 @@ def upload_part(self, content: bytes) -> None: def _upload_complete(self) -> None: if not self.parts: - logging.info(f"Upload of {self.key} was empty.") + self.log.info(f"Upload of {self.key} was empty.") self._upload_abort() elif self.upload_id is None: raise RuntimeError("Upload ID not set.") @@ -114,7 +111,7 @@ def _upload_complete(self) -> None: 
self._complete = True def _upload_abort(self) -> None: - logging.info(f"Aborting upload of {self.key}.") + self.log.info(f"Aborting upload of {self.key}.") if self.upload_id is not None: self.client.abort_multipart_upload( Bucket=self.bucket, @@ -122,7 +119,7 @@ def _upload_abort(self) -> None: UploadId=self.upload_id, ) else: - logging.error("Upload ID not set, unable to abort.") + self.log.error("Upload ID not set, unable to abort.") @property def url(self) -> str: diff --git a/core/util/uuid.py b/core/util/uuid.py new file mode 100644 index 0000000000..07d81774a8 --- /dev/null +++ b/core/util/uuid.py @@ -0,0 +1,25 @@ +from base64 import urlsafe_b64decode +from uuid import UUID + +from core.util.base64 import urlsafe_b64encode + + +def uuid_encode(uuid: UUID) -> str: + """ + Encode a UUID to a URL-safe base64 string with = padding removed, + provides a compact representation of the UUID to use in URLs. + """ + encoded = urlsafe_b64encode(uuid.bytes) + unpadded = encoded.rstrip("=") + return unpadded + + +def uuid_decode(encoded: str) -> UUID: + """ + Decode a URL-safe base64 string to a UUID. Reverse of uuid_encode. + """ + if len(encoded) != 22: + raise ValueError("Invalid base64 string for UUID") + padding = "==" + decoded_bytes = urlsafe_b64decode(encoded + padding) + return UUID(bytes=decoded_bytes) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index 7a8a40f6e0..f600cbcd80 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -32,9 +32,6 @@ HOME=/var/www/circulation # those works. 30 22 * * * root core/bin/run work_classify_unchecked_subjects >> /var/log/cron.log 2>&1 -# If any works have out-of-date OPDS entries or MARC records, rebuild them, -40 23 * * * root core/bin/run marc_record_coverage >> /var/log/cron.log 2>&1 - # Remove miscellaneous expired things from the database 0 2 * * * root core/bin/run database_reaper >> /var/log/cron.log 2>&1 diff --git a/pyproject.toml b/pyproject.toml index 1421c09539..8902bd3a42 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,6 +81,7 @@ module = [ "api.adobe_vendor_id", "api.axis", "api.circulation", + "api.controller_marc", "api.discovery.*", "api.enki", "api.integration.*", @@ -109,6 +110,7 @@ module = [ "core.util.notifications", "core.util.problem_detail", "core.util.string_helpers", + "core.util.uuid", "core.util.worker_pools", "core.util.xmlparser", "tests.fixtures.authenticator", diff --git a/scripts.py b/scripts.py index d1ef3aa4dc..21505ebdc9 100644 --- a/scripts.py +++ b/scripts.py @@ -1,11 +1,12 @@ import argparse +import datetime import logging import os import sys import time from datetime import timedelta from pathlib import Path -from typing import Any, Optional, Sequence, Tuple, Union +from typing import Any, List, Optional, Sequence, Tuple, Type from sqlalchemy import inspect, select from sqlalchemy.engine import Connection @@ -21,7 +22,6 @@ from api.config import CannotLoadConfiguration, Configuration from api.lanes import create_default_lanes from api.local_analytics_exporter import LocalAnalyticsExporter -from api.marc import LibraryAnnotator as MARCLibraryAnnotator from api.novelist import NoveListAPI from api.nyt import NYTBestSellerAPI from api.opds_for_distributors import ( @@ -32,15 +32,17 @@ from api.overdrive import OverdriveAPI from core.external_search import ExternalSearchIndex from core.integration.goals import Goals -from core.lane import Lane, WorkList +from core.lane import Lane +from core.marc import 
Annotator as MarcAnnotator from core.marc import MARCExporter, MarcExporterLibrarySettings, MarcExporterSettings from core.model import ( LOCK_ID_DB_INIT, - CachedMARCFile, CirculationEvent, + Collection, ConfigurationSetting, Contribution, DataSource, + DiscoveryServiceRegistration, Edition, Hold, Identifier, @@ -49,6 +51,7 @@ Library, LicensePool, Loan, + MarcFile, Patron, SessionManager, get_one, @@ -56,7 +59,6 @@ ) from core.scripts import ( IdentifierInputScript, - LaneSweeperScript, LibraryInputScript, OPDSImportScript, PatronInputScript, @@ -148,20 +150,14 @@ def q(self): ) -class CacheMARCFiles(LaneSweeperScript): +class CacheMARCFiles(LibraryInputScript): """Generate and cache MARC files for each input library.""" name = "Cache MARC files" @classmethod def arg_parser(cls, _db: Session) -> argparse.ArgumentParser: # type: ignore[override] - parser = LaneSweeperScript.arg_parser(_db) - parser.add_argument( - "--max-depth", - help="Stop processing lanes once you reach this depth.", - type=int, - default=0, - ) + parser = super().arg_parser(_db) parser.add_argument( "--force", help="Generate new MARC files even if MARC files have already been generated recently enough", @@ -174,18 +170,26 @@ def __init__( self, _db: Optional[Session] = None, cmd_args: Optional[Sequence[str]] = None, + exporter: Optional[MARCExporter] = None, *args: Any, **kwargs: Any, ) -> None: super().__init__(_db, *args, **kwargs) + self.force = False self.parse_args(cmd_args) + self.storage_service = self.services.storage.public() + + self.cm_base_url = ConfigurationSetting.sitewide( + self._db, Configuration.BASE_URL_KEY + ).value + + self.exporter = exporter or MARCExporter(self._db, self.storage_service) def parse_args( self, cmd_args: Optional[Sequence[str]] = None ) -> argparse.Namespace: parser = self.arg_parser(self._db) parsed = parser.parse_args(cmd_args) - self.max_depth = parsed.max_depth self.force = parsed.force return parsed @@ -208,80 +212,134 @@ def settings( return settings, library_settings - def should_process_library(self, library: Library) -> bool: + def process_libraries(self, libraries: Sequence[Library]) -> None: + if not self.storage_service: + self.log.info("No storage service was found.") + return + + super().process_libraries(libraries) + + def get_collections(self, library: Library) -> Sequence[Collection]: + return self._db.scalars( + select(Collection).where( + Collection.libraries.contains(library), + Collection.export_marc_records == True, + ) + ).all() + + def get_web_client_urls( + self, library: Library, url: Optional[str] = None + ) -> List[str]: + """Find web client URLs configured by the registry for this library.""" + urls = [ + s.web_client + for s in self._db.execute( + select(DiscoveryServiceRegistration.web_client).where( + DiscoveryServiceRegistration.library == library, + DiscoveryServiceRegistration.web_client != None, + ) + ).all() + ] + + if url: + urls.append(url) + + return urls + + def process_library( + self, library: Library, annotator_cls: Type[MarcAnnotator] = MarcAnnotator + ) -> None: try: - self.settings(library) - return True + settings, library_settings = self.settings(library) except NoResultFound: - return False + return - def process_library(self, library): - if self.should_process_library(library): - super().process_library(library) - self.log.info("Processed library %s" % library.name) - - def should_process_lane(self, lane): - if isinstance(lane, Lane): - if self.max_depth is not None and lane.depth > self.max_depth: - return False - if lane.size 
== 0: - return False - return True - - def process_lane( - self, lane: Union[Lane, WorkList], exporter: Optional[MARCExporter] = None - ) -> None: - # Generate a MARC file for this lane, if one has not been generated recently enough. - if isinstance(lane, Lane): - library = lane.library - else: - library = lane.get_library(self._db) + self.log.info("Processing library %s" % library.name) - annotator = MARCLibraryAnnotator(library) + update_frequency = int(settings.update_frequency) - if exporter is None: - settings, library_settings = self.settings(library) - exporter = MARCExporter(self._db, library, settings, library_settings) + # Find the collections for this library. + collections = self.get_collections(library) - update_frequency = exporter.settings.update_frequency + # Find web client URLs configured by the registry for this library. + web_client_urls = self.get_web_client_urls( + library, library_settings.web_client_url + ) + + annotator = annotator_cls( + self.cm_base_url, + library.short_name or "", + web_client_urls, + library_settings.organization_code, + library_settings.include_summary, + library_settings.include_genres, + ) + + # We set the creation time to be the start of the batch. Any updates that happen during the batch will be + # included in the next batch. + creation_time = utc_now() + + for collection in collections: + self.process_collection( + library, + collection, + annotator, + update_frequency, + creation_time, + ) + def last_updated( + self, library: Library, collection: Collection + ) -> Optional[datetime.datetime]: + """Find the most recent MarcFile creation time.""" last_updated_file = self._db.execute( - select(CachedMARCFile.end_time) + select(MarcFile.created) .where( - CachedMARCFile.library == library, - CachedMARCFile.lane == (lane if isinstance(lane, Lane) else None), + MarcFile.library == library, + MarcFile.collection == collection, ) - .order_by(CachedMARCFile.end_time.desc()) + .order_by(MarcFile.created.desc()) ).first() - last_update = last_updated_file.end_time if last_updated_file else None + return last_updated_file.created if last_updated_file else None + + def process_collection( + self, + library: Library, + collection: Collection, + annotator: MarcAnnotator, + update_frequency: int, + creation_time: datetime.datetime, + ) -> None: + last_update = self.last_updated(library, collection) if ( not self.force and last_update - and (last_update > utc_now() - timedelta(days=update_frequency)) + and (last_update > creation_time - timedelta(days=update_frequency)) ): self.log.info( - "Skipping lane %s because last update was less than %d days ago" - % (lane.display_name, update_frequency) + f"Skipping collection {collection.name} because last update was less than {update_frequency} days ago" ) return - # Find the storage service - storage_service = self.services.storage.public() - if not storage_service: - self.log.info("No storage service was found.") - return - # First update the file with ALL the records. - exporter.records(lane, annotator, storage_service) + self.exporter.records( + library, collection, annotator, creation_time=creation_time + ) # Then create a new file with changes since the last update. if last_update: - # Allow one day of overlap to ensure we don't miss anything due to script timing. 
- start_time = last_update - timedelta(days=1) + self.exporter.records( + library, + collection, + annotator, + creation_time=creation_time, + since_time=last_update, + ) - exporter.records(lane, annotator, storage_service, start_time=start_time) + self._db.commit() + self.log.info("Processed collection %s" % collection.name) class AdobeAccountIDResetScript(PatronInputScript): diff --git a/tests/api/test_controller_marc.py b/tests/api/test_controller_marc.py index 629da47382..ed0c5c40a9 100644 --- a/tests/api/test_controller_marc.py +++ b/tests/api/test_controller_marc.py @@ -1,173 +1,282 @@ +from __future__ import annotations + import datetime +from typing import Optional +from unittest.mock import MagicMock + +import pytest +from flask import Response +from api.controller_marc import MARCRecordController from core.integration.goals import Goals from core.marc import MARCExporter -from core.model import CachedMARCFile, Representation, create +from core.model import Collection, Library, MarcFile, create +from core.service.storage.s3 import S3Service from core.util.datetime_helpers import utc_now -from tests.fixtures.api_controller import CirculationControllerFixture - +from tests.fixtures.database import DatabaseTransactionFixture -class TestMARCRecordController: - def test_download_page_with_exporter_and_files( - self, circulation_fixture: CirculationControllerFixture - ): - db = circulation_fixture.db - now = utc_now() - yesterday = now - datetime.timedelta(days=1) +class MARCRecordControllerFixture: + def __init__(self, db: DatabaseTransactionFixture): + self.db = db + self.mock_s3_service = MagicMock(spec=S3Service) + self.mock_s3_service.generate_url = lambda x: "http://s3.url/" + x + self.controller = MARCRecordController(self.mock_s3_service) + self.library = db.default_library() + self.collection = db.default_collection() + self.collection.export_marc_records = True - library = db.default_library() - lane = db.lane(display_name="Test Lane") + # stub out the library function to return the default library, + # since we don't have a request context + self.controller.library = lambda: self.library - db.integration_configuration( + def integration(self, library: Optional[Library] = None): + library = library or self.library + return self.db.integration_configuration( MARCExporter.__name__, Goals.CATALOG_GOAL, libraries=[library], ) - rep1, ignore = create( - db.session, - Representation, - url="http://mirror1", - mirror_url="http://mirror1", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=now, + def file( + self, + library: Optional[Library] = None, + collection: Optional[Collection] = None, + key: Optional[str] = None, + created: Optional[datetime.datetime] = None, + since: Optional[datetime.datetime] = None, + ): + key = key or self.db.fresh_str() + created = created or utc_now() + library = library or self.library + collection = collection or self.collection + + return create( + self.db.session, + MarcFile, + library=library, + collection=collection, + created=created, + since=since, + key=key, ) - cache1, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=None, - representation=rep1, - end_time=now, + + def get_response_html(self, response: Response) -> str: + assert response.status_code == 200 + assert response.content_type == "text/html" + html = response.get_data(as_text=True) + assert ("Download MARC files for %s" % self.library.name) in html + return html + + +@pytest.fixture +def marc_record_controller_fixture( + db: 
DatabaseTransactionFixture, +) -> MARCRecordControllerFixture: + return MARCRecordControllerFixture(db) + + +class TestMARCRecordController: + def test_has_integration( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + # No integration is configured. + assert not marc_record_controller_fixture.controller.has_integration( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, ) - rep2, ignore = create( - db.session, - Representation, - url="http://mirror2", - mirror_url="http://mirror2", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=yesterday, + # An integration is configured, but not for this library. + other_library = marc_record_controller_fixture.db.library() + marc_record_controller_fixture.integration(library=other_library) + assert not marc_record_controller_fixture.controller.has_integration( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, ) - cache2, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=lane, - representation=rep2, - end_time=yesterday, + + # An integration is configured for this library. + marc_record_controller_fixture.integration() + assert marc_record_controller_fixture.controller.has_integration( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, ) - rep3, ignore = create( - db.session, - Representation, - url="http://mirror3", - mirror_url="http://mirror3", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=now, + def test_get_files_no_files( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + assert ( + marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + == {} ) - cache3, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=None, - representation=rep3, - end_time=now, - start_time=yesterday, + + def test_get_files_one_collection( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + now = utc_now() + yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) + + # Only a single full file is given, the most recent one. Even + # though there are older full files, they are ignored. + marc_record_controller_fixture.file(created=now) + marc_record_controller_fixture.file(created=yesterday) + + # There are multiple delta files, and they are all returned. + marc_record_controller_fixture.file(created=now, since=yesterday) + marc_record_controller_fixture.file(created=last_week, since=yesterday) + + files = marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, ) - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - - assert "

<h3>All Books</h3>
" in html - assert ( - 'Full file - last updated %s' - % now.strftime("%B %-d, %Y") - in html - ) - assert "

<h3>Update-only files</h3>
" in html - assert ( - 'Updates from %s to %s' - % (yesterday.strftime("%B %-d, %Y"), now.strftime("%B %-d, %Y")) - in html - ) + assert len(files) == 1 + assert files["Default Collection"].full is not None + assert files["Default Collection"].full.created == now - assert "

<h3>Test Lane</h3>
" in html - assert ( - 'Full file - last updated %s' - % yesterday.strftime("%B %-d, %Y") - in html - ) + assert len(files["Default Collection"].deltas) == 2 - def test_download_page_with_exporter_but_no_files( - self, circulation_fixture: CirculationControllerFixture + # The delta files are sorted by their created date, so the latest + # delta file is first. + [delta_now, delta_last_week] = files["Default Collection"].deltas + assert delta_now.created == now + assert delta_now.since == yesterday + assert delta_last_week.created == last_week + assert delta_last_week.since == yesterday + + def test_get_files_collection_removed_from_library( + self, marc_record_controller_fixture: MARCRecordControllerFixture ): - db = circulation_fixture.db + marc_record_controller_fixture.file(created=utc_now()) + files = marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + assert len(files) == 1 + + # The collection is removed from the library, so it's not returned. + marc_record_controller_fixture.collection.libraries = [] + files = marc_record_controller_fixture.controller.get_files( + marc_record_controller_fixture.db.session, + marc_record_controller_fixture.library, + ) + assert len(files) == 0 + + def test_get_files_multiple_collections( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + db = marc_record_controller_fixture.db now = utc_now() - yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) - library = db.default_library() + # Add a full file to the default collection. + collection_1 = marc_record_controller_fixture.collection + marc_record_controller_fixture.file(collection=collection_1, created=last_week) - db.integration_configuration( - MARCExporter.__name__, - Goals.CATALOG_GOAL, - libraries=[library], + # Create a second collection, with a full file and a delta. + collection_2 = db.collection(name="Second Collection") + collection_2.export_marc_records = True + collection_2.libraries = [marc_record_controller_fixture.library] + marc_record_controller_fixture.file(collection=collection_2, created=now) + marc_record_controller_fixture.file( + collection=collection_2, created=now, since=last_week ) - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - assert "MARC files aren't ready" in html + # Create a third collection that doesn't export MARC records. + collection_3 = db.collection() + collection_3.export_marc_records = False + collection_3.libraries = [marc_record_controller_fixture.library] + marc_record_controller_fixture.file(collection=collection_3, created=now) - def test_download_page_no_exporter( - self, circulation_fixture: CirculationControllerFixture + # Create a fourth collection that doesn't belong to the library. + collection_4 = db.collection() + collection_4.export_marc_records = True + collection_4.libraries = [] + marc_record_controller_fixture.file(collection=collection_4, created=now) + + files = marc_record_controller_fixture.controller.get_files( + db.session, + marc_record_controller_fixture.library, + ) + + assert len(files) == 2 + + # The returned collections are sorted by name. 
+ assert list(files.keys()) == [collection_1.name, collection_2.name] + + [collection_1_result, collection_2_result] = files.values() + + assert collection_1_result.full is not None + assert collection_1_result.full.created == last_week + assert len(collection_1_result.deltas) == 0 + + assert collection_2_result.full is not None + assert collection_2_result.full.created == now + assert len(collection_2_result.deltas) == 1 + + def test_download_page_with_full_and_delta( + self, marc_record_controller_fixture: MARCRecordControllerFixture ): - db = circulation_fixture.db - library = db.default_library() - - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - assert ("No MARC exporter is currently configured") in html - - # If the exporter was deleted after some MARC files were cached, - # they will still be available to download. now = utc_now() - rep, ignore = create( - db.session, - Representation, - url="http://mirror1", - mirror_url="http://mirror1", - media_type=Representation.MARC_MEDIA_TYPE, - mirrored_at=now, + yesterday = now - datetime.timedelta(days=1) + last_week = now - datetime.timedelta(days=7) + + marc_record_controller_fixture.integration() + marc_record_controller_fixture.file(key="full", created=now) + marc_record_controller_fixture.file(key="old_full", created=yesterday) + marc_record_controller_fixture.file(key="delta_1", created=now, since=yesterday) + marc_record_controller_fixture.file( + key="delta_2", created=yesterday, since=last_week ) - cache, ignore = create( - db.session, - CachedMARCFile, - library=db.default_library(), - lane=None, - representation=rep, - end_time=now, + + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + + assert ( + 'Full file - last updated %s' + % now.strftime("%B %-d, %Y") + in html + ) + assert '' not in html + assert "
<h4>Update-only files</h4>
" in html + assert ( + '
Updates from %s to %s' + % (yesterday.strftime("%B %-d, %Y"), now.strftime("%B %-d, %Y")) + in html + ) + assert ( + 'Updates from %s to %s' + % (last_week.strftime("%B %-d, %Y"), yesterday.strftime("%B %-d, %Y")) + in html ) - with circulation_fixture.request_context_with_library("/"): - response = circulation_fixture.manager.marc_records.download_page() - assert 200 == response.status_code - html = response.get_data(as_text=True) - assert ("Download MARC files for %s" % library.name) in html - assert "No MARC exporter is currently configured" in html - assert "
<h3>All Books</h3>
" in html - assert ( - 'Full file - last updated %s' - % now.strftime("%B %-d, %Y") - in html - ) + def test_download_page_with_exporter_but_no_files( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + marc_record_controller_fixture.integration() + + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "MARC files aren't ready" in html + + def test_download_page_no_exporter( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "No MARC exporter is currently configured" in html + + def test_download_page_no_storage_service( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + marc_record_controller_fixture.integration() + controller = marc_record_controller_fixture.controller + controller.storage_service = None + + response = controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "No storage service is currently configured" in html diff --git a/tests/api/test_marc.py b/tests/api/test_marc.py deleted file mode 100644 index 6559bb2f0b..0000000000 --- a/tests/api/test_marc.py +++ /dev/null @@ -1,294 +0,0 @@ -import functools -import urllib.error -import urllib.parse -import urllib.request -from unittest.mock import create_autospec - -import pytest -from pymarc import Record - -from api.marc import LibraryAnnotator -from core.config import Configuration -from core.marc import MarcExporterLibrarySettings -from core.model import ConfigurationSetting, create -from core.model.discovery_service_registration import DiscoveryServiceRegistration -from tests.fixtures.database import ( - DatabaseTransactionFixture, - IntegrationConfigurationFixture, -) - - -class LibraryAnnotatorFixture: - def __init__(self, db: DatabaseTransactionFixture): - self.library = db.default_library() - - # Mock class to verify that the correct methods - # are called by annotate_work_record. 
- self.mock_annotator = LibraryAnnotator(self.library) - self.mock_add_marc_organization_code = create_autospec( - spec=self.mock_annotator.add_marc_organization_code - ) - self.mock_annotator.add_marc_organization_code = ( - self.mock_add_marc_organization_code - ) - self.mock_add_summary = create_autospec(spec=self.mock_annotator.add_summary) - self.mock_annotator.add_summary = self.mock_add_summary - self.mock_add_simplified_genres = create_autospec( - spec=self.mock_annotator.add_simplified_genres - ) - self.mock_annotator.add_simplified_genres = self.mock_add_simplified_genres - self.mock_add_web_client_urls = create_autospec( - spec=self.mock_annotator.add_web_client_urls - ) - self.mock_annotator.add_web_client_urls = self.mock_add_web_client_urls - self.mock_add_distributor = create_autospec( - spec=self.mock_annotator.add_distributor - ) - self.mock_annotator.add_distributor = self.mock_add_distributor - self.mock_add_formats = create_autospec(spec=self.mock_annotator.add_formats) - self.mock_annotator.add_formats = self.mock_add_formats - - self.record = Record() - self.work = db.work(with_license_pool=True) - self.pool = self.work.license_pools[0] - self.edition = self.pool.presentation_edition - self.identifier = self.pool.identifier - - self.mock_annotate_work_record = functools.partial( - self.mock_annotator.annotate_work_record, - work=self.work, - active_license_pool=self.pool, - edition=self.edition, - identifier=self.identifier, - record=self.record, - ) - - # The URL for a work is constructed as: - # - //works/ - work_link_template = "{cm_base}/{lib}/works/{qid}" - # It is then encoded and the web client URL is constructed in this form: - # - /book/ - client_url_template = "{client_base}/book/{work_link}" - - qualified_identifier = urllib.parse.quote( - self.identifier.type + "/" + self.identifier.identifier, safe="" - ) - cm_base_url = "http://test-circulation-manager" - - expected_work_link = work_link_template.format( - cm_base=cm_base_url, lib=self.library.short_name, qid=qualified_identifier - ) - encoded_work_link = urllib.parse.quote(expected_work_link, safe="") - - self.client_base_1 = "http://web_catalog" - self.client_base_2 = "http://another_web_catalog" - self.expected_client_url_1 = client_url_template.format( - client_base=self.client_base_1, work_link=encoded_work_link - ) - self.expected_client_url_2 = client_url_template.format( - client_base=self.client_base_2, work_link=encoded_work_link - ) - - # A few checks to ensure that our setup is useful. 
- assert self.library.short_name is not None - assert len(self.library.short_name) > 0 - assert self.client_base_1 != self.client_base_2 - assert self.expected_client_url_1 != self.expected_client_url_2 - assert self.expected_client_url_1.startswith(self.client_base_1) - assert self.expected_client_url_2.startswith(self.client_base_2) - - ConfigurationSetting.sitewide( - db.session, Configuration.BASE_URL_KEY - ).value = cm_base_url - - self.annotator = LibraryAnnotator(self.library) - - self.add_web_client_urls = functools.partial( - self.annotator.add_web_client_urls, - record=self.record, - library=self.library, - identifier=self.identifier, - ) - - -@pytest.fixture -def library_annotator_fixture( - db: DatabaseTransactionFixture, -) -> LibraryAnnotatorFixture: - return LibraryAnnotatorFixture(db) - - -class TestLibraryAnnotator: - @pytest.mark.parametrize( - "settings", - [ - pytest.param(MarcExporterLibrarySettings(), id="defaults"), - pytest.param( - MarcExporterLibrarySettings(include_summary=False), id="summary_false" - ), - pytest.param( - MarcExporterLibrarySettings(include_genres=False), id="genres_false" - ), - pytest.param( - MarcExporterLibrarySettings( - include_summary=False, include_genres=False - ), - id="summary_and_genres_false", - ), - ], - ) - def test_annotate_work_record_default_settings( - self, - library_annotator_fixture: LibraryAnnotatorFixture, - settings: MarcExporterLibrarySettings, - ) -> None: - library_annotator_fixture.mock_annotate_work_record(settings=settings) - - # If there are no settings, or the settings are false, the only methods called will be add_web_client_urls - # and the parent class methods. - library_annotator_fixture.mock_add_marc_organization_code.assert_not_called() - library_annotator_fixture.mock_add_summary.assert_not_called() - library_annotator_fixture.mock_add_simplified_genres.assert_not_called() - library_annotator_fixture.mock_add_web_client_urls.assert_called_once_with( - library_annotator_fixture.record, - library_annotator_fixture.library, - library_annotator_fixture.identifier, - settings, - ) - library_annotator_fixture.mock_add_distributor.assert_called_once_with( - library_annotator_fixture.record, library_annotator_fixture.pool - ) - library_annotator_fixture.mock_add_formats.assert_called_once_with( - library_annotator_fixture.record, library_annotator_fixture.pool - ) - - def test_annotate_work_record_settings( - self, library_annotator_fixture: LibraryAnnotatorFixture - ) -> None: - # Once the include settings are true and the marc organization code is set, - # all methods are called. 
- settings = MarcExporterLibrarySettings( - include_summary=True, - include_genres=True, - organization_code="marc org", - web_client_url="http://web_catalog", - ) - - library_annotator_fixture.mock_annotate_work_record(settings=settings) - - library_annotator_fixture.mock_add_marc_organization_code.assert_called_once_with( - library_annotator_fixture.record, settings.organization_code - ) - - library_annotator_fixture.mock_add_summary.assert_called_once_with( - library_annotator_fixture.record, library_annotator_fixture.work - ) - - library_annotator_fixture.mock_add_simplified_genres.assert_called_once_with( - library_annotator_fixture.record, library_annotator_fixture.work - ) - - library_annotator_fixture.mock_add_web_client_urls.assert_called_once_with( - library_annotator_fixture.record, - library_annotator_fixture.library, - library_annotator_fixture.identifier, - settings, - ) - - library_annotator_fixture.mock_add_distributor.assert_called_once_with( - library_annotator_fixture.record, library_annotator_fixture.pool - ) - - library_annotator_fixture.mock_add_formats.assert_called_once_with( - library_annotator_fixture.record, library_annotator_fixture.pool - ) - - def test_add_web_client_urls_none( - self, library_annotator_fixture: LibraryAnnotatorFixture - ): - settings = MarcExporterLibrarySettings() - - # If no web catalog URLs are set for the library, nothing will be changed. - library_annotator_fixture.add_web_client_urls(exporter_settings=settings) - assert [] == library_annotator_fixture.record.get_fields("856") - - def test_add_web_client_urls_from_library_registry( - self, - db: DatabaseTransactionFixture, - create_integration_configuration: IntegrationConfigurationFixture, - library_annotator_fixture: LibraryAnnotatorFixture, - ): - settings = MarcExporterLibrarySettings() - - # Add a URL from a library registry. - registry = create_integration_configuration.discovery_service() - create( - db.session, - DiscoveryServiceRegistration, - library=db.default_library(), - integration=registry, - web_client=library_annotator_fixture.client_base_1, - ) - - library_annotator_fixture.add_web_client_urls(exporter_settings=settings) - [field] = library_annotator_fixture.record.get_fields("856") - assert field.indicators == ["4", "0"] - assert ( - field.get_subfields("u")[0] - == library_annotator_fixture.expected_client_url_1 - ) - - def test_add_web_client_urls_from_configuration( - self, library_annotator_fixture: LibraryAnnotatorFixture - ): - # Add a manually configured URL on a MARC export integration. - settings = MarcExporterLibrarySettings( - web_client_url=library_annotator_fixture.client_base_2 - ) - library_annotator_fixture.add_web_client_urls(exporter_settings=settings) - [field] = library_annotator_fixture.record.get_fields("856") - assert field.indicators == ["4", "0"] - assert ( - field.get_subfields("u")[0] - == library_annotator_fixture.expected_client_url_2 - ) - - def test_add_web_client_urls_from_both( - self, - db: DatabaseTransactionFixture, - create_integration_configuration: IntegrationConfigurationFixture, - library_annotator_fixture: LibraryAnnotatorFixture, - ): - # Add a URL from a library registry. - registry = create_integration_configuration.discovery_service() - create( - db.session, - DiscoveryServiceRegistration, - library=db.default_library(), - integration=registry, - web_client=library_annotator_fixture.client_base_1, - ) - - # Add a manually configured URL on a MARC export integration. 
- settings = MarcExporterLibrarySettings( - web_client_url=library_annotator_fixture.client_base_2 - ) - - library_annotator_fixture.add_web_client_urls(exporter_settings=settings) - - fields = library_annotator_fixture.record.get_fields("856") - assert len(fields) == 2 - - # The manually configured URL should be first. - [field_1, field_2] = fields - assert field_1.indicators == ["4", "0"] - assert ( - field_1.get_subfields("u")[0] - == library_annotator_fixture.expected_client_url_2 - ) - - assert field_2.indicators == ["4", "0"] - assert ( - field_2.get_subfields("u")[0] - == library_annotator_fixture.expected_client_url_1 - ) diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index e881eae21a..d2811e7e7d 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -4,27 +4,29 @@ import logging from io import StringIO from pathlib import Path -from typing import TYPE_CHECKING, Any, Optional -from unittest.mock import MagicMock, patch +from typing import TYPE_CHECKING, Optional +from unittest.mock import MagicMock, call, create_autospec, patch import pytest from _pytest.logging import LogCaptureFixture +from sqlalchemy.exc import NoResultFound from alembic.util import CommandError from api.adobe_vendor_id import AuthdataUtility from api.config import Configuration -from api.marc import LibraryAnnotator as MARCLibraryAnnotator from api.novelist import NoveListAPI from core.external_search import ExternalSearchIndex from core.integration.goals import Goals -from core.lane import WorkList -from core.marc import MARCExporter, MarcExporterSettings +from core.marc import MARCExporter, MarcExporterLibrarySettings, MarcExporterSettings from core.model import ( LOCK_ID_DB_INIT, - CachedMARCFile, ConfigurationSetting, Credential, DataSource, + DiscoveryServiceRegistration, + IntegrationConfiguration, + Library, + MarcFile, SessionManager, create, ) @@ -43,7 +45,10 @@ if TYPE_CHECKING: from tests.fixtures.authenticator import SimpleAuthIntegrationFixture - from tests.fixtures.database import DatabaseTransactionFixture + from tests.fixtures.database import ( + DatabaseTransactionFixture, + IntegrationConfigurationFixture, + ) class TestAdobeAccountIDResetScript: @@ -111,226 +116,436 @@ def lane_script_fixture( return LaneScriptFixture(db, library_fixture) -class TestCacheMARCFilesFixture: +class CacheMARCFilesFixture: def __init__(self, db: DatabaseTransactionFixture): self.db = db - self.lane = db.lane(genres=["Science Fiction"]) - self.integration = self.integration() - - self.mock_settings = MagicMock() - self.mock_library_settings = MagicMock() - - self.exporter = MARCExporter( - MagicMock(), MagicMock(), self.mock_settings, self.mock_library_settings - ) - self.mock_records = MagicMock() self.mock_services = MagicMock() - self.exporter.records = self.mock_records + self.exporter = MagicMock(spec=MARCExporter) + self.library = self.db.default_library() + self.collection = self.db.collection() + self.collection.export_marc_records = True + self.collection.libraries += [self.library] + self.cm_base_url = "http://test-circulation-manager/" + + ConfigurationSetting.sitewide( + db.session, Configuration.BASE_URL_KEY + ).value = self.cm_base_url + + def integration( + self, library: Optional[Library] = None + ) -> IntegrationConfiguration: + if library is None: + library = self.library - def integration(self): return self.db.integration_configuration( protocol=MARCExporter.__name__, goal=Goals.CATALOG_GOAL, - libraries=[self.db.default_library()], + libraries=[library], ) 
def script(self, cmd_args: Optional[list[str]] = None) -> CacheMARCFiles: cmd_args = cmd_args or [] return CacheMARCFiles( - self.db.session, services=self.mock_services, cmd_args=cmd_args - ) - - def assert_call(self, call: Any) -> None: - assert call.args[0] == self.lane - assert isinstance(call.args[1], MARCLibraryAnnotator) - assert call.args[2] == self.mock_services.storage.public.return_value - - def create_cached_file(self, end_time: datetime.datetime) -> CachedMARCFile: - representation, _ = self.db.representation() - cached, _ = create( self.db.session, - CachedMARCFile, - library=self.db.default_library(), - lane=self.lane, - representation=representation, - end_time=end_time, + exporter=self.exporter, + services=self.mock_services, + cmd_args=cmd_args, ) - return cached @pytest.fixture -def cache_marc_files(db: DatabaseTransactionFixture) -> TestCacheMARCFilesFixture: - return TestCacheMARCFilesFixture(db) +def cache_marc_files(db: DatabaseTransactionFixture) -> CacheMARCFilesFixture: + return CacheMARCFilesFixture(db) class TestCacheMARCFiles: - def test_should_process_library(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - script = CacheMARCFiles(db.session, cmd_args=[]) - assert script.should_process_library(db.default_library()) is False - db.integration_configuration( - protocol=MARCExporter.__name__, - goal=Goals.CATALOG_GOAL, - libraries=[db.default_library()], + def test_settings(self, cache_marc_files: CacheMARCFilesFixture): + # Test that the script gets the correct settings. + test_library = cache_marc_files.library + other_library = cache_marc_files.db.library() + + expected_settings = MarcExporterSettings(update_frequency=3) + expected_library_settings = MarcExporterLibrarySettings( + organization_code="test", + include_summary=True, + include_genres=True, ) - assert script.should_process_library(db.default_library()) is True - - def test_should_process_lane(self, lane_script_fixture: LaneScriptFixture): - db = lane_script_fixture.db - parent = db.lane() - parent.size = 100 - child = db.lane(parent=parent) - child.size = 10 - grandchild = db.lane(parent=child) - grandchild.size = 1 - wl = WorkList() - empty = db.lane(fiction=False) - empty.size = 0 - - script = CacheMARCFiles(db.session, cmd_args=[]) - script.max_depth = 1 - assert script.should_process_lane(parent) is True - assert script.should_process_lane(child) is True - assert script.should_process_lane(grandchild) is False - assert script.should_process_lane(wl) is True - assert script.should_process_lane(empty) is False - - script.max_depth = 0 - assert script.should_process_lane(parent) is True - assert script.should_process_lane(child) is False - assert script.should_process_lane(grandchild) is False - assert script.should_process_lane(wl) is True - assert script.should_process_lane(empty) is False - - def test_process_lane_never_run(self, cache_marc_files: TestCacheMARCFilesFixture): + + other_library_settings = MarcExporterLibrarySettings( + organization_code="other", + ) + + integration = cache_marc_files.integration(test_library) + integration.libraries += [other_library] + + test_library_integration = integration.for_library(test_library) + assert test_library_integration is not None + other_library_integration = integration.for_library(other_library) + assert other_library_integration is not None + MARCExporter.settings_update(integration, expected_settings) + MARCExporter.library_settings_update( + test_library_integration, expected_library_settings + ) + 
MARCExporter.library_settings_update( + other_library_integration, other_library_settings + ) + script = cache_marc_files.script() - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) + actual_settings, actual_library_settings = script.settings(test_library) - # If the script has never been run before, it runs the exporter once - # to create a file with all records. - assert cache_marc_files.mock_records.call_count == 1 - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args) + assert actual_settings == expected_settings + assert actual_library_settings == expected_library_settings + + def test_settings_none(self, cache_marc_files: CacheMARCFilesFixture): + # If there are no settings, the setting function raises an exception. + test_library = cache_marc_files.library + script = cache_marc_files.script() + with pytest.raises(NoResultFound): + script.settings(test_library) - def test_process_lane_cached_update( - self, cache_marc_files: TestCacheMARCFilesFixture + def test_process_libraries_no_storage( + self, cache_marc_files: CacheMARCFilesFixture, caplog: LogCaptureFixture ): - # If we have a cached file already, and it's old enough, the script will - # run the exporter twice, first to update that file and second to create - # a file with changes since that first file was originally created. - now = utc_now() - last_week = now - datetime.timedelta(days=7) - cache_marc_files.create_cached_file(last_week) - settings = MarcExporterSettings(update_frequency=3) - cache_marc_files.exporter.settings = settings + # If there is no storage integration, the script logs an error and returns. + script = cache_marc_files.script() + script.storage_service = None + caplog.set_level(logging.INFO) + script.process_libraries([MagicMock(), MagicMock()]) + assert "No storage service was found" in caplog.text + + def test_get_collections(self, cache_marc_files: CacheMARCFilesFixture): + # Test that the script gets the correct collections. + test_library = cache_marc_files.library + collection1 = cache_marc_files.collection + + # Second collection is configured to export MARC records. + collection2 = cache_marc_files.db.collection() + collection2.export_marc_records = True + collection2.libraries += [test_library] + + # Third collection is not configured to export MARC records. + collection3 = cache_marc_files.db.collection() + collection3.export_marc_records = False + collection3.libraries += [test_library] + + # Fourth collection is configured to export MARC records, but is + # configured to export only to a different library. + other_library = cache_marc_files.db.library() + other_collection = cache_marc_files.db.collection() + other_collection.export_marc_records = True + other_collection.libraries += [other_library] script = cache_marc_files.script() - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert cache_marc_files.mock_records.call_count == 2 - # First call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) + # We should get back the two collections that are configured to export + # MARC records to this library. + collections = script.get_collections(test_library) + assert set(collections) == {collection1, collection2} - # Second call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) - assert ( - cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] - < last_week - ) + # Set collection3 to export MARC records to this library. 
+ collection3.export_marc_records = True - def test_process_lane_cached_recent( - self, cache_marc_files: TestCacheMARCFilesFixture - ): - # If we already have a recent cached file, the script won't do anything. - now = utc_now() - yesterday = now - datetime.timedelta(days=1) - cache_marc_files.create_cached_file(yesterday) - settings = MarcExporterSettings(update_frequency=3) - cache_marc_files.exporter.settings = settings + # We should get back all three collections that are configured to export + # MARC records to this library. + collections = script.get_collections(test_library) + assert set(collections) == {collection1, collection2, collection3} + def test_get_web_client_urls( + self, + db: DatabaseTransactionFixture, + cache_marc_files: CacheMARCFilesFixture, + create_integration_configuration: IntegrationConfigurationFixture, + ): + # No web client URLs are returned if there are no discovery service registrations. script = cache_marc_files.script() - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert cache_marc_files.mock_records.call_count == 0 + assert script.get_web_client_urls(cache_marc_files.library) == [] + + # If we pass in a configured web client URL, that URL is returned. + assert script.get_web_client_urls( + cache_marc_files.library, "http://web-client" + ) == ["http://web-client"] + + # Add a URL from a library registry. + registry = create_integration_configuration.discovery_service() + create( + db.session, + DiscoveryServiceRegistration, + library=cache_marc_files.library, + integration=registry, + web_client="http://web-client-url/", + ) + assert script.get_web_client_urls(cache_marc_files.library) == [ + "http://web-client-url/" + ] - def test_process_lane_cached_recent_force( - self, cache_marc_files: TestCacheMARCFilesFixture + # URL from library registry and configured URL are both returned. + assert script.get_web_client_urls( + cache_marc_files.library, "http://web-client" + ) == [ + "http://web-client-url/", + "http://web-client", + ] + + def test_process_library_not_configured( + self, + cache_marc_files: CacheMARCFilesFixture, ): - # But we can force it to run anyway. - now = utc_now() - yesterday = now - datetime.timedelta(days=1) - last_week = now - datetime.timedelta(days=7) - cache_marc_files.create_cached_file(yesterday) + script = cache_marc_files.script() + mock_process_collection = create_autospec(script.process_collection) + script.process_collection = mock_process_collection + mock_settings = create_autospec(script.settings) + script.settings = mock_settings + mock_settings.side_effect = NoResultFound + + # If there is no integration configuration for the library, the script + # does nothing. 
+ script.process_library(cache_marc_files.library) + mock_process_collection.assert_not_called() + + def test_process_library(self, cache_marc_files: CacheMARCFilesFixture): + script = cache_marc_files.script() + mock_annotator_cls = MagicMock() + mock_process_collection = create_autospec(script.process_collection) + script.process_collection = mock_process_collection + mock_settings = create_autospec(script.settings) + script.settings = mock_settings settings = MarcExporterSettings(update_frequency=3) - cache_marc_files.exporter.settings = settings + library_settings = MarcExporterLibrarySettings( + organization_code="test", + web_client_url="http://web-client-url/", + include_summary=True, + include_genres=False, + ) + mock_settings.return_value = ( + settings, + library_settings, + ) - script = cache_marc_files.script(cmd_args=["--force"]) - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) - assert cache_marc_files.mock_records.call_count == 2 + before_call_time = utc_now() - # First call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) + # If there is an integration configuration for the library, the script + # processes all the collections for that library. + script.process_library( + cache_marc_files.library, annotator_cls=mock_annotator_cls + ) + + after_call_time = utc_now() + + mock_annotator_cls.assert_called_once_with( + cache_marc_files.cm_base_url, + cache_marc_files.library.short_name, + [library_settings.web_client_url], + library_settings.organization_code, + library_settings.include_summary, + library_settings.include_genres, + ) - # Second call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) + assert mock_process_collection.call_count == 1 + ( + library, + collection, + annotator, + update_frequency, + creation_time, + ) = mock_process_collection.call_args.args + assert library == cache_marc_files.library + assert collection == cache_marc_files.collection + assert annotator == mock_annotator_cls.return_value + assert update_frequency == settings.update_frequency + assert creation_time > before_call_time + assert creation_time < after_call_time + + def test_last_updated( + self, db: DatabaseTransactionFixture, cache_marc_files: CacheMARCFilesFixture + ): + script = cache_marc_files.script() + + # If there is no cached file, we return None. assert ( - cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] - < yesterday + script.last_updated(cache_marc_files.library, cache_marc_files.collection) + is None + ) + + # If there is a cached file, we return the time it was created. + file1 = MarcFile( + library=cache_marc_files.library, + collection=cache_marc_files.collection, + created=datetime_utc(1984, 5, 8), + key="file1", ) + db.session.add(file1) assert ( - cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] - > last_week + script.last_updated(cache_marc_files.library, cache_marc_files.collection) + == file1.created ) - def test_process_lane_cached_frequency_zero( - self, cache_marc_files: TestCacheMARCFilesFixture - ): - # The update frequency can also be 0, in which case it will always run. - # If we already have a recent cached file, the script won't do anything. 
- now = utc_now() - yesterday = now - datetime.timedelta(days=1) - last_week = now - datetime.timedelta(days=7) - cache_marc_files.create_cached_file(yesterday) - settings = MarcExporterSettings(update_frequency=0) - cache_marc_files.exporter.settings = settings + # If there are multiple cached files, we return the time of the most recent one. + file2 = MarcFile( + library=cache_marc_files.library, + collection=cache_marc_files.collection, + created=utc_now(), + key="file2", + ) + db.session.add(file2) + assert ( + script.last_updated(cache_marc_files.library, cache_marc_files.collection) + == file2.created + ) + + def test_force(self, cache_marc_files: CacheMARCFilesFixture): script = cache_marc_files.script() - script.process_lane(cache_marc_files.lane, cache_marc_files.exporter) + assert script.force is False - assert cache_marc_files.mock_records.call_count == 2 + script = cache_marc_files.script(cmd_args=["--force"]) + assert script.force is True + + @pytest.mark.parametrize( + "last_updated, force, update_frequency, run_exporter", + [ + pytest.param(None, False, 10, True, id="never_run_before"), + pytest.param(None, False, 10, True, id="never_run_before_w_force"), + pytest.param( + utc_now() - datetime.timedelta(days=5), + False, + 10, + False, + id="recently_run", + ), + pytest.param( + utc_now() - datetime.timedelta(days=5), + True, + 10, + True, + id="recently_run_w_force", + ), + pytest.param( + utc_now() - datetime.timedelta(days=5), + False, + 0, + True, + id="recently_run_w_frequency_0", + ), + pytest.param( + utc_now() - datetime.timedelta(days=15), + False, + 10, + True, + id="not_recently_run", + ), + pytest.param( + utc_now() - datetime.timedelta(days=15), + True, + 10, + True, + id="not_recently_run_w_force", + ), + pytest.param( + utc_now() - datetime.timedelta(days=15), + False, + 0, + True, + id="not_recently_run_w_frequency_0", + ), + ], + ) + def test_process_collection_skip( + self, + cache_marc_files: CacheMARCFilesFixture, + caplog: LogCaptureFixture, + last_updated: Optional[datetime.datetime], + force: bool, + update_frequency: int, + run_exporter: bool, + ): + script = cache_marc_files.script() + script.exporter = MagicMock() + now = utc_now() + caplog.set_level(logging.INFO) + + script.force = force + script.last_updated = MagicMock(return_value=last_updated) + script.process_collection( + cache_marc_files.library, + cache_marc_files.collection, + MagicMock(), + update_frequency, + now, + ) - # First call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[0]) + if run_exporter: + assert script.exporter.records.call_count > 0 + assert "Processed collection" in caplog.text + else: + assert script.exporter.records.call_count == 0 + assert "Skipping collection" in caplog.text - # Second call - cache_marc_files.assert_call(cache_marc_files.mock_records.call_args_list[1]) - assert ( - cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] - < yesterday + def test_process_collection_never_called( + self, cache_marc_files: CacheMARCFilesFixture, caplog: LogCaptureFixture + ): + # If the collection has not been processed before, the script processes + # the collection and created a full export. 
+ caplog.set_level(logging.INFO) + script = cache_marc_files.script() + mock_exporter = MagicMock(spec=MARCExporter) + script.exporter = mock_exporter + script.last_updated = MagicMock(return_value=None) + mock_annotator = MagicMock() + creation_time = utc_now() + script.process_collection( + cache_marc_files.library, + cache_marc_files.collection, + mock_annotator, + 10, + creation_time, ) - assert ( - cache_marc_files.mock_records.call_args_list[1].kwargs["start_time"] - > last_week + mock_exporter.records.assert_called_once_with( + cache_marc_files.library, + cache_marc_files.collection, + mock_annotator, + creation_time=creation_time, ) + assert "Processed collection" in caplog.text - def test_process_lane_creates_exporter( - self, cache_marc_files: TestCacheMARCFilesFixture + def test_process_collection_with_last_updated( + self, cache_marc_files: CacheMARCFilesFixture, caplog: LogCaptureFixture ): - # If the exporter doesn't exist, the script will create it. + # If the collection has been processed before, the script processes + # the collection, created a full export and a delta export. + caplog.set_level(logging.INFO) script = cache_marc_files.script() - script.settings = MagicMock( - return_value=( - cache_marc_files.mock_settings, - cache_marc_files.mock_library_settings, - ) + mock_exporter = MagicMock(spec=MARCExporter) + script.exporter = mock_exporter + last_updated = utc_now() - datetime.timedelta(days=20) + script.last_updated = MagicMock(return_value=last_updated) + mock_annotator = MagicMock() + creation_time = utc_now() + script.process_collection( + cache_marc_files.library, + cache_marc_files.collection, + mock_annotator, + 10, + creation_time, + ) + assert "Processed collection" in caplog.text + assert mock_exporter.records.call_count == 2 + + full_call = call( + cache_marc_files.library, + cache_marc_files.collection, + mock_annotator, + creation_time=creation_time, ) - with patch("scripts.MARCExporter") as exporter: - script.process_lane(cache_marc_files.lane) - - exporter.assert_called_once_with( - cache_marc_files.db.session, - cache_marc_files.lane.library, - cache_marc_files.mock_settings, - cache_marc_files.mock_library_settings, + + delta_call = call( + cache_marc_files.library, + cache_marc_files.collection, + mock_annotator, + creation_time=creation_time, + since_time=last_updated, ) + mock_exporter.records.assert_has_calls([full_call, delta_call]) + class TestInstanceInitializationScript: # These are some basic tests for the instance initialization script. 
It is tested diff --git a/tests/core/models/test_marcfile.py b/tests/core/models/test_marcfile.py new file mode 100644 index 0000000000..7ee27cb2af --- /dev/null +++ b/tests/core/models/test_marcfile.py @@ -0,0 +1,42 @@ +from datetime import datetime + +import pytest +from sqlalchemy import select + +from core.model import MarcFile +from tests.fixtures.database import DatabaseTransactionFixture + + +@pytest.mark.parametrize( + "delete_library, delete_collection", + [ + (False, True), + (True, False), + (True, True), + ], +) +def test_delete_library_collection( + db: DatabaseTransactionFixture, delete_library: bool, delete_collection: bool +) -> None: + library = db.default_library() + collection = db.default_collection() + session = db.session + + file = MarcFile( + library=library, collection=collection, key="key", created=datetime.now() + ) + session.add(file) + session.commit() + + if delete_library: + session.delete(library) + if delete_collection: + session.delete(collection) + session.commit() + + marc_files = session.scalars(select(MarcFile)).all() + assert len(marc_files) == 1 + [marc_file] = marc_files + + assert marc_file.library is None if delete_library else library + assert marc_file.collection is None if delete_collection else collection diff --git a/tests/core/models/test_work.py b/tests/core/models/test_work.py index ae96d2b267..f2a12c2710 100644 --- a/tests/core/models/test_work.py +++ b/tests/core/models/test_work.py @@ -264,7 +264,7 @@ def test_calculate_presentation( # The Work now has a complete set of WorkCoverageRecords # associated with it, reflecting all the operations that - # occured as part of calculate_presentation(). + # occurred as part of calculate_presentation(). # # All the work has actually been done, except for the work of # updating the search index, which has been registered and @@ -278,16 +278,15 @@ def test_calculate_presentation( (wcr.CLASSIFY_OPERATION, success), (wcr.SUMMARY_OPERATION, success), (wcr.QUALITY_OPERATION, success), - (wcr.GENERATE_MARC_OPERATION, success), (wcr.UPDATE_SEARCH_INDEX_OPERATION, wcr.REGISTERED), } assert expect == {(x.operation, x.status) for x in records} # Now mark the pool with the presentation edition as suppressed. # work.calculate_presentation() will call work.mark_licensepools_as_superceded(), - # which will mark the suppressed pool as superceded and take its edition out of the running. + # which will mark the suppressed pool as superseded and take its edition out of the running. # Make sure that work's presentation edition and work's author, etc. - # fields are updated accordingly, and that the superceded pool's edition + # fields are updated accordingly, and that the superseded pool's edition # knows it's no longer the champ. pool2.suppressed = True @@ -314,7 +313,7 @@ def test_calculate_presentation( # Updating availability also modified work.last_update_time. assert (utc_now() - work.last_update_time) < datetime.timedelta(seconds=2) - # make a staff (admin interface) edition. its fields should supercede all others below it + # make a staff (admin interface) edition. its fields should supersede all others below it # except when it has no contributors, and they do. pool2.suppressed = False @@ -333,7 +332,7 @@ def test_calculate_presentation( work.calculate_presentation(search_index_client=index) - # The title of the Work got superceded. + # The title of the Work got superseded. assert "The Staff Title" == work.title # The author of the Work is still the author of edition2 and was not clobbered. 
@@ -1634,13 +1633,6 @@ def test_for_unchecked_subjects(self, db: DatabaseTransactionFixture): classification2.subject.checked = True assert [] == qu.all() - def test_calculate_marc_record(self, db: DatabaseTransactionFixture): - work = db.work(with_license_pool=True) - - work.calculate_marc_record() - assert work.title in work.marc_record - assert "online resource" in work.marc_record - def test_active_licensepool_ignores_superceded_licensepools( self, db: DatabaseTransactionFixture ): diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py index 8b62e3c699..5a168d1f20 100644 --- a/tests/core/service/storage/test_s3.py +++ b/tests/core/service/storage/test_s3.py @@ -200,9 +200,7 @@ def test_multipart_upload(self, s3_service_fixture: S3ServiceFixture): assert upload.exception is None s3_service_fixture.mock_s3_client.complete_multipart_upload.assert_called_once() - def test_multipart_upload_boto_exception( - self, s3_service_fixture: S3ServiceFixture - ): + def test_multipart_upload_exception(self, s3_service_fixture: S3ServiceFixture): service = s3_service_fixture.service() exception = BotoCoreError() s3_service_fixture.mock_s3_client.upload_part.side_effect = exception @@ -219,28 +217,6 @@ def test_multipart_upload_boto_exception( assert upload.exception is exception s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once() - def test_multipart_upload_other_exception( - self, s3_service_fixture: S3ServiceFixture - ): - service = s3_service_fixture.service() - exception = ValueError("foo") - s3_service_fixture.mock_s3_client.upload_part.side_effect = exception - - # A non-boto exception is raised during upload, the upload is aborted - # and the exception is raised. - with pytest.raises(ValueError) as excinfo: - with service.multipart(key="key") as upload: - assert upload.complete is False - assert upload.url == "https://region.test.com/bucket/key" - assert upload.exception is None - upload.upload_part(b"test") - - assert upload.complete is False - assert upload.exception is exception - s3_service_fixture.mock_s3_client.abort_multipart_upload.assert_called_once() - assert excinfo.value is exception - - # Calling upload_part after the upload is complete raises an error. with pytest.raises(RuntimeError): upload.upload_part(b"foo") diff --git a/tests/core/test_coverage.py b/tests/core/test_coverage.py index 9fb3d48e01..e96e69bcbe 100644 --- a/tests/core/test_coverage.py +++ b/tests/core/test_coverage.py @@ -7,7 +7,6 @@ CoverageFailure, CoverageProviderProgress, IdentifierCoverageProvider, - MARCRecordWorkCoverageProvider, PresentationReadyWorkCoverageProvider, WorkClassificationCoverageProvider, WorkPresentationEditionCoverageProvider, @@ -2193,22 +2192,3 @@ def test_process_item(self, db: DatabaseTransactionFixture): policy.calculate_quality, ] ) - - -class TestMARCRecordWorkCoverageProvider: - def test_run(self, db: DatabaseTransactionFixture): - provider = MARCRecordWorkCoverageProvider(db.session) - work = db.work(with_license_pool=True) - work.marc_record = "old junk" - work.presentation_ready = False - - # The work is not presentation-ready, so nothing happens. - provider.run() - assert "old junk" == work.marc_record - - # The work is presentation-ready, so its MARC record is - # regenerated. 
- work.presentation_ready = True - provider.run() - assert work.title in work.marc_record - assert "online resource" in work.marc_record diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py index bf54a5856c..940139ec4f 100644 --- a/tests/core/test_marc.py +++ b/tests/core/test_marc.py @@ -2,80 +2,213 @@ import datetime import functools +import logging +import urllib from typing import TYPE_CHECKING -from unittest.mock import MagicMock -from urllib.parse import quote +from unittest.mock import MagicMock, create_autospec, patch import pytest -from freezegun import freeze_time +from _pytest.logging import LogCaptureFixture from pymarc import MARCReader, Record -from core.external_search import Filter -from core.integration.goals import Goals -from core.lane import WorkList -from core.marc import Annotator, MARCExporter, MARCExporterFacets +from core.marc import Annotator, MARCExporter from core.model import ( - CachedMARCFile, Contributor, DataSource, DeliveryMechanism, Edition, Genre, Identifier, - IntegrationConfiguration, LicensePoolDeliveryMechanism, + MarcFile, Representation, RightsStatus, - Work, - get_one, ) from core.util.datetime_helpers import datetime_utc, utc_now -from tests.mocks.search import ExternalSearchIndexFake +from core.util.uuid import uuid_encode if TYPE_CHECKING: from tests.fixtures.database import DatabaseTransactionFixture - from tests.fixtures.s3 import S3ServiceFixture - from tests.fixtures.search import ExternalSearchFixtureFake + from tests.fixtures.s3 import MockS3Service, S3ServiceFixture + + +class AnnotateWorkRecordFixture: + def __init__(self): + self.cm_url = "http://cm.url" + self.short_name = "short_name" + self.web_client_urls = ["http://webclient.url"] + self.organization_name = "org" + self.include_summary = True + self.include_genres = True + + self.annotator = Annotator( + self.cm_url, + self.short_name, + self.web_client_urls, + self.organization_name, + self.include_summary, + self.include_genres, + ) + + self.revised = MagicMock() + self.work = MagicMock() + self.pool = MagicMock() + self.edition = MagicMock() + self.identifier = MagicMock() + + self.mock_leader = create_autospec(self.annotator.leader, return_value=" " * 24) + self.mock_add_control_fields = create_autospec( + self.annotator.add_control_fields + ) + self.mock_add_marc_organization_code = create_autospec( + self.annotator.add_marc_organization_code + ) + self.mock_add_isbn = create_autospec(self.annotator.add_isbn) + self.mock_add_title = create_autospec(self.annotator.add_title) + self.mock_add_contributors = create_autospec(self.annotator.add_contributors) + self.mock_add_publisher = create_autospec(self.annotator.add_publisher) + self.mock_add_distributor = create_autospec(self.annotator.add_distributor) + self.mock_add_physical_description = create_autospec( + self.annotator.add_physical_description + ) + self.mock_add_audience = create_autospec(self.annotator.add_audience) + self.mock_add_series = create_autospec(self.annotator.add_series) + self.mock_add_system_details = create_autospec( + self.annotator.add_system_details + ) + self.mock_add_formats = create_autospec(self.annotator.add_formats) + self.mock_add_summary = create_autospec(self.annotator.add_summary) + self.mock_add_genres = create_autospec(self.annotator.add_genres) + self.mock_add_ebooks_subject = create_autospec( + self.annotator.add_ebooks_subject + ) + self.mock_add_web_client_urls = create_autospec( + self.annotator.add_web_client_urls + ) + + self.annotator.leader = self.mock_leader + 
self.annotator.add_control_fields = self.mock_add_control_fields + self.annotator.add_marc_organization_code = self.mock_add_marc_organization_code + self.annotator.add_isbn = self.mock_add_isbn + self.annotator.add_title = self.mock_add_title + self.annotator.add_contributors = self.mock_add_contributors + self.annotator.add_publisher = self.mock_add_publisher + self.annotator.add_distributor = self.mock_add_distributor + self.annotator.add_physical_description = self.mock_add_physical_description + self.annotator.add_audience = self.mock_add_audience + self.annotator.add_series = self.mock_add_series + self.annotator.add_system_details = self.mock_add_system_details + self.annotator.add_formats = self.mock_add_formats + self.annotator.add_summary = self.mock_add_summary + self.annotator.add_genres = self.mock_add_genres + self.annotator.add_ebooks_subject = self.mock_add_ebooks_subject + self.annotator.add_web_client_urls = self.mock_add_web_client_urls + + self.annotate_work_record = functools.partial( + self.annotator.annotate_work_record, + self.revised, + self.work, + self.pool, + self.edition, + self.identifier, + ) -class TestAnnotator: - def test_annotate_work_record(self, db: DatabaseTransactionFixture) -> None: - # Verify that annotate_work_record adds the distributor and formats. - annotator = Annotator() - annotator.add_distributor = MagicMock() - annotator.add_formats = MagicMock() +@pytest.fixture +def annotate_work_record_fixture() -> AnnotateWorkRecordFixture: + return AnnotateWorkRecordFixture() - record = Record() - work = db.work(with_license_pool=True) - pool = work.license_pools[0] - annotator.annotate_work_record( - work, pool, MagicMock(), MagicMock(), record, MagicMock() +class TestAnnotator: + def test_annotate_work_record( + self, annotate_work_record_fixture: AnnotateWorkRecordFixture + ) -> None: + fixture = annotate_work_record_fixture + with patch("core.marc.Record") as mock_record: + fixture.annotate_work_record() + + mock_record.assert_called_once_with( + force_utf8=True, leader=fixture.mock_leader.return_value + ) + fixture.mock_leader.assert_called_once_with(fixture.revised) + record = mock_record() + fixture.mock_add_control_fields.assert_called_once_with( + record, fixture.identifier, fixture.pool, fixture.edition + ) + fixture.mock_add_marc_organization_code.assert_called_once_with( + record, fixture.organization_name + ) + fixture.mock_add_isbn.assert_called_once_with(record, fixture.identifier) + fixture.mock_add_title.assert_called_once_with(record, fixture.edition) + fixture.mock_add_contributors.assert_called_once_with(record, fixture.edition) + fixture.mock_add_publisher.assert_called_once_with(record, fixture.edition) + fixture.mock_add_distributor.assert_called_once_with(record, fixture.pool) + fixture.mock_add_physical_description.assert_called_once_with( + record, fixture.edition + ) + fixture.mock_add_audience.assert_called_once_with(record, fixture.work) + fixture.mock_add_series.assert_called_once_with(record, fixture.edition) + fixture.mock_add_system_details.assert_called_once_with(record) + fixture.mock_add_formats.assert_called_once_with(record, fixture.pool) + fixture.mock_add_summary.assert_called_once_with(record, fixture.work) + fixture.mock_add_genres.assert_called_once_with(record, fixture.work) + fixture.mock_add_ebooks_subject.assert_called_once_with(record) + fixture.mock_add_web_client_urls.assert_called_once_with( + record, + fixture.identifier, + fixture.short_name, + fixture.cm_url, + fixture.web_client_urls, ) - 
annotator.add_distributor.assert_called_once_with(record, pool) - annotator.add_formats.assert_called_once_with(record, pool) - def test_leader(self, db: DatabaseTransactionFixture): - work = db.work(with_license_pool=True) - leader = Annotator.leader(work) - assert "00000nam 2200000 4500" == leader + def test_annotate_work_record_no_summary( + self, annotate_work_record_fixture: AnnotateWorkRecordFixture + ) -> None: + fixture = annotate_work_record_fixture + fixture.annotator.include_summary = False + fixture.annotate_work_record() + + assert fixture.mock_add_summary.call_count == 0 + + def test_annotate_work_record_no_genres( + self, annotate_work_record_fixture: AnnotateWorkRecordFixture + ) -> None: + fixture = annotate_work_record_fixture + fixture.annotator.include_genres = False + fixture.annotate_work_record() + + assert fixture.mock_add_genres.call_count == 0 + + def test_annotate_work_record_no_organization_code( + self, annotate_work_record_fixture: AnnotateWorkRecordFixture + ) -> None: + fixture = annotate_work_record_fixture + fixture.annotator.organization_code = None + fixture.annotate_work_record() - # If there's already a marc record cached, the record status changes. - work.marc_record = "cached" - leader = Annotator.leader(work) - assert "00000cam 2200000 4500" == leader + assert fixture.mock_add_marc_organization_code.call_count == 0 - def _check_control_field(self, record, tag, expected): + def test_leader(self): + leader = Annotator.leader(False) + assert leader == "00000nam 2200000 4500" + + # If the record is revised, the leader is different. + leader = Annotator.leader(True) + assert leader == "00000cam 2200000 4500" + + @staticmethod + def _check_control_field(record, tag, expected): [field] = record.get_fields(tag) - assert expected == field.value() + assert field.value() == expected - def _check_field(self, record, tag, expected_subfields, expected_indicators=None): + @staticmethod + def _check_field(record, tag, expected_subfields, expected_indicators=None): if not expected_indicators: expected_indicators = [" ", " "] [field] = record.get_fields(tag) - assert expected_indicators == field.indicators + assert field.indicators == expected_indicators for subfield, value in expected_subfields.items(): - assert value == field.get_subfields(subfield)[0] + assert field.get_subfields(subfield)[0] == value def test_add_control_fields(self, db: DatabaseTransactionFixture): # This edition has one format and was published before 1900. 
@@ -449,7 +582,7 @@ def test_add_simplified_genres(self, db: DatabaseTransactionFixture): work.genres = [fantasy, romance] record = Record() - Annotator.add_simplified_genres(record, work) + Annotator.add_genres(record, work) fields = record.get_fields("650") [fantasy_field, romance_field] = sorted( fields, key=lambda x: x.get_subfields("a")[0] @@ -466,42 +599,111 @@ def test_add_ebooks_subject(self): Annotator.add_ebooks_subject(record) self._check_field(record, "655", {"a": "Electronic books."}, [" ", "0"]) + def test_add_web_client_urls_empty(self): + record = MagicMock(spec=Record) + identifier = MagicMock() + Annotator.add_web_client_urls(record, identifier, "", "", []) + assert record.add_field.call_count == 0 + + def test_add_web_client_urls(self, db: DatabaseTransactionFixture): + record = Record() + identifier = db.identifier() + short_name = "short_name" + cm_url = "http://cm.url" + web_client_urls = ["http://webclient1.url", "http://webclient2.url"] + Annotator.add_web_client_urls( + record, identifier, short_name, cm_url, web_client_urls + ) + fields = record.get_fields("856") + assert len(fields) == 2 + [field1, field2] = fields + assert field1.indicators == ["4", "0"] + assert field2.indicators == ["4", "0"] + + # The URL for a work is constructed as: + # - //works/ + work_link_template = "{cm_base}/{lib}/works/{qid}" + # It is then encoded and the web client URL is constructed in this form: + # - /book/ + client_url_template = "{client_base}/book/{work_link}" + + qualified_identifier = urllib.parse.quote( + identifier.type + "/" + identifier.identifier, safe="" + ) + + expected_work_link = work_link_template.format( + cm_base=cm_url, lib=short_name, qid=qualified_identifier + ) + encoded_work_link = urllib.parse.quote(expected_work_link, safe="") + + expected_client_url_1 = client_url_template.format( + client_base=web_client_urls[0], work_link=encoded_work_link + ) + expected_client_url_2 = client_url_template.format( + client_base=web_client_urls[1], work_link=encoded_work_link + ) + + # A few checks to ensure that our setup is useful. 
+ assert web_client_urls[0] != web_client_urls[1] + assert expected_client_url_1 != expected_client_url_2 + assert expected_client_url_1.startswith(web_client_urls[0]) + assert expected_client_url_2.startswith(web_client_urls[1]) + + assert field1.get_subfields("u")[0] == expected_client_url_1 + assert field2.get_subfields("u")[0] == expected_client_url_2 + class MarcExporterFixture: - def __init__(self, db: DatabaseTransactionFixture): + def __init__(self, db: DatabaseTransactionFixture, s3: MockS3Service): self.db = db - self.integration = self._integration(db) self.now = utc_now() self.library = db.default_library() - self.settings = MagicMock() - self.library_settings = MagicMock() - self.exporter = MARCExporter( - self.db.session, self.library, self.settings, self.library_settings - ) - self.annotator = Annotator() - self.w1 = db.work(genre="Mystery", with_open_access_download=True) - self.w2 = db.work(genre="Mystery", with_open_access_download=True) + self.s3_service = s3 + self.exporter = MARCExporter(self.db.session, s3) + self.mock_annotator = MagicMock(spec=Annotator) + assert self.library.short_name is not None + self.annotator = Annotator( + "http://cm.url", + self.library.short_name, + ["http://webclient.url"], + "org", + True, + True, + ) + + self.library = db.library() + self.collection = db.collection() + self.collection.libraries.append(self.library) - self.search_engine = ExternalSearchIndexFake(db.session) - self.search_engine.mock_query_works([self.w1, self.w2]) + self.now = utc_now() + self.yesterday = self.now - datetime.timedelta(days=1) + self.last_week = self.now - datetime.timedelta(days=7) - @staticmethod - def _integration(db: DatabaseTransactionFixture) -> IntegrationConfiguration: - return db.integration_configuration( - MARCExporter.__name__, - Goals.CATALOG_GOAL, - libraries=[db.default_library()], + self.w1 = db.work( + genre="Mystery", with_open_access_download=True, collection=self.collection + ) + self.w1.last_update_time = self.yesterday + self.w2 = db.work( + genre="Mystery", with_open_access_download=True, collection=self.collection + ) + self.w2.last_update_time = self.last_week + + self.records = functools.partial( + self.exporter.records, + self.library, + self.collection, + annotator=self.annotator, + creation_time=self.now, ) @pytest.fixture def marc_exporter_fixture( db: DatabaseTransactionFixture, - external_search_fake_fixture: ExternalSearchFixtureFake, + s3_service_fixture: S3ServiceFixture, ) -> MarcExporterFixture: - # external_search_fake_fixture is used only for the integration it creates - return MarcExporterFixture(db) + return MarcExporterFixture(db, s3_service_fixture.mock_service()) class TestMARCExporter: @@ -515,263 +717,156 @@ def test_create_record( data_source_name=DataSource.OVERDRIVE, ) + mock_revised = MagicMock() + create_record = functools.partial( MARCExporter.create_record, + revised=mock_revised, work=work, - annotator=marc_exporter_fixture.annotator, - settings=marc_exporter_fixture.settings, - library_settings=marc_exporter_fixture.library_settings, + annotator=marc_exporter_fixture.mock_annotator, ) - # The record isn't cached yet, so a new record is created and cached. 
- assert work.marc_record is None record = create_record() assert record is not None - [title_field] = record.get_fields("245") - assert "old title" == title_field.get_subfields("a")[0] - [author_field] = record.get_fields("100") - assert "author, old" == author_field.get_subfields("a")[0] - [distributor_field] = record.get_fields("264") - assert DataSource.OVERDRIVE == distributor_field.get_subfields("b")[0] - cached = work.marc_record - assert cached is not None - assert "old title" in cached # type: ignore[unreachable] - assert "author, old" in cached - # The distributor isn't part of the cached record. - assert DataSource.OVERDRIVE not in cached - - work.presentation_edition.title = "new title" - work.presentation_edition.sort_author = "author, new" - new_data_source = DataSource.lookup(db.session, DataSource.BIBLIOTHECA) - work.license_pools[0].data_source = new_data_source - - # Now that the record is cached, creating a record will - # use the cache. Distributor will be updated since it's - # not part of the cached record. - record = create_record() - [title_field] = record.get_fields("245") - assert "old title" == title_field.get_subfields("a")[0] - [author_field] = record.get_fields("100") - assert "author, old" == author_field.get_subfields("a")[0] - [distributor_field] = record.get_fields("264") - assert DataSource.BIBLIOTHECA == distributor_field.get_subfields("b")[0] - - # But we can force an update to the cached record. - record = create_record(force_create=True) - [title_field] = record.get_fields("245") - assert "new title" == title_field.get_subfields("a")[0] - [author_field] = record.get_fields("100") - assert "author, new" == author_field.get_subfields("a")[0] - [distributor_field] = record.get_fields("264") - assert DataSource.BIBLIOTHECA == distributor_field.get_subfields("b")[0] - cached = work.marc_record - assert "old title" not in cached - assert "author, old" not in cached - assert "new title" in cached - assert "author, new" in cached - - # The settings we pass in get passed along to the annotator. - marc_exporter_fixture.annotator.annotate_work_record = MagicMock() - create_record(force_create=True) - assert marc_exporter_fixture.annotator.annotate_work_record.call_count == 1 - assert ( - marc_exporter_fixture.annotator.annotate_work_record.call_args.kwargs[ - "settings" - ] - == marc_exporter_fixture.library_settings - ) - @freeze_time("2020-01-01 00:00:00") - def test_create_record_roundtrip( - self, db: DatabaseTransactionFixture, marc_exporter_fixture: MarcExporterFixture - ): - # Create a marc record from a work with special characters - # in both the title and author name and round-trip it to - # the DB and back again to make sure we are creating records - # we can understand. - # - # We freeze the current time here, because a MARC record has - # a timestamp when it was created and we need the created - # records to match. 
- - # Creates a new record and saves it to the database - work = db.work( - title="Little Mimi\u2019s First Counting Lesson", - authors=["Lagerlo\xf6f, Selma Ottiliana Lovisa,"], - with_license_pool=True, - ) - create_record = functools.partial( - MARCExporter.create_record, - work=work, - annotator=marc_exporter_fixture.annotator, - settings=marc_exporter_fixture.settings, - library_settings=marc_exporter_fixture.library_settings, + # Make sure we pass the expected arguments to Annotator.annotate_work_record + marc_exporter_fixture.mock_annotator.annotate_work_record.assert_called_once_with( + mock_revised, + work, + work.license_pools[0], + work.license_pools[0].presentation_edition, + work.license_pools[0].identifier, ) - record = create_record() - loaded_record = create_record() - assert record is not None - assert loaded_record is not None - assert record.as_marc() == loaded_record.as_marc() - - # Loads an existing record from the DB - new_work = get_one(db.session, Work, id=work.id) - new_record = create_record(work=new_work) - assert new_record is not None - assert record.as_marc() == new_record.as_marc() - @pytest.mark.parametrize("object_type", ["lane", "worklist"]) - def test_records_lane( + def test_records( self, - object_type: str, db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, ): - if object_type == "lane": - lane_or_wl = db.lane("Test Lane", genres=["Mystery"]) - elif object_type == "worklist": - lane_or_wl = WorkList() - lane_or_wl.initialize(db.default_library(), display_name="All Books") - else: - raise RuntimeError() - exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - search_engine = marc_exporter_fixture.search_engine + storage_service = marc_exporter_fixture.s3_service + creation_time = marc_exporter_fixture.now - # If there's a storage protocol but not corresponding storage integration, - # it raises an exception. - pytest.raises(Exception, exporter.records, lane_or_wl, annotator) + marc_exporter_fixture.records() - storage_service = s3_service_fixture.mock_service() - exporter.records( - lane_or_wl, - annotator, - storage_service, - query_batch_size=1, - search_engine=search_engine, - ) - - # The file was mirrored and a CachedMARCFile was created to track the mirrored file. + # The file was mirrored and a MarcFile was created to track the mirrored file. 
assert len(storage_service.uploads) == 1 - [cache] = db.session.query(CachedMARCFile).all() - assert cache.library == db.default_library() - if object_type == "lane": - assert cache.lane == lane_or_wl - else: - assert cache.lane is None - assert cache.representation.content is None - assert storage_service.uploads[0].key == "{}/{}/{}.mrc".format( - db.default_library().short_name, - str(cache.representation.fetched_at), - lane_or_wl.display_name, - ) - assert quote(storage_service.uploads[0].key) in cache.representation.mirror_url - assert cache.start_time is None - assert marc_exporter_fixture.now < cache.end_time + [cache] = db.session.query(MarcFile).all() + assert cache.library == marc_exporter_fixture.library + assert cache.collection == marc_exporter_fixture.collection + + short_name = marc_exporter_fixture.library.short_name + collection_name = marc_exporter_fixture.collection.name + date_str = creation_time.strftime("%Y-%m-%d") + uuid_str = uuid_encode(cache.id) + + assert ( + cache.key + == f"marc/{short_name}/{collection_name}.full.{date_str}.{uuid_str}.mrc" + ) + assert cache.created == creation_time + assert cache.since is None records = list(MARCReader(storage_service.uploads[0].content)) assert len(records) == 2 title_fields = [record.get_fields("245") for record in records] titles = [fields[0].get_subfields("a")[0] for fields in title_fields] - assert set(titles) == { + assert titles == [ marc_exporter_fixture.w1.title, marc_exporter_fixture.w2.title, - } + ] + + def test_records_since_time( + self, + db: DatabaseTransactionFixture, + marc_exporter_fixture: MarcExporterFixture, + ): + # If the `since` parameter is set, only works updated since that time + # are included in the export and the filename reflects that we created + # a partial export. + since = marc_exporter_fixture.now - datetime.timedelta(days=3) + storage_service = marc_exporter_fixture.s3_service + creation_time = marc_exporter_fixture.now + + marc_exporter_fixture.records( + since_time=since, + ) + [cache] = db.session.query(MarcFile).all() + assert cache.library == marc_exporter_fixture.library + assert cache.collection == marc_exporter_fixture.collection + + short_name = marc_exporter_fixture.library.short_name + collection_name = marc_exporter_fixture.collection.name + from_date = since.strftime("%Y-%m-%d") + to_date = creation_time.strftime("%Y-%m-%d") + uuid_str = uuid_encode(cache.id) + + assert ( + cache.key + == f"marc/{short_name}/{collection_name}.delta.{from_date}.{to_date}.{uuid_str}.mrc" + ) + assert cache.created == creation_time + assert cache.since == since - assert marc_exporter_fixture.w1.title in marc_exporter_fixture.w1.marc_record - assert marc_exporter_fixture.w2.title in marc_exporter_fixture.w2.marc_record + # Only the work updated since the `since` time is included in the export. + [record] = list(MARCReader(storage_service.uploads[0].content)) + [title_field] = record.get_fields("245") + assert title_field.get_subfields("a")[0] == marc_exporter_fixture.w1.title - def test_records_start_time( + def test_records_none( self, db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, + caplog: LogCaptureFixture, ): - # If a start time is set, it's used in the mirror url. - # - # (Our mock search engine returns everthing in its 'index', - # so this doesn't test that the start time is actually used to - # find works -- that's in the search index tests and the - # tests of MARCExporterFacets.) 
- start_time = marc_exporter_fixture.now - datetime.timedelta(days=3) - exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - search_engine = marc_exporter_fixture.search_engine - lane = db.lane("Test Lane", genres=["Mystery"]) - storage_service = s3_service_fixture.mock_service() - - exporter.records( - lane, - annotator, - storage_service, - start_time=start_time, - query_batch_size=2, - search_engine=search_engine, - ) - [cache] = db.session.query(CachedMARCFile).all() - - assert cache.library == db.default_library() - assert cache.lane == lane - assert cache.representation.content is None - assert storage_service.uploads[0].key == "{}/{}-{}/{}.mrc".format( - db.default_library().short_name, - str(start_time), - str(cache.representation.fetched_at), - lane.display_name, - ) - assert cache.start_time == start_time - assert marc_exporter_fixture.now < cache.end_time - - def test_records_empty_search( + # If there are no works to export, no file is created and a log message is generated. + caplog.set_level(logging.INFO) + + storage_service = marc_exporter_fixture.s3_service + + # Remove the works from the database. + db.session.delete(marc_exporter_fixture.w1) + db.session.delete(marc_exporter_fixture.w2) + + marc_exporter_fixture.records() + + assert [] == storage_service.uploads + assert db.session.query(MarcFile).count() == 0 + assert len(caplog.records) == 1 + assert "No MARC records to upload" in caplog.text + + def test_records_exception( self, db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, + caplog: LogCaptureFixture, ): - # If the search engine returns no contents for the lane, - # nothing will be mirrored, but a CachedMARCFile is still - # created to track that we checked for updates. + # If an exception occurs while exporting, no file is created and a log message is generated. + caplog.set_level(logging.ERROR) + exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - empty_search_engine = ExternalSearchIndexFake(db.session) - lane = db.lane("Test Lane", genres=["Mystery"]) - storage_service = s3_service_fixture.mock_service() + storage_service = marc_exporter_fixture.s3_service - exporter.records( - lane, - annotator, - storage_service, - search_engine=empty_search_engine, - ) + # Mock our query function to raise an exception. + exporter.query_works = MagicMock(side_effect=Exception("Boom!")) + + marc_exporter_fixture.records() assert [] == storage_service.uploads - [cache] = db.session.query(CachedMARCFile).all() - assert cache.library == db.default_library() - assert cache.lane == lane - assert cache.representation.content is None - assert cache.start_time is None - assert marc_exporter_fixture.now < cache.end_time + assert db.session.query(MarcFile).count() == 0 + assert len(caplog.records) == 1 + assert "Failed to upload MARC file" in caplog.text + assert "Boom!" 
in caplog.text def test_records_minimum_size( self, - db: DatabaseTransactionFixture, - s3_service_fixture: S3ServiceFixture, marc_exporter_fixture: MarcExporterFixture, ): - lane = db.lane(genres=["Mystery"]) - storage_service = s3_service_fixture.mock_service() exporter = marc_exporter_fixture.exporter - annotator = marc_exporter_fixture.annotator - search_engine = marc_exporter_fixture.search_engine - - # Make sure we page exactly how many times we need to - works = [ - db.work(genre="Mystery", with_open_access_download=True) for _ in range(4) - ] - search_engine.mock_query_works(works) + storage_service = marc_exporter_fixture.s3_service exporter.MINIMUM_UPLOAD_BATCH_SIZE_BYTES = 100 + # Mock the "records" generated, and force the response to be of certain sizes created_record_mock = MagicMock() created_record_mock.as_marc = MagicMock( @@ -779,14 +874,13 @@ def test_records_minimum_size( ) exporter.create_record = lambda *args: created_record_mock - exporter.records( - lane, - annotator, - storage_service, - search_engine=search_engine, - query_batch_size=1, + # Mock the query_works to return 4 works + exporter.query_works = MagicMock( + return_value=[MagicMock(), MagicMock(), MagicMock(), MagicMock()] ) + marc_exporter_fixture.records() + assert storage_service.mocked_multipart_upload is not None # Even though there are 4 parts, we upload in 3 batches due to minimum size limitations # The "4"th part gets uploaded due it being the tail piece @@ -796,26 +890,3 @@ def test_records_minimum_size( b"2" * 20 + b"3" * 500, b"4" * 10, ] - - -class TestMARCExporterFacets: - def test_modify_search_filter(self): - # A facet object. - facets = MARCExporterFacets("some start time") - - # A filter about to be modified by the facet object. - filter = Filter() - filter.order_ascending = False - - facets.modify_search_filter(filter) - - # updated_after has been set and results are to be returned in - # order of increasing last_update_time. - assert "last_update_time" == filter.order - assert True == filter.order_ascending - assert "some start time" == filter.updated_after - - def test_scoring_functions(self): - # A no-op. - facets = MARCExporterFacets("some start time") - assert [] == facets.scoring_functions(object()) diff --git a/tests/core/test_opds_import.py b/tests/core/test_opds_import.py index 11cc9f1f53..3e1c2626d9 100644 --- a/tests/core/test_opds_import.py +++ b/tests/core/test_opds_import.py @@ -1406,7 +1406,7 @@ def test_import_open_access_audiobook( [august_pool] = imported_pools assert True == august_pool.open_access - assert download_manifest_url == august_pool._open_access_download_url + assert download_manifest_url == august_pool.open_access_download_url [lpdm] = august_pool.delivery_mechanisms assert ( diff --git a/tests/core/test_scripts.py b/tests/core/test_scripts.py index f41269c58b..18344d44ba 100644 --- a/tests/core/test_scripts.py +++ b/tests/core/test_scripts.py @@ -2000,7 +2000,7 @@ def test_do_run( work = db.work(with_license_pool=True) work2 = db.work(with_license_pool=True) wcr = WorkCoverageRecord - decoys = [wcr.QUALITY_OPERATION, wcr.GENERATE_MARC_OPERATION] + decoys = [wcr.QUALITY_OPERATION, wcr.SUMMARY_OPERATION] # Set up some coverage records. 
for operation in decoys + [wcr.UPDATE_SEARCH_INDEX_OPERATION]: @@ -2044,7 +2044,7 @@ def test_do_run(self, db: DatabaseTransactionFixture): work = db.work() work2 = db.work() wcr = WorkCoverageRecord - decoys = [wcr.QUALITY_OPERATION, wcr.GENERATE_MARC_OPERATION] + decoys = [wcr.QUALITY_OPERATION, wcr.SUMMARY_OPERATION] # Set up some coverage records. for operation in decoys + [wcr.UPDATE_SEARCH_INDEX_OPERATION]: diff --git a/tests/core/util/test_uuid.py b/tests/core/util/test_uuid.py new file mode 100644 index 0000000000..03bc27aa04 --- /dev/null +++ b/tests/core/util/test_uuid.py @@ -0,0 +1,40 @@ +from uuid import UUID + +import pytest + +from core.util.uuid import uuid_decode, uuid_encode + + +@pytest.mark.parametrize( + "uuid,expected", + [ + ("804184d9-ac4f-4cd3-8ad0-a362d71a7431", "gEGE2axPTNOK0KNi1xp0MQ"), + ("e34f3186-c563-4211-a52a-3a866b214963", "408xhsVjQhGlKjqGayFJYw"), + ("c4b0e2a0-9e4a-4b0e-8f4e-2d6d9d5a8a1e", "xLDioJ5KSw6PTi1tnVqKHg"), + ("55ff6224-8ced-41f8-9fb2-eda74657ff56", "Vf9iJIztQfifsu2nRlf_Vg"), + ], +) +def test_uuid_encode_decode(uuid: str, expected: str): + # Create a UUID object from the string + uuid_obj = UUID(uuid) + + # Test that we can encode the uuid and get the expected result + encoded = uuid_encode(uuid_obj) + assert len(encoded) == 22 + assert encoded == expected + + # Test that we can round-trip the encoded string back to a UUID + decoded = uuid_decode(encoded) + assert isinstance(decoded, UUID) + assert str(decoded) == uuid + assert decoded == uuid_obj + + +def test_uuid_decode_error(): + # Invalid length + with pytest.raises(ValueError): + uuid_decode("gE") + + # Invalid characters + with pytest.raises(ValueError): + uuid_decode("/~") diff --git a/tests/fixtures/s3.py b/tests/fixtures/s3.py index 60c10e5687..b332a44244 100644 --- a/tests/fixtures/s3.py +++ b/tests/fixtures/s3.py @@ -2,17 +2,7 @@ import functools import sys -from types import TracebackType -from typing import ( - TYPE_CHECKING, - BinaryIO, - List, - Literal, - NamedTuple, - Optional, - Protocol, - Type, -) +from typing import TYPE_CHECKING, BinaryIO, List, NamedTuple, Optional, Protocol from unittest.mock import MagicMock import pytest @@ -57,22 +47,19 @@ def __init__( def __enter__(self) -> Self: return self - def __exit__( - self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> Literal[False]: + def upload_part(self, content: bytes) -> None: + self.content_parts.append(content) + self.content += content + + def _upload_complete(self) -> None: if self.content: self._complete = True self.parent.uploads.append( MockS3ServiceUpload(self.key, self.content, self.media_type) ) - return False - def upload_part(self, content: bytes) -> None: - self.content_parts.append(content) - self.content += content + def _upload_abort(self) -> None: + ... 
class MockS3Service(S3Service): diff --git a/tests/migration/test_20231124_1c14468b74ce.py b/tests/migration/test_20231124_1c14468b74ce.py index d96137c66f..1e197ead20 100644 --- a/tests/migration/test_20231124_1c14468b74ce.py +++ b/tests/migration/test_20231124_1c14468b74ce.py @@ -26,7 +26,7 @@ def test_migration( create_identifier: CreateIdentifier, create_license_pool: CreateLicensePool, ) -> None: - alembic_runner.migrate_up_to(MIGRATION_UID) + alembic_runner.migrate_down_to(MIGRATION_UID) alembic_runner.migrate_down_one() with alembic_engine.connect() as connection: From d5e4df970d13921d32ef93e3dbe4324c78197b06 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 6 Dec 2023 10:32:58 -0400 Subject: [PATCH 206/262] Additional warning when marc exporter has no collections configured. (#1556) --- api/controller_marc.py | 10 +++++++++- tests/api/test_controller_marc.py | 10 ++++++++++ 2 files changed, 19 insertions(+), 1 deletion(-) diff --git a/api/controller_marc.py b/api/controller_marc.py index 007c917ffc..d72fe4d999 100644 --- a/api/controller_marc.py +++ b/api/controller_marc.py @@ -140,7 +140,15 @@ def download_page_body(self, session: Session, library: Library) -> str: marc_files = self.get_files(session, library) if len(marc_files) == 0: - return "
<p>" + "MARC files aren't ready to download yet." + "</p>"
+            # Are there any collections configured to export MARC records?
+            if any(c.export_marc_records for c in library.collections):
+                return "<p>" + "MARC files aren't ready to download yet." + "</p>"
+            else:
+                return (
+                    "<p>"
+                    + "No collections are configured to export MARC records."
+                    + "</p>
" + ) body = "" for collection_name, files in marc_files.items(): diff --git a/tests/api/test_controller_marc.py b/tests/api/test_controller_marc.py index ed0c5c40a9..0ddc6f5637 100644 --- a/tests/api/test_controller_marc.py +++ b/tests/api/test_controller_marc.py @@ -254,6 +254,16 @@ def test_download_page_with_full_and_delta( in html ) + def test_download_page_with_exporter_but_no_collection( + self, marc_record_controller_fixture: MARCRecordControllerFixture + ): + marc_record_controller_fixture.integration() + marc_record_controller_fixture.collection.export_marc_records = False + + response = marc_record_controller_fixture.controller.download_page() + html = marc_record_controller_fixture.get_response_html(response) + assert "No collections are configured to export MARC records" in html + def test_download_page_with_exporter_but_no_files( self, marc_record_controller_fixture: MARCRecordControllerFixture ): From 14c4a3da868054cd90ca40bd0f58e8d928024756 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Dec 2023 16:45:30 +0000 Subject: [PATCH 207/262] Bump actions/setup-python from 4 to 5 (#1558) --- .github/workflows/lint.yml | 2 +- .github/workflows/mypy.yml | 2 +- .github/workflows/test-build.yml | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 9af0a6d96c..07e42157e0 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -14,7 +14,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index 232d36124a..c108cf186d 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -13,7 +13,7 @@ jobs: - uses: actions/checkout@v4 - name: Set up Python 🐍 - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 1d985105db..18fc21a9d9 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -32,7 +32,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} @@ -82,7 +82,7 @@ jobs: run: sudo ethtool -K eth0 tx off rx off - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.9 @@ -329,7 +329,7 @@ jobs: uses: docker/setup-buildx-action@v3 - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: python-version: 3.9 From 875afbe556069304f69b9fbbddf52a50e69c3a61 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 6 Dec 2023 13:06:39 -0400 Subject: [PATCH 208/262] MARC export by collection - Cleanup Migration (PP-59) (#1552) * Cleanup unused DB table and column. * Add a cleanup migration * Fix some mypy issues * Handle more complicated S3 urls. * Code review feedback: split migrations. 
--- ...dbea3d43b_marc_export_cleanup_migration.py | 97 +++++++ ...31206_e06f965879ab_marc_s3_file_cleanup.py | 77 ++++++ core/model/__init__.py | 1 - core/model/cachedfeed.py | 44 ---- core/model/work.py | 8 - core/service/storage/s3.py | 3 + tests/core/service/storage/test_s3.py | 27 ++ tests/migration/conftest.py | 105 ++++++-- tests/migration/test_20231206_e06f965879ab.py | 241 ++++++++++++++++++ 9 files changed, 535 insertions(+), 68 deletions(-) create mode 100644 alembic/versions/20231204_d3cdbea3d43b_marc_export_cleanup_migration.py create mode 100644 alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py delete mode 100644 core/model/cachedfeed.py create mode 100644 tests/migration/test_20231206_e06f965879ab.py diff --git a/alembic/versions/20231204_d3cdbea3d43b_marc_export_cleanup_migration.py b/alembic/versions/20231204_d3cdbea3d43b_marc_export_cleanup_migration.py new file mode 100644 index 0000000000..1d3dd425bb --- /dev/null +++ b/alembic/versions/20231204_d3cdbea3d43b_marc_export_cleanup_migration.py @@ -0,0 +1,97 @@ +"""MARC Export cleanup migration. + +Revision ID: d3cdbea3d43b +Revises: e06f965879ab +Create Date: 2023-12-04 17:23:26.396526+00:00 + +""" + +import sqlalchemy as sa +from sqlalchemy.dialects import postgresql + +from alembic import op + +# revision identifiers, used by Alembic. +revision = "d3cdbea3d43b" +down_revision = "e06f965879ab" +branch_labels = None +depends_on = None + + +def upgrade() -> None: + # remove the coverage records for the cachedmarcfiles + op.execute("DELETE FROM coveragerecords WHERE operation = 'generate-marc'") + + # Remove the foreign key constraint on the cachedmarcfiles table + op.drop_constraint( + "cachedmarcfiles_representation_id_fkey", + "cachedmarcfiles", + type_="foreignkey", + ) + + # Remove the representations for the cachedmarcfiles + op.execute( + "DELETE FROM representations WHERE id IN (SELECT representation_id FROM cachedmarcfiles)" + ) + + # Remove the cachedmarcfiles + op.drop_index("ix_cachedmarcfiles_end_time", table_name="cachedmarcfiles") + op.drop_index("ix_cachedmarcfiles_lane_id", table_name="cachedmarcfiles") + op.drop_index("ix_cachedmarcfiles_library_id", table_name="cachedmarcfiles") + op.drop_index("ix_cachedmarcfiles_start_time", table_name="cachedmarcfiles") + op.drop_table("cachedmarcfiles") + + # Remove the unused marc_record column from the works table + op.drop_column("works", "marc_record") + + +def downgrade() -> None: + op.add_column( + "works", + sa.Column("marc_record", sa.VARCHAR(), autoincrement=False, nullable=True), + ) + op.create_table( + "cachedmarcfiles", + sa.Column("id", sa.INTEGER(), autoincrement=True, nullable=False), + sa.Column("library_id", sa.INTEGER(), autoincrement=False, nullable=False), + sa.Column("lane_id", sa.INTEGER(), autoincrement=False, nullable=True), + sa.Column( + "representation_id", sa.INTEGER(), autoincrement=False, nullable=False + ), + sa.Column( + "start_time", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.Column( + "end_time", + postgresql.TIMESTAMP(timezone=True), + autoincrement=False, + nullable=True, + ), + sa.ForeignKeyConstraint( + ["lane_id"], ["lanes.id"], name="cachedmarcfiles_lane_id_fkey" + ), + sa.ForeignKeyConstraint( + ["library_id"], ["libraries.id"], name="cachedmarcfiles_library_id_fkey" + ), + sa.ForeignKeyConstraint( + ["representation_id"], + ["representations.id"], + name="cachedmarcfiles_representation_id_fkey", + ), + sa.PrimaryKeyConstraint("id", name="cachedmarcfiles_pkey"), + ) + 
op.create_index( + "ix_cachedmarcfiles_start_time", "cachedmarcfiles", ["start_time"], unique=False + ) + op.create_index( + "ix_cachedmarcfiles_library_id", "cachedmarcfiles", ["library_id"], unique=False + ) + op.create_index( + "ix_cachedmarcfiles_lane_id", "cachedmarcfiles", ["lane_id"], unique=False + ) + op.create_index( + "ix_cachedmarcfiles_end_time", "cachedmarcfiles", ["end_time"], unique=False + ) diff --git a/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py new file mode 100644 index 0000000000..e64bfc78b3 --- /dev/null +++ b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py @@ -0,0 +1,77 @@ +"""MARC S3 file cleanup. + +Revision ID: e06f965879ab +Revises: 0039f3f12014 +Create Date: 2023-12-06 16:04:36.936466+00:00 + +""" +from typing import Optional +from urllib.parse import urlparse + +from alembic import op +from core.migration.util import migration_logger +from core.service.container import container_instance + +# revision identifiers, used by Alembic. +revision = "e06f965879ab" +down_revision = "0039f3f12014" +branch_labels = None +depends_on = None + + +def parse_key_from_url(url: str, bucket: str) -> Optional[str]: + """Parse the key from a URL. + + :param url: The URL to parse. + :return: The key, or None if the URL is not a valid S3 URL. + """ + parsed_url = urlparse(url) + + if f"/{bucket}/" in parsed_url.path: + return parsed_url.path.split(f"/{bucket}/", 1)[1] + + if bucket in parsed_url.netloc: + return parsed_url.path.lstrip("/") + + return None + + +def upgrade() -> None: + # Before removing the cachedmarcfiles table, we want to clean up + # the cachedmarcfiles stored in s3. + # + # Note: if you are running this migration on a development system and you want + # to skip deleting these files you can just comment out the migration code below. + services = container_instance() + public_s3 = services.storage.public() + log = migration_logger(revision) + + # Check if there are any cachedmarcfiles in s3 + connection = op.get_bind() + cached_files = connection.execute( + "SELECT r.mirror_url FROM cachedmarcfiles cmf JOIN representations r ON cmf.representation_id = r.id" + ).all() + if public_s3 is None and len(cached_files) > 0: + raise RuntimeError( + "There are cachedmarcfiles in the database, but no public s3 storage configured!" 
+ ) + + keys_to_delete = [] + for cached_file in cached_files: + url = cached_file.mirror_url + bucket = public_s3.bucket + key = parse_key_from_url(url, bucket) + if key is None: + raise RuntimeError(f"Unexpected URL format: {url} (bucket: {bucket})") + generated_url = public_s3.generate_url(key) + if generated_url != url: + raise RuntimeError(f"URL mismatch: {url} != {generated_url}") + keys_to_delete.append(key) + + for key in keys_to_delete: + log.info(f"Deleting {key} from s3 bucket {public_s3.bucket}") + public_s3.delete(key) + + +def downgrade() -> None: + pass diff --git a/core/model/__init__.py b/core/model/__init__.py index 8e313232dc..fa58803d01 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -514,7 +514,6 @@ def _bulk_operation(self): SAMLFederation, ) from core.model.admin import Admin, AdminRole -from core.model.cachedfeed import _CachedMARCFile_deprecated from core.model.circulationevent import CirculationEvent from core.model.classification import Classification, Genre, Subject from core.model.collection import ( diff --git a/core/model/cachedfeed.py b/core/model/cachedfeed.py deleted file mode 100644 index ec2525c834..0000000000 --- a/core/model/cachedfeed.py +++ /dev/null @@ -1,44 +0,0 @@ -from __future__ import annotations - -from typing import TYPE_CHECKING - -from sqlalchemy import Column, DateTime, ForeignKey, Integer -from sqlalchemy.orm import Mapped, relationship - -from core.model import Base - -if TYPE_CHECKING: - from core.lane import Lane - from core.model import Library, Representation - - -class _CachedMARCFile_deprecated(Base): - """ - A record that a MARC file has been created and cached for a particular lane. - - This table is deprecated and will be removed in a future release. - """ - - __tablename__ = "cachedmarcfiles" - id = Column(Integer, primary_key=True) - - # Every MARC file is associated with a library and a lane. If the - # lane is null, the file is for the top-level WorkList. - library_id = Column(Integer, ForeignKey("libraries.id"), nullable=False, index=True) - library: Mapped[Library] = relationship( - "Library", - ) - - lane_id = Column(Integer, ForeignKey("lanes.id"), nullable=True, index=True) - lane: Mapped[Lane] = relationship( - "Lane", - ) - - # The representation for this file stores the URL where it was mirrored. - representation_id = Column( - Integer, ForeignKey("representations.id"), nullable=False - ) - representation: Mapped[Representation] = relationship("Representation") - - start_time = Column(DateTime(timezone=True), nullable=True, index=True) - end_time = Column(DateTime(timezone=True), nullable=True, index=True) diff --git a/core/model/work.py b/core/model/work.py index 0f3b97dfd8..b2120a353f 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -19,7 +19,6 @@ ForeignKey, Integer, Numeric, - String, Unicode, ) from sqlalchemy.dialects.postgresql import INT4RANGE @@ -209,16 +208,9 @@ class Work(Base): # will be made to make the Work presentation ready. presentation_ready_exception = Column(Unicode, default=None, index=True) - # A precalculated MARC record containing metadata about this - # work that would be relevant to display in a library's public - # catalog. - # TODO: This field has been deprecated and will be removed in a future release. - _marc_record = Column("marc_record", String, default=None) - # These fields are potentially large and can be deferred if you # don't need all the data in a Work. 
LARGE_FIELDS = [ - "_marc_record", "summary_text", ] diff --git a/core/service/storage/s3.py b/core/service/storage/s3.py index 39273ac58c..c64360ae09 100644 --- a/core/service/storage/s3.py +++ b/core/service/storage/s3.py @@ -177,6 +177,9 @@ def generate_url(self, key: str) -> str: bucket=self.bucket, key=quote(key), region=self.region ) + def delete(self, key: str) -> None: + self.client.delete_object(Bucket=self.bucket, Key=key) + def store( self, key: str, diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py index 5a168d1f20..c68c253acd 100644 --- a/tests/core/service/storage/test_s3.py +++ b/tests/core/service/storage/test_s3.py @@ -99,6 +99,15 @@ def test_generate_url( url = service.generate_url(key) assert url == expected + def test_delete(self, s3_service_fixture: S3ServiceFixture): + """The S3Service.delete method deletes the object from the bucket.""" + service = s3_service_fixture.service() + service.client.delete_object = MagicMock() + service.delete("key") + service.client.delete_object.assert_called_once_with( + Bucket=s3_service_fixture.bucket, Key="key" + ) + @pytest.mark.parametrize( "content", ["foo bar baz", b"byte string"], @@ -288,6 +297,24 @@ def s3_service_integration_fixture() -> ( @pytest.mark.minio class TestS3ServiceIntegration: + def test_delete(self, s3_service_integration_fixture: S3ServiceIntegrationFixture): + """The S3Service.delete method deletes the object from the bucket.""" + service = s3_service_integration_fixture.public + bucket = service.bucket + + raw_client = s3_service_integration_fixture.s3_client + content = BytesIO() + content.write(b"foo bar baz") + raw_client.upload_fileobj(content, bucket, "key") + + bucket_contents = raw_client.list_objects(Bucket=bucket).get("Contents", []) + assert len(bucket_contents) == 1 + assert bucket_contents[0]["Key"] == "key" + + service.delete("key") + bucket_contents = raw_client.list_objects(Bucket=bucket).get("Contents", []) + assert len(bucket_contents) == 0 + @pytest.mark.parametrize( "key, bucket, content, content_type", [ diff --git a/tests/migration/conftest.py b/tests/migration/conftest.py index ff26004cc8..a5b2c87869 100644 --- a/tests/migration/conftest.py +++ b/tests/migration/conftest.py @@ -314,32 +314,31 @@ def fixture( return fixture -class CreateIdentifier(Protocol): +class CreateIdentifier: def __call__( self, connection: Connection, - identifier: str, - type: str, - ) -> int: - ... 
- - -@pytest.fixture -def create_identifier() -> CreateIdentifier: - def fixture( - connection: Connection, - identifier: str, - type: str, + identifier: Optional[str] = None, + type: Optional[str] = None, ) -> int: + identifier = identifier or self.random_name() + type = type or self.random_name() identifier_row = connection.execute( "INSERT INTO identifiers (identifier, type) VALUES (%s, %s) returning id", identifier, type, ).fetchone() assert identifier_row is not None - return cast(int, identifier_row.id) + assert isinstance(identifier_row.id, int) + return identifier_row.id - return fixture + def __init__(self, random_name: RandomName) -> None: + self.random_name = random_name + + +@pytest.fixture +def create_identifier(random_name: RandomName) -> CreateIdentifier: + return CreateIdentifier(random_name) class CreateLicensePool(Protocol): @@ -371,3 +370,79 @@ def fixture( return cast(int, licensepool.id) return fixture + + +class CreateLane: + def __call__( + self, + connection: Connection, + library_id: int, + name: Optional[str] = None, + priority: int = 0, + inherit_parent_restrictions: bool = False, + include_self_in_grouped_feed: bool = False, + visible: bool = True, + ) -> int: + name = name or self.random_name() + lane = connection.execute( + "INSERT INTO lanes " + "(library_id, display_name, priority, size, inherit_parent_restrictions, " + "include_self_in_grouped_feed, visible) " + " VALUES (%s, %s, %s, 0, %s, %s, %s) returning id", + library_id, + name, + priority, + inherit_parent_restrictions, + include_self_in_grouped_feed, + visible, + ).fetchone() + assert lane is not None + assert isinstance(lane.id, int) + return lane.id + + def __init__(self, random_name: RandomName) -> None: + self.random_name = random_name + + +@pytest.fixture +def create_lane(random_name: RandomName) -> CreateLane: + return CreateLane(random_name) + + +class CreateCoverageRecord: + def __call__( + self, + connection: Connection, + operation: Optional[str] = None, + identifier_id: Optional[int] = None, + collection_id: Optional[int] = None, + ) -> int: + if identifier_id is None: + identifier_id = self.create_identifier(connection) + + if operation is None: + operation = self.random_name() + + row = connection.execute( + "INSERT INTO coveragerecords (operation, identifier_id, collection_id, timestamp) " + "VALUES (%s, %s, %s, '2021-01-01') returning id", + operation, + identifier_id, + collection_id, + ).first() + assert row is not None + assert isinstance(row.id, int) + return row.id + + def __init__( + self, create_identifier: CreateIdentifier, random_name: RandomName + ) -> None: + self.create_identifier = create_identifier + self.random_name = random_name + + +@pytest.fixture +def create_coverage_record( + create_identifier: CreateIdentifier, random_name: RandomName +) -> CreateCoverageRecord: + return CreateCoverageRecord(create_identifier, random_name) diff --git a/tests/migration/test_20231206_e06f965879ab.py b/tests/migration/test_20231206_e06f965879ab.py new file mode 100644 index 0000000000..d977004046 --- /dev/null +++ b/tests/migration/test_20231206_e06f965879ab.py @@ -0,0 +1,241 @@ +from typing import Optional, Tuple +from unittest.mock import MagicMock, call + +import pytest +from _pytest.logging import LogCaptureFixture +from pytest_alembic import MigrationContext +from sqlalchemy import inspect +from sqlalchemy.engine import Connection, Engine + +from core.service.container import container_instance +from core.service.storage.s3 import S3Service +from tests.migration.conftest import 
( + CreateCoverageRecord, + CreateIdentifier, + CreateLane, + CreateLibrary, +) + + +class CreateCachedMarcFile: + def __call__( + self, + connection: Connection, + url: str, + library_id: Optional[int] = None, + lane_id: Optional[int] = None, + ) -> Tuple[int, int]: + if library_id is None: + library_id = self.create_library(connection) + + if lane_id is None: + lane_id = self.create_lane(connection, library_id) + + representation_id = self.representation(connection, url) + + row = connection.execute( + "INSERT INTO cachedmarcfiles (representation_id, start_time, end_time, lane_id, library_id) " + "VALUES (%s, %s, %s, %s, %s) returning id", + (representation_id, "2021-01-01", "2021-01-02", library_id, lane_id), + ).first() + assert row is not None + file_id = row.id + + return representation_id, file_id + + def representation(self, connection: Connection, url: str) -> int: + row = connection.execute( + "INSERT INTO representations (media_type, mirror_url) " + "VALUES ('application/marc', %s) returning id", + url, + ).first() + assert row is not None + assert isinstance(row.id, int) + return row.id + + def __init__( + self, + create_library: CreateLibrary, + create_lane: CreateLane, + ) -> None: + self.create_library = create_library + self.create_lane = create_lane + + +@pytest.fixture +def create_cachedmarcfile( + create_library: CreateLibrary, + create_lane: CreateLane, + create_identifier: CreateIdentifier, +) -> CreateCachedMarcFile: + return CreateCachedMarcFile(create_library, create_lane) + + +MIGRATION_ID = "e06f965879ab" + + +def test_migration_no_s3_integration( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_cachedmarcfile: CreateCachedMarcFile, + caplog: LogCaptureFixture, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_ID) + alembic_runner.migrate_down_one() + + container = container_instance() + with container.storage.public.override(None): + # If there is no public s3 integration, and no cachedmarcfiles in the database, the migration should succeed + alembic_runner.migrate_up_one() + + alembic_runner.migrate_down_one() + # If there is no public s3 integration, but there are cachedmarcfiles in the database, the migration should fail + with alembic_engine.connect() as connection: + create_cachedmarcfile(connection, "http://s3.amazonaws.com/test-bucket/1.mrc") + + with pytest.raises(RuntimeError) as excinfo, container.storage.public.override( + None + ): + alembic_runner.migrate_up_one() + + assert ( + "There are cachedmarcfiles in the database, but no public s3 storage configured!" 
+ in str(excinfo.value) + ) + + +def test_migration_bucket_url_not_found( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_cachedmarcfile: CreateCachedMarcFile, + caplog: LogCaptureFixture, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_ID) + alembic_runner.migrate_down_one() + + container = container_instance() + mock_storage = MagicMock(spec=S3Service) + mock_storage.bucket = "test-bucket42" + + # If we can't parse the key from the URL, the migration should fail + with alembic_engine.connect() as connection: + create_cachedmarcfile(connection, "http://s3.amazonaws.com/test-bucket/1.mrc") + + with pytest.raises(RuntimeError) as excinfo, container.storage.public.override( + mock_storage + ): + alembic_runner.migrate_up_one() + + assert "Unexpected URL format" in str(excinfo.value) + + +def test_migration_bucket_url_different( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_cachedmarcfile: CreateCachedMarcFile, + caplog: LogCaptureFixture, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_ID) + alembic_runner.migrate_down_one() + + container = container_instance() + mock_storage = MagicMock(spec=S3Service) + + # If the generated URL doesn't match the original URL, the migration should fail + mock_storage.bucket = "test-bucket" + mock_storage.generate_url.return_value = ( + "http://s3.amazonaws.com/test-bucket/different-url.mrc" + ) + + with alembic_engine.connect() as connection: + create_cachedmarcfile(connection, "http://s3.amazonaws.com/test-bucket/1.mrc") + + with pytest.raises(RuntimeError) as excinfo, container.storage.public.override( + mock_storage + ): + alembic_runner.migrate_up_one() + + assert "URL mismatch" in str(excinfo.value) + + +def test_migration_success( + alembic_runner: MigrationContext, + alembic_engine: Engine, + create_library: CreateLibrary, + create_lane: CreateLane, + caplog: LogCaptureFixture, + create_cachedmarcfile: CreateCachedMarcFile, + create_coverage_record: CreateCoverageRecord, +) -> None: + alembic_runner.migrate_down_to(MIGRATION_ID) + alembic_runner.migrate_down_one() + + with alembic_engine.connect() as connection: + library_id = create_library(connection, "test-library") + lane_id = create_lane(connection, library_id, "test-lane") + + url1 = "http://s3.amazonaws.com/test-bucket/1.mrc" + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=url1, + ) + url2 = "http://test-bucket.us-west-2.s3.amazonaws.com/2.mrc" + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=url2, + ) + url3 = "http://test-bucket.custom-domain.com/3.mrc" + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=url3, + ) + unrelated_representation = create_cachedmarcfile.representation( + connection, "http://s3.amazonaws.com/test-bucket/4.mrc" + ) + + create_coverage_record(connection, "generate-marc") + unrelated_coverage_record = create_coverage_record(connection) + + mock_storage = MagicMock(spec=S3Service) + mock_storage.bucket = "test-bucket" + mock_storage.generate_url.side_effect = [url1, url2, url3] + + container = container_instance() + with container.storage.public.override(mock_storage): + alembic_runner.migrate_up_one() + + # We should have checked that the generated url is the same and deleted the files from s3 + assert mock_storage.generate_url.call_count == 3 + assert mock_storage.delete.call_count == 3 + assert mock_storage.delete.call_args_list == [ + call("1.mrc"), + call("2.mrc"), + call("3.mrc"), + ] + 
+ # But the representations and coveragerecords should still be there + with alembic_engine.connect() as connection: + assert connection.execute("SELECT id FROM representations").rowcount == 4 + assert connection.execute("SELECT id FROM coveragerecords").rowcount == 2 + + # The next migration takes care of those + alembic_runner.migrate_up_one() + + with alembic_engine.connect() as connection: + # The representation and coveragerecord that were not associated should still be there + assert connection.execute("SELECT id FROM representations").fetchall() == [ + (unrelated_representation,) + ] + assert connection.execute("SELECT id FROM coveragerecords").fetchall() == [ + (unrelated_coverage_record,) + ] + + # Cachedmarcfiles should be gone + inspector = inspect(connection) + assert inspector.has_table("cachedmarcfiles") is False From 96e0ca9a8a5c89092a96df9a819be9be14e0957c Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 6 Dec 2023 17:05:32 -0400 Subject: [PATCH 209/262] Fix 2d72d6876c52 migration failure with overdrive advantage collections (#1560) 20231101_2d72d6876c52_remove_collection_external_integration.py failed to properly account for Overdrive advantage collections, so the migration would fail with the following error: pydantic.error_wrappers.ValidationError: 3 validation errors for OverdriveSettings overdrive_website_id field required (type=value_error.missing) overdrive_client_key field required (type=value_error.missing) overdrive_client_secret field required (type=value_error.missing) --- ..._remove_collection_external_integration.py | 16 ++++++++-- tests/migration/test_20231101_2d72d6876c52.py | 32 +++++++++++++++++-- 2 files changed, 42 insertions(+), 6 deletions(-) diff --git a/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py index 2401ebb2f0..af04b6db99 100644 --- a/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py +++ b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py @@ -11,6 +11,7 @@ from alembic import op from api.integration.registry.license_providers import LicenseProvidersRegistry +from core.integration.base import HasChildIntegrationConfiguration from core.migration.util import migration_logger from core.model import json_serializer @@ -88,8 +89,8 @@ def upgrade() -> None: # also make sure that the new settings are valid for the integration before saving them # to the database. rows = conn.execute( - "SELECT ic.id as integration_id, ic.settings, ic.protocol, ic.goal, c.external_account_id FROM collections c " - "JOIN integration_configurations ic ON c.integration_configuration_id = ic.id" + "SELECT ic.id as integration_id, ic.settings, ic.protocol, ic.goal, c.external_account_id, c.parent_id " + "FROM collections c JOIN integration_configurations ic ON c.integration_configuration_id = ic.id" ).all() registry = LicenseProvidersRegistry() @@ -103,7 +104,16 @@ def upgrade() -> None: raise RuntimeError( f"Could not find implementation for protocol {row.protocol}" ) - settings_obj = impl_class.settings_class()(**settings_dict) + if row.parent_id is not None: + if issubclass(impl_class, HasChildIntegrationConfiguration): + settings_obj = impl_class.child_settings_class()(**settings_dict) + else: + raise RuntimeError( + f"Integration {row.integration_id} is a child integration, " + f"but {row.protocol} does not support child integrations." 
+ ) + else: + settings_obj = impl_class.settings_class()(**settings_dict) new_settings_dict = settings_obj.dict() if row.settings != new_settings_dict: new_settings = json_serializer(new_settings_dict) diff --git a/tests/migration/test_20231101_2d72d6876c52.py b/tests/migration/test_20231101_2d72d6876c52.py index 72ab925e7a..63ea342832 100644 --- a/tests/migration/test_20231101_2d72d6876c52.py +++ b/tests/migration/test_20231101_2d72d6876c52.py @@ -75,16 +75,18 @@ def create_collection( integration_configuration_id: int, external_account_id: Optional[str] = None, external_integration_id: Optional[int] = None, + parent_id: Optional[int] = None, ) -> int: collection = connection.execute( "INSERT INTO collections " - "(name, external_account_id, integration_configuration_id, external_integration_id) VALUES " - "(%s, %s, %s, %s) " + "(name, external_account_id, integration_configuration_id, external_integration_id, parent_id) VALUES " + "(%s, %s, %s, %s, %s) " "returning id", name, external_account_id, integration_configuration_id, external_integration_id, + parent_id, ).fetchone() assert collection is not None assert isinstance(collection.id, int) @@ -133,9 +135,18 @@ def test_migration( "LICENSE_GOAL", settings=integration_2_settings, ) + integration_3_settings: Dict[str, str] = {} + integration_3 = create_integration_configuration( + connection, + "collection_3", + "Overdrive", + "LICENSE_GOAL", + settings=integration_3_settings, + ) external_1 = create_external_integration(connection) external_2 = create_external_integration(connection) + external_3 = create_external_integration(connection) create_config_setting( connection, "token_auth_endpoint", "http://token.com/auth", external_1 @@ -147,6 +158,9 @@ def test_migration( collection_2 = create_collection( connection, "collection_2", integration_2, "1", external_2 ) + collection_3 = create_collection( + connection, "collection_3", integration_3, "5656", external_3, collection_2 + ) create_integration_library_configuration(connection, integration_1, library_1) create_integration_library_configuration(connection, integration_1, library_2) @@ -156,7 +170,7 @@ def test_migration( create_integration_library_configuration(connection, integration_2, library_2) create_collection_library(connection, collection_2, library_2) - # Test that the collections_libraries table has the correct forign key constraints + # Test that the collections_libraries table has the correct foreign key constraints with pytest.raises(IntegrityError) as excinfo: create_collection_library(connection, 99, 99) assert "violates foreign key constraint" in str(excinfo.value) @@ -222,6 +236,18 @@ def test_migration( } assert integration_2_actual.context == {} + integration_3_actual = connection.execute( + "select name, settings, context from integration_configurations where id = (%s)", + integration_3, + ).fetchone() + assert integration_3_actual is not None + assert integration_3_actual.name == "collection_3" + assert integration_3_actual.settings != integration_3_settings + assert integration_3_actual.settings == { + "external_account_id": "5656", + } + assert integration_3_actual.context == {} + # The foreign key constraints have been removed from the collections_libraries table create_collection_library(connection, 99, 99) From a170786a58d6c41b51d62c6b178e2b47c26238a5 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Wed, 6 Dec 2023 17:22:59 -0400 Subject: [PATCH 210/262] Fix 2d72d6876c52 migration failure collection name duplication (#1561) * Fix issue with migration where 
name was already in use. * Make sure we don't end up in an infinite loop. --- ..._remove_collection_external_integration.py | 34 +++++++++++++++++-- tests/migration/test_20231101_2d72d6876c52.py | 2 +- 2 files changed, 32 insertions(+), 4 deletions(-) diff --git a/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py index af04b6db99..20260dbf9f 100644 --- a/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py +++ b/alembic/versions/20231101_2d72d6876c52_remove_collection_external_integration.py @@ -5,6 +5,8 @@ Create Date: 2023-11-01 22:42:06.754873+00:00 """ +from collections import deque +from dataclasses import dataclass import sqlalchemy as sa from sqlalchemy.dialects import postgresql @@ -25,6 +27,15 @@ log = migration_logger(revision) +@dataclass +class RenameQueryRow: + collection_id: int + integration_id: int + integration_name: str + collection_name: str + deferral_count: int = 0 + + def upgrade() -> None: conn = op.get_bind() @@ -39,14 +50,31 @@ def upgrade() -> None: "ON c.integration_configuration_id = ic.id WHERE c.name != ic.name" ).all() - for row in rows: + integration_names = {row.integration_name for row in rows} + collection_renames = deque(RenameQueryRow(**r) for r in rows) + while collection_renames: + rename = collection_renames.popleft() + if rename.collection_name in integration_names: + # The collection name is already in use by an integration, so we need to rename the + # integration first. + log.info( + f"Collection name {rename.collection_name} is already in use. Deferring rename." + ) + rename.deferral_count += 1 + if rename.deferral_count > 3: + raise RuntimeError( + f"Unable to rename collection {rename.collection_id}. Max deferral count reached." + ) + collection_renames.append(rename) + continue log.info( - f"Updating name for collection {row.collection_id} from {row.integration_name} to {row.collection_name}." + f"Updating name for collection {rename.collection_id} from {rename.integration_name} to {rename.collection_name}." ) conn.execute( "UPDATE integration_configurations SET name = (%s) WHERE id = (%s)", - (row.collection_name, row.integration_id), + (rename.collection_name, rename.integration_id), ) + integration_names.remove(rename.integration_name) op.alter_column("collections", "name", existing_type=sa.VARCHAR(), nullable=True) op.drop_index("ix_collections_name", table_name="collections") diff --git a/tests/migration/test_20231101_2d72d6876c52.py b/tests/migration/test_20231101_2d72d6876c52.py index 63ea342832..48e27ae8ec 100644 --- a/tests/migration/test_20231101_2d72d6876c52.py +++ b/tests/migration/test_20231101_2d72d6876c52.py @@ -138,7 +138,7 @@ def test_migration( integration_3_settings: Dict[str, str] = {} integration_3 = create_integration_configuration( connection, - "collection_3", + "collection_1", "Overdrive", "LICENSE_GOAL", settings=integration_3_settings, From d6d4f54e41bcc1f4694769983f8bf55899f84a48 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 7 Dec 2023 15:06:19 -0400 Subject: [PATCH 211/262] Fix for e06f965879ab MARC file deletion migration (PP-773) (#1563) e06f965879ab was hard to test, since it involves both the DB and S3. After the migration ran on Minotaur and Cerberus, there are a couple changes needed: - The latest code puts the MARC file URL in Representation.mirror_url and Representation.url but older versions of the code only put the url in Representation.url. 
So older files were failing this migration because mirror_url is NULL. So its safe to just use URL for everything (verified this with a query against production DBs). - Some records have NULL in url (as well as mirror_url). I'm not sure how / when this happened. But there is no way we can delete the file in this case, so it doesn't fail the migration, just outputs a log message. - The URLs are stored URL escaped, and S3 keys are expected not to be escaped, so we have to call unquote on the keys. - Don't fail the migration if we can't parse the key out of the URL. Just log what happened, don't delete that file and move on. --- ...31206_e06f965879ab_marc_s3_file_cleanup.py | 28 +++++++----- tests/migration/test_20231206_e06f965879ab.py | 45 ++++++------------- 2 files changed, 31 insertions(+), 42 deletions(-) diff --git a/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py index e64bfc78b3..509cc14165 100644 --- a/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py +++ b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py @@ -6,7 +6,7 @@ """ from typing import Optional -from urllib.parse import urlparse +from urllib.parse import unquote, urlparse from alembic import op from core.migration.util import migration_logger @@ -19,28 +19,33 @@ depends_on = None -def parse_key_from_url(url: str, bucket: str) -> Optional[str]: +def parse_key_from_url(url: Optional[str], bucket: str) -> Optional[str]: """Parse the key from a URL. :param url: The URL to parse. :return: The key, or None if the URL is not a valid S3 URL. """ + if url is None: + return None + parsed_url = urlparse(url) if f"/{bucket}/" in parsed_url.path: - return parsed_url.path.split(f"/{bucket}/", 1)[1] - - if bucket in parsed_url.netloc: - return parsed_url.path.lstrip("/") + key = parsed_url.path.split(f"/{bucket}/", 1)[1] + elif bucket in parsed_url.netloc: + key = parsed_url.path.lstrip("/") + else: + return None - return None + # The key stored in the DB is URL encoded, so we need to decode it + return unquote(key) def upgrade() -> None: # Before removing the cachedmarcfiles table, we want to clean up # the cachedmarcfiles stored in s3. # - # Note: if you are running this migration on a development system and you want + # Note: if you are running this migration on a development system, and you want # to skip deleting these files you can just comment out the migration code below. 
services = container_instance() public_s3 = services.storage.public() @@ -49,7 +54,7 @@ def upgrade() -> None: # Check if there are any cachedmarcfiles in s3 connection = op.get_bind() cached_files = connection.execute( - "SELECT r.mirror_url FROM cachedmarcfiles cmf JOIN representations r ON cmf.representation_id = r.id" + "SELECT r.url FROM cachedmarcfiles cmf JOIN representations r ON cmf.representation_id = r.id" ).all() if public_s3 is None and len(cached_files) > 0: raise RuntimeError( @@ -58,11 +63,12 @@ def upgrade() -> None: keys_to_delete = [] for cached_file in cached_files: - url = cached_file.mirror_url + url = cached_file.url bucket = public_s3.bucket key = parse_key_from_url(url, bucket) if key is None: - raise RuntimeError(f"Unexpected URL format: {url} (bucket: {bucket})") + log.info(f"Skipping cachedmarcfile with invalid URL: {url}") + continue generated_url = public_s3.generate_url(key) if generated_url != url: raise RuntimeError(f"URL mismatch: {url} != {generated_url}") diff --git a/tests/migration/test_20231206_e06f965879ab.py b/tests/migration/test_20231206_e06f965879ab.py index d977004046..c233bbf3b3 100644 --- a/tests/migration/test_20231206_e06f965879ab.py +++ b/tests/migration/test_20231206_e06f965879ab.py @@ -21,7 +21,7 @@ class CreateCachedMarcFile: def __call__( self, connection: Connection, - url: str, + url: Optional[str], library_id: Optional[int] = None, lane_id: Optional[int] = None, ) -> Tuple[int, int]: @@ -43,9 +43,9 @@ def __call__( return representation_id, file_id - def representation(self, connection: Connection, url: str) -> int: + def representation(self, connection: Connection, url: Optional[str]) -> int: row = connection.execute( - "INSERT INTO representations (media_type, mirror_url) " + "INSERT INTO representations (media_type, url) " "VALUES ('application/marc', %s) returning id", url, ).first() @@ -104,31 +104,6 @@ def test_migration_no_s3_integration( ) -def test_migration_bucket_url_not_found( - alembic_runner: MigrationContext, - alembic_engine: Engine, - create_cachedmarcfile: CreateCachedMarcFile, - caplog: LogCaptureFixture, -) -> None: - alembic_runner.migrate_down_to(MIGRATION_ID) - alembic_runner.migrate_down_one() - - container = container_instance() - mock_storage = MagicMock(spec=S3Service) - mock_storage.bucket = "test-bucket42" - - # If we can't parse the key from the URL, the migration should fail - with alembic_engine.connect() as connection: - create_cachedmarcfile(connection, "http://s3.amazonaws.com/test-bucket/1.mrc") - - with pytest.raises(RuntimeError) as excinfo, container.storage.public.override( - mock_storage - ): - alembic_runner.migrate_up_one() - - assert "Unexpected URL format" in str(excinfo.value) - - def test_migration_bucket_url_different( alembic_runner: MigrationContext, alembic_engine: Engine, @@ -188,7 +163,13 @@ def test_migration_success( lane_id=lane_id, url=url2, ) - url3 = "http://test-bucket.custom-domain.com/3.mrc" + create_cachedmarcfile( + connection, + library_id=library_id, + lane_id=lane_id, + url=None, + ) + url3 = "https://test-bucket.s3.us-west-2.amazonaws.com/test-1/2023-02-17%2006%3A38%3A01.837167%2B00%3A00-2023-03-21%2005%3A41%3A28.262257%2B00%3A00/Fiction.mrc" create_cachedmarcfile( connection, library_id=library_id, @@ -216,12 +197,14 @@ def test_migration_success( assert mock_storage.delete.call_args_list == [ call("1.mrc"), call("2.mrc"), - call("3.mrc"), + call( + "test-1/2023-02-17 06:38:01.837167+00:00-2023-03-21 05:41:28.262257+00:00/Fiction.mrc" + ), ] # But the 
representations and coveragerecords should still be there with alembic_engine.connect() as connection: - assert connection.execute("SELECT id FROM representations").rowcount == 4 + assert connection.execute("SELECT id FROM representations").rowcount == 5 assert connection.execute("SELECT id FROM coveragerecords").rowcount == 2 # The next migration takes care of those From 54311c86760932f988d422a9a61065ee86daaffe Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 8 Dec 2023 11:46:04 -0400 Subject: [PATCH 212/262] Allow pyinstrument to profile our test runs (#1557) * Allow pyinstrument to profile our test runs. * Better README. --- README.md | 17 +++++++++++++++++ tests/api/conftest.py | 6 ++++++ tests/core/conftest.py | 7 +++++++ 3 files changed, 30 insertions(+) diff --git a/README.md b/README.md index 63cc4f36d1..cf627c3c1d 100644 --- a/README.md +++ b/README.md @@ -743,6 +743,23 @@ module under the hood to do the profiling. This profiler uses [PyInstrument](https://pyinstrument.readthedocs.io/en/latest/) to profile the code. +#### Profiling tests suite + +PyInstrument can also be used to profile the test suite. This can be useful to identify slow tests, or to identify +performance regressions. + +To profile the core test suite, run the following command: + +```sh +pyinstrument -m pytest --no-cov tests/core/ +``` + +To profile the API test suite, run the following command: + +```sh +pyinstrument -m pytest --no-cov tests/api/ +``` + #### Environment Variables - `PALACE_PYINSTRUMENT`: Profiling will the enabled if this variable is set. The saved profile data will be available at diff --git a/tests/api/conftest.py b/tests/api/conftest.py index 109f98c015..d24165c8e1 100644 --- a/tests/api/conftest.py +++ b/tests/api/conftest.py @@ -1,3 +1,4 @@ +from freezegun.config import configure as fg_configure from pytest import register_assert_rewrite register_assert_rewrite("tests.fixtures.database") @@ -39,3 +40,8 @@ "tests.fixtures.tls_server", "tests.fixtures.vendor_id", ] + +# Make sure if we are using pyinstrument to profile tests, that +# freezegun doesn't interfere with it. +# See: https://github.com/spulec/freezegun#ignore-packages +fg_configure(extend_ignore_list=["pyinstrument"]) diff --git a/tests/core/conftest.py b/tests/core/conftest.py index fc9177cad2..d1fc72c50e 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -1,3 +1,5 @@ +from freezegun.config import configure as fg_configure + pytest_plugins = [ "tests.fixtures.announcements", "tests.fixtures.csv_files", @@ -13,3 +15,8 @@ "tests.fixtures.time", "tests.fixtures.tls_server", ] + +# Make sure if we are using pyinstrument to profile tests, that +# freezegun doesn't interfere with it. 
+# See: https://github.com/spulec/freezegun#ignore-packages +fg_configure(extend_ignore_list=["pyinstrument"]) From ec82afe47b6256f8d1dce7917520674219f945f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Dec 2023 16:10:16 +0000 Subject: [PATCH 213/262] Bump firebase-admin from 6.2.0 to 6.3.0 (#1559) --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index cedfee217f..b814e526c6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -598,8 +598,8 @@ files = [ jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, + {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, ] [package.extras] @@ -1164,13 +1164,13 @@ testing = ["covdefaults (>=2.3)", "coverage (>=7.3)", "diff-cover (>=7.7)", "pyt [[package]] name = "firebase-admin" -version = "6.2.0" +version = "6.3.0" description = "Firebase Admin Python SDK" optional = false python-versions = ">=3.7" files = [ - {file = "firebase_admin-6.2.0-py3-none-any.whl", hash = "sha256:e3c42351fb6194d7279a6fd9209a947005fb4ee7e9037d19762e6cb3da4a82e1"}, - {file = "firebase_admin-6.2.0.tar.gz", hash = "sha256:e3b334d18bbea039f2f3e8a792ad6870d2a7cc79a13ed10659dedd63f5b475e4"}, + {file = "firebase_admin-6.3.0-py3-none-any.whl", hash = "sha256:fcada47664f38b6da67fd924108b98029370554c9f762895d3f83e912cac5ab9"}, + {file = "firebase_admin-6.3.0.tar.gz", hash = "sha256:f040625b8cd3a15f99f84a797fe288ad5993c4034c355b7df3c37a99d39400e6"}, ] [package.dependencies] @@ -1322,12 +1322,12 @@ files = [ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -1429,8 +1429,8 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" From fc4ce80c2ac1b86fce94517f9a1fe520a8a84e35 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Dec 2023 16:10:48 +0000 Subject: [PATCH 214/262] Bump 
types-psycopg2 from 2.9.21.19 to 2.9.21.20 (#1562) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index b814e526c6..754df43618 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4198,13 +4198,13 @@ files = [ [[package]] name = "types-psycopg2" -version = "2.9.21.19" +version = "2.9.21.20" description = "Typing stubs for psycopg2" optional = false python-versions = ">=3.7" files = [ - {file = "types-psycopg2-2.9.21.19.tar.gz", hash = "sha256:ec3aae522dde9c41141597bc41123b4c955fb4093b1fc7ec6ee607795a0a088f"}, - {file = "types_psycopg2-2.9.21.19-py3-none-any.whl", hash = "sha256:8a4871df20c29e516622be8d66b91814c3262ff94112ff9e2f72a043d8fdf03c"}, + {file = "types-psycopg2-2.9.21.20.tar.gz", hash = "sha256:73baea689575bf5bb1b915b783fb0524044c6242928aeef1ae5a9e32f0780d3d"}, + {file = "types_psycopg2-2.9.21.20-py3-none-any.whl", hash = "sha256:5b1e2e1d9478f8a298ea7038f8ea988e0ccc1f0af39f84636d57ef0da6f29e95"}, ] [[package]] From 5910e724d9c4cbb3bc58772ae84d0ab3ec26e5d7 Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Fri, 8 Dec 2023 17:26:38 -0500 Subject: [PATCH 215/262] Add per-medium breakouts to dashboard statistics inventories. (PP-728) (#1549) * Add per-medium breakouts to inventories. * Remove self-hosted inventory property, since we don't self-host (incompatible changes). * Resolve PEP-8 and mypy issues with SQLAlchemy boolean filters. * Strict type checking for dashboard stats module. * Bump admin UI to v1.12.0. --- api/admin/config.py | 2 +- api/admin/dashboard_stats.py | 378 +++++++++++++++--------- api/admin/model/dashboard_statistics.py | 14 +- pyproject.toml | 1 + tests/api/admin/test_dashboard_stats.py | 207 +++++++++---- 5 files changed, 405 insertions(+), 197 deletions(-) diff --git a/api/admin/config.py b/api/admin/config.py index 957b0c3394..dc28340a83 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -17,7 +17,7 @@ class OperationalMode(str, Enum): class Configuration(LoggerMixin): APP_NAME = "Palace Collection Manager" PACKAGE_NAME = "@thepalaceproject/circulation-admin" - PACKAGE_VERSION = "1.11.0" + PACKAGE_VERSION = "1.12.0" STATIC_ASSETS = { "admin_js": "circulation-admin.js", diff --git a/api/admin/dashboard_stats.py b/api/admin/dashboard_stats.py index 07df5a1c08..9b8d4b6e6f 100644 --- a/api/admin/dashboard_stats.py +++ b/api/admin/dashboard_stats.py @@ -1,13 +1,13 @@ from __future__ import annotations -import typing +import dataclasses from datetime import datetime from functools import partial -from typing import Callable, Iterable +from typing import TYPE_CHECKING, Callable, Iterable from sqlalchemy.orm import Session -from sqlalchemy.sql import func -from sqlalchemy.sql.expression import and_, or_ +from sqlalchemy.sql import func, select +from sqlalchemy.sql.expression import and_, false, or_, true from api.admin.model.dashboard_statistics import ( CollectionInventory, @@ -16,7 +16,25 @@ PatronStatistics, StatisticsResponse, ) -from core.model import Admin, Collection, Hold, Library, LicensePool, Loan, Patron +from core.model import ( + Admin, + Collection, + Edition, + Hold, + Library, + LicensePool, + Loan, + Patron, +) + +if TYPE_CHECKING: + from sqlalchemy.sql.elements import ( + BinaryExpression, + BooleanClauseList, + ClauseElement, + ) + from sqlalchemy.sql.expression import ColumnElement + from sqlalchemy.sql.type_api import TypeEngine def generate_statistics(admin: Admin, db: Session) -> StatisticsResponse: @@ -24,17 +42,17 @@ def generate_statistics(admin: Admin, db: Session) -> 
StatisticsResponse: class Statistics: - METERED_LICENSE_FILTER = and_( # type: ignore[type-var] + METERED_LICENSE_FILTER = and_( LicensePool.licenses_owned > 0, - LicensePool.unlimited_access == False, - LicensePool.open_access == False, + LicensePool.unlimited_access == false(), + LicensePool.open_access == false(), ) - UNLIMITED_LICENSE_FILTER = and_( # type: ignore[type-var] - LicensePool.unlimited_access == True, - LicensePool.open_access == False, + UNLIMITED_LICENSE_FILTER = and_( + LicensePool.unlimited_access == true(), + LicensePool.open_access == false(), ) - OPEN_ACCESS_FILTER = LicensePool.open_access == True - AT_LEAST_ONE_LENDABLE_FILTER = or_( + OPEN_ACCESS_FILTER = LicensePool.open_access == true() + AT_LEAST_ONE_LOANABLE_FILTER = or_( UNLIMITED_LICENSE_FILTER, OPEN_ACCESS_FILTER, and_(METERED_LICENSE_FILTER, LicensePool.licenses_available > 0), @@ -43,6 +61,70 @@ class Statistics: def __init__(self, session: Session): self._db = session + def stats(self, admin: Admin) -> StatisticsResponse: + """Build and return a statistics response for admin user's authorized libraries.""" + + # Determine which libraries and collections are authorized for this user. + authorized_libraries = self._libraries_for_admin(admin) + authorized_collections_by_library = { + lib.short_name: set(lib.all_collections) for lib in authorized_libraries + } + all_authorized_collections: list[Collection] = [ + c for c in self._db.query(Collection) if admin.can_see_collection(c) + ] + + collection_inventories = sorted( + (self._create_collection_inventory(c) for c in all_authorized_collections), + key=lambda c: c.id, + ) + ( + collection_inventory_summary, + collection_inventory_summary_by_medium, + ) = _summarize_collection_inventories( + collection_inventories, all_authorized_collections + ) + + inventories_by_library = { + library_key: _summarize_collection_inventories( + collection_inventories, collections + ) + for library_key, collections in authorized_collections_by_library.items() + } + patron_stats_by_library = { + lib.short_name: self._gather_patron_stats(lib) + for lib in authorized_libraries + } + library_statistics = [ + LibraryStatistics( + key=lib.short_name, + name=lib.name or "(missing library name)", + patron_statistics=patron_stats_by_library[lib.short_name], + inventory_summary=inventories_by_library[lib.short_name][0], + inventory_by_medium=inventories_by_library[lib.short_name][1], + collection_ids=sorted( + [ + c.id + for c in authorized_collections_by_library[lib.short_name] + if c.id is not None + ] + ), + ) + for lib in authorized_libraries + ] + + # Accumulate patron summary statistics from authorized libraries. 
+ patron_summary = sum( + patron_stats_by_library.values(), PatronStatistics.zeroed() + ) + + return StatisticsResponse( + collections=collection_inventories, + libraries=library_statistics, + inventory_summary=collection_inventory_summary, + inventory_by_medium=collection_inventory_summary_by_medium, + patron_summary=patron_summary, + ) + def _libraries_for_admin(self, admin: Admin) -> list[Library]: """Return a list of libraries to which this user has access.""" return [ @@ -51,55 +133,78 @@ def _libraries_for_admin(self, admin: Admin) -> list[Library]: if admin.is_librarian(library) ] - def _collection_count(self, collection_filter, query_filter) -> int: - return ( - self._db.query(LicensePool) - .filter(collection_filter) - .filter(query_filter) - .count() + def _collection_statistics_by_medium_query( + self, + collection_filter: BinaryExpression[TypeEngine[bool]], + query_filter: BooleanClauseList[ClauseElement], + /, + columns: list[ColumnElement[TypeEngine[int]]], + ) -> dict[str, dict[str, int]]: + stats_with_medium = ( + self._db.execute( + select( + Edition.medium, + *columns, + ) + .select_from(LicensePool) + .join(Edition, Edition.id == LicensePool.presentation_edition_id) + .where(collection_filter) + .where(query_filter) + .group_by(Edition.medium) + ) + .mappings() + .all() ) + return { + row["medium"]: {k: v for k, v in row.items() if k != "medium"} + for row in stats_with_medium + } - def _gather_collection_stats(self, collection: Collection) -> CollectionInventory: + def _run_collection_stats_queries( + self, collection: Collection + ) -> _CollectionStatisticsQueryResults: collection_filter = LicensePool.collection_id == collection.id - _count: Callable = partial(self._collection_count, collection_filter) - - metered_license_title_count = _count(self.METERED_LICENSE_FILTER) - unlimited_license_title_count = _count(self.UNLIMITED_LICENSE_FILTER) - open_access_title_count = _count(self.OPEN_ACCESS_FILTER) - # TODO: We no longer support self-hosted books, so this should always be 0. - # this value is still included in the response for backwards compatibility, - # but should be removed in a future release. 
- self_hosted_title_count = 0 - at_least_one_loanable_count = _count(self.AT_LEAST_ONE_LENDABLE_FILTER) - - licenses_owned_count, licenses_available_count = map( - lambda x: x if x is not None else 0, - self._db.query( - func.sum(LicensePool.licenses_owned), - func.sum(LicensePool.licenses_available), - ) - .filter(collection_filter) - .filter(self.METERED_LICENSE_FILTER) - .all()[0], + _query_stats_group: Callable[..., dict[str, dict[str, int]]] = partial( + self._collection_statistics_by_medium_query, collection_filter + ) + count = func.count().label("count") + return _CollectionStatisticsQueryResults( + metered_title_counts=_query_stats_group( + self.METERED_LICENSE_FILTER, columns=[count] + ), + unlimited_title_counts=_query_stats_group( + self.UNLIMITED_LICENSE_FILTER, columns=[count] + ), + open_access_title_counts=_query_stats_group( + self.OPEN_ACCESS_FILTER, columns=[count] + ), + loanable_title_counts=_query_stats_group( + self.AT_LEAST_ONE_LOANABLE_FILTER, columns=[count] + ), + metered_license_stats=_query_stats_group( + self.METERED_LICENSE_FILTER, + columns=[ + func.sum(LicensePool.licenses_owned).label("owned"), + func.sum(LicensePool.licenses_available).label("available"), + ], + ), ) + def _create_collection_inventory( + self, collection: Collection + ) -> CollectionInventory: + """Return a CollectionInventory for the given collection.""" + + statistics = self._run_collection_stats_queries(collection) + inventory_by_medium = statistics.inventories_by_medium() + summary_inventory = sum( + inventory_by_medium.values(), InventoryStatistics.zeroed() + ) return CollectionInventory( id=collection.id, name=collection.name, - inventory=InventoryStatistics( - titles=metered_license_title_count - + unlimited_license_title_count - + open_access_title_count, - available_titles=at_least_one_loanable_count, - self_hosted_titles=self_hosted_title_count, - open_access_titles=open_access_title_count, - licensed_titles=metered_license_title_count - + unlimited_license_title_count, - unlimited_license_titles=unlimited_license_title_count, - metered_license_titles=metered_license_title_count, - metered_licenses_owned=licenses_owned_count, - metered_licenses_available=licenses_available_count, - ), + inventory=summary_inventory, + inventory_by_medium=inventory_by_medium, ) def _gather_patron_stats(self, library: Library) -> PatronStatistics: @@ -129,95 +234,94 @@ def _gather_patron_stats(self, library: Library) -> PatronStatistics: holds=hold_count, ) - def _collection_level_statistics( - self, - collections: typing.Collection[Collection], - ) -> tuple[list[CollectionInventory], InventoryStatistics]: - """Return individual and summary statistics for the given collections. - - The list of per-collection statistics is sorted by the collection `id`. 
- """ - collection_stats = [self._gather_collection_stats(c) for c in collections] - summary_stats = sum( - (c.inventory for c in collection_stats), InventoryStatistics.zeroed() - ) - return sorted(collection_stats, key=lambda c: c.id), summary_stats - - @staticmethod - def lookup_stats( - collection_inventories: Iterable[CollectionInventory], - collections: Iterable[Collection], - defaults: Iterable[InventoryStatistics] | None = None, - ) -> Iterable[InventoryStatistics]: - """Return the inventory dictionaries for the specified collections.""" - defaults = defaults if defaults is not None else [InventoryStatistics.zeroed()] - collection_ids = {c.id for c in collections} - return ( - ( - stats.inventory - for stats in collection_inventories - if stats.id in collection_ids - ) - if collection_ids - else defaults - ) - def stats(self, admin: Admin) -> StatisticsResponse: - """Build and return a statistics response for user's authorized libraries.""" +def _summarize_collection_inventories( + collection_inventories: Iterable[CollectionInventory], + collections: Iterable[Collection], +) -> tuple[InventoryStatistics, dict[str, InventoryStatistics]]: + """Summarize the inventories associated with the specified collections. - # Determine which libraries and collections are authorized for this user. - authorized_libraries = self._libraries_for_admin(admin) - authorized_collections_by_library = { - lib.short_name: set(lib.all_collections) for lib in authorized_libraries - } - all_authorized_collections: list[Collection] = [ - c for c in self._db.query(Collection) if admin.can_see_collection(c) - ] + The collections represented by the specified `collection_inventories` + must be a superset of the specified `collections`. - # Gather collection-level statistics for authorized collections. - ( - collection_inventories, - collection_inventory_summary, - ) = self._collection_level_statistics(all_authorized_collections) - - # Gather library-level statistics for the authorized libraries by - # summing up the values of each of libraries associated collections. - inventory_by_library = { - library_key: sum( - self.lookup_stats(collection_inventories, collections), - InventoryStatistics.zeroed(), + :param collections: `collections` for which to summarize inventory information. + :param collection_inventories: `CollectionInventory`s for the collections. + :return: Summary inventory and summary inventory by medium. 
+ """ + included_collection_inventories = ( + inv for inv in collection_inventories if inv.id in {c.id for c in collections} + ) + + summary_inventory = InventoryStatistics.zeroed() + summary_inventory_by_medium: dict[str, InventoryStatistics] = {} + + for ci in included_collection_inventories: + summary_inventory += ci.inventory + inventory_by_medium = ci.inventory_by_medium or {} + for medium, inventory in inventory_by_medium.items(): + summary_inventory_by_medium[medium] = ( + summary_inventory_by_medium.get(medium, InventoryStatistics.zeroed()) + + inventory ) - for library_key, collections in authorized_collections_by_library.items() - } - patron_stats_by_library = { - lib.short_name: self._gather_patron_stats(lib) - for lib in authorized_libraries + return summary_inventory, summary_inventory_by_medium + + +@dataclasses.dataclass(frozen=True) +class _CollectionStatisticsQueryResults: + unlimited_title_counts: dict[str, dict[str, int]] + open_access_title_counts: dict[str, dict[str, int]] + loanable_title_counts: dict[str, dict[str, int]] + metered_title_counts: dict[str, dict[str, int]] + metered_license_stats: dict[str, dict[str, int]] + + def inventories_by_medium(self) -> dict[str, InventoryStatistics]: + """Return a mapping of all mediums present to their associated inventories.""" + return { + medium: self.inventory_for_medium(medium) + for medium in self.mediums_present() } - library_statistics = [ - LibraryStatistics( - key=lib.short_name, - name=lib.name or "(missing library name)", - patron_statistics=patron_stats_by_library[lib.short_name], - inventory_summary=inventory_by_library[lib.short_name], - collection_ids=sorted( - [ - c.id - for c in authorized_collections_by_library[lib.short_name] - if c.id is not None - ] - ), - ) - for lib in authorized_libraries - ] - # Accumulate patron summary statistics from authorized libraries. 
- patron_summary = sum( - patron_stats_by_library.values(), PatronStatistics.zeroed() + def mediums_present(self) -> set[str]: + """Returns a list of the mediums present in these collection statistics.""" + statistics = dataclasses.asdict(self) + return set().union(*(stat.keys() for stat in statistics.values())) + + def inventory_for_medium(self, medium: str) -> InventoryStatistics: + """Return statistics for the specified medium.""" + unlimited_titles = self._lookup_property( + "unlimited_title_counts", medium, "count" + ) + open_access_titles = self._lookup_property( + "open_access_title_counts", medium, "count" + ) + loanable_titles = self._lookup_property( + "loanable_title_counts", medium, "count" + ) + metered_titles = self._lookup_property("metered_title_counts", medium, "count") + metered_owned_licenses = self._lookup_property( + "metered_license_stats", medium, "owned" + ) + metered_available_licenses = self._lookup_property( + "metered_license_stats", medium, "available" ) - return StatisticsResponse( - collections=collection_inventories, - libraries=library_statistics, - inventory_summary=collection_inventory_summary, - patron_summary=patron_summary, + return InventoryStatistics( + titles=metered_titles + unlimited_titles + open_access_titles, + available_titles=loanable_titles, + open_access_titles=open_access_titles, + licensed_titles=metered_titles + unlimited_titles, + unlimited_license_titles=unlimited_titles, + metered_license_titles=metered_titles, + metered_licenses_owned=metered_owned_licenses, + metered_licenses_available=metered_available_licenses, ) + + def _lookup_property( + self, + group: str, + medium: str, + column_name: str, + ) -> int: + """Return value for a statistic, if present; else, return zero.""" + field: dict[str, dict[str, int]] = getattr(self, group, {}) + return field.get(medium, {}).get(column_name, 0) diff --git a/api/admin/model/dashboard_statistics.py b/api/admin/model/dashboard_statistics.py index 91b7a6f6f5..3aec268780 100644 --- a/api/admin/model/dashboard_statistics.py +++ b/api/admin/model/dashboard_statistics.py @@ -1,7 +1,7 @@ from __future__ import annotations import sys -from typing import Any, List +from typing import Any, Dict, List from pydantic import Field, NonNegativeInt @@ -54,9 +54,6 @@ class InventoryStatistics(StatisticsBaseModel): available_titles: NonNegativeInt = Field( description="Number of books available to lend." ) - self_hosted_titles: NonNegativeInt = Field( - description="Number of books that are self-hosted." - ) open_access_titles: NonNegativeInt = Field( description="Number of books with an Open Access license." ) @@ -90,6 +87,9 @@ class LibraryStatistics(CustomBaseModel): inventory_summary: InventoryStatistics = Field( description="Summary of inventory statistics for this library." ) + inventory_by_medium: Dict[str, InventoryStatistics] = Field( + description="Per-medium inventory statistics for this library." + ) collection_ids: List[int] = Field( description="List of associated collection identifiers." ) @@ -103,6 +103,9 @@ class CollectionInventory(CustomBaseModel): inventory: InventoryStatistics = Field( description="Inventory statistics for this collection." ) + inventory_by_medium: Dict[str, InventoryStatistics] = Field( + description="Per-medium inventory statistics for this collection." 
+ ) class StatisticsResponse(CustomBaseModel): @@ -117,6 +120,9 @@ class StatisticsResponse(CustomBaseModel): inventory_summary: InventoryStatistics = Field( description="Summary inventory across all included collections." ) + inventory_by_medium: Dict[str, InventoryStatistics] = Field( + description="Per-medium summary inventory across all included collections." + ) patron_summary: PatronStatistics = Field( description="Summary patron statistics across all libraries." ) diff --git a/pyproject.toml b/pyproject.toml index 8902bd3a42..a8b7c3d405 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -76,6 +76,7 @@ module = [ "api.admin.controller.library_settings", "api.admin.controller.patron_auth_service_self_tests", "api.admin.controller.patron_auth_services", + "api.admin.dashboard_stats", "api.admin.form_data", "api.admin.model.dashboard_statistics", "api.adobe_vendor_id", diff --git a/tests/api/admin/test_dashboard_stats.py b/tests/api/admin/test_dashboard_stats.py index 95b2de7747..bc184e296d 100644 --- a/tests/api/admin/test_dashboard_stats.py +++ b/tests/api/admin/test_dashboard_stats.py @@ -95,7 +95,7 @@ def test_stats_patrons(admin_statistics_session: AdminStatisticsSessionFixture): assert 1 == patron_data.loans assert 1 == patron_data.holds - # These patrons are in a different library.. + # These patrons are in a different library. l2 = db.library() patron4 = db.patron(library=l2) pool.loan_to(patron4, end=utc_now() + timedelta(days=5)) @@ -156,7 +156,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture for inventory_data in [library_inventory, summary_inventory]: assert 0 == inventory_data.titles assert 0 == inventory_data.available_titles - assert 0 == inventory_data.self_hosted_titles assert 0 == inventory_data.open_access_titles assert 0 == inventory_data.licensed_titles assert 0 == inventory_data.unlimited_license_titles @@ -193,7 +192,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture for inventory_data in [library_inventory, summary_inventory]: assert 2 == inventory_data.titles assert 1 == inventory_data.available_titles - assert 0 == inventory_data.self_hosted_titles assert 0 == inventory_data.open_access_titles assert 2 == inventory_data.licensed_titles assert 0 == inventory_data.unlimited_license_titles @@ -215,7 +213,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture summary_inventory = response.inventory_summary assert 2 == library_inventory.titles assert 1 == library_inventory.available_titles - assert 0 == library_inventory.self_hosted_titles assert 0 == library_inventory.open_access_titles assert 2 == library_inventory.licensed_titles assert 0 == library_inventory.unlimited_license_titles @@ -225,7 +222,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture assert 3 == summary_inventory.titles assert 2 == summary_inventory.available_titles - assert 0 == summary_inventory.self_hosted_titles assert 0 == summary_inventory.open_access_titles assert 3 == summary_inventory.licensed_titles assert 0 == summary_inventory.unlimited_license_titles @@ -245,7 +241,6 @@ def test_stats_inventory(admin_statistics_session: AdminStatisticsSessionFixture for inventory_data in [library_inventory, summary_inventory]: assert 2 == inventory_data.titles assert 1 == inventory_data.available_titles - assert 0 == inventory_data.self_hosted_titles assert 0 == inventory_data.open_access_titles assert 2 == inventory_data.licensed_titles assert 0 == 
inventory_data.unlimited_license_titles @@ -265,7 +260,6 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu assert empty_inventory == InventoryStatistics( titles=0, available_titles=0, - self_hosted_titles=0, open_access_titles=0, licensed_titles=0, unlimited_license_titles=0, @@ -287,7 +281,6 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu assert new_metered_inventory == InventoryStatistics( titles=2, available_titles=2, - self_hosted_titles=0, open_access_titles=0, licensed_titles=2, unlimited_license_titles=0, @@ -301,6 +294,8 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu # Initially, there is no inventory. response = session.get_statistics() assert response.inventory_summary == empty_inventory + assert {} == response.inventory_by_medium + assert 0 == len(response.libraries) default_library = db.library("Default Library", "default") default_collection = db.collection(name="Default Collection") @@ -325,23 +320,32 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu expected_summary_inventory = expected_library_inventory.copy() response = session.get_statistics() - assert ( - expected_library_inventory - == response.libraries_by_key.get(default_library.short_name).inventory_summary - ) + library_stats_data = response.libraries_by_key.get(default_library.short_name) + assert 1 == len(response.collections) + assert 1 == len(response.inventory_by_medium) + assert 1 == len(library_stats_data.collection_ids) + assert 1 == len(library_stats_data.inventory_by_medium) assert expected_summary_inventory == response.inventory_summary + assert "Book" in response.inventory_by_medium + assert expected_summary_inventory == response.inventory_by_medium.get("Book") + assert expected_library_inventory == library_stats_data.inventory_summary + assert "Book" in library_stats_data.inventory_by_medium + assert expected_library_inventory == library_stats_data.inventory_by_medium.get( + "Book" + ) c2 = db.collection() c3 = db.collection() c3.libraries += [default_library] # c2 adds a 5/10 metered license title. 
- _, pool = db.edition( + edition, pool = db.edition( with_license_pool=True, with_open_access_download=False, data_source_name=DataSource.OVERDRIVE, collection=c2, ) + edition.medium = "Audio" pool.open_access = False pool.licenses_owned = 10 pool.licenses_available = 5 @@ -379,7 +383,8 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu pool.licenses_owned = 5 pool.licenses_available = 5 - added_library_inventory = empty_inventory.copy( + c1_previous_book_inventory = expected_library_inventory + c1_added_book_inventory = empty_inventory.copy( update={ "titles": 1, "available_titles": 0, @@ -389,18 +394,41 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu "metered_licenses_available": 0, } ) - added_summary_inventory = empty_inventory.copy( + + c2_audio_inventory = empty_inventory.copy( update={ - "titles": 3, - "available_titles": 2, - "licensed_titles": 3, - "metered_license_titles": 3, - "metered_licenses_owned": 18, - "metered_licenses_available": 10, + "titles": 1, + "available_titles": 1, + "licensed_titles": 1, + "metered_license_titles": 1, + "metered_licenses_owned": 10, + "metered_licenses_available": 5, } ) - expected_library_inventory += added_library_inventory - expected_summary_inventory += added_summary_inventory + c2_book_inventory = empty_inventory.copy( + update={ + "titles": 1, + "available_titles": 1, + "licensed_titles": 1, + "metered_license_titles": 1, + "metered_licenses_owned": 5, + "metered_licenses_available": 5, + } + ) + + c3_book_inventory = empty_inventory.copy() + + # All collections are included in summaries, since our admin is a sysadmin. + expected_library_inventory = ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + ) + expected_summary_inventory = ( + c1_previous_book_inventory + + c1_added_book_inventory + + c3_book_inventory + + c2_audio_inventory + + c2_book_inventory + ) response = session.get_statistics() library_stats_data = response.libraries_by_key.get(default_library.short_name) @@ -408,59 +436,128 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu library_collections_by_id = { id_: all_collections_by_id[id_] for id_ in library_stats_data.collection_ids } - assert expected_library_inventory == library_stats_data.inventory_summary + assert 3 == len(response.collections) + assert expected_summary_inventory == response.inventory_summary + assert 2 == len(response.inventory_by_medium) + assert "Audio" in response.inventory_by_medium + assert "Book" in response.inventory_by_medium + assert c2_audio_inventory == response.inventory_by_medium.get("Audio") + assert ( + c1_previous_book_inventory + + c1_added_book_inventory + + c2_book_inventory + + c3_book_inventory + == response.inventory_by_medium.get("Book") + ) + assert expected_summary_inventory == response.inventory_by_medium.get( + "Audio" + ) + response.inventory_by_medium.get("Book") + + assert expected_library_inventory == library_stats_data.inventory_summary assert 2 == len(library_stats_data.collection_ids) - assert 3 == len(response.collections) + assert 1 == len(library_stats_data.inventory_by_medium) + assert "Book" in library_stats_data.inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + == library_stats_data.inventory_by_medium.get("Book") + ) + assert expected_library_inventory == library_stats_data.inventory_by_medium.get( + "Book" + ) for collections in [library_collections_by_id, 
all_collections_by_id]: - default_inventory = collections[default_collection.id].inventory - c3_inventory = collections[c3.id].inventory - assert 1 == default_inventory.licensed_titles - assert 1 == default_inventory.open_access_titles - assert 3 == default_inventory.metered_licenses_owned - assert 0 == default_inventory.metered_licenses_available - - assert 0 == c3_inventory.licensed_titles - assert 0 == c3_inventory.open_access_titles - assert 0 == c3_inventory.metered_licenses_owned - assert 0 == c3_inventory.metered_licenses_available - - # assert None == library_collections_data.get(c2.name) - # c2_data = total_collections_data.get(c2.name) + default_stats = collections[default_collection.id] + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_stats.inventory + ) + + default_inventory_by_medium = default_stats.inventory_by_medium + assert "Audio" not in default_inventory_by_medium + assert "Book" in default_inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_inventory_by_medium["Book"] + ) + + c3_stats = collections[c3.id] + assert c3_book_inventory == c3_stats.inventory + + c3_inventory_by_medium = c3_stats.inventory_by_medium + assert "Book" not in c3_inventory_by_medium + assert "Audio" not in c3_inventory_by_medium + assert library_collections_by_id.get(c2.id) is None - c2_inventory = all_collections_by_id[c2.id].inventory - assert 2 == c2_inventory.licensed_titles - assert 0 == c2_inventory.open_access_titles - assert 15 == c2_inventory.metered_licenses_owned - assert 10 == c2_inventory.metered_licenses_available + + c2_stats = all_collections_by_id[c2.id] + assert c2_audio_inventory + c2_book_inventory == c2_stats.inventory + + c2_inventory_by_medium = c2_stats.inventory_by_medium + assert "Book" in c2_inventory_by_medium + assert "Audio" in c2_inventory_by_medium + assert c2_audio_inventory == c2_inventory_by_medium["Audio"] + assert c2_book_inventory == c2_inventory_by_medium["Book"] admin.remove_role(AdminRole.SYSTEM_ADMIN) admin.add_role(AdminRole.LIBRARY_MANAGER, default_library) # c2 is no longer included in the totals since the admin user's # library is not associated with it. 
+ expected_library_inventory = ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + ) + expected_summary_inventory = expected_library_inventory + response = session.get_statistics() library_stats_data = response.libraries_by_key.get(default_library.short_name) all_collections_by_id = {c.id: c for c in response.collections} library_collections_by_id = { id: all_collections_by_id[id] for id in library_stats_data.collection_ids } + assert 2 == len(response.collections) + + assert expected_summary_inventory == response.inventory_summary + assert 1 == len(response.inventory_by_medium) + assert "Book" in response.inventory_by_medium + assert expected_summary_inventory == response.inventory_by_medium.get("Book") + + assert expected_library_inventory == library_stats_data.inventory_summary + assert 2 == len(library_stats_data.collection_ids) + assert 1 == len(library_stats_data.inventory_by_medium) + assert "Book" in library_stats_data.inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + c3_book_inventory + == library_stats_data.inventory_by_medium.get("Book") + ) + assert expected_library_inventory == library_stats_data.inventory_by_medium.get( + "Book" + ) + for collections in [library_collections_by_id, all_collections_by_id]: assert 2 == len(collections) assert collections.get(c2.id) is None - default_inventory = collections[default_collection.id].inventory - assert 1 == default_inventory.licensed_titles - assert 1 == default_inventory.open_access_titles - assert 3 == default_inventory.metered_licenses_owned - assert 0 == default_inventory.metered_licenses_available - - c3_inventory = collections[c3.id].inventory - assert 0 == c3_inventory.licensed_titles - assert 0 == c3_inventory.open_access_titles - assert 0 == c3_inventory.metered_licenses_owned - assert 0 == c3_inventory.metered_licenses_available + default_stats = collections[default_collection.id] + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_stats.inventory + ) + + default_inventory_by_medium = default_stats.inventory_by_medium + assert "Audio" not in default_inventory_by_medium + assert "Book" in default_inventory_by_medium + assert ( + c1_previous_book_inventory + c1_added_book_inventory + == default_inventory_by_medium["Book"] + ) + + c3_stats = collections[c3.id] + assert c3_book_inventory == c3_stats.inventory + + c3_inventory_by_medium = c3_stats.inventory_by_medium + assert "Book" not in c3_inventory_by_medium + assert "Audio" not in c3_inventory_by_medium def test_stats_parent_collection_permissions( From ba3e35c02ab1485e541b403cf8f501c2b7e675ba Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 8 Dec 2023 18:55:44 -0400 Subject: [PATCH 216/262] Refactor mock webserver into a fixture (#1564) --- pyproject.toml | 1 + tests/api/test_overdrive.py | 2 +- tests/core/conftest.py | 1 + tests/core/test_http.py | 71 +++++---- tests/core/util/test_mock_web_server.py | 198 +----------------------- tests/customlists/conftest.py | 3 + tests/customlists/test_export.py | 16 +- tests/customlists/test_import.py | 16 +- tests/fixtures/webserver.py | 186 ++++++++++++++++++++++ 9 files changed, 239 insertions(+), 255 deletions(-) create mode 100644 tests/customlists/conftest.py create mode 100644 tests/fixtures/webserver.py diff --git a/pyproject.toml b/pyproject.toml index a8b7c3d405..0c02843daa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -115,6 +115,7 @@ module = [ "core.util.worker_pools", "core.util.xmlparser", 
"tests.fixtures.authenticator", + "tests.fixtures.webserver", "tests.migration.*", ] no_implicit_reexport = true diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index a9b18ab69e..8d2211abf8 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -59,9 +59,9 @@ from core.util.http import BadResponseException from tests.api.mockapi.overdrive import MockOverdriveAPI from tests.core.mock import DummyHTTPClient, MockRequestsResponse -from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse from tests.fixtures.database import DatabaseTransactionFixture from tests.fixtures.library import LibraryFixture +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse if TYPE_CHECKING: from tests.fixtures.api_overdrive_files import OverdriveAPIFilesFixture diff --git a/tests/core/conftest.py b/tests/core/conftest.py index d1fc72c50e..1a48ae45fd 100644 --- a/tests/core/conftest.py +++ b/tests/core/conftest.py @@ -14,6 +14,7 @@ "tests.fixtures.services", "tests.fixtures.time", "tests.fixtures.tls_server", + "tests.fixtures.webserver", ] # Make sure if we are using pyinstrument to profile tests, that diff --git a/tests/core/test_http.py b/tests/core/test_http.py index 88b0c785f6..1ccd4f53af 100644 --- a/tests/core/test_http.py +++ b/tests/core/test_http.py @@ -1,57 +1,66 @@ -import logging +import functools +from dataclasses import dataclass +from typing import Callable import pytest +import requests from core.util.http import HTTP, RequestNetworkException from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse +@dataclass +class TestHttpFixture: + server: MockAPIServer + request_with_timeout: Callable[..., requests.Response] + + @pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" +def test_http_fixture(mock_web_server: MockAPIServer): + # Make sure we don't wait for retries, as that will slow down the tests. + request_with_timeout = functools.partial( + HTTP.request_with_timeout, timeout=1, backoff_factor=0 + ) + return TestHttpFixture( + server=mock_web_server, request_with_timeout=request_with_timeout ) - _server.stop() class TestHTTP: - def test_retries_unspecified(self, mock_web_server: MockAPIServer): + def test_retries_unspecified(self, test_http_fixture: TestHttpFixture): for i in range(1, 7): response = MockAPIServerResponse() response.content = b"Ouch." response.status_code = 502 - mock_web_server.enqueue_response("GET", "/test", response) + test_http_fixture.server.enqueue_response("GET", "/test", response) with pytest.raises(RequestNetworkException): - HTTP.request_with_timeout("GET", mock_web_server.url("/test")) + test_http_fixture.request_with_timeout( + "GET", test_http_fixture.server.url("/test") + ) - assert len(mock_web_server.requests()) == 6 - request = mock_web_server.requests().pop() + assert len(test_http_fixture.server.requests()) == 6 + request = test_http_fixture.server.requests().pop() assert request.path == "/test" assert request.method == "GET" - def test_retries_none(self, mock_web_server: MockAPIServer): + def test_retries_none(self, test_http_fixture: TestHttpFixture): response = MockAPIServerResponse() response.content = b"Ouch." 
response.status_code = 502 - mock_web_server.enqueue_response("GET", "/test", response) + test_http_fixture.server.enqueue_response("GET", "/test", response) with pytest.raises(RequestNetworkException): - HTTP.request_with_timeout( - "GET", mock_web_server.url("/test"), max_retry_count=0 + test_http_fixture.request_with_timeout( + "GET", test_http_fixture.server.url("/test"), max_retry_count=0 ) - assert len(mock_web_server.requests()) == 1 - request = mock_web_server.requests().pop() + assert len(test_http_fixture.server.requests()) == 1 + request = test_http_fixture.server.requests().pop() assert request.path == "/test" assert request.method == "GET" - def test_retries_3(self, mock_web_server: MockAPIServer): + def test_retries_3(self, test_http_fixture: TestHttpFixture): response0 = MockAPIServerResponse() response0.content = b"Ouch." response0.status_code = 502 @@ -64,24 +73,24 @@ def test_retries_3(self, mock_web_server: MockAPIServer): response2.content = b"OK!" response2.status_code = 200 - mock_web_server.enqueue_response("GET", "/test", response0) - mock_web_server.enqueue_response("GET", "/test", response1) - mock_web_server.enqueue_response("GET", "/test", response2) + test_http_fixture.server.enqueue_response("GET", "/test", response0) + test_http_fixture.server.enqueue_response("GET", "/test", response1) + test_http_fixture.server.enqueue_response("GET", "/test", response2) - response = HTTP.request_with_timeout( - "GET", mock_web_server.url("/test"), max_retry_count=3 + response = test_http_fixture.request_with_timeout( + "GET", test_http_fixture.server.url("/test"), max_retry_count=3 ) assert response.status_code == 200 - assert len(mock_web_server.requests()) == 3 - request = mock_web_server.requests().pop() + assert len(test_http_fixture.server.requests()) == 3 + request = test_http_fixture.server.requests().pop() assert request.path == "/test" assert request.method == "GET" - request = mock_web_server.requests().pop() + request = test_http_fixture.server.requests().pop() assert request.path == "/test" assert request.method == "GET" - request = mock_web_server.requests().pop() + request = test_http_fixture.server.requests().pop() assert request.path == "/test" assert request.method == "GET" diff --git a/tests/core/util/test_mock_web_server.py b/tests/core/util/test_mock_web_server.py index 4c866a5ddc..ca71728b24 100644 --- a/tests/core/util/test_mock_web_server.py +++ b/tests/core/util/test_mock_web_server.py @@ -1,189 +1,7 @@ -import logging -import threading -from http.server import BaseHTTPRequestHandler, HTTPServer -from typing import Dict, List, Optional, Tuple - import pytest from core.util.http import HTTP, RequestNetworkException - - -class MockAPIServerRequest: - """A request made to a server.""" - - headers: Dict[str, str] - payload: bytes - method: str - path: str - - def __init__(self): - self.headers = {} - self.payload = b"" - self.method = "GET" - self.path = "/" - - -class MockAPIServerResponse: - """A response returned from a server.""" - - status_code: int - content: bytes - headers: Dict[str, str] - close_obnoxiously: bool - - def __init__(self): - self.status_code = 200 - self.content = b"" - self.headers = {} - self.close_obnoxiously = False - - def set_content(self, data: bytes): - """A convenience method that automatically sets the correct content length for data.""" - self.content = data - self.headers["content-length"] = str(len(data)) - - -class MockAPIServerRequestHandler(BaseHTTPRequestHandler): - """Basic request handler.""" - - def 
_send_everything(self, _response: MockAPIServerResponse): - if _response.close_obnoxiously: - return - - self.send_response(_response.status_code) - for key in _response.headers.keys(): - _value = _response.headers.get(key) - if _value: - self.send_header(key, _value) - - self.end_headers() - self.wfile.write(_response.content) - self.wfile.flush() - - def _read_everything(self) -> MockAPIServerRequest: - _request = MockAPIServerRequest() - _request.method = self.command - for k in self.headers.keys(): - header = self.headers.get(k, None) - if header is not None: - _request.headers[k] = header - _request.path = self.path - _readable = int(self.headers.get("Content-Length") or 0) - if _readable > 0: - _request.payload = self.rfile.read(_readable) - return _request - - def _handle_everything(self): - _request = self._read_everything() - _response = self.server.mock_api_server.dequeue_response(_request) - if _response is None: - logging.error( - f"failed to find a response for {_request.method} {_request.path}" - ) - raise AssertionError( - f"No available response for {_request.method} {_request.path}!" - ) - self._send_everything(_response) - - def do_GET(self): - logging.info("GET") - self._handle_everything() - - def do_POST(self): - logging.info("POST") - self._handle_everything() - - def do_PUT(self): - logging.info("PUT") - self._handle_everything() - - def version_string(self) -> str: - return "" - - def date_time_string(self, timestamp: Optional[int] = 0) -> str: - return "Sat, 1 January 2000 00:00:00 UTC" - - -class MockAPIInternalServer(HTTPServer): - mock_api_server: "MockAPIServer" - - def __init__(self, server_address: Tuple[str, int], bind_and_activate: bool): - super().__init__(server_address, MockAPIServerRequestHandler, bind_and_activate) - self.allow_reuse_address = True - - -class MockAPIServer: - """Embedded web server.""" - - _address: str - _port: int - _server: HTTPServer - _server_thread: threading.Thread - _responses: Dict[str, Dict[str, List[MockAPIServerResponse]]] - _requests: List[MockAPIServerRequest] - - def __init__(self, address: str, port: int): - self._address = address - self._port = port - self._server = MockAPIInternalServer( - (self._address, self._port), bind_and_activate=True - ) - self._server.mock_api_server = self - self._server_thread = threading.Thread(target=self._server.serve_forever) - self._responses = {} - self._requests = [] - - def start(self) -> None: - self._server_thread.start() - - def stop(self) -> None: - self._server.shutdown() - self._server.server_close() - self._server_thread.join(timeout=10) - - def enqueue_response( - self, request_method: str, request_path: str, response: MockAPIServerResponse - ): - _by_method = self._responses.get(request_method) or {} - _by_path = _by_method.get(request_path) or [] - _by_path.append(response) - _by_method[request_path] = _by_path - self._responses[request_method] = _by_method - - def dequeue_response( - self, request: MockAPIServerRequest - ) -> Optional[MockAPIServerResponse]: - self._requests.append(request) - _by_method = self._responses.get(request.method) or {} - _by_path = _by_method.get(request.path) or [] - if len(_by_path) > 0: - return _by_path.pop(0) - return None - - def address(self) -> str: - return self._address - - def port(self) -> int: - return self._port - - def url(self, path: str) -> str: - return f"http://{self.address()}:{self.port()}{path}" - - def requests(self) -> List[MockAPIServerRequest]: - return list(self._requests) - - -@pytest.fixture -def mock_web_server(): 
- """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse class TestMockAPIServer: @@ -224,11 +42,8 @@ def test_server_post(self, mock_web_server: MockAPIServer): def test_server_get_no_response(self, mock_web_server: MockAPIServer): url = mock_web_server.url("/x/y/z") - try: - HTTP.request_with_timeout("GET", url) - except RequestNetworkException: - return - raise AssertionError("Failed to fail!") + with pytest.raises(RequestNetworkException): + HTTP.request_with_timeout("GET", url, timeout=1, backoff_factor=0) def test_server_get_dies(self, mock_web_server: MockAPIServer): _r = MockAPIServerResponse() @@ -236,8 +51,5 @@ def test_server_get_dies(self, mock_web_server: MockAPIServer): mock_web_server.enqueue_response("GET", "/x/y/z", _r) url = mock_web_server.url("/x/y/z") - try: - HTTP.request_with_timeout("GET", url) - except RequestNetworkException: - return - raise AssertionError("Failed to fail!") + with pytest.raises(RequestNetworkException): + HTTP.request_with_timeout("GET", url, timeout=1, backoff_factor=0) diff --git a/tests/customlists/conftest.py b/tests/customlists/conftest.py new file mode 100644 index 0000000000..12f2b7f9e0 --- /dev/null +++ b/tests/customlists/conftest.py @@ -0,0 +1,3 @@ +pytest_plugins = [ + "tests.fixtures.webserver", +] diff --git a/tests/customlists/test_export.py b/tests/customlists/test_export.py index 2495cc0895..f7befb879e 100644 --- a/tests/customlists/test_export.py +++ b/tests/customlists/test_export.py @@ -1,5 +1,4 @@ import json -import logging from pathlib import Path import pytest @@ -9,20 +8,7 @@ CustomListExportFailed, CustomListExports, ) -from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse - - -@pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse class TestExports: diff --git a/tests/customlists/test_import.py b/tests/customlists/test_import.py index ed05e2fa9b..406070df58 100644 --- a/tests/customlists/test_import.py +++ b/tests/customlists/test_import.py @@ -1,5 +1,4 @@ import json -import logging from pathlib import Path from typing import List @@ -11,20 +10,7 @@ CustomListReport, CustomListsReport, ) -from tests.core.util.test_mock_web_server import MockAPIServer, MockAPIServerResponse - - -@pytest.fixture -def mock_web_server(): - """A test fixture that yields a usable mock web server for the lifetime of the test.""" - _server = MockAPIServer("127.0.0.1", 10256) - _server.start() - logging.info(f"starting mock web server on {_server.address()}:{_server.port()}") - yield _server - logging.info( - f"shutting down mock web server on {_server.address()}:{_server.port()}" - ) - _server.stop() +from tests.fixtures.webserver import MockAPIServer, MockAPIServerResponse class TestImports: diff --git 
a/tests/fixtures/webserver.py b/tests/fixtures/webserver.py new file mode 100644 index 0000000000..04ac0577a6 --- /dev/null +++ b/tests/fixtures/webserver.py @@ -0,0 +1,186 @@ +import threading +from http.server import BaseHTTPRequestHandler, HTTPServer +from typing import Dict, Generator, List, Optional, Tuple + +import pytest + +from core.util.log import LoggerMixin + + +class MockAPIServerRequest: + """A request made to a server.""" + + headers: Dict[str, str] + payload: bytes + method: str + path: str + + def __init__(self) -> None: + self.headers = {} + self.payload = b"" + self.method = "GET" + self.path = "/" + + +class MockAPIServerResponse: + """A response returned from a server.""" + + status_code: int + content: bytes + headers: Dict[str, str] + close_obnoxiously: bool + + def __init__(self) -> None: + self.status_code = 200 + self.content = b"" + self.headers = {} + self.close_obnoxiously = False + + def set_content(self, data: bytes) -> None: + """A convenience method that automatically sets the correct content length for data.""" + self.content = data + self.headers["content-length"] = str(len(data)) + + +class MockAPIServerRequestHandler(BaseHTTPRequestHandler, LoggerMixin): + """Basic request handler.""" + + def _send_everything(self, _response: MockAPIServerResponse) -> None: + if _response.close_obnoxiously: + return + + self.send_response(_response.status_code) + for key in _response.headers.keys(): + _value = _response.headers.get(key) + if _value: + self.send_header(key, _value) + + self.end_headers() + self.wfile.write(_response.content) + self.wfile.flush() + + def _read_everything(self) -> MockAPIServerRequest: + _request = MockAPIServerRequest() + _request.method = self.command + for k in self.headers.keys(): + header = self.headers.get(k, None) + if header is not None: + _request.headers[k] = header + _request.path = self.path + _readable = int(self.headers.get("Content-Length") or 0) + if _readable > 0: + _request.payload = self.rfile.read(_readable) + return _request + + def _handle_everything(self) -> None: + _request = self._read_everything() + assert isinstance(self.server, MockAPIInternalServer) + _response = self.server.mock_api_server.dequeue_response(_request) + if _response is None: + self.log.error( + f"failed to find a response for {_request.method} {_request.path}" + ) + raise AssertionError( + f"No available response for {_request.method} {_request.path}!" 
+ ) + self._send_everything(_response) + + def do_GET(self) -> None: + self.log.info("GET") + self._handle_everything() + + def do_POST(self) -> None: + self.log.info("POST") + self._handle_everything() + + def do_PUT(self) -> None: + self.log.info("PUT") + self._handle_everything() + + def version_string(self) -> str: + return "" + + def date_time_string(self, timestamp: Optional[int] = 0) -> str: + return "Sat, 1 January 2000 00:00:00 UTC" + + +class MockAPIInternalServer(HTTPServer): + mock_api_server: "MockAPIServer" + + def __init__(self, server_address: Tuple[str, int], bind_and_activate: bool): + super().__init__(server_address, MockAPIServerRequestHandler, bind_and_activate) + self.allow_reuse_address = True + + +class MockAPIServer(LoggerMixin): + """Embedded web server.""" + + _address: str + _port: int + _server: HTTPServer + _server_thread: threading.Thread + _responses: Dict[str, Dict[str, List[MockAPIServerResponse]]] + _requests: List[MockAPIServerRequest] + + def __init__(self, address: str, port: int): + self._address = address + self._port = port + self._server = MockAPIInternalServer( + (self._address, self._port), bind_and_activate=True + ) + self._server.mock_api_server = self + self._server_thread = threading.Thread(target=self._server.serve_forever) + self._responses = {} + self._requests = [] + + def start(self) -> None: + self.log.info(f"starting mock web server on {self.address()}:{self.port()}") + self._server_thread.start() + + def stop(self) -> None: + self.log.info( + f"shutting down mock web server on {self.address()}:{self.port()}" + ) + self._server.shutdown() + self._server.server_close() + self._server_thread.join(timeout=10) + + def enqueue_response( + self, request_method: str, request_path: str, response: MockAPIServerResponse + ) -> None: + _by_method = self._responses.get(request_method) or {} + _by_path = _by_method.get(request_path) or [] + _by_path.append(response) + _by_method[request_path] = _by_path + self._responses[request_method] = _by_method + + def dequeue_response( + self, request: MockAPIServerRequest + ) -> Optional[MockAPIServerResponse]: + self._requests.append(request) + _by_method = self._responses.get(request.method) or {} + _by_path = _by_method.get(request.path) or [] + if len(_by_path) > 0: + return _by_path.pop(0) + return None + + def address(self) -> str: + return self._address + + def port(self) -> int: + return self._port + + def url(self, path: str) -> str: + return f"http://{self.address()}:{self.port()}{path}" + + def requests(self) -> List[MockAPIServerRequest]: + return list(self._requests) + + +@pytest.fixture +def mock_web_server() -> Generator[MockAPIServer, None, None]: + """A test fixture that yields a usable mock web server for the lifetime of the test.""" + _server = MockAPIServer("127.0.0.1", 10256) + _server.start() + yield _server + _server.stop() From 139fbf75f30994f77c78174daf097ff6479f93ad Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Mon, 11 Dec 2023 14:22:52 -0500 Subject: [PATCH 217/262] Handle `null` medium in dashboard stats. 
(#1568) --- api/admin/dashboard_stats.py | 5 ++- tests/api/admin/test_dashboard_stats.py | 48 ++++++++++++++++++++----- 2 files changed, 43 insertions(+), 10 deletions(-) diff --git a/api/admin/dashboard_stats.py b/api/admin/dashboard_stats.py index 9b8d4b6e6f..765052225e 100644 --- a/api/admin/dashboard_stats.py +++ b/api/admin/dashboard_stats.py @@ -196,7 +196,10 @@ def _create_collection_inventory( """Return a CollectionInventory for the given collection.""" statistics = self._run_collection_stats_queries(collection) - inventory_by_medium = statistics.inventories_by_medium() + # Ensure that the key is a string, even if the medium is null. + inventory_by_medium = { + str(m): inv for m, inv in statistics.inventories_by_medium().items() + } summary_inventory = sum( inventory_by_medium.values(), InventoryStatistics.zeroed() ) diff --git a/tests/api/admin/test_dashboard_stats.py b/tests/api/admin/test_dashboard_stats.py index bc184e296d..edbef2fe7c 100644 --- a/tests/api/admin/test_dashboard_stats.py +++ b/tests/api/admin/test_dashboard_stats.py @@ -350,6 +350,18 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu pool.licenses_owned = 10 pool.licenses_available = 5 + # We currently have active BiblioBoard editions with no (null) medium, + # so let's add one of those to make sure we handle those. + edition, pool = db.edition( + with_license_pool=True, + with_open_access_download=False, + collection=c2, + ) + edition.medium = None + pool.open_access = False + pool.licenses_owned = 2 + pool.licenses_available = 0 + # c3 does not add a title, since no licenses owned. _, pool = db.edition( with_license_pool=True, @@ -415,6 +427,16 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu "metered_licenses_available": 5, } ) + c2_no_medium_inventory = empty_inventory.copy( + update={ + "titles": 1, + "available_titles": 0, + "licensed_titles": 1, + "metered_license_titles": 1, + "metered_licenses_owned": 2, + "metered_licenses_available": 0, + } + ) c3_book_inventory = empty_inventory.copy() @@ -428,6 +450,7 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu + c3_book_inventory + c2_audio_inventory + c2_book_inventory + + c2_no_medium_inventory ) response = session.get_statistics() @@ -439,10 +462,12 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu assert 3 == len(response.collections) assert expected_summary_inventory == response.inventory_summary - assert 2 == len(response.inventory_by_medium) + assert 3 == len(response.inventory_by_medium) assert "Audio" in response.inventory_by_medium assert "Book" in response.inventory_by_medium + assert "None" in response.inventory_by_medium assert c2_audio_inventory == response.inventory_by_medium.get("Audio") + assert c2_no_medium_inventory == response.inventory_by_medium.get("None") assert ( c1_previous_book_inventory + c1_added_book_inventory @@ -450,9 +475,11 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu + c3_book_inventory == response.inventory_by_medium.get("Book") ) - assert expected_summary_inventory == response.inventory_by_medium.get( - "Audio" - ) + response.inventory_by_medium.get("Book") + assert expected_summary_inventory == ( + response.inventory_by_medium.get("Audio") + + response.inventory_by_medium.get("Book") + + response.inventory_by_medium.get("None") + ) assert expected_library_inventory == library_stats_data.inventory_summary assert 2 == 
len(library_stats_data.collection_ids) @@ -491,13 +518,18 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu assert library_collections_by_id.get(c2.id) is None c2_stats = all_collections_by_id[c2.id] - assert c2_audio_inventory + c2_book_inventory == c2_stats.inventory + assert ( + c2_audio_inventory + c2_book_inventory + c2_no_medium_inventory + == c2_stats.inventory + ) c2_inventory_by_medium = c2_stats.inventory_by_medium assert "Book" in c2_inventory_by_medium assert "Audio" in c2_inventory_by_medium + assert "None" in c2_inventory_by_medium assert c2_audio_inventory == c2_inventory_by_medium["Audio"] assert c2_book_inventory == c2_inventory_by_medium["Book"] + assert c2_no_medium_inventory == c2_inventory_by_medium["None"] admin.remove_role(AdminRole.SYSTEM_ADMIN) admin.add_role(AdminRole.LIBRARY_MANAGER, default_library) @@ -545,7 +577,7 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu ) default_inventory_by_medium = default_stats.inventory_by_medium - assert "Audio" not in default_inventory_by_medium + assert 1 == len(default_inventory_by_medium) assert "Book" in default_inventory_by_medium assert ( c1_previous_book_inventory + c1_added_book_inventory @@ -555,9 +587,7 @@ def test_stats_collections(admin_statistics_session: AdminStatisticsSessionFixtu c3_stats = collections[c3.id] assert c3_book_inventory == c3_stats.inventory - c3_inventory_by_medium = c3_stats.inventory_by_medium - assert "Book" not in c3_inventory_by_medium - assert "Audio" not in c3_inventory_by_medium + assert 0 == len(c3_stats.inventory_by_medium) def test_stats_parent_collection_permissions( From c72013aacff09a4ba46dda62f101356add6517ef Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 15:27:30 +0000 Subject: [PATCH 218/262] Bump typing-extensions from 4.8.0 to 4.9.0 (#1566) --- poetry.lock | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/poetry.lock b/poetry.lock index 754df43618..d7b95b521e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -598,8 +598,8 @@ files = [ jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" urllib3 = [ - {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, + {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, ] [package.extras] @@ -1322,12 +1322,12 @@ files = [ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, + {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -1429,8 +1429,8 @@ files = [ google-api-core = {version = 
">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, + {version = ">=1.22.2,<2.0.0dev", markers = "python_version >= \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -4292,13 +4292,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.8.0" +version = "4.9.0" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, - {file = "typing_extensions-4.8.0.tar.gz", hash = "sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, + {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, + {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, ] [[package]] From 05fe073a1b695f7b505dba37163d463b55d8627f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Dec 2023 16:59:52 +0000 Subject: [PATCH 219/262] Bump feedparser from 6.0.10 to 6.0.11 (#1567) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index d7b95b521e..fd3bf24d4a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1132,13 +1132,13 @@ tests = ["coverage", "coveralls", "dill", "mock", "nose"] [[package]] name = "feedparser" -version = "6.0.10" +version = "6.0.11" description = "Universal feed parser, handles RSS 0.9x, RSS 1.0, RSS 2.0, CDF, Atom 0.3, and Atom 1.0 feeds" optional = false python-versions = ">=3.6" files = [ - {file = "feedparser-6.0.10-py3-none-any.whl", hash = "sha256:79c257d526d13b944e965f6095700587f27388e50ea16fd245babe4dfae7024f"}, - {file = "feedparser-6.0.10.tar.gz", hash = "sha256:27da485f4637ce7163cdeab13a80312b93b7d0c1b775bef4a47629a3110bca51"}, + {file = "feedparser-6.0.11-py3-none-any.whl", hash = "sha256:0be7ee7b395572b19ebeb1d6aafb0028dee11169f1c934e0ed67d54992f4ad45"}, + {file = "feedparser-6.0.11.tar.gz", hash = "sha256:c9d0407b64c6f2a065d0ebb292c2b35c01050cc0dc33757461aaabdc4c4184d5"}, ] [package.dependencies] @@ -4552,4 +4552,4 @@ testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools" [metadata] lock-version = "2.0" python-versions = ">=3.8,<4" -content-hash = "6dd69fa4cb024f65fb063666ad8e175abf906ddd217b88b2b987b4b47530168a" +content-hash = "4e3e2002e9f0848b4a7b1be5f051233ae5aa5dc04b488aadaa8726f8bfd2f1e5" diff --git a/pyproject.toml b/pyproject.toml index 0c02843daa..6bbdf443e0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -194,7 +194,7 @@ click = "^8.1.3" contextlib2 = "21.6.0" dependency-injector = "^4.41" expiringdict = "1.2.2" -feedparser = "6.0.10" +feedparser = "6.0.11" firebase-admin = "^6.0.1" Flask = "^3.0" Flask-Babel = "^4.0" From d85d76bdb4bedf22c7aa65334c4c3b6375889a6f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 16:22:07 +0000 Subject: [PATCH 220/262] Bump github/codeql-action from 2 to 3 (#1569) --- 
.github/workflows/codeql-analysis.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 7ff3741fa9..d44ce760a1 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -35,12 +35,12 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: python config-file: ./.github/codeql/config.yml - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 with: category: ".github/workflows/codeql-analysis.yml:analyze/language:python" From e451cb2b4e7a0326bdb527c88661562b07f26bf4 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 14 Dec 2023 12:29:00 -0400 Subject: [PATCH 221/262] Break api/controller.py into multiple files (#1565) * Break circulation controllers out into their own files. --- api/admin/controller/__init__.py | 2 +- api/admin/controller/custom_lists.py | 2 +- api/admin/controller/dashboard.py | 2 +- ...discovery_service_library_registrations.py | 2 +- api/admin/controller/feed.py | 2 +- api/admin/controller/integration_settings.py | 2 +- api/admin/controller/lanes.py | 2 +- api/admin/controller/library_settings.py | 2 +- api/admin/controller/patron.py | 2 +- api/admin/controller/quicksight.py | 2 +- api/admin/controller/settings.py | 2 +- api/admin/controller/timestamps.py | 2 +- api/admin/controller/work_editor.py | 2 +- api/app.py | 2 +- api/circulation_manager.py | 433 +++ api/controller.py | 2352 ----------------- api/controller/analytics.py | 34 + api/controller/annotation.py | 94 + .../base.py} | 0 api/controller/circulation_manager.py | 281 ++ api/controller/device_tokens.py | 62 + api/controller/index.py | 84 + api/controller/loan.py | 579 ++++ .../marc.py} | 0 api/controller/odl_notification.py | 37 + api/controller/opds_feed.py | 456 ++++ api/controller/patron_auth_token.py | 39 + api/controller/playtime_entries.py | 53 + api/controller/profile.py | 30 + api/controller/static_file.py | 27 + api/controller/urn_lookup.py | 22 + api/controller/work.py | 293 ++ api/discovery/registration_script.py | 2 +- api/util/flask.py | 2 +- core/service/container.py | 2 +- pyproject.toml | 2 +- tests/api/admin/test_routes.py | 2 +- tests/api/controller/__init__.py | 0 .../test_analytics.py} | 0 .../test_annotation.py} | 0 .../test_base.py} | 6 +- .../test_crawlfeed.py} | 0 .../test_fixture.py} | 0 .../test_index.py} | 0 .../test_loan.py} | 2 +- .../test_marc.py} | 2 +- .../test_multilib.py} | 0 .../test_odl_notify.py} | 0 .../test_opds_feed.py} | 2 +- .../test_patron_access_token.py} | 2 +- .../test_playtime_entries.py} | 0 .../test_profile.py} | 0 .../test_scopedsession.py} | 0 .../test_staticfile.py} | 0 .../test_urn_lookup.py} | 0 .../test_work.py} | 0 tests/api/mockapi/circulation.py | 2 +- tests/api/test_controller_cm.py | 2 +- tests/api/test_device_tokens.py | 2 +- tests/fixtures/api_admin.py | 2 +- tests/fixtures/api_controller.py | 3 +- tests/fixtures/api_routes.py | 2 +- 62 files changed, 2557 insertions(+), 2384 deletions(-) create mode 100644 api/circulation_manager.py delete mode 100644 api/controller.py create mode 100644 api/controller/analytics.py create mode 100644 api/controller/annotation.py rename api/{base_controller.py => controller/base.py} (100%) create mode 100644 api/controller/circulation_manager.py create mode 100644 api/controller/device_tokens.py 
create mode 100644 api/controller/index.py create mode 100644 api/controller/loan.py rename api/{controller_marc.py => controller/marc.py} (100%) create mode 100644 api/controller/odl_notification.py create mode 100644 api/controller/opds_feed.py create mode 100644 api/controller/patron_auth_token.py create mode 100644 api/controller/playtime_entries.py create mode 100644 api/controller/profile.py create mode 100644 api/controller/static_file.py create mode 100644 api/controller/urn_lookup.py create mode 100644 api/controller/work.py create mode 100644 tests/api/controller/__init__.py rename tests/api/{test_controller_analytics.py => controller/test_analytics.py} (100%) rename tests/api/{test_controller_annotation.py => controller/test_annotation.py} (100%) rename tests/api/{test_controller_base.py => controller/test_base.py} (99%) rename tests/api/{test_controller_crawlfeed.py => controller/test_crawlfeed.py} (100%) rename tests/api/{test_controller_fixture.py => controller/test_fixture.py} (100%) rename tests/api/{test_controller_index.py => controller/test_index.py} (100%) rename tests/api/{test_controller_loan.py => controller/test_loan.py} (99%) rename tests/api/{test_controller_marc.py => controller/test_marc.py} (99%) rename tests/api/{test_controller_multilib.py => controller/test_multilib.py} (100%) rename tests/api/{test_controller_odl_notify.py => controller/test_odl_notify.py} (100%) rename tests/api/{test_controller_opdsfeed.py => controller/test_opds_feed.py} (99%) rename tests/api/{test_controller_patron_access_token.py => controller/test_patron_access_token.py} (96%) rename tests/api/{test_controller_playtime_entries.py => controller/test_playtime_entries.py} (100%) rename tests/api/{test_controller_profile.py => controller/test_profile.py} (100%) rename tests/api/{test_controller_scopedsession.py => controller/test_scopedsession.py} (100%) rename tests/api/{test_controller_staticfile.py => controller/test_staticfile.py} (100%) rename tests/api/{test_controller_urn_lookup.py => controller/test_urn_lookup.py} (100%) rename tests/api/{test_controller_work.py => controller/test_work.py} (100%) diff --git a/api/admin/controller/__init__.py b/api/admin/controller/__init__.py index 5aaf9ee57f..ff613a1d51 100644 --- a/api/admin/controller/__init__.py +++ b/api/admin/controller/__init__.py @@ -5,7 +5,7 @@ from api.admin.controller.quicksight import QuickSightController if TYPE_CHECKING: - from api.controller import CirculationManager + from api.circulation_manager import CirculationManager def setup_admin_controllers(manager: CirculationManager): diff --git a/api/admin/controller/custom_lists.py b/api/admin/controller/custom_lists.py index 21a339a2ba..df6afc25c9 100644 --- a/api/admin/controller/custom_lists.py +++ b/api/admin/controller/custom_lists.py @@ -21,7 +21,7 @@ MISSING_COLLECTION, MISSING_CUSTOM_LIST, ) -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.problem_details import CANNOT_DELETE_SHARED_LIST from core.app_server import load_pagination_from_request from core.feed.acquisition import OPDSAcquisitionFeed diff --git a/api/admin/controller/dashboard.py b/api/admin/controller/dashboard.py index fd53cb6e66..b41d46fba0 100644 --- a/api/admin/controller/dashboard.py +++ b/api/admin/controller/dashboard.py @@ -8,7 +8,7 @@ from sqlalchemy.orm import Session from api.admin.model.dashboard_statistics import StatisticsResponse -from api.controller import CirculationManagerController 
+from api.controller.circulation_manager import CirculationManagerController from api.local_analytics_exporter import LocalAnalyticsExporter from core.feed.annotator.admin import AdminAnnotator from core.model import ( diff --git a/api/admin/controller/discovery_service_library_registrations.py b/api/admin/controller/discovery_service_library_registrations.py index 61ab9047a0..964bc5e9e9 100644 --- a/api/admin/controller/discovery_service_library_registrations.py +++ b/api/admin/controller/discovery_service_library_registrations.py @@ -11,7 +11,7 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.problem_details import MISSING_SERVICE, NO_SUCH_LIBRARY -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager from api.discovery.opds_registration import OpdsRegistrationService from api.integration.registry.discovery import DiscoveryRegistry from core.integration.goals import Goals diff --git a/api/admin/controller/feed.py b/api/admin/controller/feed.py index ea91c10c38..441a28ac1f 100644 --- a/api/admin/controller/feed.py +++ b/api/admin/controller/feed.py @@ -4,7 +4,7 @@ from flask import url_for from api.admin.controller.base import AdminPermissionsControllerMixin -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.app_server import load_pagination_from_request from core.classifier import genres from core.feed.admin import AdminFeed diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py index 8cb6e68a4f..1ddcee3b9d 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -17,7 +17,7 @@ NO_SUCH_LIBRARY, UNKNOWN_PROTOCOL, ) -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager from core.integration.base import ( HasChildIntegrationConfiguration, HasIntegrationConfiguration, diff --git a/api/admin/controller/lanes.py b/api/admin/controller/lanes.py index 5aa02093ad..ac80b125be 100644 --- a/api/admin/controller/lanes.py +++ b/api/admin/controller/lanes.py @@ -16,7 +16,7 @@ NO_CUSTOM_LISTS_FOR_LANE, NO_DISPLAY_NAME_FOR_LANE, ) -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.lanes import create_default_lanes from core.lane import Lane from core.model import CustomList, Library, create, get_one diff --git a/api/admin/controller/library_settings.py b/api/admin/controller/library_settings.py index 706576d680..ba04e759b9 100644 --- a/api/admin/controller/library_settings.py +++ b/api/admin/controller/library_settings.py @@ -17,8 +17,8 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.form_data import ProcessFormData from api.admin.problem_details import * +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.lanes import create_default_lanes from core.configuration.library import LibrarySettings from core.model import ( diff --git a/api/admin/controller/patron.py b/api/admin/controller/patron.py index 31204c98c3..3e328b68b6 100644 --- a/api/admin/controller/patron.py +++ b/api/admin/controller/patron.py @@ -9,7 +9,7 @@ from api.adobe_vendor_id import AuthdataUtility from api.authentication.base import CannotCreateLocalPatron, PatronData from 
api.authenticator import LibraryAuthenticator -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.util.problem_detail import ProblemDetail diff --git a/api/admin/controller/quicksight.py b/api/admin/controller/quicksight.py index e7b6b99339..7b5d0512b1 100644 --- a/api/admin/controller/quicksight.py +++ b/api/admin/controller/quicksight.py @@ -10,7 +10,7 @@ QuicksightGenerateUrlRequest, QuicksightGenerateUrlResponse, ) -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.problem_details import NOT_FOUND_ON_REMOTE from core.config import Configuration from core.model.admin import Admin diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index d4314b2d85..d07d9c858e 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -23,7 +23,7 @@ UNKNOWN_PROTOCOL, ) from api.admin.validator import Validator -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.external_search import ExternalSearchIndex from core.integration.base import ( HasChildIntegrationConfiguration, diff --git a/api/admin/controller/timestamps.py b/api/admin/controller/timestamps.py index ba848e5a26..43f0c8fe33 100644 --- a/api/admin/controller/timestamps.py +++ b/api/admin/controller/timestamps.py @@ -1,7 +1,7 @@ from __future__ import annotations from api.admin.controller.base import AdminPermissionsControllerMixin -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.model import Timestamp diff --git a/api/admin/controller/work_editor.py b/api/admin/controller/work_editor.py index b79fdba327..7363cdc7e1 100644 --- a/api/admin/controller/work_editor.py +++ b/api/admin/controller/work_editor.py @@ -6,7 +6,7 @@ from api.admin.controller.base import AdminPermissionsControllerMixin from api.admin.problem_details import * -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.classifier import NO_NUMBER, NO_VALUE, SimplifiedGenreClassifier, genres from core.feed.acquisition import OPDSAcquisitionFeed from core.feed.annotator.admin import AdminAnnotator diff --git a/api/app.py b/api/app.py index 320604f429..9e3daf8a60 100644 --- a/api/app.py +++ b/api/app.py @@ -8,8 +8,8 @@ from flask_pydantic_spec import FlaskPydanticSpec from api.admin.controller import setup_admin_controllers +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.util.flask import PalaceFlask from api.util.profilers import ( PalaceCProfileProfiler, diff --git a/api/circulation_manager.py b/api/circulation_manager.py new file mode 100644 index 0000000000..641093bf50 --- /dev/null +++ b/api/circulation_manager.py @@ -0,0 +1,433 @@ +from __future__ import annotations + +import logging +import urllib.parse +from typing import TYPE_CHECKING + +import flask +from dependency_injector.wiring import Provide, inject +from expiringdict import ExpiringDict +from flask_babel import lazy_gettext as _ +from sqlalchemy import select + +from api.authenticator import Authenticator +from api.circulation import CirculationAPI +from api.config import Configuration +from 
api.controller.analytics import AnalyticsController +from api.controller.annotation import AnnotationController +from api.controller.device_tokens import DeviceTokensController +from api.controller.index import IndexController +from api.controller.loan import LoanController +from api.controller.marc import MARCRecordController +from api.controller.odl_notification import ODLNotificationController +from api.controller.opds_feed import OPDSFeedController +from api.controller.patron_auth_token import PatronAuthTokenController +from api.controller.playtime_entries import PlaytimeEntriesController +from api.controller.profile import ProfileController +from api.controller.static_file import StaticFileController +from api.controller.urn_lookup import URNLookupController +from api.controller.work import WorkController +from api.custom_index import CustomIndexView +from api.lanes import load_lanes +from api.problem_details import * +from api.saml.controller import SAMLController +from core.app_server import ApplicationVersionController, load_facets_from_request +from core.external_search import ExternalSearchIndex +from core.feed.annotator.circulation import ( + CirculationManagerAnnotator, + LibraryAnnotator, +) +from core.lane import Lane, WorkList +from core.model import ConfigurationSetting, Library +from core.model.discovery_service_registration import DiscoveryServiceRegistration +from core.service.container import Services +from core.util.log import elapsed_time_logging, log_elapsed_time + +if TYPE_CHECKING: + from api.admin.controller.admin_search import AdminSearchController + from api.admin.controller.announcement_service import AnnouncementSettings + from api.admin.controller.catalog_services import CatalogServicesController + from api.admin.controller.collection_self_tests import CollectionSelfTestsController + from api.admin.controller.collection_settings import CollectionSettingsController + from api.admin.controller.custom_lists import CustomListsController + from api.admin.controller.dashboard import DashboardController + from api.admin.controller.discovery_service_library_registrations import ( + DiscoveryServiceLibraryRegistrationsController, + ) + from api.admin.controller.discovery_services import DiscoveryServicesController + from api.admin.controller.feed import FeedController + from api.admin.controller.individual_admin_settings import ( + IndividualAdminSettingsController, + ) + from api.admin.controller.lanes import LanesController + from api.admin.controller.library_settings import LibrarySettingsController + from api.admin.controller.metadata_service_self_tests import ( + MetadataServiceSelfTestsController, + ) + from api.admin.controller.metadata_services import MetadataServicesController + from api.admin.controller.patron import PatronController + from api.admin.controller.patron_auth_service_self_tests import ( + PatronAuthServiceSelfTestsController, + ) + from api.admin.controller.patron_auth_services import PatronAuthServicesController + from api.admin.controller.quicksight import QuickSightController + from api.admin.controller.reset_password import ResetPasswordController + from api.admin.controller.search_service_self_tests import ( + SearchServiceSelfTestsController, + ) + from api.admin.controller.self_tests import SelfTestsController + from api.admin.controller.settings import SettingsController + from api.admin.controller.sign_in import SignInController + from api.admin.controller.sitewide_services import ( + SearchServicesController, + 
SitewideServicesController, + ) + from api.admin.controller.sitewide_settings import ( + SitewideConfigurationSettingsController, + ) + from api.admin.controller.timestamps import TimestampsController + from api.admin.controller.view import ViewController + from api.admin.controller.work_editor import WorkController as AdminWorkController + + +class CirculationManager: + log = logging.getLogger("api.circulation_manager.CirculationManager") + + # API Controllers + index_controller: IndexController + opds_feeds: OPDSFeedController + marc_records: MARCRecordController + loans: LoanController + annotations: AnnotationController + urn_lookup: URNLookupController + work_controller: WorkController + analytics_controller: AnalyticsController + profiles: ProfileController + patron_devices: DeviceTokensController + version: ApplicationVersionController + odl_notification_controller: ODLNotificationController + static_files: StaticFileController + playtime_entries: PlaytimeEntriesController + + # Admin controllers + admin_sign_in_controller: SignInController + admin_reset_password_controller: ResetPasswordController + timestamps_controller: TimestampsController + admin_work_controller: AdminWorkController + admin_feed_controller: FeedController + admin_custom_lists_controller: CustomListsController + admin_lanes_controller: LanesController + admin_dashboard_controller: DashboardController + admin_settings_controller: SettingsController + admin_patron_controller: PatronController + admin_self_tests_controller: SelfTestsController + admin_discovery_services_controller: DiscoveryServicesController + admin_discovery_service_library_registrations_controller: DiscoveryServiceLibraryRegistrationsController + admin_metadata_services_controller: MetadataServicesController + admin_metadata_service_self_tests_controller: MetadataServiceSelfTestsController + admin_patron_auth_services_controller: PatronAuthServicesController + admin_patron_auth_service_self_tests_controller: PatronAuthServiceSelfTestsController + admin_collection_settings_controller: CollectionSettingsController + admin_collection_self_tests_controller: CollectionSelfTestsController + admin_sitewide_configuration_settings_controller: SitewideConfigurationSettingsController + admin_library_settings_controller: LibrarySettingsController + admin_individual_admin_settings_controller: IndividualAdminSettingsController + admin_sitewide_services_controller: SitewideServicesController + admin_search_service_self_tests_controller: SearchServiceSelfTestsController + admin_search_services_controller: SearchServicesController + admin_catalog_services_controller: CatalogServicesController + admin_announcement_service: AnnouncementSettings + admin_search_controller: AdminSearchController + admin_view_controller: ViewController + admin_quicksight_controller: QuickSightController + + @inject + def __init__( + self, + _db, + services: Services = Provide[Services], + ): + self._db = _db + self.services = services + self.analytics = services.analytics.analytics() + self.site_configuration_last_update = ( + Configuration.site_configuration_last_update(self._db, timeout=0) + ) + self.setup_one_time_controllers() + self.load_settings() + + def load_facets_from_request(self, *args, **kwargs): + """Load a faceting object from the incoming request, but also apply some + application-specific access restrictions: + + * You can't use nonstandard caching rules unless you're an authenticated administrator. + * You can't access a WorkList that's not accessible to you. 
+ """ + + facets = load_facets_from_request(*args, **kwargs) + + worklist = kwargs.get("worklist") + if worklist is not None: + # Try to get the index controller. If it's not initialized + # for any reason, don't run this check -- we have bigger + # problems. + index_controller = getattr(self, "index_controller", None) + if index_controller and not worklist.accessible_to( + index_controller.request_patron + ): + return NO_SUCH_LANE.detailed(_("Lane does not exist")) + + return facets + + def reload_settings_if_changed(self): + """If the site configuration has been updated, reload the + CirculationManager's configuration from the database. + """ + last_update = Configuration.site_configuration_last_update(self._db) + if last_update > self.site_configuration_last_update: + self.load_settings() + self.site_configuration_last_update = last_update + + @log_elapsed_time(log_method=log.info, message_prefix="load_settings") + def load_settings(self): + """Load all necessary configuration settings and external + integrations from the database. + + This is called once when the CirculationManager is + initialized. It may also be called later to reload the site + configuration after changes are made in the administrative + interface. + """ + with elapsed_time_logging( + log_method=self.log.debug, + skip_start=True, + message_prefix="load_settings - load libraries", + ): + libraries = self._db.query(Library).all() + + with elapsed_time_logging( + log_method=self.log.debug, + skip_start=True, + message_prefix="load_settings - populate caches", + ): + # Populate caches + Library.cache_warm(self._db, lambda: libraries) + ConfigurationSetting.cache_warm(self._db) + + self.auth = Authenticator(self._db, libraries, self.analytics) + + self.setup_external_search() + + # Track the Lane configuration for each library by mapping its + # short name to the top-level lane. + new_top_level_lanes = {} + # Create a CirculationAPI for each library. + new_circulation_apis = {} + # Potentially load a CustomIndexView for each library + new_custom_index_views = {} + + with elapsed_time_logging( + log_method=self.log.debug, + message_prefix="load_settings - per-library lanes, custom indexes, api", + ): + for library in libraries: + new_top_level_lanes[library.id] = load_lanes(self._db, library) + new_custom_index_views[library.id] = CustomIndexView.for_library( + library + ) + new_circulation_apis[library.id] = self.setup_circulation( + library, self.analytics + ) + + self.top_level_lanes = new_top_level_lanes + self.circulation_apis = new_circulation_apis + self.custom_index_views = new_custom_index_views + + # Assemble the list of patron web client domains from individual + # library registration settings as well as a sitewide setting. 
+ patron_web_domains = set() + + def get_domain(url): + url = url.strip() + if url == "*": + return url + ( + scheme, + netloc, + path, + parameters, + query, + fragment, + ) = urllib.parse.urlparse(url) + if scheme and netloc: + return scheme + "://" + netloc + else: + return None + + sitewide_patron_web_client_urls = ConfigurationSetting.sitewide( + self._db, Configuration.PATRON_WEB_HOSTNAMES + ).value + if sitewide_patron_web_client_urls: + for url in sitewide_patron_web_client_urls.split("|"): + domain = get_domain(url) + if domain: + patron_web_domains.add(domain) + + domains = self._db.execute( + select(DiscoveryServiceRegistration.web_client).where( + DiscoveryServiceRegistration.web_client != None + ) + ).all() + for row in domains: + patron_web_domains.add(get_domain(row.web_client)) + + self.patron_web_domains = patron_web_domains + self.setup_configuration_dependent_controllers() + authentication_document_cache_time = int( + ConfigurationSetting.sitewide( + self._db, Configuration.AUTHENTICATION_DOCUMENT_CACHE_TIME + ).value_or_default(3600) + ) + self.authentication_for_opds_documents = ExpiringDict( + max_len=1000, max_age_seconds=authentication_document_cache_time + ) + + @property + def external_search(self): + """Retrieve or create a connection to the search interface. + + This is created lazily so that a failure to connect only + affects feeds that depend on the search engine, not the whole + circulation manager. + """ + if not self._external_search: + self.setup_external_search() + return self._external_search + + def setup_external_search(self): + try: + self._external_search = self.setup_search() + self.external_search_initialization_exception = None + except Exception as e: + self.log.error("Exception initializing search engine: %s", e) + self._external_search = None + self.external_search_initialization_exception = e + return self._external_search + + def log_lanes(self, lanelist=None, level=0): + """Output information about the lane layout.""" + lanelist = lanelist or self.top_level_lane.sublanes + for lane in lanelist: + self.log.debug("%s%r", "-" * level, lane) + if lane.sublanes: + self.log_lanes(lane.sublanes, level + 1) + + def setup_search(self): + """Set up a search client.""" + search = ExternalSearchIndex(self._db) + if not search: + self.log.warn("No external search server configured.") + return None + return search + + def setup_circulation(self, library, analytics): + """Set up the Circulation object.""" + return CirculationAPI(self._db, library, analytics=analytics) + + def setup_one_time_controllers(self): + """Set up all the controllers that will be used by the web app. + + This method will be called only once, no matter how many times the + site configuration changes. 
+ """ + self.index_controller = IndexController(self) + self.opds_feeds = OPDSFeedController(self) + self.marc_records = MARCRecordController(self.services.storage.public()) + self.loans = LoanController(self) + self.annotations = AnnotationController(self) + self.urn_lookup = URNLookupController(self) + self.work_controller = WorkController(self) + self.analytics_controller = AnalyticsController(self) + self.profiles = ProfileController(self) + self.patron_devices = DeviceTokensController(self) + self.version = ApplicationVersionController() + self.odl_notification_controller = ODLNotificationController(self) + self.static_files = StaticFileController(self) + self.patron_auth_token = PatronAuthTokenController(self) + self.playtime_entries = PlaytimeEntriesController(self) + + def setup_configuration_dependent_controllers(self): + """Set up all the controllers that depend on the + current site configuration. + + This method will be called fresh every time the site + configuration changes. + """ + self.saml_controller = SAMLController(self, self.auth) + + def annotator(self, lane, facets=None, *args, **kwargs): + """Create an appropriate OPDS annotator for the given lane. + + :param lane: A Lane or WorkList. + :param facets: A faceting object. + :param annotator_class: Instantiate this annotator class if possible. + Intended for use in unit tests. + """ + library = None + if lane and isinstance(lane, Lane): + library = lane.library + elif lane and isinstance(lane, WorkList): + library = lane.get_library(self._db) + if not library and hasattr(flask.request, "library"): + library = flask.request.library + + # If no library is provided, the best we can do is a generic + # annotator for this application. + if not library: + return CirculationManagerAnnotator(lane) + + # At this point we know the request is in a library context, so we + # can create a LibraryAnnotator customized for that library. + + # Some features are only available if a patron authentication + # mechanism is set up for this library. + authenticator = self.auth.library_authenticators.get(library.short_name) + library_identifies_patrons = ( + authenticator is not None and authenticator.identifies_individuals + ) + annotator_class = kwargs.pop("annotator_class", LibraryAnnotator) + return annotator_class( + self.circulation_apis[library.id], + lane, + library, + top_level_title="All Books", + library_identifies_patrons=library_identifies_patrons, + facets=facets, + *args, + **kwargs, + ) + + @property + def authentication_for_opds_document(self): + """Make sure the current request's library has an Authentication For + OPDS document in the cache, then return the cached version. + + If the cache is disabled, a fresh document is created every time. + + If the query argument `debug` is provided and the + WSGI_DEBUG_KEY site-wide setting is set to True, the + authentication document is annotated with a '_debug' section + describing the current WSGI environment. Since this can reveal + internal details of deployment, it should only be enabled when + diagnosing deployment problems. + """ + name = flask.request.library.short_name + value = self.authentication_for_opds_documents.get(name, None) + if value is None: + # The document was not in the cache, either because it's + # expired or because the cache itself has been disabled. + # Create a new one and stick it in the cache for next + # time. 
+ value = self.auth.create_authentication_document() + self.authentication_for_opds_documents[name] = value + return value diff --git a/api/controller.py b/api/controller.py deleted file mode 100644 index 1bd1df4f9e..0000000000 --- a/api/controller.py +++ /dev/null @@ -1,2352 +0,0 @@ -from __future__ import annotations - -import email -import json -import logging -import os -import urllib.parse -from time import mktime -from typing import TYPE_CHECKING, Any -from wsgiref.handlers import format_date_time - -import flask -import pytz -from attr import define -from dependency_injector.wiring import Provide, inject -from expiringdict import ExpiringDict -from flask import Response, make_response, redirect -from flask_babel import lazy_gettext as _ -from lxml import etree -from pydantic import ValidationError -from sqlalchemy import select -from sqlalchemy.orm import eagerload -from sqlalchemy.orm.exc import NoResultFound - -from api.annotations import AnnotationParser, AnnotationWriter -from api.authentication.access_token import AccessTokenProvider -from api.authenticator import Authenticator, CirculationPatronProfileStorage -from api.base_controller import BaseCirculationManagerController -from api.circulation import CirculationAPI -from api.circulation_exceptions import * -from api.config import CannotLoadConfiguration, Configuration -from api.controller_marc import MARCRecordController -from api.custom_index import CustomIndexView -from api.lanes import ( - ContributorFacets, - ContributorLane, - CrawlableCollectionBasedLane, - CrawlableCustomListBasedLane, - CrawlableFacets, - HasSeriesFacets, - JackpotFacets, - JackpotWorkList, - RecommendationLane, - RelatedBooksLane, - SeriesFacets, - SeriesLane, - load_lanes, -) -from api.model.patron_auth import PatronAuthAccessToken -from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse -from api.odl import ODLAPI -from api.odl2 import ODL2API -from api.problem_details import * -from api.saml.controller import SAMLController -from core.app_server import ApplicationVersionController -from core.app_server import URNLookupController as CoreURNLookupController -from core.app_server import ( - load_facets_from_request, - load_pagination_from_request, - url_for, -) -from core.entrypoint import EverythingEntryPoint -from core.external_search import ExternalSearchIndex, SortKeyPagination -from core.feed.acquisition import OPDSAcquisitionFeed -from core.feed.annotator.circulation import ( - CirculationManagerAnnotator, - LibraryAnnotator, -) -from core.feed.navigation import NavigationFeed -from core.feed.opds import NavigationFacets -from core.lane import Facets, FeaturedFacets, Lane, Pagination, SearchFacets, WorkList -from core.metadata_layer import ContributorData -from core.model import ( - Annotation, - CirculationEvent, - Collection, - ConfigurationSetting, - CustomList, - DataSource, - DeliveryMechanism, - Hold, - Identifier, - IntegrationConfiguration, - IntegrationLibraryConfiguration, - Library, - LicensePool, - LicensePoolDeliveryMechanism, - Loan, - Patron, - Representation, - Session, - get_one, -) -from core.model.devicetokens import ( - DeviceToken, - DuplicateDeviceTokenError, - InvalidTokenTypeError, -) -from core.model.discovery_service_registration import DiscoveryServiceRegistration -from core.opensearch import OpenSearchDocument -from core.query.playtime_entries import PlaytimeEntries -from core.service.container import Services -from core.user_profile import ProfileController as CoreProfileController 
-from core.util.authentication_for_opds import AuthenticationForOPDSDocument -from core.util.datetime_helpers import utc_now -from core.util.http import RemoteIntegrationException -from core.util.log import elapsed_time_logging, log_elapsed_time -from core.util.opds_writer import OPDSFeed -from core.util.problem_detail import ProblemError - -if TYPE_CHECKING: - from werkzeug import Response as wkResponse - - from api.admin.controller.admin_search import AdminSearchController - from api.admin.controller.announcement_service import AnnouncementSettings - from api.admin.controller.catalog_services import CatalogServicesController - from api.admin.controller.collection_self_tests import CollectionSelfTestsController - from api.admin.controller.collection_settings import CollectionSettingsController - from api.admin.controller.custom_lists import CustomListsController - from api.admin.controller.dashboard import DashboardController - from api.admin.controller.discovery_service_library_registrations import ( - DiscoveryServiceLibraryRegistrationsController, - ) - from api.admin.controller.discovery_services import DiscoveryServicesController - from api.admin.controller.feed import FeedController - from api.admin.controller.individual_admin_settings import ( - IndividualAdminSettingsController, - ) - from api.admin.controller.lanes import LanesController - from api.admin.controller.library_settings import LibrarySettingsController - from api.admin.controller.metadata_service_self_tests import ( - MetadataServiceSelfTestsController, - ) - from api.admin.controller.metadata_services import MetadataServicesController - from api.admin.controller.patron import PatronController - from api.admin.controller.patron_auth_service_self_tests import ( - PatronAuthServiceSelfTestsController, - ) - from api.admin.controller.patron_auth_services import PatronAuthServicesController - from api.admin.controller.quicksight import QuickSightController - from api.admin.controller.reset_password import ResetPasswordController - from api.admin.controller.search_service_self_tests import ( - SearchServiceSelfTestsController, - ) - from api.admin.controller.self_tests import SelfTestsController - from api.admin.controller.settings import SettingsController - from api.admin.controller.sign_in import SignInController - from api.admin.controller.sitewide_services import ( - SearchServicesController, - SitewideServicesController, - ) - from api.admin.controller.sitewide_settings import ( - SitewideConfigurationSettingsController, - ) - from api.admin.controller.timestamps import TimestampsController - from api.admin.controller.view import ViewController - from api.admin.controller.work_editor import WorkController as AdminWorkController - - -class CirculationManager: - log = logging.getLogger("api.controller.CirculationManager") - - # API Controllers - index_controller: IndexController - opds_feeds: OPDSFeedController - marc_records: MARCRecordController - loans: LoanController - annotations: AnnotationController - urn_lookup: URNLookupController - work_controller: WorkController - analytics_controller: AnalyticsController - profiles: ProfileController - patron_devices: DeviceTokensController - version: ApplicationVersionController - odl_notification_controller: ODLNotificationController - static_files: StaticFileController - playtime_entries: PlaytimeEntriesController - - # Admin controllers - admin_sign_in_controller: SignInController - admin_reset_password_controller: ResetPasswordController - timestamps_controller: 
TimestampsController - admin_work_controller: AdminWorkController - admin_feed_controller: FeedController - admin_custom_lists_controller: CustomListsController - admin_lanes_controller: LanesController - admin_dashboard_controller: DashboardController - admin_settings_controller: SettingsController - admin_patron_controller: PatronController - admin_self_tests_controller: SelfTestsController - admin_discovery_services_controller: DiscoveryServicesController - admin_discovery_service_library_registrations_controller: DiscoveryServiceLibraryRegistrationsController - admin_metadata_services_controller: MetadataServicesController - admin_metadata_service_self_tests_controller: MetadataServiceSelfTestsController - admin_patron_auth_services_controller: PatronAuthServicesController - admin_patron_auth_service_self_tests_controller: PatronAuthServiceSelfTestsController - admin_collection_settings_controller: CollectionSettingsController - admin_collection_self_tests_controller: CollectionSelfTestsController - admin_sitewide_configuration_settings_controller: SitewideConfigurationSettingsController - admin_library_settings_controller: LibrarySettingsController - admin_individual_admin_settings_controller: IndividualAdminSettingsController - admin_sitewide_services_controller: SitewideServicesController - admin_search_service_self_tests_controller: SearchServiceSelfTestsController - admin_search_services_controller: SearchServicesController - admin_catalog_services_controller: CatalogServicesController - admin_announcement_service: AnnouncementSettings - admin_search_controller: AdminSearchController - admin_view_controller: ViewController - admin_quicksight_controller: QuickSightController - - @inject - def __init__( - self, - _db, - services: Services = Provide[Services], - ): - self._db = _db - self.services = services - self.analytics = services.analytics.analytics() - self.site_configuration_last_update = ( - Configuration.site_configuration_last_update(self._db, timeout=0) - ) - self.setup_one_time_controllers() - self.load_settings() - - def load_facets_from_request(self, *args, **kwargs): - """Load a faceting object from the incoming request, but also apply some - application-specific access restrictions: - - * You can't use nonstandard caching rules unless you're an authenticated administrator. - * You can't access a WorkList that's not accessible to you. - """ - - facets = load_facets_from_request(*args, **kwargs) - - worklist = kwargs.get("worklist") - if worklist is not None: - # Try to get the index controller. If it's not initialized - # for any reason, don't run this check -- we have bigger - # problems. - index_controller = getattr(self, "index_controller", None) - if index_controller and not worklist.accessible_to( - index_controller.request_patron - ): - return NO_SUCH_LANE.detailed(_("Lane does not exist")) - - return facets - - def reload_settings_if_changed(self): - """If the site configuration has been updated, reload the - CirculationManager's configuration from the database. - """ - last_update = Configuration.site_configuration_last_update(self._db) - if last_update > self.site_configuration_last_update: - self.load_settings() - self.site_configuration_last_update = last_update - - @log_elapsed_time(log_method=log.info, message_prefix="load_settings") - def load_settings(self): - """Load all necessary configuration settings and external - integrations from the database. - - This is called once when the CirculationManager is - initialized. 
It may also be called later to reload the site - configuration after changes are made in the administrative - interface. - """ - with elapsed_time_logging( - log_method=self.log.debug, - skip_start=True, - message_prefix="load_settings - load libraries", - ): - libraries = self._db.query(Library).all() - - with elapsed_time_logging( - log_method=self.log.debug, - skip_start=True, - message_prefix="load_settings - populate caches", - ): - # Populate caches - Library.cache_warm(self._db, lambda: libraries) - ConfigurationSetting.cache_warm(self._db) - - self.auth = Authenticator(self._db, libraries, self.analytics) - - self.setup_external_search() - - # Track the Lane configuration for each library by mapping its - # short name to the top-level lane. - new_top_level_lanes = {} - # Create a CirculationAPI for each library. - new_circulation_apis = {} - # Potentially load a CustomIndexView for each library - new_custom_index_views = {} - - with elapsed_time_logging( - log_method=self.log.debug, - message_prefix="load_settings - per-library lanes, custom indexes, api", - ): - for library in libraries: - new_top_level_lanes[library.id] = load_lanes(self._db, library) - new_custom_index_views[library.id] = CustomIndexView.for_library( - library - ) - new_circulation_apis[library.id] = self.setup_circulation( - library, self.analytics - ) - - self.top_level_lanes = new_top_level_lanes - self.circulation_apis = new_circulation_apis - self.custom_index_views = new_custom_index_views - - # Assemble the list of patron web client domains from individual - # library registration settings as well as a sitewide setting. - patron_web_domains = set() - - def get_domain(url): - url = url.strip() - if url == "*": - return url - scheme, netloc, path, parameters, query, fragment = urllib.parse.urlparse( - url - ) - if scheme and netloc: - return scheme + "://" + netloc - else: - return None - - sitewide_patron_web_client_urls = ConfigurationSetting.sitewide( - self._db, Configuration.PATRON_WEB_HOSTNAMES - ).value - if sitewide_patron_web_client_urls: - for url in sitewide_patron_web_client_urls.split("|"): - domain = get_domain(url) - if domain: - patron_web_domains.add(domain) - - domains = self._db.execute( - select(DiscoveryServiceRegistration.web_client).where( - DiscoveryServiceRegistration.web_client != None - ) - ).all() - for row in domains: - patron_web_domains.add(get_domain(row.web_client)) - - self.patron_web_domains = patron_web_domains - self.setup_configuration_dependent_controllers() - authentication_document_cache_time = int( - ConfigurationSetting.sitewide( - self._db, Configuration.AUTHENTICATION_DOCUMENT_CACHE_TIME - ).value_or_default(3600) - ) - self.authentication_for_opds_documents = ExpiringDict( - max_len=1000, max_age_seconds=authentication_document_cache_time - ) - - @property - def external_search(self): - """Retrieve or create a connection to the search interface. - - This is created lazily so that a failure to connect only - affects feeds that depend on the search engine, not the whole - circulation manager. 
- """ - if not self._external_search: - self.setup_external_search() - return self._external_search - - def setup_external_search(self): - try: - self._external_search = self.setup_search() - self.external_search_initialization_exception = None - except Exception as e: - self.log.error("Exception initializing search engine: %s", e) - self._external_search = None - self.external_search_initialization_exception = e - return self._external_search - - def log_lanes(self, lanelist=None, level=0): - """Output information about the lane layout.""" - lanelist = lanelist or self.top_level_lane.sublanes - for lane in lanelist: - self.log.debug("%s%r", "-" * level, lane) - if lane.sublanes: - self.log_lanes(lane.sublanes, level + 1) - - def setup_search(self): - """Set up a search client.""" - search = ExternalSearchIndex(self._db) - if not search: - self.log.warn("No external search server configured.") - return None - return search - - def setup_circulation(self, library, analytics): - """Set up the Circulation object.""" - return CirculationAPI(self._db, library, analytics=analytics) - - def setup_one_time_controllers(self): - """Set up all the controllers that will be used by the web app. - - This method will be called only once, no matter how many times the - site configuration changes. - """ - self.index_controller = IndexController(self) - self.opds_feeds = OPDSFeedController(self) - self.marc_records = MARCRecordController(self.services.storage.public()) - self.loans = LoanController(self) - self.annotations = AnnotationController(self) - self.urn_lookup = URNLookupController(self) - self.work_controller = WorkController(self) - self.analytics_controller = AnalyticsController(self) - self.profiles = ProfileController(self) - self.patron_devices = DeviceTokensController(self) - self.version = ApplicationVersionController() - self.odl_notification_controller = ODLNotificationController(self) - self.static_files = StaticFileController(self) - self.patron_auth_token = PatronAuthTokenController(self) - self.playtime_entries = PlaytimeEntriesController(self) - - def setup_configuration_dependent_controllers(self): - """Set up all the controllers that depend on the - current site configuration. - - This method will be called fresh every time the site - configuration changes. - """ - self.saml_controller = SAMLController(self, self.auth) - - def annotator(self, lane, facets=None, *args, **kwargs): - """Create an appropriate OPDS annotator for the given lane. - - :param lane: A Lane or WorkList. - :param facets: A faceting object. - :param annotator_class: Instantiate this annotator class if possible. - Intended for use in unit tests. - """ - library = None - if lane and isinstance(lane, Lane): - library = lane.library - elif lane and isinstance(lane, WorkList): - library = lane.get_library(self._db) - if not library and hasattr(flask.request, "library"): - library = flask.request.library - - # If no library is provided, the best we can do is a generic - # annotator for this application. - if not library: - return CirculationManagerAnnotator(lane) - - # At this point we know the request is in a library context, so we - # can create a LibraryAnnotator customized for that library. - - # Some features are only available if a patron authentication - # mechanism is set up for this library. 
- authenticator = self.auth.library_authenticators.get(library.short_name) - library_identifies_patrons = ( - authenticator is not None and authenticator.identifies_individuals - ) - annotator_class = kwargs.pop("annotator_class", LibraryAnnotator) - return annotator_class( - self.circulation_apis[library.id], - lane, - library, - top_level_title="All Books", - library_identifies_patrons=library_identifies_patrons, - facets=facets, - *args, - **kwargs, - ) - - @property - def authentication_for_opds_document(self): - """Make sure the current request's library has an Authentication For - OPDS document in the cache, then return the cached version. - - If the cache is disabled, a fresh document is created every time. - - If the query argument `debug` is provided and the - WSGI_DEBUG_KEY site-wide setting is set to True, the - authentication document is annotated with a '_debug' section - describing the current WSGI environment. Since this can reveal - internal details of deployment, it should only be enabled when - diagnosing deployment problems. - """ - name = flask.request.library.short_name - value = self.authentication_for_opds_documents.get(name, None) - if value is None: - # The document was not in the cache, either because it's - # expired or because the cache itself has been disabled. - # Create a new one and stick it in the cache for next - # time. - value = self.auth.create_authentication_document() - self.authentication_for_opds_documents[name] = value - return value - - -class CirculationManagerController(BaseCirculationManagerController): - def get_patron_circ_objects(self, object_class, patron, license_pools): - if not patron: - return [] - pool_ids = [pool.id for pool in license_pools] - - return ( - self._db.query(object_class) - .filter( - object_class.patron_id == patron.id, - object_class.license_pool_id.in_(pool_ids), - ) - .options(eagerload(object_class.license_pool)) - .all() - ) - - def get_patron_loan(self, patron, license_pools): - loans = self.get_patron_circ_objects(Loan, patron, license_pools) - if loans: - loan = loans[0] - return loan, loan.license_pool - return None, None - - def get_patron_hold(self, patron, license_pools): - holds = self.get_patron_circ_objects(Hold, patron, license_pools) - if holds: - hold = holds[0] - return hold, hold.license_pool - return None, None - - @property - def circulation(self): - """Return the appropriate CirculationAPI for the request Library.""" - library_id = flask.request.library.id - return self.manager.circulation_apis[library_id] - - @property - def search_engine(self): - """Return the configured external search engine, or a - ProblemDetail if none is configured. - """ - search_engine = self.manager.external_search - if not search_engine: - return REMOTE_INTEGRATION_FAILED.detailed( - _("The search index for this site is not properly configured.") - ) - return search_engine - - def handle_conditional_request(self, last_modified=None): - """Handle a conditional HTTP request. - - :param last_modified: A datetime representing the time this - resource was last modified. - - :return: a Response, if the incoming request can be handled - conditionally. Otherwise, None. - """ - if not last_modified: - return None - - # If-Modified-Since values have resolution of one second. If - # last_modified has millisecond resolution, change its - # resolution to one second. 
- if last_modified.microsecond: - last_modified = last_modified.replace(microsecond=0) - - if_modified_since = flask.request.headers.get("If-Modified-Since") - if not if_modified_since: - return None - - try: - parsed_if_modified_since = email.utils.parsedate_to_datetime( - if_modified_since - ) - except TypeError: - # Parse error <= Python 3.9 - return None - except ValueError: - # Parse error >= Python 3.10 - return None - if not parsed_if_modified_since: - return None - - # "[I]f the date is conforming to the RFCs it will represent a - # time in UTC but with no indication of the actual source - # timezone of the message the date comes from." - if parsed_if_modified_since.tzinfo is None: - parsed_if_modified_since = parsed_if_modified_since.replace(tzinfo=pytz.UTC) - - if parsed_if_modified_since >= last_modified: - return Response(status=304) - return None - - def load_lane(self, lane_identifier): - """Turn user input into a Lane object.""" - library_id = flask.request.library.id - - lane = None - if lane_identifier is None: - # Return the top-level lane. - lane = self.manager.top_level_lanes[library_id] - if isinstance(lane, Lane): - lane = self._db.merge(lane) - elif isinstance(lane, WorkList): - lane.children = [self._db.merge(child) for child in lane.children] - else: - try: - lane_identifier = int(lane_identifier) - except ValueError as e: - pass - - if isinstance(lane_identifier, int): - lane = get_one( - self._db, Lane, id=lane_identifier, library_id=library_id - ) - - if lane and not lane.accessible_to(self.request_patron): - # The authenticated patron cannot access the lane they - # requested. Act like the lane does not exist. - lane = None - - if not lane: - return NO_SUCH_LANE.detailed( - _( - "Lane %(lane_identifier)s does not exist or is not associated with library %(library_id)s", - lane_identifier=lane_identifier, - library_id=library_id, - ) - ) - - return lane - - def load_work(self, library, identifier_type, identifier): - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - return pools - - # We know there is at least one LicensePool, and all LicensePools - # for an Identifier have the same Work. - work = pools[0].work - - if work and not work.age_appropriate_for_patron(self.request_patron): - # This work is not age-appropriate for the authenticated - # patron. Don't show it. - work = NOT_AGE_APPROPRIATE - return work - - def load_licensepools(self, library, identifier_type, identifier): - """Turn user input into one or more LicensePool objects. - - :param library: The LicensePools must be associated with one of this - Library's Collections. - :param identifier_type: A type of identifier, e.g. "ISBN" - :param identifier: An identifier string, used with `identifier_type` - to look up an Identifier. 
- """ - _db = Session.object_session(library) - pools = ( - _db.scalars( - select(LicensePool) - .join(Collection, LicensePool.collection_id == Collection.id) - .join(Identifier, LicensePool.identifier_id == Identifier.id) - .join( - IntegrationConfiguration, - Collection.integration_configuration_id - == IntegrationConfiguration.id, - ) - .join( - IntegrationLibraryConfiguration, - IntegrationConfiguration.id - == IntegrationLibraryConfiguration.parent_id, - ) - .where( - Identifier.type == identifier_type, - Identifier.identifier == identifier, - IntegrationLibraryConfiguration.library_id == library.id, - ) - ) - .unique() - .all() - ) - if not pools: - return NO_LICENSES.detailed( - _("The item you're asking about (%s/%s) isn't in this collection.") - % (identifier_type, identifier) - ) - return pools - - def load_licensepool(self, license_pool_id): - """Turns user input into a LicensePool""" - license_pool = get_one(self._db, LicensePool, id=license_pool_id) - if not license_pool: - return INVALID_INPUT.detailed( - _("License Pool #%s does not exist.") % license_pool_id - ) - - return license_pool - - def load_licensepooldelivery(self, pool, mechanism_id): - """Turn user input into a LicensePoolDeliveryMechanism object.""" - mechanism = get_one( - self._db, - LicensePoolDeliveryMechanism, - data_source=pool.data_source, - identifier=pool.identifier, - delivery_mechanism_id=mechanism_id, - on_multiple="interchangeable", - ) - return mechanism or BAD_DELIVERY_MECHANISM - - def apply_borrowing_policy(self, patron, license_pool): - """Apply the borrowing policy of the patron's library to the - book they're trying to check out. - - This prevents a patron from borrowing an age-inappropriate book - or from placing a hold in a library that prohibits holds. - - Generally speaking, both of these operations should be - prevented before they get to this point; this is an extra - layer of protection. - - :param patron: A `Patron`. It's okay if this turns out to be a - `ProblemDetail` or `None` due to a problem earlier in the - process. - :param license_pool`: The `LicensePool` the patron is trying to act on. - """ - if patron is None or isinstance(patron, ProblemDetail): - # An earlier stage in the process failed to authenticate - # the patron. - return patron - - work = license_pool.work - if work is not None and not work.age_appropriate_for_patron(patron): - return NOT_AGE_APPROPRIATE - - if ( - not patron.library.settings.allow_holds - and license_pool.licenses_available == 0 - and not license_pool.open_access - and not license_pool.unlimited_access - ): - return FORBIDDEN_BY_POLICY.detailed( - _("Library policy prohibits the placement of holds."), status_code=403 - ) - return None - - -class IndexController(CirculationManagerController): - """Redirect the patron to the appropriate feed.""" - - def __call__(self): - # If this library provides a custom index view, use that. - library = flask.request.library - custom = self.manager.custom_index_views.get(library.id) - if custom is not None: - annotator = self.manager.annotator(None) - return custom(library, annotator) - - # The simple case: the app is equally open to all clients. - library_short_name = flask.request.library.short_name - if not self.has_root_lanes(): - return redirect( - url_for( - "acquisition_groups", - library_short_name=library_short_name, - _external=True, - ) - ) - - # The more complex case. We must authorize the patron, check - # their type, and redirect them to an appropriate feed. 
- return self.appropriate_index_for_patron_type() - - def authentication_document(self): - """Serve this library's Authentication For OPDS document.""" - return Response( - self.manager.authentication_for_opds_document, - 200, - {"Content-Type": AuthenticationForOPDSDocument.MEDIA_TYPE}, - ) - - def has_root_lanes(self): - """Does the active library feature root lanes for patrons of - certain types? - - :return: A boolean - """ - return flask.request.library.has_root_lanes - - def authenticated_patron_root_lane(self): - patron = self.authenticated_patron_from_request() - if isinstance(patron, ProblemDetail): - return patron - if isinstance(patron, Response): - return patron - return patron.root_lane - - def appropriate_index_for_patron_type(self): - library_short_name = flask.request.library.short_name - root_lane = self.authenticated_patron_root_lane() - if isinstance(root_lane, ProblemDetail): - return root_lane - if isinstance(root_lane, Response): - return root_lane - if root_lane is None: - return redirect( - url_for( - "acquisition_groups", - library_short_name=library_short_name, - _external=True, - ) - ) - - return redirect( - url_for( - "acquisition_groups", - library_short_name=library_short_name, - lane_identifier=root_lane.id, - _external=True, - ) - ) - - -class OPDSFeedController(CirculationManagerController): - def groups(self, lane_identifier, feed_class=OPDSAcquisitionFeed): - """Build or retrieve a grouped acquisition feed. - - :param lane_identifier: An identifier that uniquely identifiers - the WorkList whose feed we want. - :param feed_class: A replacement for AcquisitionFeed, for use in - tests. - """ - library = flask.request.library - - # Special case: a patron with a root lane who attempts to access - # the library's top-level WorkList is redirected to their root - # lane (as though they had accessed the index controller) - # rather than being denied access. - if lane_identifier is None: - patron = self.request_patron - if patron is not None and patron.root_lane: - return redirect( - url_for( - "acquisition_groups", - library_short_name=library.short_name, - lane_identifier=patron.root_lane.id, - _external=True, - ) - ) - - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - - if not lane.children: - # This lane has no children. Although we can technically - # create a grouped feed, it would be an unsatisfying - # gateway to a paginated feed. We should just serve the - # paginated feed. - return self.feed(lane_identifier, feed_class) - - facet_class_kwargs = dict( - minimum_featured_quality=library.settings.minimum_featured_quality, - ) - facets = self.manager.load_facets_from_request( - worklist=lane, - base_class=FeaturedFacets, - base_class_constructor_kwargs=facet_class_kwargs, - ) - if isinstance(facets, ProblemDetail): - return facets - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - url = url_for( - "acquisition_groups", - lane_identifier=lane_identifier, - library_short_name=library.short_name, - _external=True, - ) - - annotator = self.manager.annotator(lane, facets) - return feed_class.groups( - _db=self._db, - title=lane.display_name, - url=url, - worklist=lane, - annotator=annotator, - facets=facets, - search_engine=search_engine, - ).as_response(mime_types=flask.request.accept_mimetypes) - - def feed(self, lane_identifier, feed_class=OPDSAcquisitionFeed): - """Build or retrieve a paginated acquisition feed. 
- - :param lane_identifier: An identifier that uniquely identifiers - the WorkList whose feed we want. - :param feed_class: A replacement for AcquisitionFeed, for use in - tests. - """ - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - facets = self.manager.load_facets_from_request(worklist=lane) - if isinstance(facets, ProblemDetail): - return facets - pagination = load_pagination_from_request(SortKeyPagination) - if isinstance(pagination, ProblemDetail): - return pagination - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - library_short_name = flask.request.library.short_name - url = url_for( - "feed", - lane_identifier=lane_identifier, - library_short_name=library_short_name, - _external=True, - ) - - annotator = self.manager.annotator(lane, facets=facets) - max_age = flask.request.args.get("max_age") - feed = feed_class.page( - _db=self._db, - title=lane.display_name, - url=url, - worklist=lane, - annotator=annotator, - facets=facets, - pagination=pagination, - search_engine=search_engine, - ) - return feed.as_response( - max_age=int(max_age) if max_age else lane.max_cache_age(), - mime_types=flask.request.accept_mimetypes, - ) - - def navigation(self, lane_identifier): - """Build or retrieve a navigation feed, for clients that do not support groups.""" - - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - library = flask.request.library - library_short_name = library.short_name - url = url_for( - "navigation_feed", - lane_identifier=lane_identifier, - library_short_name=library_short_name, - _external=True, - ) - - title = lane.display_name - facet_class_kwargs = dict( - minimum_featured_quality=library.settings.minimum_featured_quality, - ) - facets = self.manager.load_facets_from_request( - worklist=lane, - base_class=NavigationFacets, - base_class_constructor_kwargs=facet_class_kwargs, - ) - annotator = self.manager.annotator(lane, facets) - return NavigationFeed.navigation( - _db=self._db, - title=title, - url=url, - worklist=lane, - annotator=annotator, - facets=facets, - ).as_response(max_age=lane.max_cache_age()) - - def crawlable_library_feed(self): - """Build or retrieve a crawlable acquisition feed for the - request library. - """ - library = flask.request.library - url = url_for( - "crawlable_library_feed", - library_short_name=library.short_name, - _external=True, - ) - title = library.name - lane = CrawlableCollectionBasedLane() - lane.initialize(library) - return self._crawlable_feed(title=title, url=url, worklist=lane) - - def crawlable_collection_feed(self, collection_name): - """Build or retrieve a crawlable acquisition feed for the - requested collection. - """ - collection = Collection.by_name(self._db, collection_name) - if not collection: - return NO_SUCH_COLLECTION - title = collection.name - url = url_for( - "crawlable_collection_feed", collection_name=collection.name, _external=True - ) - lane = CrawlableCollectionBasedLane() - lane.initialize([collection]) - return self._crawlable_feed(title=title, url=url, worklist=lane) - - def crawlable_list_feed(self, list_name): - """Build or retrieve a crawlable, paginated acquisition feed for the - named CustomList, sorted by update date. - """ - # TODO: A library is not strictly required here, since some - # CustomLists aren't associated with a library, but this isn't - # a use case we need to support now. 
- library = flask.request.library - list = CustomList.find(self._db, list_name, library=library) - if not list: - return NO_SUCH_LIST - library_short_name = library.short_name - title = list.name - url = url_for( - "crawlable_list_feed", - list_name=list.name, - library_short_name=library_short_name, - _external=True, - ) - lane = CrawlableCustomListBasedLane() - lane.initialize(library, list) - return self._crawlable_feed(title=title, url=url, worklist=lane) - - def _crawlable_feed( - self, title, url, worklist, annotator=None, feed_class=OPDSAcquisitionFeed - ): - """Helper method to create a crawlable feed. - - :param title: The title to use for the feed. - :param url: The URL from which the feed will be served. - :param worklist: A crawlable Lane which controls which works show up - in the feed. - :param annotator: A custom Annotator to use when generating the feed. - :param feed_class: A drop-in replacement for OPDSAcquisitionFeed - for use in tests. - """ - pagination = load_pagination_from_request( - SortKeyPagination, default_size=Pagination.DEFAULT_CRAWLABLE_SIZE - ) - if isinstance(pagination, ProblemDetail): - return pagination - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - # A crawlable feed has only one possible set of Facets, - # so library settings are irrelevant. - facets = self.manager.load_facets_from_request( - worklist=worklist, - base_class=CrawlableFacets, - ) - annotator = annotator or self.manager.annotator(worklist, facets=facets) - - return feed_class.page( - _db=self._db, - title=title, - url=url, - worklist=worklist, - annotator=annotator, - facets=facets, - pagination=pagination, - search_engine=search_engine, - ).as_response( - mime_types=flask.request.accept_mimetypes, max_age=worklist.max_cache_age() - ) - - def _load_search_facets(self, lane): - entrypoints = list(flask.request.library.entrypoints) - if len(entrypoints) > 1: - # There is more than one enabled EntryPoint. - # By default, search them all. - default_entrypoint = EverythingEntryPoint - else: - # There is only one enabled EntryPoint, - # and no need for a special default. - default_entrypoint = None - return self.manager.load_facets_from_request( - worklist=lane, - base_class=SearchFacets, - default_entrypoint=default_entrypoint, - ) - - def search(self, lane_identifier, feed_class=OPDSAcquisitionFeed): - """Search for books.""" - lane = self.load_lane(lane_identifier) - if isinstance(lane, ProblemDetail): - return lane - - # Although the search query goes against Opensearch, we must - # use normal pagination because the results are sorted by - # match quality, not bibliographic information. - pagination = load_pagination_from_request( - Pagination, default_size=Pagination.DEFAULT_SEARCH_SIZE - ) - if isinstance(pagination, ProblemDetail): - return pagination - - facets = self._load_search_facets(lane) - if isinstance(facets, ProblemDetail): - return facets - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - # Check whether there is a query string -- if not, we want to - # send an OpenSearch document explaining how to search. - query = flask.request.args.get("q") - library_short_name = flask.request.library.short_name - - # Create a function that, when called, generates a URL to the - # search controller. - # - # We'll call this one way if there is no query string in the - # request arguments, and another way if there is a query - # string. 
- make_url_kwargs = dict(list(facets.items())) - make_url = lambda: url_for( - "lane_search", - lane_identifier=lane_identifier, - library_short_name=library_short_name, - _external=True, - **make_url_kwargs, - ) - if not query: - # Send the search form - open_search_doc = OpenSearchDocument.for_lane(lane, make_url()) - headers = {"Content-Type": "application/opensearchdescription+xml"} - return Response(open_search_doc, 200, headers) - - # We have a query -- add it to the keyword arguments used when - # generating a URL. - make_url_kwargs["q"] = query.encode("utf8") - - # Run a search. - annotator = self.manager.annotator(lane, facets) - info = OpenSearchDocument.search_info(lane) - response = feed_class.search( - _db=self._db, - title=info["name"], - url=make_url(), - lane=lane, - search_engine=search_engine, - query=query, - annotator=annotator, - pagination=pagination, - facets=facets, - ) - if isinstance(response, ProblemDetail): - return response - return response.as_response( - mime_types=flask.request.accept_mimetypes, max_age=lane.max_cache_age() - ) - - def _qa_feed( - self, feed_factory, feed_title, controller_name, facet_class, worklist_factory - ): - """Create some kind of OPDS feed designed for consumption by an - automated QA process. - - :param feed_factory: This function will be called to create the feed. - It must either be AcquisitionFeed.groups or Acquisition.page, - or it must take the same arguments as those methods. - :param feed_title: String title of the feed. - :param controller_name: Controller name to use when generating - the URL to the feed. - :param facet_class: Faceting class to load (through - load_facets_from_request). - :param worklist_factory: Function that takes (Library, Facets) - and returns a Worklist configured to generate the feed. - :return: A ProblemDetail if there's a problem loading the faceting - object; otherwise the return value of `feed_factory`. - """ - library = flask.request.library - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - url = url_for( - controller_name, library_short_name=library.short_name, _external=True - ) - - facets = load_facets_from_request( - base_class=facet_class, default_entrypoint=EverythingEntryPoint - ) - if isinstance(facets, ProblemDetail): - return facets - - worklist = worklist_factory(library, facets) - annotator = self.manager.annotator(worklist) - - # Since this feed will be consumed by an automated client, and - # we're choosing titles for specific purposes, there's no - # reason to put more than a single item in each group. - pagination = Pagination(size=1) - return feed_factory( - _db=self._db, - title=feed_title, - url=url, - pagination=pagination, - worklist=worklist, - annotator=annotator, - search_engine=search_engine, - facets=facets, - max_age=0, - ) - - def qa_feed(self, feed_class=OPDSAcquisitionFeed): - """Create an OPDS feed containing the information necessary to - run a full set of integration tests against this server and - the vendors it relies on. - - :param feed_class: Class to substitute for AcquisitionFeed during - tests. 
- """ - - def factory(library, facets): - return JackpotWorkList(library, facets) - - return self._qa_feed( - feed_factory=feed_class.groups, - feed_title="QA test feed", - controller_name="qa_feed", - facet_class=JackpotFacets, - worklist_factory=factory, - ) - - def qa_series_feed(self, feed_class=OPDSAcquisitionFeed): - """Create an OPDS feed containing books that belong to _some_ - series, without regard to _which_ series. - - :param feed_class: Class to substitute for AcquisitionFeed during - tests. - """ - - def factory(library, facets): - wl = WorkList() - wl.initialize(library) - return wl - - return self._qa_feed( - feed_factory=feed_class.page, - feed_title="QA series test feed", - controller_name="qa_series_feed", - facet_class=HasSeriesFacets, - worklist_factory=factory, - ) - - -@define -class FeedRequestParameters: - """Frequently used request parameters for feed requests""" - - library: Library | None = None - pagination: Pagination | None = None - facets: Facets | None = None - problem: ProblemDetail | None = None - - -class LoanController(CirculationManagerController): - def sync(self): - """Sync the authenticated patron's loans and holds with all third-party - providers. - - :return: A Response containing an OPDS feed with up-to-date information. - """ - patron = flask.request.patron - - # Save some time if we don't believe the patron's loans or holds have - # changed since the last time the client requested this feed. - response = self.handle_conditional_request(patron.last_loan_activity_sync) - if isinstance(response, Response): - return response - - # TODO: SimplyE used to make a HEAD request to the bookshelf feed - # as a quick way of checking authentication. Does this still happen? - # It shouldn't -- the patron profile feed should be used instead. - # If it's not used, we can take this out. - if flask.request.method == "HEAD": - return Response() - - # First synchronize our local list of loans and holds with all - # third-party loan providers. - if patron.authorization_identifier: - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - try: - self.circulation.sync_bookshelf(patron, credential) - except Exception as e: - # If anything goes wrong, omit the sync step and just - # display the current active loans, as we understand them. - self.manager.log.error( - "ERROR DURING SYNC for %s: %r", patron.id, e, exc_info=e - ) - - # Then make the feed. - feed = OPDSAcquisitionFeed.active_loans_for(self.circulation, patron) - response = feed.as_response( - max_age=0, - private=True, - mime_types=flask.request.accept_mimetypes, - ) - - last_modified = patron.last_loan_activity_sync - if last_modified: - response.last_modified = last_modified - return response - - def borrow(self, identifier_type, identifier, mechanism_id=None): - """Create a new loan or hold for a book. - - :return: A Response containing an OPDS entry that includes a link of rel - "http://opds-spec.org/acquisition", which can be used to fetch the - book or the license file. - """ - patron = flask.request.patron - library = flask.request.library - - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - - result = self.best_lendable_pool( - library, patron, identifier_type, identifier, mechanism_id - ) - if not result: - # No LicensePools were found and no ProblemDetail - # was returned. Send a generic ProblemDetail. 
- return NO_LICENSES.detailed(_("I've never heard of this work.")) - if isinstance(result, ProblemDetail): - # There was a problem determining the appropriate - # LicensePool to use. - return result - - if isinstance(result, Loan): - # We already have a Loan, so there's no need to go to the API. - loan_or_hold = result - is_new = False - else: - # We need to actually go out to the API - # and try to take out a loan. - pool, mechanism = result - loan_or_hold, is_new = self._borrow(patron, credential, pool, mechanism) - - if isinstance(loan_or_hold, ProblemDetail): - return loan_or_hold - - # At this point we have either a loan or a hold. If a loan, serve - # a feed that tells the patron how to fulfill the loan. If a hold, - # serve a feed that talks about the hold. - response_kwargs = {} - if is_new: - response_kwargs["status"] = 201 - else: - response_kwargs["status"] = 200 - return OPDSAcquisitionFeed.single_entry_loans_feed( - self.circulation, loan_or_hold, **response_kwargs - ) - - def _borrow(self, patron, credential, pool, mechanism): - """Go out to the API, try to take out a loan, and handle errors as - problem detail documents. - - :param patron: The Patron who's trying to take out the loan - :param credential: A Credential to use when authenticating - as this Patron with the external API. - :param pool: The LicensePool for the book the Patron wants. - :mechanism: The DeliveryMechanism to request when asking for - a loan. - :return: a 2-tuple (result, is_new) `result` is a Loan (if one - could be created or found), a Hold (if a Loan could not be - created but a Hold could be), or a ProblemDetail (if the - entire operation failed). - """ - result = None - is_new = False - try: - loan, hold, is_new = self.circulation.borrow( - patron, credential, pool, mechanism - ) - result = loan or hold - except NoOpenAccessDownload as e: - result = NO_LICENSES.detailed( - _("Couldn't find an open-access download link for this book."), - status_code=404, - ) - except PatronAuthorizationFailedException as e: - result = INVALID_CREDENTIALS - except (PatronLoanLimitReached, PatronHoldLimitReached) as e: - result = e.as_problem_detail_document().with_debug(str(e)) - except DeliveryMechanismError as e: - result = BAD_DELIVERY_MECHANISM.with_debug( - str(e), status_code=e.status_code - ) - except OutstandingFines as e: - result = OUTSTANDING_FINES.detailed( - _( - "You must pay your $%(fine_amount).2f outstanding fines before you can borrow more books.", - fine_amount=patron.fines, - ) - ) - except AuthorizationExpired as e: - result = e.as_problem_detail_document(debug=False) - except AuthorizationBlocked as e: - result = e.as_problem_detail_document(debug=False) - except CannotLoan as e: - result = CHECKOUT_FAILED.with_debug(str(e)) - except CannotHold as e: - result = HOLD_FAILED.with_debug(str(e)) - except CannotRenew as e: - result = RENEW_FAILED.with_debug(str(e)) - except NotFoundOnRemote as e: - result = NOT_FOUND_ON_REMOTE - except CirculationException as e: - # Generic circulation error. - result = CHECKOUT_FAILED.with_debug(str(e)) - - if result is None: - # This shouldn't happen, but if it does, it means no exception - # was raised but we just didn't get a loan or hold. Return a - # generic circulation error. - result = HOLD_FAILED - return result, is_new - - def best_lendable_pool( - self, library, patron, identifier_type, identifier, mechanism_id - ): - """ - Of the available LicensePools for the given Identifier, return the - one that's the best candidate for loaning out right now. 
- - :return: A Loan if this patron already has an active loan, otherwise a LicensePool. - """ - # Turn source + identifier into a set of LicensePools - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - # Something went wrong. - return pools - - best = None - mechanism = None - problem_doc = None - - existing_loans = ( - self._db.query(Loan) - .filter( - Loan.license_pool_id.in_([lp.id for lp in pools]), Loan.patron == patron - ) - .all() - ) - if existing_loans: - # The patron already has at least one loan on this book already. - # To make the "borrow" operation idempotent, return one of - # those loans instead of an error. - return existing_loans[0] - - # We found a number of LicensePools. Try to locate one that - # we can actually loan to the patron. - for pool in pools: - problem_doc = self.apply_borrowing_policy(patron, pool) - if problem_doc: - # As a matter of policy, the patron is not allowed to borrow - # this book. - continue - - # Beyond this point we know that site policy does not prohibit - # us from lending this pool to this patron. - - if mechanism_id: - # But the patron has requested a license pool that - # supports a specific delivery mechanism. This pool - # must offer that mechanism. - mechanism = self.load_licensepooldelivery(pool, mechanism_id) - if isinstance(mechanism, ProblemDetail): - problem_doc = mechanism - continue - - # Beyond this point we have a license pool that we can - # actually loan or put on hold. - - # But there might be many such LicensePools, and we want - # to pick the one that will get the book to the patron - # with the shortest wait. - if ( - not best - or pool.licenses_available > best.licenses_available - or pool.patrons_in_hold_queue < best.patrons_in_hold_queue - ): - best = pool - - if not best: - # We were unable to find any LicensePool that fit the - # criteria. - return problem_doc - return best, mechanism - - def fulfill( - self, - license_pool_id: int, - mechanism_id: int | None = None, - do_get: Any | None = None, - ) -> wkResponse | ProblemDetail: - """Fulfill a book that has already been checked out, - or which can be fulfilled with no active loan. - - If successful, this will serve the patron a downloadable copy - of the book, a key (such as a DRM license file or bearer - token) which can be used to get the book, or an OPDS entry - containing a link to the book. - - :param license_pool_id: Database ID of a LicensePool. - :param mechanism_id: Database ID of a DeliveryMechanism. - """ - do_get = do_get or Representation.simple_http_get - - # Unlike most controller methods, this one has different - # behavior whether or not the patron is authenticated. This is - # why we're about to do something we don't usually do--call - # authenticated_patron_from_request from within a controller - # method. - authentication_response = self.authenticated_patron_from_request() - if isinstance(authentication_response, Patron): - # The patron is authenticated. - patron = authentication_response - else: - # The patron is not authenticated, either due to bad credentials - # (in which case authentication_response is a Response) - # or due to an integration error with the auth provider (in - # which case it is a ProblemDetail). - # - # There's still a chance this request can succeed, but if not, - # we'll be sending out authentication_response. 
- patron = None - library = flask.request.library # type: ignore - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - - # Turn source + identifier into a LicensePool. - pool = self.load_licensepool(license_pool_id) - if isinstance(pool, ProblemDetail): - return pool - - loan, loan_license_pool = self.get_patron_loan(patron, [pool]) - - requested_license_pool = loan_license_pool or pool - - # Find the LicensePoolDeliveryMechanism they asked for. - mechanism = None - if mechanism_id: - mechanism = self.load_licensepooldelivery( - requested_license_pool, mechanism_id - ) - if isinstance(mechanism, ProblemDetail): - return mechanism - - if (not loan or not loan_license_pool) and not ( - self.can_fulfill_without_loan( - library, patron, requested_license_pool, mechanism - ) - ): - if patron: - # Since a patron was identified, the problem is they have - # no active loan. - return NO_ACTIVE_LOAN.detailed( - _("You have no active loan for this title.") - ) - else: - # Since no patron was identified, the problem is - # whatever problem was revealed by the earlier - # authenticated_patron_from_request() call -- either the - # patron didn't authenticate or there's a problem - # integrating with the auth provider. - return authentication_response - - if not mechanism: - # See if the loan already has a mechanism set. We can use that. - if loan and loan.fulfillment: - mechanism = loan.fulfillment - else: - return BAD_DELIVERY_MECHANISM.detailed( - _("You must specify a delivery mechanism to fulfill this loan.") - ) - - try: - fulfillment = self.circulation.fulfill( - patron, - credential, - requested_license_pool, - mechanism, - ) - except DeliveryMechanismConflict as e: - return DELIVERY_CONFLICT.detailed(str(e)) - except NoActiveLoan as e: - return NO_ACTIVE_LOAN.detailed( - _("Can't fulfill loan because you have no active loan for this book."), - status_code=e.status_code, - ) - except FormatNotAvailable as e: - return NO_ACCEPTABLE_FORMAT.with_debug(str(e), status_code=e.status_code) - except CannotFulfill as e: - return CANNOT_FULFILL.with_debug(str(e), status_code=e.status_code) - except DeliveryMechanismError as e: - return BAD_DELIVERY_MECHANISM.with_debug(str(e), status_code=e.status_code) - - # A subclass of FulfillmentInfo may want to bypass the whole - # response creation process. - response = fulfillment.as_response - if response is not None: - return response - - headers = dict() - encoding_header = dict() - if ( - fulfillment.data_source_name == DataSource.ENKI - and mechanism.delivery_mechanism.drm_scheme_media_type - == DeliveryMechanism.NO_DRM - ): - encoding_header["Accept-Encoding"] = "deflate" - - if mechanism.delivery_mechanism.is_streaming: - # If this is a streaming delivery mechanism, create an OPDS entry - # with a fulfillment link to the streaming reader url. 
- feed = OPDSAcquisitionFeed.single_entry_loans_feed( - self.circulation, loan, fulfillment=fulfillment - ) - if isinstance(feed, ProblemDetail): - # This should typically never happen, since we've gone through the entire fulfill workflow - # But for the sake of return-type completeness we are adding this here - return feed - if isinstance(feed, Response): - return feed - else: - content = etree.tostring(feed) - status_code = 200 - headers["Content-Type"] = OPDSFeed.ACQUISITION_FEED_TYPE - elif fulfillment.content_link_redirect is True: - # The fulfillment API has asked us to not be a proxy and instead redirect the client directly - return redirect(fulfillment.content_link) - else: - content = fulfillment.content - if fulfillment.content_link: - # If we have a link to the content on a remote server, web clients may not - # be able to access it if the remote server does not support CORS requests. - - # If the pool is open access though, the web client can link directly to the - # file to download it, so it's safe to redirect. - if requested_license_pool.open_access: - return redirect(fulfillment.content_link) - - # Otherwise, we need to fetch the content and return it instead - # of redirecting to it, since it may be downloaded through an - # indirect acquisition link. - try: - status_code, headers, content = do_get( - fulfillment.content_link, headers=encoding_header - ) - headers = dict(headers) - except RemoteIntegrationException as e: - return e.as_problem_detail_document(debug=False) - else: - status_code = 200 - if fulfillment.content_type: - headers["Content-Type"] = fulfillment.content_type - - return Response(response=content, status=status_code, headers=headers) - - def can_fulfill_without_loan(self, library, patron, pool, lpdm): - """Is it acceptable to fulfill the given LicensePoolDeliveryMechanism - for the given Patron without creating a Loan first? - - This question is usually asked because no Patron has been - authenticated, and thus no Loan can be created, but somebody - wants a book anyway. - - :param library: A Library. - :param patron: A Patron, probably None. - :param lpdm: A LicensePoolDeliveryMechanism. - """ - authenticator = self.manager.auth.library_authenticators.get(library.short_name) - if authenticator and authenticator.identifies_individuals: - # This library identifies individual patrons, so there is - # no reason to fulfill books without a loan. Even if the - # books are free and the 'loans' are nominal, having a - # Loan object makes it possible for a patron to sync their - # collection across devices, so that's the way we do it. - return False - - # If the library doesn't require that individual patrons - # identify themselves, it's up to the CirculationAPI object. - # Most of them will say no. (This would indicate that the - # collection is improperly associated with a library that - # doesn't identify its patrons.) 
- return self.circulation.can_fulfill_without_loan(patron, pool, lpdm) - - def revoke(self, license_pool_id): - patron = flask.request.patron - pool = self.load_licensepool(license_pool_id) - if isinstance(pool, ProblemDetail): - return pool - - loan, _ignore = self.get_patron_loan(patron, [pool]) - - if loan: - hold = None - else: - hold, _ignore = self.get_patron_hold(patron, [pool]) - - if not loan and not hold: - if not pool.work: - title = "this book" - else: - title = '"%s"' % pool.work.title - return NO_ACTIVE_LOAN_OR_HOLD.detailed( - _( - 'Can\'t revoke because you have no active loan or hold for "%(title)s".', - title=title, - ), - status_code=404, - ) - - header = self.authorization_header() - credential = self.manager.auth.get_credential_from_header(header) - if loan: - try: - self.circulation.revoke_loan(patron, credential, pool) - except RemoteRefusedReturn as e: - title = _( - "Loan deleted locally but remote refused. Loan is likely to show up again on next sync." - ) - return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, status_code=503) - except CannotReturn as e: - title = _("Loan deleted locally but remote failed.") - return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, 503).with_debug( - str(e) - ) - elif hold: - if not self.circulation.can_revoke_hold(pool, hold): - title = _("Cannot release a hold once it enters reserved state.") - return CANNOT_RELEASE_HOLD.detailed(title, 400) - try: - self.circulation.release_hold(patron, credential, pool) - except CannotReleaseHold as e: - title = _("Hold released locally but remote failed.") - return CANNOT_RELEASE_HOLD.detailed(title, 503).with_debug(str(e)) - - work = pool.work - annotator = self.manager.annotator(None) - return OPDSAcquisitionFeed.entry_as_response( - OPDSAcquisitionFeed.single_entry(work, annotator) - ) - - def detail(self, identifier_type, identifier): - if flask.request.method == "DELETE": - return self.revoke_loan_or_hold(identifier_type, identifier) - - patron = flask.request.patron - library = flask.request.library - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - return pools - - loan, pool = self.get_patron_loan(patron, pools) - if loan: - hold = None - else: - hold, pool = self.get_patron_hold(patron, pools) - - if not loan and not hold: - return NO_ACTIVE_LOAN_OR_HOLD.detailed( - _( - 'You have no active loan or hold for "%(title)s".', - title=pool.work.title, - ), - status_code=404, - ) - - if flask.request.method == "GET": - if loan: - item = loan - else: - item = hold - return OPDSAcquisitionFeed.single_entry_loans_feed(self.circulation, item) - - -class AnnotationController(CirculationManagerController): - def container(self, identifier=None, accept_post=True): - headers = dict() - if accept_post: - headers["Allow"] = "GET,HEAD,OPTIONS,POST" - headers["Accept-Post"] = AnnotationWriter.CONTENT_TYPE - else: - headers["Allow"] = "GET,HEAD,OPTIONS" - - if flask.request.method == "HEAD": - return Response(status=200, headers=headers) - - patron = flask.request.patron - - if flask.request.method == "GET": - headers["Link"] = [ - '; rel="type"', - '; rel="http://www.w3.org/ns/ldp#constrainedBy"', - ] - headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE - - container, timestamp = AnnotationWriter.annotation_container_for( - patron, identifier=identifier - ) - etag = 'W/""' - if timestamp: - etag = 'W/"%s"' % timestamp - headers["Last-Modified"] = format_date_time( - mktime(timestamp.timetuple()) - ) - headers["ETag"] = etag - - content = 
json.dumps(container) - return Response(content, status=200, headers=headers) - - data = flask.request.data - annotation = AnnotationParser.parse(self._db, data, patron) - - if isinstance(annotation, ProblemDetail): - return annotation - - content = json.dumps(AnnotationWriter.detail(annotation)) - status_code = 200 - headers["Link"] = '; rel="type"' - headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE - return Response(content, status_code, headers) - - def container_for_work(self, identifier_type, identifier): - id_obj, ignore = Identifier.for_foreign_id( - self._db, identifier_type, identifier - ) - return self.container(identifier=id_obj, accept_post=False) - - def detail(self, annotation_id): - headers = dict() - headers["Allow"] = "GET,HEAD,OPTIONS,DELETE" - - if flask.request.method == "HEAD": - return Response(status=200, headers=headers) - - patron = flask.request.patron - - annotation = get_one( - self._db, Annotation, patron=patron, id=annotation_id, active=True - ) - - if not annotation: - return NO_ANNOTATION - - if flask.request.method == "DELETE": - annotation.set_inactive() - return Response() - - content = json.dumps(AnnotationWriter.detail(annotation)) - status_code = 200 - headers["Link"] = '; rel="type"' - headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE - return Response(content, status_code, headers) - - -class WorkController(CirculationManagerController): - def _lane_details(self, languages, audiences): - if languages: - languages = languages.split(",") - if audiences: - audiences = [urllib.parse.unquote_plus(a) for a in audiences.split(",")] - - return languages, audiences - - def contributor( - self, contributor_name, languages, audiences, feed_class=OPDSAcquisitionFeed - ): - """Serve a feed of books written by a particular author""" - library = flask.request.library - if not contributor_name: - return NO_SUCH_LANE.detailed(_("No contributor provided")) - - # contributor_name is probably a display_name, but it could be a - # sort_name. Pass it in for both fields and - # ContributorData.lookup() will do its best to figure it out. - contributor = ContributorData.lookup( - self._db, sort_name=contributor_name, display_name=contributor_name - ) - if not contributor: - return NO_SUCH_LANE.detailed( - _("Unknown contributor: %s") % contributor_name - ) - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - languages, audiences = self._lane_details(languages, audiences) - - lane = ContributorLane( - library, contributor, languages=languages, audiences=audiences - ) - facets = self.manager.load_facets_from_request( - worklist=lane, base_class=ContributorFacets - ) - if isinstance(facets, ProblemDetail): - return facets - - pagination = load_pagination_from_request(SortKeyPagination) - if isinstance(pagination, ProblemDetail): - return pagination - - annotator = self.manager.annotator(lane, facets) - - url = annotator.feed_url( - lane, - facets=facets, - pagination=pagination, - ) - - return feed_class.page( - _db=self._db, - title=lane.display_name, - url=url, - worklist=lane, - facets=facets, - pagination=pagination, - annotator=annotator, - search_engine=search_engine, - ).as_response( - max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes - ) - - def permalink(self, identifier_type, identifier): - """Serve an entry for a single book. - - This does not include any loan or hold-specific information for - the authenticated patron. 
- - This is different from the /works lookup protocol, in that it - returns a single entry while the /works lookup protocol returns a - feed containing any number of entries. - """ - library = flask.request.library - work = self.load_work(library, identifier_type, identifier) - if isinstance(work, ProblemDetail): - return work - - patron = flask.request.patron - - if patron: - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - return pools - - loan, pool = self.get_patron_loan(patron, pools) - hold = None - - if not loan: - hold, pool = self.get_patron_hold(patron, pools) - - item = loan or hold - pool = pool or pools[0] - - return OPDSAcquisitionFeed.single_entry_loans_feed( - self.circulation, item or pool - ) - else: - annotator = self.manager.annotator(lane=None) - - return OPDSAcquisitionFeed.entry_as_response( - OPDSAcquisitionFeed.single_entry(work, annotator), - max_age=OPDSFeed.DEFAULT_MAX_AGE, - ) - - def related( - self, - identifier_type, - identifier, - novelist_api=None, - feed_class=OPDSAcquisitionFeed, - ): - """Serve a groups feed of books related to a given book.""" - - library = flask.request.library - work = self.load_work(library, identifier_type, identifier) - if work is None: - return NOT_FOUND_ON_REMOTE - - if isinstance(work, ProblemDetail): - return work - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - try: - lane_name = f"Books Related to {work.title} by {work.author}" - lane = RelatedBooksLane(library, work, lane_name, novelist_api=novelist_api) - except ValueError as e: - # No related books were found. - return NO_SUCH_LANE.detailed(str(e)) - - facets = self.manager.load_facets_from_request( - worklist=lane, - base_class=FeaturedFacets, - base_class_constructor_kwargs=dict( - minimum_featured_quality=library.settings.minimum_featured_quality - ), - ) - if isinstance(facets, ProblemDetail): - return facets - - annotator = self.manager.annotator(lane) - url = annotator.feed_url( - lane, - facets=facets, - ) - - return feed_class.groups( - _db=self._db, - title=lane.DISPLAY_NAME, - url=url, - worklist=lane, - annotator=annotator, - pagination=None, - facets=facets, - search_engine=search_engine, - ).as_response( - max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes - ) - - def recommendations( - self, - identifier_type, - identifier, - novelist_api=None, - feed_class=OPDSAcquisitionFeed, - ): - """Serve a feed of recommendations related to a given book.""" - - library = flask.request.library - work = self.load_work(library, identifier_type, identifier) - if isinstance(work, ProblemDetail): - return work - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - lane_name = f"Recommendations for {work.title} by {work.author}" - try: - lane = RecommendationLane( - library=library, - work=work, - display_name=lane_name, - novelist_api=novelist_api, - ) - except CannotLoadConfiguration as e: - # NoveList isn't configured. - return NO_SUCH_LANE.detailed(_("Recommendations not available")) - - facets = self.manager.load_facets_from_request(worklist=lane) - if isinstance(facets, ProblemDetail): - return facets - - # We use a normal Pagination object because recommendations - # are looked up in a third-party API and paginated through the - # database lookup. 
- pagination = load_pagination_from_request(Pagination) - if isinstance(pagination, ProblemDetail): - return pagination - - annotator = self.manager.annotator(lane) - url = annotator.feed_url( - lane, - facets=facets, - pagination=pagination, - ) - - return feed_class.page( - _db=self._db, - title=lane.DISPLAY_NAME, - url=url, - worklist=lane, - facets=facets, - pagination=pagination, - annotator=annotator, - search_engine=search_engine, - ).as_response(max_age=lane.max_cache_age()) - - def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFeed): - """Serve a feed of books in a given series.""" - library = flask.request.library - if not series_name: - return NO_SUCH_LANE.detailed(_("No series provided")) - - search_engine = self.search_engine - if isinstance(search_engine, ProblemDetail): - return search_engine - - languages, audiences = self._lane_details(languages, audiences) - lane = SeriesLane( - library, series_name=series_name, languages=languages, audiences=audiences - ) - - facets = self.manager.load_facets_from_request( - worklist=lane, base_class=SeriesFacets - ) - if isinstance(facets, ProblemDetail): - return facets - - pagination = load_pagination_from_request(SortKeyPagination) - if isinstance(pagination, ProblemDetail): - return pagination - - annotator = self.manager.annotator(lane) - - url = annotator.feed_url(lane, facets=facets, pagination=pagination) - return feed_class.page( - _db=self._db, - title=lane.display_name, - url=url, - worklist=lane, - facets=facets, - pagination=pagination, - annotator=annotator, - search_engine=search_engine, - ).as_response( - max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes - ) - - -class ProfileController(CirculationManagerController): - """Implement the User Profile Management Protocol.""" - - def _controller(self, patron): - """Instantiate a CoreProfileController that actually does the work.""" - storage = CirculationPatronProfileStorage(patron, flask.url_for) - return CoreProfileController(storage) - - def protocol(self): - """Handle a UPMP request.""" - patron = flask.request.patron - controller = self._controller(patron) - if flask.request.method == "GET": - result = controller.get() - else: - result = controller.put(flask.request.headers, flask.request.data) - if isinstance(result, ProblemDetail): - return result - return make_response(*result) - - -class DeviceTokensController(CirculationManagerController): - def get_patron_device(self): - patron = flask.request.patron - device_token = flask.request.args["device_token"] - token: DeviceToken = ( - self._db.query(DeviceToken) - .filter( - DeviceToken.patron_id == patron.id, - DeviceToken.device_token == device_token, - ) - .first() - ) - if not token: - return DEVICE_TOKEN_NOT_FOUND - return dict(token_type=token.token_type, device_token=token.device_token), 200 - - def create_patron_device(self): - patron = flask.request.patron - device_token = flask.request.json["device_token"] - token_type = flask.request.json["token_type"] - - try: - device = DeviceToken.create(self._db, token_type, device_token, patron) - except InvalidTokenTypeError: - return DEVICE_TOKEN_TYPE_INVALID - except DuplicateDeviceTokenError: - return dict(exists=True), 200 - - return "", 201 - - def delete_patron_device(self): - patron = flask.request.patron - device_token = flask.request.json["device_token"] - token_type = flask.request.json["token_type"] - - try: - device: DeviceToken = ( - self._db.query(DeviceToken) - .filter( - DeviceToken.patron == patron, - 
DeviceToken.device_token == device_token, - DeviceToken.token_type == token_type, - ) - .one() - ) - self._db.delete(device) - except NoResultFound: - return DEVICE_TOKEN_NOT_FOUND - - return Response("", 204) - - -class URNLookupController(CoreURNLookupController): - def __init__(self, manager): - self.manager = manager - super().__init__(manager._db) - - def work_lookup(self, route_name): - """Build a CirculationManagerAnnotor based on the current library's - top-level WorkList, and use it to generate an OPDS lookup - feed. - """ - library = flask.request.library - top_level_worklist = self.manager.top_level_lanes[library.id] - annotator = CirculationManagerAnnotator(top_level_worklist) - return super().work_lookup(annotator, route_name) - - -class AnalyticsController(CirculationManagerController): - def track_event(self, identifier_type, identifier, event_type): - # TODO: It usually doesn't matter, but there should be - # a way to distinguish between different LicensePools for the - # same book. - if event_type in CirculationEvent.CLIENT_EVENTS: - library = flask.request.library - # Authentication on the AnalyticsController is optional, - # so flask.request.patron may or may not be set. - patron = getattr(flask.request, "patron", None) - neighborhood = None - if patron: - neighborhood = getattr(patron, "neighborhood", None) - pools = self.load_licensepools(library, identifier_type, identifier) - if isinstance(pools, ProblemDetail): - return pools - self.manager.analytics.collect_event( - library, pools[0], event_type, utc_now(), neighborhood=neighborhood - ) - return Response({}, 200) - else: - return INVALID_ANALYTICS_EVENT_TYPE - - -class PlaytimeEntriesController(CirculationManagerController): - def track_playtimes(self, collection_id, identifier_type, identifier_idn): - library: Library = flask.request.library - identifier = get_one( - self._db, Identifier, type=identifier_type, identifier=identifier_idn - ) - collection = Collection.by_id(self._db, collection_id) - - if not identifier: - return NOT_FOUND_ON_REMOTE.detailed( - f"The identifier {identifier_type}/{identifier_idn} was not found." - ) - if not collection: - return NOT_FOUND_ON_REMOTE.detailed( - f"The collection {collection_id} was not found." - ) - - if collection not in library.collections: - return INVALID_INPUT.detailed("Collection was not found in the Library.") - - if not identifier.licensed_through_collection(collection): - return INVALID_INPUT.detailed( - "This Identifier was not found in the Collection." - ) - - try: - data = PlaytimeEntriesPost(**flask.request.json) - except ValidationError as ex: - return INVALID_INPUT.detailed(ex.json()) - - responses, summary = PlaytimeEntries.insert_playtime_entries( - self._db, identifier, collection, library, data - ) - - response_data = PlaytimeEntriesPostResponse( - summary=summary, responses=responses - ) - response = flask.jsonify(response_data.dict()) - response.status_code = 207 - return response - - -class ODLNotificationController(CirculationManagerController): - """Receive notifications from an ODL distributor when the - status of a loan changes. 
- """ - - def notify(self, loan_id): - library = flask.request.library - status_doc = flask.request.data - loan = get_one(self._db, Loan, id=loan_id) - - if not loan: - return NO_ACTIVE_LOAN.detailed(_("No loan was found for this identifier.")) - - collection = loan.license_pool.collection - if collection.protocol not in (ODLAPI.label(), ODL2API.label()): - return INVALID_LOAN_FOR_ODL_NOTIFICATION - - api = self.manager.circulation_apis[library.id].api_for_license_pool( - loan.license_pool - ) - api.update_loan(loan, json.loads(status_doc)) - return Response(_("Success"), 200) - - -class StaticFileController(CirculationManagerController): - def static_file(self, directory, filename): - max_age = ConfigurationSetting.sitewide( - self._db, Configuration.STATIC_FILE_CACHE_TIME - ).int_value - return flask.send_from_directory(directory, filename, max_age=max_age) - - def image(self, filename): - directory = os.path.join( - os.path.abspath(os.path.dirname(__file__)), "..", "resources", "images" - ) - return self.static_file(directory, filename) - - -class PatronAuthTokenController(CirculationManagerController): - def get_token(self): - """Create a Patron Auth access token for an authenticated patron""" - patron = flask.request.patron - auth = flask.request.authorization - token_expiry = 3600 - - if not patron or auth.type.lower() != "basic": - return PATRON_AUTH_ACCESS_TOKEN_NOT_POSSIBLE - - try: - token = AccessTokenProvider.generate_token( - self._db, - patron, - auth["password"], - expires_in=token_expiry, - ) - except ProblemError as ex: - logging.getLogger(self.__class__.__name__).error( - f"Could not generate Patron Auth Access Token: {ex}" - ) - return ex.problem_detail - - return PatronAuthAccessToken( - access_token=token, expires_in=token_expiry, token_type="Bearer" - ).api_dict() diff --git a/api/controller/analytics.py b/api/controller/analytics.py new file mode 100644 index 0000000000..17a4bc21e1 --- /dev/null +++ b/api/controller/analytics.py @@ -0,0 +1,34 @@ +from __future__ import annotations + +import flask +from flask import Response + +from api.controller.circulation_manager import CirculationManagerController +from api.problem_details import INVALID_ANALYTICS_EVENT_TYPE +from core.model import CirculationEvent +from core.util.datetime_helpers import utc_now +from core.util.problem_detail import ProblemDetail + + +class AnalyticsController(CirculationManagerController): + def track_event(self, identifier_type, identifier, event_type): + # TODO: It usually doesn't matter, but there should be + # a way to distinguish between different LicensePools for the + # same book. + if event_type in CirculationEvent.CLIENT_EVENTS: + library = flask.request.library + # Authentication on the AnalyticsController is optional, + # so flask.request.patron may or may not be set. 
+ patron = getattr(flask.request, "patron", None) + neighborhood = None + if patron: + neighborhood = getattr(patron, "neighborhood", None) + pools = self.load_licensepools(library, identifier_type, identifier) + if isinstance(pools, ProblemDetail): + return pools + self.manager.analytics.collect_event( + library, pools[0], event_type, utc_now(), neighborhood=neighborhood + ) + return Response({}, 200) + else: + return INVALID_ANALYTICS_EVENT_TYPE diff --git a/api/controller/annotation.py b/api/controller/annotation.py new file mode 100644 index 0000000000..64eabbcf97 --- /dev/null +++ b/api/controller/annotation.py @@ -0,0 +1,94 @@ +from __future__ import annotations + +import json +from time import mktime +from wsgiref.handlers import format_date_time + +import flask +from flask import Response + +from api.annotations import AnnotationParser, AnnotationWriter +from api.controller.circulation_manager import CirculationManagerController +from api.problem_details import NO_ANNOTATION +from core.model import Annotation, Identifier, get_one +from core.util.problem_detail import ProblemDetail + + +class AnnotationController(CirculationManagerController): + def container(self, identifier=None, accept_post=True): + headers = dict() + if accept_post: + headers["Allow"] = "GET,HEAD,OPTIONS,POST" + headers["Accept-Post"] = AnnotationWriter.CONTENT_TYPE + else: + headers["Allow"] = "GET,HEAD,OPTIONS" + + if flask.request.method == "HEAD": + return Response(status=200, headers=headers) + + patron = flask.request.patron + + if flask.request.method == "GET": + headers["Link"] = [ + '; rel="type"', + '; rel="http://www.w3.org/ns/ldp#constrainedBy"', + ] + headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE + + container, timestamp = AnnotationWriter.annotation_container_for( + patron, identifier=identifier + ) + etag = 'W/""' + if timestamp: + etag = 'W/"%s"' % timestamp + headers["Last-Modified"] = format_date_time( + mktime(timestamp.timetuple()) + ) + headers["ETag"] = etag + + content = json.dumps(container) + return Response(content, status=200, headers=headers) + + data = flask.request.data + annotation = AnnotationParser.parse(self._db, data, patron) + + if isinstance(annotation, ProblemDetail): + return annotation + + content = json.dumps(AnnotationWriter.detail(annotation)) + status_code = 200 + headers["Link"] = '; rel="type"' + headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE + return Response(content, status_code, headers) + + def container_for_work(self, identifier_type, identifier): + id_obj, ignore = Identifier.for_foreign_id( + self._db, identifier_type, identifier + ) + return self.container(identifier=id_obj, accept_post=False) + + def detail(self, annotation_id): + headers = dict() + headers["Allow"] = "GET,HEAD,OPTIONS,DELETE" + + if flask.request.method == "HEAD": + return Response(status=200, headers=headers) + + patron = flask.request.patron + + annotation = get_one( + self._db, Annotation, patron=patron, id=annotation_id, active=True + ) + + if not annotation: + return NO_ANNOTATION + + if flask.request.method == "DELETE": + annotation.set_inactive() + return Response() + + content = json.dumps(AnnotationWriter.detail(annotation)) + status_code = 200 + headers["Link"] = '; rel="type"' + headers["Content-Type"] = AnnotationWriter.CONTENT_TYPE + return Response(content, status_code, headers) diff --git a/api/base_controller.py b/api/controller/base.py similarity index 100% rename from api/base_controller.py rename to api/controller/base.py diff --git 
a/api/controller/circulation_manager.py b/api/controller/circulation_manager.py new file mode 100644 index 0000000000..79aafc1170 --- /dev/null +++ b/api/controller/circulation_manager.py @@ -0,0 +1,281 @@ +from __future__ import annotations + +import email + +import flask +import pytz +from flask import Response +from flask_babel import lazy_gettext as _ +from sqlalchemy import select +from sqlalchemy.orm import Session, eagerload + +from api.controller.base import BaseCirculationManagerController +from api.problem_details import ( + BAD_DELIVERY_MECHANISM, + FORBIDDEN_BY_POLICY, + NO_LICENSES, + NO_SUCH_LANE, + NOT_AGE_APPROPRIATE, + REMOTE_INTEGRATION_FAILED, +) +from core.lane import Lane, WorkList +from core.model import ( + Collection, + Hold, + Identifier, + IntegrationConfiguration, + IntegrationLibraryConfiguration, + LicensePool, + LicensePoolDeliveryMechanism, + Loan, + get_one, +) +from core.problem_details import INVALID_INPUT +from core.util.problem_detail import ProblemDetail + + +class CirculationManagerController(BaseCirculationManagerController): + def get_patron_circ_objects(self, object_class, patron, license_pools): + if not patron: + return [] + pool_ids = [pool.id for pool in license_pools] + + return ( + self._db.query(object_class) + .filter( + object_class.patron_id == patron.id, + object_class.license_pool_id.in_(pool_ids), + ) + .options(eagerload(object_class.license_pool)) + .all() + ) + + def get_patron_loan(self, patron, license_pools): + loans = self.get_patron_circ_objects(Loan, patron, license_pools) + if loans: + loan = loans[0] + return loan, loan.license_pool + return None, None + + def get_patron_hold(self, patron, license_pools): + holds = self.get_patron_circ_objects(Hold, patron, license_pools) + if holds: + hold = holds[0] + return hold, hold.license_pool + return None, None + + @property + def circulation(self): + """Return the appropriate CirculationAPI for the request Library.""" + library_id = flask.request.library.id + return self.manager.circulation_apis[library_id] + + @property + def search_engine(self): + """Return the configured external search engine, or a + ProblemDetail if none is configured. + """ + search_engine = self.manager.external_search + if not search_engine: + return REMOTE_INTEGRATION_FAILED.detailed( + _("The search index for this site is not properly configured.") + ) + return search_engine + + def handle_conditional_request(self, last_modified=None): + """Handle a conditional HTTP request. + + :param last_modified: A datetime representing the time this + resource was last modified. + + :return: a Response, if the incoming request can be handled + conditionally. Otherwise, None. + """ + if not last_modified: + return None + + # If-Modified-Since values have resolution of one second. If + # last_modified has millisecond resolution, change its + # resolution to one second. + if last_modified.microsecond: + last_modified = last_modified.replace(microsecond=0) + + if_modified_since = flask.request.headers.get("If-Modified-Since") + if not if_modified_since: + return None + + try: + parsed_if_modified_since = email.utils.parsedate_to_datetime( + if_modified_since + ) + except TypeError: + # Parse error <= Python 3.9 + return None + except ValueError: + # Parse error >= Python 3.10 + return None + if not parsed_if_modified_since: + return None + + # "[I]f the date is conforming to the RFCs it will represent a + # time in UTC but with no indication of the actual source + # timezone of the message the date comes from." 
+ if parsed_if_modified_since.tzinfo is None: + parsed_if_modified_since = parsed_if_modified_since.replace(tzinfo=pytz.UTC) + + if parsed_if_modified_since >= last_modified: + return Response(status=304) + return None + + def load_lane(self, lane_identifier): + """Turn user input into a Lane object.""" + library_id = flask.request.library.id + + lane = None + if lane_identifier is None: + # Return the top-level lane. + lane = self.manager.top_level_lanes[library_id] + if isinstance(lane, Lane): + lane = self._db.merge(lane) + elif isinstance(lane, WorkList): + lane.children = [self._db.merge(child) for child in lane.children] + else: + try: + lane_identifier = int(lane_identifier) + except ValueError as e: + pass + + if isinstance(lane_identifier, int): + lane = get_one( + self._db, Lane, id=lane_identifier, library_id=library_id + ) + + if lane and not lane.accessible_to(self.request_patron): + # The authenticated patron cannot access the lane they + # requested. Act like the lane does not exist. + lane = None + + if not lane: + return NO_SUCH_LANE.detailed( + _( + "Lane %(lane_identifier)s does not exist or is not associated with library %(library_id)s", + lane_identifier=lane_identifier, + library_id=library_id, + ) + ) + + return lane + + def load_work(self, library, identifier_type, identifier): + pools = self.load_licensepools(library, identifier_type, identifier) + if isinstance(pools, ProblemDetail): + return pools + + # We know there is at least one LicensePool, and all LicensePools + # for an Identifier have the same Work. + work = pools[0].work + + if work and not work.age_appropriate_for_patron(self.request_patron): + # This work is not age-appropriate for the authenticated + # patron. Don't show it. + work = NOT_AGE_APPROPRIATE + return work + + def load_licensepools(self, library, identifier_type, identifier): + """Turn user input into one or more LicensePool objects. + + :param library: The LicensePools must be associated with one of this + Library's Collections. + :param identifier_type: A type of identifier, e.g. "ISBN" + :param identifier: An identifier string, used with `identifier_type` + to look up an Identifier. 
+ """ + _db = Session.object_session(library) + pools = ( + _db.scalars( + select(LicensePool) + .join(Collection, LicensePool.collection_id == Collection.id) + .join(Identifier, LicensePool.identifier_id == Identifier.id) + .join( + IntegrationConfiguration, + Collection.integration_configuration_id + == IntegrationConfiguration.id, + ) + .join( + IntegrationLibraryConfiguration, + IntegrationConfiguration.id + == IntegrationLibraryConfiguration.parent_id, + ) + .where( + Identifier.type == identifier_type, + Identifier.identifier == identifier, + IntegrationLibraryConfiguration.library_id == library.id, + ) + ) + .unique() + .all() + ) + if not pools: + return NO_LICENSES.detailed( + _("The item you're asking about (%s/%s) isn't in this collection.") + % (identifier_type, identifier) + ) + return pools + + def load_licensepool(self, license_pool_id): + """Turns user input into a LicensePool""" + license_pool = get_one(self._db, LicensePool, id=license_pool_id) + if not license_pool: + return INVALID_INPUT.detailed( + _("License Pool #%s does not exist.") % license_pool_id + ) + + return license_pool + + def load_licensepooldelivery(self, pool, mechanism_id): + """Turn user input into a LicensePoolDeliveryMechanism object.""" + mechanism = get_one( + self._db, + LicensePoolDeliveryMechanism, + data_source=pool.data_source, + identifier=pool.identifier, + delivery_mechanism_id=mechanism_id, + on_multiple="interchangeable", + ) + return mechanism or BAD_DELIVERY_MECHANISM + + def apply_borrowing_policy(self, patron, license_pool): + """Apply the borrowing policy of the patron's library to the + book they're trying to check out. + + This prevents a patron from borrowing an age-inappropriate book + or from placing a hold in a library that prohibits holds. + + Generally speaking, both of these operations should be + prevented before they get to this point; this is an extra + layer of protection. + + :param patron: A `Patron`. It's okay if this turns out to be a + `ProblemDetail` or `None` due to a problem earlier in the + process. + :param license_pool`: The `LicensePool` the patron is trying to act on. + """ + if patron is None or isinstance(patron, ProblemDetail): + # An earlier stage in the process failed to authenticate + # the patron. 
+ return patron + + work = license_pool.work + if work is not None and not work.age_appropriate_for_patron(patron): + return NOT_AGE_APPROPRIATE + + if ( + not patron.library.settings.allow_holds + and license_pool.licenses_available == 0 + and not license_pool.open_access + and not license_pool.unlimited_access + ): + return FORBIDDEN_BY_POLICY.detailed( + _("Library policy prohibits the placement of holds."), status_code=403 + ) + return None diff --git a/api/controller/device_tokens.py b/api/controller/device_tokens.py new file mode 100644 index 0000000000..399d0f2d65 --- /dev/null +++ b/api/controller/device_tokens.py @@ -0,0 +1,62 @@ +from __future__ import annotations + +import flask +from flask import Response +from sqlalchemy.exc import NoResultFound + +from api.controller.circulation_manager import CirculationManagerController +from api.problem_details import DEVICE_TOKEN_NOT_FOUND, DEVICE_TOKEN_TYPE_INVALID +from core.model import DeviceToken +from core.model.devicetokens import DuplicateDeviceTokenError, InvalidTokenTypeError + + +class DeviceTokensController(CirculationManagerController): + def get_patron_device(self): + patron = flask.request.patron + device_token = flask.request.args["device_token"] + token: DeviceToken = ( + self._db.query(DeviceToken) + .filter( + DeviceToken.patron_id == patron.id, + DeviceToken.device_token == device_token, + ) + .first() + ) + if not token: + return DEVICE_TOKEN_NOT_FOUND + return dict(token_type=token.token_type, device_token=token.device_token), 200 + + def create_patron_device(self): + patron = flask.request.patron + device_token = flask.request.json["device_token"] + token_type = flask.request.json["token_type"] + + try: + device = DeviceToken.create(self._db, token_type, device_token, patron) + except InvalidTokenTypeError: + return DEVICE_TOKEN_TYPE_INVALID + except DuplicateDeviceTokenError: + return dict(exists=True), 200 + + return "", 201 + + def delete_patron_device(self): + patron = flask.request.patron + device_token = flask.request.json["device_token"] + token_type = flask.request.json["token_type"] + + try: + device: DeviceToken = ( + self._db.query(DeviceToken) + .filter( + DeviceToken.patron == patron, + DeviceToken.device_token == device_token, + DeviceToken.token_type == token_type, + ) + .one() + ) + self._db.delete(device) + except NoResultFound: + return DEVICE_TOKEN_NOT_FOUND + + return Response("", 204) diff --git a/api/controller/index.py b/api/controller/index.py new file mode 100644 index 0000000000..7f1eb98718 --- /dev/null +++ b/api/controller/index.py @@ -0,0 +1,84 @@ +from __future__ import annotations + +import flask +from flask import Response, redirect, url_for + +from api.controller.circulation_manager import CirculationManagerController +from core.util.authentication_for_opds import AuthenticationForOPDSDocument +from core.util.problem_detail import ProblemDetail + + +class IndexController(CirculationManagerController): + """Redirect the patron to the appropriate feed.""" + + def __call__(self): + # If this library provides a custom index view, use that. + library = flask.request.library + custom = self.manager.custom_index_views.get(library.id) + if custom is not None: + annotator = self.manager.annotator(None) + return custom(library, annotator) + + # The simple case: the app is equally open to all clients. 
+ library_short_name = flask.request.library.short_name + if not self.has_root_lanes(): + return redirect( + url_for( + "acquisition_groups", + library_short_name=library_short_name, + _external=True, + ) + ) + + # The more complex case. We must authorize the patron, check + # their type, and redirect them to an appropriate feed. + return self.appropriate_index_for_patron_type() + + def authentication_document(self): + """Serve this library's Authentication For OPDS document.""" + return Response( + self.manager.authentication_for_opds_document, + 200, + {"Content-Type": AuthenticationForOPDSDocument.MEDIA_TYPE}, + ) + + def has_root_lanes(self): + """Does the active library feature root lanes for patrons of + certain types? + + :return: A boolean + """ + return flask.request.library.has_root_lanes + + def authenticated_patron_root_lane(self): + patron = self.authenticated_patron_from_request() + if isinstance(patron, ProblemDetail): + return patron + if isinstance(patron, Response): + return patron + return patron.root_lane + + def appropriate_index_for_patron_type(self): + library_short_name = flask.request.library.short_name + root_lane = self.authenticated_patron_root_lane() + if isinstance(root_lane, ProblemDetail): + return root_lane + if isinstance(root_lane, Response): + return root_lane + if root_lane is None: + return redirect( + url_for( + "acquisition_groups", + library_short_name=library_short_name, + _external=True, + ) + ) + + return redirect( + url_for( + "acquisition_groups", + library_short_name=library_short_name, + lane_identifier=root_lane.id, + _external=True, + ) + ) diff --git a/api/controller/loan.py b/api/controller/loan.py new file mode 100644 index 0000000000..9d8c33f687 --- /dev/null +++ b/api/controller/loan.py @@ -0,0 +1,579 @@ +from __future__ import annotations + +from typing import Any + +import flask +from flask import Response, redirect +from flask_babel import lazy_gettext as _ +from lxml import etree +from werkzeug import Response as wkResponse + +from api.circulation_exceptions import ( + AuthorizationBlocked, + AuthorizationExpired, + CannotFulfill, + CannotHold, + CannotLoan, + CannotReleaseHold, + CannotRenew, + CannotReturn, + CirculationException, + DeliveryMechanismConflict, + DeliveryMechanismError, + FormatNotAvailable, + NoActiveLoan, + NoOpenAccessDownload, + NotFoundOnRemote, + OutstandingFines, + PatronAuthorizationFailedException, + PatronHoldLimitReached, + PatronLoanLimitReached, + RemoteRefusedReturn, +) +from api.controller.circulation_manager import CirculationManagerController +from api.problem_details import ( + BAD_DELIVERY_MECHANISM, + CANNOT_FULFILL, + CANNOT_RELEASE_HOLD, + CHECKOUT_FAILED, + COULD_NOT_MIRROR_TO_REMOTE, + DELIVERY_CONFLICT, + HOLD_FAILED, + INVALID_CREDENTIALS, + NO_ACCEPTABLE_FORMAT, + NO_ACTIVE_LOAN, + NO_ACTIVE_LOAN_OR_HOLD, + NO_LICENSES, + NOT_FOUND_ON_REMOTE, + OUTSTANDING_FINES, + RENEW_FAILED, +) +from core.feed.acquisition import OPDSAcquisitionFeed +from core.model import DataSource, DeliveryMechanism, Loan, Patron, Representation +from core.util.http import RemoteIntegrationException +from core.util.opds_writer import OPDSFeed +from core.util.problem_detail import ProblemDetail + + +class LoanController(CirculationManagerController): + def sync(self): + """Sync the authenticated patron's loans and holds with all third-party + providers. + + :return: A Response containing an OPDS feed with up-to-date information. 
+ """ + patron = flask.request.patron + + # Save some time if we don't believe the patron's loans or holds have + # changed since the last time the client requested this feed. + response = self.handle_conditional_request(patron.last_loan_activity_sync) + if isinstance(response, Response): + return response + + # TODO: SimplyE used to make a HEAD request to the bookshelf feed + # as a quick way of checking authentication. Does this still happen? + # It shouldn't -- the patron profile feed should be used instead. + # If it's not used, we can take this out. + if flask.request.method == "HEAD": + return Response() + + # First synchronize our local list of loans and holds with all + # third-party loan providers. + if patron.authorization_identifier: + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + try: + self.circulation.sync_bookshelf(patron, credential) + except Exception as e: + # If anything goes wrong, omit the sync step and just + # display the current active loans, as we understand them. + self.manager.log.error( + "ERROR DURING SYNC for %s: %r", patron.id, e, exc_info=e + ) + + # Then make the feed. + feed = OPDSAcquisitionFeed.active_loans_for(self.circulation, patron) + response = feed.as_response( + max_age=0, + private=True, + mime_types=flask.request.accept_mimetypes, + ) + + last_modified = patron.last_loan_activity_sync + if last_modified: + response.last_modified = last_modified + return response + + def borrow(self, identifier_type, identifier, mechanism_id=None): + """Create a new loan or hold for a book. + + :return: A Response containing an OPDS entry that includes a link of rel + "http://opds-spec.org/acquisition", which can be used to fetch the + book or the license file. + """ + patron = flask.request.patron + library = flask.request.library + + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + + result = self.best_lendable_pool( + library, patron, identifier_type, identifier, mechanism_id + ) + if not result: + # No LicensePools were found and no ProblemDetail + # was returned. Send a generic ProblemDetail. + return NO_LICENSES.detailed(_("I've never heard of this work.")) + if isinstance(result, ProblemDetail): + # There was a problem determining the appropriate + # LicensePool to use. + return result + + if isinstance(result, Loan): + # We already have a Loan, so there's no need to go to the API. + loan_or_hold = result + is_new = False + else: + # We need to actually go out to the API + # and try to take out a loan. + pool, mechanism = result + loan_or_hold, is_new = self._borrow(patron, credential, pool, mechanism) + + if isinstance(loan_or_hold, ProblemDetail): + return loan_or_hold + + # At this point we have either a loan or a hold. If a loan, serve + # a feed that tells the patron how to fulfill the loan. If a hold, + # serve a feed that talks about the hold. + response_kwargs = {} + if is_new: + response_kwargs["status"] = 201 + else: + response_kwargs["status"] = 200 + return OPDSAcquisitionFeed.single_entry_loans_feed( + self.circulation, loan_or_hold, **response_kwargs + ) + + def _borrow(self, patron, credential, pool, mechanism): + """Go out to the API, try to take out a loan, and handle errors as + problem detail documents. + + :param patron: The Patron who's trying to take out the loan + :param credential: A Credential to use when authenticating + as this Patron with the external API. 
+ :param pool: The LicensePool for the book the Patron wants. + :mechanism: The DeliveryMechanism to request when asking for + a loan. + :return: a 2-tuple (result, is_new) `result` is a Loan (if one + could be created or found), a Hold (if a Loan could not be + created but a Hold could be), or a ProblemDetail (if the + entire operation failed). + """ + result = None + is_new = False + try: + loan, hold, is_new = self.circulation.borrow( + patron, credential, pool, mechanism + ) + result = loan or hold + except NoOpenAccessDownload as e: + result = NO_LICENSES.detailed( + _("Couldn't find an open-access download link for this book."), + status_code=404, + ) + except PatronAuthorizationFailedException as e: + result = INVALID_CREDENTIALS + except (PatronLoanLimitReached, PatronHoldLimitReached) as e: + result = e.as_problem_detail_document().with_debug(str(e)) + except DeliveryMechanismError as e: + result = BAD_DELIVERY_MECHANISM.with_debug( + str(e), status_code=e.status_code + ) + except OutstandingFines as e: + result = OUTSTANDING_FINES.detailed( + _( + "You must pay your $%(fine_amount).2f outstanding fines before you can borrow more books.", + fine_amount=patron.fines, + ) + ) + except AuthorizationExpired as e: + result = e.as_problem_detail_document(debug=False) + except AuthorizationBlocked as e: + result = e.as_problem_detail_document(debug=False) + except CannotLoan as e: + result = CHECKOUT_FAILED.with_debug(str(e)) + except CannotHold as e: + result = HOLD_FAILED.with_debug(str(e)) + except CannotRenew as e: + result = RENEW_FAILED.with_debug(str(e)) + except NotFoundOnRemote as e: + result = NOT_FOUND_ON_REMOTE + except CirculationException as e: + # Generic circulation error. + result = CHECKOUT_FAILED.with_debug(str(e)) + + if result is None: + # This shouldn't happen, but if it does, it means no exception + # was raised but we just didn't get a loan or hold. Return a + # generic circulation error. + result = HOLD_FAILED + return result, is_new + + def best_lendable_pool( + self, library, patron, identifier_type, identifier, mechanism_id + ): + """ + Of the available LicensePools for the given Identifier, return the + one that's the best candidate for loaning out right now. + + :return: A Loan if this patron already has an active loan, otherwise a LicensePool. + """ + # Turn source + identifier into a set of LicensePools + pools = self.load_licensepools(library, identifier_type, identifier) + if isinstance(pools, ProblemDetail): + # Something went wrong. + return pools + + best = None + mechanism = None + problem_doc = None + + existing_loans = ( + self._db.query(Loan) + .filter( + Loan.license_pool_id.in_([lp.id for lp in pools]), Loan.patron == patron + ) + .all() + ) + if existing_loans: + # The patron already has at least one loan on this book already. + # To make the "borrow" operation idempotent, return one of + # those loans instead of an error. + return existing_loans[0] + + # We found a number of LicensePools. Try to locate one that + # we can actually loan to the patron. + for pool in pools: + problem_doc = self.apply_borrowing_policy(patron, pool) + if problem_doc: + # As a matter of policy, the patron is not allowed to borrow + # this book. + continue + + # Beyond this point we know that site policy does not prohibit + # us from lending this pool to this patron. + + if mechanism_id: + # But the patron has requested a license pool that + # supports a specific delivery mechanism. This pool + # must offer that mechanism. 
+ mechanism = self.load_licensepooldelivery(pool, mechanism_id) + if isinstance(mechanism, ProblemDetail): + problem_doc = mechanism + continue + + # Beyond this point we have a license pool that we can + # actually loan or put on hold. + + # But there might be many such LicensePools, and we want + # to pick the one that will get the book to the patron + # with the shortest wait. + if ( + not best + or pool.licenses_available > best.licenses_available + or pool.patrons_in_hold_queue < best.patrons_in_hold_queue + ): + best = pool + + if not best: + # We were unable to find any LicensePool that fit the + # criteria. + return problem_doc + return best, mechanism + + def fulfill( + self, + license_pool_id: int, + mechanism_id: int | None = None, + do_get: Any | None = None, + ) -> wkResponse | ProblemDetail: + """Fulfill a book that has already been checked out, + or which can be fulfilled with no active loan. + + If successful, this will serve the patron a downloadable copy + of the book, a key (such as a DRM license file or bearer + token) which can be used to get the book, or an OPDS entry + containing a link to the book. + + :param license_pool_id: Database ID of a LicensePool. + :param mechanism_id: Database ID of a DeliveryMechanism. + """ + do_get = do_get or Representation.simple_http_get + + # Unlike most controller methods, this one has different + # behavior whether or not the patron is authenticated. This is + # why we're about to do something we don't usually do--call + # authenticated_patron_from_request from within a controller + # method. + authentication_response = self.authenticated_patron_from_request() + if isinstance(authentication_response, Patron): + # The patron is authenticated. + patron = authentication_response + else: + # The patron is not authenticated, either due to bad credentials + # (in which case authentication_response is a Response) + # or due to an integration error with the auth provider (in + # which case it is a ProblemDetail). + # + # There's still a chance this request can succeed, but if not, + # we'll be sending out authentication_response. + patron = None + library = flask.request.library # type: ignore + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + + # Turn source + identifier into a LicensePool. + pool = self.load_licensepool(license_pool_id) + if isinstance(pool, ProblemDetail): + return pool + + loan, loan_license_pool = self.get_patron_loan(patron, [pool]) + + requested_license_pool = loan_license_pool or pool + + # Find the LicensePoolDeliveryMechanism they asked for. + mechanism = None + if mechanism_id: + mechanism = self.load_licensepooldelivery( + requested_license_pool, mechanism_id + ) + if isinstance(mechanism, ProblemDetail): + return mechanism + + if (not loan or not loan_license_pool) and not ( + self.can_fulfill_without_loan( + library, patron, requested_license_pool, mechanism + ) + ): + if patron: + # Since a patron was identified, the problem is they have + # no active loan. + return NO_ACTIVE_LOAN.detailed( + _("You have no active loan for this title.") + ) + else: + # Since no patron was identified, the problem is + # whatever problem was revealed by the earlier + # authenticated_patron_from_request() call -- either the + # patron didn't authenticate or there's a problem + # integrating with the auth provider. + return authentication_response + + if not mechanism: + # See if the loan already has a mechanism set. We can use that. 
+ if loan and loan.fulfillment: + mechanism = loan.fulfillment + else: + return BAD_DELIVERY_MECHANISM.detailed( + _("You must specify a delivery mechanism to fulfill this loan.") + ) + + try: + fulfillment = self.circulation.fulfill( + patron, + credential, + requested_license_pool, + mechanism, + ) + except DeliveryMechanismConflict as e: + return DELIVERY_CONFLICT.detailed(str(e)) + except NoActiveLoan as e: + return NO_ACTIVE_LOAN.detailed( + _("Can't fulfill loan because you have no active loan for this book."), + status_code=e.status_code, + ) + except FormatNotAvailable as e: + return NO_ACCEPTABLE_FORMAT.with_debug(str(e), status_code=e.status_code) + except CannotFulfill as e: + return CANNOT_FULFILL.with_debug(str(e), status_code=e.status_code) + except DeliveryMechanismError as e: + return BAD_DELIVERY_MECHANISM.with_debug(str(e), status_code=e.status_code) + + # A subclass of FulfillmentInfo may want to bypass the whole + # response creation process. + response = fulfillment.as_response + if response is not None: + return response + + headers = dict() + encoding_header = dict() + if ( + fulfillment.data_source_name == DataSource.ENKI + and mechanism.delivery_mechanism.drm_scheme_media_type + == DeliveryMechanism.NO_DRM + ): + encoding_header["Accept-Encoding"] = "deflate" + + if mechanism.delivery_mechanism.is_streaming: + # If this is a streaming delivery mechanism, create an OPDS entry + # with a fulfillment link to the streaming reader url. + feed = OPDSAcquisitionFeed.single_entry_loans_feed( + self.circulation, loan, fulfillment=fulfillment + ) + if isinstance(feed, ProblemDetail): + # This should typically never happen, since we've gone through the entire fulfill workflow + # But for the sake of return-type completeness we are adding this here + return feed + if isinstance(feed, Response): + return feed + else: + content = etree.tostring(feed) + status_code = 200 + headers["Content-Type"] = OPDSFeed.ACQUISITION_FEED_TYPE + elif fulfillment.content_link_redirect is True: + # The fulfillment API has asked us to not be a proxy and instead redirect the client directly + return redirect(fulfillment.content_link) + else: + content = fulfillment.content + if fulfillment.content_link: + # If we have a link to the content on a remote server, web clients may not + # be able to access it if the remote server does not support CORS requests. + + # If the pool is open access though, the web client can link directly to the + # file to download it, so it's safe to redirect. + if requested_license_pool.open_access: + return redirect(fulfillment.content_link) + + # Otherwise, we need to fetch the content and return it instead + # of redirecting to it, since it may be downloaded through an + # indirect acquisition link. + try: + status_code, headers, content = do_get( + fulfillment.content_link, headers=encoding_header + ) + headers = dict(headers) + except RemoteIntegrationException as e: + return e.as_problem_detail_document(debug=False) + else: + status_code = 200 + if fulfillment.content_type: + headers["Content-Type"] = fulfillment.content_type + + return Response(response=content, status=status_code, headers=headers) + + def can_fulfill_without_loan(self, library, patron, pool, lpdm): + """Is it acceptable to fulfill the given LicensePoolDeliveryMechanism + for the given Patron without creating a Loan first? + + This question is usually asked because no Patron has been + authenticated, and thus no Loan can be created, but somebody + wants a book anyway. + + :param library: A Library. 
+ :param patron: A Patron, probably None. + :param lpdm: A LicensePoolDeliveryMechanism. + """ + authenticator = self.manager.auth.library_authenticators.get(library.short_name) + if authenticator and authenticator.identifies_individuals: + # This library identifies individual patrons, so there is + # no reason to fulfill books without a loan. Even if the + # books are free and the 'loans' are nominal, having a + # Loan object makes it possible for a patron to sync their + # collection across devices, so that's the way we do it. + return False + + # If the library doesn't require that individual patrons + # identify themselves, it's up to the CirculationAPI object. + # Most of them will say no. (This would indicate that the + # collection is improperly associated with a library that + # doesn't identify its patrons.) + return self.circulation.can_fulfill_without_loan(patron, pool, lpdm) + + def revoke(self, license_pool_id): + patron = flask.request.patron + pool = self.load_licensepool(license_pool_id) + if isinstance(pool, ProblemDetail): + return pool + + loan, _ignore = self.get_patron_loan(patron, [pool]) + + if loan: + hold = None + else: + hold, _ignore = self.get_patron_hold(patron, [pool]) + + if not loan and not hold: + if not pool.work: + title = "this book" + else: + title = '"%s"' % pool.work.title + return NO_ACTIVE_LOAN_OR_HOLD.detailed( + _( + 'Can\'t revoke because you have no active loan or hold for "%(title)s".', + title=title, + ), + status_code=404, + ) + + header = self.authorization_header() + credential = self.manager.auth.get_credential_from_header(header) + if loan: + try: + self.circulation.revoke_loan(patron, credential, pool) + except RemoteRefusedReturn as e: + title = _( + "Loan deleted locally but remote refused. Loan is likely to show up again on next sync." 
+ ) + return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, status_code=503) + except CannotReturn as e: + title = _("Loan deleted locally but remote failed.") + return COULD_NOT_MIRROR_TO_REMOTE.detailed(title, 503).with_debug( + str(e) + ) + elif hold: + if not self.circulation.can_revoke_hold(pool, hold): + title = _("Cannot release a hold once it enters reserved state.") + return CANNOT_RELEASE_HOLD.detailed(title, 400) + try: + self.circulation.release_hold(patron, credential, pool) + except CannotReleaseHold as e: + title = _("Hold released locally but remote failed.") + return CANNOT_RELEASE_HOLD.detailed(title, 503).with_debug(str(e)) + + work = pool.work + annotator = self.manager.annotator(None) + return OPDSAcquisitionFeed.entry_as_response( + OPDSAcquisitionFeed.single_entry(work, annotator) + ) + + def detail(self, identifier_type, identifier): + if flask.request.method == "DELETE": + return self.revoke_loan_or_hold(identifier_type, identifier) + + patron = flask.request.patron + library = flask.request.library + pools = self.load_licensepools(library, identifier_type, identifier) + if isinstance(pools, ProblemDetail): + return pools + + loan, pool = self.get_patron_loan(patron, pools) + if loan: + hold = None + else: + hold, pool = self.get_patron_hold(patron, pools) + + if not loan and not hold: + return NO_ACTIVE_LOAN_OR_HOLD.detailed( + _( + 'You have no active loan or hold for "%(title)s".', + title=pool.work.title, + ), + status_code=404, + ) + + if flask.request.method == "GET": + if loan: + item = loan + else: + item = hold + return OPDSAcquisitionFeed.single_entry_loans_feed(self.circulation, item) diff --git a/api/controller_marc.py b/api/controller/marc.py similarity index 100% rename from api/controller_marc.py rename to api/controller/marc.py diff --git a/api/controller/odl_notification.py b/api/controller/odl_notification.py new file mode 100644 index 0000000000..9babcae469 --- /dev/null +++ b/api/controller/odl_notification.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +import json + +import flask +from flask import Response +from flask_babel import lazy_gettext as _ + +from api.controller.circulation_manager import CirculationManagerController +from api.odl import ODLAPI +from api.odl2 import ODL2API +from api.problem_details import INVALID_LOAN_FOR_ODL_NOTIFICATION, NO_ACTIVE_LOAN +from core.model import Loan, get_one + + +class ODLNotificationController(CirculationManagerController): + """Receive notifications from an ODL distributor when the + status of a loan changes. 
+ """ + + def notify(self, loan_id): + library = flask.request.library + status_doc = flask.request.data + loan = get_one(self._db, Loan, id=loan_id) + + if not loan: + return NO_ACTIVE_LOAN.detailed(_("No loan was found for this identifier.")) + + collection = loan.license_pool.collection + if collection.protocol not in (ODLAPI.label(), ODL2API.label()): + return INVALID_LOAN_FOR_ODL_NOTIFICATION + + api = self.manager.circulation_apis[library.id].api_for_license_pool( + loan.license_pool + ) + api.update_loan(loan, json.loads(status_doc)) + return Response(_("Success"), 200) diff --git a/api/controller/opds_feed.py b/api/controller/opds_feed.py new file mode 100644 index 0000000000..26dd89e446 --- /dev/null +++ b/api/controller/opds_feed.py @@ -0,0 +1,456 @@ +from __future__ import annotations + +import flask +from flask import Response, redirect, url_for + +from api.controller.circulation_manager import CirculationManagerController +from api.lanes import ( + CrawlableCollectionBasedLane, + CrawlableCustomListBasedLane, + CrawlableFacets, + HasSeriesFacets, + JackpotFacets, + JackpotWorkList, +) +from api.problem_details import NO_SUCH_COLLECTION, NO_SUCH_LIST +from core.app_server import load_facets_from_request, load_pagination_from_request +from core.entrypoint import EverythingEntryPoint +from core.external_search import SortKeyPagination +from core.feed.acquisition import OPDSAcquisitionFeed +from core.feed.navigation import NavigationFeed +from core.feed.opds import NavigationFacets +from core.lane import FeaturedFacets, Pagination, SearchFacets, WorkList +from core.model import Collection, CustomList +from core.opensearch import OpenSearchDocument +from core.util.problem_detail import ProblemDetail + + +class OPDSFeedController(CirculationManagerController): + def groups(self, lane_identifier, feed_class=OPDSAcquisitionFeed): + """Build or retrieve a grouped acquisition feed. + + :param lane_identifier: An identifier that uniquely identifiers + the WorkList whose feed we want. + :param feed_class: A replacement for AcquisitionFeed, for use in + tests. + """ + library = flask.request.library + + # Special case: a patron with a root lane who attempts to access + # the library's top-level WorkList is redirected to their root + # lane (as though they had accessed the index controller) + # rather than being denied access. + if lane_identifier is None: + patron = self.request_patron + if patron is not None and patron.root_lane: + return redirect( + url_for( + "acquisition_groups", + library_short_name=library.short_name, + lane_identifier=patron.root_lane.id, + _external=True, + ) + ) + + lane = self.load_lane(lane_identifier) + if isinstance(lane, ProblemDetail): + return lane + + if not lane.children: + # This lane has no children. Although we can technically + # create a grouped feed, it would be an unsatisfying + # gateway to a paginated feed. We should just serve the + # paginated feed. 
+ return self.feed(lane_identifier, feed_class) + + facet_class_kwargs = dict( + minimum_featured_quality=library.settings.minimum_featured_quality, + ) + facets = self.manager.load_facets_from_request( + worklist=lane, + base_class=FeaturedFacets, + base_class_constructor_kwargs=facet_class_kwargs, + ) + if isinstance(facets, ProblemDetail): + return facets + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + url = url_for( + "acquisition_groups", + lane_identifier=lane_identifier, + library_short_name=library.short_name, + _external=True, + ) + + annotator = self.manager.annotator(lane, facets) + return feed_class.groups( + _db=self._db, + title=lane.display_name, + url=url, + worklist=lane, + annotator=annotator, + facets=facets, + search_engine=search_engine, + ).as_response(mime_types=flask.request.accept_mimetypes) + + def feed(self, lane_identifier, feed_class=OPDSAcquisitionFeed): + """Build or retrieve a paginated acquisition feed. + + :param lane_identifier: An identifier that uniquely identifiers + the WorkList whose feed we want. + :param feed_class: A replacement for AcquisitionFeed, for use in + tests. + """ + lane = self.load_lane(lane_identifier) + if isinstance(lane, ProblemDetail): + return lane + facets = self.manager.load_facets_from_request(worklist=lane) + if isinstance(facets, ProblemDetail): + return facets + pagination = load_pagination_from_request(SortKeyPagination) + if isinstance(pagination, ProblemDetail): + return pagination + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + library_short_name = flask.request.library.short_name + url = url_for( + "feed", + lane_identifier=lane_identifier, + library_short_name=library_short_name, + _external=True, + ) + + annotator = self.manager.annotator(lane, facets=facets) + max_age = flask.request.args.get("max_age") + feed = feed_class.page( + _db=self._db, + title=lane.display_name, + url=url, + worklist=lane, + annotator=annotator, + facets=facets, + pagination=pagination, + search_engine=search_engine, + ) + return feed.as_response( + max_age=int(max_age) if max_age else lane.max_cache_age(), + mime_types=flask.request.accept_mimetypes, + ) + + def navigation(self, lane_identifier): + """Build or retrieve a navigation feed, for clients that do not support groups.""" + + lane = self.load_lane(lane_identifier) + if isinstance(lane, ProblemDetail): + return lane + library = flask.request.library + library_short_name = library.short_name + url = url_for( + "navigation_feed", + lane_identifier=lane_identifier, + library_short_name=library_short_name, + _external=True, + ) + + title = lane.display_name + facet_class_kwargs = dict( + minimum_featured_quality=library.settings.minimum_featured_quality, + ) + facets = self.manager.load_facets_from_request( + worklist=lane, + base_class=NavigationFacets, + base_class_constructor_kwargs=facet_class_kwargs, + ) + annotator = self.manager.annotator(lane, facets) + return NavigationFeed.navigation( + _db=self._db, + title=title, + url=url, + worklist=lane, + annotator=annotator, + facets=facets, + ).as_response(max_age=lane.max_cache_age()) + + def crawlable_library_feed(self): + """Build or retrieve a crawlable acquisition feed for the + request library. 
+ """ + library = flask.request.library + url = url_for( + "crawlable_library_feed", + library_short_name=library.short_name, + _external=True, + ) + title = library.name + lane = CrawlableCollectionBasedLane() + lane.initialize(library) + return self._crawlable_feed(title=title, url=url, worklist=lane) + + def crawlable_collection_feed(self, collection_name): + """Build or retrieve a crawlable acquisition feed for the + requested collection. + """ + collection = Collection.by_name(self._db, collection_name) + if not collection: + return NO_SUCH_COLLECTION + title = collection.name + url = url_for( + "crawlable_collection_feed", collection_name=collection.name, _external=True + ) + lane = CrawlableCollectionBasedLane() + lane.initialize([collection]) + return self._crawlable_feed(title=title, url=url, worklist=lane) + + def crawlable_list_feed(self, list_name): + """Build or retrieve a crawlable, paginated acquisition feed for the + named CustomList, sorted by update date. + """ + # TODO: A library is not strictly required here, since some + # CustomLists aren't associated with a library, but this isn't + # a use case we need to support now. + library = flask.request.library + list = CustomList.find(self._db, list_name, library=library) + if not list: + return NO_SUCH_LIST + library_short_name = library.short_name + title = list.name + url = url_for( + "crawlable_list_feed", + list_name=list.name, + library_short_name=library_short_name, + _external=True, + ) + lane = CrawlableCustomListBasedLane() + lane.initialize(library, list) + return self._crawlable_feed(title=title, url=url, worklist=lane) + + def _crawlable_feed( + self, title, url, worklist, annotator=None, feed_class=OPDSAcquisitionFeed + ): + """Helper method to create a crawlable feed. + + :param title: The title to use for the feed. + :param url: The URL from which the feed will be served. + :param worklist: A crawlable Lane which controls which works show up + in the feed. + :param annotator: A custom Annotator to use when generating the feed. + :param feed_class: A drop-in replacement for OPDSAcquisitionFeed + for use in tests. + """ + pagination = load_pagination_from_request( + SortKeyPagination, default_size=Pagination.DEFAULT_CRAWLABLE_SIZE + ) + if isinstance(pagination, ProblemDetail): + return pagination + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + # A crawlable feed has only one possible set of Facets, + # so library settings are irrelevant. + facets = self.manager.load_facets_from_request( + worklist=worklist, + base_class=CrawlableFacets, + ) + annotator = annotator or self.manager.annotator(worklist, facets=facets) + + return feed_class.page( + _db=self._db, + title=title, + url=url, + worklist=worklist, + annotator=annotator, + facets=facets, + pagination=pagination, + search_engine=search_engine, + ).as_response( + mime_types=flask.request.accept_mimetypes, max_age=worklist.max_cache_age() + ) + + def _load_search_facets(self, lane): + entrypoints = list(flask.request.library.entrypoints) + if len(entrypoints) > 1: + # There is more than one enabled EntryPoint. + # By default, search them all. + default_entrypoint = EverythingEntryPoint + else: + # There is only one enabled EntryPoint, + # and no need for a special default. 
+ default_entrypoint = None + return self.manager.load_facets_from_request( + worklist=lane, + base_class=SearchFacets, + default_entrypoint=default_entrypoint, + ) + + def search(self, lane_identifier, feed_class=OPDSAcquisitionFeed): + """Search for books.""" + lane = self.load_lane(lane_identifier) + if isinstance(lane, ProblemDetail): + return lane + + # Although the search query goes against Opensearch, we must + # use normal pagination because the results are sorted by + # match quality, not bibliographic information. + pagination = load_pagination_from_request( + Pagination, default_size=Pagination.DEFAULT_SEARCH_SIZE + ) + if isinstance(pagination, ProblemDetail): + return pagination + + facets = self._load_search_facets(lane) + if isinstance(facets, ProblemDetail): + return facets + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + # Check whether there is a query string -- if not, we want to + # send an OpenSearch document explaining how to search. + query = flask.request.args.get("q") + library_short_name = flask.request.library.short_name + + # Create a function that, when called, generates a URL to the + # search controller. + # + # We'll call this one way if there is no query string in the + # request arguments, and another way if there is a query + # string. + make_url_kwargs = dict(list(facets.items())) + make_url = lambda: url_for( + "lane_search", + lane_identifier=lane_identifier, + library_short_name=library_short_name, + _external=True, + **make_url_kwargs, + ) + if not query: + # Send the search form + open_search_doc = OpenSearchDocument.for_lane(lane, make_url()) + headers = {"Content-Type": "application/opensearchdescription+xml"} + return Response(open_search_doc, 200, headers) + + # We have a query -- add it to the keyword arguments used when + # generating a URL. + make_url_kwargs["q"] = query.encode("utf8") + + # Run a search. + annotator = self.manager.annotator(lane, facets) + info = OpenSearchDocument.search_info(lane) + response = feed_class.search( + _db=self._db, + title=info["name"], + url=make_url(), + lane=lane, + search_engine=search_engine, + query=query, + annotator=annotator, + pagination=pagination, + facets=facets, + ) + if isinstance(response, ProblemDetail): + return response + return response.as_response( + mime_types=flask.request.accept_mimetypes, max_age=lane.max_cache_age() + ) + + def _qa_feed( + self, feed_factory, feed_title, controller_name, facet_class, worklist_factory + ): + """Create some kind of OPDS feed designed for consumption by an + automated QA process. + + :param feed_factory: This function will be called to create the feed. + It must either be AcquisitionFeed.groups or Acquisition.page, + or it must take the same arguments as those methods. + :param feed_title: String title of the feed. + :param controller_name: Controller name to use when generating + the URL to the feed. + :param facet_class: Faceting class to load (through + load_facets_from_request). + :param worklist_factory: Function that takes (Library, Facets) + and returns a Worklist configured to generate the feed. + :return: A ProblemDetail if there's a problem loading the faceting + object; otherwise the return value of `feed_factory`. 
+ """ + library = flask.request.library + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + url = url_for( + controller_name, library_short_name=library.short_name, _external=True + ) + + facets = load_facets_from_request( + base_class=facet_class, default_entrypoint=EverythingEntryPoint + ) + if isinstance(facets, ProblemDetail): + return facets + + worklist = worklist_factory(library, facets) + annotator = self.manager.annotator(worklist) + + # Since this feed will be consumed by an automated client, and + # we're choosing titles for specific purposes, there's no + # reason to put more than a single item in each group. + pagination = Pagination(size=1) + return feed_factory( + _db=self._db, + title=feed_title, + url=url, + pagination=pagination, + worklist=worklist, + annotator=annotator, + search_engine=search_engine, + facets=facets, + max_age=0, + ) + + def qa_feed(self, feed_class=OPDSAcquisitionFeed): + """Create an OPDS feed containing the information necessary to + run a full set of integration tests against this server and + the vendors it relies on. + + :param feed_class: Class to substitute for AcquisitionFeed during + tests. + """ + + def factory(library, facets): + return JackpotWorkList(library, facets) + + return self._qa_feed( + feed_factory=feed_class.groups, + feed_title="QA test feed", + controller_name="qa_feed", + facet_class=JackpotFacets, + worklist_factory=factory, + ) + + def qa_series_feed(self, feed_class=OPDSAcquisitionFeed): + """Create an OPDS feed containing books that belong to _some_ + series, without regard to _which_ series. + + :param feed_class: Class to substitute for AcquisitionFeed during + tests. + """ + + def factory(library, facets): + wl = WorkList() + wl.initialize(library) + return wl + + return self._qa_feed( + feed_factory=feed_class.page, + feed_title="QA series test feed", + controller_name="qa_series_feed", + facet_class=HasSeriesFacets, + worklist_factory=factory, + ) diff --git a/api/controller/patron_auth_token.py b/api/controller/patron_auth_token.py new file mode 100644 index 0000000000..9c5010f426 --- /dev/null +++ b/api/controller/patron_auth_token.py @@ -0,0 +1,39 @@ +from __future__ import annotations + +import logging + +import flask + +from api.authentication.access_token import AccessTokenProvider +from api.controller.circulation_manager import CirculationManagerController +from api.model.patron_auth import PatronAuthAccessToken +from api.problem_details import PATRON_AUTH_ACCESS_TOKEN_NOT_POSSIBLE +from core.util.problem_detail import ProblemError + + +class PatronAuthTokenController(CirculationManagerController): + def get_token(self): + """Create a Patron Auth access token for an authenticated patron""" + patron = flask.request.patron + auth = flask.request.authorization + token_expiry = 3600 + + if not patron or auth.type.lower() != "basic": + return PATRON_AUTH_ACCESS_TOKEN_NOT_POSSIBLE + + try: + token = AccessTokenProvider.generate_token( + self._db, + patron, + auth["password"], + expires_in=token_expiry, + ) + except ProblemError as ex: + logging.getLogger(self.__class__.__name__).error( + f"Could not generate Patron Auth Access Token: {ex}" + ) + return ex.problem_detail + + return PatronAuthAccessToken( + access_token=token, expires_in=token_expiry, token_type="Bearer" + ).api_dict() diff --git a/api/controller/playtime_entries.py b/api/controller/playtime_entries.py new file mode 100644 index 0000000000..8f52129d10 --- /dev/null +++ 
b/api/controller/playtime_entries.py @@ -0,0 +1,53 @@ +from __future__ import annotations + +import flask +from pydantic import ValidationError + +from api.controller.circulation_manager import CirculationManagerController +from api.model.time_tracking import PlaytimeEntriesPost, PlaytimeEntriesPostResponse +from api.problem_details import NOT_FOUND_ON_REMOTE +from core.model import Collection, Identifier, Library, get_one +from core.problem_details import INVALID_INPUT +from core.query.playtime_entries import PlaytimeEntries + + +class PlaytimeEntriesController(CirculationManagerController): + def track_playtimes(self, collection_id, identifier_type, identifier_idn): + library: Library = flask.request.library + identifier = get_one( + self._db, Identifier, type=identifier_type, identifier=identifier_idn + ) + collection = Collection.by_id(self._db, collection_id) + + if not identifier: + return NOT_FOUND_ON_REMOTE.detailed( + f"The identifier {identifier_type}/{identifier_idn} was not found." + ) + if not collection: + return NOT_FOUND_ON_REMOTE.detailed( + f"The collection {collection_id} was not found." + ) + + if collection not in library.collections: + return INVALID_INPUT.detailed("Collection was not found in the Library.") + + if not identifier.licensed_through_collection(collection): + return INVALID_INPUT.detailed( + "This Identifier was not found in the Collection." + ) + + try: + data = PlaytimeEntriesPost(**flask.request.json) + except ValidationError as ex: + return INVALID_INPUT.detailed(ex.json()) + + responses, summary = PlaytimeEntries.insert_playtime_entries( + self._db, identifier, collection, library, data + ) + + response_data = PlaytimeEntriesPostResponse( + summary=summary, responses=responses + ) + response = flask.jsonify(response_data.dict()) + response.status_code = 207 + return response diff --git a/api/controller/profile.py b/api/controller/profile.py new file mode 100644 index 0000000000..8cf6d393eb --- /dev/null +++ b/api/controller/profile.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +import flask +from flask import make_response + +from api.authenticator import CirculationPatronProfileStorage +from api.controller.circulation_manager import CirculationManagerController +from core.user_profile import ProfileController as CoreProfileController +from core.util.problem_detail import ProblemDetail + + +class ProfileController(CirculationManagerController): + """Implement the User Profile Management Protocol.""" + + def _controller(self, patron): + """Instantiate a CoreProfileController that actually does the work.""" + storage = CirculationPatronProfileStorage(patron, flask.url_for) + return CoreProfileController(storage) + + def protocol(self): + """Handle a UPMP request.""" + patron = flask.request.patron + controller = self._controller(patron) + if flask.request.method == "GET": + result = controller.get() + else: + result = controller.put(flask.request.headers, flask.request.data) + if isinstance(result, ProblemDetail): + return result + return make_response(*result) diff --git a/api/controller/static_file.py b/api/controller/static_file.py new file mode 100644 index 0000000000..4016f09663 --- /dev/null +++ b/api/controller/static_file.py @@ -0,0 +1,27 @@ +from __future__ import annotations + +import os + +import flask + +from api.config import Configuration +from api.controller.circulation_manager import CirculationManagerController +from core.model import ConfigurationSetting + + +class StaticFileController(CirculationManagerController): + def 
static_file(self, directory, filename):
+        max_age = ConfigurationSetting.sitewide(
+            self._db, Configuration.STATIC_FILE_CACHE_TIME
+        ).int_value
+        return flask.send_from_directory(directory, filename, max_age=max_age)
+
+    def image(self, filename):
+        directory = os.path.join(
+            os.path.abspath(os.path.dirname(__file__)),
+            "..",
+            "..",
+            "resources",
+            "images",
+        )
+        return self.static_file(directory, filename)
diff --git a/api/controller/urn_lookup.py b/api/controller/urn_lookup.py
new file mode 100644
index 0000000000..fe38b113f5
--- /dev/null
+++ b/api/controller/urn_lookup.py
@@ -0,0 +1,22 @@
+from __future__ import annotations
+
+import flask
+
+from core.app_server import URNLookupController as CoreURNLookupController
+from core.feed.annotator.circulation import CirculationManagerAnnotator
+
+
+class URNLookupController(CoreURNLookupController):
+    def __init__(self, manager):
+        self.manager = manager
+        super().__init__(manager._db)
+
+    def work_lookup(self, route_name):
+        """Build a CirculationManagerAnnotator based on the current library's
+        top-level WorkList, and use it to generate an OPDS lookup
+        feed.
+        """
+        library = flask.request.library
+        top_level_worklist = self.manager.top_level_lanes[library.id]
+        annotator = CirculationManagerAnnotator(top_level_worklist)
+        return super().work_lookup(annotator, route_name)
diff --git a/api/controller/work.py b/api/controller/work.py
new file mode 100644
index 0000000000..d50928f8ae
--- /dev/null
+++ b/api/controller/work.py
@@ -0,0 +1,293 @@
+from __future__ import annotations
+
+import urllib.parse
+
+import flask
+from flask_babel import lazy_gettext as _
+
+from api.controller.circulation_manager import CirculationManagerController
+from api.lanes import (
+    ContributorFacets,
+    ContributorLane,
+    RecommendationLane,
+    RelatedBooksLane,
+    SeriesFacets,
+    SeriesLane,
+)
+from api.problem_details import NO_SUCH_LANE, NOT_FOUND_ON_REMOTE
+from core.app_server import load_pagination_from_request
+from core.config import CannotLoadConfiguration
+from core.external_search import SortKeyPagination
+from core.feed.acquisition import OPDSAcquisitionFeed
+from core.lane import FeaturedFacets, Pagination
+from core.metadata_layer import ContributorData
+from core.util.opds_writer import OPDSFeed
+from core.util.problem_detail import ProblemDetail
+
+
+class WorkController(CirculationManagerController):
+    def _lane_details(self, languages, audiences):
+        if languages:
+            languages = languages.split(",")
+        if audiences:
+            audiences = [urllib.parse.unquote_plus(a) for a in audiences.split(",")]
+
+        return languages, audiences
+
+    def contributor(
+        self, contributor_name, languages, audiences, feed_class=OPDSAcquisitionFeed
+    ):
+        """Serve a feed of books written by a particular author"""
+        library = flask.request.library
+        if not contributor_name:
+            return NO_SUCH_LANE.detailed(_("No contributor provided"))
+
+        # contributor_name is probably a display_name, but it could be a
+        # sort_name. Pass it in for both fields and
+        # ContributorData.lookup() will do its best to figure it out.
+ contributor = ContributorData.lookup( + self._db, sort_name=contributor_name, display_name=contributor_name + ) + if not contributor: + return NO_SUCH_LANE.detailed( + _("Unknown contributor: %s") % contributor_name + ) + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + languages, audiences = self._lane_details(languages, audiences) + + lane = ContributorLane( + library, contributor, languages=languages, audiences=audiences + ) + facets = self.manager.load_facets_from_request( + worklist=lane, base_class=ContributorFacets + ) + if isinstance(facets, ProblemDetail): + return facets + + pagination = load_pagination_from_request(SortKeyPagination) + if isinstance(pagination, ProblemDetail): + return pagination + + annotator = self.manager.annotator(lane, facets) + + url = annotator.feed_url( + lane, + facets=facets, + pagination=pagination, + ) + + return feed_class.page( + _db=self._db, + title=lane.display_name, + url=url, + worklist=lane, + facets=facets, + pagination=pagination, + annotator=annotator, + search_engine=search_engine, + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) + + def permalink(self, identifier_type, identifier): + """Serve an entry for a single book. + + This does not include any loan or hold-specific information for + the authenticated patron. + + This is different from the /works lookup protocol, in that it + returns a single entry while the /works lookup protocol returns a + feed containing any number of entries. + """ + library = flask.request.library + work = self.load_work(library, identifier_type, identifier) + if isinstance(work, ProblemDetail): + return work + + patron = flask.request.patron + + if patron: + pools = self.load_licensepools(library, identifier_type, identifier) + if isinstance(pools, ProblemDetail): + return pools + + loan, pool = self.get_patron_loan(patron, pools) + hold = None + + if not loan: + hold, pool = self.get_patron_hold(patron, pools) + + item = loan or hold + pool = pool or pools[0] + + return OPDSAcquisitionFeed.single_entry_loans_feed( + self.circulation, item or pool + ) + else: + annotator = self.manager.annotator(lane=None) + + return OPDSAcquisitionFeed.entry_as_response( + OPDSAcquisitionFeed.single_entry(work, annotator), + max_age=OPDSFeed.DEFAULT_MAX_AGE, + ) + + def related( + self, + identifier_type, + identifier, + novelist_api=None, + feed_class=OPDSAcquisitionFeed, + ): + """Serve a groups feed of books related to a given book.""" + + library = flask.request.library + work = self.load_work(library, identifier_type, identifier) + if work is None: + return NOT_FOUND_ON_REMOTE + + if isinstance(work, ProblemDetail): + return work + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + try: + lane_name = f"Books Related to {work.title} by {work.author}" + lane = RelatedBooksLane(library, work, lane_name, novelist_api=novelist_api) + except ValueError as e: + # No related books were found. 
+ return NO_SUCH_LANE.detailed(str(e)) + + facets = self.manager.load_facets_from_request( + worklist=lane, + base_class=FeaturedFacets, + base_class_constructor_kwargs=dict( + minimum_featured_quality=library.settings.minimum_featured_quality + ), + ) + if isinstance(facets, ProblemDetail): + return facets + + annotator = self.manager.annotator(lane) + url = annotator.feed_url( + lane, + facets=facets, + ) + + return feed_class.groups( + _db=self._db, + title=lane.DISPLAY_NAME, + url=url, + worklist=lane, + annotator=annotator, + pagination=None, + facets=facets, + search_engine=search_engine, + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) + + def recommendations( + self, + identifier_type, + identifier, + novelist_api=None, + feed_class=OPDSAcquisitionFeed, + ): + """Serve a feed of recommendations related to a given book.""" + + library = flask.request.library + work = self.load_work(library, identifier_type, identifier) + if isinstance(work, ProblemDetail): + return work + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + lane_name = f"Recommendations for {work.title} by {work.author}" + try: + lane = RecommendationLane( + library=library, + work=work, + display_name=lane_name, + novelist_api=novelist_api, + ) + except CannotLoadConfiguration as e: + # NoveList isn't configured. + return NO_SUCH_LANE.detailed(_("Recommendations not available")) + + facets = self.manager.load_facets_from_request(worklist=lane) + if isinstance(facets, ProblemDetail): + return facets + + # We use a normal Pagination object because recommendations + # are looked up in a third-party API and paginated through the + # database lookup. + pagination = load_pagination_from_request(Pagination) + if isinstance(pagination, ProblemDetail): + return pagination + + annotator = self.manager.annotator(lane) + url = annotator.feed_url( + lane, + facets=facets, + pagination=pagination, + ) + + return feed_class.page( + _db=self._db, + title=lane.DISPLAY_NAME, + url=url, + worklist=lane, + facets=facets, + pagination=pagination, + annotator=annotator, + search_engine=search_engine, + ).as_response(max_age=lane.max_cache_age()) + + def series(self, series_name, languages, audiences, feed_class=OPDSAcquisitionFeed): + """Serve a feed of books in a given series.""" + library = flask.request.library + if not series_name: + return NO_SUCH_LANE.detailed(_("No series provided")) + + search_engine = self.search_engine + if isinstance(search_engine, ProblemDetail): + return search_engine + + languages, audiences = self._lane_details(languages, audiences) + lane = SeriesLane( + library, series_name=series_name, languages=languages, audiences=audiences + ) + + facets = self.manager.load_facets_from_request( + worklist=lane, base_class=SeriesFacets + ) + if isinstance(facets, ProblemDetail): + return facets + + pagination = load_pagination_from_request(SortKeyPagination) + if isinstance(pagination, ProblemDetail): + return pagination + + annotator = self.manager.annotator(lane) + + url = annotator.feed_url(lane, facets=facets, pagination=pagination) + return feed_class.page( + _db=self._db, + title=lane.display_name, + url=url, + worklist=lane, + facets=facets, + pagination=pagination, + annotator=annotator, + search_engine=search_engine, + ).as_response( + max_age=lane.max_cache_age(), mime_types=flask.request.accept_mimetypes + ) diff --git a/api/discovery/registration_script.py b/api/discovery/registration_script.py index 
4d75cd7b5f..0e5ba71f21 100644 --- a/api/discovery/registration_script.py +++ b/api/discovery/registration_script.py @@ -6,8 +6,8 @@ from flask import url_for from sqlalchemy.orm import Session +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.discovery.opds_registration import OpdsRegistrationService from api.integration.registry.discovery import DiscoveryRegistry from api.util.flask import PalaceFlask diff --git a/api/util/flask.py b/api/util/flask.py index 88ff868d33..bcf5d0f0db 100644 --- a/api/util/flask.py +++ b/api/util/flask.py @@ -1,7 +1,7 @@ from flask import Flask from sqlalchemy.orm import Session -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager class PalaceFlask(Flask): diff --git a/core/service/container.py b/core/service/container.py index b05986a57a..273dbdc3b9 100644 --- a/core/service/container.py +++ b/core/service/container.py @@ -46,7 +46,7 @@ def create_container() -> Services: "api.axis", "api.bibliotheca", "api.enki", - "api.controller", + "api.circulation_manager", "api.overdrive", "core.feed.annotator.circulation", ] diff --git a/pyproject.toml b/pyproject.toml index 6bbdf443e0..02f03c1645 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -82,7 +82,7 @@ module = [ "api.adobe_vendor_id", "api.axis", "api.circulation", - "api.controller_marc", + "api.controller.marc", "api.discovery.*", "api.enki", "api.integration.*", diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index 9cbd5561c3..9c9984d64b 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -11,7 +11,7 @@ from api.admin import routes from api.admin.controller import setup_admin_controllers from api.admin.problem_details import * -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from core.util.problem_detail import ProblemDetail, ProblemError from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.api_controller import ControllerFixture diff --git a/tests/api/controller/__init__.py b/tests/api/controller/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/api/test_controller_analytics.py b/tests/api/controller/test_analytics.py similarity index 100% rename from tests/api/test_controller_analytics.py rename to tests/api/controller/test_analytics.py diff --git a/tests/api/test_controller_annotation.py b/tests/api/controller/test_annotation.py similarity index 100% rename from tests/api/test_controller_annotation.py rename to tests/api/controller/test_annotation.py diff --git a/tests/api/test_controller_base.py b/tests/api/controller/test_base.py similarity index 99% rename from tests/api/test_controller_base.py rename to tests/api/controller/test_base.py index 6e20a57c10..bafee80e22 100644 --- a/tests/api/test_controller_base.py +++ b/tests/api/controller/test_base.py @@ -86,7 +86,7 @@ def test_authenticated_patron_from_request( # No authorization header -> 401 error. 
with patch( - "api.base_controller.BaseCirculationManagerController.authorization_header", + "api.controller.base.BaseCirculationManagerController.authorization_header", lambda x: None, ): with circulation_fixture.request_context_with_library("/"): @@ -101,7 +101,7 @@ def remote_failure(self, header): raise RemoteInitiatedServerError("argh", "service") with patch( - "api.base_controller.BaseCirculationManagerController.authenticated_patron", + "api.controller.base.BaseCirculationManagerController.authenticated_patron", remote_failure, ): with circulation_fixture.request_context_with_library( @@ -118,7 +118,7 @@ def remote_failure(self, header): # Credentials provided but don't identify anyone in particular # -> 401 error. with patch( - "api.base_controller.BaseCirculationManagerController.authenticated_patron", + "api.controller.base.BaseCirculationManagerController.authenticated_patron", lambda self, x: None, ): with circulation_fixture.request_context_with_library( diff --git a/tests/api/test_controller_crawlfeed.py b/tests/api/controller/test_crawlfeed.py similarity index 100% rename from tests/api/test_controller_crawlfeed.py rename to tests/api/controller/test_crawlfeed.py diff --git a/tests/api/test_controller_fixture.py b/tests/api/controller/test_fixture.py similarity index 100% rename from tests/api/test_controller_fixture.py rename to tests/api/controller/test_fixture.py diff --git a/tests/api/test_controller_index.py b/tests/api/controller/test_index.py similarity index 100% rename from tests/api/test_controller_index.py rename to tests/api/controller/test_index.py diff --git a/tests/api/test_controller_loan.py b/tests/api/controller/test_loan.py similarity index 99% rename from tests/api/test_controller_loan.py rename to tests/api/controller/test_loan.py index a46188a4a8..af16e530be 100644 --- a/tests/api/test_controller_loan.py +++ b/tests/api/controller/test_loan.py @@ -872,7 +872,7 @@ def test_fulfill_without_single_item_feed(self, loan_fixture: LoanFixture): authenticated = controller.authenticated_patron_from_request() loan_fixture.pool.loan_to(authenticated) with patch( - "api.controller.OPDSAcquisitionFeed.single_entry_loans_feed" + "api.controller.opds_feed.OPDSAcquisitionFeed.single_entry_loans_feed" ) as feed, patch.object(circulation, "fulfill") as fulfill: # Complex setup # The fulfillmentInfo should not be have response type diff --git a/tests/api/test_controller_marc.py b/tests/api/controller/test_marc.py similarity index 99% rename from tests/api/test_controller_marc.py rename to tests/api/controller/test_marc.py index 0ddc6f5637..8d43b60a71 100644 --- a/tests/api/test_controller_marc.py +++ b/tests/api/controller/test_marc.py @@ -7,7 +7,7 @@ import pytest from flask import Response -from api.controller_marc import MARCRecordController +from api.controller.marc import MARCRecordController from core.integration.goals import Goals from core.marc import MARCExporter from core.model import Collection, Library, MarcFile, create diff --git a/tests/api/test_controller_multilib.py b/tests/api/controller/test_multilib.py similarity index 100% rename from tests/api/test_controller_multilib.py rename to tests/api/controller/test_multilib.py diff --git a/tests/api/test_controller_odl_notify.py b/tests/api/controller/test_odl_notify.py similarity index 100% rename from tests/api/test_controller_odl_notify.py rename to tests/api/controller/test_odl_notify.py diff --git a/tests/api/test_controller_opdsfeed.py b/tests/api/controller/test_opds_feed.py similarity index 99% rename 
from tests/api/test_controller_opdsfeed.py rename to tests/api/controller/test_opds_feed.py index e237c72e63..66f321d57d 100644 --- a/tests/api/test_controller_opdsfeed.py +++ b/tests/api/controller/test_opds_feed.py @@ -6,7 +6,7 @@ import feedparser from flask import url_for -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager from api.lanes import HasSeriesFacets, JackpotFacets, JackpotWorkList from api.problem_details import REMOTE_INTEGRATION_FAILED from core.app_server import load_facets_from_request diff --git a/tests/api/test_controller_patron_access_token.py b/tests/api/controller/test_patron_access_token.py similarity index 96% rename from tests/api/test_controller_patron_access_token.py rename to tests/api/controller/test_patron_access_token.py index ef4cca8d70..9c4a51dff9 100644 --- a/tests/api/test_controller_patron_access_token.py +++ b/tests/api/controller/test_patron_access_token.py @@ -8,7 +8,7 @@ from tests.fixtures.database import DatabaseTransactionFixture if TYPE_CHECKING: - from api.controller import PatronAuthTokenController + from api.controller.patron_auth_token import PatronAuthTokenController class PatronAuthTokenControllerFixture(CirculationControllerFixture): diff --git a/tests/api/test_controller_playtime_entries.py b/tests/api/controller/test_playtime_entries.py similarity index 100% rename from tests/api/test_controller_playtime_entries.py rename to tests/api/controller/test_playtime_entries.py diff --git a/tests/api/test_controller_profile.py b/tests/api/controller/test_profile.py similarity index 100% rename from tests/api/test_controller_profile.py rename to tests/api/controller/test_profile.py diff --git a/tests/api/test_controller_scopedsession.py b/tests/api/controller/test_scopedsession.py similarity index 100% rename from tests/api/test_controller_scopedsession.py rename to tests/api/controller/test_scopedsession.py diff --git a/tests/api/test_controller_staticfile.py b/tests/api/controller/test_staticfile.py similarity index 100% rename from tests/api/test_controller_staticfile.py rename to tests/api/controller/test_staticfile.py diff --git a/tests/api/test_controller_urn_lookup.py b/tests/api/controller/test_urn_lookup.py similarity index 100% rename from tests/api/test_controller_urn_lookup.py rename to tests/api/controller/test_urn_lookup.py diff --git a/tests/api/test_controller_work.py b/tests/api/controller/test_work.py similarity index 100% rename from tests/api/test_controller_work.py rename to tests/api/controller/test_work.py diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index a6181c4216..3a588a3151 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -11,7 +11,7 @@ LoanInfo, PatronActivityCirculationAPI, ) -from api.controller import CirculationManager +from api.circulation_manager import CirculationManager from core.external_search import ExternalSearchIndex from core.integration.settings import BaseSettings from core.model import DataSource, Hold, Loan, get_one_or_create diff --git a/tests/api/test_controller_cm.py b/tests/api/test_controller_cm.py index a43de03acd..7be453e768 100644 --- a/tests/api/test_controller_cm.py +++ b/tests/api/test_controller_cm.py @@ -1,8 +1,8 @@ from unittest.mock import MagicMock from api.authenticator import LibraryAuthenticator +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from api.custom_index 
import CustomIndexView from api.problem_details import * from core.feed.annotator.circulation import ( diff --git a/tests/api/test_device_tokens.py b/tests/api/test_device_tokens.py index 1a9775b808..f807a1f8ad 100644 --- a/tests/api/test_device_tokens.py +++ b/tests/api/test_device_tokens.py @@ -5,7 +5,7 @@ from tests.fixtures.api_controller import ControllerFixture -@patch("api.controller.flask") +@patch("api.controller.device_tokens.flask") class TestDeviceTokens: def test_create_invalid_type(self, flask, controller_fixture: ControllerFixture): db = controller_fixture.db diff --git a/tests/fixtures/api_admin.py b/tests/fixtures/api_admin.py index 2f0df6d32f..d1e1e3647d 100644 --- a/tests/fixtures/api_admin.py +++ b/tests/fixtures/api_admin.py @@ -6,8 +6,8 @@ from api.admin.controller import setup_admin_controllers from api.app import initialize_admin +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager from core.integration.goals import Goals from core.model import create from core.model.admin import Admin, AdminRole diff --git a/tests/fixtures/api_controller.py b/tests/fixtures/api_controller.py index cffdf7175a..207e4026a2 100644 --- a/tests/fixtures/api_controller.py +++ b/tests/fixtures/api_controller.py @@ -11,8 +11,9 @@ from api.adobe_vendor_id import AuthdataUtility from api.app import app +from api.circulation_manager import CirculationManager from api.config import Configuration -from api.controller import CirculationManager, CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from api.integration.registry.patron_auth import PatronAuthRegistry from api.lanes import create_default_lanes from api.simple_authentication import SimpleAuthenticationProvider diff --git a/tests/fixtures/api_routes.py b/tests/fixtures/api_routes.py index f1010e456c..2dce5f9cdb 100644 --- a/tests/fixtures/api_routes.py +++ b/tests/fixtures/api_routes.py @@ -6,7 +6,7 @@ from werkzeug.exceptions import MethodNotAllowed from api import routes -from api.controller import CirculationManagerController +from api.controller.circulation_manager import CirculationManagerController from tests.api.mockapi.circulation import MockCirculationManager from tests.fixtures.api_controller import ControllerFixture from tests.fixtures.database import DatabaseTransactionFixture From cb1ad8aea91b2506da279832d8ac9a4e77ec08b4 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 14 Dec 2023 13:03:36 -0400 Subject: [PATCH 222/262] Fix flakey marc test. 
(#1571) --- tests/core/test_marc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tests/core/test_marc.py b/tests/core/test_marc.py index 940139ec4f..fad3fee05d 100644 --- a/tests/core/test_marc.py +++ b/tests/core/test_marc.py @@ -770,11 +770,11 @@ def test_records( assert len(records) == 2 title_fields = [record.get_fields("245") for record in records] - titles = [fields[0].get_subfields("a")[0] for fields in title_fields] - assert titles == [ + titles = {fields[0].get_subfields("a")[0] for fields in title_fields} + assert titles == { marc_exporter_fixture.w1.title, marc_exporter_fixture.w2.title, - ] + } def test_records_since_time( self, From 222298c1554292c0cd26e4b31bc89f7b125e3e2d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 14 Dec 2023 17:19:32 +0000 Subject: [PATCH 223/262] Bump boto3 from 1.33.5 to 1.34.0 (#1570) --- poetry.lock | 874 ++++++++++++++++++++++++++-------------------------- 1 file changed, 437 insertions(+), 437 deletions(-) diff --git a/poetry.lock b/poetry.lock index fd3bf24d4a..ca7238a40e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -161,437 +161,437 @@ files = [ [[package]] name = "boto3" -version = "1.33.5" +version = "1.34.0" description = "The AWS SDK for Python" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "boto3-1.33.5-py3-none-any.whl", hash = "sha256:fcc24f62a1f512dd9b4a7a8af6f5fbfb3d69842a92aa2e79c2ca551ac49a4757"}, - {file = "boto3-1.33.5.tar.gz", hash = "sha256:6a1d938bbf11518b1d17ca8186168f3ba2a0e8b2bf3c82cdd810ecb884627d2a"}, + {file = "boto3-1.34.0-py3-none-any.whl", hash = "sha256:8b3c4d4e720c0ad706590c284b8f30c76de3472c1ce1bac610425f99bf6ab53b"}, + {file = "boto3-1.34.0.tar.gz", hash = "sha256:c9b400529932ed4652304756528ab235c6730aa5d00cb4d9e4848ce460c82c16"}, ] [package.dependencies] -botocore = ">=1.33.5,<1.34.0" +botocore = ">=1.34.0,<1.35.0" jmespath = ">=0.7.1,<2.0.0" -s3transfer = ">=0.8.2,<0.9.0" +s3transfer = ">=0.9.0,<0.10.0" [package.extras] crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] [[package]] name = "boto3-stubs" -version = "1.33.5" -description = "Type annotations for boto3 1.33.5 generated with mypy-boto3-builder 7.21.0" +version = "1.34.0" +description = "Type annotations for boto3 1.34.0 generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "boto3-stubs-1.33.5.tar.gz", hash = "sha256:40d7a52e60d477822655938083be43a9097a405f1d748ce86f5233685e0cddcc"}, - {file = "boto3_stubs-1.33.5-py3-none-any.whl", hash = "sha256:4f19917a817f5530c5a05924ff009929218664c75140f47fd57e3ba6d477ab48"}, + {file = "boto3-stubs-1.34.0.tar.gz", hash = "sha256:39ad9a9ab399c012713a719d90feb1dee819d310f136a0c7d7fddc477d7f251e"}, + {file = "boto3_stubs-1.34.0-py3-none-any.whl", hash = "sha256:477b7da7432ab26123324249411111f2350e0b5ef0418e7bc1124600c810ac41"}, ] [package.dependencies] -boto3 = {version = "1.33.5", optional = true, markers = "extra == \"boto3\""} -botocore = {version = "1.33.5", optional = true, markers = "extra == \"boto3\""} +boto3 = {version = "1.34.0", optional = true, markers = "extra == \"boto3\""} +botocore = {version = "1.34.0", optional = true, markers = "extra == \"boto3\""} botocore-stubs = "*" -mypy-boto3-cloudformation = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-dynamodb = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-ec2 = {version = 
">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-lambda = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-logs = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"logs\""} -mypy-boto3-rds = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} -mypy-boto3-s3 = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} -mypy-boto3-sqs = {version = ">=1.33.0,<1.34.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-cloudformation = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-dynamodb = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-ec2 = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-lambda = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-logs = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"logs\""} +mypy-boto3-rds = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} +mypy-boto3-s3 = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\" or extra == \"s3\""} +mypy-boto3-sqs = {version = ">=1.34.0,<1.35.0", optional = true, markers = "extra == \"essential\""} types-s3transfer = "*" typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [package.extras] -accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)"] -account = ["mypy-boto3-account (>=1.33.0,<1.34.0)"] -acm = ["mypy-boto3-acm (>=1.33.0,<1.34.0)"] -acm-pca = ["mypy-boto3-acm-pca (>=1.33.0,<1.34.0)"] -alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)"] -all = ["mypy-boto3-accessanalyzer (>=1.33.0,<1.34.0)", "mypy-boto3-account (>=1.33.0,<1.34.0)", "mypy-boto3-acm (>=1.33.0,<1.34.0)", "mypy-boto3-acm-pca (>=1.33.0,<1.34.0)", "mypy-boto3-alexaforbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-amp (>=1.33.0,<1.34.0)", "mypy-boto3-amplify (>=1.33.0,<1.34.0)", "mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)", "mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)", "mypy-boto3-apigateway (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)", "mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)", "mypy-boto3-appconfig (>=1.33.0,<1.34.0)", "mypy-boto3-appconfigdata (>=1.33.0,<1.34.0)", "mypy-boto3-appfabric (>=1.33.0,<1.34.0)", "mypy-boto3-appflow (>=1.33.0,<1.34.0)", "mypy-boto3-appintegrations (>=1.33.0,<1.34.0)", "mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-application-insights (>=1.33.0,<1.34.0)", "mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-appmesh (>=1.33.0,<1.34.0)", "mypy-boto3-apprunner (>=1.33.0,<1.34.0)", "mypy-boto3-appstream (>=1.33.0,<1.34.0)", "mypy-boto3-appsync (>=1.33.0,<1.34.0)", "mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)", "mypy-boto3-athena (>=1.33.0,<1.34.0)", "mypy-boto3-auditmanager (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling (>=1.33.0,<1.34.0)", "mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)", "mypy-boto3-b2bi (>=1.33.0,<1.34.0)", "mypy-boto3-backup (>=1.33.0,<1.34.0)", "mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)", "mypy-boto3-backupstorage (>=1.33.0,<1.34.0)", "mypy-boto3-batch (>=1.33.0,<1.34.0)", "mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)", 
"mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-billingconductor (>=1.33.0,<1.34.0)", "mypy-boto3-braket (>=1.33.0,<1.34.0)", "mypy-boto3-budgets (>=1.33.0,<1.34.0)", "mypy-boto3-ce (>=1.33.0,<1.34.0)", "mypy-boto3-chime (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)", "mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)", "mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)", "mypy-boto3-cleanroomsml (>=1.33.0,<1.34.0)", "mypy-boto3-cloud9 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)", "mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)", "mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront (>=1.33.0,<1.34.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)", "mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)", "mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)", "mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)", "mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)", "mypy-boto3-codeartifact (>=1.33.0,<1.34.0)", "mypy-boto3-codebuild (>=1.33.0,<1.34.0)", "mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)", "mypy-boto3-codecommit (>=1.33.0,<1.34.0)", "mypy-boto3-codedeploy (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)", "mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)", "mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)", "mypy-boto3-codepipeline (>=1.33.0,<1.34.0)", "mypy-boto3-codestar (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)", "mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-idp (>=1.33.0,<1.34.0)", "mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)", "mypy-boto3-comprehend (>=1.33.0,<1.34.0)", "mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)", "mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)", "mypy-boto3-config (>=1.33.0,<1.34.0)", "mypy-boto3-connect (>=1.33.0,<1.34.0)", "mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)", "mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)", "mypy-boto3-connectcases (>=1.33.0,<1.34.0)", "mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)", "mypy-boto3-controltower (>=1.33.0,<1.34.0)", "mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)", "mypy-boto3-cur (>=1.33.0,<1.34.0)", "mypy-boto3-customer-profiles (>=1.33.0,<1.34.0)", "mypy-boto3-databrew (>=1.33.0,<1.34.0)", "mypy-boto3-dataexchange (>=1.33.0,<1.34.0)", "mypy-boto3-datapipeline (>=1.33.0,<1.34.0)", "mypy-boto3-datasync (>=1.33.0,<1.34.0)", "mypy-boto3-datazone (>=1.33.0,<1.34.0)", "mypy-boto3-dax (>=1.33.0,<1.34.0)", "mypy-boto3-detective (>=1.33.0,<1.34.0)", "mypy-boto3-devicefarm (>=1.33.0,<1.34.0)", "mypy-boto3-devops-guru (>=1.33.0,<1.34.0)", "mypy-boto3-directconnect (>=1.33.0,<1.34.0)", "mypy-boto3-discovery (>=1.33.0,<1.34.0)", "mypy-boto3-dlm (>=1.33.0,<1.34.0)", "mypy-boto3-dms (>=1.33.0,<1.34.0)", "mypy-boto3-docdb (>=1.33.0,<1.34.0)", "mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)", "mypy-boto3-drs (>=1.33.0,<1.34.0)", "mypy-boto3-ds (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)", "mypy-boto3-ebs (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)", "mypy-boto3-ecr 
(>=1.33.0,<1.34.0)", "mypy-boto3-ecr-public (>=1.33.0,<1.34.0)", "mypy-boto3-ecs (>=1.33.0,<1.34.0)", "mypy-boto3-efs (>=1.33.0,<1.34.0)", "mypy-boto3-eks (>=1.33.0,<1.34.0)", "mypy-boto3-eks-auth (>=1.33.0,<1.34.0)", "mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)", "mypy-boto3-elasticache (>=1.33.0,<1.34.0)", "mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)", "mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)", "mypy-boto3-elb (>=1.33.0,<1.34.0)", "mypy-boto3-elbv2 (>=1.33.0,<1.34.0)", "mypy-boto3-emr (>=1.33.0,<1.34.0)", "mypy-boto3-emr-containers (>=1.33.0,<1.34.0)", "mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-entityresolution (>=1.33.0,<1.34.0)", "mypy-boto3-es (>=1.33.0,<1.34.0)", "mypy-boto3-events (>=1.33.0,<1.34.0)", "mypy-boto3-evidently (>=1.33.0,<1.34.0)", "mypy-boto3-finspace (>=1.33.0,<1.34.0)", "mypy-boto3-finspace-data (>=1.33.0,<1.34.0)", "mypy-boto3-firehose (>=1.33.0,<1.34.0)", "mypy-boto3-fis (>=1.33.0,<1.34.0)", "mypy-boto3-fms (>=1.33.0,<1.34.0)", "mypy-boto3-forecast (>=1.33.0,<1.34.0)", "mypy-boto3-forecastquery (>=1.33.0,<1.34.0)", "mypy-boto3-frauddetector (>=1.33.0,<1.34.0)", "mypy-boto3-freetier (>=1.33.0,<1.34.0)", "mypy-boto3-fsx (>=1.33.0,<1.34.0)", "mypy-boto3-gamelift (>=1.33.0,<1.34.0)", "mypy-boto3-glacier (>=1.33.0,<1.34.0)", "mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)", "mypy-boto3-glue (>=1.33.0,<1.34.0)", "mypy-boto3-grafana (>=1.33.0,<1.34.0)", "mypy-boto3-greengrass (>=1.33.0,<1.34.0)", "mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)", "mypy-boto3-groundstation (>=1.33.0,<1.34.0)", "mypy-boto3-guardduty (>=1.33.0,<1.34.0)", "mypy-boto3-health (>=1.33.0,<1.34.0)", "mypy-boto3-healthlake (>=1.33.0,<1.34.0)", "mypy-boto3-honeycode (>=1.33.0,<1.34.0)", "mypy-boto3-iam (>=1.33.0,<1.34.0)", "mypy-boto3-identitystore (>=1.33.0,<1.34.0)", "mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)", "mypy-boto3-importexport (>=1.33.0,<1.34.0)", "mypy-boto3-inspector (>=1.33.0,<1.34.0)", "mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)", "mypy-boto3-inspector2 (>=1.33.0,<1.34.0)", "mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)", "mypy-boto3-iot (>=1.33.0,<1.34.0)", "mypy-boto3-iot-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)", "mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)", "mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)", "mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents (>=1.33.0,<1.34.0)", "mypy-boto3-iotevents-data (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleethub (>=1.33.0,<1.34.0)", "mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)", "mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)", "mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)", "mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)", "mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)", "mypy-boto3-iotwireless (>=1.33.0,<1.34.0)", "mypy-boto3-ivs (>=1.33.0,<1.34.0)", "mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)", "mypy-boto3-ivschat (>=1.33.0,<1.34.0)", "mypy-boto3-kafka (>=1.33.0,<1.34.0)", "mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-kendra (>=1.33.0,<1.34.0)", "mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)", "mypy-boto3-keyspaces (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)", 
"mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)", "mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)", "mypy-boto3-kms (>=1.33.0,<1.34.0)", "mypy-boto3-lakeformation (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", "mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)", "mypy-boto3-lex-models (>=1.33.0,<1.34.0)", "mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)", "mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)", "mypy-boto3-lightsail (>=1.33.0,<1.34.0)", "mypy-boto3-location (>=1.33.0,<1.34.0)", "mypy-boto3-logs (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)", "mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)", "mypy-boto3-m2 (>=1.33.0,<1.34.0)", "mypy-boto3-machinelearning (>=1.33.0,<1.34.0)", "mypy-boto3-macie2 (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)", "mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-agreement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-deployment (>=1.33.0,<1.34.0)", "mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)", "mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)", "mypy-boto3-medialive (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)", "mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore (>=1.33.0,<1.34.0)", "mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)", "mypy-boto3-mediatailor (>=1.33.0,<1.34.0)", "mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)", "mypy-boto3-memorydb (>=1.33.0,<1.34.0)", "mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)", "mypy-boto3-mgh (>=1.33.0,<1.34.0)", "mypy-boto3-mgn (>=1.33.0,<1.34.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)", "mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)", "mypy-boto3-mobile (>=1.33.0,<1.34.0)", "mypy-boto3-mq (>=1.33.0,<1.34.0)", "mypy-boto3-mturk (>=1.33.0,<1.34.0)", "mypy-boto3-mwaa (>=1.33.0,<1.34.0)", "mypy-boto3-neptune (>=1.33.0,<1.34.0)", "mypy-boto3-neptunedata (>=1.33.0,<1.34.0)", "mypy-boto3-network-firewall (>=1.33.0,<1.34.0)", "mypy-boto3-networkmanager (>=1.33.0,<1.34.0)", "mypy-boto3-nimble (>=1.33.0,<1.34.0)", "mypy-boto3-oam (>=1.33.0,<1.34.0)", "mypy-boto3-omics (>=1.33.0,<1.34.0)", "mypy-boto3-opensearch (>=1.33.0,<1.34.0)", "mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)", "mypy-boto3-opsworks (>=1.33.0,<1.34.0)", "mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)", "mypy-boto3-organizations (>=1.33.0,<1.34.0)", "mypy-boto3-osis (>=1.33.0,<1.34.0)", "mypy-boto3-outposts (>=1.33.0,<1.34.0)", "mypy-boto3-panorama (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)", "mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)", "mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)", "mypy-boto3-personalize (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-events (>=1.33.0,<1.34.0)", "mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-pi (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)", 
"mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)", "mypy-boto3-pipes (>=1.33.0,<1.34.0)", "mypy-boto3-polly (>=1.33.0,<1.34.0)", "mypy-boto3-pricing (>=1.33.0,<1.34.0)", "mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)", "mypy-boto3-proton (>=1.33.0,<1.34.0)", "mypy-boto3-qbusiness (>=1.33.0,<1.34.0)", "mypy-boto3-qconnect (>=1.33.0,<1.34.0)", "mypy-boto3-qldb (>=1.33.0,<1.34.0)", "mypy-boto3-qldb-session (>=1.33.0,<1.34.0)", "mypy-boto3-quicksight (>=1.33.0,<1.34.0)", "mypy-boto3-ram (>=1.33.0,<1.34.0)", "mypy-boto3-rbin (>=1.33.0,<1.34.0)", "mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-rds-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-data (>=1.33.0,<1.34.0)", "mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)", "mypy-boto3-rekognition (>=1.33.0,<1.34.0)", "mypy-boto3-repostspace (>=1.33.0,<1.34.0)", "mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)", "mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)", "mypy-boto3-resource-groups (>=1.33.0,<1.34.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)", "mypy-boto3-robomaker (>=1.33.0,<1.34.0)", "mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)", "mypy-boto3-route53 (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)", "mypy-boto3-route53-recovery-readiness (>=1.33.0,<1.34.0)", "mypy-boto3-route53domains (>=1.33.0,<1.34.0)", "mypy-boto3-route53resolver (>=1.33.0,<1.34.0)", "mypy-boto3-rum (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-s3control (>=1.33.0,<1.34.0)", "mypy-boto3-s3outposts (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)", "mypy-boto3-sagemaker-runtime (>=1.33.0,<1.34.0)", "mypy-boto3-savingsplans (>=1.33.0,<1.34.0)", "mypy-boto3-scheduler (>=1.33.0,<1.34.0)", "mypy-boto3-schemas (>=1.33.0,<1.34.0)", "mypy-boto3-sdb (>=1.33.0,<1.34.0)", "mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)", "mypy-boto3-securityhub (>=1.33.0,<1.34.0)", "mypy-boto3-securitylake (>=1.33.0,<1.34.0)", "mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)", "mypy-boto3-service-quotas (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)", "mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)", "mypy-boto3-servicediscovery (>=1.33.0,<1.34.0)", "mypy-boto3-ses (>=1.33.0,<1.34.0)", "mypy-boto3-sesv2 (>=1.33.0,<1.34.0)", "mypy-boto3-shield (>=1.33.0,<1.34.0)", "mypy-boto3-signer (>=1.33.0,<1.34.0)", "mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)", "mypy-boto3-sms (>=1.33.0,<1.34.0)", "mypy-boto3-sms-voice (>=1.33.0,<1.34.0)", "mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)", "mypy-boto3-snowball (>=1.33.0,<1.34.0)", "mypy-boto3-sns (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)", "mypy-boto3-ssm (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)", "mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)", "mypy-boto3-sso (>=1.33.0,<1.34.0)", "mypy-boto3-sso-admin (>=1.33.0,<1.34.0)", "mypy-boto3-sso-oidc (>=1.33.0,<1.34.0)", "mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)", "mypy-boto3-storagegateway (>=1.33.0,<1.34.0)", "mypy-boto3-sts (>=1.33.0,<1.34.0)", "mypy-boto3-support (>=1.33.0,<1.34.0)", 
"mypy-boto3-support-app (>=1.33.0,<1.34.0)", "mypy-boto3-swf (>=1.33.0,<1.34.0)", "mypy-boto3-synthetics (>=1.33.0,<1.34.0)", "mypy-boto3-textract (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-query (>=1.33.0,<1.34.0)", "mypy-boto3-timestream-write (>=1.33.0,<1.34.0)", "mypy-boto3-tnb (>=1.33.0,<1.34.0)", "mypy-boto3-transcribe (>=1.33.0,<1.34.0)", "mypy-boto3-transfer (>=1.33.0,<1.34.0)", "mypy-boto3-translate (>=1.33.0,<1.34.0)", "mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)", "mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)", "mypy-boto3-voice-id (>=1.33.0,<1.34.0)", "mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)", "mypy-boto3-waf (>=1.33.0,<1.34.0)", "mypy-boto3-waf-regional (>=1.33.0,<1.34.0)", "mypy-boto3-wafv2 (>=1.33.0,<1.34.0)", "mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)", "mypy-boto3-wisdom (>=1.33.0,<1.34.0)", "mypy-boto3-workdocs (>=1.33.0,<1.34.0)", "mypy-boto3-worklink (>=1.33.0,<1.34.0)", "mypy-boto3-workmail (>=1.33.0,<1.34.0)", "mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)", "mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)", "mypy-boto3-xray (>=1.33.0,<1.34.0)"] -amp = ["mypy-boto3-amp (>=1.33.0,<1.34.0)"] -amplify = ["mypy-boto3-amplify (>=1.33.0,<1.34.0)"] -amplifybackend = ["mypy-boto3-amplifybackend (>=1.33.0,<1.34.0)"] -amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.33.0,<1.34.0)"] -apigateway = ["mypy-boto3-apigateway (>=1.33.0,<1.34.0)"] -apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.33.0,<1.34.0)"] -apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.33.0,<1.34.0)"] -appconfig = ["mypy-boto3-appconfig (>=1.33.0,<1.34.0)"] -appconfigdata = ["mypy-boto3-appconfigdata (>=1.33.0,<1.34.0)"] -appfabric = ["mypy-boto3-appfabric (>=1.33.0,<1.34.0)"] -appflow = ["mypy-boto3-appflow (>=1.33.0,<1.34.0)"] -appintegrations = ["mypy-boto3-appintegrations (>=1.33.0,<1.34.0)"] -application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.33.0,<1.34.0)"] -application-insights = ["mypy-boto3-application-insights (>=1.33.0,<1.34.0)"] -applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.33.0,<1.34.0)"] -appmesh = ["mypy-boto3-appmesh (>=1.33.0,<1.34.0)"] -apprunner = ["mypy-boto3-apprunner (>=1.33.0,<1.34.0)"] -appstream = ["mypy-boto3-appstream (>=1.33.0,<1.34.0)"] -appsync = ["mypy-boto3-appsync (>=1.33.0,<1.34.0)"] -arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.33.0,<1.34.0)"] -athena = ["mypy-boto3-athena (>=1.33.0,<1.34.0)"] -auditmanager = ["mypy-boto3-auditmanager (>=1.33.0,<1.34.0)"] -autoscaling = ["mypy-boto3-autoscaling (>=1.33.0,<1.34.0)"] -autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.33.0,<1.34.0)"] -b2bi = ["mypy-boto3-b2bi (>=1.33.0,<1.34.0)"] -backup = ["mypy-boto3-backup (>=1.33.0,<1.34.0)"] -backup-gateway = ["mypy-boto3-backup-gateway (>=1.33.0,<1.34.0)"] -backupstorage = ["mypy-boto3-backupstorage (>=1.33.0,<1.34.0)"] -batch = ["mypy-boto3-batch (>=1.33.0,<1.34.0)"] -bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.33.0,<1.34.0)"] -bedrock = ["mypy-boto3-bedrock (>=1.33.0,<1.34.0)"] -bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.33.0,<1.34.0)"] -bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.33.0,<1.34.0)"] -bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.33.0,<1.34.0)"] -billingconductor = ["mypy-boto3-billingconductor (>=1.33.0,<1.34.0)"] -boto3 = ["boto3 (==1.33.5)", "botocore (==1.33.5)"] -braket = ["mypy-boto3-braket (>=1.33.0,<1.34.0)"] -budgets = ["mypy-boto3-budgets 
(>=1.33.0,<1.34.0)"] -ce = ["mypy-boto3-ce (>=1.33.0,<1.34.0)"] -chime = ["mypy-boto3-chime (>=1.33.0,<1.34.0)"] -chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.33.0,<1.34.0)"] -chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.33.0,<1.34.0)"] -chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.33.0,<1.34.0)"] -chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.33.0,<1.34.0)"] -chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.33.0,<1.34.0)"] -cleanrooms = ["mypy-boto3-cleanrooms (>=1.33.0,<1.34.0)"] -cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.33.0,<1.34.0)"] -cloud9 = ["mypy-boto3-cloud9 (>=1.33.0,<1.34.0)"] -cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.33.0,<1.34.0)"] -clouddirectory = ["mypy-boto3-clouddirectory (>=1.33.0,<1.34.0)"] -cloudformation = ["mypy-boto3-cloudformation (>=1.33.0,<1.34.0)"] -cloudfront = ["mypy-boto3-cloudfront (>=1.33.0,<1.34.0)"] -cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.33.0,<1.34.0)"] -cloudhsm = ["mypy-boto3-cloudhsm (>=1.33.0,<1.34.0)"] -cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.33.0,<1.34.0)"] -cloudsearch = ["mypy-boto3-cloudsearch (>=1.33.0,<1.34.0)"] -cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.33.0,<1.34.0)"] -cloudtrail = ["mypy-boto3-cloudtrail (>=1.33.0,<1.34.0)"] -cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.33.0,<1.34.0)"] -cloudwatch = ["mypy-boto3-cloudwatch (>=1.33.0,<1.34.0)"] -codeartifact = ["mypy-boto3-codeartifact (>=1.33.0,<1.34.0)"] -codebuild = ["mypy-boto3-codebuild (>=1.33.0,<1.34.0)"] -codecatalyst = ["mypy-boto3-codecatalyst (>=1.33.0,<1.34.0)"] -codecommit = ["mypy-boto3-codecommit (>=1.33.0,<1.34.0)"] -codedeploy = ["mypy-boto3-codedeploy (>=1.33.0,<1.34.0)"] -codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.33.0,<1.34.0)"] -codeguru-security = ["mypy-boto3-codeguru-security (>=1.33.0,<1.34.0)"] -codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.33.0,<1.34.0)"] -codepipeline = ["mypy-boto3-codepipeline (>=1.33.0,<1.34.0)"] -codestar = ["mypy-boto3-codestar (>=1.33.0,<1.34.0)"] -codestar-connections = ["mypy-boto3-codestar-connections (>=1.33.0,<1.34.0)"] -codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.33.0,<1.34.0)"] -cognito-identity = ["mypy-boto3-cognito-identity (>=1.33.0,<1.34.0)"] -cognito-idp = ["mypy-boto3-cognito-idp (>=1.33.0,<1.34.0)"] -cognito-sync = ["mypy-boto3-cognito-sync (>=1.33.0,<1.34.0)"] -comprehend = ["mypy-boto3-comprehend (>=1.33.0,<1.34.0)"] -comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.33.0,<1.34.0)"] -compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.33.0,<1.34.0)"] -config = ["mypy-boto3-config (>=1.33.0,<1.34.0)"] -connect = ["mypy-boto3-connect (>=1.33.0,<1.34.0)"] -connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.33.0,<1.34.0)"] -connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.33.0,<1.34.0)"] -connectcases = ["mypy-boto3-connectcases (>=1.33.0,<1.34.0)"] -connectparticipant = ["mypy-boto3-connectparticipant (>=1.33.0,<1.34.0)"] -controltower = ["mypy-boto3-controltower (>=1.33.0,<1.34.0)"] -cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.33.0,<1.34.0)"] -cur = ["mypy-boto3-cur (>=1.33.0,<1.34.0)"] -customer-profiles = ["mypy-boto3-customer-profiles (>=1.33.0,<1.34.0)"] -databrew = ["mypy-boto3-databrew (>=1.33.0,<1.34.0)"] -dataexchange = ["mypy-boto3-dataexchange (>=1.33.0,<1.34.0)"] -datapipeline = ["mypy-boto3-datapipeline (>=1.33.0,<1.34.0)"] -datasync = ["mypy-boto3-datasync (>=1.33.0,<1.34.0)"] -datazone = 
["mypy-boto3-datazone (>=1.33.0,<1.34.0)"] -dax = ["mypy-boto3-dax (>=1.33.0,<1.34.0)"] -detective = ["mypy-boto3-detective (>=1.33.0,<1.34.0)"] -devicefarm = ["mypy-boto3-devicefarm (>=1.33.0,<1.34.0)"] -devops-guru = ["mypy-boto3-devops-guru (>=1.33.0,<1.34.0)"] -directconnect = ["mypy-boto3-directconnect (>=1.33.0,<1.34.0)"] -discovery = ["mypy-boto3-discovery (>=1.33.0,<1.34.0)"] -dlm = ["mypy-boto3-dlm (>=1.33.0,<1.34.0)"] -dms = ["mypy-boto3-dms (>=1.33.0,<1.34.0)"] -docdb = ["mypy-boto3-docdb (>=1.33.0,<1.34.0)"] -docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.33.0,<1.34.0)"] -drs = ["mypy-boto3-drs (>=1.33.0,<1.34.0)"] -ds = ["mypy-boto3-ds (>=1.33.0,<1.34.0)"] -dynamodb = ["mypy-boto3-dynamodb (>=1.33.0,<1.34.0)"] -dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.33.0,<1.34.0)"] -ebs = ["mypy-boto3-ebs (>=1.33.0,<1.34.0)"] -ec2 = ["mypy-boto3-ec2 (>=1.33.0,<1.34.0)"] -ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.33.0,<1.34.0)"] -ecr = ["mypy-boto3-ecr (>=1.33.0,<1.34.0)"] -ecr-public = ["mypy-boto3-ecr-public (>=1.33.0,<1.34.0)"] -ecs = ["mypy-boto3-ecs (>=1.33.0,<1.34.0)"] -efs = ["mypy-boto3-efs (>=1.33.0,<1.34.0)"] -eks = ["mypy-boto3-eks (>=1.33.0,<1.34.0)"] -eks-auth = ["mypy-boto3-eks-auth (>=1.33.0,<1.34.0)"] -elastic-inference = ["mypy-boto3-elastic-inference (>=1.33.0,<1.34.0)"] -elasticache = ["mypy-boto3-elasticache (>=1.33.0,<1.34.0)"] -elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.33.0,<1.34.0)"] -elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.33.0,<1.34.0)"] -elb = ["mypy-boto3-elb (>=1.33.0,<1.34.0)"] -elbv2 = ["mypy-boto3-elbv2 (>=1.33.0,<1.34.0)"] -emr = ["mypy-boto3-emr (>=1.33.0,<1.34.0)"] -emr-containers = ["mypy-boto3-emr-containers (>=1.33.0,<1.34.0)"] -emr-serverless = ["mypy-boto3-emr-serverless (>=1.33.0,<1.34.0)"] -entityresolution = ["mypy-boto3-entityresolution (>=1.33.0,<1.34.0)"] -es = ["mypy-boto3-es (>=1.33.0,<1.34.0)"] -essential = ["mypy-boto3-cloudformation (>=1.33.0,<1.34.0)", "mypy-boto3-dynamodb (>=1.33.0,<1.34.0)", "mypy-boto3-ec2 (>=1.33.0,<1.34.0)", "mypy-boto3-lambda (>=1.33.0,<1.34.0)", "mypy-boto3-rds (>=1.33.0,<1.34.0)", "mypy-boto3-s3 (>=1.33.0,<1.34.0)", "mypy-boto3-sqs (>=1.33.0,<1.34.0)"] -events = ["mypy-boto3-events (>=1.33.0,<1.34.0)"] -evidently = ["mypy-boto3-evidently (>=1.33.0,<1.34.0)"] -finspace = ["mypy-boto3-finspace (>=1.33.0,<1.34.0)"] -finspace-data = ["mypy-boto3-finspace-data (>=1.33.0,<1.34.0)"] -firehose = ["mypy-boto3-firehose (>=1.33.0,<1.34.0)"] -fis = ["mypy-boto3-fis (>=1.33.0,<1.34.0)"] -fms = ["mypy-boto3-fms (>=1.33.0,<1.34.0)"] -forecast = ["mypy-boto3-forecast (>=1.33.0,<1.34.0)"] -forecastquery = ["mypy-boto3-forecastquery (>=1.33.0,<1.34.0)"] -frauddetector = ["mypy-boto3-frauddetector (>=1.33.0,<1.34.0)"] -freetier = ["mypy-boto3-freetier (>=1.33.0,<1.34.0)"] -fsx = ["mypy-boto3-fsx (>=1.33.0,<1.34.0)"] -gamelift = ["mypy-boto3-gamelift (>=1.33.0,<1.34.0)"] -glacier = ["mypy-boto3-glacier (>=1.33.0,<1.34.0)"] -globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.33.0,<1.34.0)"] -glue = ["mypy-boto3-glue (>=1.33.0,<1.34.0)"] -grafana = ["mypy-boto3-grafana (>=1.33.0,<1.34.0)"] -greengrass = ["mypy-boto3-greengrass (>=1.33.0,<1.34.0)"] -greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.33.0,<1.34.0)"] -groundstation = ["mypy-boto3-groundstation (>=1.33.0,<1.34.0)"] -guardduty = ["mypy-boto3-guardduty (>=1.33.0,<1.34.0)"] -health = ["mypy-boto3-health (>=1.33.0,<1.34.0)"] -healthlake = ["mypy-boto3-healthlake (>=1.33.0,<1.34.0)"] -honeycode = ["mypy-boto3-honeycode 
(>=1.33.0,<1.34.0)"] -iam = ["mypy-boto3-iam (>=1.33.0,<1.34.0)"] -identitystore = ["mypy-boto3-identitystore (>=1.33.0,<1.34.0)"] -imagebuilder = ["mypy-boto3-imagebuilder (>=1.33.0,<1.34.0)"] -importexport = ["mypy-boto3-importexport (>=1.33.0,<1.34.0)"] -inspector = ["mypy-boto3-inspector (>=1.33.0,<1.34.0)"] -inspector-scan = ["mypy-boto3-inspector-scan (>=1.33.0,<1.34.0)"] -inspector2 = ["mypy-boto3-inspector2 (>=1.33.0,<1.34.0)"] -internetmonitor = ["mypy-boto3-internetmonitor (>=1.33.0,<1.34.0)"] -iot = ["mypy-boto3-iot (>=1.33.0,<1.34.0)"] -iot-data = ["mypy-boto3-iot-data (>=1.33.0,<1.34.0)"] -iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.33.0,<1.34.0)"] -iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.33.0,<1.34.0)"] -iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.33.0,<1.34.0)"] -iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.33.0,<1.34.0)"] -iotanalytics = ["mypy-boto3-iotanalytics (>=1.33.0,<1.34.0)"] -iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.33.0,<1.34.0)"] -iotevents = ["mypy-boto3-iotevents (>=1.33.0,<1.34.0)"] -iotevents-data = ["mypy-boto3-iotevents-data (>=1.33.0,<1.34.0)"] -iotfleethub = ["mypy-boto3-iotfleethub (>=1.33.0,<1.34.0)"] -iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.33.0,<1.34.0)"] -iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.33.0,<1.34.0)"] -iotsitewise = ["mypy-boto3-iotsitewise (>=1.33.0,<1.34.0)"] -iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.33.0,<1.34.0)"] -iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.33.0,<1.34.0)"] -iotwireless = ["mypy-boto3-iotwireless (>=1.33.0,<1.34.0)"] -ivs = ["mypy-boto3-ivs (>=1.33.0,<1.34.0)"] -ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.33.0,<1.34.0)"] -ivschat = ["mypy-boto3-ivschat (>=1.33.0,<1.34.0)"] -kafka = ["mypy-boto3-kafka (>=1.33.0,<1.34.0)"] -kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.33.0,<1.34.0)"] -kendra = ["mypy-boto3-kendra (>=1.33.0,<1.34.0)"] -kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.33.0,<1.34.0)"] -keyspaces = ["mypy-boto3-keyspaces (>=1.33.0,<1.34.0)"] -kinesis = ["mypy-boto3-kinesis (>=1.33.0,<1.34.0)"] -kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.33.0,<1.34.0)"] -kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.33.0,<1.34.0)"] -kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.33.0,<1.34.0)"] -kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.33.0,<1.34.0)"] -kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.33.0,<1.34.0)"] -kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.33.0,<1.34.0)"] -kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.33.0,<1.34.0)"] -kms = ["mypy-boto3-kms (>=1.33.0,<1.34.0)"] -lakeformation = ["mypy-boto3-lakeformation (>=1.33.0,<1.34.0)"] -lambda = ["mypy-boto3-lambda (>=1.33.0,<1.34.0)"] -launch-wizard = ["mypy-boto3-launch-wizard (>=1.33.0,<1.34.0)"] -lex-models = ["mypy-boto3-lex-models (>=1.33.0,<1.34.0)"] -lex-runtime = ["mypy-boto3-lex-runtime (>=1.33.0,<1.34.0)"] -lexv2-models = ["mypy-boto3-lexv2-models (>=1.33.0,<1.34.0)"] -lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.33.0,<1.34.0)"] -license-manager = ["mypy-boto3-license-manager (>=1.33.0,<1.34.0)"] -license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.33.0,<1.34.0)"] -license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.33.0,<1.34.0)"] -lightsail = ["mypy-boto3-lightsail (>=1.33.0,<1.34.0)"] -location = ["mypy-boto3-location (>=1.33.0,<1.34.0)"] -logs = 
["mypy-boto3-logs (>=1.33.0,<1.34.0)"] -lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.33.0,<1.34.0)"] -lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.33.0,<1.34.0)"] -lookoutvision = ["mypy-boto3-lookoutvision (>=1.33.0,<1.34.0)"] -m2 = ["mypy-boto3-m2 (>=1.33.0,<1.34.0)"] -machinelearning = ["mypy-boto3-machinelearning (>=1.33.0,<1.34.0)"] -macie2 = ["mypy-boto3-macie2 (>=1.33.0,<1.34.0)"] -managedblockchain = ["mypy-boto3-managedblockchain (>=1.33.0,<1.34.0)"] -managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.33.0,<1.34.0)"] -marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.33.0,<1.34.0)"] -marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.33.0,<1.34.0)"] -marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.33.0,<1.34.0)"] -marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.33.0,<1.34.0)"] -marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.33.0,<1.34.0)"] -mediaconnect = ["mypy-boto3-mediaconnect (>=1.33.0,<1.34.0)"] -mediaconvert = ["mypy-boto3-mediaconvert (>=1.33.0,<1.34.0)"] -medialive = ["mypy-boto3-medialive (>=1.33.0,<1.34.0)"] -mediapackage = ["mypy-boto3-mediapackage (>=1.33.0,<1.34.0)"] -mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.33.0,<1.34.0)"] -mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.33.0,<1.34.0)"] -mediastore = ["mypy-boto3-mediastore (>=1.33.0,<1.34.0)"] -mediastore-data = ["mypy-boto3-mediastore-data (>=1.33.0,<1.34.0)"] -mediatailor = ["mypy-boto3-mediatailor (>=1.33.0,<1.34.0)"] -medical-imaging = ["mypy-boto3-medical-imaging (>=1.33.0,<1.34.0)"] -memorydb = ["mypy-boto3-memorydb (>=1.33.0,<1.34.0)"] -meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.33.0,<1.34.0)"] -mgh = ["mypy-boto3-mgh (>=1.33.0,<1.34.0)"] -mgn = ["mypy-boto3-mgn (>=1.33.0,<1.34.0)"] -migration-hub-refactor-spaces = ["mypy-boto3-migration-hub-refactor-spaces (>=1.33.0,<1.34.0)"] -migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.33.0,<1.34.0)"] -migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.33.0,<1.34.0)"] -migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.33.0,<1.34.0)"] -mobile = ["mypy-boto3-mobile (>=1.33.0,<1.34.0)"] -mq = ["mypy-boto3-mq (>=1.33.0,<1.34.0)"] -mturk = ["mypy-boto3-mturk (>=1.33.0,<1.34.0)"] -mwaa = ["mypy-boto3-mwaa (>=1.33.0,<1.34.0)"] -neptune = ["mypy-boto3-neptune (>=1.33.0,<1.34.0)"] -neptunedata = ["mypy-boto3-neptunedata (>=1.33.0,<1.34.0)"] -network-firewall = ["mypy-boto3-network-firewall (>=1.33.0,<1.34.0)"] -networkmanager = ["mypy-boto3-networkmanager (>=1.33.0,<1.34.0)"] -nimble = ["mypy-boto3-nimble (>=1.33.0,<1.34.0)"] -oam = ["mypy-boto3-oam (>=1.33.0,<1.34.0)"] -omics = ["mypy-boto3-omics (>=1.33.0,<1.34.0)"] -opensearch = ["mypy-boto3-opensearch (>=1.33.0,<1.34.0)"] -opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.33.0,<1.34.0)"] -opsworks = ["mypy-boto3-opsworks (>=1.33.0,<1.34.0)"] -opsworkscm = ["mypy-boto3-opsworkscm (>=1.33.0,<1.34.0)"] -organizations = ["mypy-boto3-organizations (>=1.33.0,<1.34.0)"] -osis = ["mypy-boto3-osis (>=1.33.0,<1.34.0)"] -outposts = ["mypy-boto3-outposts (>=1.33.0,<1.34.0)"] -panorama = ["mypy-boto3-panorama (>=1.33.0,<1.34.0)"] -payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.33.0,<1.34.0)"] -payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.33.0,<1.34.0)"] -pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.33.0,<1.34.0)"] -personalize = ["mypy-boto3-personalize (>=1.33.0,<1.34.0)"] 
-personalize-events = ["mypy-boto3-personalize-events (>=1.33.0,<1.34.0)"] -personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.33.0,<1.34.0)"] -pi = ["mypy-boto3-pi (>=1.33.0,<1.34.0)"] -pinpoint = ["mypy-boto3-pinpoint (>=1.33.0,<1.34.0)"] -pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.33.0,<1.34.0)"] -pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.33.0,<1.34.0)"] -pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.33.0,<1.34.0)"] -pipes = ["mypy-boto3-pipes (>=1.33.0,<1.34.0)"] -polly = ["mypy-boto3-polly (>=1.33.0,<1.34.0)"] -pricing = ["mypy-boto3-pricing (>=1.33.0,<1.34.0)"] -privatenetworks = ["mypy-boto3-privatenetworks (>=1.33.0,<1.34.0)"] -proton = ["mypy-boto3-proton (>=1.33.0,<1.34.0)"] -qbusiness = ["mypy-boto3-qbusiness (>=1.33.0,<1.34.0)"] -qconnect = ["mypy-boto3-qconnect (>=1.33.0,<1.34.0)"] -qldb = ["mypy-boto3-qldb (>=1.33.0,<1.34.0)"] -qldb-session = ["mypy-boto3-qldb-session (>=1.33.0,<1.34.0)"] -quicksight = ["mypy-boto3-quicksight (>=1.33.0,<1.34.0)"] -ram = ["mypy-boto3-ram (>=1.33.0,<1.34.0)"] -rbin = ["mypy-boto3-rbin (>=1.33.0,<1.34.0)"] -rds = ["mypy-boto3-rds (>=1.33.0,<1.34.0)"] -rds-data = ["mypy-boto3-rds-data (>=1.33.0,<1.34.0)"] -redshift = ["mypy-boto3-redshift (>=1.33.0,<1.34.0)"] -redshift-data = ["mypy-boto3-redshift-data (>=1.33.0,<1.34.0)"] -redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.33.0,<1.34.0)"] -rekognition = ["mypy-boto3-rekognition (>=1.33.0,<1.34.0)"] -repostspace = ["mypy-boto3-repostspace (>=1.33.0,<1.34.0)"] -resiliencehub = ["mypy-boto3-resiliencehub (>=1.33.0,<1.34.0)"] -resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.33.0,<1.34.0)"] -resource-groups = ["mypy-boto3-resource-groups (>=1.33.0,<1.34.0)"] -resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.33.0,<1.34.0)"] -robomaker = ["mypy-boto3-robomaker (>=1.33.0,<1.34.0)"] -rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.33.0,<1.34.0)"] -route53 = ["mypy-boto3-route53 (>=1.33.0,<1.34.0)"] -route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.33.0,<1.34.0)"] -route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.33.0,<1.34.0)"] -route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.33.0,<1.34.0)"] -route53domains = ["mypy-boto3-route53domains (>=1.33.0,<1.34.0)"] -route53resolver = ["mypy-boto3-route53resolver (>=1.33.0,<1.34.0)"] -rum = ["mypy-boto3-rum (>=1.33.0,<1.34.0)"] -s3 = ["mypy-boto3-s3 (>=1.33.0,<1.34.0)"] -s3control = ["mypy-boto3-s3control (>=1.33.0,<1.34.0)"] -s3outposts = ["mypy-boto3-s3outposts (>=1.33.0,<1.34.0)"] -sagemaker = ["mypy-boto3-sagemaker (>=1.33.0,<1.34.0)"] -sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.33.0,<1.34.0)"] -sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.33.0,<1.34.0)"] -sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.33.0,<1.34.0)"] -sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.33.0,<1.34.0)"] -sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.33.0,<1.34.0)"] -sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.33.0,<1.34.0)"] -savingsplans = ["mypy-boto3-savingsplans (>=1.33.0,<1.34.0)"] -scheduler = ["mypy-boto3-scheduler (>=1.33.0,<1.34.0)"] -schemas = ["mypy-boto3-schemas (>=1.33.0,<1.34.0)"] -sdb = ["mypy-boto3-sdb (>=1.33.0,<1.34.0)"] -secretsmanager = ["mypy-boto3-secretsmanager (>=1.33.0,<1.34.0)"] -securityhub = ["mypy-boto3-securityhub (>=1.33.0,<1.34.0)"] -securitylake = ["mypy-boto3-securitylake 
(>=1.33.0,<1.34.0)"] -serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.33.0,<1.34.0)"] -service-quotas = ["mypy-boto3-service-quotas (>=1.33.0,<1.34.0)"] -servicecatalog = ["mypy-boto3-servicecatalog (>=1.33.0,<1.34.0)"] -servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.33.0,<1.34.0)"] -servicediscovery = ["mypy-boto3-servicediscovery (>=1.33.0,<1.34.0)"] -ses = ["mypy-boto3-ses (>=1.33.0,<1.34.0)"] -sesv2 = ["mypy-boto3-sesv2 (>=1.33.0,<1.34.0)"] -shield = ["mypy-boto3-shield (>=1.33.0,<1.34.0)"] -signer = ["mypy-boto3-signer (>=1.33.0,<1.34.0)"] -simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.33.0,<1.34.0)"] -sms = ["mypy-boto3-sms (>=1.33.0,<1.34.0)"] -sms-voice = ["mypy-boto3-sms-voice (>=1.33.0,<1.34.0)"] -snow-device-management = ["mypy-boto3-snow-device-management (>=1.33.0,<1.34.0)"] -snowball = ["mypy-boto3-snowball (>=1.33.0,<1.34.0)"] -sns = ["mypy-boto3-sns (>=1.33.0,<1.34.0)"] -sqs = ["mypy-boto3-sqs (>=1.33.0,<1.34.0)"] -ssm = ["mypy-boto3-ssm (>=1.33.0,<1.34.0)"] -ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.33.0,<1.34.0)"] -ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.33.0,<1.34.0)"] -ssm-sap = ["mypy-boto3-ssm-sap (>=1.33.0,<1.34.0)"] -sso = ["mypy-boto3-sso (>=1.33.0,<1.34.0)"] -sso-admin = ["mypy-boto3-sso-admin (>=1.33.0,<1.34.0)"] -sso-oidc = ["mypy-boto3-sso-oidc (>=1.33.0,<1.34.0)"] -stepfunctions = ["mypy-boto3-stepfunctions (>=1.33.0,<1.34.0)"] -storagegateway = ["mypy-boto3-storagegateway (>=1.33.0,<1.34.0)"] -sts = ["mypy-boto3-sts (>=1.33.0,<1.34.0)"] -support = ["mypy-boto3-support (>=1.33.0,<1.34.0)"] -support-app = ["mypy-boto3-support-app (>=1.33.0,<1.34.0)"] -swf = ["mypy-boto3-swf (>=1.33.0,<1.34.0)"] -synthetics = ["mypy-boto3-synthetics (>=1.33.0,<1.34.0)"] -textract = ["mypy-boto3-textract (>=1.33.0,<1.34.0)"] -timestream-query = ["mypy-boto3-timestream-query (>=1.33.0,<1.34.0)"] -timestream-write = ["mypy-boto3-timestream-write (>=1.33.0,<1.34.0)"] -tnb = ["mypy-boto3-tnb (>=1.33.0,<1.34.0)"] -transcribe = ["mypy-boto3-transcribe (>=1.33.0,<1.34.0)"] -transfer = ["mypy-boto3-transfer (>=1.33.0,<1.34.0)"] -translate = ["mypy-boto3-translate (>=1.33.0,<1.34.0)"] -trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.33.0,<1.34.0)"] -verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.33.0,<1.34.0)"] -voice-id = ["mypy-boto3-voice-id (>=1.33.0,<1.34.0)"] -vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.33.0,<1.34.0)"] -waf = ["mypy-boto3-waf (>=1.33.0,<1.34.0)"] -waf-regional = ["mypy-boto3-waf-regional (>=1.33.0,<1.34.0)"] -wafv2 = ["mypy-boto3-wafv2 (>=1.33.0,<1.34.0)"] -wellarchitected = ["mypy-boto3-wellarchitected (>=1.33.0,<1.34.0)"] -wisdom = ["mypy-boto3-wisdom (>=1.33.0,<1.34.0)"] -workdocs = ["mypy-boto3-workdocs (>=1.33.0,<1.34.0)"] -worklink = ["mypy-boto3-worklink (>=1.33.0,<1.34.0)"] -workmail = ["mypy-boto3-workmail (>=1.33.0,<1.34.0)"] -workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.33.0,<1.34.0)"] -workspaces = ["mypy-boto3-workspaces (>=1.33.0,<1.34.0)"] -workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.33.0,<1.34.0)"] -workspaces-web = ["mypy-boto3-workspaces-web (>=1.33.0,<1.34.0)"] -xray = ["mypy-boto3-xray (>=1.33.0,<1.34.0)"] +accessanalyzer = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)"] +account = ["mypy-boto3-account (>=1.34.0,<1.35.0)"] +acm = ["mypy-boto3-acm (>=1.34.0,<1.35.0)"] +acm-pca = ["mypy-boto3-acm-pca (>=1.34.0,<1.35.0)"] +alexaforbusiness = ["mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)"] +all = ["mypy-boto3-accessanalyzer (>=1.34.0,<1.35.0)", 
"mypy-boto3-account (>=1.34.0,<1.35.0)", "mypy-boto3-acm (>=1.34.0,<1.35.0)", "mypy-boto3-acm-pca (>=1.34.0,<1.35.0)", "mypy-boto3-alexaforbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-amp (>=1.34.0,<1.35.0)", "mypy-boto3-amplify (>=1.34.0,<1.35.0)", "mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)", "mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)", "mypy-boto3-apigateway (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)", "mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)", "mypy-boto3-appconfig (>=1.34.0,<1.35.0)", "mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)", "mypy-boto3-appfabric (>=1.34.0,<1.35.0)", "mypy-boto3-appflow (>=1.34.0,<1.35.0)", "mypy-boto3-appintegrations (>=1.34.0,<1.35.0)", "mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-application-insights (>=1.34.0,<1.35.0)", "mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-appmesh (>=1.34.0,<1.35.0)", "mypy-boto3-apprunner (>=1.34.0,<1.35.0)", "mypy-boto3-appstream (>=1.34.0,<1.35.0)", "mypy-boto3-appsync (>=1.34.0,<1.35.0)", "mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)", "mypy-boto3-athena (>=1.34.0,<1.35.0)", "mypy-boto3-auditmanager (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling (>=1.34.0,<1.35.0)", "mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)", "mypy-boto3-b2bi (>=1.34.0,<1.35.0)", "mypy-boto3-backup (>=1.34.0,<1.35.0)", "mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)", "mypy-boto3-backupstorage (>=1.34.0,<1.35.0)", "mypy-boto3-batch (>=1.34.0,<1.35.0)", "mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-billingconductor (>=1.34.0,<1.35.0)", "mypy-boto3-braket (>=1.34.0,<1.35.0)", "mypy-boto3-budgets (>=1.34.0,<1.35.0)", "mypy-boto3-ce (>=1.34.0,<1.35.0)", "mypy-boto3-chime (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)", "mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)", "mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)", "mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)", "mypy-boto3-cloud9 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)", "mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)", "mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront (>=1.34.0,<1.35.0)", "mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)", "mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)", "mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)", "mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)", "mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)", "mypy-boto3-codeartifact (>=1.34.0,<1.35.0)", "mypy-boto3-codebuild (>=1.34.0,<1.35.0)", "mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)", "mypy-boto3-codecommit (>=1.34.0,<1.35.0)", "mypy-boto3-codedeploy (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)", "mypy-boto3-codeguru-security (>=1.34.0,<1.35.0)", "mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)", "mypy-boto3-codepipeline (>=1.34.0,<1.35.0)", "mypy-boto3-codestar (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)", "mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)", "mypy-boto3-cognito-idp 
(>=1.34.0,<1.35.0)", "mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)", "mypy-boto3-comprehend (>=1.34.0,<1.35.0)", "mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)", "mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)", "mypy-boto3-config (>=1.34.0,<1.35.0)", "mypy-boto3-connect (>=1.34.0,<1.35.0)", "mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)", "mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)", "mypy-boto3-connectcases (>=1.34.0,<1.35.0)", "mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)", "mypy-boto3-controltower (>=1.34.0,<1.35.0)", "mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)", "mypy-boto3-cur (>=1.34.0,<1.35.0)", "mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)", "mypy-boto3-databrew (>=1.34.0,<1.35.0)", "mypy-boto3-dataexchange (>=1.34.0,<1.35.0)", "mypy-boto3-datapipeline (>=1.34.0,<1.35.0)", "mypy-boto3-datasync (>=1.34.0,<1.35.0)", "mypy-boto3-datazone (>=1.34.0,<1.35.0)", "mypy-boto3-dax (>=1.34.0,<1.35.0)", "mypy-boto3-detective (>=1.34.0,<1.35.0)", "mypy-boto3-devicefarm (>=1.34.0,<1.35.0)", "mypy-boto3-devops-guru (>=1.34.0,<1.35.0)", "mypy-boto3-directconnect (>=1.34.0,<1.35.0)", "mypy-boto3-discovery (>=1.34.0,<1.35.0)", "mypy-boto3-dlm (>=1.34.0,<1.35.0)", "mypy-boto3-dms (>=1.34.0,<1.35.0)", "mypy-boto3-docdb (>=1.34.0,<1.35.0)", "mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)", "mypy-boto3-drs (>=1.34.0,<1.35.0)", "mypy-boto3-ds (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)", "mypy-boto3-ebs (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)", "mypy-boto3-ecr (>=1.34.0,<1.35.0)", "mypy-boto3-ecr-public (>=1.34.0,<1.35.0)", "mypy-boto3-ecs (>=1.34.0,<1.35.0)", "mypy-boto3-efs (>=1.34.0,<1.35.0)", "mypy-boto3-eks (>=1.34.0,<1.35.0)", "mypy-boto3-eks-auth (>=1.34.0,<1.35.0)", "mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)", "mypy-boto3-elasticache (>=1.34.0,<1.35.0)", "mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)", "mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)", "mypy-boto3-elb (>=1.34.0,<1.35.0)", "mypy-boto3-elbv2 (>=1.34.0,<1.35.0)", "mypy-boto3-emr (>=1.34.0,<1.35.0)", "mypy-boto3-emr-containers (>=1.34.0,<1.35.0)", "mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-entityresolution (>=1.34.0,<1.35.0)", "mypy-boto3-es (>=1.34.0,<1.35.0)", "mypy-boto3-events (>=1.34.0,<1.35.0)", "mypy-boto3-evidently (>=1.34.0,<1.35.0)", "mypy-boto3-finspace (>=1.34.0,<1.35.0)", "mypy-boto3-finspace-data (>=1.34.0,<1.35.0)", "mypy-boto3-firehose (>=1.34.0,<1.35.0)", "mypy-boto3-fis (>=1.34.0,<1.35.0)", "mypy-boto3-fms (>=1.34.0,<1.35.0)", "mypy-boto3-forecast (>=1.34.0,<1.35.0)", "mypy-boto3-forecastquery (>=1.34.0,<1.35.0)", "mypy-boto3-frauddetector (>=1.34.0,<1.35.0)", "mypy-boto3-freetier (>=1.34.0,<1.35.0)", "mypy-boto3-fsx (>=1.34.0,<1.35.0)", "mypy-boto3-gamelift (>=1.34.0,<1.35.0)", "mypy-boto3-glacier (>=1.34.0,<1.35.0)", "mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)", "mypy-boto3-glue (>=1.34.0,<1.35.0)", "mypy-boto3-grafana (>=1.34.0,<1.35.0)", "mypy-boto3-greengrass (>=1.34.0,<1.35.0)", "mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)", "mypy-boto3-groundstation (>=1.34.0,<1.35.0)", "mypy-boto3-guardduty (>=1.34.0,<1.35.0)", "mypy-boto3-health (>=1.34.0,<1.35.0)", "mypy-boto3-healthlake (>=1.34.0,<1.35.0)", "mypy-boto3-honeycode (>=1.34.0,<1.35.0)", "mypy-boto3-iam (>=1.34.0,<1.35.0)", "mypy-boto3-identitystore (>=1.34.0,<1.35.0)", "mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)", "mypy-boto3-importexport (>=1.34.0,<1.35.0)", 
"mypy-boto3-inspector (>=1.34.0,<1.35.0)", "mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)", "mypy-boto3-inspector2 (>=1.34.0,<1.35.0)", "mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)", "mypy-boto3-iot (>=1.34.0,<1.35.0)", "mypy-boto3-iot-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)", "mypy-boto3-iot-roborunner (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)", "mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)", "mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents (>=1.34.0,<1.35.0)", "mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)", "mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)", "mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)", "mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)", "mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)", "mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)", "mypy-boto3-iotwireless (>=1.34.0,<1.35.0)", "mypy-boto3-ivs (>=1.34.0,<1.35.0)", "mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)", "mypy-boto3-ivschat (>=1.34.0,<1.35.0)", "mypy-boto3-kafka (>=1.34.0,<1.35.0)", "mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-kendra (>=1.34.0,<1.35.0)", "mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)", "mypy-boto3-keyspaces (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)", "mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)", "mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)", "mypy-boto3-kms (>=1.34.0,<1.35.0)", "mypy-boto3-lakeformation (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)", "mypy-boto3-lex-models (>=1.34.0,<1.35.0)", "mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)", "mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)", "mypy-boto3-lightsail (>=1.34.0,<1.35.0)", "mypy-boto3-location (>=1.34.0,<1.35.0)", "mypy-boto3-logs (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)", "mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)", "mypy-boto3-m2 (>=1.34.0,<1.35.0)", "mypy-boto3-machinelearning (>=1.34.0,<1.35.0)", "mypy-boto3-macie2 (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)", "mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)", "mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)", "mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)", "mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)", "mypy-boto3-medialive (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)", "mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore (>=1.34.0,<1.35.0)", "mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)", "mypy-boto3-mediatailor (>=1.34.0,<1.35.0)", "mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)", "mypy-boto3-memorydb 
(>=1.34.0,<1.35.0)", "mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)", "mypy-boto3-mgh (>=1.34.0,<1.35.0)", "mypy-boto3-mgn (>=1.34.0,<1.35.0)", "mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)", "mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)", "mypy-boto3-mobile (>=1.34.0,<1.35.0)", "mypy-boto3-mq (>=1.34.0,<1.35.0)", "mypy-boto3-mturk (>=1.34.0,<1.35.0)", "mypy-boto3-mwaa (>=1.34.0,<1.35.0)", "mypy-boto3-neptune (>=1.34.0,<1.35.0)", "mypy-boto3-neptunedata (>=1.34.0,<1.35.0)", "mypy-boto3-network-firewall (>=1.34.0,<1.35.0)", "mypy-boto3-networkmanager (>=1.34.0,<1.35.0)", "mypy-boto3-nimble (>=1.34.0,<1.35.0)", "mypy-boto3-oam (>=1.34.0,<1.35.0)", "mypy-boto3-omics (>=1.34.0,<1.35.0)", "mypy-boto3-opensearch (>=1.34.0,<1.35.0)", "mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)", "mypy-boto3-opsworks (>=1.34.0,<1.35.0)", "mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)", "mypy-boto3-organizations (>=1.34.0,<1.35.0)", "mypy-boto3-osis (>=1.34.0,<1.35.0)", "mypy-boto3-outposts (>=1.34.0,<1.35.0)", "mypy-boto3-panorama (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)", "mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)", "mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)", "mypy-boto3-personalize (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-events (>=1.34.0,<1.35.0)", "mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-pi (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)", "mypy-boto3-pipes (>=1.34.0,<1.35.0)", "mypy-boto3-polly (>=1.34.0,<1.35.0)", "mypy-boto3-pricing (>=1.34.0,<1.35.0)", "mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)", "mypy-boto3-proton (>=1.34.0,<1.35.0)", "mypy-boto3-qbusiness (>=1.34.0,<1.35.0)", "mypy-boto3-qconnect (>=1.34.0,<1.35.0)", "mypy-boto3-qldb (>=1.34.0,<1.35.0)", "mypy-boto3-qldb-session (>=1.34.0,<1.35.0)", "mypy-boto3-quicksight (>=1.34.0,<1.35.0)", "mypy-boto3-ram (>=1.34.0,<1.35.0)", "mypy-boto3-rbin (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-rds-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-data (>=1.34.0,<1.35.0)", "mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)", "mypy-boto3-rekognition (>=1.34.0,<1.35.0)", "mypy-boto3-repostspace (>=1.34.0,<1.35.0)", "mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)", "mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)", "mypy-boto3-resource-groups (>=1.34.0,<1.35.0)", "mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)", "mypy-boto3-robomaker (>=1.34.0,<1.35.0)", "mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)", "mypy-boto3-route53 (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)", "mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)", "mypy-boto3-route53domains (>=1.34.0,<1.35.0)", "mypy-boto3-route53resolver (>=1.34.0,<1.35.0)", "mypy-boto3-rum (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-s3control (>=1.34.0,<1.35.0)", "mypy-boto3-s3outposts (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)", 
"mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)", "mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)", "mypy-boto3-savingsplans (>=1.34.0,<1.35.0)", "mypy-boto3-scheduler (>=1.34.0,<1.35.0)", "mypy-boto3-schemas (>=1.34.0,<1.35.0)", "mypy-boto3-sdb (>=1.34.0,<1.35.0)", "mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)", "mypy-boto3-securityhub (>=1.34.0,<1.35.0)", "mypy-boto3-securitylake (>=1.34.0,<1.35.0)", "mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)", "mypy-boto3-service-quotas (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)", "mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)", "mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)", "mypy-boto3-ses (>=1.34.0,<1.35.0)", "mypy-boto3-sesv2 (>=1.34.0,<1.35.0)", "mypy-boto3-shield (>=1.34.0,<1.35.0)", "mypy-boto3-signer (>=1.34.0,<1.35.0)", "mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)", "mypy-boto3-sms (>=1.34.0,<1.35.0)", "mypy-boto3-sms-voice (>=1.34.0,<1.35.0)", "mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)", "mypy-boto3-snowball (>=1.34.0,<1.35.0)", "mypy-boto3-sns (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)", "mypy-boto3-ssm (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)", "mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)", "mypy-boto3-sso (>=1.34.0,<1.35.0)", "mypy-boto3-sso-admin (>=1.34.0,<1.35.0)", "mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)", "mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)", "mypy-boto3-storagegateway (>=1.34.0,<1.35.0)", "mypy-boto3-sts (>=1.34.0,<1.35.0)", "mypy-boto3-support (>=1.34.0,<1.35.0)", "mypy-boto3-support-app (>=1.34.0,<1.35.0)", "mypy-boto3-swf (>=1.34.0,<1.35.0)", "mypy-boto3-synthetics (>=1.34.0,<1.35.0)", "mypy-boto3-textract (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-query (>=1.34.0,<1.35.0)", "mypy-boto3-timestream-write (>=1.34.0,<1.35.0)", "mypy-boto3-tnb (>=1.34.0,<1.35.0)", "mypy-boto3-transcribe (>=1.34.0,<1.35.0)", "mypy-boto3-transfer (>=1.34.0,<1.35.0)", "mypy-boto3-translate (>=1.34.0,<1.35.0)", "mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)", "mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)", "mypy-boto3-voice-id (>=1.34.0,<1.35.0)", "mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)", "mypy-boto3-waf (>=1.34.0,<1.35.0)", "mypy-boto3-waf-regional (>=1.34.0,<1.35.0)", "mypy-boto3-wafv2 (>=1.34.0,<1.35.0)", "mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)", "mypy-boto3-wisdom (>=1.34.0,<1.35.0)", "mypy-boto3-workdocs (>=1.34.0,<1.35.0)", "mypy-boto3-worklink (>=1.34.0,<1.35.0)", "mypy-boto3-workmail (>=1.34.0,<1.35.0)", "mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)", "mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)", "mypy-boto3-xray (>=1.34.0,<1.35.0)"] +amp = ["mypy-boto3-amp (>=1.34.0,<1.35.0)"] +amplify = ["mypy-boto3-amplify (>=1.34.0,<1.35.0)"] +amplifybackend = ["mypy-boto3-amplifybackend (>=1.34.0,<1.35.0)"] +amplifyuibuilder = ["mypy-boto3-amplifyuibuilder (>=1.34.0,<1.35.0)"] +apigateway = ["mypy-boto3-apigateway (>=1.34.0,<1.35.0)"] +apigatewaymanagementapi = ["mypy-boto3-apigatewaymanagementapi (>=1.34.0,<1.35.0)"] +apigatewayv2 = ["mypy-boto3-apigatewayv2 (>=1.34.0,<1.35.0)"] +appconfig = ["mypy-boto3-appconfig (>=1.34.0,<1.35.0)"] +appconfigdata = ["mypy-boto3-appconfigdata (>=1.34.0,<1.35.0)"] +appfabric = ["mypy-boto3-appfabric (>=1.34.0,<1.35.0)"] +appflow = ["mypy-boto3-appflow (>=1.34.0,<1.35.0)"] +appintegrations = 
["mypy-boto3-appintegrations (>=1.34.0,<1.35.0)"] +application-autoscaling = ["mypy-boto3-application-autoscaling (>=1.34.0,<1.35.0)"] +application-insights = ["mypy-boto3-application-insights (>=1.34.0,<1.35.0)"] +applicationcostprofiler = ["mypy-boto3-applicationcostprofiler (>=1.34.0,<1.35.0)"] +appmesh = ["mypy-boto3-appmesh (>=1.34.0,<1.35.0)"] +apprunner = ["mypy-boto3-apprunner (>=1.34.0,<1.35.0)"] +appstream = ["mypy-boto3-appstream (>=1.34.0,<1.35.0)"] +appsync = ["mypy-boto3-appsync (>=1.34.0,<1.35.0)"] +arc-zonal-shift = ["mypy-boto3-arc-zonal-shift (>=1.34.0,<1.35.0)"] +athena = ["mypy-boto3-athena (>=1.34.0,<1.35.0)"] +auditmanager = ["mypy-boto3-auditmanager (>=1.34.0,<1.35.0)"] +autoscaling = ["mypy-boto3-autoscaling (>=1.34.0,<1.35.0)"] +autoscaling-plans = ["mypy-boto3-autoscaling-plans (>=1.34.0,<1.35.0)"] +b2bi = ["mypy-boto3-b2bi (>=1.34.0,<1.35.0)"] +backup = ["mypy-boto3-backup (>=1.34.0,<1.35.0)"] +backup-gateway = ["mypy-boto3-backup-gateway (>=1.34.0,<1.35.0)"] +backupstorage = ["mypy-boto3-backupstorage (>=1.34.0,<1.35.0)"] +batch = ["mypy-boto3-batch (>=1.34.0,<1.35.0)"] +bcm-data-exports = ["mypy-boto3-bcm-data-exports (>=1.34.0,<1.35.0)"] +bedrock = ["mypy-boto3-bedrock (>=1.34.0,<1.35.0)"] +bedrock-agent = ["mypy-boto3-bedrock-agent (>=1.34.0,<1.35.0)"] +bedrock-agent-runtime = ["mypy-boto3-bedrock-agent-runtime (>=1.34.0,<1.35.0)"] +bedrock-runtime = ["mypy-boto3-bedrock-runtime (>=1.34.0,<1.35.0)"] +billingconductor = ["mypy-boto3-billingconductor (>=1.34.0,<1.35.0)"] +boto3 = ["boto3 (==1.34.0)", "botocore (==1.34.0)"] +braket = ["mypy-boto3-braket (>=1.34.0,<1.35.0)"] +budgets = ["mypy-boto3-budgets (>=1.34.0,<1.35.0)"] +ce = ["mypy-boto3-ce (>=1.34.0,<1.35.0)"] +chime = ["mypy-boto3-chime (>=1.34.0,<1.35.0)"] +chime-sdk-identity = ["mypy-boto3-chime-sdk-identity (>=1.34.0,<1.35.0)"] +chime-sdk-media-pipelines = ["mypy-boto3-chime-sdk-media-pipelines (>=1.34.0,<1.35.0)"] +chime-sdk-meetings = ["mypy-boto3-chime-sdk-meetings (>=1.34.0,<1.35.0)"] +chime-sdk-messaging = ["mypy-boto3-chime-sdk-messaging (>=1.34.0,<1.35.0)"] +chime-sdk-voice = ["mypy-boto3-chime-sdk-voice (>=1.34.0,<1.35.0)"] +cleanrooms = ["mypy-boto3-cleanrooms (>=1.34.0,<1.35.0)"] +cleanroomsml = ["mypy-boto3-cleanroomsml (>=1.34.0,<1.35.0)"] +cloud9 = ["mypy-boto3-cloud9 (>=1.34.0,<1.35.0)"] +cloudcontrol = ["mypy-boto3-cloudcontrol (>=1.34.0,<1.35.0)"] +clouddirectory = ["mypy-boto3-clouddirectory (>=1.34.0,<1.35.0)"] +cloudformation = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)"] +cloudfront = ["mypy-boto3-cloudfront (>=1.34.0,<1.35.0)"] +cloudfront-keyvaluestore = ["mypy-boto3-cloudfront-keyvaluestore (>=1.34.0,<1.35.0)"] +cloudhsm = ["mypy-boto3-cloudhsm (>=1.34.0,<1.35.0)"] +cloudhsmv2 = ["mypy-boto3-cloudhsmv2 (>=1.34.0,<1.35.0)"] +cloudsearch = ["mypy-boto3-cloudsearch (>=1.34.0,<1.35.0)"] +cloudsearchdomain = ["mypy-boto3-cloudsearchdomain (>=1.34.0,<1.35.0)"] +cloudtrail = ["mypy-boto3-cloudtrail (>=1.34.0,<1.35.0)"] +cloudtrail-data = ["mypy-boto3-cloudtrail-data (>=1.34.0,<1.35.0)"] +cloudwatch = ["mypy-boto3-cloudwatch (>=1.34.0,<1.35.0)"] +codeartifact = ["mypy-boto3-codeartifact (>=1.34.0,<1.35.0)"] +codebuild = ["mypy-boto3-codebuild (>=1.34.0,<1.35.0)"] +codecatalyst = ["mypy-boto3-codecatalyst (>=1.34.0,<1.35.0)"] +codecommit = ["mypy-boto3-codecommit (>=1.34.0,<1.35.0)"] +codedeploy = ["mypy-boto3-codedeploy (>=1.34.0,<1.35.0)"] +codeguru-reviewer = ["mypy-boto3-codeguru-reviewer (>=1.34.0,<1.35.0)"] +codeguru-security = ["mypy-boto3-codeguru-security 
(>=1.34.0,<1.35.0)"] +codeguruprofiler = ["mypy-boto3-codeguruprofiler (>=1.34.0,<1.35.0)"] +codepipeline = ["mypy-boto3-codepipeline (>=1.34.0,<1.35.0)"] +codestar = ["mypy-boto3-codestar (>=1.34.0,<1.35.0)"] +codestar-connections = ["mypy-boto3-codestar-connections (>=1.34.0,<1.35.0)"] +codestar-notifications = ["mypy-boto3-codestar-notifications (>=1.34.0,<1.35.0)"] +cognito-identity = ["mypy-boto3-cognito-identity (>=1.34.0,<1.35.0)"] +cognito-idp = ["mypy-boto3-cognito-idp (>=1.34.0,<1.35.0)"] +cognito-sync = ["mypy-boto3-cognito-sync (>=1.34.0,<1.35.0)"] +comprehend = ["mypy-boto3-comprehend (>=1.34.0,<1.35.0)"] +comprehendmedical = ["mypy-boto3-comprehendmedical (>=1.34.0,<1.35.0)"] +compute-optimizer = ["mypy-boto3-compute-optimizer (>=1.34.0,<1.35.0)"] +config = ["mypy-boto3-config (>=1.34.0,<1.35.0)"] +connect = ["mypy-boto3-connect (>=1.34.0,<1.35.0)"] +connect-contact-lens = ["mypy-boto3-connect-contact-lens (>=1.34.0,<1.35.0)"] +connectcampaigns = ["mypy-boto3-connectcampaigns (>=1.34.0,<1.35.0)"] +connectcases = ["mypy-boto3-connectcases (>=1.34.0,<1.35.0)"] +connectparticipant = ["mypy-boto3-connectparticipant (>=1.34.0,<1.35.0)"] +controltower = ["mypy-boto3-controltower (>=1.34.0,<1.35.0)"] +cost-optimization-hub = ["mypy-boto3-cost-optimization-hub (>=1.34.0,<1.35.0)"] +cur = ["mypy-boto3-cur (>=1.34.0,<1.35.0)"] +customer-profiles = ["mypy-boto3-customer-profiles (>=1.34.0,<1.35.0)"] +databrew = ["mypy-boto3-databrew (>=1.34.0,<1.35.0)"] +dataexchange = ["mypy-boto3-dataexchange (>=1.34.0,<1.35.0)"] +datapipeline = ["mypy-boto3-datapipeline (>=1.34.0,<1.35.0)"] +datasync = ["mypy-boto3-datasync (>=1.34.0,<1.35.0)"] +datazone = ["mypy-boto3-datazone (>=1.34.0,<1.35.0)"] +dax = ["mypy-boto3-dax (>=1.34.0,<1.35.0)"] +detective = ["mypy-boto3-detective (>=1.34.0,<1.35.0)"] +devicefarm = ["mypy-boto3-devicefarm (>=1.34.0,<1.35.0)"] +devops-guru = ["mypy-boto3-devops-guru (>=1.34.0,<1.35.0)"] +directconnect = ["mypy-boto3-directconnect (>=1.34.0,<1.35.0)"] +discovery = ["mypy-boto3-discovery (>=1.34.0,<1.35.0)"] +dlm = ["mypy-boto3-dlm (>=1.34.0,<1.35.0)"] +dms = ["mypy-boto3-dms (>=1.34.0,<1.35.0)"] +docdb = ["mypy-boto3-docdb (>=1.34.0,<1.35.0)"] +docdb-elastic = ["mypy-boto3-docdb-elastic (>=1.34.0,<1.35.0)"] +drs = ["mypy-boto3-drs (>=1.34.0,<1.35.0)"] +ds = ["mypy-boto3-ds (>=1.34.0,<1.35.0)"] +dynamodb = ["mypy-boto3-dynamodb (>=1.34.0,<1.35.0)"] +dynamodbstreams = ["mypy-boto3-dynamodbstreams (>=1.34.0,<1.35.0)"] +ebs = ["mypy-boto3-ebs (>=1.34.0,<1.35.0)"] +ec2 = ["mypy-boto3-ec2 (>=1.34.0,<1.35.0)"] +ec2-instance-connect = ["mypy-boto3-ec2-instance-connect (>=1.34.0,<1.35.0)"] +ecr = ["mypy-boto3-ecr (>=1.34.0,<1.35.0)"] +ecr-public = ["mypy-boto3-ecr-public (>=1.34.0,<1.35.0)"] +ecs = ["mypy-boto3-ecs (>=1.34.0,<1.35.0)"] +efs = ["mypy-boto3-efs (>=1.34.0,<1.35.0)"] +eks = ["mypy-boto3-eks (>=1.34.0,<1.35.0)"] +eks-auth = ["mypy-boto3-eks-auth (>=1.34.0,<1.35.0)"] +elastic-inference = ["mypy-boto3-elastic-inference (>=1.34.0,<1.35.0)"] +elasticache = ["mypy-boto3-elasticache (>=1.34.0,<1.35.0)"] +elasticbeanstalk = ["mypy-boto3-elasticbeanstalk (>=1.34.0,<1.35.0)"] +elastictranscoder = ["mypy-boto3-elastictranscoder (>=1.34.0,<1.35.0)"] +elb = ["mypy-boto3-elb (>=1.34.0,<1.35.0)"] +elbv2 = ["mypy-boto3-elbv2 (>=1.34.0,<1.35.0)"] +emr = ["mypy-boto3-emr (>=1.34.0,<1.35.0)"] +emr-containers = ["mypy-boto3-emr-containers (>=1.34.0,<1.35.0)"] +emr-serverless = ["mypy-boto3-emr-serverless (>=1.34.0,<1.35.0)"] +entityresolution = ["mypy-boto3-entityresolution 
(>=1.34.0,<1.35.0)"] +es = ["mypy-boto3-es (>=1.34.0,<1.35.0)"] +essential = ["mypy-boto3-cloudformation (>=1.34.0,<1.35.0)", "mypy-boto3-dynamodb (>=1.34.0,<1.35.0)", "mypy-boto3-ec2 (>=1.34.0,<1.35.0)", "mypy-boto3-lambda (>=1.34.0,<1.35.0)", "mypy-boto3-rds (>=1.34.0,<1.35.0)", "mypy-boto3-s3 (>=1.34.0,<1.35.0)", "mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +events = ["mypy-boto3-events (>=1.34.0,<1.35.0)"] +evidently = ["mypy-boto3-evidently (>=1.34.0,<1.35.0)"] +finspace = ["mypy-boto3-finspace (>=1.34.0,<1.35.0)"] +finspace-data = ["mypy-boto3-finspace-data (>=1.34.0,<1.35.0)"] +firehose = ["mypy-boto3-firehose (>=1.34.0,<1.35.0)"] +fis = ["mypy-boto3-fis (>=1.34.0,<1.35.0)"] +fms = ["mypy-boto3-fms (>=1.34.0,<1.35.0)"] +forecast = ["mypy-boto3-forecast (>=1.34.0,<1.35.0)"] +forecastquery = ["mypy-boto3-forecastquery (>=1.34.0,<1.35.0)"] +frauddetector = ["mypy-boto3-frauddetector (>=1.34.0,<1.35.0)"] +freetier = ["mypy-boto3-freetier (>=1.34.0,<1.35.0)"] +fsx = ["mypy-boto3-fsx (>=1.34.0,<1.35.0)"] +gamelift = ["mypy-boto3-gamelift (>=1.34.0,<1.35.0)"] +glacier = ["mypy-boto3-glacier (>=1.34.0,<1.35.0)"] +globalaccelerator = ["mypy-boto3-globalaccelerator (>=1.34.0,<1.35.0)"] +glue = ["mypy-boto3-glue (>=1.34.0,<1.35.0)"] +grafana = ["mypy-boto3-grafana (>=1.34.0,<1.35.0)"] +greengrass = ["mypy-boto3-greengrass (>=1.34.0,<1.35.0)"] +greengrassv2 = ["mypy-boto3-greengrassv2 (>=1.34.0,<1.35.0)"] +groundstation = ["mypy-boto3-groundstation (>=1.34.0,<1.35.0)"] +guardduty = ["mypy-boto3-guardduty (>=1.34.0,<1.35.0)"] +health = ["mypy-boto3-health (>=1.34.0,<1.35.0)"] +healthlake = ["mypy-boto3-healthlake (>=1.34.0,<1.35.0)"] +honeycode = ["mypy-boto3-honeycode (>=1.34.0,<1.35.0)"] +iam = ["mypy-boto3-iam (>=1.34.0,<1.35.0)"] +identitystore = ["mypy-boto3-identitystore (>=1.34.0,<1.35.0)"] +imagebuilder = ["mypy-boto3-imagebuilder (>=1.34.0,<1.35.0)"] +importexport = ["mypy-boto3-importexport (>=1.34.0,<1.35.0)"] +inspector = ["mypy-boto3-inspector (>=1.34.0,<1.35.0)"] +inspector-scan = ["mypy-boto3-inspector-scan (>=1.34.0,<1.35.0)"] +inspector2 = ["mypy-boto3-inspector2 (>=1.34.0,<1.35.0)"] +internetmonitor = ["mypy-boto3-internetmonitor (>=1.34.0,<1.35.0)"] +iot = ["mypy-boto3-iot (>=1.34.0,<1.35.0)"] +iot-data = ["mypy-boto3-iot-data (>=1.34.0,<1.35.0)"] +iot-jobs-data = ["mypy-boto3-iot-jobs-data (>=1.34.0,<1.35.0)"] +iot-roborunner = ["mypy-boto3-iot-roborunner (>=1.34.0,<1.35.0)"] +iot1click-devices = ["mypy-boto3-iot1click-devices (>=1.34.0,<1.35.0)"] +iot1click-projects = ["mypy-boto3-iot1click-projects (>=1.34.0,<1.35.0)"] +iotanalytics = ["mypy-boto3-iotanalytics (>=1.34.0,<1.35.0)"] +iotdeviceadvisor = ["mypy-boto3-iotdeviceadvisor (>=1.34.0,<1.35.0)"] +iotevents = ["mypy-boto3-iotevents (>=1.34.0,<1.35.0)"] +iotevents-data = ["mypy-boto3-iotevents-data (>=1.34.0,<1.35.0)"] +iotfleethub = ["mypy-boto3-iotfleethub (>=1.34.0,<1.35.0)"] +iotfleetwise = ["mypy-boto3-iotfleetwise (>=1.34.0,<1.35.0)"] +iotsecuretunneling = ["mypy-boto3-iotsecuretunneling (>=1.34.0,<1.35.0)"] +iotsitewise = ["mypy-boto3-iotsitewise (>=1.34.0,<1.35.0)"] +iotthingsgraph = ["mypy-boto3-iotthingsgraph (>=1.34.0,<1.35.0)"] +iottwinmaker = ["mypy-boto3-iottwinmaker (>=1.34.0,<1.35.0)"] +iotwireless = ["mypy-boto3-iotwireless (>=1.34.0,<1.35.0)"] +ivs = ["mypy-boto3-ivs (>=1.34.0,<1.35.0)"] +ivs-realtime = ["mypy-boto3-ivs-realtime (>=1.34.0,<1.35.0)"] +ivschat = ["mypy-boto3-ivschat (>=1.34.0,<1.35.0)"] +kafka = ["mypy-boto3-kafka (>=1.34.0,<1.35.0)"] +kafkaconnect = ["mypy-boto3-kafkaconnect (>=1.34.0,<1.35.0)"] 
+kendra = ["mypy-boto3-kendra (>=1.34.0,<1.35.0)"] +kendra-ranking = ["mypy-boto3-kendra-ranking (>=1.34.0,<1.35.0)"] +keyspaces = ["mypy-boto3-keyspaces (>=1.34.0,<1.35.0)"] +kinesis = ["mypy-boto3-kinesis (>=1.34.0,<1.35.0)"] +kinesis-video-archived-media = ["mypy-boto3-kinesis-video-archived-media (>=1.34.0,<1.35.0)"] +kinesis-video-media = ["mypy-boto3-kinesis-video-media (>=1.34.0,<1.35.0)"] +kinesis-video-signaling = ["mypy-boto3-kinesis-video-signaling (>=1.34.0,<1.35.0)"] +kinesis-video-webrtc-storage = ["mypy-boto3-kinesis-video-webrtc-storage (>=1.34.0,<1.35.0)"] +kinesisanalytics = ["mypy-boto3-kinesisanalytics (>=1.34.0,<1.35.0)"] +kinesisanalyticsv2 = ["mypy-boto3-kinesisanalyticsv2 (>=1.34.0,<1.35.0)"] +kinesisvideo = ["mypy-boto3-kinesisvideo (>=1.34.0,<1.35.0)"] +kms = ["mypy-boto3-kms (>=1.34.0,<1.35.0)"] +lakeformation = ["mypy-boto3-lakeformation (>=1.34.0,<1.35.0)"] +lambda = ["mypy-boto3-lambda (>=1.34.0,<1.35.0)"] +launch-wizard = ["mypy-boto3-launch-wizard (>=1.34.0,<1.35.0)"] +lex-models = ["mypy-boto3-lex-models (>=1.34.0,<1.35.0)"] +lex-runtime = ["mypy-boto3-lex-runtime (>=1.34.0,<1.35.0)"] +lexv2-models = ["mypy-boto3-lexv2-models (>=1.34.0,<1.35.0)"] +lexv2-runtime = ["mypy-boto3-lexv2-runtime (>=1.34.0,<1.35.0)"] +license-manager = ["mypy-boto3-license-manager (>=1.34.0,<1.35.0)"] +license-manager-linux-subscriptions = ["mypy-boto3-license-manager-linux-subscriptions (>=1.34.0,<1.35.0)"] +license-manager-user-subscriptions = ["mypy-boto3-license-manager-user-subscriptions (>=1.34.0,<1.35.0)"] +lightsail = ["mypy-boto3-lightsail (>=1.34.0,<1.35.0)"] +location = ["mypy-boto3-location (>=1.34.0,<1.35.0)"] +logs = ["mypy-boto3-logs (>=1.34.0,<1.35.0)"] +lookoutequipment = ["mypy-boto3-lookoutequipment (>=1.34.0,<1.35.0)"] +lookoutmetrics = ["mypy-boto3-lookoutmetrics (>=1.34.0,<1.35.0)"] +lookoutvision = ["mypy-boto3-lookoutvision (>=1.34.0,<1.35.0)"] +m2 = ["mypy-boto3-m2 (>=1.34.0,<1.35.0)"] +machinelearning = ["mypy-boto3-machinelearning (>=1.34.0,<1.35.0)"] +macie2 = ["mypy-boto3-macie2 (>=1.34.0,<1.35.0)"] +managedblockchain = ["mypy-boto3-managedblockchain (>=1.34.0,<1.35.0)"] +managedblockchain-query = ["mypy-boto3-managedblockchain-query (>=1.34.0,<1.35.0)"] +marketplace-agreement = ["mypy-boto3-marketplace-agreement (>=1.34.0,<1.35.0)"] +marketplace-catalog = ["mypy-boto3-marketplace-catalog (>=1.34.0,<1.35.0)"] +marketplace-deployment = ["mypy-boto3-marketplace-deployment (>=1.34.0,<1.35.0)"] +marketplace-entitlement = ["mypy-boto3-marketplace-entitlement (>=1.34.0,<1.35.0)"] +marketplacecommerceanalytics = ["mypy-boto3-marketplacecommerceanalytics (>=1.34.0,<1.35.0)"] +mediaconnect = ["mypy-boto3-mediaconnect (>=1.34.0,<1.35.0)"] +mediaconvert = ["mypy-boto3-mediaconvert (>=1.34.0,<1.35.0)"] +medialive = ["mypy-boto3-medialive (>=1.34.0,<1.35.0)"] +mediapackage = ["mypy-boto3-mediapackage (>=1.34.0,<1.35.0)"] +mediapackage-vod = ["mypy-boto3-mediapackage-vod (>=1.34.0,<1.35.0)"] +mediapackagev2 = ["mypy-boto3-mediapackagev2 (>=1.34.0,<1.35.0)"] +mediastore = ["mypy-boto3-mediastore (>=1.34.0,<1.35.0)"] +mediastore-data = ["mypy-boto3-mediastore-data (>=1.34.0,<1.35.0)"] +mediatailor = ["mypy-boto3-mediatailor (>=1.34.0,<1.35.0)"] +medical-imaging = ["mypy-boto3-medical-imaging (>=1.34.0,<1.35.0)"] +memorydb = ["mypy-boto3-memorydb (>=1.34.0,<1.35.0)"] +meteringmarketplace = ["mypy-boto3-meteringmarketplace (>=1.34.0,<1.35.0)"] +mgh = ["mypy-boto3-mgh (>=1.34.0,<1.35.0)"] +mgn = ["mypy-boto3-mgn (>=1.34.0,<1.35.0)"] +migration-hub-refactor-spaces = 
["mypy-boto3-migration-hub-refactor-spaces (>=1.34.0,<1.35.0)"] +migrationhub-config = ["mypy-boto3-migrationhub-config (>=1.34.0,<1.35.0)"] +migrationhuborchestrator = ["mypy-boto3-migrationhuborchestrator (>=1.34.0,<1.35.0)"] +migrationhubstrategy = ["mypy-boto3-migrationhubstrategy (>=1.34.0,<1.35.0)"] +mobile = ["mypy-boto3-mobile (>=1.34.0,<1.35.0)"] +mq = ["mypy-boto3-mq (>=1.34.0,<1.35.0)"] +mturk = ["mypy-boto3-mturk (>=1.34.0,<1.35.0)"] +mwaa = ["mypy-boto3-mwaa (>=1.34.0,<1.35.0)"] +neptune = ["mypy-boto3-neptune (>=1.34.0,<1.35.0)"] +neptunedata = ["mypy-boto3-neptunedata (>=1.34.0,<1.35.0)"] +network-firewall = ["mypy-boto3-network-firewall (>=1.34.0,<1.35.0)"] +networkmanager = ["mypy-boto3-networkmanager (>=1.34.0,<1.35.0)"] +nimble = ["mypy-boto3-nimble (>=1.34.0,<1.35.0)"] +oam = ["mypy-boto3-oam (>=1.34.0,<1.35.0)"] +omics = ["mypy-boto3-omics (>=1.34.0,<1.35.0)"] +opensearch = ["mypy-boto3-opensearch (>=1.34.0,<1.35.0)"] +opensearchserverless = ["mypy-boto3-opensearchserverless (>=1.34.0,<1.35.0)"] +opsworks = ["mypy-boto3-opsworks (>=1.34.0,<1.35.0)"] +opsworkscm = ["mypy-boto3-opsworkscm (>=1.34.0,<1.35.0)"] +organizations = ["mypy-boto3-organizations (>=1.34.0,<1.35.0)"] +osis = ["mypy-boto3-osis (>=1.34.0,<1.35.0)"] +outposts = ["mypy-boto3-outposts (>=1.34.0,<1.35.0)"] +panorama = ["mypy-boto3-panorama (>=1.34.0,<1.35.0)"] +payment-cryptography = ["mypy-boto3-payment-cryptography (>=1.34.0,<1.35.0)"] +payment-cryptography-data = ["mypy-boto3-payment-cryptography-data (>=1.34.0,<1.35.0)"] +pca-connector-ad = ["mypy-boto3-pca-connector-ad (>=1.34.0,<1.35.0)"] +personalize = ["mypy-boto3-personalize (>=1.34.0,<1.35.0)"] +personalize-events = ["mypy-boto3-personalize-events (>=1.34.0,<1.35.0)"] +personalize-runtime = ["mypy-boto3-personalize-runtime (>=1.34.0,<1.35.0)"] +pi = ["mypy-boto3-pi (>=1.34.0,<1.35.0)"] +pinpoint = ["mypy-boto3-pinpoint (>=1.34.0,<1.35.0)"] +pinpoint-email = ["mypy-boto3-pinpoint-email (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice = ["mypy-boto3-pinpoint-sms-voice (>=1.34.0,<1.35.0)"] +pinpoint-sms-voice-v2 = ["mypy-boto3-pinpoint-sms-voice-v2 (>=1.34.0,<1.35.0)"] +pipes = ["mypy-boto3-pipes (>=1.34.0,<1.35.0)"] +polly = ["mypy-boto3-polly (>=1.34.0,<1.35.0)"] +pricing = ["mypy-boto3-pricing (>=1.34.0,<1.35.0)"] +privatenetworks = ["mypy-boto3-privatenetworks (>=1.34.0,<1.35.0)"] +proton = ["mypy-boto3-proton (>=1.34.0,<1.35.0)"] +qbusiness = ["mypy-boto3-qbusiness (>=1.34.0,<1.35.0)"] +qconnect = ["mypy-boto3-qconnect (>=1.34.0,<1.35.0)"] +qldb = ["mypy-boto3-qldb (>=1.34.0,<1.35.0)"] +qldb-session = ["mypy-boto3-qldb-session (>=1.34.0,<1.35.0)"] +quicksight = ["mypy-boto3-quicksight (>=1.34.0,<1.35.0)"] +ram = ["mypy-boto3-ram (>=1.34.0,<1.35.0)"] +rbin = ["mypy-boto3-rbin (>=1.34.0,<1.35.0)"] +rds = ["mypy-boto3-rds (>=1.34.0,<1.35.0)"] +rds-data = ["mypy-boto3-rds-data (>=1.34.0,<1.35.0)"] +redshift = ["mypy-boto3-redshift (>=1.34.0,<1.35.0)"] +redshift-data = ["mypy-boto3-redshift-data (>=1.34.0,<1.35.0)"] +redshift-serverless = ["mypy-boto3-redshift-serverless (>=1.34.0,<1.35.0)"] +rekognition = ["mypy-boto3-rekognition (>=1.34.0,<1.35.0)"] +repostspace = ["mypy-boto3-repostspace (>=1.34.0,<1.35.0)"] +resiliencehub = ["mypy-boto3-resiliencehub (>=1.34.0,<1.35.0)"] +resource-explorer-2 = ["mypy-boto3-resource-explorer-2 (>=1.34.0,<1.35.0)"] +resource-groups = ["mypy-boto3-resource-groups (>=1.34.0,<1.35.0)"] +resourcegroupstaggingapi = ["mypy-boto3-resourcegroupstaggingapi (>=1.34.0,<1.35.0)"] +robomaker = ["mypy-boto3-robomaker 
(>=1.34.0,<1.35.0)"] +rolesanywhere = ["mypy-boto3-rolesanywhere (>=1.34.0,<1.35.0)"] +route53 = ["mypy-boto3-route53 (>=1.34.0,<1.35.0)"] +route53-recovery-cluster = ["mypy-boto3-route53-recovery-cluster (>=1.34.0,<1.35.0)"] +route53-recovery-control-config = ["mypy-boto3-route53-recovery-control-config (>=1.34.0,<1.35.0)"] +route53-recovery-readiness = ["mypy-boto3-route53-recovery-readiness (>=1.34.0,<1.35.0)"] +route53domains = ["mypy-boto3-route53domains (>=1.34.0,<1.35.0)"] +route53resolver = ["mypy-boto3-route53resolver (>=1.34.0,<1.35.0)"] +rum = ["mypy-boto3-rum (>=1.34.0,<1.35.0)"] +s3 = ["mypy-boto3-s3 (>=1.34.0,<1.35.0)"] +s3control = ["mypy-boto3-s3control (>=1.34.0,<1.35.0)"] +s3outposts = ["mypy-boto3-s3outposts (>=1.34.0,<1.35.0)"] +sagemaker = ["mypy-boto3-sagemaker (>=1.34.0,<1.35.0)"] +sagemaker-a2i-runtime = ["mypy-boto3-sagemaker-a2i-runtime (>=1.34.0,<1.35.0)"] +sagemaker-edge = ["mypy-boto3-sagemaker-edge (>=1.34.0,<1.35.0)"] +sagemaker-featurestore-runtime = ["mypy-boto3-sagemaker-featurestore-runtime (>=1.34.0,<1.35.0)"] +sagemaker-geospatial = ["mypy-boto3-sagemaker-geospatial (>=1.34.0,<1.35.0)"] +sagemaker-metrics = ["mypy-boto3-sagemaker-metrics (>=1.34.0,<1.35.0)"] +sagemaker-runtime = ["mypy-boto3-sagemaker-runtime (>=1.34.0,<1.35.0)"] +savingsplans = ["mypy-boto3-savingsplans (>=1.34.0,<1.35.0)"] +scheduler = ["mypy-boto3-scheduler (>=1.34.0,<1.35.0)"] +schemas = ["mypy-boto3-schemas (>=1.34.0,<1.35.0)"] +sdb = ["mypy-boto3-sdb (>=1.34.0,<1.35.0)"] +secretsmanager = ["mypy-boto3-secretsmanager (>=1.34.0,<1.35.0)"] +securityhub = ["mypy-boto3-securityhub (>=1.34.0,<1.35.0)"] +securitylake = ["mypy-boto3-securitylake (>=1.34.0,<1.35.0)"] +serverlessrepo = ["mypy-boto3-serverlessrepo (>=1.34.0,<1.35.0)"] +service-quotas = ["mypy-boto3-service-quotas (>=1.34.0,<1.35.0)"] +servicecatalog = ["mypy-boto3-servicecatalog (>=1.34.0,<1.35.0)"] +servicecatalog-appregistry = ["mypy-boto3-servicecatalog-appregistry (>=1.34.0,<1.35.0)"] +servicediscovery = ["mypy-boto3-servicediscovery (>=1.34.0,<1.35.0)"] +ses = ["mypy-boto3-ses (>=1.34.0,<1.35.0)"] +sesv2 = ["mypy-boto3-sesv2 (>=1.34.0,<1.35.0)"] +shield = ["mypy-boto3-shield (>=1.34.0,<1.35.0)"] +signer = ["mypy-boto3-signer (>=1.34.0,<1.35.0)"] +simspaceweaver = ["mypy-boto3-simspaceweaver (>=1.34.0,<1.35.0)"] +sms = ["mypy-boto3-sms (>=1.34.0,<1.35.0)"] +sms-voice = ["mypy-boto3-sms-voice (>=1.34.0,<1.35.0)"] +snow-device-management = ["mypy-boto3-snow-device-management (>=1.34.0,<1.35.0)"] +snowball = ["mypy-boto3-snowball (>=1.34.0,<1.35.0)"] +sns = ["mypy-boto3-sns (>=1.34.0,<1.35.0)"] +sqs = ["mypy-boto3-sqs (>=1.34.0,<1.35.0)"] +ssm = ["mypy-boto3-ssm (>=1.34.0,<1.35.0)"] +ssm-contacts = ["mypy-boto3-ssm-contacts (>=1.34.0,<1.35.0)"] +ssm-incidents = ["mypy-boto3-ssm-incidents (>=1.34.0,<1.35.0)"] +ssm-sap = ["mypy-boto3-ssm-sap (>=1.34.0,<1.35.0)"] +sso = ["mypy-boto3-sso (>=1.34.0,<1.35.0)"] +sso-admin = ["mypy-boto3-sso-admin (>=1.34.0,<1.35.0)"] +sso-oidc = ["mypy-boto3-sso-oidc (>=1.34.0,<1.35.0)"] +stepfunctions = ["mypy-boto3-stepfunctions (>=1.34.0,<1.35.0)"] +storagegateway = ["mypy-boto3-storagegateway (>=1.34.0,<1.35.0)"] +sts = ["mypy-boto3-sts (>=1.34.0,<1.35.0)"] +support = ["mypy-boto3-support (>=1.34.0,<1.35.0)"] +support-app = ["mypy-boto3-support-app (>=1.34.0,<1.35.0)"] +swf = ["mypy-boto3-swf (>=1.34.0,<1.35.0)"] +synthetics = ["mypy-boto3-synthetics (>=1.34.0,<1.35.0)"] +textract = ["mypy-boto3-textract (>=1.34.0,<1.35.0)"] +timestream-query = ["mypy-boto3-timestream-query 
(>=1.34.0,<1.35.0)"] +timestream-write = ["mypy-boto3-timestream-write (>=1.34.0,<1.35.0)"] +tnb = ["mypy-boto3-tnb (>=1.34.0,<1.35.0)"] +transcribe = ["mypy-boto3-transcribe (>=1.34.0,<1.35.0)"] +transfer = ["mypy-boto3-transfer (>=1.34.0,<1.35.0)"] +translate = ["mypy-boto3-translate (>=1.34.0,<1.35.0)"] +trustedadvisor = ["mypy-boto3-trustedadvisor (>=1.34.0,<1.35.0)"] +verifiedpermissions = ["mypy-boto3-verifiedpermissions (>=1.34.0,<1.35.0)"] +voice-id = ["mypy-boto3-voice-id (>=1.34.0,<1.35.0)"] +vpc-lattice = ["mypy-boto3-vpc-lattice (>=1.34.0,<1.35.0)"] +waf = ["mypy-boto3-waf (>=1.34.0,<1.35.0)"] +waf-regional = ["mypy-boto3-waf-regional (>=1.34.0,<1.35.0)"] +wafv2 = ["mypy-boto3-wafv2 (>=1.34.0,<1.35.0)"] +wellarchitected = ["mypy-boto3-wellarchitected (>=1.34.0,<1.35.0)"] +wisdom = ["mypy-boto3-wisdom (>=1.34.0,<1.35.0)"] +workdocs = ["mypy-boto3-workdocs (>=1.34.0,<1.35.0)"] +worklink = ["mypy-boto3-worklink (>=1.34.0,<1.35.0)"] +workmail = ["mypy-boto3-workmail (>=1.34.0,<1.35.0)"] +workmailmessageflow = ["mypy-boto3-workmailmessageflow (>=1.34.0,<1.35.0)"] +workspaces = ["mypy-boto3-workspaces (>=1.34.0,<1.35.0)"] +workspaces-thin-client = ["mypy-boto3-workspaces-thin-client (>=1.34.0,<1.35.0)"] +workspaces-web = ["mypy-boto3-workspaces-web (>=1.34.0,<1.35.0)"] +xray = ["mypy-boto3-xray (>=1.34.0,<1.35.0)"] [[package]] name = "botocore" -version = "1.33.5" +version = "1.34.0" description = "Low-level, data-driven core of boto 3." optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "botocore-1.33.5-py3-none-any.whl", hash = "sha256:c165207fb33e8352191d6a2770bce9f9bf01c62f5149824c4295d7f49bf96746"}, - {file = "botocore-1.33.5.tar.gz", hash = "sha256:aa4a5c7cf78a403280e50daba8966479e23577b4a5c20165f71fab7a9b405e99"}, + {file = "botocore-1.34.0-py3-none-any.whl", hash = "sha256:6ec19f6c9f61c3df22fb3e083940ac7946a3d96128db1f370f10aea702bb157f"}, + {file = "botocore-1.34.0.tar.gz", hash = "sha256:711b406de910585395466ca649bceeea87a04300ddf74d9a2e20727c7f27f2f1"}, ] [package.dependencies] @@ -2402,13 +2402,13 @@ reports = ["lxml"] [[package]] name = "mypy-boto3-cloudformation" -version = "1.33.0" -description = "Type annotations for boto3.CloudFormation 1.33.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.CloudFormation 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-cloudformation-1.33.0.tar.gz", hash = "sha256:e1c27b910ffce96ed0ae126e4c5755bdd90a56dc7548e543264334a35608df0a"}, - {file = "mypy_boto3_cloudformation-1.33.0-py3-none-any.whl", hash = "sha256:f56659fa8d804017075e3fb27ac8df17cd6c9ea3f686d79832bf5add51eb2965"}, + {file = "mypy-boto3-cloudformation-1.34.0.tar.gz", hash = "sha256:9b25df9ef15d9dc8e4e892cc07aa9343f15f2ed5eb7d33eb5eb65adfa63f538f"}, + {file = "mypy_boto3_cloudformation-1.34.0-py3-none-any.whl", hash = "sha256:4e63a2bca1882971881d65983acd774c2fc636bbc5dc8c3e1f4a41c539cf3c90"}, ] [package.dependencies] @@ -2416,13 +2416,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-dynamodb" -version = "1.33.0" -description = "Type annotations for boto3.DynamoDB 1.33.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.DynamoDB 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = 
"mypy-boto3-dynamodb-1.33.0.tar.gz", hash = "sha256:2cfe1089c89de61b1ec0e69a72ba3e6865a013ea0a37d318ab564983785d42f9"}, - {file = "mypy_boto3_dynamodb-1.33.0-py3-none-any.whl", hash = "sha256:619ea2cc311ced0ecb44b6e8d3bf3dd851fb7c53a34128b4ff6d6e6a11fdd41f"}, + {file = "mypy-boto3-dynamodb-1.34.0.tar.gz", hash = "sha256:c0d98d7e83b0bc22e5039f703889fb96202d818171c4206fd31e665a37654e84"}, + {file = "mypy_boto3_dynamodb-1.34.0-py3-none-any.whl", hash = "sha256:76869c3fec882ddeeaca485074e302bf38c3b61103664d665dfed9425234ff75"}, ] [package.dependencies] @@ -2430,13 +2430,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-ec2" -version = "1.33.0" -description = "Type annotations for boto3.EC2 1.33.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.EC2 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-ec2-1.33.0.tar.gz", hash = "sha256:07578937f974f6129a79d88f788e53a0a38d22e5a9c0c63d589f24e39b8fb90d"}, - {file = "mypy_boto3_ec2-1.33.0-py3-none-any.whl", hash = "sha256:ac24fe3a7d849735c84bf53abaa2ba809e39d2582e98bd5538adecb585fce9b0"}, + {file = "mypy-boto3-ec2-1.34.0.tar.gz", hash = "sha256:f19d4fe9b4ae4173af6ec841d1d2d38e53b0c5eec4f4e6fcbad06d0658e13070"}, + {file = "mypy_boto3_ec2-1.34.0-py3-none-any.whl", hash = "sha256:678f58876bcbb21fcae455ed1ba75a542c026a36b0dd464dae7b379afdcecd52"}, ] [package.dependencies] @@ -2444,13 +2444,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-lambda" -version = "1.33.0" -description = "Type annotations for boto3.Lambda 1.33.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.Lambda 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-lambda-1.33.0.tar.gz", hash = "sha256:beac0cb4b94f83a444242db16f601405bdfb6c15808c2c52720224d907e7af40"}, - {file = "mypy_boto3_lambda-1.33.0-py3-none-any.whl", hash = "sha256:10e0f04168f4064e89ba136218162003f1cb6826dcbfa95ca982d3cb792fc9f7"}, + {file = "mypy-boto3-lambda-1.34.0.tar.gz", hash = "sha256:e74c0ce548da747a8c6e643c39dad8aa54d67e057f57740ec780a7e565590627"}, + {file = "mypy_boto3_lambda-1.34.0-py3-none-any.whl", hash = "sha256:109a7e126e84d6da6cacf8ab5c7c6f2be022417fe7bfb7f9b019767d7034f73b"}, ] [package.dependencies] @@ -2458,13 +2458,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-logs" -version = "1.33.0" -description = "Type annotations for boto3.CloudWatchLogs 1.33.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.CloudWatchLogs 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-logs-1.33.0.tar.gz", hash = "sha256:607520796f6caa69e5e921eb4899a370f6a7ed0c7ebdc1bf0396bec24af880f2"}, - {file = "mypy_boto3_logs-1.33.0-py3-none-any.whl", hash = "sha256:8ba1999f949f33541c0690054725a3d7d1e9084f7cc57fc69b58aabbbf5788b3"}, + {file = "mypy-boto3-logs-1.34.0.tar.gz", hash = "sha256:a852bf6c48733a51c324ca97da042bfe4c66b0d33aabe042fb27d3092572d55b"}, + {file = "mypy_boto3_logs-1.34.0-py3-none-any.whl", hash = "sha256:cb2d29096d3b07d7d508fa1f236f9cd15c292d41c8807aba7347627868e7ebdc"}, ] 
[package.dependencies] @@ -2472,13 +2472,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-rds" -version = "1.33.0" -description = "Type annotations for boto3.RDS 1.33.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.RDS 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-rds-1.33.0.tar.gz", hash = "sha256:2a50e40aa473b34e6651672a4233873b5fafbe42218c33af27a8ecf8571e9169"}, - {file = "mypy_boto3_rds-1.33.0-py3-none-any.whl", hash = "sha256:dda63ea8b2358e549a73001ad7b56c3b4ce3da90f00b520aa37e3ada99e4ab69"}, + {file = "mypy-boto3-rds-1.34.0.tar.gz", hash = "sha256:9909f5f23ccb01830262a2e16c9e8a19acc46021d754838099d7bdb295911e94"}, + {file = "mypy_boto3_rds-1.34.0-py3-none-any.whl", hash = "sha256:aa8d15c28f140c8bc5e079c405e92c20c68f968a319eb21b69a5692610f86fe5"}, ] [package.dependencies] @@ -2486,13 +2486,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-s3" -version = "1.33.2" -description = "Type annotations for boto3.S3 1.33.2 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.S3 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-s3-1.33.2.tar.gz", hash = "sha256:f54a3ad3288f4e4719ebada3dde68c320507b0fc451d59bc68af7e6ab15cbdad"}, - {file = "mypy_boto3_s3-1.33.2-py3-none-any.whl", hash = "sha256:9d463df6def30de31a467d49ab92ff7795d46709d56eff6f52216a08bac27918"}, + {file = "mypy-boto3-s3-1.34.0.tar.gz", hash = "sha256:7644a00e096ebb1c3292551059f64ff8329625dacd40827ced9481b14d64c733"}, + {file = "mypy_boto3_s3-1.34.0-py3-none-any.whl", hash = "sha256:633876d2a96dbb924f9667084316c1759bff40c19a9a38313d5a4e825c5fc641"}, ] [package.dependencies] @@ -2500,13 +2500,13 @@ typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.12\""} [[package]] name = "mypy-boto3-sqs" -version = "1.33.0" -description = "Type annotations for boto3.SQS 1.33.0 service generated with mypy-boto3-builder 7.20.3" +version = "1.34.0" +description = "Type annotations for boto3.SQS 1.34.0 service generated with mypy-boto3-builder 7.21.0" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-boto3-sqs-1.33.0.tar.gz", hash = "sha256:81f4838e81cbb0c088a10e287922fdf6a3f317cbab6647993ab9dbd567c0e8fb"}, - {file = "mypy_boto3_sqs-1.33.0-py3-none-any.whl", hash = "sha256:81f71d5f461e5e670d2ca93df92c93efdd7c29be33eabf8475df5f071e638583"}, + {file = "mypy-boto3-sqs-1.34.0.tar.gz", hash = "sha256:0bf8995f58919ab295398100e72eaa7da898adcfd9d339a42f3c48ce473419d5"}, + {file = "mypy_boto3_sqs-1.34.0-py3-none-any.whl", hash = "sha256:94d8aea4ae75605f70e58e440d706e04d5c614101ddb2f0c73d306d776d10995"}, ] [package.dependencies] @@ -3909,13 +3909,13 @@ pyasn1 = ">=0.1.3" [[package]] name = "s3transfer" -version = "0.8.2" +version = "0.9.0" description = "An Amazon S3 Transfer Manager" optional = false -python-versions = ">= 3.7" +python-versions = ">= 3.8" files = [ - {file = "s3transfer-0.8.2-py3-none-any.whl", hash = "sha256:c9e56cbe88b28d8e197cf841f1f0c130f246595e77ae5b5a05b69fe7cb83de76"}, - {file = "s3transfer-0.8.2.tar.gz", hash = "sha256:368ac6876a9e9ed91f6bc86581e319be08188dc60d50e0d56308ed5765446283"}, + {file = "s3transfer-0.9.0-py3-none-any.whl", hash = 
"sha256:01d4d2c35a016db8cb14f9a4d5e84c1f8c96e7ffc211422555eed45c11fa7eb1"}, + {file = "s3transfer-0.9.0.tar.gz", hash = "sha256:9e1b186ec8bb5907a1e82b51237091889a9973a2bb799a924bcd9f301ff79d3d"}, ] [package.dependencies] From 730d9478922c703c10591c56f3bbdcbab1d96e03 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Dec 2023 19:28:52 +0000 Subject: [PATCH 224/262] Bump bcrypt from 4.1.1 to 4.1.2 (#1574) --- poetry.lock | 46 ++++++++++++++++++++++++++++------------------ 1 file changed, 28 insertions(+), 18 deletions(-) diff --git a/poetry.lock b/poetry.lock index ca7238a40e..863ddb2c3c 100644 --- a/poetry.lock +++ b/poetry.lock @@ -102,28 +102,38 @@ tzdata = ["tzdata"] [[package]] name = "bcrypt" -version = "4.1.1" +version = "4.1.2" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" files = [ - {file = "bcrypt-4.1.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:196008d91201bbb1aa4e666fee5e610face25d532e433a560cabb33bfdff958b"}, - {file = "bcrypt-4.1.1-cp37-abi3-macosx_13_0_universal2.whl", hash = "sha256:2e197534c884336f9020c1f3a8efbaab0aa96fc798068cb2da9c671818b7fbb0"}, - {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d573885b637815a7f3a3cd5f87724d7d0822da64b0ab0aa7f7c78bae534e86dc"}, - {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bab33473f973e8058d1b2df8d6e095d237c49fbf7a02b527541a86a5d1dc4444"}, - {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:fb931cd004a7ad36a89789caf18a54c20287ec1cd62161265344b9c4554fdb2e"}, - {file = "bcrypt-4.1.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:12f40f78dcba4aa7d1354d35acf45fae9488862a4fb695c7eeda5ace6aae273f"}, - {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:2ade10e8613a3b8446214846d3ddbd56cfe9205a7d64742f0b75458c868f7492"}, - {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f33b385c3e80b5a26b3a5e148e6165f873c1c202423570fdf45fe34e00e5f3e5"}, - {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:755b9d27abcab678e0b8fb4d0abdebeea1f68dd1183b3f518bad8d31fa77d8be"}, - {file = "bcrypt-4.1.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a7a7b8a87e51e5e8ca85b9fdaf3a5dc7aaf123365a09be7a27883d54b9a0c403"}, - {file = "bcrypt-4.1.1-cp37-abi3-win32.whl", hash = "sha256:3d6c4e0d6963c52f8142cdea428e875042e7ce8c84812d8e5507bd1e42534e07"}, - {file = "bcrypt-4.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:14d41933510717f98aac63378b7956bbe548986e435df173c841d7f2bd0b2de7"}, - {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:24c2ebd287b5b11016f31d506ca1052d068c3f9dc817160628504690376ff050"}, - {file = "bcrypt-4.1.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:476aa8e8aca554260159d4c7a97d6be529c8e177dbc1d443cb6b471e24e82c74"}, - {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:12611c4b0a8b1c461646228344784a1089bc0c49975680a2f54f516e71e9b79e"}, - {file = "bcrypt-4.1.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c6450538a0fc32fb7ce4c6d511448c54c4ff7640b2ed81badf9898dcb9e5b737"}, - {file = "bcrypt-4.1.1.tar.gz", hash = "sha256:df37f5418d4f1cdcff845f60e747a015389fa4e63703c918330865e06ad80007"}, + {file = "bcrypt-4.1.2-cp37-abi3-macosx_10_12_universal2.whl", hash = 
"sha256:ac621c093edb28200728a9cca214d7e838529e557027ef0581685909acd28b5e"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea505c97a5c465ab8c3ba75c0805a102ce526695cd6818c6de3b1a38f6f60da1"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57fa9442758da926ed33a91644649d3e340a71e2d0a5a8de064fb621fd5a3326"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:eb3bd3321517916696233b5e0c67fd7d6281f0ef48e66812db35fc963a422a1c"}, + {file = "bcrypt-4.1.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:6cad43d8c63f34b26aef462b6f5e44fdcf9860b723d2453b5d391258c4c8e966"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:44290ccc827d3a24604f2c8bcd00d0da349e336e6503656cb8192133e27335e2"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:732b3920a08eacf12f93e6b04ea276c489f1c8fb49344f564cca2adb663b3e4c"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1c28973decf4e0e69cee78c68e30a523be441972c826703bb93099868a8ff5b5"}, + {file = "bcrypt-4.1.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:b8df79979c5bae07f1db22dcc49cc5bccf08a0380ca5c6f391cbb5790355c0b0"}, + {file = "bcrypt-4.1.2-cp37-abi3-win32.whl", hash = "sha256:fbe188b878313d01b7718390f31528be4010fed1faa798c5a1d0469c9c48c369"}, + {file = "bcrypt-4.1.2-cp37-abi3-win_amd64.whl", hash = "sha256:9800ae5bd5077b13725e2e3934aa3c9c37e49d3ea3d06318010aa40f54c63551"}, + {file = "bcrypt-4.1.2-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:71b8be82bc46cedd61a9f4ccb6c1a493211d031415a34adde3669ee1b0afbb63"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e3c6642077b0c8092580c819c1684161262b2e30c4f45deb000c38947bf483"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:387e7e1af9a4dd636b9505a465032f2f5cb8e61ba1120e79a0e1cd0b512f3dfc"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f70d9c61f9c4ca7d57f3bfe88a5ccf62546ffbadf3681bb1e268d9d2e41c91a7"}, + {file = "bcrypt-4.1.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2a298db2a8ab20056120b45e86c00a0a5eb50ec4075b6142db35f593b97cb3fb"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:ba55e40de38a24e2d78d34c2d36d6e864f93e0d79d0b6ce915e4335aa81d01b1"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:3566a88234e8de2ccae31968127b0ecccbb4cddb629da744165db72b58d88ca4"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b90e216dc36864ae7132cb151ffe95155a37a14e0de3a8f64b49655dd959ff9c"}, + {file = "bcrypt-4.1.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:69057b9fc5093ea1ab00dd24ede891f3e5e65bee040395fb1e66ee196f9c9b4a"}, + {file = "bcrypt-4.1.2-cp39-abi3-win32.whl", hash = "sha256:02d9ef8915f72dd6daaef40e0baeef8a017ce624369f09754baf32bb32dba25f"}, + {file = "bcrypt-4.1.2-cp39-abi3-win_amd64.whl", hash = "sha256:be3ab1071662f6065899fe08428e45c16aa36e28bc42921c4901a191fda6ee42"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d75fc8cd0ba23f97bae88a6ec04e9e5351ff3c6ad06f38fe32ba50cbd0d11946"}, + {file = "bcrypt-4.1.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:a97e07e83e3262599434816f631cc4c7ca2aa8e9c072c1b1a7fec2ae809a1d2d"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = 
"sha256:e51c42750b7585cee7892c2614be0d14107fad9581d1738d954a262556dd1aab"}, + {file = "bcrypt-4.1.2-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ba4e4cc26610581a6329b3937e02d319f5ad4b85b074846bf4fef8a8cf51e7bb"}, + {file = "bcrypt-4.1.2.tar.gz", hash = "sha256:33313a1200a3ae90b75587ceac502b048b840fc69e7f7a0905b5f87fac7a1258"}, ] [package.extras] From 333210ebab1ddbe704a55d22ccec7b43b2846882 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Sun, 17 Dec 2023 21:24:09 -0400 Subject: [PATCH 225/262] Make it easier to use log_elapsed_time with LoggerMixin (#1573) --- api/circulation_manager.py | 10 +++---- core/util/log.py | 45 +++++++++++++++++++++++++------ pyproject.toml | 1 + tests/core/util/test_log.py | 53 +++++++++++++++++++++++++++++++++++++ 4 files changed, 95 insertions(+), 14 deletions(-) create mode 100644 tests/core/util/test_log.py diff --git a/api/circulation_manager.py b/api/circulation_manager.py index 641093bf50..b8df1ce48c 100644 --- a/api/circulation_manager.py +++ b/api/circulation_manager.py @@ -1,6 +1,5 @@ from __future__ import annotations -import logging import urllib.parse from typing import TYPE_CHECKING @@ -41,7 +40,8 @@ from core.model import ConfigurationSetting, Library from core.model.discovery_service_registration import DiscoveryServiceRegistration from core.service.container import Services -from core.util.log import elapsed_time_logging, log_elapsed_time +from core.service.logging.configuration import LogLevel +from core.util.log import LoggerMixin, elapsed_time_logging, log_elapsed_time if TYPE_CHECKING: from api.admin.controller.admin_search import AdminSearchController @@ -90,9 +90,7 @@ from api.admin.controller.work_editor import WorkController as AdminWorkController -class CirculationManager: - log = logging.getLogger("api.circulation_manager.CirculationManager") - +class CirculationManager(LoggerMixin): # API Controllers index_controller: IndexController opds_feeds: OPDSFeedController @@ -188,7 +186,7 @@ def reload_settings_if_changed(self): self.load_settings() self.site_configuration_last_update = last_update - @log_elapsed_time(log_method=log.info, message_prefix="load_settings") + @log_elapsed_time(log_level=LogLevel.info, message_prefix="load_settings") def load_settings(self): """Load all necessary configuration settings and external integrations from the database. diff --git a/core/util/log.py b/core/util/log.py index 2ac04a4220..3030920d44 100644 --- a/core/util/log.py +++ b/core/util/log.py @@ -3,23 +3,49 @@ import sys import time from contextlib import contextmanager -from typing import Callable, Optional +from typing import Callable, Generator, Optional, TypeVar + +from typing_extensions import ParamSpec + +from core.service.logging.configuration import LogLevel + +P = ParamSpec("P") +T = TypeVar("T") def log_elapsed_time( - *, log_method: Callable, message_prefix: Optional[str] = None, skip_start=False -): + *, + log_level: LogLevel, + message_prefix: Optional[str] = None, + skip_start: bool = False, +) -> Callable[[Callable[P, T]], Callable[P, T]]: """Decorator for logging elapsed time. - :param log_method: Callable to be used to log the message(s). + Must be applied to a method of a subclass of LoggerMixin or a class that has a log property + that is an instance of logging.Logger. + + :param log_level: The log level to use for the emitted log records. :param message_prefix: Optional string to be prepended to the emitted log records. :param skip_start: Boolean indicating whether to skip the starting message. 
""" prefix = f"{message_prefix}: " if message_prefix else "" - def outer(fn): + def outer(fn: Callable[P, T]) -> Callable[P, T]: @functools.wraps(fn) - def wrapper(*args, **kwargs): + def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + if ( + len(args) > 0 + and hasattr(args[0], "log") + and isinstance(args[0].log, logging.Logger) + ): + log_method = getattr(args[0].log, log_level.name) + elif len(args) > 0 and hasattr(args[0], "logger"): + log_method = getattr(args[0].logger(), log_level.name) + else: + raise RuntimeError( + "Decorator must be applied to a method of a LoggerMixin or a subclass of LoggerMixin." + ) + if not skip_start: log_method(f"{prefix}Starting...") tic = time.perf_counter() @@ -38,8 +64,11 @@ def wrapper(*args, **kwargs): @contextmanager def elapsed_time_logging( - *, log_method: Callable, message_prefix: Optional[str] = None, skip_start=False -): + *, + log_method: Callable[[str], None], + message_prefix: Optional[str] = None, + skip_start: bool = False, +) -> Generator[None, None, None]: """Context manager for logging elapsed time. :param log_method: Callable to be used to log the message(s). diff --git a/pyproject.toml b/pyproject.toml index 02f03c1645..bca399eb4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -108,6 +108,7 @@ module = [ "core.util.authentication_for_opds", "core.util.base64", "core.util.cache", + "core.util.log", "core.util.notifications", "core.util.problem_detail", "core.util.string_helpers", diff --git a/tests/core/util/test_log.py b/tests/core/util/test_log.py new file mode 100644 index 0000000000..f5af553b92 --- /dev/null +++ b/tests/core/util/test_log.py @@ -0,0 +1,53 @@ +import pytest +from _pytest.logging import LogCaptureFixture + +from core.service.logging.configuration import LogLevel +from core.util.log import LoggerMixin, log_elapsed_time + + +class MockClass(LoggerMixin): + @classmethod + @log_elapsed_time(log_level=LogLevel.info, message_prefix="Test") + def test_method(cls): + pass + + @log_elapsed_time( + log_level=LogLevel.debug, message_prefix="Test 12345", skip_start=True + ) + def test_method_2(self): + pass + + +def test_log_elapsed_time_cls(caplog: LogCaptureFixture): + caplog.set_level(LogLevel.info.value) + + MockClass.test_method() + assert len(caplog.records) == 2 + + [first, second] = caplog.records + assert first.name == "tests.core.util.test_log.MockClass" + assert first.message == "Test: Starting..." + assert first.levelname == LogLevel.info.value + + assert second.name == "tests.core.util.test_log.MockClass" + assert "Test: Completed. (elapsed time:" in second.message + assert second.levelname == LogLevel.info.value + + +def test_log_elapsed_time_instance(caplog: LogCaptureFixture): + caplog.set_level(LogLevel.debug.value) + + MockClass().test_method_2() + assert len(caplog.records) == 1 + [record] = caplog.records + assert record.name == "tests.core.util.test_log.MockClass" + assert "Test 12345: Completed. 
(elapsed time:" in record.message + assert record.levelname == LogLevel.debug.value + + +def test_log_elapsed_time_invalid(caplog: LogCaptureFixture): + caplog.set_level(LogLevel.info.value) + + with pytest.raises(RuntimeError): + log_elapsed_time(log_level=LogLevel.info, message_prefix="Test")(lambda: None)() + assert len(caplog.records) == 0 From 75e6d0d897335a49bf17dc0d964b14d2cd498307 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Mon, 18 Dec 2023 07:50:19 -0400 Subject: [PATCH 226/262] Remove Python 3.8 & 3.9 (PP-609) (#1572) --- .github/workflows/lint.yml | 2 +- .github/workflows/mypy.yml | 2 +- .github/workflows/test-build.yml | 6 +- .pre-commit-config.yaml | 3 +- README.md | 16 +- ...1_1e46a5bc33b5_migrate_catalog_services.py | 3 +- ...31206_e06f965879ab_marc_s3_file_cleanup.py | 3 +- api/admin/announcement_list_validator.py | 12 +- api/admin/config.py | 13 +- api/admin/controller/admin_search.py | 6 +- api/admin/controller/announcement_service.py | 9 +- api/admin/controller/catalog_services.py | 10 +- api/admin/controller/collection_self_tests.py | 8 +- api/admin/controller/collection_settings.py | 12 +- api/admin/controller/custom_lists.py | 52 ++--- api/admin/controller/dashboard.py | 2 +- ...discovery_service_library_registrations.py | 8 +- api/admin/controller/discovery_services.py | 8 +- .../controller/individual_admin_settings.py | 15 +- api/admin/controller/integration_settings.py | 42 ++-- api/admin/controller/library_settings.py | 7 +- .../patron_auth_service_self_tests.py | 12 +- api/admin/controller/patron_auth_services.py | 14 +- api/admin/controller/quicksight.py | 3 +- api/admin/controller/reset_password.py | 12 +- api/admin/controller/self_tests.py | 16 +- api/admin/controller/settings.py | 12 +- api/admin/controller/sign_in.py | 3 +- api/admin/dashboard_stats.py | 3 +- api/admin/form_data.py | 23 +- api/admin/model/dashboard_statistics.py | 14 +- api/admin/model/quicksight.py | 7 +- .../password_admin_authentication_provider.py | 4 +- api/adobe_vendor_id.py | 40 ++-- api/authentication/access_token.py | 4 +- api/authentication/basic.py | 30 +-- api/authentication/basic_token.py | 7 +- api/authenticator.py | 44 ++-- api/axis.py | 205 ++++++++---------- api/bibliotheca.py | 55 +++-- api/circulation.py | 167 +++++++------- api/circulation_exceptions.py | 4 +- api/config.py | 16 +- api/controller/marc.py | 7 +- api/discovery/opds_registration.py | 48 ++-- api/discovery/registration_script.py | 7 +- api/enki.py | 49 +++-- api/firstbook2.py | 12 +- api/kansas_patron.py | 14 +- api/lanes.py | 5 +- api/millenium_patron.py | 25 +-- api/model/time_tracking.py | 10 +- api/monitor.py | 4 +- api/odl.py | 67 +++--- api/odl2.py | 27 +-- api/opds_for_distributors.py | 33 +-- api/overdrive.py | 44 ++-- api/s3_analytics_provider.py | 12 +- api/saml/configuration/model.py | 45 ++-- api/saml/credential.py | 9 +- api/saml/metadata/federations/model.py | 4 +- api/saml/metadata/federations/validator.py | 3 +- api/saml/metadata/model.py | 23 +- api/saml/metadata/parser.py | 3 +- api/saml/provider.py | 10 +- api/selftest.py | 12 +- api/simple_authentication.py | 24 +- api/sip/__init__.py | 37 ++-- api/sip/client.py | 6 +- api/sirsidynix_authentication_provider.py | 10 +- api/util/profilers.py | 3 +- api/util/xray.py | 3 +- core/analytics.py | 4 +- core/config.py | 7 +- core/configuration/library.py | 74 +++---- core/coverage.py | 11 +- core/equivalents_coverage.py | 16 +- core/exceptions.py | 7 +- core/external_search.py | 39 ++-- core/feed/acquisition.py | 81 +++---- 
core/feed/admin.py | 4 +- core/feed/annotator/admin.py | 5 +- core/feed/annotator/base.py | 32 +-- core/feed/annotator/circulation.py | 192 ++++++++-------- core/feed/annotator/loan_and_hold.py | 8 +- core/feed/annotator/verbose.py | 9 +- core/feed/navigation.py | 10 +- core/feed/opds.py | 14 +- core/feed/serializer/base.py | 4 +- core/feed/serializer/opds.py | 16 +- core/feed/serializer/opds2.py | 40 ++-- core/feed/types.py | 171 +++++++-------- core/feed/util.py | 3 +- core/integration/base.py | 29 +-- core/integration/registry.py | 63 +++--- core/integration/settings.py | 89 ++++---- core/jobs/integration_test.py | 6 +- core/jobs/patron_activity_sync.py | 5 +- core/jobs/playtime_entries.py | 8 +- core/lane.py | 10 +- core/lcp/credential.py | 4 +- core/marc.py | 22 +- core/metadata_layer.py | 11 +- .../migration/migrate_external_integration.py | 18 +- core/migration/util.py | 6 +- core/model/__init__.py | 19 +- core/model/admin.py | 4 +- core/model/announcements.py | 16 +- core/model/before_flush_decorator.py | 24 +- core/model/classification.py | 12 +- core/model/collection.py | 35 +-- core/model/configuration.py | 6 +- core/model/contributor.py | 8 +- core/model/coverage.py | 4 +- core/model/customlist.py | 8 +- core/model/datasource.py | 30 +-- core/model/devicetokens.py | 3 +- core/model/edition.py | 10 +- core/model/formats.py | 28 +-- core/model/hassessioncache.py | 3 +- core/model/identifier.py | 22 +- core/model/integration.py | 14 +- core/model/library.py | 64 +++--- core/model/licensing.py | 34 +-- core/model/listeners.py | 5 +- core/model/patron.py | 12 +- core/model/resource.py | 17 +- core/model/work.py | 16 +- core/opds2_import.py | 31 +-- core/opds_import.py | 185 ++++++++-------- core/python_expression_dsl/evaluator.py | 28 +-- core/python_expression_dsl/util.py | 20 +- core/query/coverage.py | 8 +- core/query/playtime_entries.py | 3 +- core/saml/wayfless.py | 4 +- core/scripts.py | 10 +- core/search/document.py | 23 +- core/search/migrator.py | 12 +- core/search/revision_directory.py | 2 +- core/search/service.py | 22 +- core/search/v5.py | 4 +- core/selftest.py | 75 +++---- core/service/logging/configuration.py | 12 +- core/service/logging/container.py | 4 +- core/service/logging/log.py | 3 +- core/service/storage/configuration.py | 16 +- core/service/storage/container.py | 6 +- core/service/storage/s3.py | 38 ++-- core/util/__init__.py | 9 +- core/util/accept_language.py | 3 +- core/util/authentication_for_opds.py | 14 +- core/util/base64.py | 10 +- core/util/cache.py | 17 +- core/util/datetime_helpers.py | 8 +- core/util/flask_util.py | 4 +- core/util/http.py | 11 +- core/util/languages.py | 8 +- core/util/log.py | 17 +- core/util/notifications.py | 5 +- core/util/problem_detail.py | 41 ++-- core/util/worker_pools.py | 9 +- core/util/xmlparser.py | 44 ++-- customlists/customlist_explain.py | 5 +- customlists/customlist_export.py | 23 +- customlists/customlist_import.py | 29 ++- customlists/customlist_report.py | 6 +- poetry.lock | 119 +--------- pyproject.toml | 3 +- scripts.py | 25 +-- .../admin/controller/test_catalog_services.py | 7 +- .../api/admin/controller/test_collections.py | 3 +- .../api/admin/controller/test_custom_lists.py | 9 +- tests/api/admin/controller/test_library.py | 5 +- tests/api/admin/controller/test_patron.py | 4 +- .../api/admin/controller/test_patron_auth.py | 13 +- tests/api/admin/test_config.py | 15 +- tests/api/admin/test_form_data.py | 8 +- tests/api/admin/test_routes.py | 7 +- tests/api/controller/test_annotation.py | 3 +- 
tests/api/controller/test_loan.py | 3 +- tests/api/controller/test_marc.py | 13 +- tests/api/controller/test_opds_feed.py | 4 +- tests/api/controller/test_work.py | 4 +- tests/api/discovery/test_opds_registration.py | 7 +- tests/api/feed/fixtures.py | 3 +- tests/api/feed/test_annotators.py | 2 +- tests/api/feed/test_library_annotator.py | 9 +- tests/api/feed/test_opds_acquisition_feed.py | 7 +- tests/api/mockapi/circulation.py | 5 +- tests/api/mockapi/enki.py | 8 +- tests/api/saml/configuration/test_model.py | 2 +- tests/api/saml/conftest.py | 7 +- .../metadata/federations/test_validator.py | 5 +- tests/api/saml/metadata/test_parser.py | 27 ++- tests/api/saml/test_auth.py | 2 +- tests/api/saml/test_provider.py | 2 +- tests/api/sip/test_authentication_provider.py | 3 +- tests/api/sip/test_client.py | 6 +- tests/api/test_adobe_vendor_id.py | 3 +- tests/api/test_annotations.py | 8 +- tests/api/test_authenticator.py | 5 +- tests/api/test_bibliotheca.py | 16 +- tests/api/test_firstbook2.py | 2 +- tests/api/test_kansas_patron.py | 4 +- tests/api/test_lanes.py | 5 +- tests/api/test_millenium_patron.py | 7 +- tests/api/test_odl.py | 4 +- tests/api/test_odl2.py | 5 +- tests/api/test_opds_for_distributors.py | 4 +- tests/api/test_overdrive.py | 10 +- tests/api/test_scripts.py | 10 +- tests/api/test_simple_auth.py | 2 +- tests/api/test_sirsidynix_auth_provider.py | 5 +- tests/core/configuration/test_library.py | 6 +- tests/core/integration/test_settings.py | 3 +- tests/core/jobs/test_playtime_entries.py | 7 +- tests/core/mock.py | 7 +- .../models/test_before_flush_decorator.py | 8 +- tests/core/models/test_coverage.py | 5 +- .../test_discovery_service_registration.py | 6 +- tests/core/models/test_licensing.py | 14 +- tests/core/models/test_listeners.py | 3 +- tests/core/search/test_service.py | 2 +- tests/core/service/storage/test_s3.py | 7 +- tests/core/test_app_server.py | 2 +- tests/core/test_equivalent_coverage.py | 6 +- tests/core/test_external_search.py | 6 +- tests/core/test_http.py | 2 +- tests/core/test_lane.py | 7 +- tests/core/test_opds2_import.py | 6 +- tests/core/test_patron_activity_sync.py | 6 +- tests/core/test_selftest.py | 4 +- tests/core/util/test_notifications.py | 3 +- tests/core/util/test_xml_parser.py | 6 +- tests/customlists/test_explain.py | 5 +- tests/customlists/test_import.py | 29 ++- tests/fixtures/announcements.py | 13 +- tests/fixtures/api_admin.py | 3 +- tests/fixtures/api_controller.py | 5 +- tests/fixtures/api_odl.py | 36 +-- tests/fixtures/api_routes.py | 7 +- tests/fixtures/authenticator.py | 20 +- tests/fixtures/database.py | 25 ++- tests/fixtures/flask.py | 2 +- tests/fixtures/library.py | 8 +- tests/fixtures/odl.py | 21 +- tests/fixtures/s3.py | 26 +-- tests/fixtures/search.py | 4 +- tests/fixtures/tls_server.py | 3 +- tests/fixtures/webserver.py | 18 +- tests/migration/conftest.py | 73 ++++--- tests/migration/test_20231101_2d72d6876c52.py | 14 +- tests/migration/test_20231206_e06f965879ab.py | 11 +- tests/mocks/search.py | 40 ++-- tox.ini | 4 +- 255 files changed, 2100 insertions(+), 2484 deletions(-) diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 07e42157e0..43c9494c3e 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -1,7 +1,7 @@ name: Lint on: [push, pull_request] env: - PYTHON_VERSION: 3.9 + PYTHON_VERSION: "3.10" jobs: lint: diff --git a/.github/workflows/mypy.yml b/.github/workflows/mypy.yml index c108cf186d..beb69330a6 100644 --- a/.github/workflows/mypy.yml +++ b/.github/workflows/mypy.yml @@ -1,7 
+1,7 @@ name: Mypy (Type check) on: [push, pull_request] env: - PYTHON_VERSION: 3.9 + PYTHON_VERSION: "3.10" jobs: mypy: diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 18fc21a9d9..0d9f848aa2 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -15,7 +15,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.10", "3.11"] module: [Api, Core] # We want to run on external PRs, but not on our own internal PRs as they'll be run @@ -84,7 +84,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: "3.10" - name: Install Apt Packages run: | @@ -331,7 +331,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: "3.10" - name: Install Poetry uses: ./.github/actions/poetry diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index ef1fbb1adf..03609950c1 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -21,8 +21,7 @@ repos: hooks: - id: pyupgrade args: - - --py38-plus - - --keep-runtime-typing + - --py310-plus - repo: https://github.com/MarcoGorelli/absolufy-imports rev: v0.3.0 diff --git a/README.md b/README.md index cf627c3c1d..bf3a3dab40 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@ [![Code style: black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Imports: isort](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/) [![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit) -![Python: 3.8,3.9,3.10,3.11](https://img.shields.io/badge/Python-3.8%20|%203.9%20|%203.10%20|%203.11-blue) +![Python: 3.10,3.11](https://img.shields.io/badge/Python-3.10%20|%203.11-blue) This is a [The Palace Project](https://thepalaceproject.org) maintained fork of the NYPL [Library Simplified](http://www.librarysimplified.org/) Circulation Manager. @@ -278,13 +278,13 @@ export SIMPLIFIED_MAIL_SENDER=sender@example.com As mentioned in the [pyenv](#pyenv) section, the `poetry` tool should be executed under a virtual environment in order to guarantee that it will use the Python version you expect. To use a particular Python version, you should create a local virtual environment in the cloned `circulation` repository directory. Assuming that -you want to use, for example, Python 3.9.9: +you want to use, for example, Python 3.11.1: ```sh -pyenv virtualenv 3.9.9 circ +pyenv virtualenv 3.11.1 circ ``` -This will create a new local virtual environment called `circ` that uses Python 3.9.9. Switch to that environment: +This will create a new local virtual environment called `circ` that uses Python 3.11.1. Switch to that environment: ```sh pyenv local circ @@ -292,7 +292,7 @@ pyenv local circ On most systems, using `pyenv` will adjust your shell prompt to indicate which virtual environment you are now in. 
For example, the version of Python installed in your operating system might be `3.10.1`, but -using a virtual environment can substitute, for example, `3.9.9`: +using a virtual environment can substitute, for example, `3.11.1`: ```sh $ python --version @@ -300,7 +300,7 @@ Python 3.10.1 $ pyenv local circ (circ) $ python --version -Python 3.9.9 +Python 3.11.1 ``` For brevity, these instructions assume that all shell commands will be executed within a virtual environment. @@ -578,7 +578,7 @@ poetry install --only ci ## Testing -The Github Actions CI service runs the unit tests against Python 3.8, 3.9, 3.10, and 3.11 automatically using +The Github Actions CI service runs the unit tests against Python 3.10, and 3.11 automatically using [tox](https://tox.readthedocs.io/en/latest/). Tox has an environment for each python version, the module being tested, and an optional `-docker` factor that will @@ -594,8 +594,6 @@ with service dependencies running in docker containers. | Factor | Python Version | |--------|----------------| -| py38 | Python 3.8 | -| py39 | Python 3.9 | | py310 | Python 3.10 | | py311 | Python 3.11 | diff --git a/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py b/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py index 0bc74f360d..e79038af34 100644 --- a/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py +++ b/alembic/versions/20231121_1e46a5bc33b5_migrate_catalog_services.py @@ -5,7 +5,6 @@ Create Date: 2023-11-21 17:48:04.535064+00:00 """ -from typing import Optional from alembic import op from core.marc import MARCExporter @@ -56,7 +55,7 @@ def upgrade() -> None: ) = get_configuration_settings(connection, integration) # We moved the setting for update_frequency from the library settings to the integration settings. - update_frequency: Optional[str] = None + update_frequency: str | None = None for library_id, library_settings in libraries_settings.items(): if "marc_update_frequency" in library_settings: frequency = library_settings["marc_update_frequency"] diff --git a/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py index 509cc14165..e93b110330 100644 --- a/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py +++ b/alembic/versions/20231206_e06f965879ab_marc_s3_file_cleanup.py @@ -5,7 +5,6 @@ Create Date: 2023-12-06 16:04:36.936466+00:00 """ -from typing import Optional from urllib.parse import unquote, urlparse from alembic import op @@ -19,7 +18,7 @@ depends_on = None -def parse_key_from_url(url: Optional[str], bucket: str) -> Optional[str]: +def parse_key_from_url(url: str | None, bucket: str) -> str | None: """Parse the key from a URL. :param url: The URL to parse. 
diff --git a/api/admin/announcement_list_validator.py b/api/admin/announcement_list_validator.py index d7bcceaf64..019426fe2c 100644 --- a/api/admin/announcement_list_validator.py +++ b/api/admin/announcement_list_validator.py @@ -1,7 +1,7 @@ import datetime import json import uuid -from typing import Dict, List, Optional, Union, cast +from typing import cast import dateutil from flask_babel import lazy_gettext as _ @@ -28,8 +28,8 @@ def __init__( self.default_duration_days = default_duration_days def validate_announcements( - self, announcements: Union[str, List[Dict[str, str]]] - ) -> Dict[uuid.UUID, AnnouncementData]: + self, announcements: str | list[dict[str, str]] + ) -> dict[uuid.UUID, AnnouncementData]: validated_announcements = {} bad_format = INVALID_INPUT.detailed( _( @@ -64,7 +64,7 @@ def validate_announcements( validated_announcements[id] = validated return validated_announcements - def validate_announcement(self, announcement: Dict[str, str]) -> AnnouncementData: + def validate_announcement(self, announcement: dict[str, str]) -> AnnouncementData: if not isinstance(announcement, dict): raise ProblemError( INVALID_INPUT.detailed( @@ -162,8 +162,8 @@ def validate_length(self, value: str, minimum: int, maximum: int) -> str: def validate_date( cls, field: str, - value: Union[str, datetime.date], - minimum: Optional[datetime.date] = None, + value: str | datetime.date, + minimum: datetime.date | None = None, ) -> datetime.date: """Validate a date value. diff --git a/api/admin/config.py b/api/admin/config.py index dc28340a83..9b3f8f2b5a 100644 --- a/api/admin/config.py +++ b/api/admin/config.py @@ -1,6 +1,5 @@ import os from enum import Enum -from typing import Optional from urllib.parse import urljoin from requests import RequestException @@ -52,7 +51,7 @@ class Configuration(LoggerMixin): ENV_ADMIN_UI_PACKAGE_VERSION = "TPP_CIRCULATION_ADMIN_PACKAGE_VERSION" # Cache the package version after first lookup. - _version: Optional[str] = None + _version: str | None = None @classmethod def operational_mode(cls) -> OperationalMode: @@ -91,7 +90,7 @@ def resolve_package_version(cls, package_name: str, package_version: str) -> str return package_version @classmethod - def env_package_version(cls) -> Optional[str]: + def env_package_version(cls) -> str | None: """Get the package version specified in configuration or environment. :return Package verison. @@ -120,7 +119,7 @@ def package_version(cls) -> str: @classmethod def lookup_asset_url( - cls, key: str, *, _operational_mode: Optional[OperationalMode] = None + cls, key: str, *, _operational_mode: OperationalMode | None = None ) -> str: """Get the URL for the asset_type. @@ -143,7 +142,7 @@ def lookup_asset_url( ) @classmethod - def package_url(cls, *, _operational_mode: Optional[OperationalMode] = None) -> str: + def package_url(cls, *, _operational_mode: OperationalMode | None = None) -> str: """Compute the URL for the admin UI package. :param _operational_mode: For testing. The operational mode is @@ -162,7 +161,7 @@ def package_url(cls, *, _operational_mode: Optional[OperationalMode] = None) -> return url @classmethod - def package_development_directory(cls, *, _base_dir: Optional[str] = None) -> str: + def package_development_directory(cls, *, _base_dir: str | None = None) -> str: """Absolute path for the admin UI package when in development mode. :param _base_dir: For testing purposes. Not used in normal operation. 
@@ -177,7 +176,7 @@ def package_development_directory(cls, *, _base_dir: Optional[str] = None) -> st ) @classmethod - def static_files_directory(cls, *, _base_dir: Optional[str] = None) -> str: + def static_files_directory(cls, *, _base_dir: str | None = None) -> str: """Absolute path for the admin UI static files. :param _base_dir: For testing purposes. Not used in normal operation. diff --git a/api/admin/controller/admin_search.py b/api/admin/controller/admin_search.py index e2aa1fddbc..dd4d2c7ce2 100644 --- a/api/admin/controller/admin_search.py +++ b/api/admin/controller/admin_search.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List, Tuple - import flask from sqlalchemy import func, or_ @@ -38,13 +36,13 @@ def search_field_values(self) -> dict: return self._search_field_values_cached(collection_ids) @classmethod - def _unzip(cls, values: List[Tuple[str, int]]) -> dict: + def _unzip(cls, values: list[tuple[str, int]]) -> dict: """Covert a list of tuples to a {value0: value1} dictionary""" return {a[0]: a[1] for a in values if type(a[0]) is str} # 1 hour in-memory cache @memoize(ttls=3600) - def _search_field_values_cached(self, collection_ids: List[int]) -> dict: + def _search_field_values_cached(self, collection_ids: list[int]) -> dict: licenses_filter = or_( LicensePool.open_access == True, LicensePool.licenses_owned != 0, diff --git a/api/admin/controller/announcement_service.py b/api/admin/controller/announcement_service.py index 68c320d29b..8ec18ac0af 100644 --- a/api/admin/controller/announcement_service.py +++ b/api/admin/controller/announcement_service.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Any, Callable, Dict +from collections.abc import Callable +from typing import Any import flask @@ -19,13 +20,13 @@ def _action(self) -> Callable: method = flask.request.method.lower() return getattr(self, method) - def process_many(self) -> Dict[str, Any] | ProblemDetail: + def process_many(self) -> dict[str, Any] | ProblemDetail: try: return self._action()() except ProblemError as e: return e.problem_detail - def get(self) -> Dict[str, Any]: + def get(self) -> dict[str, Any]: """Respond with settings and all global announcements""" db_announcements = ( self._db.execute(Announcement.global_announcements()).scalars().all() @@ -37,7 +38,7 @@ def get(self) -> Dict[str, Any]: announcements=announcements, ) - def post(self) -> Dict[str, Any]: + def post(self) -> dict[str, Any]: """POST multiple announcements to the global namespace""" validator = AnnouncementListValidator() if flask.request.form is None or "announcements" not in flask.request.form: diff --git a/api/admin/controller/catalog_services.py b/api/admin/controller/catalog_services.py index 7428aabbfb..35f19b5ff3 100644 --- a/api/admin/controller/catalog_services.py +++ b/api/admin/controller/catalog_services.py @@ -1,5 +1,3 @@ -from typing import List, Type, Union - import flask from flask import Response @@ -30,7 +28,7 @@ class CatalogServicesController( def default_registry(self) -> CatalogServicesRegistry: return CatalogServicesRegistry() - def process_catalog_services(self) -> Union[Response, ProblemDetail]: + def process_catalog_services(self) -> Response | ProblemDetail: self.require_system_admin() if flask.request.method == "GET": @@ -74,14 +72,14 @@ def library_integration_validation( def process_updated_libraries( self, - libraries: List[UpdatedLibrarySettingsTuple], - settings_class: Type[BaseSettings], + libraries: list[UpdatedLibrarySettingsTuple], + 
settings_class: type[BaseSettings], ) -> None: super().process_updated_libraries(libraries, settings_class) for integration, _ in libraries: self.library_integration_validation(integration) - def process_post(self) -> Union[Response, ProblemDetail]: + def process_post(self) -> Response | ProblemDetail: try: form_data = flask.request.form libraries_data = self.get_libraries_data(form_data) diff --git a/api/admin/controller/collection_self_tests.py b/api/admin/controller/collection_self_tests.py index 6b308421f2..8cc53dcb16 100644 --- a/api/admin/controller/collection_self_tests.py +++ b/api/admin/controller/collection_self_tests.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Optional +from typing import Any from flask import Response from sqlalchemy.orm import Session @@ -18,19 +18,19 @@ class CollectionSelfTestsController(IntegrationSelfTestsController[CirculationAp def __init__( self, db: Session, - registry: Optional[IntegrationRegistry[CirculationApiType]] = None, + registry: IntegrationRegistry[CirculationApiType] | None = None, ): registry = registry or LicenseProvidersRegistry() super().__init__(db, registry) def process_collection_self_tests( - self, identifier: Optional[int] + self, identifier: int | None ) -> Response | ProblemDetail: return self.process_self_tests(identifier) def run_self_tests( self, integration: IntegrationConfiguration - ) -> Optional[Dict[str, Any]]: + ) -> dict[str, Any] | None: protocol_class = self.get_protocol_class(integration) if issubclass(protocol_class, HasSelfTestsIntegrationConfiguration): test_result, _ = protocol_class.run_self_tests( diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index 60da10aec8..cb75f7c10e 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional, Union +from typing import Any import flask from flask import Response @@ -37,7 +37,7 @@ def default_registry(self) -> IntegrationRegistry[CirculationApiType]: def configured_service_info( self, service: IntegrationConfiguration - ) -> Optional[Dict[str, Any]]: + ) -> dict[str, Any] | None: service_info = super().configured_service_info(service) user = getattr(flask.request, "admin", None) if service_info: @@ -54,7 +54,7 @@ def configured_service_info( def configured_service_library_info( self, library_configuration: IntegrationLibraryConfiguration - ) -> Optional[Dict[str, Any]]: + ) -> dict[str, Any] | None: library_info = super().configured_service_library_info(library_configuration) user = getattr(flask.request, "admin", None) if library_info: @@ -62,7 +62,7 @@ def configured_service_library_info( return library_info return None - def process_collections(self) -> Union[Response, ProblemDetail]: + def process_collections(self) -> Response | ProblemDetail: if flask.request.method == "GET": return self.process_get() else: @@ -86,7 +86,7 @@ def create_new_service(self, name: str, protocol: str) -> IntegrationConfigurati create(self._db, Collection, integration_configuration=service) return service - def process_post(self) -> Union[Response, ProblemDetail]: + def process_post(self) -> Response | ProblemDetail: self.require_system_admin() try: form_data = flask.request.form @@ -139,7 +139,7 @@ def process_post(self) -> Union[Response, ProblemDetail]: return Response(str(integration.id), response_code) - def process_delete(self, service_id: int) -> Union[Response, ProblemDetail]: + def 
process_delete(self, service_id: int) -> Response | ProblemDetail: self.require_system_admin() integration = get_one( diff --git a/api/admin/controller/custom_lists.py b/api/admin/controller/custom_lists.py index df6afc25c9..c2fc8d4f72 100644 --- a/api/admin/controller/custom_lists.py +++ b/api/admin/controller/custom_lists.py @@ -1,8 +1,8 @@ from __future__ import annotations import json +from collections.abc import Callable from datetime import datetime -from typing import Callable, Dict, List, Optional, Union import flask from flask import Response, url_for @@ -51,16 +51,16 @@ class CustomListSharePostResponse(BaseModel): class CustomListPostRequest(BaseModel): name: str - id: Optional[int] = None - entries: List[dict] = [] - collections: List[int] = [] - deletedEntries: List[dict] = [] + id: int | None = None + entries: list[dict] = [] + collections: list[int] = [] + deletedEntries: list[dict] = [] # For auto updating lists auto_update: bool = False - auto_update_query: Optional[dict] = None - auto_update_facets: Optional[dict] = None + auto_update_query: dict | None = None + auto_update_facets: dict | None = None - def _list_as_json(self, list: CustomList, is_owner=True) -> Dict: + def _list_as_json(self, list: CustomList, is_owner=True) -> dict: """Transform a CustomList object into a response ready dict""" collections = [] for collection in list.collections: @@ -84,7 +84,7 @@ def _list_as_json(self, list: CustomList, is_owner=True) -> Dict: is_shared=len(list.shared_locally_with_libraries) > 0, ) - def custom_lists(self) -> Union[Dict, ProblemDetail, Response, None]: + def custom_lists(self) -> dict | ProblemDetail | Response | None: library: Library = flask.request.library # type: ignore # "Request" has no attribute "library" self.require_librarian(library) @@ -113,7 +113,7 @@ def custom_lists(self) -> Union[Dict, ProblemDetail, Response, None]: return None - def _getJSONFromRequest(self, values: Optional[str]) -> list: + def _getJSONFromRequest(self, values: str | None) -> list: if values: return_values = json.loads(values) else: @@ -121,9 +121,7 @@ def _getJSONFromRequest(self, values: Optional[str]) -> list: return return_values - def _get_work_from_urn( - self, library: Library, urn: Optional[str] - ) -> Optional[Work]: + def _get_work_from_urn(self, library: Library, urn: str | None) -> Work | None: identifier, ignore = Identifier.parse_urn(self._db, urn) if identifier is None: @@ -143,14 +141,14 @@ def _create_or_update_list( self, library: Library, name: str, - entries: List[Dict], - collections: List[int], - deleted_entries: Optional[List[Dict]] = None, - id: Optional[int] = None, - auto_update: Optional[bool] = None, - auto_update_query: Optional[dict[str, str]] = None, - auto_update_facets: Optional[dict[str, str]] = None, - ) -> Union[ProblemDetail, Response]: + entries: list[dict], + collections: list[int], + deleted_entries: list[dict] | None = None, + id: int | None = None, + auto_update: bool | None = None, + auto_update_query: dict[str, str] | None = None, + auto_update_facets: dict[str, str] | None = None, + ) -> ProblemDetail | Response: data_source = DataSource.lookup(self._db, DataSource.LIBRARY_STAFF) old_list_with_name = CustomList.find(self._db, name, library=library) @@ -325,9 +323,7 @@ def url_fn(after): return url_fn - def custom_list( - self, list_id: int - ) -> Optional[Union[Response, Dict, ProblemDetail]]: + def custom_list(self, list_id: int) -> Response | dict | ProblemDetail | None: library: Library = flask.request.library # type: ignore 
self.require_librarian(library) data_source = DataSource.lookup(self._db, DataSource.LIBRARY_STAFF) @@ -410,7 +406,7 @@ def custom_list( def share_locally( self, customlist_id: int - ) -> Union[ProblemDetail, Dict[str, int], Response]: + ) -> ProblemDetail | dict[str, int] | Response: """Share this customlist with all libraries on this local CM""" if not customlist_id: return INVALID_INPUT @@ -431,7 +427,7 @@ def share_locally( def share_locally_POST( self, customlist: CustomList - ) -> Union[ProblemDetail, Dict[str, int]]: + ) -> ProblemDetail | dict[str, int]: successes = [] failures = [] self.log.info(f"Begin sharing customlist '{customlist.name}'") @@ -463,9 +459,7 @@ def share_locally_POST( successes=len(successes), failures=len(failures) ).dict() - def share_locally_DELETE( - self, customlist: CustomList - ) -> Union[ProblemDetail, Response]: + def share_locally_DELETE(self, customlist: CustomList) -> ProblemDetail | Response: """Delete the shared status of a custom list If a customlist is actively in use by another library, then disallow the unshare """ diff --git a/api/admin/controller/dashboard.py b/api/admin/controller/dashboard.py index b41d46fba0..9267b1f6c9 100644 --- a/api/admin/controller/dashboard.py +++ b/api/admin/controller/dashboard.py @@ -1,7 +1,7 @@ from __future__ import annotations +from collections.abc import Callable from datetime import date, datetime, timedelta -from typing import Callable import flask from sqlalchemy import desc, nullslast diff --git a/api/admin/controller/discovery_service_library_registrations.py b/api/admin/controller/discovery_service_library_registrations.py index 964bc5e9e9..083256d90f 100644 --- a/api/admin/controller/discovery_service_library_registrations.py +++ b/api/admin/controller/discovery_service_library_registrations.py @@ -1,7 +1,7 @@ from __future__ import annotations import json -from typing import Any, Dict +from typing import Any import flask from flask import Response, url_for @@ -38,7 +38,7 @@ def __init__(self, manager: CirculationManager): def process_discovery_service_library_registrations( self, - ) -> Response | Dict[str, Any] | ProblemDetail: + ) -> Response | dict[str, Any] | ProblemDetail: self.require_system_admin() try: if flask.request.method == "GET": @@ -49,7 +49,7 @@ def process_discovery_service_library_registrations( self._db.rollback() return e.problem_detail - def process_get(self) -> Dict[str, Any]: + def process_get(self) -> dict[str, Any]: """Make a list of all discovery services, each with the list of libraries registered with that service and the status of the registration.""" @@ -94,7 +94,7 @@ def process_get(self) -> Dict[str, Any]: def get_library_info( self, registration: DiscoveryServiceRegistration - ) -> Dict[str, str]: + ) -> dict[str, str]: """Find the relevant information about the library which the user is trying to register""" diff --git a/api/admin/controller/discovery_services.py b/api/admin/controller/discovery_services.py index 30ffce8e6b..ebabbf833a 100644 --- a/api/admin/controller/discovery_services.py +++ b/api/admin/controller/discovery_services.py @@ -1,5 +1,3 @@ -from typing import Union - import flask from flask import Response from sqlalchemy import and_, select @@ -25,7 +23,7 @@ class DiscoveryServicesController( def default_registry(self) -> DiscoveryRegistry: return DiscoveryRegistry() - def process_discovery_services(self) -> Union[Response, ProblemDetail]: + def process_discovery_services(self) -> Response | ProblemDetail: self.require_system_admin() if 
flask.request.method == "GET": return self.process_get() @@ -60,7 +58,7 @@ def set_up_default_registry(self) -> None: ) default_registry.settings_dict = settings.dict() - def process_post(self) -> Union[Response, ProblemDetail]: + def process_post(self) -> Response | ProblemDetail: try: form_data = flask.request.form service, protocol, response_code = self.get_service(form_data) @@ -82,7 +80,7 @@ def process_post(self) -> Union[Response, ProblemDetail]: return Response(str(service.id), response_code) - def process_delete(self, service_id: int) -> Union[Response, ProblemDetail]: + def process_delete(self, service_id: int) -> Response | ProblemDetail: self.require_system_admin() try: return self.delete_service(service_id) diff --git a/api/admin/controller/individual_admin_settings.py b/api/admin/controller/individual_admin_settings.py index 7a3b67ccf5..8e42970125 100644 --- a/api/admin/controller/individual_admin_settings.py +++ b/api/admin/controller/individual_admin_settings.py @@ -1,5 +1,4 @@ import json -from typing import Optional import flask from flask import Response @@ -27,8 +26,8 @@ def process_individual_admins(self): else: return self.process_post() - def _highest_authorized_role(self) -> Optional[AdminRole]: - highest_role: Optional[AdminRole] = None + def _highest_authorized_role(self) -> AdminRole | None: + highest_role: AdminRole | None = None has_auth = False admin = getattr(flask.request, "admin", None) @@ -54,7 +53,7 @@ def _highest_authorized_role(self) -> Optional[AdminRole]: return highest_role if has_auth else None def process_get(self): - logged_in_admin: Optional[Admin] = getattr(flask.request, "admin", None) + logged_in_admin: Admin | None = getattr(flask.request, "admin", None) if not logged_in_admin: return ADMIN_AUTH_NOT_CONFIGURED @@ -110,7 +109,7 @@ def process_post_create_first_admin(self, email: str): """Create the first admin in the system.""" # Passwords are always required, so check presence and validity up front. - password: Optional[str] = flask.request.form.get("password") + password: str | None = flask.request.form.get("password") if not self.is_acceptable_password(password): return self.unacceptable_password() @@ -143,7 +142,7 @@ def process_post_create_new_admin(self, email: str): """Create a new admin (not the first admin in the system).""" # Passwords are always required, so check presence and validity up front. - password: Optional[str] = flask.request.form.get("password") + password: str | None = flask.request.form.get("password") if not self.is_acceptable_password(password): return self.unacceptable_password() @@ -173,7 +172,7 @@ def process_post_create_new_admin(self, email: str): def process_post_update_existing_admin(self, admin: Admin): """Update an existing admin.""" - password: Optional[str] = flask.request.form.get("password") + password: str | None = flask.request.form.get("password") success = False try: @@ -244,7 +243,7 @@ def unacceptable_password(): ) @staticmethod - def is_acceptable_password(password: Optional[str]) -> bool: + def is_acceptable_password(password: str | None) -> bool: # Forbid missing passwords. 
if not password: return False diff --git a/api/admin/controller/integration_settings.py b/api/admin/controller/integration_settings.py index 1ddcee3b9d..c8a93c8dff 100644 --- a/api/admin/controller/integration_settings.py +++ b/api/admin/controller/integration_settings.py @@ -2,7 +2,7 @@ import json from abc import ABC, abstractmethod -from typing import Any, Dict, Generic, List, NamedTuple, Optional, Tuple, Type, TypeVar +from typing import Any, Generic, NamedTuple, TypeVar import flask from flask import Response @@ -42,20 +42,20 @@ class UpdatedLibrarySettingsTuple(NamedTuple): integration: IntegrationLibraryConfiguration - settings: Dict[str, Any] + settings: dict[str, Any] class ChangedLibrariesTuple(NamedTuple): - new: List[UpdatedLibrarySettingsTuple] - updated: List[UpdatedLibrarySettingsTuple] - removed: List[IntegrationLibraryConfiguration] + new: list[UpdatedLibrarySettingsTuple] + updated: list[UpdatedLibrarySettingsTuple] + removed: list[IntegrationLibraryConfiguration] class IntegrationSettingsController(ABC, Generic[T], LoggerMixin): def __init__( self, manager: CirculationManager, - registry: Optional[IntegrationRegistry[T]] = None, + registry: IntegrationRegistry[T] | None = None, ): self._db = manager._db self.registry = registry or self.default_registry() @@ -68,7 +68,7 @@ def default_registry(self) -> IntegrationRegistry[T]: ... @memoize(ttls=1800) - def _cached_protocols(self) -> Dict[str, Dict[str, Any]]: + def _cached_protocols(self) -> dict[str, dict[str, Any]]: """Cached result for integration implementations""" protocols = [] for name, api in self.registry: @@ -92,13 +92,13 @@ def _cached_protocols(self) -> Dict[str, Dict[str, Any]]: return dict(protocols) @property - def protocols(self) -> Dict[str, Dict[str, Any]]: + def protocols(self) -> dict[str, dict[str, Any]]: """Use a property for implementations to allow expiring cached results""" return self._cached_protocols() def configured_service_info( self, service: IntegrationConfiguration - ) -> Optional[Dict[str, Any]]: + ) -> dict[str, Any] | None: return { "id": service.id, "name": service.name, @@ -108,13 +108,13 @@ def configured_service_info( def configured_service_library_info( self, library_configuration: IntegrationLibraryConfiguration - ) -> Optional[Dict[str, Any]]: + ) -> dict[str, Any] | None: library_info = {"short_name": library_configuration.library.short_name} library_info.update(library_configuration.settings_dict) return library_info @property - def configured_services(self) -> List[Dict[str, Any]]: + def configured_services(self) -> list[dict[str, Any]]: """Return a list of all currently configured services for the controller's goal.""" configured_services = [] for service in ( @@ -147,7 +147,7 @@ def configured_services(self) -> List[Dict[str, Any]]: return configured_services def get_existing_service( - self, service_id: int, name: Optional[str], protocol: str + self, service_id: int, name: str | None, protocol: str ) -> IntegrationConfiguration: """ Query for an existing service to edit. @@ -157,7 +157,7 @@ def get_existing_service( necessary and a ProblemError will be raised if the name is already in use. 
""" - service: Optional[IntegrationConfiguration] = get_one( + service: IntegrationConfiguration | None = get_one( self._db, IntegrationConfiguration, id=service_id, @@ -202,15 +202,13 @@ def create_new_service(self, name: str, protocol: str) -> IntegrationConfigurati ) return new_service - def get_libraries_data( - self, form_data: ImmutableMultiDict[str, str] - ) -> Optional[str]: + def get_libraries_data(self, form_data: ImmutableMultiDict[str, str]) -> str | None: libraries_data = form_data.get("libraries", None, str) return libraries_data def get_service( self, form_data: ImmutableMultiDict[str, str] - ) -> Tuple[IntegrationConfiguration, str, int]: + ) -> tuple[IntegrationConfiguration, str, int]: protocol = form_data.get("protocol", None, str) _id = form_data.get("id", None, int) name = form_data.get("name", None, str) @@ -239,7 +237,7 @@ def get_library(self, short_name: str) -> Library: """ Get a library by its short name. """ - library: Optional[Library] = get_one(self._db, Library, short_name=short_name) + library: Library | None = get_one(self._db, Library, short_name=short_name) if library is None: raise ProblemError( NO_SUCH_LIBRARY.detailed( @@ -343,7 +341,7 @@ def get_changed_libraries( return ChangedLibrariesTuple(new=new, updated=updated, removed=removed) def process_deleted_libraries( - self, removed: List[IntegrationLibraryConfiguration] + self, removed: list[IntegrationLibraryConfiguration] ) -> None: """ Delete any IntegrationLibraryConfigurations that were removed. @@ -353,8 +351,8 @@ def process_deleted_libraries( def process_updated_libraries( self, - libraries: List[UpdatedLibrarySettingsTuple], - settings_class: Type[BaseSettings], + libraries: list[UpdatedLibrarySettingsTuple], + settings_class: type[BaseSettings], ) -> None: """ Update the settings for any IntegrationLibraryConfigurations that were updated or added. @@ -367,7 +365,7 @@ def process_libraries( self, service: IntegrationConfiguration, libraries_data: str, - settings_class: Type[BaseSettings], + settings_class: type[BaseSettings], ) -> None: """ Process the library settings for a service. 
This will create new diff --git a/api/admin/controller/library_settings.py b/api/admin/controller/library_settings.py index ba04e759b9..7525faa6de 100644 --- a/api/admin/controller/library_settings.py +++ b/api/admin/controller/library_settings.py @@ -4,7 +4,6 @@ import json import uuid from io import BytesIO -from typing import Optional, Tuple import flask from flask import Response @@ -164,7 +163,7 @@ def process_post(self) -> Response: else: return Response(str(library.uuid), 200) - def create_library(self, short_name: str) -> Tuple[Library, bool]: + def create_library(self, short_name: str) -> tuple[Library, bool]: self.require_system_admin() public_key, private_key = Library.generate_keypair() library, is_new = create( @@ -201,7 +200,7 @@ def get_library_from_uuid(self, library_uuid: str) -> Library: ) def check_short_name_unique( - self, library: Optional[Library], short_name: Optional[str] + self, library: Library | None, short_name: str | None ) -> None: if not library or (short_name and short_name != library.short_name): # If you're adding a new short_name, either by editing an @@ -227,7 +226,7 @@ def _process_image(image: Image.Image, _format: str = "PNG") -> bytes: def scale_and_store_logo( cls, library: Library, - image_file: Optional[FileStorage], + image_file: FileStorage | None, max_dimension: int = Configuration.LOGO_MAX_DIMENSION, ) -> None: if not image_file: diff --git a/api/admin/controller/patron_auth_service_self_tests.py b/api/admin/controller/patron_auth_service_self_tests.py index 23d3b7cd49..476456a599 100644 --- a/api/admin/controller/patron_auth_service_self_tests.py +++ b/api/admin/controller/patron_auth_service_self_tests.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, Optional, Type +from typing import Any from flask import Response from sqlalchemy.orm import Session @@ -20,21 +20,21 @@ class PatronAuthServiceSelfTestsController( def __init__( self, db: Session, - registry: Optional[IntegrationRegistry[AuthenticationProviderType]] = None, + registry: IntegrationRegistry[AuthenticationProviderType] | None = None, ): registry = registry or PatronAuthRegistry() super().__init__(db, registry) def process_patron_auth_service_self_tests( - self, identifier: Optional[int] + self, identifier: int | None ) -> Response | ProblemDetail: return self.process_self_tests(identifier) def get_prior_test_results( self, - protocol_class: Type[AuthenticationProviderType], + protocol_class: type[AuthenticationProviderType], integration: IntegrationConfiguration, - ) -> Dict[str, Any]: + ) -> dict[str, Any]: # Find the first library associated with this service. library_configuration = self.get_library_configuration(integration) @@ -49,7 +49,7 @@ def get_prior_test_results( return super().get_prior_test_results(protocol_class, integration) - def run_self_tests(self, integration: IntegrationConfiguration) -> Dict[str, Any]: + def run_self_tests(self, integration: IntegrationConfiguration) -> dict[str, Any]: # If the auth service doesn't have at least one library associated with it, # we can't run self tests. 
library_configuration = self.get_library_configuration(integration) diff --git a/api/admin/controller/patron_auth_services.py b/api/admin/controller/patron_auth_services.py index 6974a0f70c..0e8dd595f7 100644 --- a/api/admin/controller/patron_auth_services.py +++ b/api/admin/controller/patron_auth_services.py @@ -1,5 +1,3 @@ -from typing import List, Set, Type, Union - import flask from flask import Response @@ -32,14 +30,14 @@ def default_registry(self) -> IntegrationRegistry[AuthenticationProviderType]: return PatronAuthRegistry() @property - def basic_auth_protocols(self) -> Set[str]: + def basic_auth_protocols(self) -> set[str]: return { name for name, api in self.registry if issubclass(api, BasicAuthenticationProvider) } - def process_patron_auth_services(self) -> Union[Response, ProblemDetail]: + def process_patron_auth_services(self) -> Response | ProblemDetail: self.require_system_admin() if flask.request.method == "GET": @@ -59,7 +57,7 @@ def process_get(self) -> Response: mimetype="application/json", ) - def process_post(self) -> Union[Response, ProblemDetail]: + def process_post(self) -> Response | ProblemDetail: try: form_data = flask.request.form libraries_data = self.get_libraries_data(form_data) @@ -112,14 +110,14 @@ def library_integration_validation( def process_updated_libraries( self, - libraries: List[UpdatedLibrarySettingsTuple], - settings_class: Type[BaseSettings], + libraries: list[UpdatedLibrarySettingsTuple], + settings_class: type[BaseSettings], ) -> None: super().process_updated_libraries(libraries, settings_class) for integration, _ in libraries: self.library_integration_validation(integration) - def process_delete(self, service_id: int) -> Union[Response, ProblemDetail]: + def process_delete(self, service_id: int) -> Response | ProblemDetail: self.require_system_admin() try: return self.delete_service(service_id) diff --git a/api/admin/controller/quicksight.py b/api/admin/controller/quicksight.py index 7b5d0512b1..b6f690f86e 100644 --- a/api/admin/controller/quicksight.py +++ b/api/admin/controller/quicksight.py @@ -1,5 +1,4 @@ import logging -from typing import Dict import boto3 import flask @@ -20,7 +19,7 @@ class QuickSightController(CirculationManagerController): - def generate_quicksight_url(self, dashboard_name) -> Dict: + def generate_quicksight_url(self, dashboard_name) -> dict: log = logging.getLogger(self.__class__.__name__) admin: Admin = getattr(flask.request, "admin") request_data = QuicksightGenerateUrlRequest(**flask.request.args) diff --git a/api/admin/controller/reset_password.py b/api/admin/controller/reset_password.py index 00a6391c93..effc57857a 100644 --- a/api/admin/controller/reset_password.py +++ b/api/admin/controller/reset_password.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Optional, Union - import flask from flask import Request, Response, redirect, url_for from flask_babel import lazy_gettext as _ @@ -41,7 +39,7 @@ class ResetPasswordController(AdminController): ) ) - def forgot_password(self) -> Union[ProblemDetail, WerkzeugResponse]: + def forgot_password(self) -> ProblemDetail | WerkzeugResponse: """Shows forgot password page or starts off forgot password workflow""" if not self.admin_auth_providers: @@ -91,7 +89,7 @@ def forgot_password(self) -> Union[ProblemDetail, WerkzeugResponse]: "Sign in", ) - def _extract_admin_from_request(self, request: Request) -> Optional[Admin]: + def _extract_admin_from_request(self, request: Request) -> Admin | None: email = request.form.get("email") admin = 
get_one(self._db, Admin, email=email) @@ -114,7 +112,7 @@ def _generate_reset_password_url( def reset_password( self, reset_password_token: str, admin_id: int - ) -> Optional[WerkzeugResponse]: + ) -> WerkzeugResponse | None: """Shows reset password page or process the reset password request""" auth = self.admin_auth_provider(PasswordAdminAuthenticationProvider.NAME) if not auth: @@ -195,11 +193,11 @@ def reset_password( def _response_with_message_and_redirect_button( self, - message: Optional[str], + message: str | None, redirect_button_link: str, redirect_button_text: str, is_error: bool = False, - status_code: Optional[int] = 200, + status_code: int | None = 200, ) -> Response: style = error_style if is_error else body_style diff --git a/api/admin/controller/self_tests.py b/api/admin/controller/self_tests.py index 82705b9dcc..239ff40ae4 100644 --- a/api/admin/controller/self_tests.py +++ b/api/admin/controller/self_tests.py @@ -1,7 +1,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, Dict, Generic, Optional, Type, TypeVar +from typing import Any, Generic, TypeVar import flask from flask import Response @@ -109,10 +109,10 @@ def __init__( @abstractmethod def run_self_tests( self, integration: IntegrationConfiguration - ) -> Optional[Dict[str, Any]]: + ) -> dict[str, Any] | None: ... - def get_protocol_class(self, integration: IntegrationConfiguration) -> Type[T]: + def get_protocol_class(self, integration: IntegrationConfiguration) -> type[T]: if not integration.protocol or integration.protocol not in self.registry: raise ProblemError(problem_detail=UNKNOWN_PROTOCOL) return self.registry[integration.protocol] @@ -129,7 +129,7 @@ def look_up_by_id(self, identifier: int) -> IntegrationConfiguration: return service @staticmethod - def get_info(integration: IntegrationConfiguration) -> Dict[str, Any]: + def get_info(integration: IntegrationConfiguration) -> dict[str, Any]: info = dict( id=integration.id, name=integration.name, @@ -142,14 +142,14 @@ def get_info(integration: IntegrationConfiguration) -> Dict[str, Any]: @staticmethod def get_library_configuration( integration: IntegrationConfiguration, - ) -> Optional[IntegrationLibraryConfiguration]: + ) -> IntegrationLibraryConfiguration | None: if not integration.library_configurations: return None return integration.library_configurations[0] def get_prior_test_results( - self, protocol_class: Type[T], integration: IntegrationConfiguration - ) -> Dict[str, Any]: + self, protocol_class: type[T], integration: IntegrationConfiguration + ) -> dict[str, Any]: if issubclass(protocol_class, HasSelfTestsIntegrationConfiguration): self_test_results = protocol_class.load_self_test_results(integration) # type: ignore[unreachable] else: @@ -160,7 +160,7 @@ def get_prior_test_results( return self_test_results - def process_self_tests(self, identifier: Optional[int]) -> Response | ProblemDetail: + def process_self_tests(self, identifier: int | None) -> Response | ProblemDetail: if not identifier: return MISSING_IDENTIFIER try: diff --git a/api/admin/controller/settings.py b/api/admin/controller/settings.py index d07d9c858e..c9e6ceaa91 100644 --- a/api/admin/controller/settings.py +++ b/api/admin/controller/settings.py @@ -2,7 +2,7 @@ import json import logging -from typing import TYPE_CHECKING, Any, Dict, Optional, Type +from typing import TYPE_CHECKING, Any import flask from flask import Response @@ -55,7 +55,7 @@ class SettingsController(CirculationManagerController, AdminPermissionsControlle def 
_get_settings_class( self, registry: IntegrationRegistry, protocol_name: str, is_child=False - ) -> Type[BaseSettings] | ProblemDetail | None: + ) -> type[BaseSettings] | ProblemDetail | None: api_class = registry.get(protocol_name) if not api_class: return None @@ -234,14 +234,14 @@ def _get_menu_values(setting_key, form): return values def _extract_form_setting_value( - self, setting: Dict[str, Any], form_data: ImmutableMultiDict - ) -> Optional[Any]: + self, setting: dict[str, Any], form_data: ImmutableMultiDict + ) -> Any | None: """Extract the value of a setting from form data.""" key = setting.get("key") setting_type = setting.get("type") - value: Optional[Any] + value: Any | None if setting_type == "list" and not setting.get("options"): value = [item for item in form_data.getlist(key) if item] elif setting_type == "menu": @@ -295,7 +295,7 @@ def _set_configuration_library( self, configuration: IntegrationConfiguration, library_info: dict, - protocol_class: Type[HasLibraryIntegrationConfiguration], + protocol_class: type[HasLibraryIntegrationConfiguration], ) -> IntegrationLibraryConfiguration: """Set the library configuration for the integration configuration. The data will be validated first.""" diff --git a/api/admin/controller/sign_in.py b/api/admin/controller/sign_in.py index e61aa2e002..57f2e71b4c 100644 --- a/api/admin/controller/sign_in.py +++ b/api/admin/controller/sign_in.py @@ -1,7 +1,6 @@ from __future__ import annotations import logging -from typing import Tuple from urllib.parse import urlsplit import flask @@ -147,7 +146,7 @@ class SanitizedRedirections: """Functions to sanitize redirects.""" @staticmethod - def _check_redirect(target: str) -> Tuple[bool, str]: + def _check_redirect(target: str) -> tuple[bool, str]: """Check that a redirect is allowed. 
Because the URL redirect is assumed to be untrusted user input, we extract the URL path and forbid redirecting to external diff --git a/api/admin/dashboard_stats.py b/api/admin/dashboard_stats.py index 765052225e..5bfe92674c 100644 --- a/api/admin/dashboard_stats.py +++ b/api/admin/dashboard_stats.py @@ -1,9 +1,10 @@ from __future__ import annotations import dataclasses +from collections.abc import Callable, Iterable from datetime import datetime from functools import partial -from typing import TYPE_CHECKING, Callable, Iterable +from typing import TYPE_CHECKING from sqlalchemy.orm import Session from sqlalchemy.sql import func, select diff --git a/api/admin/form_data.py b/api/admin/form_data.py index 88e9dc4d24..690371129c 100644 --- a/api/admin/form_data.py +++ b/api/admin/form_data.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Dict, List, Type, TypeVar +from typing import Any, TypeVar from werkzeug.datastructures import ImmutableMultiDict @@ -14,38 +14,31 @@ class ProcessFormData: - @classmethod - def _remove_prefix(cls, text: str, prefix: str) -> str: - # TODO: Remove this when we upgrade to Python 3.9 - if text.startswith(prefix): - return text[len(prefix) :] - return text - @classmethod def _process_list( cls, key: str, form_data: ImmutableMultiDict[str, str] - ) -> List[str]: + ) -> list[str]: return [v for v in form_data.getlist(key) if v != ""] @classmethod def _process_menu( cls, key: str, form_data: ImmutableMultiDict[str, str] - ) -> List[str]: + ) -> list[str]: return [ - cls._remove_prefix(v, f"{key}_") + v.removeprefix(f"{key}_") for v in form_data.keys() if v.startswith(key) and v != f"{key}_menu" ] @classmethod def get_settings_dict( - cls, settings_class: Type[BaseSettings], form_data: ImmutableMultiDict[str, str] - ) -> Dict[str, Any]: + cls, settings_class: type[BaseSettings], form_data: ImmutableMultiDict[str, str] + ) -> dict[str, Any]: """ Process the wacky format that form data is sent by the admin interface into a dictionary that we can use to update the settings. """ - return_data: Dict[str, Any] = {} + return_data: dict[str, Any] = {} for field in settings_class.__fields__.values(): if not isinstance(field.field_info, FormFieldInfo): continue @@ -63,6 +56,6 @@ def get_settings_dict( @classmethod def get_settings( - cls, settings_class: Type[T], form_data: ImmutableMultiDict[str, str] + cls, settings_class: type[T], form_data: ImmutableMultiDict[str, str] ) -> T: return settings_class(**cls.get_settings_dict(settings_class, form_data)) diff --git a/api/admin/model/dashboard_statistics.py b/api/admin/model/dashboard_statistics.py index 3aec268780..c31aef22fd 100644 --- a/api/admin/model/dashboard_statistics.py +++ b/api/admin/model/dashboard_statistics.py @@ -1,7 +1,7 @@ from __future__ import annotations import sys -from typing import Any, Dict, List +from typing import Any from pydantic import Field, NonNegativeInt @@ -87,10 +87,10 @@ class LibraryStatistics(CustomBaseModel): inventory_summary: InventoryStatistics = Field( description="Summary of inventory statistics for this library." ) - inventory_by_medium: Dict[str, InventoryStatistics] = Field( + inventory_by_medium: dict[str, InventoryStatistics] = Field( description="Per-medium inventory statistics for this library." ) - collection_ids: List[int] = Field( + collection_ids: list[int] = Field( description="List of associated collection identifiers." 
) @@ -103,7 +103,7 @@ class CollectionInventory(CustomBaseModel): inventory: InventoryStatistics = Field( description="Inventory statistics for this collection." ) - inventory_by_medium: Dict[str, InventoryStatistics] = Field( + inventory_by_medium: dict[str, InventoryStatistics] = Field( description="Per-medium inventory statistics for this collection." ) @@ -111,16 +111,16 @@ class CollectionInventory(CustomBaseModel): class StatisticsResponse(CustomBaseModel): """Statistics response for authorized libraries and collections.""" - collections: List[CollectionInventory] = Field( + collections: list[CollectionInventory] = Field( description="List of collection-level statistics (includes collections not associated with a library." ) - libraries: List[LibraryStatistics] = Field( + libraries: list[LibraryStatistics] = Field( description="List of library-level statistics." ) inventory_summary: InventoryStatistics = Field( description="Summary inventory across all included collections." ) - inventory_by_medium: Dict[str, InventoryStatistics] = Field( + inventory_by_medium: dict[str, InventoryStatistics] = Field( description="Per-medium summary inventory across all included collections." ) patron_summary: PatronStatistics = Field( diff --git a/api/admin/model/quicksight.py b/api/admin/model/quicksight.py index a789adc4c8..523499f76b 100644 --- a/api/admin/model/quicksight.py +++ b/api/admin/model/quicksight.py @@ -1,4 +1,3 @@ -from typing import List from uuid import UUID from pydantic import Field, validator @@ -7,12 +6,12 @@ class QuicksightGenerateUrlRequest(CustomBaseModel): - library_uuids: List[UUID] = Field( + library_uuids: list[UUID] = Field( description="The list of libraries to include in the dataset, an empty list is equivalent to all the libraries the user is allowed to access." 
) @validator("library_uuids", pre=True) - def parse_library_uuids(cls, value) -> List[str]: + def parse_library_uuids(cls, value) -> list[str]: return str_comma_list_validator(value) @@ -21,4 +20,4 @@ class QuicksightGenerateUrlResponse(CustomBaseModel): class QuicksightDashboardNamesResponse(CustomBaseModel): - names: List[str] = Field(description="The named quicksight dashboard ids") + names: list[str] = Field(description="The named quicksight dashboard ids") diff --git a/api/admin/password_admin_authentication_provider.py b/api/admin/password_admin_authentication_provider.py index b6cd887955..549f82af08 100644 --- a/api/admin/password_admin_authentication_provider.py +++ b/api/admin/password_admin_authentication_provider.py @@ -1,5 +1,3 @@ -from typing import Union - from flask import render_template_string, url_for from sqlalchemy.orm.session import Session @@ -110,7 +108,7 @@ def send_reset_password_email(self, admin: Admin, reset_password_url: str) -> No def validate_token_and_extract_admin( self, reset_password_token: str, admin_id: int, _db: Session - ) -> Union[Admin, ProblemDetail]: + ) -> Admin | ProblemDetail: secret_key = ConfigurationSetting.sitewide_secret(_db, Configuration.SECRET_KEY) return Admin.validate_reset_password_token_and_fetch_admin( diff --git a/api/adobe_vendor_id.py b/api/adobe_vendor_id.py index 9af5bd615b..59d43eb46b 100644 --- a/api/adobe_vendor_id.py +++ b/api/adobe_vendor_id.py @@ -5,7 +5,7 @@ import logging import sys import uuid -from typing import Any, Dict, Optional, Tuple, Union +from typing import Any import jwt from jwt.algorithms import HMACAlgorithm @@ -99,9 +99,7 @@ def __init__( self.short_token_signing_key = self.short_token_signer.prepare_key(self.secret) @classmethod - def from_config( - cls, library: Library, _db: Optional[Session] = None - ) -> Optional[Self]: + def from_config(cls, library: Library, _db: Session | None = None) -> Self | None: """Initialize an AuthdataUtility from site configuration. The library must be successfully registered with a discovery @@ -173,7 +171,7 @@ def adobe_relevant_credentials(self, patron: Patron) -> Query[Credential]: ) ) - def encode(self, patron_identifier: Optional[str]) -> Tuple[str, bytes]: + def encode(self, patron_identifier: str | None) -> tuple[str, bytes]: """Generate an authdata JWT suitable for putting in an OPDS feed, where it can be picked up by a client and sent to the delegation authority to look up an Adobe ID. @@ -193,12 +191,12 @@ def encode(self, patron_identifier: Optional[str]) -> Tuple[str, bytes]: def _encode( self, iss: str, - sub: Optional[str] = None, - iat: Optional[datetime.datetime] = None, - exp: Optional[datetime.datetime] = None, + sub: str | None = None, + iat: datetime.datetime | None = None, + exp: datetime.datetime | None = None, ) -> bytes: """Helper method split out separately for use in tests.""" - payload: Dict[str, Any] = dict(iss=iss) # Issuer + payload: dict[str, Any] = dict(iss=iss) # Issuer if sub: payload["sub"] = sub # Subject if iat: @@ -213,7 +211,7 @@ def _encode( ) @classmethod - def adobe_base64_encode(cls, str_to_encode: Union[str, bytes]) -> str: + def adobe_base64_encode(cls, str_to_encode: str | bytes) -> str: """A modified base64 encoding that avoids triggering an Adobe bug. 
The bug seems to happen when the 'password' portion of a @@ -232,7 +230,7 @@ def adobe_base64_decode(cls, str_to_decode: str) -> bytes: encoded = str_to_decode.replace(":", "+").replace(";", "/").replace("@", "=") return base64.decodebytes(encoded.encode("utf-8")) - def decode(self, authdata: bytes) -> Tuple[str, str]: + def decode(self, authdata: bytes) -> tuple[str, str]: """Decode and verify an authdata JWT from one of the libraries managed by `secrets_by_library`. @@ -266,7 +264,7 @@ def decode(self, authdata: bytes) -> Tuple[str, str]: # in the list. raise exceptions[-1] - def _decode(self, authdata: bytes) -> Tuple[str, str]: + def _decode(self, authdata: bytes) -> tuple[str, str]: # First, decode the authdata without checking the signature. authdata_str = authdata.decode("utf-8") decoded = jwt.decode( @@ -295,7 +293,7 @@ def _decode(self, authdata: bytes) -> Tuple[str, str]: return library_uri, decoded["sub"] @classmethod - def _adobe_patron_identifier(cls, patron: Patron) -> Optional[str]: + def _adobe_patron_identifier(cls, patron: Patron) -> str | None: """Take patron object and return identifier for Adobe ID purposes""" _db = Session.object_session(patron) internal = DataSource.lookup(_db, DataSource.INTERNAL_PROCESSING) @@ -314,8 +312,8 @@ def refresh(credential: Credential) -> None: return patron_identifier.credential def short_client_token_for_patron( - self, patron_information: Union[Patron, str] - ) -> Tuple[str, str]: + self, patron_information: Patron | str + ) -> tuple[str, str]: """Generate short client token for patron, or for a patron's identifier for Adobe ID purposes""" @@ -333,8 +331,8 @@ def _now(self) -> datetime.datetime: return utc_now() def encode_short_client_token( - self, patron_identifier: Optional[str], expires: Optional[Dict[str, int]] = None - ) -> Tuple[str, str]: + self, patron_identifier: str | None, expires: dict[str, int] | None = None + ) -> tuple[str, str]: """Generate a short client token suitable for putting in an OPDS feed, where it can be picked up by a client and sent to the delegation authority to look up an Adobe ID. @@ -357,7 +355,7 @@ def _encode_short_client_token( self, library_short_name: str, patron_identifier: str, - expires: Union[int, float], + expires: int | float, ) -> str: base = library_short_name + "|" + str(expires) + "|" + patron_identifier signature = self.short_token_signer.sign( @@ -374,7 +372,7 @@ def _encode_short_client_token( ) return base + "|" + signature_encoded - def decode_short_client_token(self, token: str) -> Tuple[str, str]: + def decode_short_client_token(self, token: str) -> tuple[str, str]: """Attempt to interpret a 'username' and 'password' as a short client token identifying a patron of a specific library. @@ -392,7 +390,7 @@ def decode_short_client_token(self, token: str) -> Tuple[str, str]: def decode_two_part_short_client_token( self, username: str, password: str - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Decode a short client token that has already been split into two parts. """ @@ -401,7 +399,7 @@ def decode_two_part_short_client_token( def _decode_short_client_token( self, token: str, supposed_signature: bytes - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Make sure a client token is properly formatted, correctly signed, and not expired. 
""" diff --git a/api/authentication/access_token.py b/api/authentication/access_token.py index 142e75f638..ecc29df13d 100644 --- a/api/authentication/access_token.py +++ b/api/authentication/access_token.py @@ -4,7 +4,7 @@ import time from abc import ABC, abstractmethod from datetime import timedelta -from typing import TYPE_CHECKING, Type +from typing import TYPE_CHECKING from jwcrypto import jwe, jwk @@ -174,4 +174,4 @@ def is_access_token(cls, token: str | None) -> bool: return True -AccessTokenProvider: Type[PatronAccessTokenProvider] = PatronJWEAccessTokenProvider +AccessTokenProvider: type[PatronAccessTokenProvider] = PatronJWEAccessTokenProvider diff --git a/api/authentication/basic.py b/api/authentication/basic.py index 7e9871aa1c..b4075d4681 100644 --- a/api/authentication/basic.py +++ b/api/authentication/basic.py @@ -2,8 +2,10 @@ import re from abc import ABC, abstractmethod +from collections.abc import Generator from enum import Enum -from typing import Any, Dict, Generator, List, Optional, Pattern, TypeVar +from re import Pattern +from typing import Any, TypeVar from flask import url_for from pydantic import PositiveInt, validator @@ -68,7 +70,7 @@ class BasicAuthProviderSettings(AuthProviderSettings): # Configuration settings that are common to all Basic Auth-type # authentication techniques. - test_identifier: Optional[str] = FormField( + test_identifier: str | None = FormField( None, form=ConfigurationFormItem( label="Test identifier", @@ -77,7 +79,7 @@ class BasicAuthProviderSettings(AuthProviderSettings): required=True, ), ) - test_password: Optional[str] = FormField( + test_password: str | None = FormField( None, form=ConfigurationFormItem( label="Test password", @@ -115,7 +117,7 @@ class BasicAuthProviderSettings(AuthProviderSettings): ), ) # By default, there are no restrictions on passwords. - password_regular_expression: Optional[Pattern] = FormField( + password_regular_expression: Pattern | None = FormField( None, form=ConfigurationFormItem( label="Password Regular Expression", @@ -151,14 +153,14 @@ class BasicAuthProviderSettings(AuthProviderSettings): weight=10, ), ) - identifier_maximum_length: Optional[PositiveInt] = FormField( + identifier_maximum_length: PositiveInt | None = FormField( None, form=ConfigurationFormItem( label="Maximum identifier length", weight=10, ), ) - password_maximum_length: Optional[PositiveInt] = FormField( + password_maximum_length: PositiveInt | None = FormField( None, form=ConfigurationFormItem( label="Maximum password length", @@ -227,7 +229,7 @@ class BasicAuthProviderLibrarySettings(AuthProviderLibrarySettings): # Usually this is a string which is compared against the # patron's identifiers using the comparison method chosen in # identifier_restriction_type. 
- library_identifier_restriction_criteria: Optional[str] = FormField( + library_identifier_restriction_criteria: str | None = FormField( None, form=ConfigurationFormItem( label="Library Identifier Restriction", @@ -241,8 +243,8 @@ class BasicAuthProviderLibrarySettings(AuthProviderLibrarySettings): @validator("library_identifier_restriction_criteria") def validate_restriction_criteria( - cls, v: Optional[str], values: Dict[str, Any] - ) -> Optional[str]: + cls, v: str | None, values: dict[str, Any] + ) -> str | None: """Validate the library_identifier_restriction_criteria field.""" if not v: return v @@ -311,8 +313,8 @@ def __init__( ) def process_library_identifier_restriction_criteria( - self, criteria: Optional[str] - ) -> str | List[str] | re.Pattern | None: + self, criteria: str | None + ) -> str | list[str] | re.Pattern | None: """Process the library identifier restriction criteria.""" if not criteria: return None @@ -661,13 +663,13 @@ def _authentication_flow_document(self, _db: Session) -> dict[str, Any]: OPDS document. """ - login_inputs: Dict[str, Any] = dict(keyboard=self.identifier_keyboard.value) + login_inputs: dict[str, Any] = dict(keyboard=self.identifier_keyboard.value) if self.identifier_maximum_length: login_inputs["maximum_length"] = self.identifier_maximum_length if self.identifier_barcode_format != BarcodeFormats.NONE: login_inputs["barcode_format"] = self.identifier_barcode_format.value - password_inputs: Dict[str, Any] = dict(keyboard=self.password_keyboard.value) + password_inputs: dict[str, Any] = dict(keyboard=self.password_keyboard.value) if self.password_maximum_length: password_inputs["maximum_length"] = self.password_maximum_length @@ -713,7 +715,7 @@ def identifies_individuals(self): def _restriction_matches( cls, field: str | None, - restriction: str | List[str] | re.Pattern | None, + restriction: str | list[str] | re.Pattern | None, match_type: LibraryIdentifierRestriction, ) -> bool: """Does the given patron match the given restriction?""" diff --git a/api/authentication/basic_token.py b/api/authentication/basic_token.py index 5e04072313..9197f078f3 100644 --- a/api/authentication/basic_token.py +++ b/api/authentication/basic_token.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Generator, Type, cast +from collections.abc import Generator +from typing import TYPE_CHECKING, cast from flask import url_for from sqlalchemy.orm import Session @@ -31,11 +32,11 @@ class BasicTokenAuthenticationProvider( """ @classmethod - def library_settings_class(cls) -> Type[LibrarySettingsType]: + def library_settings_class(cls) -> type[LibrarySettingsType]: raise NotImplementedError() @classmethod - def settings_class(cls) -> Type[SettingsType]: + def settings_class(cls) -> type[SettingsType]: raise NotImplementedError() FLOW_TYPE = "http://thepalaceproject.org/authtype/basic-token" diff --git a/api/authenticator.py b/api/authenticator.py index 69a8ee5088..e642ee13f8 100644 --- a/api/authenticator.py +++ b/api/authenticator.py @@ -4,7 +4,8 @@ import logging import sys from abc import ABC -from typing import Dict, Iterable, List, Optional, Tuple, Type +from collections.abc import Iterable +from typing import cast import flask import jwt @@ -153,11 +154,11 @@ class LibraryAuthenticator(LoggerMixin): @classmethod def from_config( - cls: Type[Self], + cls: type[Self], _db: Session, library: Library, - analytics: Optional[Analytics] = None, - custom_catalog_source: Type[CustomPatronCatalog] = CustomPatronCatalog, + analytics: Analytics | 
None = None, + custom_catalog_source: type[CustomPatronCatalog] = CustomPatronCatalog, ) -> Self: """Initialize an Authenticator for the given Library based on its configured ExternalIntegrations. @@ -175,7 +176,7 @@ def from_config( # Find all of this library's ExternalIntegrations set up with # the goal of authenticating patrons. - integrations: List[ + integrations: list[ IntegrationLibraryConfiguration ] = IntegrationLibraryConfiguration.for_library_and_goal( _db, library, Goals.PATRON_AUTH_GOAL @@ -214,13 +215,12 @@ def __init__( self, _db: Session, library: Library, - basic_auth_provider: Optional[BasicAuthenticationProvider] = None, - saml_providers: Optional[List[BaseSAMLAuthenticationProvider]] = None, - bearer_token_signing_secret: Optional[str] = None, - authentication_document_annotator: Optional[CustomPatronCatalog] = None, - integration_registry: Optional[ - IntegrationRegistry[AuthenticationProvider] - ] = None, + basic_auth_provider: BasicAuthenticationProvider | None = None, + saml_providers: list[BaseSAMLAuthenticationProvider] | None = None, + bearer_token_signing_secret: str | None = None, + authentication_document_annotator: CustomPatronCatalog | None = None, + integration_registry: None + | (IntegrationRegistry[AuthenticationProvider]) = None, ): """Initialize a LibraryAuthenticator from a list of AuthenticationProviders. @@ -254,8 +254,8 @@ def __init__( self.saml_providers_by_name = {} self.bearer_token_signing_secret = bearer_token_signing_secret - self.initialization_exceptions: Dict[ - Tuple[int | None, int | None], Exception + self.initialization_exceptions: dict[ + tuple[int | None, int | None], Exception ] = {} self.basic_auth_provider: BasicAuthenticationProvider | None = None @@ -451,7 +451,7 @@ def authenticated_patron( ProblemDetail if an error occurs. """ provider: AuthenticationProvider | None = None - provider_token: Dict[str, str | None] | str | None = None + provider_token: dict[str, str | None] | str | None = None if self.basic_auth_provider and auth.type.lower() == "basic": # The patron wants to authenticate with the # BasicAuthenticationProvider. @@ -552,12 +552,14 @@ def create_bearer_token( # Maybe we should use something custom instead. iss=provider_name, ) - return jwt.encode(payload, self.bearer_token_signing_secret, algorithm="HS256") + return jwt.encode( + payload, cast(str, self.bearer_token_signing_secret), algorithm="HS256" + ) - def decode_bearer_token(self, token: str) -> Tuple[str, str]: + def decode_bearer_token(self, token: str) -> tuple[str, str]: """Extract auth provider name and access token from JSON web token.""" decoded = jwt.decode( - token, self.bearer_token_signing_secret, algorithms=["HS256"] + token, cast(str, self.bearer_token_signing_secret), algorithms=["HS256"] ) provider_name = decoded["iss"] token = decoded["token"] @@ -577,7 +579,7 @@ def create_authentication_document(self) -> str: """Create the Authentication For OPDS document to be used when a request comes in with no authentication. """ - links: List[Dict[str, Optional[str]]] = [] + links: list[dict[str, str | None]] = [] if self.library is None: raise ValueError("No library specified!") @@ -755,8 +757,8 @@ def create_authentication_document(self) -> str: # Add feature flags to signal to clients what features they should # offer. 
- enabled: List[str] = [] - disabled: List[str] = [] + enabled: list[str] = [] + disabled: list[str] = [] if self.library and self.library.settings.allow_holds: bucket = enabled else: diff --git a/api/axis.py b/api/axis.py index faf8a50d4e..2b712a8149 100644 --- a/api/axis.py +++ b/api/axis.py @@ -5,27 +5,12 @@ import html import json import re -import socket import ssl import urllib from abc import ABC, abstractmethod +from collections.abc import Callable, Generator, Mapping, Sequence from datetime import timedelta -from typing import ( - Any, - Callable, - Dict, - Generator, - Generic, - List, - Literal, - Mapping, - Sequence, - Tuple, - Type, - TypeVar, - Union, - cast, -) +from typing import Any, Generic, Literal, Optional, TypeVar, Union, cast from urllib.parse import urlparse import certifi @@ -129,7 +114,7 @@ class Axis360Settings(BaseCirculationApiSettings): required=True, ), ) - verify_certificate: Optional[bool] = FormField( + verify_certificate: bool | None = FormField( default=True, form=ConfigurationFormItem( label=_("Verify SSL Certificate"), @@ -198,11 +183,11 @@ class Axis360API( } @classmethod - def settings_class(cls) -> Type[Axis360Settings]: + def settings_class(cls) -> type[Axis360Settings]: return Axis360Settings @classmethod - def library_settings_class(cls) -> Type[Axis360LibrarySettings]: + def library_settings_class(cls) -> type[Axis360LibrarySettings]: return Axis360LibrarySettings @classmethod @@ -237,7 +222,7 @@ def __init__(self, _db: Session, collection: Collection) -> None: if not self.library_id or not self.username or not self.password: raise CannotLoadConfiguration("Axis 360 configuration is incomplete.") - self.token: Optional[str] = None + self.token: str | None = None self.verify_certificate: bool = ( settings.verify_certificate if settings.verify_certificate is not None @@ -245,11 +230,11 @@ def __init__(self, _db: Session, collection: Collection) -> None: ) @property - def source(self) -> Optional[DataSource]: + def source(self) -> DataSource | None: return DataSource.lookup(self._db, DataSource.AXIS_360) # type: ignore[no-any-return] @property - def authorization_headers(self) -> Dict[str, str]: + def authorization_headers(self) -> dict[str, str]: authorization = ":".join([self.username, self.password, self.library_id]) authorization_encoded = authorization.encode("utf_16_le") authorization_b64 = base64.standard_b64encode(authorization_encoded).decode( @@ -309,9 +294,9 @@ def request( self, url: str, method: str = "get", - extra_headers: Optional[Dict[str, str]] = None, - data: Optional[Mapping[str, Any]] = None, - params: Optional[Mapping[str, Any]] = None, + extra_headers: dict[str, str] | None = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, exception_on_401: bool = False, **kwargs: Any, ) -> RequestsResponse: @@ -358,9 +343,9 @@ def request( def availability( self, - patron_id: Optional[str] = None, - since: Optional[datetime.datetime] = None, - title_ids: Optional[List[str]] = None, + patron_id: str | None = None, + since: datetime.datetime | None = None, + title_ids: list[str] | None = None, ) -> RequestsResponse: url = self.base_url + self.availability_endpoint args = dict() @@ -407,9 +392,7 @@ def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: except etree.XMLSyntaxError as e: raise RemoteInitiatedServerError(response.content, self.label()) - def _checkin( - self, title_id: Optional[str], patron_id: Optional[str] - ) -> RequestsResponse: + def _checkin(self, title_id: str 
| None, patron_id: str | None) -> RequestsResponse: """Make a request to the EarlyCheckInTitle endpoint.""" if title_id is None: self.log.warning( @@ -452,7 +435,7 @@ def checkout( raise RemoteInitiatedServerError(response.content, self.label()) def _checkout( - self, title_id: Optional[str], patron_id: Optional[str], internal_format: str + self, title_id: str | None, patron_id: str | None, internal_format: str ) -> RequestsResponse: url = self.base_url + "checkout/v2" args = dict(titleId=title_id, patronId=patron_id, format=internal_format) @@ -498,7 +481,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - hold_notification_email: Optional[str], + hold_notification_email: str | None, ) -> HoldInfo: if not hold_notification_email: hold_notification_email = self.default_notification_email_address( @@ -546,10 +529,10 @@ def release_hold(self, patron: Patron, pin: str, licensepool: LicensePool) -> No def patron_activity( self, patron: Patron, - pin: Optional[str], - identifier: Optional[Identifier] = None, - internal_format: Optional[str] = None, - ) -> List[LoanInfo | HoldInfo]: + pin: str | None, + identifier: Identifier | None = None, + internal_format: str | None = None, + ) -> list[LoanInfo | HoldInfo]: if identifier: assert identifier.identifier is not None title_ids = [identifier.identifier] @@ -572,7 +555,7 @@ def update_availability(self, licensepool: LicensePool) -> None: self.update_licensepools_for_identifiers([licensepool.identifier]) def update_licensepools_for_identifiers( - self, identifiers: List[Identifier] + self, identifiers: list[Identifier] ) -> None: """Update availability and bibliographic information for a list of books. @@ -604,7 +587,7 @@ def update_book( bibliographic: Metadata, availability: CirculationData, analytics: Analytics = Provide[Services.analytics.analytics], - ) -> Tuple[Edition, bool, LicensePool, bool]: + ) -> tuple[Edition, bool, LicensePool, bool]: """Create or update a single book based on bibliographic and availability data from the Axis 360 API. @@ -635,8 +618,8 @@ def update_book( return edition, new_edition, license_pool, new_license_pool def _fetch_remote_availability( - self, identifiers: List[Identifier] - ) -> Generator[Tuple[Metadata, CirculationData], None, None]: + self, identifiers: list[Identifier] + ) -> Generator[tuple[Metadata, CirculationData], None, None]: """Retrieve availability information for the specified identifiers. :yield: A stream of (Metadata, CirculationData) 2-tuples. @@ -678,7 +661,7 @@ def _reap(self, identifier: Identifier) -> None: def recent_activity( self, since: datetime.datetime - ) -> Generator[Tuple[Metadata, CirculationData], None, None]: + ) -> Generator[tuple[Metadata, CirculationData], None, None]: """Find books that have had recent activity. 
:yield: A sequence of (Metadata, CirculationData) 2-tuples @@ -690,7 +673,7 @@ def recent_activity( @classmethod def create_identifier_strings( cls, identifiers: Sequence[Identifier | str] - ) -> List[str]: + ) -> list[str]: identifier_strings = [] for i in identifiers: if isinstance(i, Identifier): @@ -712,8 +695,8 @@ def _make_request( url: str, method: str, headers: Mapping[str, str], - data: Optional[Mapping[str, Any]] = None, - params: Optional[Mapping[str, Any]] = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, **kwargs: Any, ) -> RequestsResponse: """Actually make an HTTP request.""" @@ -738,9 +721,9 @@ def __init__( self, _db: Session, collection: Collection, - api_class: Union[ - Axis360API, Callable[[Session, Collection], Axis360API] - ] = Axis360API, + api_class: ( + Axis360API | Callable[[Session, Collection], Axis360API] + ) = Axis360API, ): super().__init__(_db, collection) if isinstance(api_class, Axis360API): @@ -758,7 +741,7 @@ def __init__( def catch_up_from( self, start: datetime.datetime, - cutoff: Optional[datetime.datetime], + cutoff: datetime.datetime | None, progress: TimestampData, ) -> None: """Find Axis 360 books that changed recently. @@ -776,7 +759,7 @@ def catch_up_from( def process_book( self, bibliographic: Metadata, circulation: CirculationData - ) -> Tuple[Edition, LicensePool]: + ) -> tuple[Edition, LicensePool]: edition, new_edition, license_pool, new_license_pool = self.api.update_book( bibliographic, circulation ) @@ -809,9 +792,9 @@ class Axis360BibliographicCoverageProvider(BibliographicCoverageProvider): def __init__( self, collection: Collection, - api_class: Union[ - Axis360API, Callable[[Session, Collection], Axis360API] - ] = Axis360API, + api_class: ( + Axis360API | Callable[[Session, Collection], Axis360API] + ) = Axis360API, **kwargs: Any, ) -> None: """Constructor. 
@@ -833,8 +816,8 @@ def __init__( self.parser = BibliographicParser() def process_batch( - self, identifiers: List[Identifier] - ) -> List[CoverageFailure | Identifier]: + self, identifiers: list[Identifier] + ) -> list[CoverageFailure | Identifier]: identifier_strings = self.api.create_identifier_strings(identifiers) response = self.api.availability(title_ids=identifier_strings) seen_identifiers = set() @@ -886,9 +869,9 @@ def __init__( self, _db: Session, collection: Collection, - api_class: Union[ - Axis360API, Callable[[Session, Collection], Axis360API] - ] = Axis360API, + api_class: ( + Axis360API | Callable[[Session, Collection], Axis360API] + ) = Axis360API, ) -> None: super().__init__(_db, collection) if isinstance(api_class, Axis360API): @@ -898,7 +881,7 @@ def __init__( else: self.api = api_class(_db, collection) - def process_items(self, identifiers: List[Identifier]) -> None: + def process_items(self, identifiers: list[Identifier]) -> None: self.api.update_licensepools_for_identifiers(identifiers) @@ -912,7 +895,7 @@ class Axis360Parser(XMLProcessor[T], ABC): NAMESPACES = {"axis": "http://axis360api.baker-taylor.com/vendorAPI"} - def _pd(self, date: Optional[str]) -> Optional[datetime.datetime]: + def _pd(self, date: str | None) -> datetime.datetime | None: """Stupid function to parse a date.""" if date is None: return date @@ -926,7 +909,7 @@ def _xpath1_boolean( self, e: _Element, target: str, - ns: Optional[Dict[str, str]], + ns: dict[str, str] | None, default: bool = False, ) -> bool: text = self.text_of_optional_subtag(e, target, ns) @@ -938,13 +921,13 @@ def _xpath1_boolean( return False def _xpath1_date( - self, e: _Element, target: str, ns: Optional[Dict[str, str]] - ) -> Optional[datetime.datetime]: + self, e: _Element, target: str, ns: dict[str, str] | None + ) -> datetime.datetime | None: value = self.text_of_optional_subtag(e, target, ns) return self._pd(value) -class BibliographicParser(Axis360Parser[Tuple[Metadata, CirculationData]], LoggerMixin): +class BibliographicParser(Axis360Parser[tuple[Metadata, CirculationData]], LoggerMixin): DELIVERY_DATA_FOR_AXIS_FORMAT = { "Blio": None, # Legacy format, handled the same way as AxisNow "Acoustik": (None, DeliveryMechanism.FINDAWAY_DRM), # Audiobooks @@ -954,7 +937,7 @@ class BibliographicParser(Axis360Parser[Tuple[Metadata, CirculationData]], Logge } @classmethod - def parse_list(cls, l: str) -> List[str]: + def parse_list(cls, l: str) -> list[str]: """Turn strings like this into lists: FICTION / Thrillers; FICTION / Suspense; FICTION / General @@ -968,9 +951,9 @@ def xpath_expression(self) -> str: def extract_availability( self, - circulation_data: Optional[CirculationData], + circulation_data: CirculationData | None, element: _Element, - ns: Optional[Dict[str, str]], + ns: dict[str, str] | None, ) -> CirculationData: identifier = self.text_of_subtag(element, "axis:titleId", ns) primary_identifier = IdentifierData(Identifier.AXIS_360_ID, identifier) @@ -1012,7 +995,7 @@ def parse_contributor( cls, author: str, primary_author_found: bool = False, - force_role: Optional[str] = None, + force_role: str | None = None, ) -> ContributorData: """Parse an Axis 360 contributor string. 
@@ -1055,7 +1038,7 @@ def parse_contributor( return ContributorData(sort_name=author, roles=[role]) def extract_bibliographic( - self, element: _Element, ns: Optional[Dict[str, str]] + self, element: _Element, ns: dict[str, str] | None ) -> Metadata: """Turn bibliographic metadata into a Metadata and a CirculationData objects, and return them as a tuple.""" @@ -1261,8 +1244,8 @@ def extract_bibliographic( return metadata def process_one( - self, element: _Element, ns: Optional[Dict[str, str]] - ) -> Tuple[Metadata, CirculationData]: + self, element: _Element, ns: dict[str, str] | None + ) -> tuple[Metadata, CirculationData]: bibliographic = self.extract_bibliographic(element, ns) passed_availability = None @@ -1282,7 +1265,7 @@ class ResponseParser: SERVICE_NAME = "Axis 360" # Map Axis 360 error codes to our circulation exceptions. - code_to_exception: Mapping[int | Tuple[int, str], Type[IntegrationException]] = { + code_to_exception: Mapping[int | tuple[int, str], type[IntegrationException]] = { 315: InvalidInputException, # Bad password 316: InvalidInputException, # DRM account already exists 1000: PatronAuthorizationFailedException, @@ -1342,11 +1325,10 @@ def _raise_exception_on_error( cls, code: str | int, message: str, - custom_error_classes: Optional[ - Mapping[int | Tuple[int, str], Type[IntegrationException]] - ] = None, - ignore_error_codes: Optional[List[int]] = None, - ) -> Tuple[int, str]: + custom_error_classes: None + | (Mapping[int | tuple[int, str], type[IntegrationException]]) = None, + ignore_error_codes: list[int] | None = None, + ) -> tuple[int, str]: try: code = int(code) except ValueError: @@ -1387,12 +1369,11 @@ def __init__(self, collection: Collection): def raise_exception_on_error( self, e: _Element, - ns: Optional[Dict[str, str]], - custom_error_classes: Optional[ - Mapping[int | Tuple[int, str], Type[IntegrationException]] - ] = None, - ignore_error_codes: Optional[List[int]] = None, - ) -> Tuple[int, str]: + ns: dict[str, str] | None, + custom_error_classes: None + | (Mapping[int | tuple[int, str], type[IntegrationException]]) = None, + ignore_error_codes: list[int] | None = None, + ) -> tuple[int, str]: """Raise an error if the given lxml node represents an Axis 360 error condition. @@ -1423,7 +1404,7 @@ def xpath_expression(self) -> str: return "//axis:EarlyCheckinRestResult" def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] + self, e: _Element, namespaces: dict[str, str] | None ) -> Literal[True]: """Either raise an appropriate exception, or do nothing.""" self.raise_exception_on_error(e, namespaces, ignore_error_codes=[4058]) @@ -1435,9 +1416,7 @@ class CheckoutResponseParser(XMLResponseParser[LoanInfo]): def xpath_expression(self) -> str: return "//axis:checkoutResult" - def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] - ) -> LoanInfo: + def process_one(self, e: _Element, namespaces: dict[str, str] | None) -> LoanInfo: """Either turn the given document into a LoanInfo object, or raise an appropriate exception. """ @@ -1470,9 +1449,7 @@ class HoldResponseParser(XMLResponseParser[HoldInfo]): def xpath_expression(self) -> str: return "//axis:addtoholdResult" - def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] - ) -> HoldInfo: + def process_one(self, e: _Element, namespaces: dict[str, str] | None) -> HoldInfo: """Either turn the given document into a HoldInfo object, or raise an appropriate exception. 
""" @@ -1510,7 +1487,7 @@ def xpath_expression(self) -> str: return "//axis:removeholdResult" def process_one( - self, e: _Element, namespaces: Optional[Dict[str, str]] + self, e: _Element, namespaces: dict[str, str] | None ) -> Literal[True]: # There's no data to gather here. Either there was an error # or we were successful. @@ -1519,7 +1496,7 @@ def process_one( class AvailabilityResponseParser(XMLResponseParser[Union[LoanInfo, HoldInfo]]): - def __init__(self, api: Axis360API, internal_format: Optional[str] = None) -> None: + def __init__(self, api: Axis360API, internal_format: str | None = None) -> None: """Constructor. :param api: An Axis360API instance, in case the parsing of an @@ -1543,8 +1520,8 @@ def xpath_expression(self) -> str: return "//axis:title" def process_one( - self, e: _Element, ns: Optional[Dict[str, str]] - ) -> Optional[Union[LoanInfo, HoldInfo]]: + self, e: _Element, ns: dict[str, str] | None + ) -> LoanInfo | HoldInfo | None: # Figure out which book we're talking about. axis_identifier = self.text_of_subtag(e, "axis:titleId", ns) availability = self._xpath1(e, "axis:availability", ns) @@ -1554,7 +1531,7 @@ def process_one( checked_out = self._xpath1_boolean(availability, "axis:isCheckedout", ns) on_hold = self._xpath1_boolean(availability, "axis:isInHoldQueue", ns) - info: Optional[Union[LoanInfo, HoldInfo]] = None + info: LoanInfo | HoldInfo | None = None if checked_out: start_date = self._xpath1_date(availability, "axis:checkoutStartDate", ns) end_date = self._xpath1_date(availability, "axis:checkoutEndDate", ns) @@ -1574,7 +1551,7 @@ def process_one( identifier=axis_identifier, ) - fulfillment: Optional[FulfillmentInfo] + fulfillment: FulfillmentInfo | None if download_url and self.internal_format != self.api.AXISNOW: # The patron wants a direct link to the book, which we can deliver # immediately, without making any more API requests. @@ -1653,7 +1630,7 @@ class JSONResponseParser(Generic[T], ResponseParser, ABC): """ @classmethod - def _required_key(cls, key: str, json_obj: Optional[Mapping[str, Any]]) -> Any: + def _required_key(cls, key: str, json_obj: Mapping[str, Any] | None) -> Any: """Raise an exception if the given key is not present in the given object. """ @@ -1669,7 +1646,7 @@ def _required_key(cls, key: str, json_obj: Optional[Mapping[str, Any]]) -> Any: return json_obj[key] @classmethod - def verify_status_code(cls, parsed: Optional[Mapping[str, Any]]) -> None: + def verify_status_code(cls, parsed: Mapping[str, Any] | None) -> None: """Assert that the incoming JSON document represents a successful response. """ @@ -1682,7 +1659,7 @@ def verify_status_code(cls, parsed: Optional[Mapping[str, Any]]) -> None: # an appropriate exception immediately. cls._raise_exception_on_error(code, message) - def parse(self, data: Union[Dict[str, Any], bytes, str], **kwargs: Any) -> T: + def parse(self, data: dict[str, Any] | bytes | str, **kwargs: Any) -> T: """Parse a JSON document.""" if isinstance(data, dict): parsed = data # already parsed @@ -1702,7 +1679,7 @@ def parse(self, data: Union[Dict[str, Any], bytes, str], **kwargs: Any) -> T: return self._parse(parsed, **kwargs) @abstractmethod - def _parse(self, parsed: Dict[str, Any], **kwargs: Any) -> T: + def _parse(self, parsed: dict[str, Any], **kwargs: Any) -> T: """Parse a document we know to represent success on the API level. Called by parse() once the high-level details have been worked out. 
@@ -1712,7 +1689,7 @@ def _parse(self, parsed: Dict[str, Any], **kwargs: Any) -> T: class Axis360FulfillmentInfoResponseParser( JSONResponseParser[ - Tuple[Union[FindawayManifest, "AxisNowManifest"], datetime.datetime] + tuple[Union[FindawayManifest, "AxisNowManifest"], datetime.datetime] ] ): """Parse JSON documents into Findaway audiobook manifests or AxisNow manifests.""" @@ -1727,10 +1704,10 @@ def __init__(self, api: Axis360API): def _parse( self, - parsed: Dict[str, Any], - license_pool: Optional[LicensePool] = None, + parsed: dict[str, Any], + license_pool: LicensePool | None = None, **kwargs: Any, - ) -> Tuple[Union[FindawayManifest, AxisNowManifest], datetime.datetime]: + ) -> tuple[FindawayManifest | AxisNowManifest, datetime.datetime]: """Extract all useful information from a parsed FulfillmentInfo response. @@ -1749,7 +1726,7 @@ def _parse( expiration_date = self._required_key("ExpirationDate", parsed) expiration_date = self.parse_date(expiration_date) - manifest: Union[FindawayManifest, AxisNowManifest] + manifest: FindawayManifest | AxisNowManifest if "FNDTransactionID" in parsed: manifest = self.parse_findaway(parsed, license_pool) else: @@ -1773,7 +1750,7 @@ def parse_date(self, date: str) -> datetime.datetime: return date_parsed def parse_findaway( - self, parsed: Dict[str, Any], license_pool: LicensePool + self, parsed: dict[str, Any], license_pool: LicensePool ) -> FindawayManifest: k = self._required_key fulfillmentId = k("FNDContentID", parsed) @@ -1796,7 +1773,7 @@ def parse_findaway( spine_items=spine_items, ) - def parse_axisnow(self, parsed: Dict[str, Any]) -> AxisNowManifest: + def parse_axisnow(self, parsed: dict[str, Any]) -> AxisNowManifest: k = self._required_key isbn = k("ISBN", parsed) book_vault_uuid = k("BookVaultUUID", parsed) @@ -1804,14 +1781,14 @@ def parse_axisnow(self, parsed: Dict[str, Any]) -> AxisNowManifest: class AudiobookMetadataParser( - JSONResponseParser[Tuple[Optional[str], List[SpineItem]]] + JSONResponseParser[tuple[Optional[str], list[SpineItem]]] ): """Parse the results of Axis 360's audiobook metadata API call.""" @classmethod def _parse( - cls, parsed: Dict[str, Any], **kwargs: Any - ) -> Tuple[Optional[str], List[SpineItem]]: + cls, parsed: dict[str, Any], **kwargs: Any + ) -> tuple[str | None, list[SpineItem]]: spine_items = [] accountId = parsed.get("fndaccountid", None) for item in parsed.get("readingOrder", []): @@ -1821,7 +1798,7 @@ def _parse( return accountId, spine_items @classmethod - def _extract_spine_item(cls, part: Dict[str, str | int | float]) -> SpineItem: + def _extract_spine_item(cls, part: dict[str, str | int | float]) -> SpineItem: """Convert an element of the 'readingOrder' list to a SpineItem.""" title = part.get("title") # Incoming duration is measured in seconds. @@ -1925,7 +1902,7 @@ def problem_detail_document(self, error_details: str) -> ProblemDetail: ) @property - def as_response(self) -> Union[Response, ProblemDetail]: + def as_response(self) -> Response | ProblemDetail: service_name = urlparse(str(self.content_link)).netloc try: if self.verify: @@ -1957,7 +1934,7 @@ def as_response(self) -> Union[Response, ProblemDetail]: e.code, service_name ) ) - except socket.timeout: + except TimeoutError: return self.problem_detail_document( f"Error connecting to {service_name}. Timeout occurred." 
) diff --git a/api/bibliotheca.py b/api/bibliotheca.py index 56b6f2e342..0320533228 100644 --- a/api/bibliotheca.py +++ b/api/bibliotheca.py @@ -10,9 +10,10 @@ import time import urllib.parse from abc import ABC +from collections.abc import Generator from datetime import datetime, timedelta from io import BytesIO -from typing import Dict, Generator, List, Tuple, Type, TypeVar, Union +from typing import Optional, TypeVar import dateutil.parser from dependency_injector.wiring import Provide, inject @@ -104,7 +105,7 @@ class BibliothecaSettings(BaseCirculationApiSettings): class BibliothecaLibrarySettings(BaseCirculationLoanSettings): - dont_display_reserves: Optional[str] = FormField( + dont_display_reserves: str | None = FormField( form=ConfigurationFormItem( label=_("Show/Hide Titles with No Available Loans"), required=False, @@ -666,9 +667,9 @@ def xpath_expression(self) -> str: @classmethod def contributors_from_string( - cls, string: Optional[str], role: str = Contributor.AUTHOR_ROLE - ) -> List[ContributorData]: - contributors: List[ContributorData] = [] + cls, string: str | None, role: str = Contributor.AUTHOR_ROLE + ) -> list[ContributorData]: + contributors: list[ContributorData] = [] if not string: return contributors @@ -685,8 +686,8 @@ def contributors_from_string( return contributors @classmethod - def parse_genre_string(self, s: Optional[str]) -> List[SubjectData]: - genres: List[SubjectData] = [] + def parse_genre_string(self, s: str | None) -> list[SubjectData]: + genres: list[SubjectData] = [] if not s: return genres for i in s.split(","): @@ -708,9 +709,7 @@ def parse_genre_string(self, s: Optional[str]) -> List[SubjectData]: ) return genres - def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Metadata: + def process_one(self, tag: _Element, namespaces: dict[str, str] | None) -> Metadata: """Turn an tag into a Metadata and an encompassed CirculationData objects, and return the Metadata.""" @@ -956,7 +955,7 @@ def process_first(self, string: str | bytes) -> Exception: return return_val def process_one( - self, error_tag: _Element, namespaces: Optional[Dict[str, str]] + self, error_tag: _Element, namespaces: dict[str, str] | None ) -> Exception: message = self.text_of_optional_subtag(error_tag, "Message") if not message: @@ -1031,9 +1030,7 @@ class PatronCirculationParser(XMLParser): def __init__(self, collection: Collection) -> None: self.collection = collection - def process_all( - self, string: bytes | str - ) -> itertools.chain[Union[LoanInfo, HoldInfo]]: + def process_all(self, string: bytes | str) -> itertools.chain[LoanInfo | HoldInfo]: xml = self._load_xml(string) loans = self._process_all( xml, "//Checkouts/Item", namespaces={}, handler=self.process_one_loan @@ -1047,26 +1044,26 @@ def process_all( return itertools.chain(loans, holds, reserves) def process_one_loan( - self, tag: _Element, namespaces: Dict[str, str] - ) -> Optional[LoanInfo]: + self, tag: _Element, namespaces: dict[str, str] + ) -> LoanInfo | None: return self.process_one(tag, namespaces, LoanInfo) def process_one_hold( - self, tag: _Element, namespaces: Dict[str, str] - ) -> Optional[HoldInfo]: + self, tag: _Element, namespaces: dict[str, str] + ) -> HoldInfo | None: return self.process_one(tag, namespaces, HoldInfo) def process_one_reserve( - self, tag: _Element, namespaces: Dict[str, str] - ) -> Optional[HoldInfo]: + self, tag: _Element, namespaces: dict[str, str] + ) -> HoldInfo | None: hold_info = self.process_one(tag, namespaces, HoldInfo) if hold_info is not 
None: hold_info.hold_position = 0 return hold_info def process_one( - self, tag: _Element, namespaces: Dict[str, str], source_class: Type[T] - ) -> Optional[T]: + self, tag: _Element, namespaces: dict[str, str], source_class: type[T] + ) -> T | None: if not tag.xpath("ItemId"): # This happens for events associated with books # no longer in our collection. @@ -1099,16 +1096,16 @@ def datevalue(key): class DateResponseParser(BibliothecaParser[Optional[datetime]], ABC): """Extract a date from a response.""" - RESULT_TAG_NAME: Optional[str] = None - DATE_TAG_NAME: Optional[str] = None + RESULT_TAG_NAME: str | None = None + DATE_TAG_NAME: str | None = None @property def xpath_expression(self) -> str: return f"/{self.RESULT_TAG_NAME}/{self.DATE_TAG_NAME}" def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Optional[datetime]: + self, tag: _Element, namespaces: dict[str, str] | None + ) -> datetime | None: due_date = tag.text if not due_date: return None @@ -1158,7 +1155,7 @@ def xpath_expression(self) -> str: def process_all( self, string: bytes | str, no_events_error=False ) -> Generator[ - Tuple[str, str, Optional[str], datetime, Optional[datetime], str], None, None + tuple[str, str, str | None, datetime, datetime | None, str], None, None ]: has_events = False for i in super().process_all(string): @@ -1181,8 +1178,8 @@ def process_all( ) def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Tuple[str, str, Optional[str], datetime, Optional[datetime], str]: + self, tag: _Element, namespaces: dict[str, str] | None + ) -> tuple[str, str, str | None, datetime, datetime | None, str]: isbn = self.text_of_subtag(tag, "ISBN") bibliotheca_id = self.text_of_subtag(tag, "ItemId") patron_id = self.text_of_optional_subtag(tag, "PatronId") diff --git a/api/circulation.py b/api/circulation.py index e96148f5cd..e063264be1 100644 --- a/api/circulation.py +++ b/api/circulation.py @@ -5,9 +5,10 @@ import sys import time from abc import ABC, abstractmethod +from collections.abc import Iterable from threading import Thread from types import TracebackType -from typing import Any, Dict, Iterable, List, Literal, Tuple, Type, TypeVar +from typing import Any, Literal, TypeVar import flask from flask import Response @@ -53,9 +54,9 @@ class CirculationInfo: def __init__( self, collection: Collection | int | None, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, ) -> None: """A loan, hold, or whatever. @@ -73,7 +74,7 @@ def __init__( :param identifier: The string identifying the LicensePool. 
""" - self.collection_id: Optional[int] + self.collection_id: int | None if isinstance(collection, int): self.collection_id = collection elif isinstance(collection, Collection) and collection.id is not None: @@ -85,7 +86,7 @@ def __init__( self.identifier_type = identifier_type self.identifier = identifier - def collection(self, _db: Session) -> Optional[Collection]: + def collection(self, _db: Session) -> Collection | None: """Find the Collection to which this object belongs.""" if self.collection_id is None: return None @@ -103,7 +104,7 @@ def license_pool(self, _db: Session) -> LicensePool: ) return pool - def fd(self, d: Optional[datetime.datetime]) -> Optional[str]: + def fd(self, d: datetime.datetime | None) -> str | None: # Stupid method to format a date if not d: return None @@ -128,10 +129,10 @@ class DeliveryMechanismInfo(CirculationInfo): def __init__( self, - content_type: Optional[str], - drm_scheme: Optional[str], - rights_uri: Optional[str] = RightsStatus.IN_COPYRIGHT, - resource: Optional[Resource] = None, + content_type: str | None, + drm_scheme: str | None, + rights_uri: str | None = RightsStatus.IN_COPYRIGHT, + resource: Resource | None = None, ) -> None: """Constructor. @@ -152,7 +153,7 @@ def __init__( def apply( self, loan: Loan, autocommit: bool = True - ) -> Optional[LicensePoolDeliveryMechanism]: + ) -> LicensePoolDeliveryMechanism | None: """Set an appropriate LicensePoolDeliveryMechanism on the given `Loan`, creating a DeliveryMechanism if necessary. @@ -210,13 +211,13 @@ class FulfillmentInfo(CirculationInfo): def __init__( self, collection: Collection | int | None, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], - content_link: Optional[str], - content_type: Optional[str], - content: Optional[str], - content_expires: Optional[datetime.datetime], + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, + content_link: str | None, + content_type: str | None, + content: str | None, + content_expires: datetime.datetime | None, content_link_redirect: bool = False, ) -> None: """Constructor. 
@@ -280,35 +281,35 @@ def as_response(self) -> Response | ProblemDetail | None: return None @property - def content_link(self) -> Optional[str]: + def content_link(self) -> str | None: return self._content_link @content_link.setter - def content_link(self, value: Optional[str]) -> None: + def content_link(self, value: str | None) -> None: self._content_link = value @property - def content_type(self) -> Optional[str]: + def content_type(self) -> str | None: return self._content_type @content_type.setter - def content_type(self, value: Optional[str]) -> None: + def content_type(self, value: str | None) -> None: self._content_type = value @property - def content(self) -> Optional[str]: + def content(self) -> str | None: return self._content @content.setter - def content(self, value: Optional[str]) -> None: + def content(self, value: str | None) -> None: self._content = value @property - def content_expires(self) -> Optional[datetime.datetime]: + def content_expires(self) -> datetime.datetime | None: return self._content_expires @content_expires.setter - def content_expires(self, value: Optional[datetime.datetime]) -> None: + def content_expires(self, value: datetime.datetime | None) -> None: self._content_expires = value @@ -326,9 +327,9 @@ class APIAwareFulfillmentInfo(FulfillmentInfo, ABC): def __init__( self, api: CirculationApiType, - data_source_name: Optional[str], - identifier_type: Optional[str], - identifier: Optional[str], + data_source_name: str | None, + identifier_type: str | None, + identifier: str | None, key: Any, ) -> None: """Constructor. @@ -375,39 +376,39 @@ def do_fetch(self) -> None: ... @property - def content_link(self) -> Optional[str]: + def content_link(self) -> str | None: self.fetch() return self._content_link @content_link.setter - def content_link(self, value: Optional[str]) -> None: + def content_link(self, value: str | None) -> None: raise NotImplementedError() @property - def content_type(self) -> Optional[str]: + def content_type(self) -> str | None: self.fetch() return self._content_type @content_type.setter - def content_type(self, value: Optional[str]) -> None: + def content_type(self, value: str | None) -> None: raise NotImplementedError() @property - def content(self) -> Optional[str]: + def content(self) -> str | None: self.fetch() return self._content @content.setter - def content(self, value: Optional[str]) -> None: + def content(self, value: str | None) -> None: raise NotImplementedError() @property - def content_expires(self) -> Optional[datetime.datetime]: + def content_expires(self) -> datetime.datetime | None: self.fetch() return self._content_expires @content_expires.setter - def content_expires(self, value: Optional[datetime.datetime]) -> None: + def content_expires(self, value: datetime.datetime | None) -> None: raise NotImplementedError() @@ -417,14 +418,14 @@ class LoanInfo(CirculationInfo): def __init__( self, collection: Collection | int, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], - start_date: Optional[datetime.datetime], - end_date: Optional[datetime.datetime], - fulfillment_info: Optional[FulfillmentInfo] = None, - external_identifier: Optional[str] = None, - locked_to: Optional[DeliveryMechanismInfo] = None, + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, + start_date: datetime.datetime | None, + end_date: datetime.datetime | None, + fulfillment_info: FulfillmentInfo | None = None, + external_identifier: str | None 
= None, + locked_to: DeliveryMechanismInfo | None = None, ): """Constructor. @@ -472,13 +473,13 @@ class HoldInfo(CirculationInfo): def __init__( self, collection: Collection | int, - data_source_name: Optional[str | DataSource], - identifier_type: Optional[str], - identifier: Optional[str], - start_date: Optional[datetime.datetime], - end_date: Optional[datetime.datetime], - hold_position: Optional[int], - external_identifier: Optional[str] = None, + data_source_name: str | DataSource | None, + identifier_type: str | None, + identifier: str | None, + start_date: datetime.datetime | None, + end_date: datetime.datetime | None, + hold_position: int | None, + external_identifier: str | None = None, ): super().__init__(collection, data_source_name, identifier_type, identifier) self.start_date = start_date @@ -499,7 +500,7 @@ def __repr__(self) -> str: class BaseCirculationEbookLoanSettings(BaseSettings): """A mixin for settings that apply to ebook loans.""" - ebook_loan_duration: Optional[PositiveInt] = FormField( + ebook_loan_duration: PositiveInt | None = FormField( default=Collection.STANDARD_DEFAULT_LOAN_PERIOD, form=ConfigurationFormItem( label=_("Ebook Loan Duration (in Days)"), @@ -514,7 +515,7 @@ class BaseCirculationEbookLoanSettings(BaseSettings): class BaseCirculationLoanSettings(BaseSettings): """A mixin for settings that apply to loans.""" - default_loan_duration: Optional[PositiveInt] = FormField( + default_loan_duration: PositiveInt | None = FormField( default=Collection.STANDARD_DEFAULT_LOAN_PERIOD, form=ConfigurationFormItem( label=_("Default Loan Period (in Days)"), @@ -536,9 +537,7 @@ class CirculationInternalFormatsMixin: # For instance, the combination ("application/epub+zip", # "vnd.adobe/adept+xml") is called "ePub" in Axis 360 and 3M, but # is called "ebook-epub-adobe" in Overdrive. - delivery_mechanism_to_internal_format: Dict[ - Tuple[Optional[str], Optional[str]], str - ] = {} + delivery_mechanism_to_internal_format: dict[tuple[str | None, str | None], str] = {} def internal_format(self, delivery_mechanism: LicensePoolDeliveryMechanism) -> str: """Look up the internal format for this delivery mechanism or @@ -596,7 +595,7 @@ class BaseCirculationAPI( # wait til the point of fulfillment to set a delivery mechanism # (Overdrive), set this to FULFILL_STEP. If there is no choice of # delivery mechanisms (3M), set this to None. - SET_DELIVERY_MECHANISM_AT: Optional[str] = FULFILL_STEP + SET_DELIVERY_MECHANISM_AT: str | None = FULFILL_STEP def __init__(self, _db: Session, collection: Collection): self._db = _db @@ -676,7 +675,7 @@ def checkout( def can_fulfill_without_loan( self, - patron: Optional[Patron], + patron: Patron | None, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism, ) -> bool: @@ -700,7 +699,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: """Place a book on hold. @@ -753,8 +752,8 @@ def __init__( self, db: Session, library: Library, - analytics: Optional[Analytics] = None, - registry: Optional[IntegrationRegistry[CirculationApiType]] = None, + analytics: Analytics | None = None, + registry: IntegrationRegistry[CirculationApiType] | None = None, ): """Constructor. 
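A sketch of how a caller typically wires this up, assuming a SQLAlchemy session, a Library, and an existing LicensePool, Patron, and delivery mechanism are already in hand (all variable names are placeholders); api_for_license_pool and borrow appear in the hunks just below:

    from api.circulation import CirculationAPI

    circulation = CirculationAPI(session, library)          # analytics and registry are optional
    api = circulation.api_for_license_pool(license_pool)    # None if no API handles this collection
    if api is not None:
        loan, hold, is_new = circulation.borrow(
            patron, pin, license_pool, delivery_mechanism
        )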
@@ -811,14 +810,14 @@ def __init__( self.collection_ids_for_sync.append(collection.id) @property - def library(self) -> Optional[Library]: + def library(self) -> Library | None: if self.library_id is None: return None return Library.by_id(self._db, self.library_id) def api_for_license_pool( self, licensepool: LicensePool - ) -> Optional[CirculationApiType]: + ) -> CirculationApiType | None: """Find the API to use for the given license pool.""" return self.api_for_collection.get(licensepool.collection.id) @@ -836,8 +835,8 @@ def can_revoke_hold(self, licensepool: LicensePool, hold: Hold) -> bool: def _collect_event( self, - patron: Optional[Patron], - licensepool: Optional[LicensePool], + patron: Patron | None, + licensepool: LicensePool | None, name: str, include_neighborhood: bool = False, ) -> None: @@ -908,8 +907,8 @@ def borrow( pin: str, licensepool: LicensePool, delivery_mechanism: LicensePoolDeliveryMechanism, - hold_notification_email: Optional[str] = None, - ) -> Tuple[Optional[Loan], Optional[Hold], bool]: + hold_notification_email: str | None = None, + ) -> tuple[Loan | None, Hold | None, bool]: """Either borrow a book or put it on hold. Don't worry about fulfilling the loan yet. @@ -1248,9 +1247,9 @@ def patron_at_hold_limit(self, patron: Patron) -> bool: def can_fulfill_without_loan( self, - patron: Optional[Patron], - pool: Optional[LicensePool], - lpdm: Optional[LicensePoolDeliveryMechanism], + patron: Patron | None, + pool: LicensePool | None, + lpdm: LicensePoolDeliveryMechanism | None, ) -> bool: """Can we deliver the given book in the given format to the given patron, even though the patron has no active loan for that @@ -1446,7 +1445,7 @@ def release_hold( def patron_activity( self, patron: Patron, pin: str - ) -> Tuple[List[LoanInfo], List[HoldInfo], bool]: + ) -> tuple[list[LoanInfo], list[HoldInfo], bool]: """Return a record of the patron's current activity vis-a-vis all relevant external loan sources. @@ -1469,11 +1468,11 @@ def __init__( self.api = api self.patron = patron self.pin = pin - self.activity: Optional[Iterable[LoanInfo | HoldInfo]] = None - self.exception: Optional[Exception] = None - self.trace: Tuple[ - Type[BaseException], BaseException, TracebackType - ] | Tuple[None, None, None] | None = None + self.activity: Iterable[LoanInfo | HoldInfo] | None = None + self.exception: Exception | None = None + self.trace: tuple[ + type[BaseException], BaseException, TracebackType + ] | tuple[None, None, None] | None = None super().__init__() def run(self) -> None: @@ -1506,8 +1505,8 @@ def run(self) -> None: thread.start() for thread in threads: thread.join() - loans: List[LoanInfo] = [] - holds: List[HoldInfo] = [] + loans: list[LoanInfo] = [] + holds: list[HoldInfo] = [] complete = True for thread in threads: if thread.exception: @@ -1556,7 +1555,7 @@ def local_holds(self, patron: Patron) -> Query[Hold]: def sync_bookshelf( self, patron: Patron, pin: str, force: bool = False - ) -> Tuple[List[Loan] | Query[Loan], List[Hold] | Query[Hold]]: + ) -> tuple[list[Loan] | Query[Loan], list[Hold] | Query[Hold]]: """Sync our internal model of a patron's bookshelf with any external vendors that provide books to the patron's library. @@ -1579,7 +1578,7 @@ def sync_bookshelf( # Assuming everything goes well, we will set # Patron.last_loan_activity_sync to this value -- the moment # just before we started contacting the vendor APIs. 
- last_loan_activity_sync: Optional[datetime.datetime] = utc_now() + last_loan_activity_sync: datetime.datetime | None = utc_now() # Update the external view of the patron's current state. remote_loans, remote_holds, complete = self.patron_activity(patron, pin) @@ -1624,8 +1623,8 @@ def sync_bookshelf( active_loans = [] active_holds = [] - start: Optional[datetime.datetime] - end: Optional[datetime.datetime] + start: datetime.datetime | None + end: datetime.datetime | None for loan in remote_loans: # This is a remote loan. Find or create the corresponding # local loan. diff --git a/api/circulation_exceptions.py b/api/circulation_exceptions.py index 0b62206113..d4b187a6ad 100644 --- a/api/circulation_exceptions.py +++ b/api/circulation_exceptions.py @@ -1,5 +1,3 @@ -from typing import Optional - from flask_babel import lazy_gettext as _ from api.problem_details import * @@ -143,7 +141,7 @@ class LimitReached(CirculationException): """ status_code = 403 - BASE_DOC: Optional[ProblemDetail] = None + BASE_DOC: ProblemDetail | None = None MESSAGE_WITH_LIMIT = None def __init__(self, message=None, debug_info=None, limit=None): diff --git a/api/config.py b/api/config.py index 636fd6cc48..72562e0397 100644 --- a/api/config.py +++ b/api/config.py @@ -1,4 +1,4 @@ -from typing import Iterable, List, Optional, Tuple +from collections.abc import Iterable from Crypto.Cipher import PKCS1_OAEP from Crypto.Cipher.PKCS1_OAEP import PKCS1OAEP_Cipher @@ -142,28 +142,28 @@ def estimate_language_collections_when_unset(cls, library: Library) -> None: cls.estimate_language_collections_for_library(library) @classmethod - def large_collection_languages(cls, library: Library) -> List[str]: + def large_collection_languages(cls, library: Library) -> list[str]: cls.estimate_language_collections_when_unset(library) if library.settings.large_collection_languages is None: return [] return library.settings.large_collection_languages @classmethod - def small_collection_languages(cls, library: Library) -> List[str]: + def small_collection_languages(cls, library: Library) -> list[str]: cls.estimate_language_collections_when_unset(library) if library.settings.small_collection_languages is None: return [] return library.settings.small_collection_languages @classmethod - def tiny_collection_languages(cls, library: Library) -> List[str]: + def tiny_collection_languages(cls, library: Library) -> list[str]: cls.estimate_language_collections_when_unset(library) if library.settings.tiny_collection_languages is None: return [] return library.settings.tiny_collection_languages @classmethod - def max_outstanding_fines(cls, library: Library) -> Optional[Money]: + def max_outstanding_fines(cls, library: Library) -> Money | None: if library.settings.max_outstanding_fines is None: return None return MoneyUtility.parse(library.settings.max_outstanding_fines) @@ -235,7 +235,7 @@ def _as_mailto(cls, value): return "mailto:%s" % value @classmethod - def help_uris(cls, library: Library) -> Iterable[Tuple[Optional[str], str]]: + def help_uris(cls, library: Library) -> Iterable[tuple[str | None, str]]: """Find all the URIs that might help patrons get help from this library. 
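Configuration.help_uris (continued in the next hunk) yields (media type, URI) pairs in which the media type may be None; a small usage sketch, assuming a Library object is available:

    from api.config import Configuration

    for media_type, uri in Configuration.help_uris(library):
        # media_type may be None; both values depend entirely on the library's settings.
        print(media_type, uri)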
@@ -247,7 +247,7 @@ def help_uris(cls, library: Library) -> Iterable[Tuple[Optional[str], str]]: yield "text/html", library.settings.help_web @classmethod - def copyright_designated_agent_uri(cls, library: Library) -> Optional[str]: + def copyright_designated_agent_uri(cls, library: Library) -> str | None: if library.settings.copyright_designated_agent_email_address: email = library.settings.copyright_designated_agent_email_address elif library.settings.help_email: @@ -258,7 +258,7 @@ def copyright_designated_agent_uri(cls, library: Library) -> Optional[str]: return cls._as_mailto(email) @classmethod - def configuration_contact_uri(cls, library: Library) -> Optional[str]: + def configuration_contact_uri(cls, library: Library) -> str | None: if library.settings.configuration_contact_email_address: email = library.settings.configuration_contact_email_address elif library.settings.help_email: diff --git a/api/controller/marc.py b/api/controller/marc.py index d72fe4d999..c54fdd715e 100644 --- a/api/controller/marc.py +++ b/api/controller/marc.py @@ -3,7 +3,6 @@ from collections import defaultdict from dataclasses import dataclass, field from datetime import datetime -from typing import Dict, Optional import flask from flask import Response @@ -50,7 +49,7 @@ class MARCRecordController: """ - def __init__(self, storage_service: Optional[S3Service]) -> None: + def __init__(self, storage_service: S3Service | None) -> None: self.storage_service = storage_service @staticmethod @@ -74,7 +73,7 @@ def has_integration(session: Session, library: Library) -> bool: @staticmethod def get_files( session: Session, library: Library - ) -> Dict[str, MarcFileCollectionResult]: + ) -> dict[str, MarcFileCollectionResult]: marc_files = session.execute( select( IntegrationConfiguration.name, @@ -97,7 +96,7 @@ def get_files( ) ).all() - files_by_collection: Dict[str, MarcFileCollectionResult] = defaultdict( + files_by_collection: dict[str, MarcFileCollectionResult] = defaultdict( MarcFileCollectionResult ) for file_row in marc_files: diff --git a/api/discovery/opds_registration.py b/api/discovery/opds_registration.py index 67be3278e1..3f13078231 100644 --- a/api/discovery/opds_registration.py +++ b/api/discovery/opds_registration.py @@ -3,18 +3,8 @@ import base64 import json import sys -from typing import ( - Any, - Callable, - Dict, - List, - Literal, - Optional, - Tuple, - Type, - Union, - overload, -) +from collections.abc import Callable +from typing import Any, Literal, overload from Crypto.Cipher.PKCS1_OAEP import PKCS1OAEP_Cipher from flask_babel import lazy_gettext as _ @@ -99,13 +89,13 @@ def protocol_details(cls, db: Session) -> dict[str, Any]: } @classmethod - def settings_class(cls) -> Type[OpdsRegistrationServiceSettings]: + def settings_class(cls) -> type[OpdsRegistrationServiceSettings]: """Get the settings for this integration.""" return OpdsRegistrationServiceSettings @classmethod @overload - def for_integration(cls, _db: Session, integration: int) -> Optional[Self]: + def for_integration(cls, _db: Session, integration: int) -> Self | None: ... @classmethod @@ -118,7 +108,7 @@ def for_integration( @classmethod def for_integration( cls, _db: Session, integration: int | IntegrationConfiguration - ) -> Optional[Self]: + ) -> Self | None: """ Find a OpdsRegistrationService object configured by the given IntegrationConfiguration ID. 
""" @@ -138,14 +128,14 @@ def get_request(url: str) -> Response: @staticmethod def post_request( - url: str, payload: Union[str, Dict[str, Any]], **kwargs: Any + url: str, payload: str | dict[str, Any], **kwargs: Any ) -> Response: return HTTP.debuggable_post(url, payload, **kwargs) @classmethod def for_protocol_goal_and_url( cls, _db: Session, protocol: str, goal: Goals, url: str - ) -> Optional[Self]: + ) -> Self | None: """Get a LibraryRegistry for the given protocol, goal, and URL. Create the corresponding ExternalIntegration if necessary. """ @@ -161,7 +151,7 @@ def for_protocol_goal_and_url( return cls(integration, settings) @property - def registrations(self) -> List[DiscoveryServiceRegistration]: + def registrations(self) -> list[DiscoveryServiceRegistration]: """Find all of this site's registrations with this OpdsRegistrationService. :yield: A sequence of Registration objects. @@ -175,7 +165,7 @@ def registrations(self) -> List[DiscoveryServiceRegistration]: def fetch_catalog( self, - ) -> Tuple[str, str]: + ) -> tuple[str, str]: """Fetch the root catalog for this OpdsRegistrationService. :return: A ProblemDetail if there's a problem communicating @@ -187,7 +177,7 @@ def fetch_catalog( return self._extract_catalog_information(response) @classmethod - def _extract_catalog_information(cls, response: Response) -> Tuple[str, str]: + def _extract_catalog_information(cls, response: Response) -> tuple[str, str]: """From an OPDS catalog, extract information that's essential to kickstarting the OPDS Directory Registration Protocol. @@ -220,7 +210,7 @@ def _extract_catalog_information(cls, response: Response) -> Tuple[str, str]: def fetch_registration_document( self, - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[str | None, str | None]: """Fetch a discovery service's registration document and extract useful information from it. @@ -237,7 +227,7 @@ def fetch_registration_document( @classmethod def _extract_registration_information( cls, response: Response - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[str | None, str | None]: """From an OPDS registration document, extract information that's useful to kickstarting the OPDS Directory Registration Protocol. @@ -277,7 +267,7 @@ def _extract_registration_information( @classmethod def _extract_links( cls, response: Response - ) -> Tuple[Optional[Dict[str, Any]], List[Dict[str, str]]]: + ) -> tuple[dict[str, Any] | None, list[dict[str, str]]]: """Parse an OPDS 2 feed out of a Requests response object. :return: A 2-tuple (parsed_catalog, links), @@ -388,7 +378,7 @@ def _create_registration_payload( library: Library, stage: RegistrationStage, url_for: Callable[..., str], - ) -> Dict[str, str]: + ) -> dict[str, str]: """Collect the key-value pairs to be sent when kicking off the registration protocol. @@ -416,7 +406,7 @@ def _create_registration_payload( @staticmethod def _create_registration_headers( registration: DiscoveryServiceRegistration, - ) -> Dict[str, str]: + ) -> dict[str, str]: shared_secret = registration.shared_secret headers = {} if shared_secret: @@ -427,8 +417,8 @@ def _create_registration_headers( def _send_registration_request( cls, register_url: str, - headers: Dict[str, str], - payload: Dict[str, str], + headers: dict[str, str], + payload: dict[str, str], ) -> Response: """Send the request that actually kicks off the OPDS Directory Registration Protocol. 
@@ -471,7 +461,7 @@ def _decrypt_shared_secret( def _process_registration_result( cls, registration: DiscoveryServiceRegistration, - catalog: Dict[str, Any] | Any, + catalog: dict[str, Any] | Any, cipher: PKCS1OAEP_Cipher, desired_stage: RegistrationStage, ) -> Literal[True]: @@ -494,7 +484,7 @@ def _process_registration_result( f"Remote service served '{catalog}', which I can't make sense of as an OPDS document.", ) ) - metadata: Dict[str, str] = catalog.get("metadata", {}) + metadata: dict[str, str] = catalog.get("metadata", {}) short_name = metadata.get("short_name") encrypted_shared_secret = metadata.get("shared_secret") links = catalog.get("links", []) diff --git a/api/discovery/registration_script.py b/api/discovery/registration_script.py index 0e5ba71f21..a0d19e7481 100644 --- a/api/discovery/registration_script.py +++ b/api/discovery/registration_script.py @@ -1,7 +1,8 @@ from __future__ import annotations from argparse import ArgumentParser -from typing import Callable, List, Literal, Optional +from collections.abc import Callable +from typing import Literal from flask import url_for from sqlalchemy.orm import Session @@ -41,8 +42,8 @@ def arg_parser(cls, _db: Session) -> ArgumentParser: # type: ignore[override] def do_run( self, - cmd_args: Optional[List[str]] = None, - manager: Optional[CirculationManager] = None, + cmd_args: list[str] | None = None, + manager: CirculationManager | None = None, ) -> PalaceFlask | Literal[False]: parsed = self.parse_command_line(self._db, cmd_args) diff --git a/api/enki.py b/api/enki.py index baba060812..b290d4c733 100644 --- a/api/enki.py +++ b/api/enki.py @@ -4,7 +4,8 @@ import json import logging import time -from typing import Any, Callable, Generator, Mapping, Tuple, cast +from collections.abc import Callable, Generator, Mapping +from typing import Any, cast from dependency_injector.wiring import Provide from flask_babel import lazy_gettext as _ @@ -79,7 +80,7 @@ class EnkiLibrarySettings(BaseSettings): enki_library_id: str = FormField( form=ConfigurationFormItem(label=_("Library ID"), required=True) ) - dont_display_reserves: Optional[str] = FormField( + dont_display_reserves: str | None = FormField( form=ConfigurationFormItem( label=_("Show/Hide Titles with No Available Loans"), required=False, @@ -149,7 +150,7 @@ def __init__(self, _db: Session, collection: Collection): self.collection_id = collection.id self.base_url = self.settings.url or self.PRODUCTION_BASE_URL - def enki_library_id(self, library: Library) -> Optional[str]: + def enki_library_id(self, library: Library) -> str | None: """Find the Enki library ID for the given library.""" if library.id is None: return None @@ -198,7 +199,7 @@ def count_title_changes() -> str: % library.name ) - def count_patron_loans_and_holds(patron: Patron, pin: Optional[str]) -> str: + def count_patron_loans_and_holds(patron: Patron, pin: str | None) -> str: activity = list(self.patron_activity(patron, pin)) return "Total loans and holds: %s" % len(activity) @@ -208,9 +209,9 @@ def request( self, url: str, method: str = "get", - extra_headers: Optional[Mapping[str, str]] = None, - data: Optional[Mapping[str, Any]] = None, - params: Optional[Mapping[str, Any]] = None, + extra_headers: Mapping[str, str] | None = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, retry_on_timeout: bool = True, **kwargs: Any, ) -> RequestsResponse: @@ -247,8 +248,8 @@ def _request( url: str, method: str, headers: Mapping[str, str], - data: Optional[Mapping[str, Any]] = None, - 
params: Optional[Mapping[str, Any]] = None, + data: Mapping[str, Any] | None = None, + params: Mapping[str, Any] | None = None, **kwargs: Any, ) -> RequestsResponse: """Actually make an HTTP request. @@ -331,7 +332,7 @@ def updated_titles( response = self.request(url, params=args) yield from BibliographicParser().process_all(response.content) - def get_item(self, enki_id: Optional[str]) -> Optional[Metadata]: + def get_item(self, enki_id: str | None) -> Metadata | None: """Retrieve bibliographic and availability information for a specific title. @@ -437,10 +438,10 @@ def checkin(self, patron: Patron, pin: str, licensepool: LicensePool) -> None: def loan_request( self, - barcode: Optional[str], - pin: Optional[str], - book_id: Optional[str], - enki_library_id: Optional[str], + barcode: str | None, + pin: str | None, + book_id: str | None, + enki_library_id: str | None, ) -> RequestsResponse: self.log.debug("Sending checkout request for %s" % book_id) url = str(self.base_url) + str(self.user_endpoint) @@ -517,7 +518,7 @@ def parse_fulfill_result( return url, item_type, expires def patron_activity( - self, patron: Patron, pin: Optional[str] + self, patron: Patron, pin: str | None ) -> Generator[LoanInfo | HoldInfo, None, None]: enki_library_id = self.enki_library_id(patron.library) response = self.patron_request( @@ -544,7 +545,7 @@ def patron_activity( yield hold_info def patron_request( - self, patron: Optional[str], pin: Optional[str], enki_library_id: Optional[str] + self, patron: str | None, pin: str | None, enki_library_id: str | None ) -> RequestsResponse: self.log.debug("Querying Enki for information on patron %s" % patron) url = str(self.base_url) + str(self.user_endpoint) @@ -574,7 +575,7 @@ def parse_patron_loans(self, checkout_data: Mapping[str, Any]) -> LoanInfo: fulfillment_info=None, ) - def parse_patron_holds(self, hold_data: Mapping[str, Any]) -> Optional[HoldInfo]: + def parse_patron_holds(self, hold_data: Mapping[str, Any]) -> HoldInfo | None: self.log.warning( "Hold information received, but parsing patron holds is not implemented. %r", hold_data, @@ -586,7 +587,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: raise NotImplementedError() @@ -736,8 +737,8 @@ def extract_circulation( self, primary_identifier: IdentifierData, availability: Mapping[str, str], - formattype: Optional[str], - ) -> Optional[CirculationData]: + formattype: str | None, + ) -> CirculationData | None: """Turn the 'availability' portion of an Enki API response into a CirculationData. """ @@ -809,8 +810,8 @@ def collection(self) -> Collection | None: def catch_up_from( self, - start: Optional[datetime.datetime], - cutoff: Optional[datetime.datetime], + start: datetime.datetime | None, + cutoff: datetime.datetime | None, progress: TimestampData, ) -> None: """Find Enki books that changed recently. @@ -912,7 +913,7 @@ def _update_circulation( return circulation_changes - def process_book(self, bibliographic: Metadata) -> Tuple[Edition, LicensePool]: + def process_book(self, bibliographic: Metadata) -> tuple[Edition, LicensePool]: """Make the local database reflect the state of the remote Enki collection for the given book. 
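EnkiAPI.patron_activity (a few hunks above) is a generator that mixes loans and holds; a consumer can split them by type, assuming an EnkiAPI instance, a Patron, and a pin are already available:

    from api.circulation import HoldInfo, LoanInfo

    loans, holds = [], []
    for item in enki_api.patron_activity(patron, pin):
        if isinstance(item, LoanInfo):
            loans.append(item)
        else:
            holds.append(item)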
@@ -956,7 +957,7 @@ def __init__( api = api_class self.api = api - def process_item(self, identifier: Identifier) -> Optional[CirculationData]: + def process_item(self, identifier: Identifier) -> CirculationData | None: self.log.debug("Seeing if %s needs reaping", identifier.identifier) metadata = self.api.get_item(identifier.identifier) if metadata: diff --git a/api/firstbook2.py b/api/firstbook2.py index f9e2dbd2fd..22a3870f79 100644 --- a/api/firstbook2.py +++ b/api/firstbook2.py @@ -2,7 +2,7 @@ import re import time -from typing import Optional, Pattern, Union +from re import Pattern import jwt import requests @@ -48,7 +48,7 @@ class FirstBookAuthSettings(BasicAuthProviderSettings): weight=10, ), ) - password_regular_expression: Optional[Pattern] = FormField( + password_regular_expression: Pattern | None = FormField( re.compile(r"^[0-9]+$"), form=ConfigurationFormItem( label="Password Regular Expression", @@ -107,8 +107,8 @@ def __init__( self.secret = settings.password def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None: # All FirstBook credentials are in upper-case. if username is None or username == "": return None @@ -128,8 +128,8 @@ def remote_authenticate( ) def remote_patron_lookup( - self, patron_or_patrondata: Union[PatronData, Patron] - ) -> Optional[PatronData]: + self, patron_or_patrondata: PatronData | Patron + ) -> PatronData | None: if isinstance(patron_or_patrondata, PatronData): return patron_or_patrondata diff --git a/api/kansas_patron.py b/api/kansas_patron.py index 8d44bb32c3..2fc504a584 100644 --- a/api/kansas_patron.py +++ b/api/kansas_patron.py @@ -1,5 +1,3 @@ -from typing import Optional, Type, Union - from flask_babel import lazy_gettext as _ from lxml import etree from pydantic import HttpUrl @@ -37,11 +35,11 @@ def description(cls) -> str: return "An authentication service for the Kansas State Library." @classmethod - def settings_class(cls) -> Type[KansasAuthSettings]: + def settings_class(cls) -> type[KansasAuthSettings]: return KansasAuthSettings @classmethod - def library_settings_class(cls) -> Type[BasicAuthProviderLibrarySettings]: + def library_settings_class(cls) -> type[BasicAuthProviderLibrarySettings]: return BasicAuthProviderLibrarySettings def __init__( @@ -61,8 +59,8 @@ def __init__( # methods. def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None: # Create XML doc for request authorization_request = self.create_authorize_request(username, password) # Post request to the server @@ -83,8 +81,8 @@ def remote_authenticate( ) def remote_patron_lookup( - self, patron_or_patrondata: Union[PatronData, Patron] - ) -> Optional[PatronData]: + self, patron_or_patrondata: PatronData | Patron + ) -> PatronData | None: # Kansas auth gives very little data about the patron. So this function is just a passthrough. 
if isinstance(patron_or_patrondata, PatronData): return patron_or_patrondata diff --git a/api/lanes.py b/api/lanes.py index c3a8b74680..13f269bde1 100644 --- a/api/lanes.py +++ b/api/lanes.py @@ -1,5 +1,4 @@ import logging -from typing import Optional import core.classifier as genres from api.config import CannotLoadConfiguration, Configuration @@ -957,8 +956,8 @@ def works(self, *args, **kwargs): class WorkBasedLane(DynamicLane): """A lane that shows works related to one particular Work.""" - DISPLAY_NAME: Optional[str] = None - ROUTE: Optional[str] = None + DISPLAY_NAME: str | None = None + ROUTE: str | None = None def __init__(self, library, work, display_name=None, children=None, **kwargs): self.work = work diff --git a/api/millenium_patron.py b/api/millenium_patron.py index a5085a7b80..27e702b9c6 100644 --- a/api/millenium_patron.py +++ b/api/millenium_patron.py @@ -1,7 +1,6 @@ import datetime import re from enum import Enum -from typing import List, Optional, Type, Union from urllib import parse import dateutil @@ -73,7 +72,7 @@ def validate_neighborhood_mode(cls, v): ) # The field to use when seeing which values of MBLOCK[p56] mean a patron # is blocked. By default, any value other than '-' indicates a block. - block_types: Optional[str] = FormField( + block_types: str | None = FormField( None, form=ConfigurationFormItem( label="Block Types", @@ -84,7 +83,7 @@ def validate_neighborhood_mode(cls, v): # Identifiers that contain any of these strings are ignored when # finding the "correct" identifier in a patron's record, even if # it means they end up with no identifier at all. - identifier_blacklist: List[str] = FormField( + identifier_blacklist: list[str] = FormField( [], form=ConfigurationFormItem( label="Identifier Blacklist", @@ -178,11 +177,11 @@ def description(cls) -> str: return _("III Millenium Patron API") @classmethod - def settings_class(cls) -> Type[MilleniumPatronSettings]: + def settings_class(cls) -> type[MilleniumPatronSettings]: return MilleniumPatronSettings @classmethod - def library_settings_class(cls) -> Type[MilleniumPatronLibrarySettings]: + def library_settings_class(cls) -> type[MilleniumPatronLibrarySettings]: return MilleniumPatronLibrarySettings ERROR_MESSAGE_FIELD = "ERRMSG" @@ -212,7 +211,7 @@ def __init__( integration_id: int, settings: MilleniumPatronSettings, library_settings: MilleniumPatronLibrarySettings, - analytics: Optional[Analytics] = None, + analytics: Analytics | None = None, ): super().__init__( library_id, integration_id, settings, library_settings, analytics @@ -243,8 +242,8 @@ def _request(self, path): """Make an HTTP request and parse the response.""" def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None: """Does the Millenium Patron API approve of these credentials? :return: False if the credentials are invalid. If they are @@ -282,8 +281,8 @@ def remote_authenticate( return None def _remote_authenticate_pintest( - self, username: str, password: Optional[str] - ) -> Optional[PatronData]: + self, username: str, password: str | None + ) -> PatronData | None: # Patrons are authenticated with a secret PIN. # # The PIN is URL-encoded. 
The username is not: as far as @@ -330,8 +329,8 @@ def family_name_match(self, actual_name, supposed_family_name): return False def remote_patron_lookup( - self, patron_or_patrondata_or_identifier: Union[PatronData, Patron, str] - ) -> Optional[PatronData]: + self, patron_or_patrondata_or_identifier: PatronData | Patron | str + ) -> PatronData | None: if isinstance(patron_or_patrondata_or_identifier, str): identifier = patron_or_patrondata_or_identifier else: @@ -394,7 +393,7 @@ def _patron_block_reason(cls, block_types, mblock_value): return PatronData.NO_VALUE @classmethod - def _code_from_field(cls, field_name: Optional[str]) -> Optional[str]: + def _code_from_field(cls, field_name: str | None) -> str | None: """Convert a Millenium property key to its code. A field name may comprise a label and a code or just a code. diff --git a/api/model/time_tracking.py b/api/model/time_tracking.py index 283e14df7a..7a76755405 100644 --- a/api/model/time_tracking.py +++ b/api/model/time_tracking.py @@ -1,6 +1,6 @@ import datetime import logging -from typing import Any, Dict, List, Optional +from typing import Any from pydantic import Field, validator @@ -44,13 +44,13 @@ def validate_seconds_played(cls, value: int): class PlaytimeEntriesPost(CustomBaseModel): - book_id: Optional[str] = Field( + book_id: str | None = Field( description="An identifier of a book (currently ignored)." ) - library_id: Optional[str] = Field( + library_id: str | None = Field( description="And identifier for the library (currently ignored)." ) - time_entries: List[PlaytimeTimeEntry] = Field(description="A List of time entries") + time_entries: list[PlaytimeTimeEntry] = Field(description="A List of time entries") class PlaytimeEntriesPostSummary(CustomBaseModel): @@ -60,7 +60,7 @@ class PlaytimeEntriesPostSummary(CustomBaseModel): class PlaytimeEntriesPostResponse(CustomBaseModel): - responses: List[Dict[str, Any]] = Field( + responses: list[dict[str, Any]] = Field( description="Responses as part of the multi-reponse" ) summary: PlaytimeEntriesPostSummary = Field( diff --git a/api/monitor.py b/api/monitor.py index 6e9df5075a..444a04e157 100644 --- a/api/monitor.py +++ b/api/monitor.py @@ -1,5 +1,3 @@ -from typing import Type - from sqlalchemy import and_, or_ from api.odl import ODLAPI @@ -56,7 +54,7 @@ def where_clause(self): class LoanReaper(LoanlikeReaperMonitor): """Remove expired and abandoned loans from the database.""" - MODEL_CLASS: Type[Loan] = Loan + MODEL_CLASS: type[Loan] = Loan MAX_AGE = 90 @property diff --git a/api/odl.py b/api/odl.py index a7596f26df..2a79c4ec06 100644 --- a/api/odl.py +++ b/api/odl.py @@ -5,7 +5,8 @@ import json import uuid from abc import ABC -from typing import Any, Callable, Dict, List, Literal, Optional, Tuple, Type, TypeVar +from collections.abc import Callable +from typing import Any, Literal, TypeVar import dateutil from dependency_injector.wiring import Provide, inject @@ -101,7 +102,7 @@ class ODLSettings(OPDSImporterSettings): ), ) - default_reservation_period: Optional[PositiveInt] = FormField( + default_reservation_period: PositiveInt | None = FormField( default=Collection.STANDARD_DEFAULT_RESERVATION_PERIOD, form=ConfigurationFormItem( label=_("Default Reservation Period (in Days)"), @@ -226,7 +227,7 @@ def __init__( self._hasher_factory = HasherFactory() self._credential_factory = LCPCredentialFactory() - self._hasher_instance: Optional[Hasher] = None + self._hasher_instance: Hasher | None = None def _get_hasher(self) -> Hasher: """Returns a Hasher instance @@ -241,7 +242,7 @@ 
def _get_hasher(self) -> Hasher: return self._hasher_instance - def _get(self, url: str, headers: Optional[Dict[str, str]] = None) -> Response: + def _get(self, url: str, headers: dict[str, str] | None = None) -> Response: """Make a normal HTTP request, but include an authentication header with the credentials for the collection. """ @@ -258,7 +259,7 @@ def _url_for(self, *args: Any, **kwargs: Any) -> str: """Wrapper around flask's url_for to be overridden for tests.""" return url_for(*args, **kwargs) - def get_license_status_document(self, loan: Loan) -> Dict[str, Any]: + def get_license_status_document(self, loan: Loan) -> dict[str, Any]: """Get the License Status Document for a loan. For a new loan, create a local loan with no external identifier and @@ -422,7 +423,7 @@ def checkout( ) def _checkout( - self, patron: Patron, licensepool: LicensePool, hold: Optional[Hold] = None + self, patron: Patron, licensepool: LicensePool, hold: Hold | None = None ) -> Loan: _db = Session.object_session(patron) @@ -510,9 +511,9 @@ def fulfill( @staticmethod def _find_content_link_and_type( - links: List[Dict[str, str]], - drm_scheme: Optional[str], - ) -> Tuple[Optional[str], Optional[str]]: + links: list[dict[str, str]], + drm_scheme: str | None, + ) -> tuple[str | None, str | None]: """Find a content link with the type information corresponding to the selected delivery mechanism. :param links: List of dict-like objects containing information about available links in the LCP license file @@ -741,7 +742,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: """Create a new hold.""" return self._place_hold(patron, licensepool) @@ -813,7 +814,7 @@ def _release_hold(self, hold: Hold) -> Literal[True]: self.update_licensepool(licensepool) return True - def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: + def patron_activity(self, patron: Patron, pin: str) -> list[LoanInfo | HoldInfo]: """Look up non-expired loans for this collection in the database.""" _db = Session.object_session(patron) loans = ( @@ -865,9 +866,7 @@ def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo] for hold in remaining_holds ] - def update_loan( - self, loan: Loan, status_doc: Optional[Dict[str, Any]] = None - ) -> None: + def update_loan(self, loan: Loan, status_doc: dict[str, Any] | None = None) -> None: """Check a loan's status, and if it is no longer active, delete the loan and update its pool's availability. 
""" @@ -919,11 +918,11 @@ class ODLAPI( """ @classmethod - def settings_class(cls) -> Type[ODLSettings]: + def settings_class(cls) -> type[ODLSettings]: return ODLSettings @classmethod - def library_settings_class(cls) -> Type[ODLLibrarySettings]: + def library_settings_class(cls) -> type[ODLLibrarySettings]: return ODLLibrarySettings @classmethod @@ -957,8 +956,8 @@ class BaseODLImporter(BaseOPDSImporter[SettingsType], ABC): @classmethod def fetch_license_info( - cls, document_link: str, do_get: Callable[..., Tuple[int, Any, bytes]] - ) -> Optional[Dict[str, Any]]: + cls, document_link: str, do_get: Callable[..., tuple[int, Any, bytes]] + ) -> dict[str, Any] | None: status_code, _, response = do_get(document_link, headers={}) if status_code in (200, 201): license_info_document = json.loads(response) @@ -973,10 +972,10 @@ def fetch_license_info( @classmethod def parse_license_info( cls, - license_info_document: Dict[str, Any], + license_info_document: dict[str, Any], license_info_link: str, - checkout_link: Optional[str], - ) -> Optional[LicenseData]: + checkout_link: str | None, + ) -> LicenseData | None: """Check the license's attributes passed as parameters: - if they're correct, turn them into a LicenseData object - otherwise, return a None @@ -1061,12 +1060,12 @@ def parse_license_info( def get_license_data( cls, license_info_link: str, - checkout_link: Optional[str], - feed_license_identifier: Optional[str], - feed_license_expires: Optional[datetime.datetime], - feed_concurrency: Optional[int], - do_get: Callable[..., Tuple[int, Any, bytes]], - ) -> Optional[LicenseData]: + checkout_link: str | None, + feed_license_identifier: str | None, + feed_license_expires: datetime.datetime | None, + feed_concurrency: int | None, + do_get: Callable[..., tuple[int, Any, bytes]], + ) -> LicenseData | None: license_info_document = cls.fetch_license_info(license_info_link, do_get) if not license_info_document: @@ -1127,7 +1126,7 @@ class ODLImporter(OPDSImporter, BaseODLImporter[ODLSettings]): LICENSE_INFO_DOCUMENT_MEDIA_TYPE = "application/vnd.odl.info+json" @classmethod - def settings_class(cls) -> Type[ODLSettings]: + def settings_class(cls) -> type[ODLSettings]: return ODLSettings @classmethod @@ -1135,9 +1134,9 @@ def _detail_for_elementtree_entry( cls, parser: OPDSXMLParser, entry_tag: Element, - feed_url: Optional[str] = None, - do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - ) -> Dict[str, Any]: + feed_url: str | None = None, + do_get: Callable[..., tuple[int, Any, bytes]] | None = None, + ) -> dict[str, Any]: do_get = do_get or Representation.cautious_http_get # TODO: Review for consistency when updated ODL spec is ready. @@ -1158,7 +1157,7 @@ def _detail_for_elementtree_entry( # By default, dcterms:format includes the media type of a # DRM-free resource. content_type = full_content_type - drm_schemes: List[str | None] = [] + drm_schemes: list[str | None] = [] # But it may instead describe an audiobook protected with # the Feedbooks access-control scheme. 
@@ -1259,7 +1258,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[OPDSImporter], + import_class: type[OPDSImporter], **import_class_kwargs: Any, ): # Always force reimport ODL collections to get up to date license information @@ -1279,7 +1278,7 @@ def __init__( self, _db: Session, collection: Collection, - api: Optional[ODLAPI] = None, + api: ODLAPI | None = None, **kwargs: Any, ): super().__init__(_db, collection, **kwargs) diff --git a/api/odl2.py b/api/odl2.py index 60a18c03b3..74777b250e 100644 --- a/api/odl2.py +++ b/api/odl2.py @@ -1,7 +1,8 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, Any, Callable, List, Optional, Tuple, Type +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from flask_babel import lazy_gettext as _ from pydantic import PositiveInt @@ -38,7 +39,7 @@ class ODL2Settings(ODLSettings, OPDS2ImporterSettings): - skipped_license_formats: List[str] = FormField( + skipped_license_formats: list[str] = FormField( default=["text/html"], alias="odl2_skipped_license_formats", form=ConfigurationFormItem( @@ -51,7 +52,7 @@ class ODL2Settings(ODLSettings, OPDS2ImporterSettings): ), ) - loan_limit: Optional[PositiveInt] = FormField( + loan_limit: PositiveInt | None = FormField( default=None, alias="odl2_loan_limit", form=ConfigurationFormItem( @@ -64,7 +65,7 @@ class ODL2Settings(ODLSettings, OPDS2ImporterSettings): ), ) - hold_limit: Optional[PositiveInt] = FormField( + hold_limit: PositiveInt | None = FormField( default=None, alias="odl2_hold_limit", form=ConfigurationFormItem( @@ -80,11 +81,11 @@ class ODL2Settings(ODLSettings, OPDS2ImporterSettings): class ODL2API(BaseODLAPI[ODL2Settings, ODLLibrarySettings]): @classmethod - def settings_class(cls) -> Type[ODL2Settings]: + def settings_class(cls) -> type[ODL2Settings]: return ODL2Settings @classmethod - def library_settings_class(cls) -> Type[ODLLibrarySettings]: + def library_settings_class(cls) -> type[ODLLibrarySettings]: return ODLLibrarySettings @classmethod @@ -101,7 +102,7 @@ def __init__(self, _db: Session, collection: Collection) -> None: self.hold_limit = self.settings.hold_limit def _checkout( - self, patron: Patron, licensepool: LicensePool, hold: Optional[Hold] = None + self, patron: Patron, licensepool: LicensePool, hold: Hold | None = None ) -> Loan: # If the loan limit is not None or 0 if self.loan_limit: @@ -139,16 +140,16 @@ class ODL2Importer(BaseODLImporter[ODL2Settings], OPDS2Importer): NAME = ODL2API.label() @classmethod - def settings_class(cls) -> Type[ODL2Settings]: + def settings_class(cls) -> type[ODL2Settings]: return ODL2Settings def __init__( self, db: Session, collection: Collection, - parser: Optional[RWPMManifestParser] = None, + parser: RWPMManifestParser | None = None, data_source_name: str | None = None, - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): """Initialize a new instance of ODL2Importer class. @@ -183,7 +184,7 @@ def _extract_publication_metadata( self, feed: OPDS2Feed, publication: OPDS2Publication, - data_source_name: Optional[str], + data_source_name: str | None, ) -> Metadata: """Extract a Metadata object from webpub-manifest-parser's publication. 
@@ -254,7 +255,7 @@ def _extract_publication_metadata( if not medium: medium = Edition.medium_from_media_type(license_format) - drm_schemes: List[str | None] + drm_schemes: list[str | None] if license_format in self.LICENSE_FORMATS: # Special case to handle DeMarque audiobooks which include the protection # in the content type. When we see a license format of @@ -304,7 +305,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[ODL2Importer], + import_class: type[ODL2Importer], **import_class_kwargs: Any, ) -> None: # Always force reimport ODL collections to get up to date license information diff --git a/api/opds_for_distributors.py b/api/opds_for_distributors.py index 8df2bd7f4c..dce7f2ea82 100644 --- a/api/opds_for_distributors.py +++ b/api/opds_for_distributors.py @@ -2,7 +2,8 @@ import datetime import json -from typing import TYPE_CHECKING, Any, Dict, Generator, List, Optional, Set, Tuple, Type +from collections.abc import Generator +from typing import TYPE_CHECKING, Any import feedparser from flask_babel import lazy_gettext as _ @@ -86,11 +87,11 @@ class OPDSForDistributorsAPI( ] @classmethod - def settings_class(cls) -> Type[OPDSForDistributorsSettings]: + def settings_class(cls) -> type[OPDSForDistributorsSettings]: return OPDSForDistributorsSettings @classmethod - def library_settings_class(cls) -> Type[OPDSForDistributorsLibrarySettings]: + def library_settings_class(cls) -> type[OPDSForDistributorsLibrarySettings]: return OPDSForDistributorsLibrarySettings @classmethod @@ -109,14 +110,14 @@ def __init__(self, _db: Session, collection: Collection): self.username = settings.username self.password = settings.password self.feed_url = settings.external_account_id - self.auth_url: Optional[str] = None + self.auth_url: str | None = None def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None]: """Try to get a token.""" yield self.run_test("Negotiate a fulfillment token", self._get_token, _db) def _request_with_timeout( - self, method: str, url: Optional[str], *args: Any, **kwargs: Any + self, method: str, url: str | None, *args: Any, **kwargs: Any ) -> Response: """Wrapper around HTTP.request_with_timeout to be overridden for tests.""" if url is None: @@ -220,7 +221,7 @@ def refresh(credential: Credential) -> None: def can_fulfill_without_loan( self, - patron: Optional[Patron], + patron: Patron | None, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism, ) -> bool: @@ -342,7 +343,7 @@ def fulfill( content_expires=credential.expires, ) - def patron_activity(self, patron: Patron, pin: str) -> List[LoanInfo | HoldInfo]: + def patron_activity(self, patron: Patron, pin: str) -> list[LoanInfo | HoldInfo]: # Look up loans for this collection in the database. _db = Session.object_session(patron) loans = ( @@ -373,7 +374,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: # All the books for this integration are available as simultaneous # use, so there's no need to place a hold. 
@@ -387,7 +388,7 @@ class OPDSForDistributorsImporter(OPDSImporter): NAME = OPDSForDistributorsAPI.label() @classmethod - def settings_class(cls) -> Type[OPDSForDistributorsSettings]: + def settings_class(cls) -> type[OPDSForDistributorsSettings]: return OPDSForDistributorsSettings def update_work_for_edition( @@ -426,7 +427,7 @@ def _add_format_data(cls, circulation: CirculationData) -> None: def extract_feed_data( self, feed: str | bytes, feed_url: str | None = None - ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure]]]: metadatas, failures = super().extract_feed_data(feed, feed_url) # Force all audiobook licensepools to track playtime @@ -452,7 +453,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[OPDSImporter], + import_class: type[OPDSImporter], **kwargs: Any, ) -> None: super().__init__(_db, collection, import_class, **kwargs) @@ -460,8 +461,8 @@ def __init__( self.api = OPDSForDistributorsAPI(_db, collection) def _get( - self, url: str, headers: Dict[str, str] - ) -> Tuple[int, Dict[str, str], bytes]: + self, url: str, headers: dict[str, str] + ) -> tuple[int, dict[str, str], bytes]: """Make a normal HTTP request for an OPDS feed, but add in an auth header with the credentials for the collection. """ @@ -484,11 +485,11 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[OPDSImporter], + import_class: type[OPDSImporter], **kwargs: Any, ) -> None: super().__init__(_db, collection, import_class, **kwargs) - self.seen_identifiers: Set[str] = set() + self.seen_identifiers: set[str] = set() def feed_contains_new_data(self, feed: bytes | str) -> bool: # Always return True so that the importer will crawl the @@ -497,7 +498,7 @@ def feed_contains_new_data(self, feed: bytes | str) -> bool: def import_one_feed( self, feed: bytes | str - ) -> Tuple[List[Edition], Dict[str, List[CoverageFailure]]]: + ) -> tuple[list[Edition], dict[str, list[CoverageFailure]]]: # Collect all the identifiers in the feed. parsed_feed = feedparser.parse(feed) identifiers = [entry.get("id") for entry in parsed_feed.get("entries", [])] diff --git a/api/overdrive.py b/api/overdrive.py index 9c988a7b56..9e658b340f 100644 --- a/api/overdrive.py +++ b/api/overdrive.py @@ -9,7 +9,7 @@ import time import urllib.parse from threading import RLock -from typing import Any, Dict, List, Set, Tuple, Union +from typing import Any from urllib.parse import quote, urlsplit, urlunsplit import dateutil @@ -129,7 +129,7 @@ class OverdriveConstants: class OverdriveSettings(ConnectionSetting, BaseCirculationApiSettings): """The basic Overdrive configuration""" - external_account_id: Optional[str] = FormField( + external_account_id: str | None = FormField( form=ConfigurationFormItem( label=_("Library ID"), type=ConfigurationFormItemType.TEXT, @@ -190,7 +190,7 @@ class OverdriveLibrarySettings(BaseCirculationEbookLoanSettings): class OverdriveChildSettings(BaseSettings): - external_account_id: Optional[str] = FormField( + external_account_id: str | None = FormField( form=ConfigurationFormItem( label=_("Library ID"), required=True, @@ -413,7 +413,7 @@ def __init__(self, _db, collection): def settings(self) -> OverdriveSettings: return self._settings - def _determine_hosts(self, *, server_nickname: str) -> Dict[str, str]: + def _determine_hosts(self, *, server_nickname: str) -> dict[str, str]: # Figure out which hostnames we'll be using when constructing # endpoint URLs. 
if server_nickname not in self.HOSTS: @@ -530,7 +530,7 @@ def refresh_creds(self, credential): def get( self, url: str, extra_headers={}, exception_on_401=False - ) -> Tuple[int, CaseInsensitiveDict, bytes]: + ) -> tuple[int, CaseInsensitiveDict, bytes]: """Make an HTTP GET request using the active Bearer Token.""" request_headers = dict(Authorization="Bearer %s" % self.token) request_headers.update(extra_headers) @@ -585,7 +585,7 @@ def fulfillment_authorization_header(self) -> str: def token_post( self, url: str, - payload: Dict[str, str], + payload: dict[str, str], is_fulfillment=False, headers={}, **kwargs, @@ -806,7 +806,7 @@ def client_secret(self) -> bytes: def library_id(self) -> str: return self._library_id - def hosts(self) -> Dict[str, str]: + def hosts(self) -> dict[str, str]: return dict(self._hosts) def _run_self_tests(self, _db): @@ -900,7 +900,7 @@ def patron_request( return response def get_patron_credential( - self, patron: Patron, pin: Optional[str], is_fulfillment=False + self, patron: Patron, pin: str | None, is_fulfillment=False ) -> Credential: """Create an OAuth token for the given patron. @@ -1179,8 +1179,8 @@ def raise_exception_on_error(self, data, custom_error_to_exception={}): raise d[error](message) def get_loan( - self, patron: Patron, pin: Optional[str], overdrive_id: str - ) -> Dict[str, Any]: + self, patron: Patron, pin: str | None, overdrive_id: str + ) -> dict[str, Any]: """Get patron's loan information for the identified item. :param patron: A patron. @@ -1263,8 +1263,8 @@ def fulfill( ) def get_fulfillment_link( - self, patron: Patron, pin: Optional[str], overdrive_id: str, format_type: str - ) -> Union[OverdriveManifestFulfillmentInfo, Tuple[str, str]]: + self, patron: Patron, pin: str | None, overdrive_id: str, format_type: str + ) -> OverdriveManifestFulfillmentInfo | tuple[str, str]: """Get the link to the ACSM or manifest for an existing loan.""" try: loan = self.get_loan(patron, pin, overdrive_id) @@ -1343,7 +1343,7 @@ def get_fulfillment_link( def get_fulfillment_link_from_download_link( self, patron, pin, download_link, fulfill_url=None - ) -> Tuple[str, str]: + ) -> tuple[str, str]: # If this for Overdrive's streaming reader, and the link expires, # the patron can go back to the circulation manager fulfill url # again to get a new one. @@ -1403,9 +1403,7 @@ def get_patron_information(self, patron, pin): self.raise_exception_on_error(data) return data - def get_patron_checkouts( - self, patron: Patron, pin: Optional[str] - ) -> Dict[str, Any]: + def get_patron_checkouts(self, patron: Patron, pin: str | None) -> dict[str, Any]: """Get information for the given patron's loans. :param patron: A patron. @@ -1480,7 +1478,7 @@ def patron_activity(self, patron, pin): ) @classmethod - def process_checkout_data(cls, checkout: Dict[str, Any], collection: Collection): + def process_checkout_data(cls, checkout: dict[str, Any], collection: Collection): """Convert one checkout from Overdrive's list of checkouts into a LoanInfo object. 
@@ -2257,7 +2255,7 @@ def internal_formats(cls, overdrive_format): else: yield result - ignorable_overdrive_formats: Set[str] = set() + ignorable_overdrive_formats: set[str] = set() overdrive_role_to_simplified_role = { "actor": Contributor.ACTOR_ROLE, @@ -2390,8 +2388,8 @@ def book_info_to_circulation(self, book): ) def _get_applicable_accounts( - self, accounts: List[Dict[str, Any]] - ) -> List[Dict[str, Any]]: + self, accounts: list[dict[str, Any]] + ) -> list[dict[str, Any]]: """ Returns those accounts from the accounts array that apply the current overdrive collection context. @@ -2457,7 +2455,7 @@ def book_info_to_metadata( # Otherwise we'll probably give it a fraction of this weight. trusted_weight = Classification.TRUSTED_DISTRIBUTOR_WEIGHT - duration: Optional[int] = None + duration: int | None = None if include_bibliographic: title = book.get("title", None) @@ -2578,7 +2576,7 @@ def book_info_to_metadata( links = [] sample_hrefs = set() for format in book.get("formats", []): - duration_str: Optional[str] = format.get("duration") + duration_str: str | None = format.get("duration") if duration_str is not None: # Using this method only the last valid duration attribute is captured # If there are multiple formats with different durations, the edition will ignore the rest @@ -2938,7 +2936,7 @@ class GenerateOverdriveAdvantageAccountList(InputScript): def __init__(self, _db=None, *args, **kwargs): super().__init__(_db, *args, **kwargs) - self._data: List[List[str]] = list() + self._data: list[list[str]] = list() def _create_overdrive_api(self, collection: Collection): return OverdriveAPI(_db=self._db, collection=collection) diff --git a/api/s3_analytics_provider.py b/api/s3_analytics_provider.py index e604a9a6cf..48f9038b31 100644 --- a/api/s3_analytics_provider.py +++ b/api/s3_analytics_provider.py @@ -4,7 +4,7 @@ import json import random import string -from typing import TYPE_CHECKING, Dict, Optional +from typing import TYPE_CHECKING from core.config import CannotLoadConfiguration from core.local_analytics_provider import LocalAnalyticsProvider @@ -17,7 +17,7 @@ class S3AnalyticsProvider(LocalAnalyticsProvider): """Analytics provider storing data in a S3 bucket.""" - def __init__(self, s3_service: Optional[S3Service]): + def __init__(self, s3_service: S3Service | None): self.s3_service = s3_service @staticmethod @@ -28,8 +28,8 @@ def _create_event_object( time: datetime.datetime, old_value, new_value, - neighborhood: Optional[str] = None, - ) -> Dict: + neighborhood: str | None = None, + ) -> dict: """Create a Python dict containing required information about the event. 
:param library: Library associated with the event @@ -189,10 +189,10 @@ def collect_event( def _get_file_key( self, library: Library, - license_pool: Optional[LicensePool], + license_pool: LicensePool | None, event_type: str, end_time: datetime.datetime, - start_time: Optional[datetime.datetime] = None, + start_time: datetime.datetime | None = None, ): """The path to the analytics data file for the given library, license pool and date range.""" diff --git a/api/saml/configuration/model.py b/api/saml/configuration/model.py index 7d6b50b5e9..f19f43e43c 100644 --- a/api/saml/configuration/model.py +++ b/api/saml/configuration/model.py @@ -1,7 +1,8 @@ import html from datetime import datetime +from re import Pattern from threading import Lock -from typing import Any, Dict, List, Optional, Pattern +from typing import Any from flask_babel import lazy_gettext as _ from onelogin.saml2.settings import OneLogin_Saml2_Settings @@ -118,7 +119,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="sp_private_key", ) - federated_identity_provider_entity_ids: Optional[List[str]] = FormField( + federated_identity_provider_entity_ids: list[str] | None = FormField( None, form=ConfigurationFormItem( label="List of Federated IdPs", @@ -148,7 +149,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="saml_patron_id_use_name_id", ) - patron_id_attributes: Optional[List[str]] = FormField( + patron_id_attributes: list[str] | None = FormField( [ SAMLAttributeType.eduPersonUniqueId.name, SAMLAttributeType.eduPersonTargetedID.name, @@ -170,7 +171,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): alias="saml_patron_id_attributes", format="narrow", ) - patron_id_regular_expression: Optional[Pattern] = FormField( + patron_id_regular_expression: Pattern | None = FormField( None, form=ConfigurationFormItem( label="Patron ID: Regular expression", @@ -194,7 +195,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="saml_patron_id_regular_expression", ) - non_federated_identity_provider_xml_metadata: Optional[str] = FormField( + non_federated_identity_provider_xml_metadata: str | None = FormField( None, form=ConfigurationFormItem( label="Identity Provider's XML metadata", @@ -208,7 +209,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="idp_xml_metadata", ) - session_lifetime: Optional[PositiveInt] = FormField( + session_lifetime: PositiveInt | None = FormField( None, form=ConfigurationFormItem( label="Session Lifetime", @@ -226,7 +227,7 @@ class SAMLWebSSOAuthSettings(AuthProviderSettings, LoggerMixin): ), alias="saml_session_lifetime", ) - filter_expression: Optional[str] = FormField( + filter_expression: str | None = FormField( None, form=ConfigurationFormItem( label="Filter Expression", @@ -392,17 +393,17 @@ def __init__(self, configuration: SAMLWebSSOAuthSettings): :param configuration: Configuration object containing SAML metadata """ self._configuration = configuration - self._service_provider_loaded: Optional[SAMLServiceProviderMetadata] = None - self._service_provider_settings: Optional[Dict[str, Any]] = None - self._identity_providers_loaded: Optional[ - List[SAMLIdentityProviderMetadata] - ] = None - self._identity_providers_settings: Dict[str, Dict[str, Any]] = {} + self._service_provider_loaded: SAMLServiceProviderMetadata | None = None + self._service_provider_settings: dict[str, Any] | None = None + self._identity_providers_loaded: None | ( + list[SAMLIdentityProviderMetadata] + ) = 
None + self._identity_providers_settings: dict[str, dict[str, Any]] = {} self._metadata_parser = SAMLMetadataParser() def _get_federated_identity_providers( self, db: Session - ) -> List[SAMLFederatedIdentityProvider]: + ) -> list[SAMLFederatedIdentityProvider]: """Return a list of federated IdPs corresponding to the entity IDs selected by the admin. :param db: Database session @@ -424,7 +425,7 @@ def _get_federated_identity_providers( def _load_identity_providers( self, db: Session - ) -> List[SAMLIdentityProviderMetadata]: + ) -> list[SAMLIdentityProviderMetadata]: """Loads IdP settings from the library's configuration settings :param db: Database session @@ -484,7 +485,7 @@ def _load_service_provider(self) -> SAMLServiceProviderMetadata: return service_provider - def get_identity_providers(self, db: Session) -> List[SAMLIdentityProviderMetadata]: + def get_identity_providers(self, db: Session) -> list[SAMLIdentityProviderMetadata]: """Returns identity providers :param db: Database session @@ -512,7 +513,7 @@ def get_service_provider(self) -> SAMLServiceProviderMetadata: def _get_identity_provider_settings( self, identity_provider: SAMLIdentityProviderMetadata - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Converts ServiceProviderMetadata object to the OneLogin's SAML Toolkit format :param identity_provider: IdentityProviderMetadata object @@ -561,7 +562,7 @@ def _get_identity_provider_settings( def _get_service_provider_settings( self, service_provider: SAMLServiceProviderMetadata - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Converts ServiceProviderMetadata object to the OneLogin's SAML Toolkit format :param service_provider: ServiceProviderMetadata object @@ -600,7 +601,7 @@ def configuration(self) -> SAMLWebSSOAuthSettings: def get_identity_provider_settings( self, db: Session, idp_entity_id: str - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Returns a dictionary containing identity provider's settings in a OneLogin's SAML Toolkit format :param db: Database session @@ -641,7 +642,7 @@ def get_identity_provider_settings( return identity_provider - def get_service_provider_settings(self) -> Dict[str, Any]: + def get_service_provider_settings(self) -> dict[str, Any]: """Returns a dictionary containing service provider's settings in the OneLogin's SAML Toolkit format :param db: Database session @@ -657,7 +658,7 @@ def get_service_provider_settings(self) -> Dict[str, Any]: return self._service_provider_settings - def get_settings(self, db: Session, idp_entity_id: str) -> Dict[str, Any]: + def get_settings(self, db: Session, idp_entity_id: str) -> dict[str, Any]: """Returns a dictionary containing SP's and IdP's settings in the OneLogin's SAML Toolkit format :param db: Database session @@ -665,7 +666,7 @@ def get_settings(self, db: Session, idp_entity_id: str) -> Dict[str, Any]: :return: Dictionary containing SP's and IdP's settings in the OneLogin's SAML Toolkit format """ - onelogin_settings: Dict[str, Any] = { + onelogin_settings: dict[str, Any] = { self.DEBUG: self._configuration.service_provider_debug_mode, self.STRICT: self._configuration.service_provider_strict_mode, } diff --git a/api/saml/credential.py b/api/saml/credential.py index f3d212baf2..a087372206 100644 --- a/api/saml/credential.py +++ b/api/saml/credential.py @@ -2,7 +2,6 @@ import json import logging from copy import deepcopy -from typing import Dict, Optional import sqlalchemy @@ -87,7 +86,7 @@ def create_saml_token( db: sqlalchemy.orm.session.Session, patron: Patron, subject: SAMLSubject, - cm_session_lifetime: 
Optional[int] = None, + cm_session_lifetime: int | None = None, ) -> Credential: """Create a Credential object that ties the given patron to the given provider token. @@ -113,7 +112,7 @@ def create_saml_token( def lookup_saml_token_by_patron( self, db: sqlalchemy.orm.session.Session, patron: Patron - ) -> Optional[Credential]: + ) -> Credential | None: """Look up for a SAML token. :param db: Database session @@ -136,8 +135,8 @@ def lookup_saml_token_by_patron( return credential def lookup_saml_token_by_value( - self, db: sqlalchemy.orm.session.Session, token: Dict - ) -> Optional[Credential]: + self, db: sqlalchemy.orm.session.Session, token: dict + ) -> Credential | None: """Look up for a SAML token. :param db: Database session diff --git a/api/saml/metadata/federations/model.py b/api/saml/metadata/federations/model.py index 4e40b9b3db..2bcde0f1d9 100644 --- a/api/saml/metadata/federations/model.py +++ b/api/saml/metadata/federations/model.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import List - from sqlalchemy import Column, DateTime, ForeignKey, Integer, String, Text from sqlalchemy.orm import Mapped, relationship @@ -20,7 +18,7 @@ class SAMLFederation(Base): certificate = Column(Text(), nullable=True) - identity_providers: Mapped[List[SAMLFederatedIdentityProvider]] = relationship( + identity_providers: Mapped[list[SAMLFederatedIdentityProvider]] = relationship( "SAMLFederatedIdentityProvider", back_populates="federation" ) diff --git a/api/saml/metadata/federations/validator.py b/api/saml/metadata/federations/validator.py index 09f2f39c6e..1fab512c65 100644 --- a/api/saml/metadata/federations/validator.py +++ b/api/saml/metadata/federations/validator.py @@ -1,7 +1,6 @@ import datetime import logging from abc import ABCMeta -from typing import Union from onelogin.saml2.utils import OneLogin_Saml2_Utils from onelogin.saml2.xmlparser import fromstring @@ -92,7 +91,7 @@ def _parse_saml_date_time(saml_date_time): return parsed_date_time - def validate(self, federation: SAMLFederation, metadata: Union[str, bytes]) -> None: + def validate(self, federation: SAMLFederation, metadata: str | bytes) -> None: """Verify that federated SAML metadata has not expired. 
:param federation: SAML federation diff --git a/api/saml/metadata/model.py b/api/saml/metadata/model.py index 767d5e6aa9..8a7bae5fce 100644 --- a/api/saml/metadata/model.py +++ b/api/saml/metadata/model.py @@ -3,7 +3,8 @@ from enum import Enum from json import JSONDecoder, JSONEncoder from json.decoder import WHITESPACE # type: ignore -from typing import Any, List, Optional, Pattern, Union +from re import Pattern +from typing import Any from onelogin.saml2.constants import OneLogin_Saml2_Constants @@ -786,7 +787,7 @@ def __init__( self, name_format: str, name_qualifier: str, - sp_name_qualifier: Optional[str], + sp_name_qualifier: str | None, name_id: str, ) -> None: """Initializes a new instance of NameID class @@ -850,7 +851,7 @@ def name_qualifier(self) -> str: return self._name_qualifier @property - def sp_name_qualifier(self) -> Optional[str]: + def sp_name_qualifier(self) -> str | None: """Returns the attribute that further qualifies a federated name identifier with the name of the service provider or affiliation of providers which has federated the principal's identity @@ -1045,9 +1046,9 @@ class SAMLSubject: def __init__( self, idp: str, - name_id: Optional[SAMLNameID], - attribute_statement: Optional[SAMLAttributeStatement], - valid_till: Optional[Union[datetime.datetime, datetime.timedelta, int]] = None, + name_id: SAMLNameID | None, + attribute_statement: SAMLAttributeStatement | None, + valid_till: datetime.datetime | datetime.timedelta | int | None = None, ): """Initializes a new instance of Subject class @@ -1060,7 +1061,7 @@ def __init__( - https://wiki.shibboleth.net/confluence/display/IDP30/SessionConfiguration """ self._idp = idp - self._name_id: Optional[SAMLNameID] = name_id + self._name_id: SAMLNameID | None = name_id self._attribute_statement = attribute_statement self._valid_till = valid_till @@ -1112,7 +1113,7 @@ def idp(self) -> str: return self._idp @property - def name_id(self) -> Optional[SAMLNameID]: + def name_id(self) -> SAMLNameID | None: """Return the name ID. :return: Name ID @@ -1120,7 +1121,7 @@ def name_id(self) -> Optional[SAMLNameID]: return self._name_id @name_id.setter - def name_id(self, value: Optional[SAMLNameID]) -> None: + def name_id(self, value: SAMLNameID | None) -> None: """Set the name ID. :param value: New name ID @@ -1290,8 +1291,8 @@ class SAMLSubjectPatronIDExtractor: def __init__( self, use_name_id: bool = True, - attributes: Optional[List[str]] = None, - regular_expression: Optional[Pattern] = None, + attributes: list[str] | None = None, + regular_expression: Pattern | None = None, ): """Initialize a new instance of SAMLSubjectPatronIDExtractor class. 
diff --git a/api/saml/metadata/parser.py b/api/saml/metadata/parser.py index 946b9950d6..d4f14a31da 100644 --- a/api/saml/metadata/parser.py +++ b/api/saml/metadata/parser.py @@ -1,5 +1,4 @@ import logging -from typing import Union from flask_babel import lazy_gettext as _ from lxml.etree import XMLSyntaxError @@ -90,7 +89,7 @@ def __init__(self, skip_incorrect_providers=False): ] = OneLogin_Saml2_Constants.NS_ALG def _convert_xml_string_to_dom( - self, xml_metadata: Union[str, bytes] + self, xml_metadata: str | bytes ) -> RestrictedElement: """Converts an XML string containing SAML metadata into XML DOM diff --git a/api/saml/provider.py b/api/saml/provider.py index 17271dc136..819b4e0581 100644 --- a/api/saml/provider.py +++ b/api/saml/provider.py @@ -1,5 +1,3 @@ -from typing import Optional, Type - from flask import url_for from flask_babel import lazy_gettext as _ from werkzeug.datastructures import Authorization @@ -45,7 +43,7 @@ def __init__( integration_id: int, settings: SAMLWebSSOAuthSettings, library_settings: SAMLWebSSOAuthLibrarySettings, - analytics: Optional[Analytics] = None, + analytics: Analytics | None = None, ): """Initializes a new instance of SAMLAuthenticationProvider class""" super().__init__( @@ -76,14 +74,14 @@ def identifies_individuals(self): return True @classmethod - def settings_class(cls) -> Type[SAMLWebSSOAuthSettings]: + def settings_class(cls) -> type[SAMLWebSSOAuthSettings]: return SAMLWebSSOAuthSettings @classmethod - def library_settings_class(cls) -> Type[SAMLWebSSOAuthLibrarySettings]: + def library_settings_class(cls) -> type[SAMLWebSSOAuthLibrarySettings]: return SAMLWebSSOAuthLibrarySettings - def get_credential_from_header(self, auth: Authorization) -> Optional[str]: + def get_credential_from_header(self, auth: Authorization) -> str | None: # We cannot extract the credential from the header, so we just return None return None diff --git a/api/selftest.py b/api/selftest.py index dfc3bbb2b6..8a5d4b048d 100644 --- a/api/selftest.py +++ b/api/selftest.py @@ -1,7 +1,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Generator, Iterable, Optional, Tuple, Union +from collections.abc import Generator, Iterable from sqlalchemy.orm.session import Session @@ -30,7 +30,7 @@ class _NoValidLibrarySelfTestPatron(BaseError): detail (optional) -- additional explanation of the error """ - def __init__(self, message: Optional[str], *, detail: Optional[str] = None): + def __init__(self, message: str | None, *, detail: str | None = None): super().__init__(message=message) self.message = message self.detail = detail @@ -38,7 +38,7 @@ def __init__(self, message: Optional[str], *, detail: Optional[str] = None): @classmethod def default_patrons( cls, collection: Collection - ) -> Iterable[Union[Tuple[Library, Patron, Optional[str]], SelfTestResult]]: + ) -> Iterable[tuple[Library, Patron, str | None] | SelfTestResult]: """Find a usable default Patron for each of the libraries associated with the given Collection. @@ -74,7 +74,7 @@ def default_patrons( @classmethod def _determine_self_test_patron( cls, library: Library, _db=None - ) -> Tuple[Patron, Optional[str]]: + ) -> tuple[Patron, str | None]: """Obtain the test Patron and optional password for a library's self-tests. :param library: The library being tested. @@ -96,8 +96,8 @@ def _determine_self_test_patron( # If we get here, then we have failed to find a valid test patron # and will raise an exception. 
- message: Optional[str] - detail: Optional[str] + message: str | None + detail: str | None if patron is None: message = "Library has no test patron configured." detail = ( diff --git a/api/simple_authentication.py b/api/simple_authentication.py index 1cdc5b0b8c..675e72027d 100644 --- a/api/simple_authentication.py +++ b/api/simple_authentication.py @@ -1,5 +1,3 @@ -from typing import List, Optional, Type, Union - from api.authentication.base import PatronData from api.authentication.basic import ( BasicAuthenticationProvider, @@ -32,7 +30,7 @@ class SimpleAuthSettings(BasicAuthProviderSettings): description="A test password to use when testing the authentication provider.", ), ) - additional_test_identifiers: Optional[List[str]] = FormField( + additional_test_identifiers: list[str] | None = FormField( None, form=ConfigurationFormItem( label="Additional test identifiers", @@ -41,7 +39,7 @@ class SimpleAuthSettings(BasicAuthProviderSettings): type=ConfigurationFormItemType.LIST, ), ) - neighborhood: Optional[str] = FormField( + neighborhood: str | None = FormField( None, form=ConfigurationFormItem( label="Test neighborhood", @@ -72,11 +70,11 @@ def description(cls) -> str: ) @classmethod - def settings_class(cls) -> Type[SimpleAuthSettings]: + def settings_class(cls) -> type[SimpleAuthSettings]: return SimpleAuthSettings @classmethod - def library_settings_class(cls) -> Type[BasicAuthProviderLibrarySettings]: + def library_settings_class(cls) -> type[BasicAuthProviderLibrarySettings]: return BasicAuthProviderLibrarySettings def __init__( @@ -85,7 +83,7 @@ def __init__( integration_id: int, settings: SimpleAuthSettings, library_settings: BasicAuthProviderLibrarySettings, - analytics: Optional[Analytics] = None, + analytics: Analytics | None = None, ): super().__init__( library_id, integration_id, settings, library_settings, analytics @@ -105,8 +103,8 @@ def __init__( self.test_neighborhood = settings.neighborhood def remote_authenticate( - self, username: Optional[str], password: Optional[str] - ) -> Optional[PatronData]: + self, username: str | None, password: str | None + ) -> PatronData | None: """Fake 'remote' authentication.""" if not username or (self.collects_password and not password): return None @@ -118,7 +116,7 @@ def remote_authenticate( @classmethod def generate_patrondata( - cls, authorization_identifier: str, neighborhood: Optional[str] = None + cls, authorization_identifier: str, neighborhood: str | None = None ) -> PatronData: if authorization_identifier.endswith("_username"): username = authorization_identifier @@ -140,7 +138,7 @@ def generate_patrondata( ) return patrondata - def valid_patron(self, username: str, password: Optional[str]) -> bool: + def valid_patron(self, username: str, password: str | None) -> bool: """Is this patron associated with the given password in the given dictionary? 
""" @@ -151,8 +149,8 @@ def valid_patron(self, username: str, password: Optional[str]) -> bool: return password_match and username in self.test_identifiers def remote_patron_lookup( - self, patron_or_patrondata: Union[Patron, PatronData] - ) -> Optional[PatronData]: + self, patron_or_patrondata: Patron | PatronData + ) -> PatronData | None: if not patron_or_patrondata: return None if ( diff --git a/api/sip/__init__.py b/api/sip/__init__.py index 9d99a3ed3a..072fb43c82 100644 --- a/api/sip/__init__.py +++ b/api/sip/__init__.py @@ -1,8 +1,9 @@ from __future__ import annotations import json +from collections.abc import Callable from datetime import datetime -from typing import Any, Callable, Dict, Optional, Type, Union +from typing import Any, Dict, Optional, Type, Union from pydantic import Field, PositiveInt @@ -44,14 +45,14 @@ class SIP2Settings(BasicAuthProviderSettings): # This is _not_ a patron identifier (SIP field AA); it identifies the SC # creating the SIP session. SIP2 defines SC as "...any library automation # device dealing with patrons or library materials." - username: Optional[str] = FormField( + username: str | None = FormField( None, form=ConfigurationFormItem( label="Login User ID", ), ) # Sip field CO; the password to use when initiating a SIP session, if necessary. - password: Optional[str] = FormField( + password: str | None = FormField( None, form=ConfigurationFormItem( label="Login Password", @@ -62,7 +63,7 @@ class SIP2Settings(BasicAuthProviderSettings): # machine within a library system. Some libraries require a special location # code to be provided when authenticating patrons; others may require the # circulation manager to be treated as its own special 'location'. - location_code: Optional[str] = FormField( + location_code: str | None = FormField( None, form=ConfigurationFormItem( label="Location Code", @@ -129,7 +130,7 @@ class SIP2Settings(BasicAuthProviderSettings): required=True, ), ) - ssl_certificate: Optional[str] = FormField( + ssl_certificate: str | None = FormField( None, form=ConfigurationFormItem( label="SSL Certificate", @@ -143,7 +144,7 @@ class SIP2Settings(BasicAuthProviderSettings): type=ConfigurationFormItemType.TEXTAREA, ), ) - ssl_key: Optional[str] = FormField( + ssl_key: str | None = FormField( None, form=ConfigurationFormItem( label="SSL Key", @@ -185,7 +186,7 @@ class SIP2Settings(BasicAuthProviderSettings): class SIP2LibrarySettings(BasicAuthProviderLibrarySettings): # Used as the SIP2 AO field. 
- institution_id: Optional[str] = FormField( + institution_id: str | None = FormField( None, form=ConfigurationFormItem( label="Institution ID", @@ -220,8 +221,8 @@ def __init__( integration_id: int, settings: SIP2Settings, library_settings: SIP2LibrarySettings, - analytics: Optional[Analytics] = None, - client: Optional[Callable[..., SIPClient]] = None, + analytics: Analytics | None = None, + client: Callable[..., SIPClient] | None = None, ): """An object capable of communicating with a SIP server.""" super().__init__( @@ -284,16 +285,16 @@ def description(cls) -> str: return "SIP2 Patron Authentication" @classmethod - def settings_class(cls) -> Type[SIP2Settings]: + def settings_class(cls) -> type[SIP2Settings]: return SIP2Settings @classmethod - def library_settings_class(cls) -> Type[SIP2LibrarySettings]: + def library_settings_class(cls) -> type[SIP2LibrarySettings]: return SIP2LibrarySettings def patron_information( self, username: str | None, password: str | None - ) -> Dict[str, Any] | ProblemDetail: + ) -> dict[str, Any] | ProblemDetail: try: sip = self.client sip.connect() @@ -311,7 +312,7 @@ def patron_information( ) def remote_patron_lookup( - self, patron_or_patrondata: Union[PatronData, Patron] + self, patron_or_patrondata: PatronData | Patron ) -> PatronData | None | ProblemDetail: info = self.patron_information( patron_or_patrondata.authorization_identifier, None @@ -319,7 +320,7 @@ def remote_patron_lookup( return self.info_to_patrondata(info, False) def remote_authenticate( - self, username: Optional[str], password: Optional[str] + self, username: str | None, password: str | None ) -> PatronData | None | ProblemDetail: """Authenticate a patron with the SIP2 server. @@ -382,8 +383,8 @@ def raw_patron_information(): ) def info_to_patrondata( - self, info: Dict[str, Any] | ProblemDetail, validate_password: bool = True - ) -> Optional[PatronData] | ProblemDetail: + self, info: dict[str, Any] | ProblemDetail, validate_password: bool = True + ) -> PatronData | None | ProblemDetail: """Convert the SIP-specific dictionary obtained from SIPClient.patron_information() to an abstract, authenticator-independent PatronData object. @@ -443,12 +444,12 @@ def info_to_patrondata( def info_to_patrondata_block_reason( self, info, patrondata: PatronData - ) -> Union[PatronData.NoValue, str]: + ) -> PatronData.NoValue | str: # A True value in most (but not all) subfields of the # patron_status field will prohibit the patron from borrowing # books. 
status = info["patron_status_parsed"] - block_reason: Union[str, PatronData.NoValue] = PatronData.NO_VALUE + block_reason: str | PatronData.NoValue = PatronData.NO_VALUE for field in self.fields_that_deny_borrowing: if status.get(field) is True: block_reason = self.SPECIFIC_BLOCK_REASONS.get( diff --git a/api/sip/client.py b/api/sip/client.py index d9aed2c7ad..97fd533f2a 100644 --- a/api/sip/client.py +++ b/api/sip/client.py @@ -32,8 +32,8 @@ import ssl import tempfile import time +from collections.abc import Callable from enum import Enum -from typing import Callable, Optional import certifi @@ -99,9 +99,9 @@ def __init__( internal_name: str, sip_code: str, required=False, - length: Optional[int] = None, + length: int | None = None, allow_multiple=False, - log: Optional[logging.Logger] = None, + log: logging.Logger | None = None, ): self.sip_code = sip_code self.internal_name = internal_name diff --git a/api/sirsidynix_authentication_provider.py b/api/sirsidynix_authentication_provider.py index d096df38e9..e424fb5fe6 100644 --- a/api/sirsidynix_authentication_provider.py +++ b/api/sirsidynix_authentication_provider.py @@ -2,7 +2,7 @@ import os from gettext import gettext as _ -from typing import TYPE_CHECKING, List, Literal, Optional, Union +from typing import TYPE_CHECKING, Literal from urllib.parse import urljoin from pydantic import HttpUrl @@ -61,7 +61,7 @@ class SirsiDynixHorizonAuthLibrarySettings(BasicAuthProviderLibrarySettings): ), alias="LIBRARY_ID", ) - library_disallowed_suffixes: List[str] = FormField( + library_disallowed_suffixes: list[str] = FormField( [], form=ConfigurationFormItem( label="Disallowed Patron Suffixes", @@ -128,7 +128,7 @@ def __init__( integration_id: int, settings: SirsiDynixHorizonAuthSettings, library_settings: SirsiDynixHorizonAuthLibrarySettings, - analytics: Optional[Analytics] = None, + analytics: Analytics | None = None, ): super().__init__( library_id, integration_id, settings, library_settings, analytics @@ -289,7 +289,7 @@ def api_patron_login(self, username: str, password: str) -> Literal[False] | dic def api_read_patron_data( self, patron_key: str, session_token: str - ) -> Union[Literal[False], dict]: + ) -> Literal[False] | dict: """API request to pull basic patron information :param patron_key: The permanent external identifier for a patron @@ -307,7 +307,7 @@ def api_read_patron_data( def api_patron_status_info( self, patron_key: str, session_token: str - ) -> Union[Literal[False], dict]: + ) -> Literal[False] | dict: """API request to pull patron status information, like fines :param patron_key: The permanent external identifier for a patron diff --git a/api/util/profilers.py b/api/util/profilers.py index 6337f1f1aa..de975c69bb 100644 --- a/api/util/profilers.py +++ b/api/util/profilers.py @@ -2,7 +2,6 @@ import os import time from pathlib import Path -from typing import Optional from flask import Flask, g, request @@ -16,7 +15,7 @@ def enabled(cls) -> bool: return os.environ.get(cls.ENVIRONMENT_VARIABLE, None) is not None @classmethod - def create_profile_dir(cls) -> Optional[Path]: + def create_profile_dir(cls) -> Path | None: if not cls.enabled(): return None diff --git a/api/util/xray.py b/api/util/xray.py index 20d6f2cd1d..8bb728df7a 100644 --- a/api/util/xray.py +++ b/api/util/xray.py @@ -1,5 +1,4 @@ import os -from typing import Optional from aws_xray_sdk.core import AWSXRayRecorder from aws_xray_sdk.core import patch as xray_patch @@ -17,7 +16,7 @@ class PalaceXrayMiddleware(XRayMiddleware): XRAY_ENV_PATRON_BARCODE = 
"PALACE_XRAY_INCLUDE_BARCODE" @classmethod - def put_annotations(cls, segment: Segment, seg_type: Optional[str] = None): + def put_annotations(cls, segment: Segment, seg_type: str | None = None): if seg_type is not None: segment.put_annotation("type", seg_type) diff --git a/core/analytics.py b/core/analytics.py index ab41fd20ab..8ce6705ddb 100644 --- a/core/analytics.py +++ b/core/analytics.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from api.s3_analytics_provider import S3AnalyticsProvider from core.local_analytics_provider import LocalAnalyticsProvider @@ -17,7 +17,7 @@ class Analytics(LoggerMixin): def __init__( self, s3_analytics_enabled: bool = False, - s3_service: Optional[S3Service] = None, + s3_service: S3Service | None = None, ) -> None: self.providers = [LocalAnalyticsProvider()] diff --git a/core/config.py b/core/config.py index e728877b51..abed6d3906 100644 --- a/core/config.py +++ b/core/config.py @@ -1,7 +1,6 @@ import json import logging import os -from typing import Dict, List from flask_babel import lazy_gettext as _ from sqlalchemy.engine.url import make_url @@ -233,7 +232,7 @@ def basic_token_auth_is_enabled(cls) -> bool: ) from e @classmethod - def fcm_credentials(cls) -> Dict[str, str]: + def fcm_credentials(cls) -> dict[str, str]: """Returns a dictionary containing Firebase Cloud Messaging credentials. Credentials are provided as a JSON string, either (1) directly in an environment @@ -276,7 +275,7 @@ def fcm_credentials(cls) -> Dict[str, str]: ) @classmethod - def overdrive_fulfillment_keys(cls, testing=False) -> Dict[str, str]: + def overdrive_fulfillment_keys(cls, testing=False) -> dict[str, str]: prefix = ( cls.OD_PREFIX_TESTING_PREFIX if testing else cls.OD_PREFIX_PRODUCTION_PREFIX ) @@ -289,7 +288,7 @@ def overdrive_fulfillment_keys(cls, testing=False) -> Dict[str, str]: return {"key": key, "secret": secret} @classmethod - def quicksight_authorized_arns(cls) -> Dict[str, List[str]]: + def quicksight_authorized_arns(cls) -> dict[str, list[str]]: """Split the comma separated arns""" arns_str = os.environ.get(cls.QUICKSIGHT_AUTHORIZED_ARNS_KEY, "") return json.loads(arns_str) diff --git a/core/configuration/library.py b/core/configuration/library.py index 410417885d..eddf736b90 100644 --- a/core/configuration/library.py +++ b/core/configuration/library.py @@ -2,7 +2,7 @@ from dataclasses import dataclass from enum import IntEnum -from typing import Any, Dict, List, Optional, Tuple +from typing import Any import wcag_contrast_ratio from pydantic import ( @@ -55,13 +55,13 @@ class Level(IntEnum): class LibraryConfFormItem(ConfigurationFormItem): category: str = "Basic Information" level: Level = Level.ALL_ACCESS - read_only: Optional[bool] = None - skip: Optional[bool] = None - paired: Optional[str] = None + read_only: bool | None = None + skip: bool | None = None + paired: str | None = None def to_dict( self, db: Session, key: str, required: bool = False, default: Any = None - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """Serialize additional form items specific to library settings.""" weight, item = super().to_dict(db, key, required, default) item["category"] = self.category @@ -144,7 +144,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_ONLY, ), ) - enabled_entry_points: List[str] = FormField( + enabled_entry_points: list[str] = FormField( [x.INTERNAL_NAME for x in EntryPoint.DEFAULT_ENABLED], form=LibraryConfFormItem( label="Enabled 
entry points", @@ -179,7 +179,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - facets_enabled_order: List[str] = FormField( + facets_enabled_order: list[str] = FormField( FacetConstants.DEFAULT_ENABLED_FACETS[FacetConstants.ORDER_FACET_GROUP_NAME], form=LibraryConfFormItem( label="Allow patrons to sort by", @@ -206,7 +206,7 @@ class LibrarySettings(BaseSettings): skip=True, ), ) - facets_enabled_available: List[str] = FormField( + facets_enabled_available: list[str] = FormField( FacetConstants.DEFAULT_ENABLED_FACETS[ FacetConstants.AVAILABILITY_FACET_GROUP_NAME ], @@ -235,7 +235,7 @@ class LibrarySettings(BaseSettings): skip=True, ), ) - facets_enabled_collection: List[str] = FormField( + facets_enabled_collection: list[str] = FormField( FacetConstants.DEFAULT_ENABLED_FACETS[ FacetConstants.COLLECTION_FACET_GROUP_NAME ], @@ -264,7 +264,7 @@ class LibrarySettings(BaseSettings): skip=True, ), ) - library_description: Optional[str] = FormField( + library_description: str | None = FormField( None, form=LibraryConfFormItem( label="A short description of this library", @@ -273,7 +273,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_ONLY, ), ) - help_email: Optional[EmailStr] = FormField( + help_email: EmailStr | None = FormField( None, form=LibraryConfFormItem( label="Patron support email address", @@ -284,7 +284,7 @@ class LibrarySettings(BaseSettings): ), alias="help-email", ) - help_web: Optional[HttpUrl] = FormField( + help_web: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Patron support website", @@ -295,7 +295,7 @@ class LibrarySettings(BaseSettings): ), alias="help-web", ) - copyright_designated_agent_email_address: Optional[EmailStr] = FormField( + copyright_designated_agent_email_address: EmailStr | None = FormField( None, form=LibraryConfFormItem( label="Copyright designated agent email", @@ -307,7 +307,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_OR_MANAGER, ), ) - configuration_contact_email_address: Optional[EmailStr] = FormField( + configuration_contact_email_address: EmailStr | None = FormField( None, form=LibraryConfFormItem( label="A point of contact for the organization responsible for configuring this library", @@ -388,7 +388,7 @@ class LibrarySettings(BaseSettings): ), alias="web-secondary-color", ) - web_css_file: Optional[HttpUrl] = FormField( + web_css_file: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Custom CSS file for web", @@ -398,7 +398,7 @@ class LibrarySettings(BaseSettings): ), alias="web-css-file", ) - web_header_links: List[str] = FormField( + web_header_links: list[str] = FormField( [], form=LibraryConfFormItem( label="Web header links", @@ -410,7 +410,7 @@ class LibrarySettings(BaseSettings): ), alias="web-header-links", ) - web_header_labels: List[str] = FormField( + web_header_labels: list[str] = FormField( [], form=LibraryConfFormItem( label="Web header labels", @@ -421,7 +421,7 @@ class LibrarySettings(BaseSettings): ), alias="web-header-labels", ) - hidden_content_types: List[str] = FormField( + hidden_content_types: list[str] = FormField( [], form=LibraryConfFormItem( label="Hidden content types", @@ -433,7 +433,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_ONLY, ), ) - max_outstanding_fines: Optional[PositiveFloat] = FormField( + max_outstanding_fines: PositiveFloat | None = FormField( None, form=LibraryConfFormItem( label="Maximum amount in fines a patron can have before losing lending privileges", @@ -441,7 +441,7 @@ class 
LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - loan_limit: Optional[PositiveInt] = FormField( + loan_limit: PositiveInt | None = FormField( None, form=LibraryConfFormItem( label="Maximum number of books a patron can have on loan at once", @@ -452,7 +452,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - hold_limit: Optional[PositiveInt] = FormField( + hold_limit: PositiveInt | None = FormField( None, form=LibraryConfFormItem( label="Maximum number of books a patron can have on hold at once", @@ -463,7 +463,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - terms_of_service: Optional[HttpUrl] = FormField( + terms_of_service: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Terms of service URL", @@ -472,7 +472,7 @@ class LibrarySettings(BaseSettings): ), alias="terms-of-service", ) - privacy_policy: Optional[HttpUrl] = FormField( + privacy_policy: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Privacy policy URL", @@ -481,7 +481,7 @@ class LibrarySettings(BaseSettings): ), alias="privacy-policy", ) - copyright: Optional[HttpUrl] = FormField( + copyright: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Copyright URL", @@ -489,7 +489,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_OR_MANAGER, ), ) - about: Optional[HttpUrl] = FormField( + about: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="About URL", @@ -497,7 +497,7 @@ class LibrarySettings(BaseSettings): level=Level.ALL_ACCESS, ), ) - license: Optional[HttpUrl] = FormField( + license: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="License URL", @@ -505,7 +505,7 @@ class LibrarySettings(BaseSettings): level=Level.SYS_ADMIN_OR_MANAGER, ), ) - registration_url: Optional[HttpUrl] = FormField( + registration_url: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Patron registration URL", @@ -515,7 +515,7 @@ class LibrarySettings(BaseSettings): ), alias="register", ) - patron_password_reset: Optional[HttpUrl] = FormField( + patron_password_reset: HttpUrl | None = FormField( None, form=LibraryConfFormItem( label="Password Reset Link", @@ -525,7 +525,7 @@ class LibrarySettings(BaseSettings): ), alias="http://librarysimplified.org/terms/rel/patron-password-reset", ) - large_collection_languages: Optional[List[str]] = FormField( + large_collection_languages: list[str] | None = FormField( None, form=LibraryConfFormItem( label="The primary languages represented in this library's collection", @@ -539,7 +539,7 @@ class LibrarySettings(BaseSettings): ), alias="large_collections", ) - small_collection_languages: Optional[List[str]] = FormField( + small_collection_languages: list[str] | None = FormField( None, form=LibraryConfFormItem( label="Other major languages represented in this library's collection", @@ -553,7 +553,7 @@ class LibrarySettings(BaseSettings): ), alias="small_collections", ) - tiny_collection_languages: Optional[List[str]] = FormField( + tiny_collection_languages: list[str] | None = FormField( None, form=LibraryConfFormItem( label="Other languages in this library's collection", @@ -570,8 +570,8 @@ class LibrarySettings(BaseSettings): @root_validator def validate_require_help_email_or_website( - cls, values: Dict[str, Any] - ) -> Dict[str, Any]: + cls, values: dict[str, Any] + ) -> dict[str, Any]: if not values.get("help_email") and not values.get("help_web"): help_email_label = cls.get_form_field_label("help_email") help_website_label = 
cls.get_form_field_label("help_web") @@ -584,7 +584,7 @@ def validate_require_help_email_or_website( return values @root_validator - def validate_header_links(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def validate_header_links(cls, values: dict[str, Any]) -> dict[str, Any]: """Verify that header links and labels are the same length.""" header_links = values.get("web_header_links") header_labels = values.get("web_header_labels") @@ -604,7 +604,7 @@ def validate_web_color_contrast(cls, value: str, field: ModelField) -> str: white test, as well as text color on white backgrounds. """ - def hex_to_rgb(hex: str) -> Tuple[float, ...]: + def hex_to_rgb(hex: str) -> tuple[float, ...]: hex = hex.lstrip("#") return tuple(int(hex[i : i + 2], 16) / 255.0 for i in (0, 2, 4)) @@ -634,8 +634,8 @@ def hex_to_rgb(hex: str) -> Tuple[float, ...]: "tiny_collection_languages", ) def validate_language_codes( - cls, value: Optional[List[str]], field: ModelField - ) -> Optional[List[str]]: + cls, value: list[str] | None, field: ModelField + ) -> list[str] | None: """Verify that collection languages are valid.""" if value is not None: languages = [] diff --git a/core/coverage.py b/core/coverage.py index bdea56bbd1..556d993c38 100644 --- a/core/coverage.py +++ b/core/coverage.py @@ -1,6 +1,5 @@ import logging import traceback -from typing import Optional, Union from sqlalchemy.orm import Load from sqlalchemy.orm.session import Session @@ -151,13 +150,13 @@ class BaseCoverageProvider: # In your subclass, set this to the name of the service, # e.g. "Overdrive Bibliographic Coverage Provider". - SERVICE_NAME: Optional[str] = None + SERVICE_NAME: str | None = None # In your subclass, you _may_ set this to a string that distinguishes # two different CoverageProviders from the same data source. # (You may also override the operation method, if you need # database access to determine which operation to use.) - OPERATION: Optional[str] = None + OPERATION: str | None = None # The database session will be committed each time the # BaseCoverageProvider has (attempted to) provide coverage to this @@ -621,7 +620,7 @@ class IdentifierCoverageProvider(BaseCoverageProvider): # Setting this to None will attempt to give coverage to every single # Identifier in the system, which is probably not what you want. NO_SPECIFIED_TYPES = object() - INPUT_IDENTIFIER_TYPES: Union[None, str, object] = NO_SPECIFIED_TYPES + INPUT_IDENTIFIER_TYPES: None | str | object = NO_SPECIFIED_TYPES # Set this to False if a given Identifier needs to be run through # this CoverageProvider once for every Collection that has this @@ -1107,14 +1106,14 @@ class CollectionCoverageProvider(IdentifierCoverageProvider): # By default, this type of CoverageProvider will provide coverage to # all Identifiers in the given Collection, regardless of their type. - INPUT_IDENTIFIER_TYPES: Union[None, str, object] = None + INPUT_IDENTIFIER_TYPES: None | str | object = None DEFAULT_BATCH_SIZE = 10 # Set this to the name of the protocol managed by this type of # CoverageProvider. If this CoverageProvider can manage collections # for any protocol, leave this as None. - PROTOCOL: Optional[str] = None + PROTOCOL: str | None = None # By default, Works calculated by a CollectionCoverageProvider update # the ExternalSearchIndex. 
Set this value to True for applications that diff --git a/core/equivalents_coverage.py b/core/equivalents_coverage.py index 0f290a1fb4..0a9d71de38 100644 --- a/core/equivalents_coverage.py +++ b/core/equivalents_coverage.py @@ -1,5 +1,3 @@ -from typing import List, Optional, Set - from sqlalchemy import and_, delete, select from sqlalchemy.orm import Query, joinedload @@ -26,7 +24,7 @@ def __init__( self, _db, batch_size=None, cutoff_time=None, registered_only=False, **kwargs ): # Set of identifiers covered this run of the provider - self._already_covered_identifiers: Set[int] = set() + self._already_covered_identifiers: set[int] = set() super().__init__(_db, batch_size, cutoff_time, registered_only) def run(self): @@ -52,8 +50,8 @@ def items_that_need_coverage(self, identifiers=None, **kwargs) -> Query: return qu def _identifiers_for_coverage( - self, records: List[EquivalencyCoverageRecord] - ) -> Set[Optional[int]]: + self, records: list[EquivalencyCoverageRecord] + ) -> set[int | None]: """Get all identifiers this coverage run should recompute This involves inputs and outputs, and also any parent_identifier that has a direct relation with these identifiers @@ -61,9 +59,9 @@ def _identifiers_for_coverage( equivs = [r.equivalency for r in records] # process both inputs and outputs - identifier_ids_list: List[Optional[int]] = [eq.input_id for eq in equivs] + identifier_ids_list: list[int | None] = [eq.input_id for eq in equivs] identifier_ids_list.extend([eq.output_id for eq in equivs]) - identifier_ids: Set[Optional[int]] = set(identifier_ids_list) + identifier_ids: set[int | None] = set(identifier_ids_list) # Any identifier found, should be recalculated # However we must recalculate any other chain these identifiers were part of also @@ -77,8 +75,8 @@ def _identifiers_for_coverage( return identifier_ids def process_batch( - self, batch: List[EquivalencyCoverageRecord] - ) -> List[EquivalencyCoverageRecord]: + self, batch: list[EquivalencyCoverageRecord] + ) -> list[EquivalencyCoverageRecord]: """Query for and store the chain of equivalent identifiers batch sizes are not exact since we pull the related identifiers into the current batch too, so they would start out larger than intended diff --git a/core/exceptions.py b/core/exceptions.py index 8d7485d7eb..340fe42cb1 100644 --- a/core/exceptions.py +++ b/core/exceptions.py @@ -1,11 +1,8 @@ -from typing import Optional - - class BaseError(Exception): """Base class for all errors""" def __init__( - self, message: Optional[str] = None, inner_exception: Optional[Exception] = None + self, message: str | None = None, inner_exception: Exception | None = None ): """Initializes a new instance of BaseError class @@ -23,7 +20,7 @@ def __hash__(self): return hash(str(self)) @property - def inner_exception(self) -> Optional[str]: + def inner_exception(self) -> str | None: """Returns an inner exception :return: Inner exception diff --git a/core/external_search.py b/core/external_search.py index 493bf3ecd4..c3fe8af237 100644 --- a/core/external_search.py +++ b/core/external_search.py @@ -7,7 +7,8 @@ import re import time from collections import defaultdict -from typing import Any, Callable, Dict, Iterable, List, Optional, Union +from collections.abc import Callable, Iterable +from typing import Any from attr import define from flask_babel import lazy_gettext as _ @@ -128,7 +129,7 @@ class ExternalSearchIndex(HasSelfTests): SITEWIDE = True @classmethod - def search_integration(cls, _db) -> Optional[ExternalIntegration]: + def search_integration(cls, _db) 
-> ExternalIntegration | None: """Look up the ExternalIntegration for Opensearch.""" return ExternalIntegration.lookup( _db, ExternalIntegration.OPENSEARCH, goal=ExternalIntegration.SEARCH_GOAL @@ -154,11 +155,11 @@ def load(cls, _db, *args, **kwargs): def __init__( self, _db, - url: Optional[str] = None, - test_search_term: Optional[str] = None, - revision_directory: Optional[SearchRevisionDirectory] = None, - version: Optional[int] = None, - custom_client_service: Optional[SearchService] = None, + url: str | None = None, + test_search_term: str | None = None, + revision_directory: SearchRevisionDirectory | None = None, + version: int | None = None, + custom_client_service: SearchService | None = None, ): """Constructor @@ -226,7 +227,7 @@ def search_service(self) -> SearchService: """Get the underlying search service.""" return self._search_service - def start_migration(self) -> Optional[SearchMigrationInProgress]: + def start_migration(self) -> SearchMigrationInProgress | None: """Update to the latest schema, indexing the given works.""" migrator = SearchMigrator( revisions=self._revision_directory, @@ -1279,7 +1280,7 @@ class Operators(Values): _BOOL_TYPE = {"type": "bool"} # The fields mappings in the search DB - FIELD_MAPPING: Dict[str, Dict] = { + FIELD_MAPPING: dict[str, dict] = { "audience": dict(), "author": _KEYWORD_ONLY, "classifications.scheme": _KEYWORD_ONLY, @@ -1388,7 +1389,7 @@ def language(value: str) -> str: "language": ValueTransforms.language, } - def __init__(self, query: Union[str, Dict], filter=None): + def __init__(self, query: str | dict, filter=None): if type(query) is str: try: query = json.loads(query) @@ -1411,10 +1412,10 @@ def search_query(self): def _is_keyword(self, name: str) -> bool: return self.FIELD_MAPPING[name].get("keyword") == True - def _nested_path(self, name: str) -> Union[str, None]: + def _nested_path(self, name: str) -> str | None: return self.FIELD_MAPPING[name].get("path") - def _parse_json_query(self, query: Dict): + def _parse_json_query(self, query: dict): """Eventually recursive json query parser""" es_query = None @@ -1438,7 +1439,7 @@ def _parse_json_query(self, query: Dict): return es_query - def _parse_json_leaf(self, query: Dict) -> Dict: + def _parse_json_leaf(self, query: dict) -> dict: """We have a leaf query, which means this becomes a keyword.term query""" op = query.get(self.QueryLeaf.OP, self.Operators.EQ) @@ -1514,7 +1515,7 @@ def _match_or_term_query(): return es_query - def _parse_json_join(self, query: Dict) -> Dict: + def _parse_json_join(self, query: dict) -> dict: if len(query.keys()) != 1: raise QueryParseException( detail="A conjuction cannot have multiple parts in the same sub-query" @@ -2765,9 +2766,9 @@ def __init__(self, *args, **kwargs): # we're already at the latest version, then simply upload search documents instead. 
# self.receiver = None - self.migration: Optional[ + self.migration: None | ( SearchMigrationInProgress - ] = self.search_index_client.start_migration() + ) = self.search_index_client.start_migration() if self.migration is None: self.receiver: SearchDocumentReceiver = ( self.search_index_client.start_updating_search_documents() @@ -2788,19 +2789,19 @@ def run_once_and_update_timestamp(self): self.on_completely_finished() return result - def process_batch(self, works) -> List[Work | CoverageFailure]: + def process_batch(self, works) -> list[Work | CoverageFailure]: target: SearchDocumentReceiverType = self.migration or self.receiver failures = target.add_documents( documents=self.search_index_client.create_search_documents_from_works(works) ) # Maintain a dictionary of works so that we can efficiently remove failed works later. - work_map: Dict[int, Work] = {} + work_map: dict[int, Work] = {} for work in works: work_map[work.id] = work # Remove all the works that failed and create failure records for them. - results: List[Work | CoverageFailure] = [] + results: list[Work | CoverageFailure] = [] for failure in failures: work = work_map[failure.id] del work_map[failure.id] diff --git a/core/feed/acquisition.py b/core/feed/acquisition.py index 5fed809b4c..476b9566c9 100644 --- a/core/feed/acquisition.py +++ b/core/feed/acquisition.py @@ -2,17 +2,8 @@ from __future__ import annotations import logging -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - List, - Optional, - Tuple, - Type, -) +from collections.abc import Callable, Generator +from typing import TYPE_CHECKING, Any from sqlalchemy.orm import Query, Session @@ -55,11 +46,11 @@ def __init__( self, title: str, url: str, - works: List[Work], + works: list[Work], annotator: CirculationManagerAnnotator, - facets: Optional[FacetsWithEntryPoint] = None, - pagination: Optional[Pagination] = None, - precomposed_entries: Optional[List[OPDSMessage]] = None, + facets: FacetsWithEntryPoint | None = None, + pagination: Pagination | None = None, + precomposed_entries: list[OPDSMessage] | None = None, ) -> None: self.annotator = annotator self._facets = facets @@ -80,7 +71,7 @@ def generate_feed(self, annotate: bool = True) -> None: if annotate: self.annotator.annotate_feed(self._feed) - def add_pagination_links(self, works: List[Work], lane: WorkList) -> None: + def add_pagination_links(self, works: list[Work], lane: WorkList) -> None: """Add pagination links to the feed""" if not self._pagination: return None @@ -118,7 +109,7 @@ def add_facet_links(self, lane: WorkList) -> None: if entrypoints: # A paginated feed may have multiple entry points into the # same dataset. 
- def make_link(ep: Type[EntryPoint]) -> str: + def make_link(ep: type[EntryPoint]) -> str: return self.annotator.feed_url( lane, facets=facets.navigate(entrypoint=ep) ) @@ -196,7 +187,7 @@ def as_error_response(self, **kwargs: Any) -> OPDSFeedResponse: def _create_entry( cls, work: Work, - active_licensepool: Optional[LicensePool], + active_licensepool: LicensePool | None, edition: Edition, identifier: Identifier, annotator: Annotator, @@ -215,9 +206,9 @@ def _create_entry( def add_entrypoint_links( cls, feed: FeedData, - url_generator: Callable[[Type[EntryPoint]], str], - entrypoints: List[Type[EntryPoint]], - selected_entrypoint: Optional[Type[EntryPoint]], + url_generator: Callable[[type[EntryPoint]], str], + entrypoints: list[type[EntryPoint]], + selected_entrypoint: type[EntryPoint] | None, group_name: str = "Formats", ) -> None: """Add links to a feed forming an OPDS facet group for a set of @@ -248,12 +239,12 @@ def add_entrypoint_links( @classmethod def _entrypoint_link( cls, - url_generator: Callable[[Type[EntryPoint]], str], - entrypoint: Type[EntryPoint], - selected_entrypoint: Optional[Type[EntryPoint]], + url_generator: Callable[[type[EntryPoint]], str], + entrypoint: type[EntryPoint], + selected_entrypoint: type[EntryPoint] | None, is_default: bool, group_name: str, - ) -> Optional[Link]: + ) -> Link | None: """Create arguments for add_link_to_feed for a link that navigates between EntryPoints. """ @@ -276,7 +267,7 @@ def _entrypoint_link( return link def add_breadcrumb_links( - self, lane: WorkList, entrypoint: Optional[Type[EntryPoint]] = None + self, lane: WorkList, entrypoint: type[EntryPoint] | None = None ) -> None: """Add information necessary to find your current place in the site's navigation. @@ -319,7 +310,7 @@ def add_breadcrumbs( self, lane: WorkList, include_lane: bool = False, - entrypoint: Optional[Type[EntryPoint]] = None, + entrypoint: type[EntryPoint] | None = None, ) -> None: """Add list of ancestor links in a breadcrumbs element. @@ -405,7 +396,7 @@ def add_breadcrumbs( # Append the breadcrumbs to the feed. self._feed.breadcrumbs = breadcrumbs - def show_current_entrypoint(self, entrypoint: Optional[Type[EntryPoint]]) -> None: + def show_current_entrypoint(self, entrypoint: type[EntryPoint] | None) -> None: """Annotate this given feed with a simplified:entryPoint attribute pointing to the current entrypoint's TYPE_URI. @@ -442,9 +433,9 @@ def page( url: str, worklist: WorkList, annotator: CirculationManagerAnnotator, - facets: Optional[FacetsWithEntryPoint], - pagination: Optional[Pagination], - search_engine: Optional[ExternalSearchIndex], + facets: FacetsWithEntryPoint | None, + pagination: Pagination | None, + search_engine: ExternalSearchIndex | None, ) -> OPDSAcquisitionFeed: works = worklist.works( _db, facets=facets, pagination=pagination, search_engine=search_engine @@ -470,9 +461,9 @@ def page( @classmethod def active_loans_for( cls, - circulation: Optional[CirculationAPI], + circulation: CirculationAPI | None, patron: Patron, - annotator: Optional[LibraryAnnotator] = None, + annotator: LibraryAnnotator | None = None, **response_kwargs: Any, ) -> OPDSAcquisitionFeed: """A patron specific feed that only contains the loans and holds of a patron""" @@ -484,7 +475,7 @@ def active_loans_for( active_loans_by_work[work] = loan # There might be multiple holds for the same work so we gather all of them and choose the best one. 
- all_holds_by_work: Dict[Work, List[Hold]] = {} + all_holds_by_work: dict[Work, list[Hold]] = {} for hold in patron.holds: work = hold.work if not work: @@ -495,7 +486,7 @@ def active_loans_for( all_holds_by_work[work].append(hold) - active_holds_by_work: Dict[Work, Hold] = {} + active_holds_by_work: dict[Work, Hold] = {} for work, list_of_holds in all_holds_by_work.items(): active_holds_by_work[ work @@ -599,7 +590,7 @@ def single_entry( work: Work | Edition | None, annotator: Annotator, even_if_no_license_pool: bool = False, - ) -> Optional[WorkEntry | OPDSMessage]: + ) -> WorkEntry | OPDSMessage | None: """Turn a work into an annotated work entry for an acquisition feed.""" identifier = None _work: Work @@ -669,9 +660,9 @@ def groups( url: str, worklist: WorkList, annotator: LibraryAnnotator, - pagination: Optional[Pagination] = None, - facets: Optional[FacetsWithEntryPoint] = None, - search_engine: Optional[ExternalSearchIndex] = None, + pagination: Pagination | None = None, + facets: FacetsWithEntryPoint | None = None, + search_engine: ExternalSearchIndex | None = None, search_debug: bool = False, ) -> OPDSAcquisitionFeed: """Internal method called by groups() when a grouped feed @@ -737,7 +728,7 @@ def groups( entrypoints = facets.selectable_entrypoints(worklist) if entrypoints: - def make_link(ep: Type[EntryPoint]) -> str: + def make_link(ep: type[EntryPoint]) -> str: return annotator.groups_url( worklist, facets=facets.navigate(entrypoint=ep) ) @@ -761,8 +752,8 @@ def search( search_engine: ExternalSearchIndex, query: str, annotator: LibraryAnnotator, - pagination: Optional[Pagination] = None, - facets: Optional[FacetsWithEntryPoint] = None, + pagination: Pagination | None = None, + facets: FacetsWithEntryPoint | None = None, **response_kwargs: Any, ) -> OPDSAcquisitionFeed | ProblemDetail: """Run a search against the given search engine and return @@ -803,7 +794,7 @@ def search( entrypoints = facets.selectable_entrypoints(lane) if entrypoints: - def make_link(ep: Type[EntryPoint]) -> str: + def make_link(ep: type[EntryPoint]) -> str: return annotator.search_url( lane, query, pagination=None, facets=facets.navigate(entrypoint=ep) ) @@ -893,11 +884,11 @@ class LookupAcquisitionFeed(OPDSAcquisitionFeed): """ @classmethod - def single_entry(cls, work: Tuple[Identifier, Work], annotator: Annotator) -> WorkEntry | OPDSMessage: # type: ignore[override] + def single_entry(cls, work: tuple[Identifier, Work], annotator: Annotator) -> WorkEntry | OPDSMessage: # type: ignore[override] # This comes in as a tuple, which deviates from the typical behaviour identifier, _work = work - active_licensepool: Optional[LicensePool] + active_licensepool: LicensePool | None if identifier.licensed_through: active_licensepool = identifier.licensed_through[0] else: diff --git a/core/feed/admin.py b/core/feed/admin.py index a4536fa18e..28730d2c46 100644 --- a/core/feed/admin.py +++ b/core/feed/admin.py @@ -1,5 +1,3 @@ -from typing import Optional - from sqlalchemy import and_ from sqlalchemy.orm import Session from typing_extensions import Self @@ -18,7 +16,7 @@ def suppressed( title: str, url: str, annotator: AdminAnnotator, - pagination: Optional[Pagination] = None, + pagination: Pagination | None = None, ) -> Self: _pagination = pagination or Pagination.default() diff --git a/core/feed/annotator/admin.py b/core/feed/annotator/admin.py index 27da250676..5f017cf016 100644 --- a/core/feed/annotator/admin.py +++ b/core/feed/annotator/admin.py @@ -1,5 +1,4 @@ from datetime import datetime -from typing import 
Optional from api.circulation import CirculationAPI from core.feed.annotator.circulation import LibraryAnnotator @@ -11,11 +10,11 @@ class AdminAnnotator(LibraryAnnotator): - def __init__(self, circulation: Optional[CirculationAPI], library: Library) -> None: + def __init__(self, circulation: CirculationAPI | None, library: Library) -> None: super().__init__(circulation, None, library) def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime] = None + self, entry: WorkEntry, updated: datetime | None = None ) -> None: super().annotate_work_entry(entry) if not entry.computed: diff --git a/core/feed/annotator/base.py b/core/feed/annotator/base.py index 8c7577e8ab..ee8de57f08 100644 --- a/core/feed/annotator/base.py +++ b/core/feed/annotator/base.py @@ -4,7 +4,7 @@ import logging from collections import defaultdict from decimal import Decimal -from typing import Any, Dict, List, Optional, Set, Tuple +from typing import Any from urllib.parse import quote from sqlalchemy.orm import Session, joinedload @@ -32,7 +32,7 @@ class ToFeedEntry: @classmethod - def authors(cls, edition: Edition) -> Dict[str, List[Author]]: + def authors(cls, edition: Edition) -> dict[str, list[Author]]: """Create one or more author (and contributor) objects for the given Work. @@ -41,8 +41,8 @@ def authors(cls, edition: Edition) -> Dict[str, List[Author]]: Contributions. :return: A dict with "authors" and "contributors" as a list of Author objects """ - authors: Dict[str, List[Author]] = {"authors": [], "contributors": []} - state: Dict[Optional[str], Set[str]] = defaultdict(set) + authors: dict[str, list[Author]] = {"authors": [], "contributors": []} + state: dict[str | None, set[str]] = defaultdict(set) for contribution in edition.contributions: info = cls.contributor(contribution, state) if info is None: @@ -63,8 +63,8 @@ def authors(cls, edition: Edition) -> Dict[str, List[Author]]: @classmethod def contributor( - cls, contribution: Contribution, state: Dict[Optional[str], Set[str]] - ) -> Optional[Tuple[str, Author]]: + cls, contribution: Contribution, state: dict[str | None, set[str]] + ) -> tuple[str, Author] | None: """Build an author (or contributor) object for a Contribution. :param contribution: A Contribution. @@ -101,7 +101,7 @@ def contributor( return None # Okay, we're creating a tag. 
- properties: Dict[str, Any] = dict() + properties: dict[str, Any] = dict() if marc_role: properties["role"] = marc_role entry = Author(name=name, **properties) @@ -113,8 +113,8 @@ def contributor( @classmethod def series( - cls, series_name: Optional[str], series_position: Optional[int] | Optional[str] - ) -> Optional[FeedEntryType]: + cls, series_name: str | None, series_position: int | None | str | None + ) -> FeedEntryType | None: """Generate a FeedEntryType object for the given name and position.""" if not series_name: return None @@ -126,7 +126,7 @@ def series( return series @classmethod - def rating(cls, type_uri: Optional[str], value: float | Decimal) -> FeedEntryType: + def rating(cls, type_uri: str | None, value: float | Decimal) -> FeedEntryType: """Generate a FeedEntryType object for the given type and value.""" entry = FeedEntryType.create( **dict(ratingValue="%.4f" % value, additionalType=type_uri) @@ -134,7 +134,7 @@ def rating(cls, type_uri: Optional[str], value: float | Decimal) -> FeedEntryTyp return entry @classmethod - def samples(cls, edition: Optional[Edition]) -> list[Hyperlink]: + def samples(cls, edition: Edition | None) -> list[Hyperlink]: if not edition: return [] _db = Session.object_session(edition) @@ -150,7 +150,7 @@ def samples(cls, edition: Optional[Edition]) -> list[Hyperlink]: return links @classmethod - def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: + def categories(cls, work: Work) -> dict[str, list[dict[str, str]]]: """Return all relevant classifications of this work. :return: A dictionary mapping 'scheme' URLs to dictionaries of @@ -187,7 +187,7 @@ def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: # Add the appeals as a category of schema # http://librarysimplified.org/terms/appeal schema_url = AtomFeed.SIMPLIFIED_NS + "appeals/" - appeals: List[Dict[str, Any]] = [] + appeals: list[dict[str, Any]] = [] categories[schema_url] = appeals for name, value in ( (Work.CHARACTER_APPEAL, work.appeal_character), @@ -196,7 +196,7 @@ def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: (Work.STORY_APPEAL, work.appeal_story), ): if value: - appeal: Dict[str, Any] = dict(term=schema_url + name, label=name) + appeal: dict[str, Any] = dict(term=schema_url + name, label=name) weight_field = "ratingValue" appeal[weight_field] = value appeals.append(appeal) @@ -222,7 +222,7 @@ def categories(cls, work: Work) -> Dict[str, List[Dict[str, str]]]: return categories @classmethod - def content(cls, work: Optional[Work]) -> str: + def content(cls, work: Work | None) -> str: """Return an HTML summary of this work.""" summary = "" if work: @@ -243,7 +243,7 @@ def content(cls, work: Optional[Work]) -> str: class Annotator(ToFeedEntry): def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime.datetime] = None + self, entry: WorkEntry, updated: datetime.datetime | None = None ) -> None: """ Any data that the serializer must consider while generating an "entry" diff --git a/core/feed/annotator/circulation.py b/core/feed/annotator/circulation.py index e488b01c43..1e870d3223 100644 --- a/core/feed/annotator/circulation.py +++ b/core/feed/annotator/circulation.py @@ -7,7 +7,7 @@ import urllib.parse import urllib.request from collections import defaultdict -from typing import Any, Dict, List, Optional, Tuple +from typing import Any from dependency_injector.wiring import Provide, inject from flask import url_for @@ -61,10 +61,10 @@ class AcquisitionHelper: @classmethod def license_tags( cls, - license_pool: 
Optional[LicensePool], - loan: Optional[Loan], - hold: Optional[Hold], - ) -> Optional[Dict[str, Any]]: + license_pool: LicensePool | None, + loan: Loan | None, + hold: Hold | None, + ) -> dict[str, Any] | None: acquisition = {} # Generate a list of licensing tags. These should be inserted # into a tag. @@ -154,7 +154,7 @@ def license_tags( return acquisition @classmethod - def format_types(cls, delivery_mechanism: DeliveryMechanism) -> List[str]: + def format_types(cls, delivery_mechanism: DeliveryMechanism) -> list[str]: """Generate a set of types suitable for passing into acquisition_link(). """ @@ -184,11 +184,11 @@ class CirculationManagerAnnotator(Annotator): @inject def __init__( self, - lane: Optional[WorkList], - active_loans_by_work: Optional[Dict[Work, Loan]] = None, - active_holds_by_work: Optional[Dict[Work, Hold]] = None, - active_fulfillments_by_work: Optional[Dict[Work, Any]] = None, - hidden_content_types: Optional[List[str]] = None, + lane: WorkList | None, + active_loans_by_work: dict[Work, Loan] | None = None, + active_holds_by_work: dict[Work, Hold] | None = None, + active_fulfillments_by_work: dict[Work, Any] | None = None, + hidden_content_types: list[str] | None = None, analytics: Analytics = Provide[Services.analytics.analytics], ) -> None: if lane: @@ -224,7 +224,7 @@ def is_work_entry_solo(self, work: Work) -> bool: ) ) - def _lane_identifier(self, lane: Optional[WorkList]) -> Optional[int]: + def _lane_identifier(self, lane: WorkList | None) -> int | None: if isinstance(lane, Lane): return lane.id return None @@ -246,11 +246,11 @@ def facet_url(self, facets: Facets) -> str: def feed_url( self, - lane: Optional[WorkList], - facets: Optional[FacetsWithEntryPoint] = None, - pagination: Optional[Pagination] = None, + lane: WorkList | None, + facets: FacetsWithEntryPoint | None = None, + pagination: Pagination | None = None, default_route: str = "feed", - extra_kwargs: Optional[Dict[str, Any]] = None, + extra_kwargs: dict[str, Any] | None = None, ) -> str: if isinstance(lane, WorkList) and hasattr(lane, "url_arguments"): route, kwargs = lane.url_arguments @@ -275,8 +275,8 @@ def navigation_url(self, lane: Lane) -> str: ) def active_licensepool_for( - self, work: Work, library: Optional[Library] = None - ) -> Optional[LicensePool]: + self, work: Work, library: Library | None = None + ) -> LicensePool | None: loan = self.active_loans_by_work.get(work) or self.active_holds_by_work.get( work ) @@ -304,7 +304,7 @@ def _prioritized_formats_for_pool( config.settings_dict.get(FormatPriorities.PRIORITIZED_DRM_SCHEMES_KEY) or [] ) - content_setting: List[str] = ( + content_setting: list[str] = ( config.settings_dict.get(FormatPriorities.PRIORITIZED_CONTENT_TYPES_KEY) or [] ) @@ -351,7 +351,7 @@ def visible_delivery_mechanisms( def annotate_work_entry( self, entry: WorkEntry, - updated: Optional[datetime.datetime] = None, + updated: datetime.datetime | None = None, ) -> None: work = entry.work identifier = entry.identifier or work.presentation_edition.primary_identifier @@ -394,19 +394,18 @@ def annotate_work_entry( def acquisition_links( self, - active_license_pool: Optional[LicensePool], - active_loan: Optional[Loan], - active_hold: Optional[Hold], - active_fulfillment: Optional[Any], + active_license_pool: LicensePool | None, + active_loan: Loan | None, + active_hold: Hold | None, + active_fulfillment: Any | None, identifier: Identifier, can_hold: bool = True, can_revoke_hold: bool = True, set_mechanism_at_borrow: bool = False, - direct_fulfillment_delivery_mechanisms: 
Optional[ - List[LicensePoolDeliveryMechanism] - ] = None, + direct_fulfillment_delivery_mechanisms: None + | (list[LicensePoolDeliveryMechanism]) = None, add_open_access_links: bool = True, - ) -> List[Acquisition]: + ) -> list[Acquisition]: """Generate a number of tags that enumerate all acquisition methods. @@ -488,7 +487,7 @@ def acquisition_links( link.add_attributes(license_tags) # Add links for fulfilling an active loan. - fulfill_links: List[Optional[Acquisition]] = [] + fulfill_links: list[Acquisition | None] = [] if can_fulfill: if active_fulfillment: # We're making an entry for a specific fulfill link. @@ -532,7 +531,7 @@ def acquisition_links( ) ) - open_access_links: List[Optional[Acquisition]] = [] + open_access_links: list[Acquisition | None] = [] if ( active_license_pool is not None and direct_fulfillment_delivery_mechanisms is not None @@ -580,33 +579,33 @@ def acquisition_links( def revoke_link( self, active_license_pool: LicensePool, - active_loan: Optional[Loan], - active_hold: Optional[Hold], - ) -> Optional[Acquisition]: + active_loan: Loan | None, + active_hold: Hold | None, + ) -> Acquisition | None: return None def borrow_link( self, active_license_pool: LicensePool, - borrow_mechanism: Optional[LicensePoolDeliveryMechanism], - fulfillment_mechanisms: List[LicensePoolDeliveryMechanism], - active_hold: Optional[Hold] = None, - ) -> Optional[Acquisition]: + borrow_mechanism: LicensePoolDeliveryMechanism | None, + fulfillment_mechanisms: list[LicensePoolDeliveryMechanism], + active_hold: Hold | None = None, + ) -> Acquisition | None: return None def fulfill_link( self, license_pool: LicensePool, - active_loan: Optional[Loan], + active_loan: Loan | None, delivery_mechanism: DeliveryMechanism, rel: str = OPDSFeed.ACQUISITION_REL, - ) -> Optional[Acquisition]: + ) -> Acquisition | None: return None def open_access_link( self, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism ) -> Acquisition: - kw: Dict[str, Any] = dict(rel=OPDSFeed.OPEN_ACCESS_REL, type="") + kw: dict[str, Any] = dict(rel=OPDSFeed.OPEN_ACCESS_REL, type="") # Start off assuming that the URL associated with the # LicensePoolDeliveryMechanism's Resource is the URL we should @@ -626,8 +625,8 @@ def open_access_link( return link def rights_attributes( - self, lpdm: Optional[LicensePoolDeliveryMechanism] - ) -> Dict[str, str]: + self, lpdm: LicensePoolDeliveryMechanism | None + ) -> dict[str, str]: """Create a dictionary of tag attributes that explain the rights status of a LicensePoolDeliveryMechanism. 
@@ -643,8 +642,8 @@ def acquisition_link( cls, rel: str, href: str, - types: Optional[List[str]], - active_loan: Optional[Loan] = None, + types: list[str] | None, + active_loan: Loan | None = None, ) -> Acquisition: if types: initial_type = types[0] @@ -666,10 +665,10 @@ def acquisition_link( @classmethod def indirect_acquisition( - cls, indirect_types: List[str] - ) -> Optional[IndirectAcquisition]: - top_level_parent: Optional[IndirectAcquisition] = None - parent: Optional[IndirectAcquisition] = None + cls, indirect_types: list[str] + ) -> IndirectAcquisition | None: + top_level_parent: IndirectAcquisition | None = None + parent: IndirectAcquisition | None = None for t in indirect_types: indirect_link = IndirectAcquisition(type=t) if parent is not None: @@ -683,17 +682,17 @@ def indirect_acquisition( class LibraryAnnotator(CirculationManagerAnnotator): def __init__( self, - circulation: Optional[CirculationAPI], - lane: Optional[WorkList], + circulation: CirculationAPI | None, + lane: WorkList | None, library: Library, - patron: Optional[Patron] = None, - active_loans_by_work: Optional[Dict[Work, Loan]] = None, - active_holds_by_work: Optional[Dict[Work, Hold]] = None, - active_fulfillments_by_work: Optional[Dict[Work, Any]] = None, + patron: Patron | None = None, + active_loans_by_work: dict[Work, Loan] | None = None, + active_holds_by_work: dict[Work, Hold] | None = None, + active_fulfillments_by_work: dict[Work, Any] | None = None, facet_view: str = "feed", top_level_title: str = "All Books", library_identifies_patrons: bool = True, - facets: Optional[FacetsWithEntryPoint] = None, + facets: FacetsWithEntryPoint | None = None, ) -> None: """Constructor. @@ -719,9 +718,9 @@ def __init__( self.circulation = circulation self.library: Library = library self.patron = patron - self.lanes_by_work: Dict[Work, List[Any]] = defaultdict(list) + self.lanes_by_work: dict[Work, list[Any]] = defaultdict(list) self.facet_view = facet_view - self._adobe_id_cache: Dict[str, Any] = {} + self._adobe_id_cache: dict[str, Any] = {} self._top_level_title = top_level_title self.identifies_patrons = library_identifies_patrons self.facets = facets or None @@ -729,7 +728,7 @@ def __init__( def top_level_title(self) -> str: return self._top_level_title - def permalink_for(self, identifier: Identifier) -> Tuple[str, str]: + def permalink_for(self, identifier: Identifier) -> tuple[str, str]: # TODO: Do not force OPDS types url = self.url_for( "permalink", @@ -741,7 +740,7 @@ def permalink_for(self, identifier: Identifier) -> Tuple[str, str]: return url, OPDSFeed.ENTRY_TYPE def groups_url( - self, lane: Optional[WorkList], facets: Optional[FacetsWithEntryPoint] = None + self, lane: WorkList | None, facets: FacetsWithEntryPoint | None = None ) -> str: lane_identifier = self._lane_identifier(lane) if facets: @@ -757,14 +756,14 @@ def groups_url( **kwargs, ) - def default_lane_url(self, facets: Optional[FacetsWithEntryPoint] = None) -> str: + def default_lane_url(self, facets: FacetsWithEntryPoint | None = None) -> str: return self.groups_url(None, facets=facets) def feed_url( # type: ignore [override] self, - lane: Optional[WorkList], - facets: Optional[FacetsWithEntryPoint] = None, - pagination: Optional[Pagination] = None, + lane: WorkList | None, + facets: FacetsWithEntryPoint | None = None, + pagination: Pagination | None = None, default_route: str = "feed", ) -> str: extra_kwargs = dict() @@ -774,10 +773,10 @@ def feed_url( # type: ignore [override] def search_url( self, - lane: Optional[WorkList], + lane: 
WorkList | None, query: str, - pagination: Optional[Pagination], - facets: Optional[FacetsWithEntryPoint] = None, + pagination: Pagination | None, + facets: FacetsWithEntryPoint | None = None, ) -> str: lane_identifier = self._lane_identifier(lane) kwargs = dict(q=query) @@ -794,8 +793,8 @@ def search_url( ) def group_uri( - self, work: Work, license_pool: Optional[LicensePool], identifier: Identifier - ) -> Tuple[Optional[str], str]: + self, work: Work, license_pool: LicensePool | None, identifier: Identifier + ) -> tuple[str | None, str]: if not work in self.lanes_by_work: return None, "" @@ -834,7 +833,7 @@ def group_uri( return self.lane_url(lane, self.facets), title def lane_url( - self, lane: Optional[WorkList], facets: Optional[FacetsWithEntryPoint] = None + self, lane: WorkList | None, facets: FacetsWithEntryPoint | None = None ) -> str: # If the lane has sublanes, the URL identifying the group will # take the user to another set of groups for the @@ -853,7 +852,7 @@ def lane_url( return url def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime.datetime] = None + self, entry: WorkEntry, updated: datetime.datetime | None = None ) -> None: super().annotate_work_entry(entry, updated=updated) @@ -965,7 +964,7 @@ def related_books_available(cls, work: Work, library: Library) -> bool: def language_and_audience_key_from_work( self, work: Work - ) -> Tuple[Optional[str], Optional[str]]: + ) -> tuple[str | None, str | None]: language_key = work.language audiences = None @@ -1150,7 +1149,7 @@ def annotate_feed(self, feed: FeedData) -> None: def add_configuration_links(self, feed: FeedData) -> None: _db = Session.object_session(self.library) - def _add_link(l: Dict[str, Any]) -> None: + def _add_link(l: dict[str, Any]) -> None: feed.add_link(**l) library = self.library @@ -1219,16 +1218,15 @@ def _add_link(l: Dict[str, Any]) -> None: def acquisition_links( # type: ignore [override] self, - active_license_pool: Optional[LicensePool], - active_loan: Optional[Loan], - active_hold: Optional[Hold], - active_fulfillment: Optional[Any], + active_license_pool: LicensePool | None, + active_loan: Loan | None, + active_hold: Hold | None, + active_fulfillment: Any | None, identifier: Identifier, - direct_fulfillment_delivery_mechanisms: Optional[ - List[LicensePoolDeliveryMechanism] - ] = None, - mock_api: Optional[Any] = None, - ) -> List[Acquisition]: + direct_fulfillment_delivery_mechanisms: None + | (list[LicensePoolDeliveryMechanism]) = None, + mock_api: Any | None = None, + ) -> list[Acquisition]: """Generate one or more tags that can be used to borrow, reserve, or fulfill a book, depending on the state of the book and the current patron. 
@@ -1305,9 +1303,9 @@ def acquisition_links( # type: ignore [override] def revoke_link( self, active_license_pool: LicensePool, - active_loan: Optional[Loan], - active_hold: Optional[Hold], - ) -> Optional[Acquisition]: + active_loan: Loan | None, + active_hold: Hold | None, + ) -> Acquisition | None: if not self.identifies_patrons: return None url = self.url_for( @@ -1316,17 +1314,17 @@ def revoke_link( library_short_name=self.library.short_name, _external=True, ) - kw: Dict[str, Any] = dict(href=url, rel=OPDSFeed.REVOKE_LOAN_REL) + kw: dict[str, Any] = dict(href=url, rel=OPDSFeed.REVOKE_LOAN_REL) revoke_link_tag = Acquisition(**kw) return revoke_link_tag def borrow_link( self, active_license_pool: LicensePool, - borrow_mechanism: Optional[LicensePoolDeliveryMechanism], - fulfillment_mechanisms: List[LicensePoolDeliveryMechanism], - active_hold: Optional[Hold] = None, - ) -> Optional[Acquisition]: + borrow_mechanism: LicensePoolDeliveryMechanism | None, + fulfillment_mechanisms: list[LicensePoolDeliveryMechanism], + active_hold: Hold | None = None, + ) -> Acquisition | None: if not self.identifies_patrons: return None identifier = active_license_pool.identifier @@ -1354,7 +1352,7 @@ def borrow_link( is_hold=True if active_hold else False, ) - indirect_acquisitions: List[IndirectAcquisition] = [] + indirect_acquisitions: list[IndirectAcquisition] = [] for lpdm in fulfillment_mechanisms: # We have information about one or more delivery # mechanisms that will be available at the point of @@ -1384,10 +1382,10 @@ def borrow_link( def fulfill_link( self, license_pool: LicensePool, - active_loan: Optional[Loan], + active_loan: Loan | None, delivery_mechanism: DeliveryMechanism, rel: str = OPDSFeed.ACQUISITION_REL, - ) -> Optional[Acquisition]: + ) -> Acquisition | None: """Create a new fulfillment link. This link may include tags from the OPDS Extensions for DRM. @@ -1442,9 +1440,9 @@ def open_access_link( def drm_extension_tags( self, license_pool: LicensePool, - active_loan: Optional[Loan], - delivery_mechanism: Optional[DeliveryMechanism], - ) -> Dict[str, Any]: + active_loan: Loan | None, + delivery_mechanism: DeliveryMechanism | None, + ) -> dict[str, Any]: """Construct OPDS Extensions for DRM tags that explain how to register a device with the DRM server that manages this loan. :param delivery_mechanism: A DeliveryMechanism @@ -1471,7 +1469,7 @@ def drm_extension_tags( def adobe_id_tags( self, patron_identifier: str | Patron - ) -> Dict[str, FeedEntryType]: + ) -> dict[str, FeedEntryType]: """Construct tags using the DRM Extensions for OPDS standard that explain how to get an Adobe ID for this patron, and how to manage their list of device IDs. 
@@ -1515,7 +1513,7 @@ def adobe_id_tags( cached = copy.deepcopy(cached) return cached - def lcp_key_retrieval_tags(self, active_loan: Loan) -> Dict[str, FeedEntryType]: + def lcp_key_retrieval_tags(self, active_loan: Loan) -> dict[str, FeedEntryType]: # In the case of LCP we have to include a patron's hashed passphrase # inside the acquisition link so client applications can use it to open the LCP license # without having to ask the user to enter their password diff --git a/core/feed/annotator/loan_and_hold.py b/core/feed/annotator/loan_and_hold.py index 084aa8238f..837b0a5267 100644 --- a/core/feed/annotator/loan_and_hold.py +++ b/core/feed/annotator/loan_and_hold.py @@ -1,6 +1,6 @@ import copy from datetime import datetime -from typing import Any, Dict, List, Optional +from typing import Any from core.feed.annotator.circulation import LibraryAnnotator from core.feed.types import FeedData, Link, WorkEntry @@ -10,7 +10,7 @@ class LibraryLoanAndHoldAnnotator(LibraryAnnotator): @staticmethod - def choose_best_hold_for_work(list_of_holds: List[Hold]) -> Hold: + def choose_best_hold_for_work(list_of_holds: list[Hold]) -> Hold: # We don't want holds that are connected to license pools without any licenses owned. Also, we want hold that # would result in the least wait time for the patron. @@ -44,7 +44,7 @@ def choose_best_hold_for_work(list_of_holds: List[Hold]) -> Hold: return best - def drm_device_registration_feed_tags(self, patron: Patron) -> Dict[str, Any]: + def drm_device_registration_feed_tags(self, patron: Patron) -> dict[str, Any]: """Return tags that provide information on DRM device deregistration independent of any particular loan. These tags will go under the tag. @@ -89,7 +89,7 @@ def annotate_feed(self, feed: FeedData) -> None: feed.metadata.drm_licensor = tags["drm_licensor"] def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime] = None + self, entry: WorkEntry, updated: datetime | None = None ) -> None: super().annotate_work_entry(entry, updated=updated) if not entry.computed: diff --git a/core/feed/annotator/verbose.py b/core/feed/annotator/verbose.py index eabcb870a2..45be52f715 100644 --- a/core/feed/annotator/verbose.py +++ b/core/feed/annotator/verbose.py @@ -1,6 +1,5 @@ from collections import defaultdict from datetime import datetime -from typing import Dict, List, Optional from sqlalchemy.orm import Session @@ -23,7 +22,7 @@ class VerboseAnnotator(Annotator): """ def annotate_work_entry( - self, entry: WorkEntry, updated: Optional[datetime] = None + self, entry: WorkEntry, updated: datetime | None = None ) -> None: super().annotate_work_entry(entry, updated=updated) self.add_ratings(entry) @@ -42,8 +41,8 @@ def add_ratings(cls, entry: WorkEntry) -> None: @classmethod def categories( - cls, work: Work, policy: Optional[PresentationCalculationPolicy] = None - ) -> Dict[str, List[Dict[str, str]]]: + cls, work: Work, policy: PresentationCalculationPolicy | None = None + ) -> dict[str, list[dict[str, str]]]: """Send out _all_ categories for the work. 
(So long as the category type has a URI associated with it in @@ -85,7 +84,7 @@ def categories( return by_scheme @classmethod - def authors(cls, edition: Edition) -> Dict[str, List[Author]]: + def authors(cls, edition: Edition) -> dict[str, list[Author]]: """Create a detailed tag for each author.""" return { "authors": [ diff --git a/core/feed/navigation.py b/core/feed/navigation.py index ae2111a9e8..0b1da8865a 100644 --- a/core/feed/navigation.py +++ b/core/feed/navigation.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import Any, Optional +from typing import Any from sqlalchemy.orm import Session from typing_extensions import Self @@ -23,8 +23,8 @@ def __init__( url: str, lane: WorkList, annotator: CirculationManagerAnnotator, - facets: Optional[Facets] = None, - pagination: Optional[Pagination] = None, + facets: Facets | None = None, + pagination: Pagination | None = None, ) -> None: self.lane = lane self.annotator = annotator @@ -40,7 +40,7 @@ def navigation( url: str, worklist: WorkList, annotator: CirculationManagerAnnotator, - facets: Optional[Facets] = None, + facets: Facets | None = None, ) -> Self: """The navigation feed with links to a given lane's sublanes.""" @@ -83,7 +83,7 @@ def add_entry( def as_response( self, - mime_types: Optional[MIMEAccept] = None, + mime_types: MIMEAccept | None = None, **kwargs: Any, ) -> OPDSFeedResponse: response = super().as_response(mime_types=mime_types, **kwargs) diff --git a/core/feed/opds.py b/core/feed/opds.py index b33e1a5447..5daf0c9313 100644 --- a/core/feed/opds.py +++ b/core/feed/opds.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Any, Dict, List, Optional, Type +from typing import Any from werkzeug.datastructures import MIMEAccept @@ -16,10 +16,10 @@ def get_serializer( - mime_types: Optional[MIMEAccept], + mime_types: MIMEAccept | None, ) -> SerializerInterface[Any]: # Ordering matters for poor matches (eg. 
*/*), so we will keep OPDS1 first - serializers: Dict[str, Type[SerializerInterface[Any]]] = { + serializers: dict[str, type[SerializerInterface[Any]]] = { "application/atom+xml": OPDS1Serializer, "application/opds+json": OPDS2Serializer, } @@ -37,7 +37,7 @@ def __init__( self, title: str, url: str, - precomposed_entries: Optional[List[OPDSMessage]] = None, + precomposed_entries: list[OPDSMessage] | None = None, ) -> None: self.url = url self.title = title @@ -45,12 +45,12 @@ def __init__( self._feed = FeedData() self.log = logging.getLogger(self.__class__.__name__) - def add_link(self, href: str, rel: Optional[str] = None, **kwargs: Any) -> None: + def add_link(self, href: str, rel: str | None = None, **kwargs: Any) -> None: self._feed.add_link(href, rel=rel, **kwargs) def as_response( self, - mime_types: Optional[MIMEAccept] = None, + mime_types: MIMEAccept | None = None, **kwargs: Any, ) -> OPDSFeedResponse: """Serialize the feed using the serializer protocol""" @@ -67,7 +67,7 @@ def as_response( def entry_as_response( cls, entry: WorkEntry | OPDSMessage, - mime_types: Optional[MIMEAccept] = None, + mime_types: MIMEAccept | None = None, **response_kwargs: Any, ) -> OPDSEntryResponse: serializer = get_serializer(mime_types) diff --git a/core/feed/serializer/base.py b/core/feed/serializer/base.py index 5f07345781..7043a5c643 100644 --- a/core/feed/serializer/base.py +++ b/core/feed/serializer/base.py @@ -1,5 +1,5 @@ from abc import ABC, abstractmethod -from typing import Generic, List, Optional, TypeVar +from typing import Generic, TypeVar from core.feed.types import FeedData, WorkEntryData from core.util.opds_writer import OPDSMessage @@ -15,7 +15,7 @@ def to_string(cls, data: T) -> str: @abstractmethod def serialize_feed( - self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None + self, feed: FeedData, precomposed_entries: list[OPDSMessage] | None = None ) -> str: ... 
diff --git a/core/feed/serializer/opds.py b/core/feed/serializer/opds.py index aa4c08c484..76d78c377d 100644 --- a/core/feed/serializer/opds.py +++ b/core/feed/serializer/opds.py @@ -2,7 +2,7 @@ import datetime from functools import partial -from typing import Any, Dict, List, Optional, cast +from typing import Any, cast from lxml import etree @@ -59,21 +59,19 @@ def __init__(self) -> None: pass def _tag( - self, tag_name: str, *args: Any, mapping: Optional[Dict[str, str]] = None + self, tag_name: str, *args: Any, mapping: dict[str, str] | None = None ) -> etree._Element: if not mapping: mapping = TAG_MAPPING return self.E(mapping.get(tag_name, tag_name), *args) - def _attr_name( - self, attr_name: str, mapping: Optional[Dict[str, str]] = None - ) -> str: + def _attr_name(self, attr_name: str, mapping: dict[str, str] | None = None) -> str: if not mapping: mapping = ATTRIBUTE_MAPPING return mapping.get(attr_name, attr_name) def serialize_feed( - self, feed: FeedData, precomposed_entries: Optional[List[OPDSMessage]] = None + self, feed: FeedData, precomposed_entries: list[OPDSMessage] | None = None ) -> str: # First we do metadata serialized = self.E.feed() @@ -114,7 +112,7 @@ def serialize_feed( etree.indent(serialized) return self.to_string(serialized) - def _serialize_feed_metadata(self, metadata: FeedMetadata) -> List[etree._Element]: + def _serialize_feed_metadata(self, metadata: FeedMetadata) -> list[etree._Element]: tags = [] # Compulsory title tags.append(self._tag("title", metadata.title or "")) @@ -257,14 +255,14 @@ def serialize_work_entry(self, feed_entry: WorkEntryData) -> etree._Element: entry.append(self._serialize_author_tag("contributor", contributor)) for link in feed_entry.image_links: - entry.append(OPDSFeed.link(**link.dict())) + entry.append(OPDSFeed.link(**link.asdict())) for link in feed_entry.acquisition_links: element = self._serialize_acquistion_link(link) entry.append(element) for link in feed_entry.other_links: - entry.append(OPDSFeed.link(**link.dict())) + entry.append(OPDSFeed.link(**link.asdict())) return entry diff --git a/core/feed/serializer/opds2.py b/core/feed/serializer/opds2.py index 91fe915cb2..007e5fadef 100644 --- a/core/feed/serializer/opds2.py +++ b/core/feed/serializer/opds2.py @@ -1,6 +1,6 @@ import json from collections import defaultdict -from typing import Any, Dict, List, Optional +from typing import Any from core.feed.serializer.base import SerializerInterface from core.feed.types import ( @@ -32,14 +32,14 @@ } -class OPDS2Serializer(SerializerInterface[Dict[str, Any]]): +class OPDS2Serializer(SerializerInterface[dict[str, Any]]): def __init__(self) -> None: pass def serialize_feed( - self, feed: FeedData, precomposed_entries: Optional[List[Any]] = None + self, feed: FeedData, precomposed_entries: list[Any] | None = None ) -> str: - serialized: Dict[str, Any] = {"publications": []} + serialized: dict[str, Any] = {"publications": []} serialized["metadata"] = self._serialize_metadata(feed) for entry in feed.entries: @@ -51,20 +51,20 @@ def serialize_feed( return self.to_string(serialized) - def _serialize_metadata(self, feed: FeedData) -> Dict[str, Any]: + def _serialize_metadata(self, feed: FeedData) -> dict[str, Any]: fmeta = feed.metadata - metadata: Dict[str, Any] = {} + metadata: dict[str, Any] = {} if fmeta.title: metadata["title"] = fmeta.title if fmeta.items_per_page is not None: metadata["itemsPerPage"] = fmeta.items_per_page return metadata - def serialize_opds_message(self, entry: OPDSMessage) -> Dict[str, Any]: + def 
serialize_opds_message(self, entry: OPDSMessage) -> dict[str, Any]: return dict(urn=entry.urn, description=entry.message) - def serialize_work_entry(self, data: WorkEntryData) -> Dict[str, Any]: - metadata: Dict[str, Any] = {} + def serialize_work_entry(self, data: WorkEntryData) -> dict[str, Any]: + metadata: dict[str, Any] = {} if data.additionalType: metadata["@type"] = data.additionalType @@ -126,7 +126,7 @@ def serialize_work_entry(self, data: WorkEntryData) -> Dict[str, Any]: publication = {"metadata": metadata, "links": links, "images": images} return publication - def _serialize_link(self, link: Link) -> Dict[str, Any]: + def _serialize_link(self, link: Link) -> dict[str, Any]: serialized = {"href": link.href, "rel": link.rel} if link.type: serialized["type"] = link.type @@ -134,18 +134,18 @@ def _serialize_link(self, link: Link) -> Dict[str, Any]: serialized["title"] = link.title return serialized - def _serialize_acquisition_link(self, link: Acquisition) -> Dict[str, Any]: + def _serialize_acquisition_link(self, link: Acquisition) -> dict[str, Any]: item = self._serialize_link(link) - def _indirect(indirect: IndirectAcquisition) -> Dict[str, Any]: - result: Dict[str, Any] = dict(type=indirect.type) + def _indirect(indirect: IndirectAcquisition) -> dict[str, Any]: + result: dict[str, Any] = dict(type=indirect.type) if indirect.children: result["child"] = [] for child in indirect.children: result["child"].append(_indirect(child)) return result - props: Dict[str, Any] = {} + props: dict[str, Any] = {} if link.availability_status: state = link.availability_status if link.is_loan: @@ -180,12 +180,12 @@ def _indirect(indirect: IndirectAcquisition) -> Dict[str, Any]: return item - def _serialize_feed_links(self, feed: FeedData) -> Dict[str, Any]: - link_data: Dict[str, List[Dict[str, Any]]] = {"links": [], "facets": []} + def _serialize_feed_links(self, feed: FeedData) -> dict[str, Any]: + link_data: dict[str, list[dict[str, Any]]] = {"links": [], "facets": []} for link in feed.links: link_data["links"].append(self._serialize_link(link)) - facet_links: Dict[str, Any] = defaultdict(lambda: {"metadata": {}, "links": []}) + facet_links: dict[str, Any] = defaultdict(lambda: {"metadata": {}, "links": []}) for link in feed.facet_links: group = getattr(link, "facetGroup", None) if group: @@ -196,8 +196,8 @@ def _serialize_feed_links(self, feed: FeedData) -> Dict[str, Any]: return link_data - def _serialize_contributor(self, author: Author) -> Dict[str, Any]: - result: Dict[str, Any] = {"name": author.name} + def _serialize_contributor(self, author: Author) -> dict[str, Any]: + result: dict[str, Any] = {"name": author.name} if author.sort_name: result["sortAs"] = author.sort_name if author.link: @@ -211,5 +211,5 @@ def content_type(self) -> str: return "application/opds+json" @classmethod - def to_string(cls, data: Dict[str, Any]) -> str: + def to_string(cls, data: dict[str, Any]) -> str: return json.dumps(data, indent=2) diff --git a/core/feed/types.py b/core/feed/types.py index cdf5207bd5..18cbaaaec7 100644 --- a/core/feed/types.py +++ b/core/feed/types.py @@ -1,8 +1,9 @@ from __future__ import annotations +from collections.abc import Generator from dataclasses import dataclass, field from datetime import date, datetime -from typing import Any, Dict, Generator, List, Optional, Tuple, cast +from typing import Any, cast from typing_extensions import Self @@ -15,7 +16,7 @@ @dataclass class BaseModel: - def _vars(self) -> Generator[Tuple[str, Any], None, None]: + def _vars(self) -> 
Generator[tuple[str, Any], None, None]: """Yield attributes as a tuple""" _attrs = vars(self) for name, value in _attrs.items(): @@ -25,17 +26,17 @@ def _vars(self) -> Generator[Tuple[str, Any], None, None]: continue yield name, value - def dict(self) -> Dict[str, Any]: + def asdict(self) -> dict[str, Any]: """Dataclasses do not return undefined attributes via `asdict` so we must implement this ourselves""" attrs = {} for name, value in self: if isinstance(value, BaseModel): - attrs[name] = value.dict() + attrs[name] = value.asdict() else: attrs[name] = value return attrs - def __iter__(self) -> Generator[Tuple[str, Any], None, None]: + def __iter__(self) -> Generator[tuple[str, Any], None, None]: """Allow attribute iteration""" yield from self._vars() @@ -52,7 +53,7 @@ def get(self, name: str, *default: Any) -> Any: @dataclass class FeedEntryType(BaseModel): - text: Optional[str] = None + text: str | None = None @classmethod def create(cls, **kwargs: Any) -> Self: @@ -61,11 +62,11 @@ def create(cls, **kwargs: Any) -> Self: obj.add_attributes(kwargs) return obj - def add_attributes(self, attrs: Dict[str, Any]) -> None: + def add_attributes(self, attrs: dict[str, Any]) -> None: for name, data in attrs.items(): setattr(self, name, data) - def children(self) -> Generator[Tuple[str, FeedEntryType], None, None]: + def children(self) -> Generator[tuple[str, FeedEntryType], None, None]: """Yield all FeedEntryType attributes""" for name, value in self: if isinstance(value, self.__class__): @@ -75,24 +76,24 @@ def children(self) -> Generator[Tuple[str, FeedEntryType], None, None]: @dataclass class Link(FeedEntryType): - href: Optional[str] = None - rel: Optional[str] = None - type: Optional[str] = None + href: str | None = None + rel: str | None = None + type: str | None = None # Additional types - role: Optional[str] = None - title: Optional[str] = None + role: str | None = None + title: str | None = None - def dict(self) -> Dict[str, Any]: + def asdict(self) -> dict[str, Any]: """A dict without None values""" - d = super().dict() + d = super().asdict() santized = {} for k, v in d.items(): if v is not None: santized[k] = v return santized - def link_attribs(self) -> Dict[str, Any]: + def link_attribs(self) -> dict[str, Any]: d = dict(href=self.href) for key in ["rel", "type"]: if (value := getattr(self, key, None)) is not None: @@ -102,28 +103,28 @@ def link_attribs(self) -> Dict[str, Any]: @dataclass class IndirectAcquisition(BaseModel): - type: Optional[str] = None - children: List[IndirectAcquisition] = field(default_factory=list) + type: str | None = None + children: list[IndirectAcquisition] = field(default_factory=list) @dataclass class Acquisition(Link): - holds_position: Optional[str] = None - holds_total: Optional[str] = None + holds_position: str | None = None + holds_total: str | None = None - copies_available: Optional[str] = None - copies_total: Optional[str] = None + copies_available: str | None = None + copies_total: str | None = None - availability_status: Optional[str] = None - availability_since: Optional[str] = None - availability_until: Optional[str] = None + availability_status: str | None = None + availability_since: str | None = None + availability_until: str | None = None - rights: Optional[str] = None + rights: str | None = None - lcp_hashed_passphrase: Optional[FeedEntryType] = None - drm_licensor: Optional[FeedEntryType] = None + lcp_hashed_passphrase: FeedEntryType | None = None + drm_licensor: FeedEntryType | None = None - indirect_acquisitions: 
List[IndirectAcquisition] = field(default_factory=list) + indirect_acquisitions: list[IndirectAcquisition] = field(default_factory=list) # Signal if the acquisition is for a loan or a hold for the patron is_loan: bool = False @@ -132,47 +133,47 @@ class Acquisition(Link): @dataclass class Author(FeedEntryType): - name: Optional[str] = None - sort_name: Optional[str] = None - viaf: Optional[str] = None - role: Optional[str] = None - family_name: Optional[str] = None - wikipedia_name: Optional[str] = None - lc: Optional[str] = None - link: Optional[Link] = None + name: str | None = None + sort_name: str | None = None + viaf: str | None = None + role: str | None = None + family_name: str | None = None + wikipedia_name: str | None = None + lc: str | None = None + link: Link | None = None @dataclass class WorkEntryData(BaseModel): """All the metadata possible for a work. This is not a FeedEntryType because we want strict control.""" - additionalType: Optional[str] = None - identifier: Optional[str] = None - pwid: Optional[str] = None - issued: Optional[datetime | date] = None - duration: Optional[float] = None - - summary: Optional[FeedEntryType] = None - language: Optional[FeedEntryType] = None - publisher: Optional[FeedEntryType] = None - published: Optional[FeedEntryType] = None - updated: Optional[FeedEntryType] = None - title: Optional[FeedEntryType] = None - sort_title: Optional[FeedEntryType] = None - subtitle: Optional[FeedEntryType] = None - series: Optional[FeedEntryType] = None - imprint: Optional[FeedEntryType] = None - - authors: List[Author] = field(default_factory=list) - contributors: List[Author] = field(default_factory=list) - categories: List[FeedEntryType] = field(default_factory=list) - ratings: List[FeedEntryType] = field(default_factory=list) - distribution: Optional[FeedEntryType] = None + additionalType: str | None = None + identifier: str | None = None + pwid: str | None = None + issued: datetime | date | None = None + duration: float | None = None + + summary: FeedEntryType | None = None + language: FeedEntryType | None = None + publisher: FeedEntryType | None = None + published: FeedEntryType | None = None + updated: FeedEntryType | None = None + title: FeedEntryType | None = None + sort_title: FeedEntryType | None = None + subtitle: FeedEntryType | None = None + series: FeedEntryType | None = None + imprint: FeedEntryType | None = None + + authors: list[Author] = field(default_factory=list) + contributors: list[Author] = field(default_factory=list) + categories: list[FeedEntryType] = field(default_factory=list) + ratings: list[FeedEntryType] = field(default_factory=list) + distribution: FeedEntryType | None = None # Links - acquisition_links: List[Acquisition] = field(default_factory=list) - image_links: List[Link] = field(default_factory=list) - other_links: List[Link] = field(default_factory=list) + acquisition_links: list[Acquisition] = field(default_factory=list) + image_links: list[Link] = field(default_factory=list) + other_links: list[Link] = field(default_factory=list) @dataclass @@ -180,17 +181,17 @@ class WorkEntry(BaseModel): work: Work edition: Edition identifier: Identifier - license_pool: Optional[LicensePool] = None + license_pool: LicensePool | None = None # Actual, computed feed data - computed: Optional[WorkEntryData] = None + computed: WorkEntryData | None = None def __init__( self, - work: Optional[Work] = None, - edition: Optional[Edition] = None, - identifier: Optional[Identifier] = None, - license_pool: Optional[LicensePool] = None, + work: 
Work | None = None, + edition: Edition | None = None, + identifier: Identifier | None = None, + license_pool: LicensePool | None = None, ) -> None: if None in (work, edition, identifier): raise ValueError( @@ -204,13 +205,13 @@ def __init__( @dataclass class FeedMetadata(BaseModel): - title: Optional[str] = None - id: Optional[str] = None - updated: Optional[str] = None - items_per_page: Optional[int] = None - patron: Optional[FeedEntryType] = None - drm_licensor: Optional[FeedEntryType] = None - lcp_hashed_passphrase: Optional[FeedEntryType] = None + title: str | None = None + id: str | None = None + updated: str | None = None + items_per_page: int | None = None + patron: FeedEntryType | None = None + drm_licensor: FeedEntryType | None = None + lcp_hashed_passphrase: FeedEntryType | None = None class DataEntryTypes: @@ -221,21 +222,21 @@ class DataEntryTypes: class DataEntry(FeedEntryType): """Other kinds of information, like entries of a navigation feed""" - type: Optional[str] = None - title: Optional[str] = None - id: Optional[str] = None - links: List[Link] = field(default_factory=list) + type: str | None = None + title: str | None = None + id: str | None = None + links: list[Link] = field(default_factory=list) @dataclass class FeedData(BaseModel): - links: List[Link] = field(default_factory=list) - breadcrumbs: List[Link] = field(default_factory=list) - facet_links: List[Link] = field(default_factory=list) - entries: List[WorkEntry] = field(default_factory=list) - data_entries: List[DataEntry] = field(default_factory=list) + links: list[Link] = field(default_factory=list) + breadcrumbs: list[Link] = field(default_factory=list) + facet_links: list[Link] = field(default_factory=list) + entries: list[WorkEntry] = field(default_factory=list) + data_entries: list[DataEntry] = field(default_factory=list) metadata: FeedMetadata = field(default_factory=lambda: FeedMetadata()) - entrypoint: Optional[str] = None + entrypoint: str | None = None class Config: arbitrary_types_allowed = True diff --git a/core/feed/util.py b/core/feed/util.py index 5519f0a5b5..808201a0f0 100644 --- a/core/feed/util.py +++ b/core/feed/util.py @@ -1,5 +1,4 @@ import datetime -from typing import Union import pytz @@ -7,7 +6,7 @@ TIME_FORMAT_NAIVE = "%Y-%m-%dT%H:%M:%SZ" -def strftime(date: Union[datetime.datetime, datetime.date]) -> str: +def strftime(date: datetime.datetime | datetime.date) -> str: """ Format a date for the OPDS feeds. diff --git a/core/integration/base.py b/core/integration/base.py index bd01635f2f..606648120b 100644 --- a/core/integration/base.py +++ b/core/integration/base.py @@ -1,17 +1,8 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generic, - Mapping, - Optional, - Protocol, - Type, - TypeVar, -) +from collections.abc import Mapping +from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar from sqlalchemy.orm import Session from sqlalchemy.orm.attributes import Mapped, flag_modified @@ -23,15 +14,15 @@ class IntegrationConfigurationProtocol(Protocol): - settings_dict: Mapped[Dict[str, Any]] + settings_dict: Mapped[dict[str, Any]] T = TypeVar("T", bound=BaseSettings) def integration_settings_load( - settings_cls: Type[T], - integration: IntegrationConfigurationProtocol | Dict[str, Any], + settings_cls: type[T], + integration: IntegrationConfigurationProtocol | dict[str, Any], ) -> T: """ Load the settings object for an integration from the database. 
@@ -54,7 +45,7 @@ def integration_settings_load( def integration_settings_update( - settings_cls: Type[BaseSettings], + settings_cls: type[BaseSettings], integration: IntegrationConfigurationProtocol, new_settings: BaseSettings | Mapping[str, Any], merge: bool = False, @@ -103,7 +94,7 @@ def description(cls) -> str: @classmethod @abstractmethod - def settings_class(cls) -> Type[SettingsType]: + def settings_class(cls) -> type[SettingsType]: """Get the settings for this integration""" ... @@ -149,7 +140,7 @@ class HasLibraryIntegrationConfiguration( ): @classmethod @abstractmethod - def library_settings_class(cls) -> Type[LibrarySettingsType]: + def library_settings_class(cls) -> type[LibrarySettingsType]: """Get the library settings for this integration""" ... @@ -188,7 +179,7 @@ class HasChildIntegrationConfiguration( ): @classmethod @abstractmethod - def child_settings_class(cls) -> Type[ChildSettingsType]: + def child_settings_class(cls) -> type[ChildSettingsType]: """Get the child settings class""" ... @@ -203,7 +194,7 @@ def child_settings_load(cls, child: IntegrationConfiguration) -> ChildSettingsTy def settings_load( cls, integration: IntegrationConfiguration, - parent: Optional[IntegrationConfiguration] = None, + parent: IntegrationConfiguration | None = None, ) -> SettingsType: """ Load the full settings object for this integration from the database. diff --git a/core/integration/registry.py b/core/integration/registry.py index ac54fa39c4..a0b55e5720 100644 --- a/core/integration/registry.py +++ b/core/integration/registry.py @@ -1,19 +1,8 @@ from __future__ import annotations from collections import defaultdict -from typing import ( - Dict, - Generic, - Iterator, - List, - Optional, - Set, - Tuple, - Type, - TypeVar, - Union, - overload, -) +from collections.abc import Iterator +from typing import Generic, TypeVar, overload from core.integration.goals import Goals @@ -26,10 +15,10 @@ class IntegrationRegistryException(ValueError): class IntegrationRegistry(Generic[T]): - def __init__(self, goal: Goals, integrations: Optional[Dict[str, Type[T]]] = None): + def __init__(self, goal: Goals, integrations: dict[str, type[T]] | None = None): """Initialize a new IntegrationRegistry.""" - self._lookup: Dict[str, Type[T]] = {} - self._reverse_lookup: Dict[Type[T], List[str]] = defaultdict(list) + self._lookup: dict[str, type[T]] = {} + self._reverse_lookup: dict[type[T], list[str]] = defaultdict(list) self.goal = goal if integrations: @@ -38,11 +27,11 @@ def __init__(self, goal: Goals, integrations: Optional[Dict[str, Type[T]]] = Non def register( self, - integration: Type[T], + integration: type[T], *, - canonical: Optional[str] = None, - aliases: Optional[List[str]] = None, - ) -> Type[T]: + canonical: str | None = None, + aliases: list[str] | None = None, + ) -> type[T]: """ Register an integration class. @@ -72,29 +61,29 @@ def register( return integration @overload - def get(self, protocol: str, default: None = ...) -> Type[T] | None: + def get(self, protocol: str, default: None = ...) -> type[T] | None: ... @overload - def get(self, protocol: str, default: V) -> Type[T] | V: + def get(self, protocol: str, default: V) -> type[T] | V: ... 
- def get(self, protocol: str, default: V | None = None) -> Type[T] | V | None: + def get(self, protocol: str, default: V | None = None) -> type[T] | V | None: """Look up an integration class by protocol.""" if protocol not in self._lookup: return default return self[protocol] @overload - def get_protocol(self, integration: Type[T], default: None = ...) -> str | None: + def get_protocol(self, integration: type[T], default: None = ...) -> str | None: ... @overload - def get_protocol(self, integration: Type[T], default: V) -> str | V: + def get_protocol(self, integration: type[T], default: V) -> str | V: ... def get_protocol( - self, integration: Type[T], default: V | None = None + self, integration: type[T], default: V | None = None ) -> str | V | None: """Look up the canonical protocol for an integration class.""" names = self.get_protocols(integration, default) @@ -104,24 +93,24 @@ def get_protocol( @overload def get_protocols( - self, integration: Type[T], default: None = ... - ) -> List[str] | None: + self, integration: type[T], default: None = ... + ) -> list[str] | None: ... @overload - def get_protocols(self, integration: Type[T], default: V) -> List[str] | V: + def get_protocols(self, integration: type[T], default: V) -> list[str] | V: ... def get_protocols( - self, integration: Type[T], default: V | None = None - ) -> List[str] | V | None: + self, integration: type[T], default: V | None = None + ) -> list[str] | V | None: """Look up all protocols for an integration class.""" if integration not in self._reverse_lookup: return default return self._reverse_lookup[integration] @property - def integrations(self) -> Set[Type[T]]: + def integrations(self) -> set[type[T]]: """Return a set of all registered canonical protocols.""" return set(self._reverse_lookup.keys()) @@ -137,11 +126,11 @@ def update(self, other: IntegrationRegistry[T]) -> None: assert isinstance(names, list) self.register(integration, canonical=names[0], aliases=names[1:]) - def __iter__(self) -> Iterator[Tuple[str, Type[T]]]: + def __iter__(self) -> Iterator[tuple[str, type[T]]]: for integration, names in self._reverse_lookup.items(): yield names[0], integration - def __getitem__(self, protocol: str) -> Type[T]: + def __getitem__(self, protocol: str) -> type[T]: """Look up an integration class by protocol, using the [] operator.""" return self._lookup[protocol] @@ -156,15 +145,13 @@ def __contains__(self, name: str) -> bool: def __repr__(self) -> str: return f"" - def __add__( - self, other: IntegrationRegistry[V] - ) -> IntegrationRegistry[Union[T, V]]: + def __add__(self, other: IntegrationRegistry[V]) -> IntegrationRegistry[T | V]: if not isinstance(other, IntegrationRegistry): raise TypeError( f"unsupported operand type(s) for +: 'IntegrationRegistry' and '{type(other).__name__}'" ) - new: IntegrationRegistry[Union[T, V]] = IntegrationRegistry(self.goal) + new: IntegrationRegistry[T | V] = IntegrationRegistry(self.goal) new.update(self) new.update(other) return new diff --git a/core/integration/settings.py b/core/integration/settings.py index 87d5392ad0..bf857839ee 100644 --- a/core/integration/settings.py +++ b/core/integration/settings.py @@ -1,18 +1,9 @@ from __future__ import annotations +from collections.abc import Callable, Mapping from dataclasses import dataclass from enum import Enum -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - List, - Mapping, - Optional, - Tuple, - Union, -) +from typing import TYPE_CHECKING, Any, Union from pydantic import ( BaseModel, @@ -74,29 +65,29 @@ def 
FormField( default: Any = Undefined, *, form: ConfigurationFormItem = None, # type: ignore[assignment] - default_factory: Optional[NoArgAnyCallable] = None, - alias: Optional[str] = None, - title: Optional[str] = None, - description: Optional[str] = None, - exclude: Union[AbstractSetIntStr, MappingIntStrAny, Any] = None, - include: Union[AbstractSetIntStr, MappingIntStrAny, Any] = None, - const: Optional[bool] = None, - gt: Optional[float] = None, - ge: Optional[float] = None, - lt: Optional[float] = None, - le: Optional[float] = None, - multiple_of: Optional[float] = None, - allow_inf_nan: Optional[bool] = None, - max_digits: Optional[int] = None, - decimal_places: Optional[int] = None, - min_items: Optional[int] = None, - max_items: Optional[int] = None, - unique_items: Optional[bool] = None, - min_length: Optional[int] = None, - max_length: Optional[int] = None, + default_factory: NoArgAnyCallable | None = None, + alias: str | None = None, + title: str | None = None, + description: str | None = None, + exclude: AbstractSetIntStr | MappingIntStrAny | Any = None, + include: AbstractSetIntStr | MappingIntStrAny | Any = None, + const: bool | None = None, + gt: float | None = None, + ge: float | None = None, + lt: float | None = None, + le: float | None = None, + multiple_of: float | None = None, + allow_inf_nan: bool | None = None, + max_digits: int | None = None, + decimal_places: int | None = None, + min_items: int | None = None, + max_items: int | None = None, + unique_items: bool | None = None, + min_length: int | None = None, + max_length: int | None = None, allow_mutation: bool = True, - regex: Optional[str] = None, - discriminator: Optional[str] = None, + regex: str | None = None, + discriminator: str | None = None, repr: bool = True, **extra: Any, ) -> Any: @@ -213,13 +204,13 @@ def get_form_value(value: Any) -> Any: def to_dict( self, db: Session, key: str, required: bool = False, default: Any = None - ) -> Tuple[int, Dict[str, Any]]: + ) -> tuple[int, dict[str, Any]]: """ Convert the ConfigurationFormItem to a dictionary The dictionary is in the format expected by the admin interface. """ - form_entry: Dict[str, Any] = { + form_entry: dict[str, Any] = { "label": self.label, "key": key, "required": required or self.required, @@ -265,7 +256,7 @@ class MySettings(BaseSettings): """ @root_validator(pre=True) - def extra_args(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def extra_args(cls, values: dict[str, Any]) -> dict[str, Any]: # We log any extra arguments that are passed to the model, but # we don't raise an error, these arguments may be old configuration # settings that have not been cleaned up by a migration yet. @@ -310,8 +301,19 @@ class Config: # not the alias. allow_population_by_field_name = True + # If your settings class needs additional form fields that are not + # defined on the model, you can add them here. This is useful if you + # need to add a custom form field, but don't want the data in the field + # to be stored on the model in the database. For example, if you want + # to add a custom form field that allows the user to upload an image, but + # want to store that image data outside the settings model. + # + # The key for the dictionary should be the field name, and the value + # should be a ConfigurationFormItem object that defines the form field. 
+ _additional_form_fields: dict[str, ConfigurationFormItem] = {} + @classmethod - def configuration_form(cls, db: Session) -> List[Dict[str, Any]]: + def configuration_form(cls, db: Session) -> list[dict[str, Any]]: """Get the configuration dictionary for this class""" config = [] for field in cls.__fields__.values(): @@ -332,7 +334,7 @@ def configuration_form(cls, db: Session) -> List[Dict[str, Any]]: config.sort(key=lambda x: x[0]) return [item[1] for item in config] - def dict(self, *args: Any, **kwargs: Any) -> Dict[str, Any]: + def dict(self, *args: Any, **kwargs: Any) -> dict[str, Any]: """Override the dict method to remove the default values""" if "exclude_defaults" not in kwargs: kwargs["exclude_defaults"] = True @@ -352,17 +354,6 @@ def get_form_field_label(cls, field_name: str) -> str: else: return field_name - # If your settings class needs additional form fields that are not - # defined on the model, you can add them here. This is useful if you - # need to add a custom form field, but don't want the data in the field - # to be stored on the model in the database. For example, if you want - # to add a custom form field that allows the user to upload an image, but - # want to store that image data outside the settings model. - # - # The key for the dictionary should be the field name, and the value - # should be a ConfigurationFormItem object that defines the form field. - _additional_form_fields: Dict[str, ConfigurationFormItem] = {} - def __init__(self, **data: Any): """ Override the init method to return our custom ProblemError diff --git a/core/jobs/integration_test.py b/core/jobs/integration_test.py index b4fd8a7c93..6128ca9e54 100644 --- a/core/jobs/integration_test.py +++ b/core/jobs/integration_test.py @@ -5,7 +5,7 @@ from dataclasses import dataclass from json import JSONDecodeError from ssl import get_server_certificate -from typing import Any, List, cast +from typing import Any, cast from urllib.parse import urlparse import pytz @@ -97,7 +97,7 @@ def arg_parser(cls): # pragma: no cover def _read_config( self, filepath: str, key_file: str | None = None, raw: bool = False - ) -> List | bytes: + ) -> list | bytes: """Read the config yml from a source. The file should be a yml with a list of IntegrationTestDetails as the content. 
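Stepping back to the `_additional_form_fields` hook relocated in core/integration/settings.py above, a minimal sketch of the intended usage; the settings class, field names, and labels here are invented for illustration and are not part of the patch:

from core.integration.settings import (
    BaseSettings,
    ConfigurationFormItem,
    FormField,
)


class ExampleSettings(BaseSettings):
    # A normal setting: persisted in the integration's settings dictionary.
    external_account_id: str | None = FormField(
        None,
        form=ConfigurationFormItem(label="Account ID"),
    )

    # An extra admin form input; it is not a pydantic field, so the submitted
    # value is never stored on the settings model itself.
    _additional_form_fields = {
        "logo_image": ConfigurationFormItem(label="Logo image"),
    }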
:param filepath: The path to the file, could be an URL or a file on the local directory @@ -157,7 +157,7 @@ def do_run(self) -> None: self._encrypt(args.config, args.key_file, args.encrypt_file) return - data = cast(List[dict], self._read_config(args.config, key_file=args.key_file)) + data = cast(list[dict], self._read_config(args.config, key_file=args.key_file)) for datapoint in data: test = IntegrationTestDetails(**datapoint) diff --git a/core/jobs/patron_activity_sync.py b/core/jobs/patron_activity_sync.py index 5bd168ce8b..2cdf71f1f6 100644 --- a/core/jobs/patron_activity_sync.py +++ b/core/jobs/patron_activity_sync.py @@ -1,5 +1,4 @@ from datetime import timedelta -from typing import List, Optional from sqlalchemy import or_ from sqlalchemy.orm import Query @@ -16,7 +15,7 @@ class PatronActivitySyncNotificationScript(PatronSweepMonitor): and notify said patron devices to re-sync their data""" STALE_ACTIVITY_SYNC_DAYS = 2 - SERVICE_NAME: Optional[str] = "Patron Activity Sync Notification" + SERVICE_NAME: str | None = "Patron Activity Sync Notification" def item_query(self) -> Query: expired_sync = utc_now() - timedelta(days=self.STALE_ACTIVITY_SYNC_DAYS) @@ -36,5 +35,5 @@ def item_query(self) -> Query: ) return query - def process_items(self, items: List[Patron]) -> None: + def process_items(self, items: list[Patron]) -> None: PushNotifications.send_activity_sync_message(items) diff --git a/core/jobs/playtime_entries.py b/core/jobs/playtime_entries.py index 4d4ed5d184..7865187e7d 100644 --- a/core/jobs/playtime_entries.py +++ b/core/jobs/playtime_entries.py @@ -6,7 +6,7 @@ from collections import defaultdict from datetime import datetime, timedelta from tempfile import TemporaryFile -from typing import TYPE_CHECKING, Optional, cast +from typing import TYPE_CHECKING, cast import dateutil.parser import pytz @@ -174,8 +174,8 @@ def do_run(self): identifier_id, total, ) in self._fetch_report_records(start=start, until=until): - edition: Optional[Edition] = None - identifier: Optional[Identifier] = None + edition: Edition | None = None + identifier: Identifier | None = None if identifier_id: edition = get_one( self._db, Edition, primary_identifier_id=identifier_id @@ -241,7 +241,7 @@ def _fetch_report_records(self, start: datetime, until: datetime) -> Query: @staticmethod def _isbn_for_identifier( - identifier: Optional[Identifier], + identifier: Identifier | None, /, *, default_value: str = "", diff --git a/core/lane.py b/core/lane.py index 3d252d52ca..4d04fcd3f9 100644 --- a/core/lane.py +++ b/core/lane.py @@ -4,7 +4,7 @@ import logging import time from collections import defaultdict -from typing import Any, List, Optional +from typing import Any from urllib.parse import quote_plus from flask_babel import lazy_gettext as _ @@ -1322,7 +1322,7 @@ class WorkList: # If a certain type of Worklist should always have its OPDS feeds # cached under a specific type, define that type as # CACHED_FEED_TYPE. - CACHED_FEED_TYPE: Optional[str] = None + CACHED_FEED_TYPE: str | None = None # By default, a WorkList is always visible. @property @@ -2611,7 +2611,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): size_by_entrypoint = Column(JSON, nullable=True) # A lane may have one parent lane and many sublanes. - sublanes: Mapped[List[Lane]] = relationship( + sublanes: Mapped[list[Lane]] = relationship( "Lane", backref=backref("parent", remote_side=[id]), ) @@ -2619,7 +2619,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): # A lane may have multiple associated LaneGenres. 
For most lanes, # this is how the contents of the lanes are defined. genres = association_proxy("lane_genres", "genre", creator=LaneGenre.from_genre) - lane_genres: Mapped[List[LaneGenre]] = relationship( + lane_genres: Mapped[list[LaneGenre]] = relationship( "LaneGenre", foreign_keys="LaneGenre.lane_id", backref="lane", @@ -2678,7 +2678,7 @@ class Lane(Base, DatabaseBackedWorkList, HierarchyWorkList): ) # Only the books on these specific CustomLists will be shown. - customlists: Mapped[List[CustomList]] = relationship( + customlists: Mapped[list[CustomList]] = relationship( "CustomList", secondary=lambda: lanes_customlists, backref="lane" # type: ignore ) diff --git a/core/lcp/credential.py b/core/lcp/credential.py index f88224521c..3ba1a42960 100644 --- a/core/lcp/credential.py +++ b/core/lcp/credential.py @@ -1,6 +1,6 @@ import logging from enum import Enum -from typing import Any, Optional +from typing import Any from sqlalchemy.orm import Session @@ -67,7 +67,7 @@ def _get_or_create_persistent_token( data_source_type: Any, credential_type: Any, commit: bool, - value: Optional[str] = None, + value: str | None = None, ) -> Any: """Gets or creates a new persistent token diff --git a/core/marc.py b/core/marc.py index efc8a3b04a..c13792c946 100644 --- a/core/marc.py +++ b/core/marc.py @@ -2,9 +2,9 @@ import re import urllib.parse +from collections.abc import Mapping from datetime import datetime from io import BytesIO -from typing import List, Mapping, Optional, Tuple from uuid import UUID, uuid4 import pytz @@ -56,7 +56,7 @@ class Annotator(LoggerMixin): # TODO: Add remaining formats. Maybe there's a better place to # store this so it's easier to keep up-to-date. # There doesn't seem to be any particular vocabulary for this. - FORMAT_TERMS: Mapping[Tuple[Optional[str], Optional[str]], str] = { + FORMAT_TERMS: Mapping[tuple[str | None, str | None], str] = { (Representation.EPUB_MEDIA_TYPE, DeliveryMechanism.NO_DRM): "EPUB eBook", ( Representation.EPUB_MEDIA_TYPE, @@ -70,8 +70,8 @@ def __init__( self, cm_url: str, library_short_name: str, - web_client_urls: List[str], - organization_code: Optional[str], + web_client_urls: list[str], + organization_code: str | None, include_summary: bool, include_genres: bool, ) -> None: @@ -561,7 +561,7 @@ def add_web_client_urls( identifier: Identifier, library_short_name: str, cm_url: str, - web_client_urls: List[str], + web_client_urls: list[str], ) -> None: qualified_identifier = urllib.parse.quote( f"{identifier.type}/{identifier.identifier}", safe="" @@ -604,7 +604,7 @@ class MarcExporterLibrarySettings(BaseSettings): # MARC organization codes are assigned by the # Library of Congress and can be found here: # http://www.loc.gov/marc/organizations/org-search.php - organization_code: Optional[str] = FormField( + organization_code: str | None = FormField( None, form=ConfigurationFormItem( label="The MARC organization code for this library (003 field).", @@ -614,7 +614,7 @@ class MarcExporterLibrarySettings(BaseSettings): alias="marc_organization_code", ) - web_client_url: Optional[str] = FormField( + web_client_url: str | None = FormField( None, form=ConfigurationFormItem( label="The base URL for the web catalog for this library, for the 856 field.", @@ -690,7 +690,7 @@ def create_record( revised: bool, work: Work, annotator: Annotator, - ) -> Optional[Record]: + ) -> Record | None: """Build a complete MARC record for a given work.""" pool = work.active_license_pool() if not pool: @@ -711,7 +711,7 @@ def _file_key( library: Library, collection: 
Collection, creation_time: datetime, - since_time: Optional[datetime] = None, + since_time: datetime | None = None, ) -> str: """The path to the hosted MARC file for the given library, collection, and date range.""" @@ -733,7 +733,7 @@ def _file_key( def query_works( self, collection: Collection, - since_time: Optional[datetime], + since_time: datetime | None, creation_time: datetime, batch_size: int, ) -> ScalarResult: @@ -759,7 +759,7 @@ def records( annotator: Annotator, *, creation_time: datetime, - since_time: Optional[datetime] = None, + since_time: datetime | None = None, batch_size: int = 500, ) -> None: """ diff --git a/core/metadata_layer.py b/core/metadata_layer.py index 51182a35f0..9368a522d6 100644 --- a/core/metadata_layer.py +++ b/core/metadata_layer.py @@ -10,7 +10,6 @@ import datetime import logging from collections import defaultdict -from typing import List, Optional from dateutil.parser import parse from dependency_injector.wiring import Provide, inject @@ -526,14 +525,14 @@ class LicenseData(LicenseFunctions): def __init__( self, identifier: str, - checkout_url: Optional[str], + checkout_url: str | None, status_url: str, status: LicenseStatus, checkouts_available: int, - expires: Optional[datetime.datetime] = None, - checkouts_left: Optional[int] = None, - terms_concurrency: Optional[int] = None, - content_types: Optional[List[str]] = None, + expires: datetime.datetime | None = None, + checkouts_left: int | None = None, + terms_concurrency: int | None = None, + content_types: list[str] | None = None, ): self.identifier = identifier self.checkout_url = checkout_url diff --git a/core/migration/migrate_external_integration.py b/core/migration/migrate_external_integration.py index a05c9a435e..fdc0be846d 100644 --- a/core/migration/migrate_external_integration.py +++ b/core/migration/migrate_external_integration.py @@ -1,6 +1,6 @@ import json from collections import defaultdict -from typing import Any, Dict, Optional, Tuple, Type, TypeVar +from typing import Any, TypeVar from sqlalchemy.engine import Connection, CursorResult, Row @@ -19,7 +19,7 @@ def _validate_and_load_settings( - settings_class: Type[T], settings_dict: Dict[str, str] + settings_class: type[T], settings_dict: dict[str, str] ) -> T: aliases = { f.alias: f.name @@ -47,14 +47,14 @@ def _validate_and_load_settings( def get_configuration_settings( connection: Connection, integration: Row, -) -> Tuple[Dict[str, str], Dict[str, Dict[str, str]], str]: +) -> tuple[dict[str, str], dict[str, dict[str, str]], str]: settings = connection.execute( "select cs.library_id, cs.key, cs.value from configurationsettings cs " "where cs.external_integration_id = (%s)", (integration.id,), ) settings_dict = {} - library_settings: Dict[str, Dict[str, str]] = defaultdict(dict) + library_settings: dict[str, dict[str, str]] = defaultdict(dict) self_test_results = json_serializer({}) for setting in settings: if not setting.value: @@ -74,11 +74,11 @@ def _migrate_external_integration( connection: Connection, name: str, protocol: str, - protocol_class: Type[HasIntegrationConfiguration[BaseSettings]], + protocol_class: type[HasIntegrationConfiguration[BaseSettings]], goal: str, - settings_dict: Dict[str, Any], + settings_dict: dict[str, Any], self_test_results: str, - context: Optional[Dict[str, Any]] = None, + context: dict[str, Any] | None = None, ) -> int: # Load and validate the settings before storing them in the database. 
settings_class = protocol_class.settings_class() @@ -105,8 +105,8 @@ def _migrate_library_settings( connection: Connection, integration_id: int, library_id: int, - library_settings: Dict[str, str], - protocol_class: Type[ + library_settings: dict[str, str], + protocol_class: type[ HasLibraryIntegrationConfiguration[BaseSettings, BaseSettings] ], ) -> None: diff --git a/core/migration/util.py b/core/migration/util.py index 1da519cd2b..dc02ee49bb 100644 --- a/core/migration/util.py +++ b/core/migration/util.py @@ -1,7 +1,7 @@ from __future__ import annotations import logging -from typing import Any, List +from typing import Any import sqlalchemy as sa @@ -11,8 +11,8 @@ def pg_update_enum( table: str, column: str, enum_name: str, - old_values: List[str], - new_values: List[str], + old_values: list[str], + new_values: list[str], ) -> None: """ Alembic migration helper function to update an enum type. diff --git a/core/model/__init__.py b/core/model/__init__.py index fa58803d01..6778bde80a 100644 --- a/core/model/__init__.py +++ b/core/model/__init__.py @@ -3,7 +3,8 @@ import json import logging import os -from typing import Any, Generator, List, Literal, Tuple, Type, TypeVar +from collections.abc import Generator +from typing import Any, List, Literal, Tuple, Type, TypeVar, Union from contextlib2 import contextmanager from psycopg2.extensions import adapt as sqlescape @@ -85,8 +86,8 @@ def flush(db): def create( - db: Session, model: Type[T], create_method="", create_method_kwargs=None, **kwargs -) -> Tuple[T, Literal[True]]: + db: Session, model: type[T], create_method="", create_method_kwargs=None, **kwargs +) -> tuple[T, Literal[True]]: kwargs.update(create_method_kwargs or {}) created = getattr(model, create_method, model)(**kwargs) db.add(created) @@ -95,7 +96,7 @@ def create( def get_one( - db: Session, model: Type[T], on_multiple="error", constraint=None, **kwargs + db: Session, model: type[T], on_multiple="error", constraint=None, **kwargs ) -> T | None: """Gets an object from the database based on its attributes. @@ -131,8 +132,8 @@ def get_one( def get_one_or_create( - db: Session, model: Type[T], create_method="", create_method_kwargs=None, **kwargs -) -> Tuple[T, bool]: + db: Session, model: type[T], create_method="", create_method_kwargs=None, **kwargs +) -> tuple[T, bool]: one = get_one(db, model, **kwargs) if one: return one, False @@ -345,8 +346,8 @@ def engine(cls, url=None): @classmethod def setup_event_listener( - cls, session: Union[Session, sessionmaker] - ) -> Union[Session, sessionmaker]: + cls, session: Session | sessionmaker + ) -> Session | sessionmaker: event.listen(session, "before_flush", Listener.before_flush_event_listener) return session @@ -482,7 +483,7 @@ def __init__( self.bulk_method = bulk_method self.bulk_method_kwargs = bulk_method_kwargs or {} self.batch_size = batch_size - self._objects: List[Base] = [] + self._objects: list[Base] = [] def __enter__(self): return self diff --git a/core/model/admin.py b/core/model/admin.py index c9db3cce5d..c286cbf053 100644 --- a/core/model/admin.py +++ b/core/model/admin.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING import bcrypt from flask_babel import lazy_gettext as _ @@ -40,7 +40,7 @@ class Admin(Base, HasSessionCache): password_hashed = Column(Unicode, index=True) # An Admin may have many roles. 
- roles: Mapped[List[AdminRole]] = relationship( + roles: Mapped[list[AdminRole]] = relationship( "AdminRole", backref="admin", cascade="all, delete-orphan", uselist=True ) diff --git a/core/model/announcements.py b/core/model/announcements.py index fae3c6ddaa..c263daec18 100644 --- a/core/model/announcements.py +++ b/core/model/announcements.py @@ -3,7 +3,7 @@ import dataclasses import datetime import uuid -from typing import TYPE_CHECKING, Dict, List, Optional +from typing import TYPE_CHECKING from sqlalchemy import Column, Date, ForeignKey, Integer, Unicode, select from sqlalchemy.dialects.postgresql import UUID @@ -52,7 +52,7 @@ def library_announcements(cls, library: Library) -> Select: @classmethod def authentication_document_announcements( cls, library: Library - ) -> List[Dict[str, str]]: + ) -> list[dict[str, str]]: db = Session.object_session(library) today_local = datetime.date.today() query = ( @@ -69,7 +69,7 @@ def authentication_document_announcements( @classmethod def from_data( - cls, db: Session, data: AnnouncementData, library: Optional[Library] = None + cls, db: Session, data: AnnouncementData, library: Library | None = None ) -> Announcement: created, _ = create( db, @@ -86,9 +86,9 @@ def from_data( def sync( cls, db: Session, - existing: List[Announcement], - new: Dict[uuid.UUID, AnnouncementData], - library: Optional[Library] = None, + existing: list[Announcement], + new: dict[uuid.UUID, AnnouncementData], + library: Library | None = None, ) -> None: """ Synchronize the existing announcements with the new announcements, creating any new announcements @@ -140,9 +140,9 @@ class AnnouncementData: content: str start: datetime.date finish: datetime.date - id: Optional[uuid.UUID] = None + id: uuid.UUID | None = None - def as_dict(self) -> Dict[str, str]: + def as_dict(self) -> dict[str, str]: date_format = "%Y-%m-%d" return_dict = { "content": self.content, diff --git a/core/model/before_flush_decorator.py b/core/model/before_flush_decorator.py index 46b296f5b3..4a2c706c85 100644 --- a/core/model/before_flush_decorator.py +++ b/core/model/before_flush_decorator.py @@ -1,20 +1,14 @@ from __future__ import annotations -import sys +from collections.abc import Callable from copy import copy from dataclasses import dataclass from enum import Enum -from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Type +from typing import TYPE_CHECKING, ParamSpec from sqlalchemy.orm import Session from sqlalchemy.orm.unitofwork import UOWTransaction -# TODO: Remove this when we drop support for Python 3.9 -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - if TYPE_CHECKING: from core.model import Base @@ -39,7 +33,7 @@ class Listeners: """ # Tuple of models that the listener is registered for. - models: Tuple[Type[Base], ...] + models: tuple[type[Base], ...] # State that the listener is registered for. state: ListenerState # If True, the listener will only be called once. 
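For context, the decorator being retyped here is applied later in this same patch (core/model/listeners.py). A minimal sketch of that usage pattern; the import location of the shared `Listener` instance and the logging call are assumptions, not taken from the patch:

import logging

from sqlalchemy.orm import Session

from core.model import Work
from core.model.before_flush_decorator import (  # assumed home of the shared instance
    Listener,
    ListenerState,
)


@Listener.before_flush(Work, ListenerState.new)
def log_new_work(session: Session, instance: Work) -> None:
    # Invoked by before_flush_event_listener for each newly added Work,
    # immediately before SQLAlchemy flushes the session.
    logging.info("About to flush new work: %r", instance)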
@@ -50,11 +44,11 @@ class Listeners: one_shot_triggered: bool = False def __init__(self): - self._listeners: List[BeforeFlushListener.Listeners] = [] + self._listeners: list[BeforeFlushListener.Listeners] = [] def before_flush( self, - model: Type[Base] | Tuple[Type[Base], ...], + model: type[Base] | tuple[type[Base], ...], state: ListenerState = ListenerState.any, one_shot: bool = False, ) -> Callable[[Callable[P, None]], Callable[P, None]]: @@ -91,8 +85,8 @@ def _invoke_listeners( cls, listening_for: ListenerState, session: Session, - listeners: List[BeforeFlushListener.Listeners], - instance_filter: Optional[Callable[[Session, Base], bool]] = None, + listeners: list[BeforeFlushListener.Listeners], + instance_filter: Callable[[Session, Base], bool] | None = None, ) -> None: """ Invoke the listeners for the given state. @@ -136,8 +130,8 @@ def _invoke_listeners( def before_flush_event_listener( self, session: Session, - _flush_context: Optional[UOWTransaction] = None, - _instances: Optional[List[object]] = None, + _flush_context: UOWTransaction | None = None, + _instances: list[object] | None = None, ) -> None: """ SQLAlchemy event listener that is called before a flush. This is where we invoke the listeners that have been diff --git a/core/model/classification.py b/core/model/classification.py index d4d406716a..4e94fbe234 100644 --- a/core/model/classification.py +++ b/core/model/classification.py @@ -2,7 +2,7 @@ from __future__ import annotations import logging -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from sqlalchemy import ( Boolean, @@ -146,7 +146,7 @@ class Subject(Base): checked = Column(Boolean, default=False, index=True) # One Subject may participate in many Classifications. - classifications: Mapped[List[Classification]] = relationship( + classifications: Mapped[list[Classification]] = relationship( "Classification", back_populates="subject" ) @@ -350,11 +350,11 @@ class Classification(Base): __tablename__ = "classifications" id = Column(Integer, primary_key=True) identifier_id = Column(Integer, ForeignKey("identifiers.id"), index=True) - identifier: Mapped[Optional[Identifier]] + identifier: Mapped[Identifier | None] subject_id = Column(Integer, ForeignKey("subjects.id"), index=True) subject: Mapped[Subject] = relationship("Subject", back_populates="classifications") data_source_id = Column(Integer, ForeignKey("datasources.id"), index=True) - data_source: Mapped[Optional[DataSource]] + data_source: Mapped[DataSource | None] # How much weight the data source gives to this classification. weight = Column(Integer) @@ -485,12 +485,12 @@ class Genre(Base, HasSessionCache): name = Column(Unicode, unique=True, index=True) # One Genre may have affinity with many Subjects. - subjects: Mapped[List[Subject]] = relationship("Subject", backref="genre") + subjects: Mapped[list[Subject]] = relationship("Subject", backref="genre") # One Genre may participate in many WorkGenre assignments. 
works = association_proxy("work_genres", "work") - work_genres: Mapped[List[WorkGenre]] = relationship( + work_genres: Mapped[list[WorkGenre]] = relationship( "WorkGenre", backref="genre", cascade="all, delete-orphan" ) diff --git a/core/model/collection.py b/core/model/collection.py index 7b4b5b7941..b9876ba4ed 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Generator, List, Optional, Tuple, TypeVar +from collections.abc import Generator +from typing import TYPE_CHECKING, Any, TypeVar from sqlalchemy import ( Boolean, @@ -83,7 +84,7 @@ class Collection(Base, HasSessionCache): # A collection may have many child collections. For example, # An Overdrive collection may have many children corresponding # to Overdrive Advantage collections. - children: Mapped[List[Collection]] = relationship( + children: Mapped[list[Collection]] = relationship( "Collection", back_populates="parent", uselist=True ) @@ -94,12 +95,12 @@ class Collection(Base, HasSessionCache): # A Collection can provide books to many Libraries. # https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html#composite-association-proxies - libraries: Mapped[List[Library]] = association_proxy( + libraries: Mapped[list[Library]] = association_proxy( "integration_configuration", "libraries" ) # A Collection can include many LicensePools. - licensepools: Mapped[List[LicensePool]] = relationship( + licensepools: Mapped[list[LicensePool]] = relationship( "LicensePool", back_populates="collection", cascade="all, delete-orphan", @@ -107,23 +108,23 @@ class Collection(Base, HasSessionCache): ) # A Collection can have many associated Credentials. - credentials: Mapped[List[Credential]] = relationship( + credentials: Mapped[list[Credential]] = relationship( "Credential", back_populates="collection", cascade="delete" ) # A Collection can be monitored by many Monitors, each of which # will have its own Timestamp. - timestamps: Mapped[List[Timestamp]] = relationship( + timestamps: Mapped[list[Timestamp]] = relationship( "Timestamp", back_populates="collection" ) - catalog: Mapped[List[Identifier]] = relationship( + catalog: Mapped[list[Identifier]] = relationship( "Identifier", secondary=lambda: collections_identifiers, backref="collections" ) # A Collection can be associated with multiple CoverageRecords # for Identifiers in its catalog. - coverage_records: Mapped[List[CoverageRecord]] = relationship( + coverage_records: Mapped[list[CoverageRecord]] = relationship( "CoverageRecord", backref="collection", cascade="all" ) @@ -132,7 +133,7 @@ class Collection(Base, HasSessionCache): # also be added to the list. Admins can remove items from the # the list and they won't be added back, so the list doesn't # necessarily match the collection. - customlists: Mapped[List[CustomList]] = relationship( + customlists: Mapped[list[CustomList]] = relationship( "CustomList", secondary=lambda: collections_customlists, backref="collections" ) @@ -146,13 +147,13 @@ class Collection(Base, HasSessionCache): def __repr__(self) -> str: return f'' - def cache_key(self) -> Tuple[str | None, str | None]: + def cache_key(self) -> tuple[str | None, str | None]: return self.name, self.integration_configuration.protocol @classmethod def by_name_and_protocol( cls, _db: Session, name: str, protocol: str - ) -> Tuple[Collection, bool]: + ) -> tuple[Collection, bool]: """Find or create a Collection with the given name and the given protocol. 
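For reference, the find-or-create helper retyped above is typically called as below; a small sketch in which the wrapper function, the `session` argument, and the protocol string are placeholders rather than values from the patch:

from sqlalchemy.orm import Session

from core.model.collection import Collection


def ensure_collection(session: Session) -> Collection:
    # Find-or-create keyed on (name, protocol); the boolean reports whether a
    # new row was created. Repeat lookups in the same session are served by
    # HasSessionCache via cache_key() instead of hitting the database again.
    collection, is_new = Collection.by_name_and_protocol(
        session, name="Example Collection", protocol="OPDS Import"  # placeholder protocol
    )
    return collection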
@@ -162,15 +163,15 @@ def by_name_and_protocol( """ key = (name, protocol) - def lookup_hook() -> Tuple[Collection, bool]: + def lookup_hook() -> tuple[Collection, bool]: return cls._by_name_and_protocol(_db, key) return cls.by_cache_key(_db, key, lookup_hook) @classmethod def _by_name_and_protocol( - cls, _db: Session, cache_key: Tuple[str, str] - ) -> Tuple[Collection, bool]: + cls, _db: Session, cache_key: tuple[str, str] + ) -> tuple[Collection, bool]: """Find or create a Collection with the given name and the given protocol. @@ -325,7 +326,7 @@ def default_loan_period_setting( self, library: Library, medium: str = EditionConstants.BOOK_MEDIUM, - ) -> Optional[int]: + ) -> int | None: """Until we hear otherwise from the license provider, we assume that someone who borrows a non-open-access item from this collection has it for this number of days. @@ -460,7 +461,7 @@ def pools_with_no_delivery_mechanisms(self) -> Query[LicensePool]: qu = LicensePool.with_no_delivery_mechanisms(_db) return qu.filter(LicensePool.collection == self) # type: ignore[no-any-return] - def explain(self, include_secrets: bool = False) -> List[str]: + def explain(self, include_secrets: bool = False) -> list[str]: """Create a series of human-readable strings to explain a collection's settings. @@ -491,7 +492,7 @@ def explain(self, include_secrets: bool = False) -> List[str]: def restrict_to_ready_deliverable_works( cls, query: Query[T], - collection_ids: List[int] | None = None, + collection_ids: list[int] | None = None, show_suppressed: bool = False, allow_holds: bool = True, ) -> Query[T]: diff --git a/core/model/configuration.py b/core/model/configuration.py index 73cc741a66..079b5222b2 100644 --- a/core/model/configuration.py +++ b/core/model/configuration.py @@ -4,7 +4,7 @@ import json import logging from enum import Enum -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from sqlalchemy import Column, ForeignKey, Index, Integer, Unicode from sqlalchemy.orm import Mapped, relationship @@ -162,14 +162,14 @@ class ExternalIntegration(Base): # Any additional configuration information goes into # ConfigurationSettings. - settings: Mapped[List[ConfigurationSetting]] = relationship( + settings: Mapped[list[ConfigurationSetting]] = relationship( "ConfigurationSetting", back_populates="external_integration", cascade="all, delete", uselist=True, ) - libraries: Mapped[List[Library]] = relationship( + libraries: Mapped[list[Library]] = relationship( "Library", back_populates="integrations", secondary=lambda: externalintegrations_libraries, diff --git a/core/model/contributor.py b/core/model/contributor.py index 00e2ebf93a..1f73c519f3 100644 --- a/core/model/contributor.py +++ b/core/model/contributor.py @@ -3,7 +3,7 @@ import logging import re -from typing import TYPE_CHECKING, Dict, List, Set +from typing import TYPE_CHECKING from sqlalchemy import Column, ForeignKey, Integer, Unicode, UniqueConstraint from sqlalchemy.dialects.postgresql import ARRAY, JSON @@ -52,9 +52,9 @@ class Contributor(Base): # provided by a publisher. 
biography = Column(Unicode) - extra: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + extra: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) - contributions: Mapped[List[Contribution]] = relationship( + contributions: Mapped[list[Contribution]] = relationship( "Contribution", back_populates="contributor", uselist=True ) @@ -93,7 +93,7 @@ class Contributor(Base): COPYRIGHT_HOLDER_ROLE = "Copyright holder" TRANSCRIBER_ROLE = "Transcriber" DESIGNER_ROLE = "Designer" - AUTHOR_ROLES: Set[str] = {PRIMARY_AUTHOR_ROLE, AUTHOR_ROLE} + AUTHOR_ROLES: set[str] = {PRIMARY_AUTHOR_ROLE, AUTHOR_ROLE} # Map our recognized roles to MARC relators. # https://www.loc.gov/marc/relators/relaterm.html diff --git a/core/model/coverage.py b/core/model/coverage.py index cce96fcca9..e3f422a1e4 100644 --- a/core/model/coverage.py +++ b/core/model/coverage.py @@ -1,7 +1,7 @@ # BaseCoverageRecord, Timestamp, CoverageRecord, WorkCoverageRecord from __future__ import annotations -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from sqlalchemy import ( Column, @@ -790,7 +790,7 @@ class EquivalencyCoverageRecord(Base, BaseCoverageRecord): def bulk_add( cls, _db, - equivalents: List[Equivalency], + equivalents: list[Equivalency], operation: str, status=BaseCoverageRecord.REGISTERED, batch_size=100, diff --git a/core/model/customlist.py b/core/model/customlist.py index bf44e00d5f..5c2e8fa222 100644 --- a/core/model/customlist.py +++ b/core/model/customlist.py @@ -3,7 +3,7 @@ import logging from functools import total_ordering -from typing import TYPE_CHECKING, List +from typing import TYPE_CHECKING from sqlalchemy import ( Boolean, @@ -62,12 +62,12 @@ class CustomList(Base): # cached when the list contents change. size = Column(Integer, nullable=False, default=0) - entries: Mapped[List[CustomListEntry]] = relationship( + entries: Mapped[list[CustomListEntry]] = relationship( "CustomListEntry", backref="customlist", uselist=True ) # List sharing mechanisms - shared_locally_with_libraries: Mapped[List[Library]] = relationship( + shared_locally_with_libraries: Mapped[list[Library]] = relationship( "Library", secondary=lambda: customlist_sharedlibrary, back_populates="shared_custom_lists", @@ -81,7 +81,7 @@ class CustomList(Base): auto_update_status: Mapped[str] = Column(auto_update_status_enum, default=INIT) # type: ignore[assignment] # Typing specific - collections: List[Collection] + collections: list[Collection] library: Library __table_args__ = ( diff --git a/core/model/datasource.py b/core/model/datasource.py index 665fdddd45..085c18c466 100644 --- a/core/model/datasource.py +++ b/core/model/datasource.py @@ -2,7 +2,7 @@ from __future__ import annotations from collections import defaultdict -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING from urllib.parse import quote, unquote from sqlalchemy import Boolean, Column, Integer, String @@ -42,68 +42,68 @@ class DataSource(Base, HasSessionCache, DataSourceConstants): name = Column(String, unique=True, index=True) offers_licenses = Column(Boolean, default=False) primary_identifier_type = Column(String, index=True) - extra: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + extra: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) # One DataSource can generate many Editions. 
- editions: Mapped[List[Edition]] = relationship( + editions: Mapped[list[Edition]] = relationship( "Edition", back_populates="data_source", uselist=True ) # One DataSource can generate many CoverageRecords. - coverage_records: Mapped[List[CoverageRecord]] = relationship( + coverage_records: Mapped[list[CoverageRecord]] = relationship( "CoverageRecord", back_populates="data_source" ) # One DataSource can generate many IDEquivalencies. - id_equivalencies: Mapped[List[Equivalency]] = relationship( + id_equivalencies: Mapped[list[Equivalency]] = relationship( "Equivalency", backref="data_source" ) # One DataSource can grant access to many LicensePools. - license_pools: Mapped[List[LicensePool]] = relationship( + license_pools: Mapped[list[LicensePool]] = relationship( "LicensePool", back_populates="data_source", overlaps="delivery_mechanisms" ) # One DataSource can provide many Hyperlinks. - links: Mapped[List[Hyperlink]] = relationship("Hyperlink", backref="data_source") + links: Mapped[list[Hyperlink]] = relationship("Hyperlink", backref="data_source") # One DataSource can provide many Resources. - resources: Mapped[List[Resource]] = relationship("Resource", backref="data_source") + resources: Mapped[list[Resource]] = relationship("Resource", backref="data_source") # One DataSource can generate many Measurements. - measurements: Mapped[List[Measurement]] = relationship( + measurements: Mapped[list[Measurement]] = relationship( "Measurement", back_populates="data_source" ) # One DataSource can provide many Classifications. - classifications: Mapped[List[Classification]] = relationship( + classifications: Mapped[list[Classification]] = relationship( "Classification", backref="data_source" ) # One DataSource can have many associated Credentials. - credentials: Mapped[List[Credential]] = relationship( + credentials: Mapped[list[Credential]] = relationship( "Credential", back_populates="data_source" ) # One DataSource can generate many CustomLists. - custom_lists: Mapped[List[CustomList]] = relationship( + custom_lists: Mapped[list[CustomList]] = relationship( "CustomList", back_populates="data_source" ) # One DataSource can provide many LicensePoolDeliveryMechanisms. - delivery_mechanisms: Mapped[List[LicensePoolDeliveryMechanism]] = relationship( + delivery_mechanisms: Mapped[list[LicensePoolDeliveryMechanism]] = relationship( "LicensePoolDeliveryMechanism", backref="data_source", foreign_keys=lambda: [LicensePoolDeliveryMechanism.data_source_id], ) - license_lanes: Mapped[List[Lane]] = relationship( + license_lanes: Mapped[list[Lane]] = relationship( "Lane", back_populates="license_datasource", foreign_keys="Lane.license_datasource_id", ) - list_lanes: Mapped[List[Lane]] = relationship( + list_lanes: Mapped[list[Lane]] = relationship( "Lane", back_populates="_list_datasource", foreign_keys="Lane._list_datasource_id", diff --git a/core/model/devicetokens.py b/core/model/devicetokens.py index b324b65619..faab4b93e4 100644 --- a/core/model/devicetokens.py +++ b/core/model/devicetokens.py @@ -1,5 +1,4 @@ import sys -from typing import Union from sqlalchemy import Column, Enum, ForeignKey, Index, Integer, Unicode from sqlalchemy.exc import IntegrityError @@ -55,7 +54,7 @@ def create( db, token_type: str, device_token: str, - patron: Union[Patron, int], + patron: Patron | int, ) -> Self: """Create a DeviceToken while ensuring sql issues are managed. 
Raises InvalidTokenTypeError, DuplicateDeviceTokenError""" diff --git a/core/model/edition.py b/core/model/edition.py index af435a7692..a83f8f7ea1 100644 --- a/core/model/edition.py +++ b/core/model/edition.py @@ -3,7 +3,7 @@ import logging from collections import defaultdict -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING from sqlalchemy import ( Column, @@ -93,12 +93,12 @@ class Edition(Base, EditionConstants): ) # An Edition may show up in many CustomListEntries. - custom_list_entries: Mapped[List[CustomListEntry]] = relationship( + custom_list_entries: Mapped[list[CustomListEntry]] = relationship( "CustomListEntry", backref="edition" ) # An Edition may be the presentation edition for many LicensePools. - is_presentation_for: Mapped[List[LicensePool]] = relationship( + is_presentation_for: Mapped[list[LicensePool]] = relationship( "LicensePool", backref="presentation_edition" ) @@ -117,7 +117,7 @@ class Edition(Base, EditionConstants): author = Column(Unicode, index=True) sort_author = Column(Unicode, index=True) - contributions: Mapped[List[Contribution]] = relationship( + contributions: Mapped[list[Contribution]] = relationship( "Contribution", back_populates="edition", uselist=True ) @@ -151,7 +151,7 @@ class Edition(Base, EditionConstants): cover_thumbnail_url = Column(Unicode) # Information kept in here probably won't be used. - extra: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + extra: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) def __repr__(self): id_repr = repr(self.primary_identifier) diff --git a/core/model/formats.py b/core/model/formats.py index ee12b8d803..e49d47518c 100644 --- a/core/model/formats.py +++ b/core/model/formats.py @@ -1,5 +1,5 @@ import sys -from typing import List, Mapping, Optional +from collections.abc import Mapping from flask_babel import lazy_gettext as _ @@ -26,14 +26,14 @@ class FormatPriorities: _prioritized_drm_schemes: Mapping[str, int] _prioritized_content_types: Mapping[str, int] - _hidden_content_types: List[str] + _hidden_content_types: list[str] _deprioritize_lcp_non_epubs: bool def __init__( self, - prioritized_drm_schemes: List[str], - prioritized_content_types: List[str], - hidden_content_types: List[str], + prioritized_drm_schemes: list[str], + prioritized_content_types: list[str], + hidden_content_types: list[str], deprioritize_lcp_non_epubs: bool, ): """ @@ -58,7 +58,7 @@ def __init__( def prioritize_for_pool( self, pool: LicensePool - ) -> List[LicensePoolDeliveryMechanism]: + ) -> list[LicensePoolDeliveryMechanism]: """ Filter and prioritize the delivery mechanisms in the given pool. :param pool: The license pool @@ -67,8 +67,8 @@ def prioritize_for_pool( return self.prioritize_mechanisms(pool.delivery_mechanisms) def prioritize_mechanisms( - self, mechanisms: List[LicensePoolDeliveryMechanism] - ) -> List[LicensePoolDeliveryMechanism]: + self, mechanisms: list[LicensePoolDeliveryMechanism] + ) -> list[LicensePoolDeliveryMechanism]: """ Filter and prioritize the delivery mechanisms in the given pool. :param mechanisms: The list of delivery mechanisms @@ -76,7 +76,7 @@ def prioritize_mechanisms( """ # First, filter out all hidden content types. 
- mechanisms_filtered: List[LicensePoolDeliveryMechanism] = [] + mechanisms_filtered: list[LicensePoolDeliveryMechanism] = [] for delivery in mechanisms: delivery_mechanism = delivery.delivery_mechanism if delivery_mechanism: @@ -115,7 +115,7 @@ def prioritize_mechanisms( @staticmethod def _artificial_lcp_content_priority( - drm_scheme: Optional[str], content_type: Optional[str] + drm_scheme: str | None, content_type: str | None ) -> int: """A comparison function that arbitrarily deflates the priority of LCP content. The comparison function treats all other DRM mechanisms and content types as equal.""" @@ -127,7 +127,7 @@ def _artificial_lcp_content_priority( else: return 0 - def _drm_scheme_priority(self, drm_scheme: Optional[str]) -> int: + def _drm_scheme_priority(self, drm_scheme: str | None) -> int: """Determine the priority of a DRM scheme. A lack of DRM is always prioritized over having DRM, and prioritized schemes are always higher priority than non-prioritized schemes.""" @@ -143,7 +143,7 @@ def _content_type_priority(self, content_type: str) -> int: class FormatPrioritiesSettings(BaseSettings): - prioritized_drm_schemes: Optional[list] = FormField( + prioritized_drm_schemes: list | None = FormField( default=[], form=ConfigurationFormItem( label=_("Prioritized DRM schemes"), @@ -165,7 +165,7 @@ class FormatPrioritiesSettings(BaseSettings): ), ) - prioritized_content_types: Optional[list] = FormField( + prioritized_content_types: list | None = FormField( default=[], form=ConfigurationFormItem( label=_("Prioritized content types"), @@ -187,7 +187,7 @@ class FormatPrioritiesSettings(BaseSettings): ), ) - deprioritize_lcp_non_epubs: Optional[str] = FormField( + deprioritize_lcp_non_epubs: str | None = FormField( default="false", form=ConfigurationFormItem( label=_("De-prioritize LCP non-EPUBs"), diff --git a/core/model/hassessioncache.py b/core/model/hassessioncache.py index f17fa48bfa..cd91f77500 100644 --- a/core/model/hassessioncache.py +++ b/core/model/hassessioncache.py @@ -5,8 +5,9 @@ import sys from abc import abstractmethod from collections import namedtuple +from collections.abc import Callable, Hashable from types import SimpleNamespace -from typing import Callable, Hashable, TypeVar +from typing import TypeVar from sqlalchemy.exc import SQLAlchemyError from sqlalchemy.orm import Mapped, Session diff --git a/core/model/identifier.py b/core/model/identifier.py index 35245e3733..7a687255cb 100644 --- a/core/model/identifier.py +++ b/core/model/identifier.py @@ -7,7 +7,7 @@ from abc import ABCMeta, abstractmethod from collections import defaultdict from functools import total_ordering -from typing import TYPE_CHECKING, List, overload +from typing import TYPE_CHECKING, overload from urllib.parse import quote, unquote import isbnlib @@ -242,7 +242,7 @@ class Identifier(Base, IdentifierConstants): type = Column(String(64), index=True) identifier = Column(String, index=True) - equivalencies: Mapped[List[Equivalency]] = relationship( + equivalencies: Mapped[list[Equivalency]] = relationship( "Equivalency", foreign_keys="Equivalency.input_id", back_populates="input", @@ -250,7 +250,7 @@ class Identifier(Base, IdentifierConstants): uselist=True, ) - inbound_equivalencies: Mapped[List[Equivalency]] = relationship( + inbound_equivalencies: Mapped[list[Equivalency]] = relationship( "Equivalency", foreign_keys="Equivalency.output_id", back_populates="output", @@ -259,7 +259,7 @@ class Identifier(Base, IdentifierConstants): ) # One Identifier may have many associated CoverageRecords. 
- coverage_records: Mapped[List[CoverageRecord]] = relationship( + coverage_records: Mapped[list[CoverageRecord]] = relationship( "CoverageRecord", back_populates="identifier" ) @@ -273,13 +273,13 @@ def __repr__(self): # One Identifier may serve as the primary identifier for # several Editions. - primarily_identifies: Mapped[List[Edition]] = relationship( + primarily_identifies: Mapped[list[Edition]] = relationship( "Edition", backref="primary_identifier" ) # One Identifier may serve as the identifier for many # LicensePools, through different Collections. - licensed_through: Mapped[List[LicensePool]] = relationship( + licensed_through: Mapped[list[LicensePool]] = relationship( "LicensePool", back_populates="identifier", lazy="joined", @@ -287,27 +287,27 @@ def __repr__(self): ) # One Identifier may have many Links. - links: Mapped[List[Hyperlink]] = relationship( + links: Mapped[list[Hyperlink]] = relationship( "Hyperlink", backref="identifier", uselist=True ) # One Identifier may be the subject of many Measurements. - measurements: Mapped[List[Measurement]] = relationship( + measurements: Mapped[list[Measurement]] = relationship( "Measurement", backref="identifier" ) # One Identifier may participate in many Classifications. - classifications: Mapped[List[Classification]] = relationship( + classifications: Mapped[list[Classification]] = relationship( "Classification", backref="identifier" ) # One identifier may participate in many Annotations. - annotations: Mapped[List[Annotation]] = relationship( + annotations: Mapped[list[Annotation]] = relationship( "Annotation", backref="identifier" ) # One Identifier can have many LicensePoolDeliveryMechanisms. - delivery_mechanisms: Mapped[List[LicensePoolDeliveryMechanism]] = relationship( + delivery_mechanisms: Mapped[list[LicensePoolDeliveryMechanism]] = relationship( "LicensePoolDeliveryMechanism", backref="identifier", foreign_keys=lambda: [LicensePoolDeliveryMechanism.identifier_id], diff --git a/core/model/integration.py b/core/model/integration.py index 893d07d55a..755b58fe43 100644 --- a/core/model/integration.py +++ b/core/model/integration.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Any, Dict, List +from typing import TYPE_CHECKING, Any from sqlalchemy import Column from sqlalchemy import Enum as SQLAlchemyEnum @@ -47,14 +47,14 @@ class IntegrationConfiguration(Base): name = Column(Unicode, nullable=False, unique=True) # The configuration settings for this integration. Stored as json. - settings_dict: Mapped[Dict[str, Any]] = Column( + settings_dict: Mapped[dict[str, Any]] = Column( "settings", JSONB, nullable=False, default=dict ) # Integration specific context data. Stored as json. This is used to # store configuration data that is not user supplied for a particular # integration. 
- context: Mapped[Dict[str, Any]] = Column(JSONB, nullable=False, default=dict) + context: Mapped[dict[str, Any]] = Column(JSONB, nullable=False, default=dict) __table_args__ = ( Index( @@ -64,7 +64,7 @@ class IntegrationConfiguration(Base): ), ) - def context_update(self, new_context: Dict[str, Any]) -> None: + def context_update(self, new_context: dict[str, Any]) -> None: """Update the context for this integration""" self.context.update(new_context) flag_modified(self, "context") @@ -73,7 +73,7 @@ def context_update(self, new_context: Dict[str, Any]) -> None: self_test_results = Column(JSONB, nullable=False, default=dict) library_configurations: Mapped[ - List[IntegrationLibraryConfiguration] + list[IntegrationLibraryConfiguration] ] = relationship( "IntegrationLibraryConfiguration", back_populates="parent", @@ -87,7 +87,7 @@ def context_update(self, new_context: Dict[str, Any]) -> None: ) # https://docs.sqlalchemy.org/en/14/orm/extensions/associationproxy.html#simplifying-association-objects - libraries: Mapped[List[Library]] = association_proxy( + libraries: Mapped[list[Library]] = association_proxy( "library_configurations", "library", creator=lambda library: IntegrationLibraryConfiguration(library=library), @@ -161,7 +161,7 @@ class IntegrationLibraryConfiguration(Base): library: Mapped[Library] = relationship("Library") # The configuration settings for this integration. Stored as json. - settings_dict: Mapped[Dict[str, Any]] = Column( + settings_dict: Mapped[dict[str, Any]] = Column( "settings", JSONB, nullable=False, default=dict ) diff --git a/core/model/library.py b/core/model/library.py index 28cdc2e087..32c268da57 100644 --- a/core/model/library.py +++ b/core/model/library.py @@ -3,18 +3,8 @@ import logging from collections import Counter -from typing import ( - TYPE_CHECKING, - Any, - Dict, - Generator, - List, - Optional, - Sequence, - Tuple, - Type, - Union, -) +from collections.abc import Generator, Sequence +from typing import TYPE_CHECKING, Any from Crypto.PublicKey import RSA from expiringdict import ExpiringDict @@ -101,23 +91,23 @@ class Library(Base, HasSessionCache): library_registry_shared_secret = Column(Unicode, unique=True) # A library may have many Patrons. - patrons: Mapped[List[Patron]] = relationship( + patrons: Mapped[list[Patron]] = relationship( "Patron", back_populates="library", cascade="all, delete-orphan" ) # An Library may have many admin roles. - adminroles: Mapped[List[AdminRole]] = relationship( + adminroles: Mapped[list[AdminRole]] = relationship( "AdminRole", back_populates="library", cascade="all, delete-orphan" ) # A Library may have many CustomLists. - custom_lists: Mapped[List[CustomList]] = relationship( + custom_lists: Mapped[list[CustomList]] = relationship( "CustomList", backref="library", uselist=True ) # Lists shared with this library # shared_custom_lists: "CustomList" - shared_custom_lists: Mapped[List[CustomList]] = relationship( + shared_custom_lists: Mapped[list[CustomList]] = relationship( "CustomList", secondary=lambda: customlist_sharedlibrary, back_populates="shared_locally_with_libraries", @@ -125,7 +115,7 @@ class Library(Base, HasSessionCache): ) # A Library may have many ExternalIntegrations. 
- integrations: Mapped[List[ExternalIntegration]] = relationship( + integrations: Mapped[list[ExternalIntegration]] = relationship( "ExternalIntegration", secondary=lambda: externalintegrations_libraries, back_populates="libraries", @@ -134,21 +124,21 @@ class Library(Base, HasSessionCache): # This parameter is deprecated, and will be removed once all of our integrations # are updated to use IntegrationSettings. New code shouldn't use it. # TODO: Remove this column. - external_integration_settings: Mapped[List[ConfigurationSetting]] = relationship( + external_integration_settings: Mapped[list[ConfigurationSetting]] = relationship( "ConfigurationSetting", back_populates="library", cascade="all, delete", ) # Any additional configuration information is stored as JSON on this column. - settings_dict: Dict[str, Any] = Column(JSONB, nullable=False, default=dict) + settings_dict: dict[str, Any] = Column(JSONB, nullable=False, default=dict) # A Library may have many CirculationEvents - circulation_events: Mapped[List[CirculationEvent]] = relationship( + circulation_events: Mapped[list[CirculationEvent]] = relationship( "CirculationEvent", backref="library", cascade="all, delete-orphan" ) - library_announcements: Mapped[List[Announcement]] = relationship( + library_announcements: Mapped[list[Announcement]] = relationship( "Announcement", back_populates="library", cascade="all, delete-orphan", @@ -157,12 +147,12 @@ class Library(Base, HasSessionCache): # A class-wide cache mapping library ID to the calculated value # used for Library.has_root_lane. This is invalidated whenever # Lane configuration changes, and it will also expire on its own. - _has_root_lane_cache: Dict[Union[int, None], bool] = ExpiringDict( + _has_root_lane_cache: dict[int | None, bool] = ExpiringDict( max_len=1000, max_age_seconds=3600 ) # A Library can have many lanes - lanes: Mapped[List[Lane]] = relationship( + lanes: Mapped[list[Lane]] = relationship( "Lane", back_populates="library", foreign_keys="Lane.library_id", @@ -205,7 +195,7 @@ def collections(self) -> Sequence[Collection]: ).all() # Cache of the libraries loaded settings object - _settings: Optional[LibrarySettings] + _settings: LibrarySettings | None def __repr__(self) -> str: return ( @@ -213,14 +203,14 @@ def __repr__(self) -> str: % (self.name, self.short_name, self.uuid, self.library_registry_short_name) ) - def cache_key(self) -> Optional[str]: + def cache_key(self) -> str | None: return self.short_name @classmethod - def lookup(cls, _db: Session, short_name: Optional[str]) -> Optional[Library]: + def lookup(cls, _db: Session, short_name: str | None) -> Library | None: """Look up a library by short name.""" - def _lookup() -> Tuple[Optional[Library], bool]: + def _lookup() -> tuple[Library | None, bool]: library = get_one(_db, Library, short_name=short_name) return library, False @@ -228,13 +218,13 @@ def _lookup() -> Tuple[Optional[Library], bool]: return library @classmethod - def default(cls, _db: Session) -> Optional[Library]: + def default(cls, _db: Session) -> Library | None: """Find the default Library.""" # If for some reason there are multiple default libraries in # the database, they're not actually interchangeable, but # raising an error here might make it impossible to fix the # problem. 
- defaults: List[Library] = ( + defaults: list[Library] = ( _db.query(Library) .filter(Library._is_default == True) .order_by(Library.id.asc()) @@ -269,7 +259,7 @@ def default(cls, _db: Session) -> Optional[Library]: return default_library # type: ignore[no-any-return] @classmethod - def generate_keypair(cls) -> Tuple[str, bytes]: + def generate_keypair(cls) -> tuple[str, bytes]: """Generate a public / private keypair for a library.""" private_key = RSA.generate(2048) public_key = private_key.public_key() @@ -278,12 +268,12 @@ def generate_keypair(cls) -> Tuple[str, bytes]: return public_key_str, private_key_bytes @hybrid_property - def library_registry_short_name(self) -> Optional[str]: + def library_registry_short_name(self) -> str | None: """Gets library_registry_short_name from database""" return self._library_registry_short_name @library_registry_short_name.setter - def library_registry_short_name(self, value: Optional[str]) -> None: + def library_registry_short_name(self, value: str | None) -> None: """Uppercase the library registry short name on the way in.""" if value: value = value.upper() @@ -320,7 +310,7 @@ def all_collections(self) -> Generator[Collection, None, None]: yield from collection.parents @property - def entrypoints(self) -> Generator[Optional[Type[EntryPoint]], None, None]: + def entrypoints(self) -> Generator[type[EntryPoint] | None, None, None]: """The EntryPoints enabled for this library.""" values = self.settings.enabled_entry_points for v in values: @@ -328,7 +318,7 @@ def entrypoints(self) -> Generator[Optional[Type[EntryPoint]], None, None]: if cls: yield cls - def enabled_facets(self, group_name: str) -> List[str]: + def enabled_facets(self, group_name: str) -> list[str]: """Look up the enabled facets for a given facet group.""" if group_name == FacetConstants.DISTRIBUTOR_FACETS_GROUP_NAME: enabled = [] @@ -380,7 +370,7 @@ def has_root_lanes(self) -> bool: def restrict_to_ready_deliverable_works( self, query: Query[Work], - collection_ids: Optional[List[int]] = None, + collection_ids: list[int] | None = None, show_suppressed: bool = False, ) -> Query[Work]: """Restrict a query to show only presentation-ready works present in @@ -442,7 +432,7 @@ def default_facet(self, group_name: str) -> str: """Look up the default facet for a given facet group.""" return getattr(self.settings, "facets_default_" + group_name) # type: ignore[no-any-return] - def explain(self, include_secrets: bool = False) -> List[str]: + def explain(self, include_secrets: bool = False) -> list[str]: """Create a series of human-readable strings to explain a library's settings. @@ -488,7 +478,7 @@ def explain(self, include_secrets: bool = False) -> List[str]: return lines @property - def is_default(self) -> Optional[bool]: + def is_default(self) -> bool | None: return self._is_default @is_default.setter diff --git a/core/model/licensing.py b/core/model/licensing.py index 6c8b93c456..9fbde77a00 100644 --- a/core/model/licensing.py +++ b/core/model/licensing.py @@ -5,7 +5,7 @@ import datetime import logging from enum import Enum as PythonEnum -from typing import TYPE_CHECKING, List, Literal, Optional, Tuple, overload +from typing import TYPE_CHECKING, Literal, overload from sqlalchemy import Boolean, Column, DateTime from sqlalchemy import Enum as AlchemyEnum @@ -140,7 +140,7 @@ class License(Base, LicenseFunctions): ) # One License can have many Loans. 
- loans: Mapped[List[Loan]] = relationship( + loans: Mapped[list[Loan]] = relationship( "Loan", back_populates="license", cascade="all, delete-orphan" ) @@ -155,7 +155,7 @@ def is_available_for_borrowing(self) -> bool: and self.checkouts_available > 0 ) - def loan_to(self, patron: Patron, **kwargs) -> Tuple[Loan, bool]: + def loan_to(self, patron: Patron, **kwargs) -> tuple[Loan, bool]: loan, is_new = self.license_pool.loan_to(patron, **kwargs) loan.license = self return loan, is_new @@ -224,7 +224,7 @@ class LicensePool(Base): # If the source provides information about individual licenses, the # LicensePool may have many Licenses. - licenses: Mapped[List[License]] = relationship( + licenses: Mapped[list[License]] = relationship( "License", back_populates="license_pool", cascade="all, delete-orphan", @@ -232,17 +232,17 @@ class LicensePool(Base): ) # One LicensePool can have many Loans. - loans: Mapped[List[Loan]] = relationship( + loans: Mapped[list[Loan]] = relationship( "Loan", back_populates="license_pool", cascade="all, delete-orphan" ) # One LicensePool can have many Holds. - holds: Mapped[List[Hold]] = relationship( + holds: Mapped[list[Hold]] = relationship( "Hold", back_populates="license_pool", cascade="all, delete-orphan" ) # One LicensePool can have many CirculationEvents - circulation_events: Mapped[List[CirculationEvent]] = relationship( + circulation_events: Mapped[list[CirculationEvent]] = relationship( "CirculationEvent", backref="license_pool", cascade="all, delete-orphan" ) @@ -282,7 +282,7 @@ class LicensePool(Base): UniqueConstraint("identifier_id", "data_source_id", "collection_id"), ) - delivery_mechanisms: Mapped[List[LicensePoolDeliveryMechanism]] = relationship( + delivery_mechanisms: Mapped[list[LicensePoolDeliveryMechanism]] = relationship( "LicensePoolDeliveryMechanism", primaryjoin="and_(LicensePool.data_source_id==LicensePoolDeliveryMechanism.data_source_id, LicensePool.identifier_id==LicensePoolDeliveryMechanism.identifier_id)", foreign_keys=(data_source_id, identifier_id), @@ -342,7 +342,7 @@ def for_foreign_id( foreign_id, rights_status=None, collection=None, - ) -> Tuple[LicensePool, bool]: + ) -> tuple[LicensePool, bool]: ... @classmethod @@ -356,7 +356,7 @@ def for_foreign_id( rights_status, collection, autocreate: Literal[False], - ) -> Tuple[LicensePool | None, bool]: + ) -> tuple[LicensePool | None, bool]: ... @classmethod @@ -369,7 +369,7 @@ def for_foreign_id( rights_status=None, collection=None, autocreate=True, - ) -> Tuple[LicensePool | None, bool]: + ) -> tuple[LicensePool | None, bool]: """Find or create a LicensePool for the given foreign ID.""" from core.model.collection import CollectionMissing from core.model.datasource import DataSource @@ -1027,7 +1027,7 @@ def loan_to( end=None, fulfillment=None, external_identifier=None, - ) -> Tuple[Loan, bool]: + ) -> tuple[Loan, bool]: _db = Session.object_session(patron) kwargs = dict(start=start or utc_now(), end=end) loan, is_new = get_one_or_create( @@ -1090,7 +1090,7 @@ def best_available_license(self) -> License | None: The worst option would be pay-per-use, but we don't yet support any distributors that offer that model. """ - best: Optional[License] = None + best: License | None = None for license in (l for l in self.licenses if l.is_available_for_borrowing): if ( @@ -1476,7 +1476,7 @@ class LicensePoolDeliveryMechanism(Base): ) # One LicensePoolDeliveryMechanism may fulfill many Loans. 
- fulfills: Mapped[List[Loan]] = relationship("Loan", back_populates="fulfillment") + fulfills: Mapped[list[Loan]] = relationship("Loan", back_populates="fulfillment") # One LicensePoolDeliveryMechanism may be associated with one RightsStatus. rightsstatus_id = Column(Integer, ForeignKey("rightsstatus.id"), index=True) @@ -1745,7 +1745,7 @@ class DeliveryMechanism(Base, HasSessionCache): default_client_can_fulfill_lookup.add((_media_type, BEARER_TOKEN)) license_pool_delivery_mechanisms: Mapped[ - List[LicensePoolDeliveryMechanism] + list[LicensePoolDeliveryMechanism] ] = relationship( "LicensePoolDeliveryMechanism", back_populates="delivery_mechanism", @@ -1999,11 +1999,11 @@ class RightsStatus(Base): # One RightsStatus may apply to many LicensePoolDeliveryMechanisms. licensepooldeliverymechanisms: Mapped[ - List[LicensePoolDeliveryMechanism] + list[LicensePoolDeliveryMechanism] ] = relationship("LicensePoolDeliveryMechanism", backref="rights_status") # One RightsStatus may apply to many Resources. - resources: Mapped[List[Resource]] = relationship( + resources: Mapped[list[Resource]] = relationship( "Resource", backref="rights_status" ) diff --git a/core/model/listeners.py b/core/model/listeners.py index 889320a503..9e12d34f3c 100644 --- a/core/model/listeners.py +++ b/core/model/listeners.py @@ -2,7 +2,6 @@ import datetime from threading import RLock -from typing import Union from sqlalchemy import event, text from sqlalchemy.orm import Session @@ -208,9 +207,7 @@ def recursive_equivalence_on_identifier_create( @Listener.before_flush((Work, LicensePool), ListenerState.new) -def add_work_to_customlists( - session: Session, instance: Union[Work, LicensePool] -) -> None: +def add_work_to_customlists(session: Session, instance: Work | LicensePool) -> None: """Whenever a Work or LicensePool is created we must add it to the custom lists for its collection""" add_work_to_customlists_for_collection(instance) diff --git a/core/model/patron.py b/core/model/patron.py index db815b0011..445657991f 100644 --- a/core/model/patron.py +++ b/core/model/patron.py @@ -4,7 +4,7 @@ import datetime import logging import uuid -from typing import TYPE_CHECKING, List, Optional +from typing import TYPE_CHECKING from psycopg2.extras import NumericRange from sqlalchemy import ( @@ -153,10 +153,10 @@ class Patron(Base): # be an explicit decision of the ILS integration code. cached_neighborhood = Column(Unicode, default=None, index=True) - loans: Mapped[List[Loan]] = relationship( + loans: Mapped[list[Loan]] = relationship( "Loan", backref="patron", cascade="delete", uselist=True ) - holds: Mapped[List[Hold]] = relationship( + holds: Mapped[list[Hold]] = relationship( "Hold", back_populates="patron", cascade="delete", @@ -164,7 +164,7 @@ class Patron(Base): order_by="Hold.id", ) - annotations: Mapped[List[Annotation]] = relationship( + annotations: Mapped[list[Annotation]] = relationship( "Annotation", backref="patron", order_by="desc(Annotation.timestamp)", @@ -172,7 +172,7 @@ class Patron(Base): ) # One Patron can have many associated Credentials. 
- credentials: Mapped[List[Credential]] = relationship( + credentials: Mapped[list[Credential]] = relationship( "Credential", back_populates="patron", cascade="delete" ) @@ -545,7 +545,7 @@ class Loan(Base, LoanAndHoldMixin): license: Mapped[License] = relationship("License", back_populates="loans") fulfillment_id = Column(Integer, ForeignKey("licensepooldeliveries.id")) - fulfillment: Mapped[Optional[LicensePoolDeliveryMechanism]] = relationship( + fulfillment: Mapped[LicensePoolDeliveryMechanism | None] = relationship( "LicensePoolDeliveryMechanism", back_populates="fulfills" ) start = Column(DateTime(timezone=True), index=True) diff --git a/core/model/resource.py b/core/model/resource.py index 99e6f87f72..0a5b1931e4 100644 --- a/core/model/resource.py +++ b/core/model/resource.py @@ -10,7 +10,6 @@ import traceback from hashlib import md5 from io import BytesIO -from typing import Dict, List, Tuple from urllib.parse import quote, urlparse, urlsplit import requests @@ -66,7 +65,7 @@ class Resource(Base): # Many Editions may choose this resource (as opposed to other # resources linked to them with rel="image") as their cover image. - cover_editions: Mapped[List[Edition]] = relationship( + cover_editions: Mapped[list[Edition]] = relationship( "Edition", backref="cover", foreign_keys=[Edition.cover_id] ) @@ -74,21 +73,21 @@ class Resource(Base): # linked to them with rel="description") as their summary. from core.model.work import Work - summary_works: Mapped[List[Work]] = relationship( + summary_works: Mapped[list[Work]] = relationship( "Work", backref="summary", foreign_keys=[Work.summary_id] ) # Many LicensePools (but probably one at most) may use this # resource in a delivery mechanism. licensepooldeliverymechanisms: Mapped[ - List[LicensePoolDeliveryMechanism] + list[LicensePoolDeliveryMechanism] ] = relationship( "LicensePoolDeliveryMechanism", back_populates="resource", foreign_keys=[LicensePoolDeliveryMechanism.resource_id], ) - links: Mapped[List[Hyperlink]] = relationship("Hyperlink", backref="resource") + links: Mapped[list[Hyperlink]] = relationship("Hyperlink", backref="resource") # The DataSource that is the controlling authority for this Resource. data_source_id = Column(Integer, ForeignKey("datasources.id"), index=True) @@ -103,7 +102,7 @@ class Resource(Base): rights_explanation = Column(Unicode) # A Resource may be transformed into many derivatives. - transformations: Mapped[List[ResourceTransformation]] = relationship( + transformations: Mapped[list[ResourceTransformation]] = relationship( "ResourceTransformation", foreign_keys="ResourceTransformation.original_id", lazy="joined", @@ -376,7 +375,7 @@ class ResourceTransformation(Base): original_id = Column(Integer, ForeignKey("resources.id"), index=True) # The settings used for the transformation. - settings: Mapped[Dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) + settings: Mapped[dict[str, str]] = Column(MutableDict.as_mutable(JSON), default={}) class Hyperlink(Base, LinkRelations): @@ -499,7 +498,7 @@ class Representation(Base, MediaTypes): # Representation. 
thumbnail_of_id = Column(Integer, ForeignKey("representations.id"), index=True) - thumbnails: Mapped[List[Representation]] = relationship( + thumbnails: Mapped[list[Representation]] = relationship( "Representation", backref=backref("thumbnail_of", remote_side=[id]), lazy="joined", @@ -1010,7 +1009,7 @@ def headers_to_string(cls, d): @classmethod def simple_http_get( cls, url, headers, **kwargs - ) -> Tuple[int, Dict[str, str], bytes]: + ) -> tuple[int, dict[str, str], bytes]: """The most simple HTTP-based GET.""" if not "allow_redirects" in kwargs: kwargs["allow_redirects"] = True diff --git a/core/model/work.py b/core/model/work.py index b2120a353f..d53c0e2cfe 100644 --- a/core/model/work.py +++ b/core/model/work.py @@ -7,7 +7,7 @@ from collections import Counter from datetime import date, datetime from decimal import Decimal -from typing import TYPE_CHECKING, Any, List, Optional, Union, cast +from typing import TYPE_CHECKING, Any, cast import pytz from sqlalchemy import ( @@ -122,7 +122,7 @@ class Work(Base): id = Column(Integer, primary_key=True) # One Work may have copies scattered across many LicensePools. - license_pools: Mapped[List[LicensePool]] = relationship( + license_pools: Mapped[list[LicensePool]] = relationship( "LicensePool", backref="work", lazy="joined", uselist=True ) @@ -131,20 +131,20 @@ class Work(Base): presentation_edition_id = Column(Integer, ForeignKey("editions.id"), index=True) # One Work may have many associated WorkCoverageRecords. - coverage_records: Mapped[List[WorkCoverageRecord]] = relationship( + coverage_records: Mapped[list[WorkCoverageRecord]] = relationship( "WorkCoverageRecord", back_populates="work", cascade="all, delete-orphan" ) # One Work may be associated with many CustomListEntries. # However, a CustomListEntry may lose its Work without # ceasing to exist. - custom_list_entries: Mapped[List[CustomListEntry]] = relationship( + custom_list_entries: Mapped[list[CustomListEntry]] = relationship( "CustomListEntry", backref="work" ) # One Work may participate in many WorkGenre assignments. 
genres = association_proxy("work_genres", "genre", creator=WorkGenre.from_genre) - work_genres: Mapped[List[WorkGenre]] = relationship( + work_genres: Mapped[list[WorkGenre]] = relationship( "WorkGenre", backref="work", cascade="all, delete-orphan" ) audience = Column(Unicode, index=True) @@ -257,7 +257,7 @@ def sort_author(self): return self.presentation_edition.sort_author or self.presentation_edition.author @property - def language(self) -> Optional[str]: + def language(self) -> str | None: if self.presentation_edition: return self.presentation_edition.language return None @@ -2185,9 +2185,7 @@ def delete(self, search_index=None): _db.delete(self) -def add_work_to_customlists_for_collection( - pool_or_work: Union[LicensePool, Work] -) -> None: +def add_work_to_customlists_for_collection(pool_or_work: LicensePool | Work) -> None: if isinstance(pool_or_work, Work): work = pool_or_work pools = work.license_pools diff --git a/core/opds2_import.py b/core/opds2_import.py index 516b4b59e8..7f26213bce 100644 --- a/core/opds2_import.py +++ b/core/opds2_import.py @@ -1,19 +1,10 @@ from __future__ import annotations import logging +from collections.abc import Callable, Iterable from datetime import datetime from io import BytesIO, StringIO -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Iterable, - List, - Optional, - Tuple, - Type, -) +from typing import TYPE_CHECKING, Any from urllib.parse import urljoin, urlparse import webpub_manifest_parser.opds2.ast as opds2_ast @@ -209,7 +200,7 @@ class OPDS2ImporterSettings(OPDSImporterSettings): ), ) - ignored_identifier_types: List[str] = FormField( + ignored_identifier_types: list[str] = FormField( alias="IGNORED_IDENTIFIER_TYPE", default=[], form=ConfigurationFormItem( @@ -234,11 +225,11 @@ class OPDS2ImporterLibrarySettings(OPDSImporterLibrarySettings): class OPDS2API(BaseOPDSAPI): @classmethod - def settings_class(cls) -> Type[OPDS2ImporterSettings]: + def settings_class(cls) -> type[OPDS2ImporterSettings]: return OPDS2ImporterSettings @classmethod - def library_settings_class(cls) -> Type[OPDS2ImporterLibrarySettings]: + def library_settings_class(cls) -> type[OPDS2ImporterLibrarySettings]: return OPDS2ImporterLibrarySettings @classmethod @@ -336,7 +327,7 @@ class OPDS2Importer(BaseOPDSImporter[OPDS2ImporterSettings]): NEXT_LINK_RELATION: str = "next" @classmethod - def settings_class(cls) -> Type[OPDS2ImporterSettings]: + def settings_class(cls) -> type[OPDS2ImporterSettings]: return OPDS2ImporterSettings def __init__( @@ -345,7 +336,7 @@ def __init__( collection: Collection, parser: RWPMManifestParser, data_source_name: str | None = None, - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): """Initialize a new instance of OPDS2Importer class. @@ -463,7 +454,7 @@ def _extract_contributors( return contributor_metadata_list def _extract_link( - self, link: Link, feed_self_url: str, default_link_rel: Optional[str] = None + self, link: Link, feed_self_url: str, default_link_rel: str | None = None ) -> LinkData: """Extract a LinkData object from webpub-manifest-parser's link. @@ -750,7 +741,7 @@ def _extract_publication_metadata( self, feed: opds2_ast.OPDS2Feed, publication: opds2_ast.OPDS2Publication, - data_source_name: Optional[str], + data_source_name: str | None, ) -> Metadata: """Extract a Metadata object from webpub-manifest-parser's publication. 
@@ -1060,7 +1051,7 @@ def extract_next_links(self, feed: str | opds2_ast.OPDS2Feed) -> list[str]: def extract_last_update_dates( self, feed: str | opds2_ast.OPDS2Feed - ) -> list[tuple[Optional[str], Optional[datetime]]]: + ) -> list[tuple[str | None, datetime | None]]: """Extract last update date of the feed. :param feed: OPDS 2.0 feed @@ -1150,7 +1141,7 @@ class OPDS2ImportMonitor(OPDSImportMonitor): MEDIA_TYPE = OPDS2MediaTypesRegistry.OPDS_FEED.key, "application/json" def _verify_media_type( - self, url: str, status_code: int, headers: Dict[str, str], feed: bytes + self, url: str, status_code: int, headers: dict[str, str], feed: bytes ) -> None: # Make sure we got an OPDS feed, and not an error page that was # sent with a 200 status code. diff --git a/core/opds_import.py b/core/opds_import.py index e7d482fd2d..03a46929ce 100644 --- a/core/opds_import.py +++ b/core/opds_import.py @@ -5,25 +5,10 @@ import urllib from abc import ABC, abstractmethod from collections import defaultdict +from collections.abc import Callable, Generator, Iterable, Sequence from datetime import datetime from io import BytesIO -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Dict, - Generator, - Generic, - Iterable, - List, - Optional, - Sequence, - Tuple, - Type, - TypeVar, - cast, - overload, -) +from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast, overload from urllib.parse import urljoin, urlparse from xml.etree.ElementTree import Element @@ -131,7 +116,7 @@ class OPDSImporterSettings( form=ConfigurationFormItem(label=_("Data source name"), required=True) ) - default_audience: Optional[str] = FormField( + default_audience: str | None = FormField( None, form=ConfigurationFormItem( label=_("Default audience"), @@ -148,7 +133,7 @@ class OPDSImporterSettings( ), ) - username: Optional[str] = FormField( + username: str | None = FormField( form=ConfigurationFormItem( label=_("Username"), description=_( @@ -158,7 +143,7 @@ class OPDSImporterSettings( ) ) - password: Optional[str] = FormField( + password: str | None = FormField( form=ConfigurationFormItem( label=_("Password"), description=_( @@ -187,7 +172,7 @@ class OPDSImporterSettings( ), ) - primary_identifier_source: Optional[str] = FormField( + primary_identifier_source: str | None = FormField( form=ConfigurationFormItem( label=_("Identifer"), required=False, @@ -231,7 +216,7 @@ def place_hold( patron: Patron, pin: str, licensepool: LicensePool, - notification_email_address: Optional[str], + notification_email_address: str | None, ) -> HoldInfo: # Because all OPDS content is assumed to be simultaneously # available to all patrons, there is no such thing as a hold. @@ -354,7 +339,7 @@ def checkout( def can_fulfill_without_loan( self, - patron: Optional[Patron], + patron: Patron | None, pool: LicensePool, lpdm: LicensePoolDeliveryMechanism, ) -> bool: @@ -373,8 +358,8 @@ def __init__( self, _db: Session, collection: Collection, - data_source_name: Optional[str], - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + data_source_name: str | None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): self._db = _db if collection.id is None: @@ -404,23 +389,23 @@ def __init__( @classmethod @abstractmethod - def settings_class(cls) -> Type[SettingsType]: + def settings_class(cls) -> type[SettingsType]: ... 
@abstractmethod def extract_feed_data( - self, feed: str | bytes, feed_url: Optional[str] = None - ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: + self, feed: str | bytes, feed_url: str | None = None + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure]]]: ... @abstractmethod def extract_last_update_dates( self, feed: str | bytes | FeedParserDict - ) -> List[Tuple[Optional[str], Optional[datetime]]]: + ) -> list[tuple[str | None, datetime | None]]: ... @abstractmethod - def extract_next_links(self, feed: str | bytes) -> List[str]: + def extract_next_links(self, feed: str | bytes) -> list[str]: ... @overload @@ -428,10 +413,10 @@ def parse_identifier(self, identifier: str) -> Identifier: ... @overload - def parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: + def parse_identifier(self, identifier: str | None) -> Identifier | None: ... - def parse_identifier(self, identifier: Optional[str]) -> Optional[Identifier]: + def parse_identifier(self, identifier: str | None) -> Identifier | None: """Parse the identifier and return an Identifier object representing it. :param identifier: String containing the identifier @@ -551,12 +536,12 @@ def update_work_for_edition( return pool, work def import_from_feed( - self, feed: str | bytes, feed_url: Optional[str] = None - ) -> Tuple[ - List[Edition], - List[LicensePool], - List[Work], - Dict[str, List[CoverageFailure]], + self, feed: str | bytes, feed_url: str | None = None + ) -> tuple[ + list[Edition], + list[LicensePool], + list[Work], + dict[str, list[CoverageFailure]], ]: # Keep track of editions that were imported. Pools and works # for those editions may be looked up or created. @@ -639,11 +624,11 @@ def import_from_feed( class OPDSAPI(BaseOPDSAPI): @classmethod - def settings_class(cls) -> Type[OPDSImporterSettings]: + def settings_class(cls) -> type[OPDSImporterSettings]: return OPDSImporterSettings @classmethod - def library_settings_class(cls) -> Type[OPDSImporterLibrarySettings]: + def library_settings_class(cls) -> type[OPDSImporterLibrarySettings]: return OPDSImporterLibrarySettings @classmethod @@ -674,15 +659,15 @@ class OPDSImporter(BaseOPDSImporter[OPDSImporterSettings]): PARSER_CLASS = OPDSXMLParser @classmethod - def settings_class(cls) -> Type[OPDSImporterSettings]: + def settings_class(cls) -> type[OPDSImporterSettings]: return OPDSImporterSettings def __init__( self, _db: Session, collection: Collection, - data_source_name: Optional[str] = None, - http_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, + data_source_name: str | None = None, + http_get: Callable[..., tuple[int, Any, bytes]] | None = None, ): """:param collection: LicensePools created by this OPDS import will be associated with the given Collection. If this is None, @@ -708,7 +693,7 @@ def __init__( # gutenberg.org. 
self.http_get = http_get or Representation.cautious_http_get - def extract_next_links(self, feed: str | bytes | FeedParserDict) -> List[str]: + def extract_next_links(self, feed: str | bytes | FeedParserDict) -> list[str]: if isinstance(feed, (bytes, str)): parsed = feedparser.parse(feed) else: @@ -723,7 +708,7 @@ def extract_next_links(self, feed: str | bytes | FeedParserDict) -> List[str]: def extract_last_update_dates( self, feed: str | bytes | FeedParserDict - ) -> List[Tuple[Optional[str], Optional[datetime]]]: + ) -> list[tuple[str | None, datetime | None]]: if isinstance(feed, (bytes, str)): parsed_feed = feedparser.parse(feed) else: @@ -735,8 +720,8 @@ def extract_last_update_dates( return [x for x in dates if x and x[1]] def extract_feed_data( - self, feed: str | bytes, feed_url: Optional[str] = None - ) -> Tuple[Dict[str, Metadata], Dict[str, List[CoverageFailure]]]: + self, feed: str | bytes, feed_url: str | None = None + ) -> tuple[dict[str, Metadata], dict[str, list[CoverageFailure]]]: """Turn an OPDS feed into lists of Metadata and CirculationData objects, with associated messages and next_links. """ @@ -860,18 +845,18 @@ def extract_feed_data( @overload def handle_failure( self, urn: str, failure: Identifier - ) -> Tuple[Identifier, Identifier]: + ) -> tuple[Identifier, Identifier]: ... @overload def handle_failure( self, urn: str, failure: CoverageFailure - ) -> Tuple[Identifier, CoverageFailure]: + ) -> tuple[Identifier, CoverageFailure]: ... def handle_failure( self, urn: str, failure: Identifier | CoverageFailure - ) -> Tuple[Identifier, CoverageFailure | Identifier]: + ) -> tuple[Identifier, CoverageFailure | Identifier]: """Convert a URN and a failure message that came in through an OPDS feed into an Identifier and a CoverageFailure object. @@ -902,8 +887,8 @@ def _add_format_data(cls, circulation: CirculationData) -> None: @classmethod def combine( - self, d1: Optional[Dict[str, Any]], d2: Optional[Dict[str, Any]] - ) -> Dict[str, Any]: + self, d1: dict[str, Any] | None, d2: dict[str, Any] | None + ) -> dict[str, Any]: """Combine two dictionaries that can be used as keyword arguments to the Metadata constructor. """ @@ -940,7 +925,7 @@ def combine( def extract_data_from_feedparser( self, feed: str | bytes, data_source: DataSource - ) -> Tuple[Dict[str, Any], Dict[str, CoverageFailure]]: + ) -> tuple[dict[str, Any], dict[str, CoverageFailure]]: feedparser_parsed = feedparser.parse(feed) values = {} failures = {} @@ -970,9 +955,9 @@ def extract_metadata_from_elementtree( cls, feed: bytes | str, data_source: DataSource, - feed_url: Optional[str] = None, - do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - ) -> Tuple[Dict[str, Any], Dict[str, CoverageFailure]]: + feed_url: str | None = None, + do_get: Callable[..., tuple[int, Any, bytes]] | None = None, + ) -> tuple[dict[str, Any], dict[str, CoverageFailure]]: """Parse the OPDS as XML and extract all author and subject information, as well as ratings and medium. 
@@ -1029,23 +1014,23 @@ def extract_metadata_from_elementtree( return values, failures @classmethod - def _datetime(cls, entry: Dict[str, str], key: str) -> Optional[datetime]: + def _datetime(cls, entry: dict[str, str], key: str) -> datetime | None: value = entry.get(key, None) if not value: return None return datetime_utc(*value[:6]) def last_update_date_for_feedparser_entry( - self, entry: Dict[str, Any] - ) -> Tuple[Optional[str], Optional[datetime]]: + self, entry: dict[str, Any] + ) -> tuple[str | None, datetime | None]: identifier = entry.get("id") updated = self._datetime(entry, "updated_parsed") return identifier, updated @classmethod def data_detail_for_feedparser_entry( - cls, entry: Dict[str, str], data_source: DataSource - ) -> Tuple[Optional[str], Optional[Dict[str, Any]], Optional[CoverageFailure]]: + cls, entry: dict[str, str], data_source: DataSource + ) -> tuple[str | None, dict[str, Any] | None, CoverageFailure | None]: """Turn an entry dictionary created by feedparser into dictionaries of data that can be used as keyword arguments to the Metadata and CirculationData constructors. @@ -1070,8 +1055,8 @@ def data_detail_for_feedparser_entry( @classmethod def _data_detail_for_feedparser_entry( - cls, entry: Dict[str, Any], metadata_data_source: DataSource - ) -> Dict[str, Any]: + cls, entry: dict[str, Any], metadata_data_source: DataSource + ) -> dict[str, Any]: """Helper method that extracts metadata and circulation data from a feedparser entry. This method can be overridden in tests to check that callers handle things properly when it throws an exception. @@ -1131,7 +1116,7 @@ def _data_detail_for_feedparser_entry( links = [] - def summary_to_linkdata(detail: Optional[Dict[str, str]]) -> Optional[LinkData]: + def summary_to_linkdata(detail: dict[str, str] | None) -> LinkData | None: if not detail: return None if not "value" in detail or not detail["value"]: @@ -1184,7 +1169,7 @@ def rights_uri(cls, rights_string: str) -> str: return RightsStatus.rights_uri_from_string(rights_string) @classmethod - def rights_uri_from_feedparser_entry(cls, entry: Dict[str, str]) -> str: + def rights_uri_from_feedparser_entry(cls, entry: dict[str, str]) -> str: """Extract a rights URI from a parsed feedparser entry. :return: A rights URI. @@ -1193,7 +1178,7 @@ def rights_uri_from_feedparser_entry(cls, entry: Dict[str, str]) -> str: return cls.rights_uri(rights) @classmethod - def rights_uri_from_entry_tag(cls, entry: Element) -> Optional[str]: + def rights_uri_from_entry_tag(cls, entry: Element) -> str | None: """Extract a rights string from an lxml tag. :return: A rights URI. @@ -1255,7 +1240,7 @@ def coveragefailures_from_messages( @classmethod def coveragefailure_from_message( cls, data_source: DataSource, message: OPDSMessage - ) -> Optional[CoverageFailure]: + ) -> CoverageFailure | None: """Turn a tag into a CoverageFailure.""" _db = Session.object_session(data_source) @@ -1300,9 +1285,9 @@ def detail_for_elementtree_entry( parser: OPDSXMLParser, entry_tag: Element, data_source: DataSource, - feed_url: Optional[str] = None, - do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - ) -> Tuple[Optional[str], Optional[Dict[str, Any]], Optional[CoverageFailure]]: + feed_url: str | None = None, + do_get: Callable[..., tuple[int, Any, bytes]] | None = None, + ) -> tuple[str | None, dict[str, Any] | None, CoverageFailure | None]: """Turn an tag into a dictionary of metadata that can be used as keyword arguments to the Metadata contructor. 
@@ -1334,16 +1319,16 @@ def _detail_for_elementtree_entry( cls, parser: OPDSXMLParser, entry_tag: Element, - feed_url: Optional[str] = None, - do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None, - ) -> Dict[str, Any]: + feed_url: str | None = None, + do_get: Callable[..., tuple[int, Any, bytes]] | None = None, + ) -> dict[str, Any]: """Helper method that extracts metadata and circulation data from an elementtree entry. This method can be overridden in tests to check that callers handle things properly when it throws an exception. """ # We will fill this dictionary with all the information # we can find. - data: Dict[str, Any] = dict() + data: dict[str, Any] = dict() alternate_identifiers = [] for id_tag in parser._xpath(entry_tag, "dcterms:identifier"): @@ -1407,7 +1392,7 @@ def _detail_for_elementtree_entry( return data @classmethod - def get_medium_from_links(cls, links: List[LinkData]) -> Optional[str]: + def get_medium_from_links(cls, links: list[LinkData]) -> str | None: """Get medium if derivable from information in an acquisition link.""" derived = None for link in links: @@ -1423,7 +1408,7 @@ def get_medium_from_links(cls, links: List[LinkData]) -> Optional[str]: return derived @classmethod - def extract_identifier(cls, identifier_tag: Element) -> Optional[IdentifierData]: + def extract_identifier(cls, identifier_tag: Element) -> IdentifierData | None: """Turn a tag into an IdentifierData object.""" try: if identifier_tag.text is None: @@ -1437,8 +1422,8 @@ def extract_identifier(cls, identifier_tag: Element) -> Optional[IdentifierData] @classmethod def extract_medium( - cls, entry_tag: Optional[Element], default: Optional[str] = Edition.BOOK_MEDIUM - ) -> Optional[str]: + cls, entry_tag: Element | None, default: str | None = Edition.BOOK_MEDIUM + ) -> str | None: """Derive a value for Edition.medium from schema:additionalType or from a subtag. @@ -1462,7 +1447,7 @@ def extract_medium( @classmethod def extract_contributor( cls, parser: OPDSXMLParser, author_tag: Element - ) -> Optional[ContributorData]: + ) -> ContributorData | None: """Turn an tag into a ContributorData object.""" subtag = parser.text_of_optional_subtag sort_name = subtag(author_tag, "simplified:sort_name") @@ -1525,9 +1510,9 @@ def extract_subject( def extract_link( cls, link_tag: Element, - feed_url: Optional[str] = None, - entry_rights_uri: Optional[str] = None, - ) -> Optional[LinkData]: + feed_url: str | None = None, + entry_rights_uri: str | None = None, + ) -> LinkData | None: """Convert a tag into a LinkData object. :param feed_url: The URL to the enclosing feed, for use in resolving @@ -1562,10 +1547,10 @@ def extract_link( def make_link_data( cls, rel: str, - href: Optional[str] = None, - media_type: Optional[str] = None, - rights_uri: Optional[str] = None, - content: Optional[str] = None, + href: str | None = None, + media_type: str | None = None, + rights_uri: str | None = None, + content: str | None = None, ) -> LinkData: """Hook method for creating a LinkData object. @@ -1580,7 +1565,7 @@ def make_link_data( ) @classmethod - def consolidate_links(cls, links: Sequence[LinkData | None]) -> List[LinkData]: + def consolidate_links(cls, links: Sequence[LinkData | None]) -> list[LinkData]: """Try to match up links with their thumbnails. 
If link n is an image and link n+1 is a thumbnail, then the @@ -1643,7 +1628,7 @@ def consolidate_links(cls, links: Sequence[LinkData | None]) -> List[LinkData]: return new_links @classmethod - def extract_measurement(cls, rating_tag: Element) -> Optional[MeasurementData]: + def extract_measurement(cls, rating_tag: Element) -> MeasurementData | None: type = rating_tag.get("{http://schema.org/}additionalType") value = rating_tag.get("{http://schema.org/}ratingValue") if not value: @@ -1664,7 +1649,7 @@ def extract_measurement(cls, rating_tag: Element) -> Optional[MeasurementData]: return None @classmethod - def extract_series(cls, series_tag: Element) -> Tuple[Optional[str], Optional[str]]: + def extract_series(cls, series_tag: Element) -> tuple[str | None, str | None]: attr = series_tag.attrib series_name = attr.get("{http://schema.org/}name", None) series_position = attr.get("{http://schema.org/}position", None) @@ -1690,7 +1675,7 @@ def __init__( self, _db: Session, collection: Collection, - import_class: Type[BaseOPDSImporter[OPDSImporterSettings]], + import_class: type[BaseOPDSImporter[OPDSImporterSettings]], force_reimport: bool = False, **import_class_kwargs: Any, ) -> None: @@ -1727,8 +1712,8 @@ def __init__( super().__init__(_db, collection) def _get( - self, url: str, headers: Dict[str, str] - ) -> Tuple[int, Dict[str, str], bytes]: + self, url: str, headers: dict[str, str] + ) -> tuple[int, dict[str, str], bytes]: """Make the sort of HTTP request that's normal for an OPDS feed. Long timeout, raise error on anything but 2xx or 3xx. @@ -1755,7 +1740,7 @@ def _get_accept_header(self) -> str: ] ) - def _update_headers(self, headers: Optional[Dict[str, str]]) -> Dict[str, str]: + def _update_headers(self, headers: dict[str, str] | None) -> dict[str, str]: headers = dict(headers) if headers else {} if self.username and self.password and not "Authorization" in headers: headers["Authorization"] = "Basic %s" % base64.b64encode( @@ -1769,7 +1754,7 @@ def _update_headers(self, headers: Optional[Dict[str, str]]) -> Dict[str, str]: return headers - def data_source(self, collection: Collection) -> Optional[DataSource]: + def data_source(self, collection: Collection) -> DataSource | None: """Returns the data source name for the given collection. By default, this URL is stored as a setting on the collection, but @@ -1808,7 +1793,7 @@ def feed_contains_new_data(self, feed: bytes | str) -> bool: return new_data def identifier_needs_import( - self, identifier: Optional[Identifier], last_updated_remote: Optional[datetime] + self, identifier: Identifier | None, last_updated_remote: datetime | None ) -> bool: """Does the remote side have new information about this Identifier? @@ -1874,7 +1859,7 @@ def identifier_needs_import( return False def _verify_media_type( - self, url: str, status_code: int, headers: Dict[str, str], feed: bytes + self, url: str, status_code: int, headers: dict[str, str], feed: bytes ) -> None: # Make sure we got an OPDS feed, and not an error page that was # sent with a 200 status code. @@ -1888,8 +1873,8 @@ def _verify_media_type( ) def follow_one_link( - self, url: str, do_get: Optional[Callable[..., Tuple[int, Any, bytes]]] = None - ) -> Tuple[List[str], Optional[bytes]]: + self, url: str, do_get: Callable[..., tuple[int, Any, bytes]] | None = None + ) -> tuple[list[str], bytes | None]: """Download a representation of a URL and extract the useful information. 
@@ -1918,7 +1903,7 @@ def follow_one_link( def import_one_feed( self, feed: bytes | str - ) -> Tuple[List[Edition], Dict[str, List[CoverageFailure]]]: + ) -> tuple[list[Edition], dict[str, list[CoverageFailure]]]: """Import every book mentioned in an OPDS feed.""" # Because we are importing into a Collection, we will immediately @@ -1946,7 +1931,7 @@ def import_one_feed( return imported_editions, failures - def _get_feeds(self) -> Iterable[Tuple[str, bytes]]: + def _get_feeds(self) -> Iterable[tuple[str, bytes]]: feeds = [] queue = [cast(str, self.feed_url)] seen_links = set() diff --git a/core/python_expression_dsl/evaluator.py b/core/python_expression_dsl/evaluator.py index 8f3b18083b..14e02d8c8e 100644 --- a/core/python_expression_dsl/evaluator.py +++ b/core/python_expression_dsl/evaluator.py @@ -1,7 +1,7 @@ import operator import types +from collections.abc import Callable from copy import copy, deepcopy -from typing import Callable, Dict, List, Optional, Union from multipledispatch import dispatch @@ -74,8 +74,8 @@ class DSLEvaluationVisitor(Visitor): def __init__( self, - context: Optional[Union[Dict, object]] = None, - safe_classes: Optional[List[type]] = None, + context: dict | object | None = None, + safe_classes: list[type] | None = None, ): """Initialize a new instance of DSLEvaluationVisitor class. @@ -83,8 +83,8 @@ def __init__( :param safe_classes: Optional list of classes which methods can be called. By default it contains only built-in classes: float, int, str """ - self._context: Optional[Union[Dict, object]] = {} - self._safe_classes: Optional[List[type]] = [] + self._context: dict | object | None = {} + self._safe_classes: list[type] | None = [] self._current_scope = None self._root_dot_node = None @@ -95,7 +95,7 @@ def __init__( self.safe_classes = safe_classes @staticmethod - def _get_attribute_value(obj: Union[Dict, object], attribute: str): + def _get_attribute_value(obj: dict | object, attribute: str): """Return the attribute's value by its name. :param obj: Object or a dictionary containing the attribute @@ -121,7 +121,7 @@ def _get_attribute_value(obj: Union[Dict, object], attribute: str): def _evaluate_unary_expression( self, unary_expression: UnaryExpression, - available_operators: Dict[Operator, Callable], + available_operators: dict[Operator, Callable], ): """Evaluate the unary expression. @@ -147,7 +147,7 @@ def _evaluate_unary_expression( def _evaluate_binary_expression( self, binary_expression: BinaryExpression, - available_operators: Dict[Operator, Callable], + available_operators: dict[Operator, Callable], ): """Evaluate the binary expression. @@ -172,7 +172,7 @@ def _evaluate_binary_expression( return result @property - def context(self) -> Union[Dict, object]: + def context(self) -> dict | object: """Return the evaluation context. :return: Evaluation context @@ -180,7 +180,7 @@ def context(self) -> Union[Dict, object]: return self._context @context.setter - def context(self, value: Union[Dict, object]): + def context(self, value: dict | object): """Set the evaluation context. :param value: New evaluation context @@ -202,7 +202,7 @@ def context(self, value: Union[Dict, object]): self._context = new_context @property - def safe_classes(self) -> Optional[List[type]]: + def safe_classes(self) -> list[type] | None: """Return a list of classes which methods can be called. 
:return: List of safe classes which methods can be called @@ -210,7 +210,7 @@ def safe_classes(self) -> Optional[List[type]]: return self._safe_classes @safe_classes.setter - def safe_classes(self, value: List[type]): + def safe_classes(self, value: list[type]): """Set safe classes which methods can be called. :param value: List of safe classes which methods be called @@ -397,8 +397,8 @@ def parser(self) -> DSLParser: def evaluate( self, expression: str, - context: Optional[Union[Dict, object]] = None, - safe_classes: Optional[List[type]] = None, + context: dict | object | None = None, + safe_classes: list[type] | None = None, ): """Evaluate the expression and return the resulting value. diff --git a/core/python_expression_dsl/util.py b/core/python_expression_dsl/util.py index 4f3f4ba167..54bd2ebaf5 100644 --- a/core/python_expression_dsl/util.py +++ b/core/python_expression_dsl/util.py @@ -1,4 +1,4 @@ -from typing import Optional, Type, TypeVar +from typing import TypeVar from pyparsing import ParseResults @@ -55,8 +55,8 @@ def _parse_number(tokens: ParseResults) -> Number: def _parse_unary_expression( - expression_type: Type[UE], tokens: ParseResults -) -> Optional[UE]: + expression_type: type[UE], tokens: ParseResults +) -> UE | None: """Transform the token into an unary expression. :param tokens: ParseResults objects @@ -80,7 +80,7 @@ def _parse_unary_expression( def _parse_unary_arithmetic_expression( tokens: ParseResults, -) -> Optional[UnaryArithmeticExpression]: +) -> UnaryArithmeticExpression | None: """Transform the token into an UnaryArithmeticExpression node. :param tokens: ParseResults objects @@ -92,7 +92,7 @@ def _parse_unary_arithmetic_expression( def _parse_unary_boolean_expression( tokens: ParseResults, -) -> Optional[UnaryBooleanExpression]: +) -> UnaryBooleanExpression | None: """Transform the token into an UnaryBooleanExpression node. :param tokens: ParseResults objects @@ -103,8 +103,8 @@ def _parse_unary_boolean_expression( def _parse_binary_expression( - expression_type: Type[BE], tokens: ParseResults -) -> Optional[BE]: + expression_type: type[BE], tokens: ParseResults +) -> BE | None: """Transform the token into a BinaryExpression node. :param tokens: ParseResults objects @@ -129,7 +129,7 @@ def _parse_binary_expression( def _parse_binary_arithmetic_expression( tokens: ParseResults, -) -> Optional[BinaryArithmeticExpression]: +) -> BinaryArithmeticExpression | None: """Transform the token into a BinaryArithmeticExpression node. :param tokens: ParseResults objects @@ -141,7 +141,7 @@ def _parse_binary_arithmetic_expression( def _parse_binary_boolean_expression( tokens: ParseResults, -) -> Optional[BinaryBooleanExpression]: +) -> BinaryBooleanExpression | None: """Transform the token into a BinaryBooleanExpression node. :param tokens: ParseResults objects @@ -153,7 +153,7 @@ def _parse_binary_boolean_expression( def _parse_comparison_expression( tokens: ParseResults, -) -> Optional[ComparisonExpression]: +) -> ComparisonExpression | None: """Transform the token into a ComparisonExpression node. 
:param tokens: ParseResults objects diff --git a/core/query/coverage.py b/core/query/coverage.py index 0a9db44fa6..07ba502c5c 100644 --- a/core/query/coverage.py +++ b/core/query/coverage.py @@ -1,5 +1,3 @@ -from typing import List - from sqlalchemy.orm.session import Session from core.model.coverage import EquivalencyCoverageRecord @@ -9,8 +7,8 @@ class EquivalencyCoverageQueries: @classmethod def add_coverage_for_identifiers_chain( - cls, identifiers: List[Identifier], _db=None - ) -> List[EquivalencyCoverageRecord]: + cls, identifiers: list[Identifier], _db=None + ) -> list[EquivalencyCoverageRecord]: """Hunt down any recursive identifiers that may be touched by these identifiers set all the possible coverages to reset and recompute the chain """ @@ -29,7 +27,7 @@ def add_coverage_for_identifiers_chain( ) # Need to be reset - equivs: List[Equivalency] = Equivalency.for_identifiers( + equivs: list[Equivalency] = Equivalency.for_identifiers( _db, (p[0] for p in parent_ids) ) records = [] diff --git a/core/query/playtime_entries.py b/core/query/playtime_entries.py index de82701695..6b9f79c3e1 100644 --- a/core/query/playtime_entries.py +++ b/core/query/playtime_entries.py @@ -1,5 +1,4 @@ import logging -from typing import List, Tuple from sqlalchemy.exc import IntegrityError from sqlalchemy.orm import Session @@ -26,7 +25,7 @@ def insert_playtime_entries( collection: Collection, library: Library, data: PlaytimeEntriesPost, - ) -> Tuple[List, PlaytimeEntriesPostSummary]: + ) -> tuple[list, PlaytimeEntriesPostSummary]: """Insert into the database playtime entries from a request""" responses = [] summary = PlaytimeEntriesPostSummary() diff --git a/core/saml/wayfless.py b/core/saml/wayfless.py index 5a6e655dd3..d40305e1fc 100644 --- a/core/saml/wayfless.py +++ b/core/saml/wayfless.py @@ -1,5 +1,3 @@ -from typing import Optional - from flask_babel import lazy_gettext as _ from core.exceptions import BaseError @@ -18,7 +16,7 @@ class SAMLWAYFlessConstants: class SAMLWAYFlessSetttings(BaseSettings): - saml_wayfless_url_template: Optional[str] = FormField( + saml_wayfless_url_template: str | None = FormField( default=None, form=ConfigurationFormItem( label=_("SAML WAYFless URL Template"), diff --git a/core/scripts.py b/core/scripts.py index f79677e942..19e839533a 100644 --- a/core/scripts.py +++ b/core/scripts.py @@ -8,8 +8,8 @@ import traceback import unicodedata import uuid +from collections.abc import Generator from enum import Enum -from typing import Generator, Optional, Type from sqlalchemy import and_, exists, or_, select, tuple_ from sqlalchemy.orm import Query, Session, defer @@ -120,7 +120,7 @@ def parse_time(cls, time_string): continue raise ValueError("Could not parse time: %s" % time_string) - def __init__(self, _db=None, services: Optional[Services] = None, *args, **kwargs): + def __init__(self, _db=None, services: Services | None = None, *args, **kwargs): """Basic constructor. :_db: A database session to be used instead of @@ -1808,7 +1808,7 @@ def paginate_query(self, query) -> Generator: the ordering of the rows follows all the joined tables""" for subject in self._unchecked_subjects(): - last_work: Optional[Work] = None # Last work object of the previous page + last_work: Work | None = None # Last work object of the previous page # IDs of the last work, for paging work_id, license_id, iden_id, classn_id = ( None, @@ -2009,7 +2009,7 @@ class OPDSImportScript(CollectionInputScript): name = "Import all books from the OPDS feed associated with a collection." 
IMPORTER_CLASS = OPDSImporter - MONITOR_CLASS: Type[OPDSImportMonitor] = OPDSImportMonitor + MONITOR_CLASS: type[OPDSImportMonitor] = OPDSImportMonitor PROTOCOL = ExternalIntegration.OPDS_IMPORT def __init__( @@ -2467,7 +2467,7 @@ class WhereAreMyBooksScript(CollectionInputScript): """ def __init__( - self, _db=None, output=None, search: Optional[ExternalSearchIndex] = None + self, _db=None, output=None, search: ExternalSearchIndex | None = None ): _db = _db or self._db super().__init__(_db) diff --git a/core/search/document.py b/core/search/document.py index 987433ed9c..6cabe57d67 100644 --- a/core/search/document.py +++ b/core/search/document.py @@ -1,5 +1,4 @@ from abc import ABC, abstractmethod -from typing import Dict class SearchMappingFieldType(ABC): @@ -68,14 +67,14 @@ def serialize(self) -> dict: class SearchMappingFieldTypeParameterized(SearchMappingFieldType): """The base class for types that have parameters (date, keyword, etc)""" - _parameters: Dict[str, str] + _parameters: dict[str, str] def __init__(self, name: str): self._name = name self._parameters = {} @property - def parameters(self) -> Dict[str, str]: + def parameters(self) -> dict[str, str]: return self._parameters def serialize(self) -> dict: @@ -111,14 +110,14 @@ def sort_author_keyword() -> SearchMappingFieldTypeParameterized: class SearchMappingFieldTypeObject(SearchMappingFieldType): """See: https://opensearch.org/docs/latest/field-types/supported-field-types/object/""" - _properties: Dict[str, SearchMappingFieldType] + _properties: dict[str, SearchMappingFieldType] def __init__(self, type: str): self._type = type self._properties = {} @property - def properties(self) -> Dict[str, SearchMappingFieldType]: + def properties(self) -> dict[str, SearchMappingFieldType]: return self._properties def add_property(self, name, type: SearchMappingFieldType): @@ -226,24 +225,24 @@ class SearchMappingDocument: """ def __init__(self): - self._settings: Dict[str, dict] = {} - self._fields: Dict[str, SearchMappingFieldType] = {} - self._scripts: Dict[str, str] = {} + self._settings: dict[str, dict] = {} + self._fields: dict[str, SearchMappingFieldType] = {} + self._scripts: dict[str, str] = {} @property - def settings(self) -> Dict[str, dict]: + def settings(self) -> dict[str, dict]: return self._settings @property - def scripts(self) -> Dict[str, str]: + def scripts(self) -> dict[str, str]: return self._scripts @property - def properties(self) -> Dict[str, SearchMappingFieldType]: + def properties(self) -> dict[str, SearchMappingFieldType]: return self._fields @properties.setter - def properties(self, fields: Dict[str, SearchMappingFieldType]): + def properties(self, fields: dict[str, SearchMappingFieldType]): self._fields = dict(fields) def serialize(self) -> dict: diff --git a/core/search/migrator.py b/core/search/migrator.py index e68141e259..2aff641024 100644 --- a/core/search/migrator.py +++ b/core/search/migrator.py @@ -1,6 +1,6 @@ import logging from abc import ABC, abstractmethod -from typing import Iterable, List, Optional +from collections.abc import Iterable from core.search.revision import SearchSchemaRevision from core.search.revision_directory import SearchRevisionDirectory @@ -21,7 +21,7 @@ class SearchDocumentReceiverType(ABC): @abstractmethod def add_documents( self, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit documents to be indexed.""" @abstractmethod @@ -44,7 +44,7 @@ def pointer(self) -> str: def add_documents( self, documents: 
Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit documents to be indexed.""" return self._service.index_submit_documents( pointer=self._pointer, documents=documents @@ -77,7 +77,7 @@ def __init__( def add_documents( self, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit documents to be indexed.""" return self._receiver.add_documents(documents) @@ -109,9 +109,7 @@ def __init__(self, revisions: SearchRevisionDirectory, service: SearchService): self._revisions = revisions self._service = service - def migrate( - self, base_name: str, version: int - ) -> Optional[SearchMigrationInProgress]: + def migrate(self, base_name: str, version: int) -> SearchMigrationInProgress | None: """ Migrate to the given version using the given base name (such as 'circulation-works'). The function returns an object that expects to receive batches of search documents used to populate any new index. When all diff --git a/core/search/revision_directory.py b/core/search/revision_directory.py index 6adbdb01bf..55e2462521 100644 --- a/core/search/revision_directory.py +++ b/core/search/revision_directory.py @@ -1,4 +1,4 @@ -from typing import Mapping +from collections.abc import Mapping from core.config import CannotLoadConfiguration from core.search.revision import SearchSchemaRevision diff --git a/core/search/service.py b/core/search/service.py index 3a7122d8ec..bf751fd91b 100644 --- a/core/search/service.py +++ b/core/search/service.py @@ -1,8 +1,8 @@ import logging import re from abc import ABC, abstractmethod +from collections.abc import Iterable from dataclasses import dataclass -from typing import Iterable, List, Optional import opensearchpy.helpers from opensearch_dsl import MultiSearch, Search @@ -81,11 +81,11 @@ def write_pointer_name(self) -> str: """Get the name used for the write pointer.""" @abstractmethod - def read_pointer(self) -> Optional[str]: + def read_pointer(self) -> str | None: """Get the read pointer, if it exists.""" @abstractmethod - def write_pointer(self) -> Optional[SearchWritePointer]: + def write_pointer(self) -> SearchWritePointer | None: """Get the writer pointer, if it exists.""" @abstractmethod @@ -105,7 +105,7 @@ def index_create(self, revision: SearchSchemaRevision) -> None: """Atomically create an index for the given base name and revision.""" @abstractmethod - def indexes_created(self) -> List[str]: + def indexes_created(self) -> list[str]: """A log of all the indexes that have been created by this client service.""" @abstractmethod @@ -125,7 +125,7 @@ def index_submit_documents( self, pointer: str, documents: Iterable[dict], - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: """Submit search documents to the given index.""" @abstractmethod @@ -166,7 +166,7 @@ def __init__(self, client: OpenSearch, base_revision_name: str): self._search = Search(using=self._client) self.base_revision_name = base_revision_name self._multi_search = MultiSearch(using=self._client) - self._indexes_created: List[str] = [] + self._indexes_created: list[str] = [] # Documents are not allowed to automatically create indexes. 
# AWS OpenSearch only accepts the "flat" format @@ -174,10 +174,10 @@ def __init__(self, client: OpenSearch, base_revision_name: str): body={"persistent": {"action.auto_create_index": "false"}} ) - def indexes_created(self) -> List[str]: + def indexes_created(self) -> list[str]: return self._indexes_created - def write_pointer(self) -> Optional[SearchWritePointer]: + def write_pointer(self) -> SearchWritePointer | None: try: result: dict = self._client.indices.get_alias( name=self.write_pointer_name() @@ -278,7 +278,7 @@ def _ensure_scripts(self, revision: SearchSchemaRevision) -> None: def index_submit_documents( self, pointer: str, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: self._logger.info(f"submitting documents to index {pointer}") # Specifically override the target in all documents to the target pointer @@ -303,7 +303,7 @@ def index_submit_documents( yield_ok=False, ) - error_results: List[SearchServiceFailedDocument] = [] + error_results: list[SearchServiceFailedDocument] = [] if isinstance(errors, list): for error in errors: error_results.append(SearchServiceFailedDocument.from_bulk_error(error)) @@ -335,7 +335,7 @@ def write_pointer_set(self, revision: SearchSchemaRevision) -> None: self._logger.debug(f"setting write pointer {alias_name} to {target_index}") self._client.indices.update_aliases(body=action) - def read_pointer(self) -> Optional[str]: + def read_pointer(self) -> str | None: try: result: dict = self._client.indices.get_alias(name=self.read_pointer_name()) for name in result.keys(): diff --git a/core/search/v5.py b/core/search/v5.py index d206f5e78e..7c202a2a3b 100644 --- a/core/search/v5.py +++ b/core/search/v5.py @@ -1,5 +1,3 @@ -from typing import Dict - from core.search.document import ( BASIC_TEXT, BOOLEAN, @@ -227,7 +225,7 @@ def __init__(self): char_filter=self.AUTHOR_CHAR_FILTER_NAMES, ) - self._fields: Dict[str, SearchMappingFieldType] = { + self._fields: dict[str, SearchMappingFieldType] = { "summary": BASIC_TEXT, "title": FILTERABLE_TEXT, "subtitle": FILTERABLE_TEXT, diff --git a/core/selftest.py b/core/selftest.py index da7c946112..5136629b35 100644 --- a/core/selftest.py +++ b/core/selftest.py @@ -7,19 +7,9 @@ import sys import traceback from abc import ABC, abstractmethod +from collections.abc import Callable, Generator from datetime import datetime -from typing import ( - Any, - Callable, - Dict, - Generator, - List, - Optional, - Tuple, - Type, - TypeVar, - Union, -) +from typing import Any, ParamSpec, TypeVar from sqlalchemy.orm import Session @@ -30,11 +20,6 @@ from core.util.log import LoggerMixin from core.util.opds_writer import AtomFeed -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - if sys.version_info >= (3, 11): from typing import Self else: @@ -47,7 +32,7 @@ class SelfTestResult: HasSelfTest.run_self_tests() returns a list of these """ - def __init__(self, name: Optional[str]): + def __init__(self, name: str | None): # Name of the test. self.name = name @@ -55,7 +40,7 @@ def __init__(self, name: Optional[str]): self.success = False # The exception raised, if any. - self.exception: Optional[Exception] = None + self.exception: Exception | None = None # The return value of the test method, assuming it ran to # completion. @@ -65,13 +50,13 @@ def __init__(self, name: Optional[str]): self.start: datetime = utc_now() # End time of the test. 
- self.end: Optional[datetime] = None + self.end: datetime | None = None # Collection associated with the test - self.collection: Optional[Collection] = None + self.collection: Collection | None = None @property - def to_dict(self) -> Dict[str, Any]: + def to_dict(self) -> dict[str, Any]: """Convert this SelfTestResult to a dictionary for use in JSON serialization. """ @@ -85,7 +70,7 @@ def to_dict(self) -> Dict[str, Any]: } else: exception = None - value: Dict[str, Any] = dict( + value: dict[str, Any] = dict( name=self.name, success=self.success, duration=self.duration, @@ -140,7 +125,7 @@ def duration(self) -> float: return (self.end - self.start).total_seconds() @property - def debug_message(self) -> Optional[str]: + def debug_message(self) -> str | None: """The debug message associated with the Exception, if any.""" if not self.exception: return None @@ -158,12 +143,12 @@ class BaseHasSelfTests(ABC): @classmethod def run_self_tests( - cls: Type[Self], + cls: type[Self], _db: Session, - constructor_method: Optional[Callable[..., Self]] = None, + constructor_method: Callable[..., Self] | None = None, *args: Any, **kwargs: Any, - ) -> Tuple[Dict[str, Any], List[SelfTestResult]]: + ) -> tuple[dict[str, Any], list[SelfTestResult]]: """Instantiate this class and call _run_self_tests on it. :param _db: A database connection. Will be passed into `_run_self_tests`. @@ -262,8 +247,8 @@ def run_test( def test_failure( cls, name: str, - message: Union[Optional[str], Exception], - debug_message: Optional[str] = None, + message: str | None | Exception, + debug_message: str | None = None, ) -> SelfTestResult: """Create a SelfTestResult for a known failure. @@ -292,7 +277,7 @@ def _run_self_tests(self, _db: Session) -> Generator[SelfTestResult, None, None] @abstractmethod def store_self_test_results( - self, _db: Session, value: Dict[str, Any], results: List[SelfTestResult] + self, _db: Session, value: dict[str, Any], results: list[SelfTestResult] ) -> None: ... @@ -307,10 +292,10 @@ class HasSelfTests(BaseHasSelfTests, ABC): SELF_TEST_RESULTS_SETTING = "self_test_results" def store_self_test_results( - self, _db: Session, value: Dict[str, Any], results: List[SelfTestResult] + self, _db: Session, value: dict[str, Any], results: list[SelfTestResult] ) -> None: """Store the results of a self-test in the database.""" - integration: Optional[ExternalIntegration] + integration: ExternalIntegration | None from core.external_search import ExternalSearchIndex if isinstance(self, ExternalSearchIndex): @@ -328,19 +313,19 @@ def store_self_test_results( @classmethod def prior_test_results( - cls: Type[Self], + cls: type[Self], _db: Session, - constructor_method: Optional[Callable[..., Self]] = None, + constructor_method: Callable[..., Self] | None = None, *args: Any, **kwargs: Any, - ) -> Union[Optional[Dict[str, Any]], str]: + ) -> dict[str, Any] | None | str: """Retrieve the last set of test results from the database. The arguments here are the same as the arguments to run_self_tests. """ constructor_method = constructor_method or cls instance = constructor_method(*args, **kwargs) - integration: Optional[ExternalIntegration] + integration: ExternalIntegration | None from core.external_search import ExternalSearchIndex @@ -357,7 +342,7 @@ def prior_test_results( return None - def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: + def external_integration(self, _db: Session) -> ExternalIntegration | None: """Locate the ExternalIntegration associated with this object. 
The status of the self-tests will be stored as a ConfigurationSetting on this ExternalIntegration. @@ -374,7 +359,7 @@ def external_integration(self, _db: Session) -> Optional[ExternalIntegration]: class HasSelfTestsIntegrationConfiguration(BaseHasSelfTests, LoggerMixin, ABC): def store_self_test_results( - self, _db: Session, value: Dict[str, Any], results: List[SelfTestResult] + self, _db: Session, value: dict[str, Any], results: list[SelfTestResult] ) -> None: integration = self.integration(_db) if integration is None: @@ -386,8 +371,8 @@ def store_self_test_results( @classmethod def load_self_test_results( - cls, integration: Optional[IntegrationConfiguration] - ) -> Optional[Dict[str, Any]]: + cls, integration: IntegrationConfiguration | None + ) -> dict[str, Any] | None: if integration is None: cls.logger().error( "No IntegrationConfiguration was found. Self-test results could not be loaded." @@ -404,21 +389,21 @@ def load_self_test_results( @classmethod def prior_test_results( - cls: Type[Self], + cls: type[Self], _db: Session, - constructor_method: Optional[Callable[..., Self]] = None, + constructor_method: Callable[..., Self] | None = None, *args: Any, **kwargs: Any, - ) -> Union[Optional[Dict[str, Any]], str]: + ) -> dict[str, Any] | None | str: """Retrieve the last set of test results from the database. The arguments here are the same as the arguments to run_self_tests. """ constructor_method = constructor_method or cls instance = constructor_method(*args, **kwargs) - integration: Optional[IntegrationConfiguration] = instance.integration(_db) + integration: IntegrationConfiguration | None = instance.integration(_db) return cls.load_self_test_results(integration) or "No results yet" @abstractmethod - def integration(self, _db: Session) -> Optional[IntegrationConfiguration]: + def integration(self, _db: Session) -> IntegrationConfiguration | None: ... diff --git a/core/service/logging/configuration.py b/core/service/logging/configuration.py index 0a758c9720..a39ce37ddd 100644 --- a/core/service/logging/configuration.py +++ b/core/service/logging/configuration.py @@ -1,5 +1,5 @@ from enum import Enum -from typing import Any, Dict, Optional +from typing import Any import boto3 from pydantic import PositiveInt, validator @@ -20,18 +20,18 @@ class LoggingConfiguration(ServiceConfiguration): verbose_level: LogLevel = LogLevel.warning cloudwatch_enabled: bool = False - cloudwatch_region: Optional[str] = None + cloudwatch_region: str | None = None cloudwatch_group: str = "palace" cloudwatch_stream: str = DEFAULT_LOG_STREAM_NAME cloudwatch_interval: PositiveInt = 60 cloudwatch_create_group: bool = True - cloudwatch_access_key: Optional[str] = None - cloudwatch_secret_key: Optional[str] = None + cloudwatch_access_key: str | None = None + cloudwatch_secret_key: str | None = None @validator("cloudwatch_region") def validate_cloudwatch_region( - cls, v: Optional[str], values: Dict[str, Any] - ) -> Optional[str]: + cls, v: str | None, values: dict[str, Any] + ) -> str | None: if not values.get("cloudwatch_enabled"): # If cloudwatch is not enabled, no validation is needed. 
return None diff --git a/core/service/logging/container.py b/core/service/logging/container.py index 91f4fa281e..54b8c247e6 100644 --- a/core/service/logging/container.py +++ b/core/service/logging/container.py @@ -1,7 +1,7 @@ from __future__ import annotations from logging import Handler -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import boto3 from dependency_injector import providers @@ -32,7 +32,7 @@ class Logging(DeclarativeContainer): json_formatter: Provider[JSONFormatter] = Singleton(JSONFormatter) - cloudwatch_handler: Provider[Optional[Handler]] = providers.Singleton( + cloudwatch_handler: Provider[Handler | None] = providers.Singleton( create_cloudwatch_handler, formatter=json_formatter, level=config.level, diff --git a/core/service/logging/log.py b/core/service/logging/log.py index 23dc873e02..8cedf44628 100644 --- a/core/service/logging/log.py +++ b/core/service/logging/log.py @@ -3,8 +3,9 @@ import json import logging import socket +from collections.abc import Callable from logging import Handler -from typing import TYPE_CHECKING, Any, Callable +from typing import TYPE_CHECKING, Any from watchtower import CloudWatchLogHandler diff --git a/core/service/storage/configuration.py b/core/service/storage/configuration.py index 6e9b51f052..677a4badfa 100644 --- a/core/service/storage/configuration.py +++ b/core/service/storage/configuration.py @@ -1,5 +1,3 @@ -from typing import Optional - import boto3 from pydantic import AnyHttpUrl, parse_obj_as, validator @@ -7,21 +5,21 @@ class StorageConfiguration(ServiceConfiguration): - region: Optional[str] = None - access_key: Optional[str] = None - secret_key: Optional[str] = None + region: str | None = None + access_key: str | None = None + secret_key: str | None = None - public_access_bucket: Optional[str] = None - analytics_bucket: Optional[str] = None + public_access_bucket: str | None = None + analytics_bucket: str | None = None - endpoint_url: Optional[AnyHttpUrl] = None + endpoint_url: AnyHttpUrl | None = None url_template: AnyHttpUrl = parse_obj_as( AnyHttpUrl, "https://{bucket}.s3.{region}.amazonaws.com/{key}" ) @validator("region") - def validate_region(cls, v: Optional[str]) -> Optional[str]: + def validate_region(cls, v: str | None) -> str | None: # No validation if region is not provided. 
if v is None: return None diff --git a/core/service/storage/container.py b/core/service/storage/container.py index cf454d39a0..9375c7508b 100644 --- a/core/service/storage/container.py +++ b/core/service/storage/container.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import boto3 from dependency_injector import providers @@ -25,7 +25,7 @@ class Storage(DeclarativeContainer): endpoint_url=config.endpoint_url, ) - analytics: Provider[Optional[S3Service]] = providers.Singleton( + analytics: Provider[S3Service | None] = providers.Singleton( S3Service.factory, client=s3_client, region=config.region, @@ -33,7 +33,7 @@ class Storage(DeclarativeContainer): url_template=config.url_template, ) - public: Provider[Optional[S3Service]] = providers.Singleton( + public: Provider[S3Service | None] = providers.Singleton( S3Service.factory, client=s3_client, region=config.region, diff --git a/core/service/storage/s3.py b/core/service/storage/s3.py index c64360ae09..558d466306 100644 --- a/core/service/storage/s3.py +++ b/core/service/storage/s3.py @@ -5,7 +5,7 @@ from io import BytesIO from string import Formatter from types import TracebackType -from typing import TYPE_CHECKING, BinaryIO, List, Optional, Type +from typing import TYPE_CHECKING, BinaryIO from urllib.parse import quote from botocore.exceptions import BotoCoreError, ClientError @@ -36,19 +36,19 @@ def __init__( bucket: str, key: str, url: str, - media_type: Optional[str] = None, + media_type: str | None = None, ) -> None: self.client = client self.key = key self.bucket = bucket self.part_number = 1 - self.parts: List[MultipartS3UploadPart] = [] + self.parts: list[MultipartS3UploadPart] = [] self.media_type = media_type - self.upload: Optional[CreateMultipartUploadOutputTypeDef] = None - self.upload_id: Optional[str] = None + self.upload: CreateMultipartUploadOutputTypeDef | None = None + self.upload_id: str | None = None self._complete = False self._url = url - self._exception: Optional[BaseException] = None + self._exception: BaseException | None = None def __enter__(self) -> Self: params = { @@ -63,9 +63,9 @@ def __enter__(self) -> Self: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> bool: if exc_val is None: self._upload_complete() @@ -130,7 +130,7 @@ def complete(self) -> bool: return self._complete @property - def exception(self) -> Optional[BaseException]: + def exception(self) -> BaseException | None: return self._exception @@ -138,7 +138,7 @@ class S3Service(LoggerMixin): def __init__( self, client: S3Client, - region: Optional[str], + region: str | None, bucket: str, url_template: str, ) -> None: @@ -164,10 +164,10 @@ def __init__( def factory( cls, client: S3Client, - region: Optional[str], - bucket: Optional[str], + region: str | None, + bucket: str | None, url_template: str, - ) -> Optional[Self]: + ) -> Self | None: if bucket is None: return None return cls(client, region, bucket, url_template) @@ -184,8 +184,8 @@ def store( self, key: str, content: str | bytes, - content_type: Optional[str] = None, - ) -> Optional[str]: + content_type: str | None = None, + ) -> str | None: if isinstance(content, str): content = content.encode("utf8") return self.store_stream( @@ -196,8 +196,8 @@ def store_stream( self, key: str, stream: BinaryIO, - content_type: 
Optional[str] = None, - ) -> Optional[str]: + content_type: str | None = None, + ) -> str | None: try: extra_args = {} if content_type is None else {"ContentType": content_type} self.client.upload_fileobj( @@ -223,7 +223,7 @@ def store_stream( return url def multipart( - self, key: str, content_type: Optional[str] = None + self, key: str, content_type: str | None = None ) -> MultipartS3ContextManager: url = self.generate_url(key) return MultipartS3ContextManager( diff --git a/core/util/__init__.py b/core/util/__init__.py index 7e7acbbabd..9754039e1e 100644 --- a/core/util/__init__.py +++ b/core/util/__init__.py @@ -5,7 +5,8 @@ import re import string from collections import Counter -from typing import Any, Iterable, Optional +from collections.abc import Iterable +from typing import Any, Optional import sqlalchemy from money import Money @@ -580,9 +581,9 @@ def chunks(lst, chunk_size, start_index=0): def ansible_boolean( - value: Optional[str | bool], - label: Optional[str] = None, - default: Optional[bool] = None, + value: str | bool | None, + label: str | None = None, + default: bool | None = None, ) -> bool: """Map Ansible "truthy" and "falsy" values to a Python boolean. diff --git a/core/util/accept_language.py b/core/util/accept_language.py index ae0ea14b02..0696bde4f6 100644 --- a/core/util/accept_language.py +++ b/core/util/accept_language.py @@ -22,7 +22,6 @@ import re from collections import namedtuple from operator import attrgetter -from typing import Optional VALIDATE_LANG_REGEX = re.compile("^[a-z]+$", flags=re.IGNORECASE) QUALITY_VAL_SUB_REGEX = re.compile("^q=", flags=re.IGNORECASE) @@ -34,7 +33,7 @@ def parse_accept_language( - accept_language_str: str, default_quality: Optional[float] = None + accept_language_str: str, default_quality: float | None = None ) -> list: """ Parse a RFC 2616 Accept-Language string. diff --git a/core/util/authentication_for_opds.py b/core/util/authentication_for_opds.py index 334bee3775..dfcb899d75 100644 --- a/core/util/authentication_for_opds.py +++ b/core/util/authentication_for_opds.py @@ -1,7 +1,7 @@ from __future__ import annotations from abc import ABC, abstractmethod -from typing import Any, Dict, List, Optional +from typing import Any from sqlalchemy.orm import Session @@ -19,7 +19,7 @@ def flow_type(self) -> str: """ ... - def authentication_flow_document(self, _db: Session) -> Dict[str, Any]: + def authentication_flow_document(self, _db: Session) -> dict[str, Any]: """Convert this object into a dictionary that can be used in the `authentication` list of an Authentication For OPDS document. """ @@ -28,7 +28,7 @@ def authentication_flow_document(self, _db: Session) -> Dict[str, Any]: return data @abstractmethod - def _authentication_flow_document(self, _db: Session) -> Dict[str, Any]: + def _authentication_flow_document(self, _db: Session) -> dict[str, Any]: ... @@ -44,8 +44,8 @@ def __init__( self, id: str | None = None, title: str | None = None, - authentication_flows: List[OPDSAuthenticationFlow] | None = None, - links: List[Dict[str, Optional[str]]] | None = None, + authentication_flows: list[OPDSAuthenticationFlow] | None = None, + links: list[dict[str, str | None]] | None = None, ): """Initialize an Authentication For OPDS document. 
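Everywhere in this patch, Optional[...] and Union[...] give way to the PEP 604 spelling that the new Python 3.10 floor permits, as in the __init__ signature just above. A minimal sketch of the equivalence, using made-up names rather than anything from the codebase:

    from typing import Optional, Union

    def greet_old(name: Optional[str] = None) -> Union[str, None]:
        return f"Hello, {name}" if name else None

    def greet_new(name: str | None = None) -> str | None:
        return f"Hello, {name}" if name else None

    # On 3.10+ the three spellings compare equal, so type checkers and
    # runtime introspection treat them as the same type.
    assert Optional[str] == (str | None) == Union[str, None]
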
@@ -63,7 +63,7 @@ def __init__( self.authentication_flows = authentication_flows or [] self.links = links or [] - def to_dict(self, _db: Session) -> Dict[str, Any]: + def to_dict(self, _db: Session) -> dict[str, Any]: """Convert this data structure to a dictionary that becomes an Authentication For OPDS document when serialized to JSON. @@ -83,7 +83,7 @@ def to_dict(self, _db: Session) -> Dict[str, Any]: if not isinstance(value, list): raise ValueError("'%s' must be a list." % key) - document: Dict[str, Any] = dict(id=self.id, title=self.title) + document: dict[str, Any] = dict(id=self.id, title=self.title) flow_documents = document.setdefault("authentication", []) for flow in self.authentication_flows: doc = flow.authentication_flow_document(_db) diff --git a/core/util/base64.py b/core/util/base64.py index 3de2b6e6f4..77e1db67d2 100644 --- a/core/util/base64.py +++ b/core/util/base64.py @@ -1,15 +1,9 @@ from __future__ import annotations import base64 as stdlib_base64 -import sys +from collections.abc import Callable from functools import wraps -from typing import Callable, TypeVar - -# TODO: Remove this when we drop support for Python 3.9 -if sys.version_info >= (3, 10): - from typing import Concatenate, ParamSpec -else: - from typing_extensions import Concatenate, ParamSpec +from typing import Concatenate, ParamSpec, TypeVar P = ParamSpec("P") T = TypeVar("T") diff --git a/core/util/cache.py b/core/util/cache.py index 986d03e348..36919acc2e 100644 --- a/core/util/cache.py +++ b/core/util/cache.py @@ -1,22 +1,15 @@ from __future__ import annotations -import sys import time +from collections.abc import Callable from functools import wraps from threading import Lock -from typing import Any, Callable, Dict, List, Optional, TypeVar, cast +from typing import Any, ParamSpec, TypeVar, cast from sqlalchemy.orm import Session from core.model.datasource import DataSource -# TODO: Remove this when we drop support for Python 3.9 -if sys.version_info >= (3, 10): - from typing import ParamSpec -else: - from typing_extensions import ParamSpec - - P = ParamSpec("P") T = TypeVar("T") @@ -41,7 +34,7 @@ def func(...): because the first argument will always be the instance itself Hence the signatures will be different for each object """ - cache: Dict[str, Any] = {} + cache: dict[str, Any] = {} def outer(func: Callable[P, T]) -> Callable[P, T]: @wraps(func) @@ -74,7 +67,7 @@ class CachedData: """ # Instance of itself - cache: Optional[CachedData] = None + cache: CachedData | None = None @classmethod def initialize(cls, _db: Session) -> CachedData: @@ -96,7 +89,7 @@ def __init__(self, _db: Session) -> None: self.lock = Lock() @memoize(ttls=3600) - def data_sources(self) -> List[DataSource]: + def data_sources(self) -> list[DataSource]: """List of all datasources within the system""" with self.lock: sources = self._db.query(DataSource).order_by(DataSource.id).all() diff --git a/core/util/datetime_helpers.py b/core/util/datetime_helpers.py index 137ec1fa49..9f3bd8e6da 100644 --- a/core/util/datetime_helpers.py +++ b/core/util/datetime_helpers.py @@ -1,5 +1,5 @@ import datetime -from typing import Optional, Tuple, overload +from typing import overload import pytz from dateutil.relativedelta import relativedelta @@ -41,11 +41,11 @@ def to_utc(dt: datetime.datetime) -> datetime.datetime: @overload -def to_utc(dt: Optional[datetime.datetime]) -> Optional[datetime.datetime]: +def to_utc(dt: datetime.datetime | None) -> datetime.datetime | None: ... 
-def to_utc(dt: Optional[datetime.datetime]) -> Optional[datetime.datetime]: +def to_utc(dt: datetime.datetime | None) -> datetime.datetime | None: """This converts a naive datetime object that represents UTC into an aware datetime object. @@ -73,7 +73,7 @@ def strptime_utc(date_string: str, format: str) -> datetime.datetime: return to_utc(datetime.datetime.strptime(date_string, format)) -def previous_months(number_of_months: int) -> Tuple[datetime.date, datetime.date]: +def previous_months(number_of_months: int) -> tuple[datetime.date, datetime.date]: """Calculate date boundaries for matching the specified previous number of months. :param number_of_months: The number of months in the interval. diff --git a/core/util/flask_util.py b/core/util/flask_util.py index d728af5e0c..c2bbd876b4 100644 --- a/core/util/flask_util.py +++ b/core/util/flask_util.py @@ -1,7 +1,7 @@ """Utilities for Flask applications.""" import datetime import time -from typing import Any, Dict +from typing import Any from wsgiref.handlers import format_date_time from flask import Response as FlaskResponse @@ -198,7 +198,7 @@ class Config: def api_dict( self, *args: Any, by_alias: bool = True, **kwargs: Any - ) -> Dict[str, Any]: + ) -> dict[str, Any]: """Return the instance in a form suitable for a web response. By default, the properties use their lower camel case aliases, diff --git a/core/util/http.py b/core/util/http.py index c156876821..c885967019 100644 --- a/core/util/http.py +++ b/core/util/http.py @@ -1,7 +1,8 @@ import logging import time +from collections.abc import Callable from json import JSONDecodeError -from typing import Any, Callable, Dict, List, Optional, Union +from typing import Any from urllib.parse import urlparse import requests @@ -407,7 +408,7 @@ def debuggable_get(cls, url: str, **kwargs: Any) -> Response: @classmethod def debuggable_post( - cls, url: str, payload: Union[str, Dict[str, Any]], **kwargs: Any + cls, url: str, payload: str | dict[str, Any], **kwargs: Any ) -> Response: """Make a POST request that returns a detailed problem detail document on error. 
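The core/util/http.py hunk above is typical of another change running through this patch: Callable, Iterable and Generator now come from collections.abc, and typing.List/Dict/Tuple/Type become the built-in generics of PEP 585. A rough before/after sketch with hypothetical names:

    # Before: aliases from typing, deprecated since Python 3.9
    from typing import Callable, Dict, List, Optional

    def index_items_old(items: List[str], key: Optional[Callable[[str], str]] = None) -> Dict[str, str]:
        key = key or str.lower
        return {key(item): item for item in items}

    # After: the style this patch adopts
    from collections.abc import Callable

    def index_items_new(items: list[str], key: Callable[[str], str] | None = None) -> dict[str, str]:
        key = key or str.lower
        return {key(item): item for item in items}
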
@@ -420,7 +421,7 @@ def debuggable_request( cls, http_method: str, url: str, - make_request_with: Optional[Callable[..., Response]] = None, + make_request_with: Callable[..., Response] | None = None, **kwargs: Any, ) -> Response: """Make a request that raises a ProblemError with a detailed problem detail @@ -451,8 +452,8 @@ def process_debuggable_response( cls, url: str, response: Response, - allowed_response_codes: Optional[List[Union[str, int]]] = None, - disallowed_response_codes: Optional[List[Union[str, int]]] = None, + allowed_response_codes: list[str | int] | None = None, + disallowed_response_codes: list[str | int] | None = None, expected_encoding: str = "utf-8", ) -> Response: """If there was a problem with an integration request, diff --git a/core/util/languages.py b/core/util/languages.py index cd277fe7a4..346d23be03 100644 --- a/core/util/languages.py +++ b/core/util/languages.py @@ -3,7 +3,7 @@ import re from collections import defaultdict -from typing import Dict, List, Pattern +from re import Pattern class LookupTable(dict): @@ -31,9 +31,9 @@ class LanguageCodes: two_to_three = LookupTable() three_to_two = LookupTable() terminologic_to_three = LookupTable() - english_names: Dict[str, List[str]] = defaultdict(list) + english_names: dict[str, list[str]] = defaultdict(list) english_names_to_three = LookupTable() - native_names: Dict[str, List[str]] = defaultdict(list) + native_names: dict[str, list[str]] = defaultdict(list) RAW_DATA = """aar||aa|Afar|afar abk||ab|Abkhazian|abkhaze @@ -653,7 +653,7 @@ class LanguageNames: number = re.compile("[0-9]") parentheses = re.compile(r"\([^)]+\)") - name_to_codes: Dict[str, List[str]] + name_to_codes: dict[str, list[str]] name_re: Pattern @classmethod diff --git a/core/util/log.py b/core/util/log.py index 3030920d44..1d092d6a0a 100644 --- a/core/util/log.py +++ b/core/util/log.py @@ -1,9 +1,9 @@ import functools import logging -import sys import time +from collections.abc import Callable, Generator from contextlib import contextmanager -from typing import Callable, Generator, Optional, TypeVar +from typing import TypeVar from typing_extensions import ParamSpec @@ -16,7 +16,7 @@ def log_elapsed_time( *, log_level: LogLevel, - message_prefix: Optional[str] = None, + message_prefix: str | None = None, skip_start: bool = False, ) -> Callable[[Callable[P, T]], Callable[P, T]]: """Decorator for logging elapsed time. @@ -66,7 +66,7 @@ def wrapper(*args: P.args, **kwargs: P.kwargs) -> T: def elapsed_time_logging( *, log_method: Callable[[str], None], - message_prefix: Optional[str] = None, + message_prefix: str | None = None, skip_start: bool = False, ) -> Generator[None, None, None]: """Context manager for logging elapsed time. @@ -88,18 +88,11 @@ def elapsed_time_logging( log_method(f"{prefix}Completed. (elapsed time: {elapsed_time:0.4f} seconds)") -# Once we drop python 3.8 this can go away -if sys.version_info >= (3, 9): - cache_decorator = functools.cache -else: - cache_decorator = functools.lru_cache - - class LoggerMixin: """Mixin that adds a logger with a standardized name""" @classmethod - @cache_decorator + @functools.cache def logger(cls) -> logging.Logger: """ Returns a logger named after the module and name of the class. 
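With Python 3.8 out of the picture, the sys.version_info guard around cache_decorator in core/util/log.py collapses to a bare functools.cache (available since 3.9) on LoggerMixin.logger. A small sketch of the resulting behaviour; the getLogger name format and the Importer subclass are assumptions for illustration, only the decorator stack comes from the hunk above:

    import functools
    import logging

    class LoggerMixin:
        """Mixin that adds a logger with a standardized name."""

        @classmethod
        @functools.cache
        def logger(cls) -> logging.Logger:
            # Evaluated once per class, then served from the cache.
            return logging.getLogger(f"{cls.__module__}.{cls.__name__}")

    class Importer(LoggerMixin):  # hypothetical consumer
        pass

    assert Importer.logger() is Importer.logger()
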
diff --git a/core/util/notifications.py b/core/util/notifications.py index cc8729e269..0dab790796 100644 --- a/core/util/notifications.py +++ b/core/util/notifications.py @@ -1,6 +1,7 @@ from __future__ import annotations -from typing import Mapping, Optional, cast +from collections.abc import Mapping +from typing import cast import firebase_admin from firebase_admin import credentials, messaging @@ -55,7 +56,7 @@ def base_url(cls, _db: Session) -> str: def send_messages( cls, tokens: list[DeviceToken], - notification: Optional[messaging.Notification], + notification: messaging.Notification | None, data: Mapping[str, str | None], ) -> list[str]: responses = [] diff --git a/core/util/problem_detail.py b/core/util/problem_detail.py index 72b4f507e1..7c9172830d 100644 --- a/core/util/problem_detail.py +++ b/core/util/problem_detail.py @@ -6,7 +6,6 @@ import json as j import logging -from typing import Dict, Optional, Tuple from flask_babel import LazyString from pydantic import BaseModel @@ -18,10 +17,10 @@ def json( type: str, - status: Optional[int], - title: Optional[str], - detail: Optional[str] = None, - debug_message: Optional[str] = None, + status: int | None, + title: str | None, + detail: str | None = None, + debug_message: str | None = None, ) -> str: d = dict(type=type, title=str(title), status=status) if detail: @@ -32,11 +31,11 @@ def json( class ProblemDetailModel(BaseModel): - type: Optional[str] = None - status: Optional[int] = None - title: Optional[str] = None - detail: Optional[str] = None - debug_message: Optional[str] = None + type: str | None = None + status: int | None = None + title: str | None = None + detail: str | None = None + debug_message: str | None = None class ProblemDetail: @@ -48,10 +47,10 @@ class ProblemDetail: def __init__( self, uri: str, - status_code: Optional[int] = None, - title: Optional[str] = None, - detail: Optional[str] = None, - debug_message: Optional[str] = None, + status_code: int | None = None, + title: str | None = None, + detail: str | None = None, + debug_message: str | None = None, ): self.uri = uri self.title = title @@ -60,7 +59,7 @@ def __init__( self.debug_message = debug_message @property - def response(self) -> Tuple[str, int, Dict[str, str]]: + def response(self) -> tuple[str, int, dict[str, str]]: """Create a Flask-style response.""" return ( json( @@ -77,9 +76,9 @@ def response(self) -> Tuple[str, int, Dict[str, str]]: def detailed( self, detail: str, - status_code: Optional[int] = None, - title: Optional[str] = None, - debug_message: Optional[str] = None, + status_code: int | None = None, + title: str | None = None, + debug_message: str | None = None, ) -> ProblemDetail: """Create a ProblemDetail for a more specific occurrence of an existing ProblemDetail. @@ -105,9 +104,9 @@ def detailed( def with_debug( self, debug_message: str, - detail: Optional[str] = None, - status_code: Optional[int] = None, - title: Optional[str] = None, + detail: str | None = None, + status_code: int | None = None, + title: str | None = None, ) -> ProblemDetail: """Insert debugging information into a ProblemDetail. 
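The ProblemDetailModel hunk above leans on pydantic v1 accepting the | union operator in annotations, which it does on Python 3.10 (support arrived in the 1.9 line). A short usage sketch of the model as patched; the concrete values are invented:

    from pydantic import BaseModel

    class ProblemDetailModel(BaseModel):
        type: str | None = None
        status: int | None = None
        title: str | None = None
        detail: str | None = None
        debug_message: str | None = None

    problem = ProblemDetailModel(status=404, title="Not found")
    assert problem.detail is None           # omitted fields default to None
    assert problem.dict()["status"] == 404  # pydantic v1 serialization
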
diff --git a/core/util/worker_pools.py b/core/util/worker_pools.py index 97ad1fe2b2..bd3aff2585 100644 --- a/core/util/worker_pools.py +++ b/core/util/worker_pools.py @@ -1,10 +1,11 @@ from __future__ import annotations import sys +from collections.abc import Callable from queue import Queue from threading import Thread from types import TracebackType -from typing import Any, Callable, Literal, Optional, Type +from typing import Any, Literal from sqlalchemy.orm import Session @@ -114,9 +115,9 @@ def restart(self) -> Self: def __exit__( self, - type: Optional[Type[BaseException]], - value: Optional[BaseException], - traceback: Optional[TracebackType], + type: type[BaseException] | None, + value: BaseException | None, + traceback: TracebackType | None, ) -> Literal[False]: self.join() if value is not None: diff --git a/core/util/xmlparser.py b/core/util/xmlparser.py index d14fb7c7ba..653719bc7f 100644 --- a/core/util/xmlparser.py +++ b/core/util/xmlparser.py @@ -1,17 +1,9 @@ from __future__ import annotations from abc import ABC, abstractmethod +from collections.abc import Callable, Generator from io import BytesIO -from typing import ( - TYPE_CHECKING, - Callable, - Dict, - Generator, - Generic, - List, - Optional, - TypeVar, -) +from typing import TYPE_CHECKING, Generic, TypeVar from lxml import etree @@ -25,12 +17,12 @@ class XMLParser: """Helper functions to process XML data.""" - NAMESPACES: Dict[str, str] = {} + NAMESPACES: dict[str, str] = {} @classmethod def _xpath( - cls, tag: _Element, expression: str, namespaces: Optional[Dict[str, str]] = None - ) -> List[_Element]: + cls, tag: _Element, expression: str, namespaces: dict[str, str] | None = None + ) -> list[_Element]: if not namespaces: namespaces = cls.NAMESPACES """Wrapper to do a namespaced XPath expression.""" @@ -38,8 +30,8 @@ def _xpath( @classmethod def _xpath1( - cls, tag: _Element, expression: str, namespaces: Optional[Dict[str, str]] = None - ) -> Optional[_Element]: + cls, tag: _Element, expression: str, namespaces: dict[str, str] | None = None + ) -> _Element | None: """Wrapper to do a namespaced XPath expression.""" values = cls._xpath(tag, expression, namespaces=namespaces) if not values: @@ -54,8 +46,8 @@ def _cls(self, tag_name: str, class_name: str) -> str: ) def text_of_optional_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None - ) -> Optional[str]: + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None + ) -> str | None: tag = self._xpath1(tag, name, namespaces=namespaces) if tag is None or tag.text is None: return None @@ -63,18 +55,18 @@ def text_of_optional_subtag( return str(tag.text) def text_of_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None ) -> str: return str(tag.xpath(name, namespaces=namespaces)[0].text) def int_of_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None ) -> int: return int(self.text_of_subtag(tag, name, namespaces=namespaces)) def int_of_optional_subtag( - self, tag: _Element, name: str, namespaces: Optional[Dict[str, str]] = None - ) -> Optional[int]: + self, tag: _Element, name: str, namespaces: dict[str, str] | None = None + ) -> int | None: v = self.text_of_optional_subtag(tag, name, namespaces=namespaces) if not v: return None @@ -107,8 +99,8 @@ def _load_xml( def _process_all( xml: _ElementTree, 
xpath_expression: str, - namespaces: Dict[str, str], - handler: Callable[[_Element, Dict[str, str]], Optional[T]], + namespaces: dict[str, str], + handler: Callable[[_Element, dict[str, str]], T | None], ) -> Generator[T, None, None]: """ Process all elements matching the given XPath expression. Calling @@ -144,7 +136,7 @@ def process_all( def process_first( self, xml: str | bytes | _ElementTree, - ) -> Optional[T]: + ) -> T | None: """ Process the first element matching the given XPath expression. Calling process_one on the element and returning None if no elements match or @@ -163,9 +155,7 @@ def xpath_expression(self) -> str: ... @abstractmethod - def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> Optional[T]: + def process_one(self, tag: _Element, namespaces: dict[str, str] | None) -> T | None: """ Process one element and return the result. Return None if the element should be ignored. diff --git a/customlists/customlist_explain.py b/customlists/customlist_explain.py index da99cefba2..9d3f4fd86f 100644 --- a/customlists/customlist_explain.py +++ b/customlists/customlist_explain.py @@ -2,7 +2,6 @@ import csv import json import logging -from typing import List from customlists.customlist_report import ( CustomListProblemBookBrokenOnSourceCM, @@ -21,7 +20,7 @@ class CustomListImportExplainer: @staticmethod - def _parse_arguments(args: List[str]) -> argparse.Namespace: + def _parse_arguments(args: list[str]) -> argparse.Namespace: parser: argparse.ArgumentParser = argparse.ArgumentParser( description="Explain what went wrong during an import." ) @@ -214,7 +213,7 @@ def __init__(self, args: argparse.Namespace): self._output_csv_file = args.output_csv_file @staticmethod - def create(args: List[str]) -> "CustomListImportExplainer": + def create(args: list[str]) -> "CustomListImportExplainer": return CustomListImportExplainer( CustomListImportExplainer._parse_arguments(args) ) diff --git a/customlists/customlist_export.py b/customlists/customlist_export.py index 9cfcb9e9ac..0d9d646959 100644 --- a/customlists/customlist_export.py +++ b/customlists/customlist_export.py @@ -3,7 +3,8 @@ import logging import os import re -from typing import IO, Any, Iterable, List, Mapping, Union +from collections.abc import Iterable, Mapping +from typing import IO, Any from urllib.parse import unquote import feedparser @@ -162,9 +163,9 @@ def name(self) -> str: class CustomList: - _books: List[Book] - _problematic_books: List[ProblematicBook] - _collections: List[CollectionReference] + _books: list[Book] + _problematic_books: list[ProblematicBook] + _collections: list[CollectionReference] _id: int _name: str _library_id: str @@ -225,8 +226,8 @@ def name(self) -> str: class CustomListExports: - _lists: List[CustomList] - _problematic_lists: List[ProblematicCustomList] + _lists: list[CustomList] + _problematic_lists: list[ProblematicCustomList] def __init__(self): self._lists = [] @@ -336,14 +337,14 @@ class CustomListExporter: _output_file: str _library_name: str _schema_file: str - _lists: List[str] + _lists: list[str] @staticmethod def _fatal(message: str): raise CustomListExportFailed(message) @staticmethod - def _parse_arguments(args: List[str]) -> argparse.Namespace: + def _parse_arguments(args: list[str]) -> argparse.Namespace: parser: argparse.ArgumentParser = argparse.ArgumentParser( description="Fetch one or more custom lists." 
) @@ -374,9 +375,7 @@ def _parse_arguments(args: List[str]) -> argparse.Namespace: ) return parser.parse_args(args) - def _make_custom_list( - self, raw_list: dict - ) -> Union[CustomList, ProblematicCustomList]: + def _make_custom_list(self, raw_list: dict) -> CustomList | ProblematicCustomList: id: int = raw_list["id"] name: str = raw_list["name"] @@ -529,5 +528,5 @@ def __init__(self, args: argparse.Namespace): self._logger.setLevel(logging.DEBUG) @staticmethod - def create(args: List[str]) -> "CustomListExporter": + def create(args: list[str]) -> "CustomListExporter": return CustomListExporter(CustomListExporter._parse_arguments(args)) diff --git a/customlists/customlist_import.py b/customlists/customlist_import.py index 82faa10a94..1d97d25003 100644 --- a/customlists/customlist_import.py +++ b/customlists/customlist_import.py @@ -3,7 +3,6 @@ import logging import os import re -from typing import Dict, List, Set from urllib.parse import unquote import feedparser @@ -62,7 +61,7 @@ def _fatal_response(message: str, response: Response) -> None: CustomListImporter._fatal(CustomListImporter._error_response(message, response)) @staticmethod - def _parse_arguments(args: List[str]) -> argparse.Namespace: + def _parse_arguments(args: list[str]) -> argparse.Namespace: parser: argparse.ArgumentParser = argparse.ArgumentParser( description="Import custom lists." ) @@ -133,7 +132,7 @@ def _process_check_book( report: CustomListReport, customlist: CustomList, book: Book, - rejected_books: Set[str], + rejected_books: set[str], ) -> None: self._logger.info( f"Checking that book '{book.title()}' ({book.id()}) has a matching ID and title on the target CM." @@ -217,7 +216,7 @@ def _process_customlist_check_collections( self, list_report: CustomListReport, customlist: CustomList, - rejected_collections: Set[str], + rejected_collections: set[str], ) -> None: self._logger.info( "Checking that all referenced collections exist on the target CM" @@ -250,7 +249,7 @@ def _process_customlist_check_books( self, list_report: CustomListReport, customlist: CustomList, - rejected_books: Set[str], + rejected_books: set[str], ) -> None: for book in customlist.books(): self._process_check_book( @@ -269,7 +268,7 @@ def _process_customlist_check_list( self, list_report: CustomListReport, customlist: CustomList, - rejected_lists: Set[int], + rejected_lists: set[int], ) -> None: self._logger.info( f"Checking that list '{customlist.name()}' ({customlist.id()}) does not exist on the target CM" @@ -300,20 +299,20 @@ def _process_customlist_update_list( self, list_report: CustomListReport, customlist: CustomList, - rejected_books: Set[str], - rejected_collections: Set[str], + rejected_books: set[str], + rejected_collections: set[str], ) -> None: self._logger.info( f"Updating list '{customlist.name()}' ({customlist.id()}) on the target CM with {customlist.size()} books" ) if not self._dry_run: - output_books: List[dict] = [] + output_books: list[dict] = [] for book in customlist.books(): if book.id() in rejected_books: continue output_books.append({"id": book.id(), "title": book.title()}) - output_collections: List[int] = [] + output_collections: list[int] = [] for collection in customlist.collections(): if collection.name() in rejected_collections: continue @@ -362,10 +361,10 @@ def _process_customlists( report: CustomListsReport, customlists: CustomListExports, ) -> None: - list_reports: Dict[int, CustomListReport] = {} - rejected_books: Set[str] = set({}) - rejected_lists: Set[int] = set({}) - rejected_collections: Set[str] = 
set({}) + list_reports: dict[int, CustomListReport] = {} + rejected_books: set[str] = set({}) + rejected_lists: set[int] = set({}) + rejected_collections: set[str] = set({}) for customlist in customlists.lists(): list_report = CustomListReport(customlist.id(), customlist.name()) @@ -434,5 +433,5 @@ def __init__(self, args: argparse.Namespace): self._logger.setLevel(logging.DEBUG) @staticmethod - def create(args: List[str]) -> "CustomListImporter": + def create(args: list[str]) -> "CustomListImporter": return CustomListImporter(CustomListImporter._parse_arguments(args)) diff --git a/customlists/customlist_report.py b/customlists/customlist_report.py index 8d15a5528a..7f1a616196 100644 --- a/customlists/customlist_report.py +++ b/customlists/customlist_report.py @@ -1,6 +1,6 @@ import json import logging -from typing import Iterable, List +from collections.abc import Iterable import jsonschema @@ -304,7 +304,7 @@ def name(self) -> str: class CustomListReport: - _errors: List[CustomListProblem] + _errors: list[CustomListProblem] _id: int _name: str @@ -407,7 +407,7 @@ def _parse(document: dict) -> "CustomListReport": class CustomListsReport: - _reports: List[CustomListReport] + _reports: list[CustomListReport] def __init__(self): self._reports = [] diff --git a/poetry.lock b/poetry.lock index 863ddb2c3c..e42a702ada 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "alembic" @@ -12,9 +12,6 @@ files = [ ] [package.dependencies] -"backports.zoneinfo" = {version = "*", optional = true, markers = "python_version < \"3.9\" and extra == \"tz\""} -importlib-metadata = {version = "*", markers = "python_version < \"3.9\""} -importlib-resources = {version = "*", markers = "python_version < \"3.9\""} Mako = "*" SQLAlchemy = ">=1.3.0" typing-extensions = ">=4" @@ -66,40 +63,9 @@ files = [ {file = "Babel-2.13.0.tar.gz", hash = "sha256:04c3e2d28d2b7681644508f836be388ae49e0cfe91465095340395b60d00f210"}, ] -[package.dependencies] -pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} - [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] -[[package]] -name = "backports-zoneinfo" -version = "0.2.1" -description = "Backport of the standard library zoneinfo module" -optional = false -python-versions = ">=3.6" -files = [ - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, - {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, - 
{file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, - {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, - {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, - {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, -] - -[package.extras] -tzdata = ["tzdata"] - [[package]] name = "bcrypt" version = "4.1.2" @@ -607,10 +573,7 @@ files = [ [package.dependencies] jmespath = ">=0.7.1,<2.0.0" python-dateutil = ">=2.1,<3.0.0" -urllib3 = [ - {version = ">=1.25.4,<1.27", markers = "python_version < \"3.10\""}, - {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""}, -] +urllib3 = {version = ">=1.25.4,<2.1", markers = "python_version >= \"3.10\""} [package.extras] crt = ["awscrt (==0.19.17)"] @@ -628,7 +591,6 @@ files = [ [package.dependencies] types-awscrt = "*" -typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.9\""} [[package]] name = "cachecontrol" @@ -1205,7 +1167,6 @@ files = [ [package.dependencies] blinker = ">=1.6.2" click = ">=8.1.3" -importlib-metadata = {version = ">=3.6.0", markers = "python_version < \"3.10\""} itsdangerous = ">=2.1.2" Jinja2 = ">=3.1.2" Werkzeug = ">=3.0.0" @@ -1332,12 +1293,12 @@ files = [ google-auth = ">=2.14.1,<3.0dev" googleapis-common-protos = ">=1.56.2,<2.0dev" grpcio = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] grpcio-status = [ - {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, {version = ">=1.49.1,<2.0dev", optional = true, markers = "python_version >= \"3.11\" and extra == \"grpc\""}, + {version = ">=1.33.2,<2.0dev", optional = true, markers = "python_version < \"3.11\" and extra == \"grpc\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" requests = ">=2.18.0,<3.0.0dev" @@ -1439,8 +1400,8 @@ files = [ google-api-core = {version = ">=1.34.0,<2.0.dev0 || >=2.11.dev0,<3.0.0dev", extras = ["grpc"]} google-cloud-core = ">=1.4.1,<3.0.0dev" proto-plus = [ - {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, {version = ">=1.22.2,<2.0.0dev", 
markers = "python_version >= \"3.11\""}, + {version = ">=1.22.0,<2.0.0dev", markers = "python_version < \"3.11\""}, ] protobuf = ">=3.19.5,<3.20.0 || >3.20.0,<3.20.1 || >3.20.1,<4.21.0 || >4.21.0,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4.21.4 || >4.21.4,<4.21.5 || >4.21.5,<5.0.0dev" @@ -1779,43 +1740,6 @@ files = [ {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] -[[package]] -name = "importlib-metadata" -version = "6.0.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, - {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] - -[[package]] -name = "importlib-resources" -version = "5.10.2" -description = "Read resources from Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "importlib_resources-5.10.2-py3-none-any.whl", hash = "sha256:7d543798b0beca10b6a01ac7cafda9f822c54db9e8376a6bf57e0cbd74d486b6"}, - {file = "importlib_resources-5.10.2.tar.gz", hash = "sha256:e4a96c8cc0339647ff9a5e0550d9f276fc5a01ffa276012b58ec108cfd7b8484"}, -] - -[package.dependencies] -zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - [[package]] name = "inflection" version = "0.5.1" @@ -1926,9 +1850,7 @@ files = [ [package.dependencies] attrs = ">=22.2.0" -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} jsonschema-specifications = ">=2023.03.6" -pkgutil-resolve-name = {version = ">=1.3.10", markers = "python_version < \"3.9\""} referencing = ">=0.28.4" rpds-py = ">=0.7.1" @@ -1948,7 +1870,6 @@ files = [ ] [package.dependencies] -importlib-resources = {version = ">=1.4.0", markers = "python_version < \"3.9\""} referencing = ">=0.28.0" [[package]] @@ -2719,17 +2640,6 @@ files = [ docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] -[[package]] -name = "pkgutil-resolve-name" -version = "1.3.10" -description = "Resolve a name to an object." 
-optional = false -python-versions = ">=3.6" -files = [ - {file = "pkgutil_resolve_name-1.3.10-py3-none-any.whl", hash = "sha256:ca27cc078d25c5ad71a9de0a7a330146c4e014c2462d9af19c6b828280649c5e"}, - {file = "pkgutil_resolve_name-1.3.10.tar.gz", hash = "sha256:357d6c9e6a755653cfd78893817c0853af365dd51ec97f3d358a819373bbd174"}, -] - [[package]] name = "platformdirs" version = "3.10.0" @@ -4544,22 +4454,7 @@ files = [ [package.dependencies] lxml = ">=3.8" -[[package]] -name = "zipp" -version = "3.11.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.7" -files = [ - {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, - {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] -testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] - [metadata] lock-version = "2.0" -python-versions = ">=3.8,<4" -content-hash = "4e3e2002e9f0848b4a7b1be5f051233ae5aa5dc04b488aadaa8726f8bfd2f1e5" +python-versions = ">=3.10,<4" +content-hash = "d1674a963c91bac0fd32744a848a17fec3069ae529c176a5907d380321469ed5" diff --git a/pyproject.toml b/pyproject.toml index bca399eb4e..6bcfb46bdd 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -225,7 +225,7 @@ pymarc = "5.1.0" pyOpenSSL = "^23.1.0" pyparsing = "3.1.1" pyspellchecker = "0.7.2" -python = ">=3.8,<4" +python = ">=3.10,<4" python-dateutil = "2.8.2" python3-saml = "^1.16" # python-saml is required for SAML authentication pytz = "^2023.3" @@ -237,7 +237,6 @@ textblob = "0.17.1" types-pyopenssl = "^23.1.0.3" types-pyyaml = "^6.0.12.9" # We import typing_extensions, so we can use new annotation features. 
-# - ParamSpec (Python 3.10) # - Self (Python 3.11) typing_extensions = {version = "^4.5.0", python = "<3.11"} unicodecsv = "0.14.1" # this is used, but can probably be removed on py3 diff --git a/scripts.py b/scripts.py index 21505ebdc9..1a99db4b73 100644 --- a/scripts.py +++ b/scripts.py @@ -4,9 +4,10 @@ import os import sys import time +from collections.abc import Sequence from datetime import timedelta from pathlib import Path -from typing import Any, List, Optional, Sequence, Tuple, Type +from typing import Any from sqlalchemy import inspect, select from sqlalchemy.engine import Connection @@ -168,9 +169,9 @@ def arg_parser(cls, _db: Session) -> argparse.ArgumentParser: # type: ignore[ov def __init__( self, - _db: Optional[Session] = None, - cmd_args: Optional[Sequence[str]] = None, - exporter: Optional[MARCExporter] = None, + _db: Session | None = None, + cmd_args: Sequence[str] | None = None, + exporter: MARCExporter | None = None, *args: Any, **kwargs: Any, ) -> None: @@ -185,9 +186,7 @@ def __init__( self.exporter = exporter or MARCExporter(self._db, self.storage_service) - def parse_args( - self, cmd_args: Optional[Sequence[str]] = None - ) -> argparse.Namespace: + def parse_args(self, cmd_args: Sequence[str] | None = None) -> argparse.Namespace: parser = self.arg_parser(self._db) parsed = parser.parse_args(cmd_args) self.force = parsed.force @@ -195,7 +194,7 @@ def parse_args( def settings( self, library: Library - ) -> Tuple[MarcExporterSettings, MarcExporterLibrarySettings]: + ) -> tuple[MarcExporterSettings, MarcExporterLibrarySettings]: integration_query = ( select(IntegrationLibraryConfiguration) .join(IntegrationConfiguration) @@ -228,8 +227,8 @@ def get_collections(self, library: Library) -> Sequence[Collection]: ).all() def get_web_client_urls( - self, library: Library, url: Optional[str] = None - ) -> List[str]: + self, library: Library, url: str | None = None + ) -> list[str]: """Find web client URLs configured by the registry for this library.""" urls = [ s.web_client @@ -247,7 +246,7 @@ def get_web_client_urls( return urls def process_library( - self, library: Library, annotator_cls: Type[MarcAnnotator] = MarcAnnotator + self, library: Library, annotator_cls: type[MarcAnnotator] = MarcAnnotator ) -> None: try: settings, library_settings = self.settings(library) @@ -290,7 +289,7 @@ def process_library( def last_updated( self, library: Library, collection: Collection - ) -> Optional[datetime.datetime]: + ) -> datetime.datetime | None: """Find the most recent MarcFile creation time.""" last_updated_file = self._db.execute( select(MarcFile.created) @@ -487,7 +486,7 @@ class InstanceInitializationScript: """ def __init__(self) -> None: - self._log: Optional[logging.Logger] = None + self._log: logging.Logger | None = None self._container = container_instance() # Call init_resources() to initialize the logging configuration. 
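The pyproject.toml and poetry.lock changes above raise the interpreter floor to Python 3.10, which is what lets the backport pins (backports.zoneinfo, importlib-metadata, importlib-resources, pkgutil-resolve-name, zipp) fall out of the lock file and lets ParamSpec and Concatenate come from typing rather than typing_extensions; only Self (new in 3.11) keeps a fallback. Condensed, the import pattern the patch settles on in core/util/base64.py, core/util/cache.py and core/selftest.py is:

    import sys
    from typing import Concatenate, ParamSpec, TypeVar

    if sys.version_info >= (3, 11):
        from typing import Self
    else:
        from typing_extensions import Self

    P = ParamSpec("P")
    T = TypeVar("T")
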
diff --git a/tests/api/admin/controller/test_catalog_services.py b/tests/api/admin/controller/test_catalog_services.py index 19ebd756bf..fa8f5d76d1 100644 --- a/tests/api/admin/controller/test_catalog_services.py +++ b/tests/api/admin/controller/test_catalog_services.py @@ -1,6 +1,5 @@ import json from contextlib import nullcontext -from typing import Dict, Optional, Type import flask import pytest @@ -158,10 +157,10 @@ def test_catalog_services_get_with_marc_exporter( def test_catalog_services_post_errors( self, admin_ctrl_fixture: AdminControllerFixture, - post_data: Dict[str, str], - expected: Optional[ProblemDetail], + post_data: dict[str, str], + expected: ProblemDetail | None, admin: bool, - raises: Optional[Type[Exception]], + raises: type[Exception] | None, ): if admin: admin_ctrl_fixture.admin.add_role(AdminRole.SYSTEM_ADMIN) diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index fd793796f5..f5adbff220 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -1,5 +1,4 @@ import json -from typing import Dict import flask import pytest @@ -271,7 +270,7 @@ def test_collections_get_collections_with_multiple_collections( def test_collections_post_errors( self, admin_ctrl_fixture: AdminControllerFixture, - post_data: Dict[str, str], + post_data: dict[str, str], expected: ProblemDetail, detailed: bool, ): diff --git a/tests/api/admin/controller/test_custom_lists.py b/tests/api/admin/controller/test_custom_lists.py index e71ca13638..eff19c8ced 100644 --- a/tests/api/admin/controller/test_custom_lists.py +++ b/tests/api/admin/controller/test_custom_lists.py @@ -1,5 +1,4 @@ import json -from typing import Optional from unittest import mock import feedparser @@ -894,10 +893,10 @@ def test_custom_list_delete_errors( @define class ShareLocallySetup: - shared_with: Optional[Library] = None - primary_library: Optional[Library] = None - collection1: Optional[Collection] = None - list: Optional[CustomList] = None + shared_with: Library | None = None + primary_library: Library | None = None + collection1: Collection | None = None + list: CustomList | None = None def _setup_share_locally(self, admin_librarian_fixture: AdminLibrarianFixture): shared_with = admin_librarian_fixture.ctrl.db.library("shared_with") diff --git a/tests/api/admin/controller/test_library.py b/tests/api/admin/controller/test_library.py index 096d1703d4..6cc8785777 100644 --- a/tests/api/admin/controller/test_library.py +++ b/tests/api/admin/controller/test_library.py @@ -4,7 +4,6 @@ import datetime import json from io import BytesIO -from typing import Dict, List from unittest.mock import MagicMock import flask @@ -52,10 +51,10 @@ def logo_properties(self): } def library_form( - self, library: Library, fields: Dict[str, str | List[str]] | None = None + self, library: Library, fields: dict[str, str | list[str]] | None = None ): fields = fields or {} - defaults: Dict[str, str | List[str]] = { + defaults: dict[str, str | list[str]] = { "uuid": str(library.uuid), "name": "The New York Public Library", "short_name": str(library.short_name), diff --git a/tests/api/admin/controller/test_patron.py b/tests/api/admin/controller/test_patron.py index d41662c522..75efd3f009 100644 --- a/tests/api/admin/controller/test_patron.py +++ b/tests/api/admin/controller/test_patron.py @@ -1,5 +1,3 @@ -from typing import Optional - import flask import pytest from werkzeug.datastructures import ImmutableMultiDict @@ -126,7 +124,7 @@ 
def test_reset_adobe_id(self, patron_controller_fixture: PatronControllerFixture # This PatronController will always return a specific # PatronData object, no matter what is asked for. class MockPatronController(PatronController): - mock_patrondata: Optional[PatronData] = None + mock_patrondata: PatronData | None = None def _load_patrondata(self, authenticator): self.called_with = authenticator diff --git a/tests/api/admin/controller/test_patron_auth.py b/tests/api/admin/controller/test_patron_auth.py index e4785bcbc6..26a8416f8b 100644 --- a/tests/api/admin/controller/test_patron_auth.py +++ b/tests/api/admin/controller/test_patron_auth.py @@ -1,7 +1,8 @@ from __future__ import annotations import json -from typing import TYPE_CHECKING, Any, Callable, List, Tuple +from collections.abc import Callable +from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock import flask @@ -85,7 +86,7 @@ def post(form: ImmutableMultiDict[str, str]) -> Response | ProblemDetail: @pytest.fixture -def common_args() -> List[Tuple[str, str]]: +def common_args() -> list[tuple[str, str]]: return [ ("test_identifier", "user"), ("test_password", "pass"), @@ -466,7 +467,7 @@ def test_patron_auth_services_post_invalid_library_identifier_restriction_regex( def test_patron_auth_services_post_not_authorized( self, - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], settings_ctrl_fixture: SettingsControllerFixture, post_response: Callable[..., Response | ProblemDetail], ): @@ -481,7 +482,7 @@ def test_patron_auth_services_post_not_authorized( def test_patron_auth_services_post_create( self, - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], default_library: Library, post_response: Callable[..., Response | ProblemDetail], db: DatabaseTransactionFixture, @@ -562,7 +563,7 @@ def test_patron_auth_services_post_create( def test_patron_auth_services_post_edit( self, post_response: Callable[..., Response | ProblemDetail], - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], settings_ctrl_fixture: SettingsControllerFixture, create_simple_auth_integration: SimpleAuthIntegrationFixture, db: DatabaseTransactionFixture, @@ -626,7 +627,7 @@ def test_patron_auth_services_post_edit( def test_patron_auth_service_delete( self, - common_args: List[Tuple[str, str]], + common_args: list[tuple[str, str]], settings_ctrl_fixture: SettingsControllerFixture, create_simple_auth_integration: SimpleAuthIntegrationFixture, ): diff --git a/tests/api/admin/test_config.py b/tests/api/admin/test_config.py index ec3b933950..3338c62133 100644 --- a/tests/api/admin/test_config.py +++ b/tests/api/admin/test_config.py @@ -1,6 +1,5 @@ import logging import os -from typing import Optional from unittest.mock import MagicMock, patch import pytest @@ -12,7 +11,7 @@ class TestAdminUI: @staticmethod - def _set_env(monkeypatch, key: str, value: Optional[str]): + def _set_env(monkeypatch, key: str, value: str | None): if value: monkeypatch.setenv(key, value) elif key in os.environ: @@ -47,9 +46,9 @@ def test_package_version_cached(self, monkeypatch): def test_env_package_version( self, monkeypatch, - package_version: Optional[str], + package_version: str | None, resolves: bool, - expected_result: Optional[str], + expected_result: str | None, ): with patch( "api.admin.config.Configuration.resolve_package_version" @@ -112,8 +111,8 @@ def test_resolve_package_version(self, caplog): def test_package_url( self, monkeypatch, - package_name: Optional[str], - package_version: 
Optional[str], + package_name: str | None, + package_version: str | None, mode: OperationalMode, expected_result_startswith: str, ): @@ -145,8 +144,8 @@ def test_package_url( def test_package_development_directory( self, monkeypatch, - package_name: Optional[str], - package_version: Optional[str], + package_name: str | None, + package_version: str | None, expected_result: str, ): self._set_env(monkeypatch, "TPP_CIRCULATION_ADMIN_PACKAGE_NAME", package_name) diff --git a/tests/api/admin/test_form_data.py b/tests/api/admin/test_form_data.py index 7241f4ca7d..7cfb3a7067 100644 --- a/tests/api/admin/test_form_data.py +++ b/tests/api/admin/test_form_data.py @@ -1,5 +1,3 @@ -from typing import List, Optional - from werkzeug.datastructures import ImmutableMultiDict from api.admin.form_data import ProcessFormData @@ -12,21 +10,21 @@ class MockSettings(BaseSettings): - field1: List[str] = FormField( + field1: list[str] = FormField( [], form=ConfigurationFormItem( label="Field 1", type=ConfigurationFormItemType.LIST, ), ) - field2: List[str] = FormField( + field2: list[str] = FormField( [], form=ConfigurationFormItem( label="Field 2", type=ConfigurationFormItemType.MENU, ), ) - field3: Optional[str] = FormField( + field3: str | None = FormField( None, form=ConfigurationFormItem( label="Field 3", diff --git a/tests/api/admin/test_routes.py b/tests/api/admin/test_routes.py index 9c9984d64b..4ce6f8ac39 100644 --- a/tests/api/admin/test_routes.py +++ b/tests/api/admin/test_routes.py @@ -1,6 +1,7 @@ import logging +from collections.abc import Generator from pathlib import Path -from typing import Any, Generator, Optional +from typing import Any import flask import pytest @@ -93,8 +94,8 @@ def __init__( self.original_api_app = self.api_routes.app self.resolver = self.original_app.url_map.bind("", "/") - self.controller: Optional[CirculationManagerController] = None - self.real_controller: Optional[CirculationManagerController] = None + self.controller: CirculationManagerController | None = None + self.real_controller: CirculationManagerController | None = None self.routes.app = app # type: ignore # Need to also mock the route app from /api/routes. 
diff --git a/tests/api/controller/test_annotation.py b/tests/api/controller/test_annotation.py index 413a80f4b4..5a7b7b6c49 100644 --- a/tests/api/controller/test_annotation.py +++ b/tests/api/controller/test_annotation.py @@ -1,7 +1,6 @@ import datetime import json from time import mktime -from typing import Union from wsgiref.handlers import format_date_time import pytest @@ -139,7 +138,7 @@ def test_get_container_for_work(self, annotation_fixture: AnnotationFixture): assert expected_time == response.headers["Last-Modified"] def test_post_to_container(self, annotation_fixture: AnnotationFixture): - data: dict[str, Union[str, dict]] = dict() + data: dict[str, str | dict] = dict() data["@context"] = AnnotationWriter.JSONLD_CONTEXT data["type"] = "Annotation" data["motivation"] = Annotation.IDLING diff --git a/tests/api/controller/test_loan.py b/tests/api/controller/test_loan.py index af16e530be..110cab527f 100644 --- a/tests/api/controller/test_loan.py +++ b/tests/api/controller/test_loan.py @@ -1,7 +1,6 @@ import datetime import urllib.parse from decimal import Decimal -from typing import Optional from unittest.mock import MagicMock, patch import feedparser @@ -900,7 +899,7 @@ def test_no_drm_fulfill(self, loan_fixture: LoanFixture): with_license_pool=True, data_source_name=DataSource.OVERDRIVE ) - pool_opt: Optional[LicensePool] = work.active_license_pool() + pool_opt: LicensePool | None = work.active_license_pool() assert pool_opt is not None pool: LicensePool = pool_opt pool.loan_to(patron) diff --git a/tests/api/controller/test_marc.py b/tests/api/controller/test_marc.py index 8d43b60a71..d4665cfe82 100644 --- a/tests/api/controller/test_marc.py +++ b/tests/api/controller/test_marc.py @@ -1,7 +1,6 @@ from __future__ import annotations import datetime -from typing import Optional from unittest.mock import MagicMock import pytest @@ -30,7 +29,7 @@ def __init__(self, db: DatabaseTransactionFixture): # since we don't have a request context self.controller.library = lambda: self.library - def integration(self, library: Optional[Library] = None): + def integration(self, library: Library | None = None): library = library or self.library return self.db.integration_configuration( MARCExporter.__name__, @@ -40,11 +39,11 @@ def integration(self, library: Optional[Library] = None): def file( self, - library: Optional[Library] = None, - collection: Optional[Collection] = None, - key: Optional[str] = None, - created: Optional[datetime.datetime] = None, - since: Optional[datetime.datetime] = None, + library: Library | None = None, + collection: Collection | None = None, + key: str | None = None, + created: datetime.datetime | None = None, + since: datetime.datetime | None = None, ): key = key or self.db.fresh_str() created = created or utc_now() diff --git a/tests/api/controller/test_opds_feed.py b/tests/api/controller/test_opds_feed.py index 66f321d57d..4ed4885b89 100644 --- a/tests/api/controller/test_opds_feed.py +++ b/tests/api/controller/test_opds_feed.py @@ -1,5 +1,5 @@ import json -from typing import Any, Dict +from typing import Any from unittest.mock import MagicMock from urllib.parse import quote_plus @@ -116,7 +116,7 @@ def test_feed( # But the rest of the feed looks good. links = feed["feed"]["links"] - by_rel: Dict[str, Any] = dict() + by_rel: dict[str, Any] = dict() # Put the links into a data structure based on their rel values. 
for i in links: diff --git a/tests/api/controller/test_work.py b/tests/api/controller/test_work.py index 29f9dc2897..70796b1775 100644 --- a/tests/api/controller/test_work.py +++ b/tests/api/controller/test_work.py @@ -1,7 +1,7 @@ import datetime import json import urllib.parse -from typing import Any, Dict +from typing import Any from unittest.mock import MagicMock import feedparser @@ -327,7 +327,7 @@ def test_permalink_does_not_return_fulfillment_links_for_authenticated_patrons_w patron2_loan, _ = pool.loan_to(patron_2) # We want to make sure that the feed doesn't contain any fulfillment links. - active_loans_by_work: Dict[Any, Any] = {} + active_loans_by_work: dict[Any, Any] = {} annotator = LibraryAnnotator( None, None, diff --git a/tests/api/discovery/test_opds_registration.py b/tests/api/discovery/test_opds_registration.py index 94ee5ecda9..60e67f23a5 100644 --- a/tests/api/discovery/test_opds_registration.py +++ b/tests/api/discovery/test_opds_registration.py @@ -1,9 +1,10 @@ import base64 import json import os +from collections.abc import Callable from dataclasses import dataclass from functools import partial -from typing import Any, Callable, List, Optional +from typing import Any from unittest.mock import MagicMock import pytest @@ -56,7 +57,7 @@ def __init__( ) def create_registration( - self, library: Optional[Library] = None + self, library: Library | None = None ) -> DiscoveryServiceRegistration: obj, _ = create( self.db.session, @@ -719,7 +720,7 @@ class Processed: url_for: Callable[..., str] class Mock(LibraryRegistrationScript): - processed: List[Processed] = [] + processed: list[Processed] = [] def process_library( # type: ignore[override] self, diff --git a/tests/api/feed/fixtures.py b/tests/api/feed/fixtures.py index 002d806bac..03990a3fec 100644 --- a/tests/api/feed/fixtures.py +++ b/tests/api/feed/fixtures.py @@ -1,7 +1,8 @@ import urllib +from collections.abc import Callable from dataclasses import dataclass from functools import partial -from typing import Any, Callable +from typing import Any from unittest.mock import patch import pytest diff --git a/tests/api/feed/test_annotators.py b/tests/api/feed/test_annotators.py index c09d8e90cb..3e000914c8 100644 --- a/tests/api/feed/test_annotators.py +++ b/tests/api/feed/test_annotators.py @@ -197,7 +197,7 @@ def test_detailed_author(self, db: DatabaseTransactionFixture): work.presentation_edition.add_contributor(c, Contributor.PRIMARY_AUTHOR_ROLE) [same_tag] = VerboseAnnotator.authors(work.presentation_edition)["authors"] - assert same_tag.dict() == author.dict() + assert same_tag.asdict() == author.asdict() def test_duplicate_author_names_are_ignored(self, db: DatabaseTransactionFixture): session = db.session diff --git a/tests/api/feed/test_library_annotator.py b/tests/api/feed/test_library_annotator.py index 5eec641269..f9b1dff344 100644 --- a/tests/api/feed/test_library_annotator.py +++ b/tests/api/feed/test_library_annotator.py @@ -1,6 +1,5 @@ import datetime from collections import defaultdict -from typing import List from unittest.mock import create_autospec, patch import dateutil @@ -338,7 +337,7 @@ def test_adobe_id_tags_when_vendor_id_configured( # object that renders to the same data. 
same_tag = annotator_fixture.annotator.adobe_id_tags(patron_identifier) assert same_tag is not element - assert same_tag["drm_licensor"].dict() == element["drm_licensor"].dict() + assert same_tag["drm_licensor"].asdict() == element["drm_licensor"].asdict() # If the Adobe Vendor ID configuration is present but # incomplete, adobe_id_tags does nothing. @@ -712,7 +711,7 @@ def assert_link_on_entry( def get_link_by_rel(rel): if isinstance(entry, WorkEntry): links = entry.computed.other_links + entry.computed.acquisition_links - elif isinstance(entry, List): + elif isinstance(entry, list): links = [e.link for e in entry] else: links = [entry.link] @@ -1427,9 +1426,9 @@ def test_drm_device_registration_feed_tags( # If we remove that attribute, the feed-level tag is the same as the # generic tag. - assert feed_tag["drm_licensor"].dict() != generic_tag["drm_licensor"].dict() + assert feed_tag["drm_licensor"].asdict() != generic_tag["drm_licensor"].asdict() delattr(feed_tag["drm_licensor"], "scheme") - assert feed_tag["drm_licensor"].dict() == generic_tag["drm_licensor"].dict() + assert feed_tag["drm_licensor"].asdict() == generic_tag["drm_licensor"].asdict() def test_borrow_link_raises_unfulfillable_work( self, annotator_fixture: LibraryAnnotatorFixture diff --git a/tests/api/feed/test_opds_acquisition_feed.py b/tests/api/feed/test_opds_acquisition_feed.py index 0d4222aa3f..a8231bb82c 100644 --- a/tests/api/feed/test_opds_acquisition_feed.py +++ b/tests/api/feed/test_opds_acquisition_feed.py @@ -1,7 +1,8 @@ import datetime import logging from collections import defaultdict -from typing import Any, Callable, Generator, List, Type +from collections.abc import Callable, Generator +from typing import Any from unittest.mock import MagicMock, patch import pytest @@ -986,10 +987,10 @@ class TestEntrypointLinkInsertionFixture: db: DatabaseTransactionFixture mock: Any no_eps: WorkList - entrypoints: List[MediumEntryPoint] + entrypoints: list[MediumEntryPoint] wl: WorkList lane: Lane - annotator: Type[MockAnnotator] + annotator: type[MockAnnotator] old_add_entrypoint_links: Callable diff --git a/tests/api/mockapi/circulation.py b/tests/api/mockapi/circulation.py index 3a588a3151..eb693495df 100644 --- a/tests/api/mockapi/circulation.py +++ b/tests/api/mockapi/circulation.py @@ -1,6 +1,5 @@ from abc import ABC from collections import defaultdict -from typing import Type from sqlalchemy.orm import Session @@ -29,11 +28,11 @@ def description(cls) -> str: return "" @classmethod - def settings_class(cls) -> Type[BaseSettings]: + def settings_class(cls) -> type[BaseSettings]: return BaseSettings @classmethod - def library_settings_class(cls) -> Type[BaseSettings]: + def library_settings_class(cls) -> type[BaseSettings]: return BaseSettings diff --git a/tests/api/mockapi/enki.py b/tests/api/mockapi/enki.py index 64ddeb8585..6eed0ed850 100644 --- a/tests/api/mockapi/enki.py +++ b/tests/api/mockapi/enki.py @@ -1,4 +1,4 @@ -from typing import Any, List, Optional +from typing import Any from sqlalchemy.orm import Session @@ -12,10 +12,10 @@ class MockEnkiAPI(EnkiAPI): def __init__( - self, _db: Session, library: Library, collection: Optional[Collection] = None + self, _db: Session, library: Library, collection: Collection | None = None ) -> None: - self.responses: List[MockRequestsResponse] = [] - self.requests: List[List[Any]] = [] + self.responses: list[MockRequestsResponse] = [] + self.requests: list[list[Any]] = [] if not collection: collection, ignore = Collection.by_name_and_protocol( diff --git 
a/tests/api/saml/configuration/test_model.py b/tests/api/saml/configuration/test_model.py index 82c090115e..be8797e954 100644 --- a/tests/api/saml/configuration/test_model.py +++ b/tests/api/saml/configuration/test_model.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from datetime import datetime -from typing import Callable from unittest.mock import MagicMock, call, create_autospec import pytest diff --git a/tests/api/saml/conftest.py b/tests/api/saml/conftest.py index 7a93fad700..4d99e2d2b5 100644 --- a/tests/api/saml/conftest.py +++ b/tests/api/saml/conftest.py @@ -1,7 +1,8 @@ from __future__ import annotations +from collections.abc import Callable from functools import partial -from typing import TYPE_CHECKING, Callable, List, Optional +from typing import TYPE_CHECKING from unittest.mock import MagicMock import pytest @@ -57,8 +58,8 @@ def create_mock_onelogin_configuration( ) -> Callable[..., SAMLOneLoginConfiguration]: def _create_mock( service_provider: SAMLServiceProviderMetadata, - identity_providers: List[SAMLIdentityProviderMetadata], - configuration: Optional[SAMLWebSSOAuthSettings] = None, + identity_providers: list[SAMLIdentityProviderMetadata], + configuration: SAMLWebSSOAuthSettings | None = None, ): if configuration is None: configuration = create_saml_configuration() diff --git a/tests/api/saml/metadata/federations/test_validator.py b/tests/api/saml/metadata/federations/test_validator.py index 30cac7c42b..006cebcad8 100644 --- a/tests/api/saml/metadata/federations/test_validator.py +++ b/tests/api/saml/metadata/federations/test_validator.py @@ -1,6 +1,5 @@ import datetime import os -from typing import Optional, Type, Union import pytest from freezegun import freeze_time @@ -136,8 +135,8 @@ def test_validate( self, _, current_time: datetime.datetime, - metadata: Union[str, bytes], - expected_exception: Optional[Type[Exception]], + metadata: str | bytes, + expected_exception: type[Exception] | None, ): # Arrange validator = SAMLFederatedMetadataExpirationValidator() diff --git a/tests/api/saml/metadata/test_parser.py b/tests/api/saml/metadata/test_parser.py index 4190726245..487387c1a5 100644 --- a/tests/api/saml/metadata/test_parser.py +++ b/tests/api/saml/metadata/test_parser.py @@ -1,4 +1,3 @@ -from typing import Dict, Union from unittest.mock import MagicMock, create_autospec import pytest @@ -38,7 +37,7 @@ class TestSAMLMetadataParser: ], ) def test_parse_raises_exception_when_xml_metadata_has_incorrect_format( - self, _, incorrect_xml: Union[str, bytes] + self, _, incorrect_xml: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -63,7 +62,7 @@ def test_parse_raises_exception_when_xml_metadata_has_incorrect_format( def test_parse_raises_exception_when_idp_metadata_does_not_contain_sso_service( self, _, - incorrect_xml_with_one_idp_metadata_without_sso_service: Union[str, bytes], + incorrect_xml_with_one_idp_metadata_without_sso_service: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -90,9 +89,9 @@ def test_parse_raises_exception_when_idp_metadata_does_not_contain_sso_service( def test_parse_raises_exception_when_idp_metadata_contains_sso_service_with_wrong_binding( self, _, - incorrect_xml_with_one_idp_metadata_with_sso_service_with_wrong_binding: Union[ - str, bytes - ], + incorrect_xml_with_one_idp_metadata_with_sso_service_with_wrong_binding: ( + str | bytes + ), ): # Arrange metadata_parser = SAMLMetadataParser() @@ -119,7 +118,7 @@ def test_parse_raises_exception_when_idp_metadata_contains_sso_service_with_wron def 
test_parse_does_not_raise_exception_when_xml_metadata_does_not_have_display_names( self, _, - correct_xml_with_one_idp_metadata_without_display_names: Union[str, bytes], + correct_xml_with_one_idp_metadata_without_display_names: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -167,7 +166,7 @@ def test_parse_does_not_raise_exception_when_xml_metadata_does_not_have_display_ ], ) def test_parse_correctly_parses_one_idp_metadata( - self, _, correct_xml_with_idp_1: Union[str, bytes] + self, _, correct_xml_with_idp_1: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -269,7 +268,7 @@ def test_parse_correctly_parses_one_idp_metadata( ], ) def test_parse_correctly_parses_idp_metadata_without_name_id_format( - self, _, correct_xml_with_idp_1: Union[str, bytes] + self, _, correct_xml_with_idp_1: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -376,7 +375,7 @@ def test_parse_correctly_parses_idp_metadata_without_name_id_format( def test_parse_correctly_parses_idp_metadata_with_one_certificate( self, _, - correct_xml_with_one_idp_metadata_with_one_certificate: Union[str, bytes], + correct_xml_with_one_idp_metadata_with_one_certificate: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -483,7 +482,7 @@ def test_parse_correctly_parses_idp_metadata_with_one_certificate( ], ) def test_parse_correctly_parses_metadata_with_multiple_descriptors( - self, _, correct_xml_with_multiple_idps: Union[str, bytes] + self, _, correct_xml_with_multiple_idps: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -631,7 +630,7 @@ def test_parse_correctly_parses_metadata_with_multiple_descriptors( def test_parse_raises_exception_when_sp_metadata_does_not_contain_acs_service( self, _, - incorrect_xml_with_one_sp_metadata_without_acs_service: Union[str, bytes], + incorrect_xml_with_one_sp_metadata_without_acs_service: str | bytes, ): # Arrange metadata_parser = SAMLMetadataParser() @@ -653,7 +652,7 @@ def test_parse_raises_exception_when_sp_metadata_does_not_contain_acs_service( ], ) def test_parse_correctly_parses_one_sp_metadata( - self, _, correct_xml_with_one_sp: Union[str, bytes] + self, _, correct_xml_with_one_sp: str | bytes ): # Arrange metadata_parser = SAMLMetadataParser() @@ -888,7 +887,7 @@ def test_parse( name_id_nq: str, name_id_spnq: str, name_id: str, - attributes: Dict[str, Dict], + attributes: dict[str, dict], expected_result: SAMLSubject, ): # Arrange diff --git a/tests/api/saml/test_auth.py b/tests/api/saml/test_auth.py index 25a71779ef..305d5598df 100644 --- a/tests/api/saml/test_auth.py +++ b/tests/api/saml/test_auth.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from copy import copy -from typing import Callable from unittest.mock import MagicMock, create_autospec, patch from urllib.parse import parse_qs, urlsplit diff --git a/tests/api/saml/test_provider.py b/tests/api/saml/test_provider.py index 9eb8141851..01328f4e16 100644 --- a/tests/api/saml/test_provider.py +++ b/tests/api/saml/test_provider.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Callable +from collections.abc import Callable from unittest.mock import MagicMock, create_autospec, patch import pytest diff --git a/tests/api/sip/test_authentication_provider.py b/tests/api/sip/test_authentication_provider.py index f32755784a..6d70e0d8db 100644 --- a/tests/api/sip/test_authentication_provider.py +++ b/tests/api/sip/test_authentication_provider.py @@ -1,8 +1,9 @@ import json +from collections.abc import Callable from datetime 
import datetime from decimal import Decimal from functools import partial -from typing import Callable, cast +from typing import cast import pytest diff --git a/tests/api/sip/test_client.py b/tests/api/sip/test_client.py index 309c22d6d0..68a3f90a3e 100644 --- a/tests/api/sip/test_client.py +++ b/tests/api/sip/test_client.py @@ -2,8 +2,8 @@ import socket import ssl import tempfile +from collections.abc import Callable from functools import partial -from typing import Callable, List, Optional from unittest.mock import MagicMock, Mock, patch import pytest @@ -161,7 +161,7 @@ def test_connect(self): socket.socket = old_socket def test_secure_connect_insecure(self, mock_socket: MockSocketFixture): - self.context: Optional[MagicMock] = None + self.context: MagicMock | None = None def create_context(protocol): self.context = Mock(spec=ssl.SSLContext) @@ -221,7 +221,7 @@ def create_context(protocol): # Record the temporary files created. self.old_mkstemp = tempfile.mkstemp - self.temporary_files: List[str] = [] + self.temporary_files: list[str] = [] def create_temporary_file(): (fd, name) = self.old_mkstemp() diff --git a/tests/api/test_adobe_vendor_id.py b/tests/api/test_adobe_vendor_id.py index 18de4070d2..47a6a50bbe 100644 --- a/tests/api/test_adobe_vendor_id.py +++ b/tests/api/test_adobe_vendor_id.py @@ -2,7 +2,6 @@ import base64 import datetime -from typing import Type from unittest.mock import MagicMock import pytest @@ -42,7 +41,7 @@ class TestAuthdataUtility: def test_eligible_authdata_vendor_id_integrations( self, registration_status: RegistrationStatus, - authdata_utility_type: Type[AuthdataUtility] | Type[None], + authdata_utility_type: type[AuthdataUtility] | type[None], authdata: AuthdataUtility, vendor_id_fixture: VendorIDFixture, ): diff --git a/tests/api/test_annotations.py b/tests/api/test_annotations.py index a6445169f7..92b56923b2 100644 --- a/tests/api/test_annotations.py +++ b/tests/api/test_annotations.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Any, Dict +from typing import Any import pytest from pyld import jsonld @@ -390,7 +390,7 @@ class TestAnnotationParser: def _sample_jsonld( annotation_parser_fixture: AnnotationParserFixture, motivation=Annotation.IDLING ): - data: Dict[Any, Any] = dict() + data: dict[Any, Any] = dict() data["@context"] = [ AnnotationWriter.JSONLD_CONTEXT, {"ls": Annotation.LS_NAMESPACE}, @@ -456,7 +456,7 @@ def test_parse_expanded_jsonld( ): annotation_parser_fixture.pool.loan_to(annotation_parser_fixture.patron_value) - data: Dict[Any, Any] = dict() + data: dict[Any, Any] = dict() data["@type"] = ["http://www.w3.org/ns/oa#Annotation"] data["http://www.w3.org/ns/oa#motivatedBy"] = [{"@id": Annotation.IDLING}] data["http://www.w3.org/ns/oa#hasBody"] = [ @@ -514,7 +514,7 @@ def test_parse_compacted_jsonld( ): annotation_parser_fixture.pool.loan_to(annotation_parser_fixture.patron_value) - data: Dict[Any, Any] = dict() + data: dict[Any, Any] = dict() data["@type"] = "http://www.w3.org/ns/oa#Annotation" data["http://www.w3.org/ns/oa#motivatedBy"] = {"@id": Annotation.IDLING} data["http://www.w3.org/ns/oa#hasBody"] = { diff --git a/tests/api/test_authenticator.py b/tests/api/test_authenticator.py index 505741c211..ecf4b35afe 100644 --- a/tests/api/test_authenticator.py +++ b/tests/api/test_authenticator.py @@ -7,9 +7,10 @@ import json import os import re +from collections.abc import Callable from decimal import Decimal from functools import partial -from typing import TYPE_CHECKING, Callable, Literal, Tuple, cast +from typing 
import TYPE_CHECKING, Literal, cast from unittest.mock import MagicMock, PropertyMock, patch import flask @@ -166,7 +167,7 @@ def patron_data() -> PatronData: ) -InactivePatronFixture = Tuple[Patron, PatronData] +InactivePatronFixture = tuple[Patron, PatronData] @pytest.fixture diff --git a/tests/api/test_bibliotheca.py b/tests/api/test_bibliotheca.py index cf5a89108b..c8effd80ca 100644 --- a/tests/api/test_bibliotheca.py +++ b/tests/api/test_bibliotheca.py @@ -4,15 +4,7 @@ import random from datetime import datetime, timedelta from io import BytesIO, StringIO -from typing import ( - TYPE_CHECKING, - ClassVar, - Optional, - Protocol, - Type, - cast, - runtime_checkable, -) +from typing import TYPE_CHECKING, ClassVar, Protocol, cast, runtime_checkable from unittest import mock from unittest.mock import MagicMock, create_autospec @@ -977,7 +969,7 @@ def as_problem_detail_document(self, debug=False) -> ProblemDetail: def test_exception( self, incoming_message: str, - error_class: Type[CirculationException], + error_class: type[CirculationException], error_code: int, problem_detail_title: str, problem_detail_code: int, @@ -1033,8 +1025,8 @@ def test_exception( ) def test_remote_initiated_server_error( self, - incoming_message: Optional[str], - incoming_message_from_file: Optional[str], + incoming_message: str | None, + incoming_message_from_file: str | None, error_string: str, api_bibliotheca_files_fixture: BibliothecaFilesFixture, ): diff --git a/tests/api/test_firstbook2.py b/tests/api/test_firstbook2.py index 2515ce19e9..432843de19 100644 --- a/tests/api/test_firstbook2.py +++ b/tests/api/test_firstbook2.py @@ -1,8 +1,8 @@ import os import time import urllib.parse +from collections.abc import Callable from functools import partial -from typing import Callable import jwt import pytest diff --git a/tests/api/test_kansas_patron.py b/tests/api/test_kansas_patron.py index 7a4a50b809..bc33bb98d4 100644 --- a/tests/api/test_kansas_patron.py +++ b/tests/api/test_kansas_patron.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from functools import partial -from typing import Callable, List import pytest from lxml import etree @@ -18,7 +18,7 @@ def __init__(self, content): class MockAPI(KansasAuthenticationAPI): - queue: List[bytes] + queue: list[bytes] def __init__( self, diff --git a/tests/api/test_lanes.py b/tests/api/test_lanes.py index 9032b1d8ca..f4379e50a0 100644 --- a/tests/api/test_lanes.py +++ b/tests/api/test_lanes.py @@ -1,5 +1,4 @@ from collections import Counter -from typing import List from unittest.mock import MagicMock, patch import pytest @@ -948,7 +947,7 @@ def test_default(self, db: DatabaseTransactionFixture): (Facets.COLLECTION_NAME_FACETS_GROUP_NAME, Facets.COLLECTION_NAME_ALL), ], ) - def test_available_none(self, group_name: str, expected: List[str]) -> None: + def test_available_none(self, group_name: str, expected: list[str]) -> None: assert CrawlableFacets.available_facets(None, group_name) == [expected] @pytest.mark.parametrize( @@ -964,7 +963,7 @@ def test_available_none(self, group_name: str, expected: List[str]) -> None: ), ], ) - def test_available(self, group_name: str, expected: List[str]): + def test_available(self, group_name: str, expected: list[str]): mock = MagicMock(spec=Library) mock.enabled_facets = MagicMock(return_value=["foo"]) diff --git a/tests/api/test_millenium_patron.py b/tests/api/test_millenium_patron.py index a6664ab940..8289b9d6b1 100644 --- a/tests/api/test_millenium_patron.py +++ b/tests/api/test_millenium_patron.py @@ -1,7 +1,8 @@ +from 
collections.abc import Callable from datetime import date, timedelta from decimal import Decimal from functools import partial -from typing import Any, Callable, List +from typing import Any from urllib import parse import pytest @@ -28,8 +29,8 @@ def __init__(self, content): class MockAPI(MilleniumPatronAPI): - queue: List[Any] - requests_made: List[Any] + queue: list[Any] + requests_made: list[Any] def __init__( self, diff --git a/tests/api/test_odl.py b/tests/api/test_odl.py index 62635d6717..040a285c8f 100644 --- a/tests/api/test_odl.py +++ b/tests/api/test_odl.py @@ -3,7 +3,7 @@ import datetime import json import urllib.parse -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock import dateutil @@ -619,7 +619,7 @@ def test_fulfill_success( delivery_mechanism: str, correct_type: str, correct_link: str, - links: Dict[str, Any], + links: dict[str, Any], ) -> None: # Fulfill a loan in a way that gives access to a license file. odl_api_test_fixture.license.setup(concurrency=1, available=1) # type: ignore[attr-defined] diff --git a/tests/api/test_odl2.py b/tests/api/test_odl2.py index b5a384bb76..cbca67109d 100644 --- a/tests/api/test_odl2.py +++ b/tests/api/test_odl2.py @@ -1,5 +1,4 @@ import datetime -from typing import List, Optional import pytest from freezegun import freeze_time @@ -40,10 +39,10 @@ class TestODL2Importer: @staticmethod def _get_delivery_mechanism_by_drm_scheme_and_content_type( - delivery_mechanisms: List[LicensePoolDeliveryMechanism], + delivery_mechanisms: list[LicensePoolDeliveryMechanism], content_type: str, drm_scheme: str, - ) -> Optional[DeliveryMechanism]: + ) -> DeliveryMechanism | None: """Find a license pool in the list by its identifier. :param delivery_mechanisms: List of delivery mechanisms diff --git a/tests/api/test_opds_for_distributors.py b/tests/api/test_opds_for_distributors.py index 3d5dd064b3..9a4d18dc5c 100644 --- a/tests/api/test_opds_for_distributors.py +++ b/tests/api/test_opds_for_distributors.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Callable, Union +from collections.abc import Callable from unittest.mock import MagicMock, patch import pytest @@ -58,7 +58,7 @@ def _auth_doc(without_links=False) -> str: if not without_links else {} ) - doc: dict[str, list[dict[str, Union[str, list]]]] = { + doc: dict[str, list[dict[str, str | list]]] = { "authentication": [ { **{"type": "http://opds-spec.org/auth/oauth/client_credentials"}, diff --git a/tests/api/test_overdrive.py b/tests/api/test_overdrive.py index 8d2211abf8..42e71029e8 100644 --- a/tests/api/test_overdrive.py +++ b/tests/api/test_overdrive.py @@ -7,7 +7,7 @@ import os import random from datetime import timedelta -from typing import TYPE_CHECKING, Any, Dict +from typing import TYPE_CHECKING, Any from unittest.mock import MagicMock, create_autospec, patch import pytest @@ -2303,7 +2303,7 @@ def token_post( Goals.LICENSE_GOAL, {ExternalIntegration.OVERDRIVE: MockAPI} ), ) - od_apis: Dict[str, OverdriveAPI] = { + od_apis: dict[str, OverdriveAPI] = { api.collection.name: api # type: ignore[union-attr,misc] for api in list(circulation.api_for_collection.values()) } @@ -2452,7 +2452,7 @@ def make_direct_download_link(cls, download_link): error_url = "http://error/" # Here we don't even know the name of the format. 
- empty: Dict[str, Any] = dict() + empty: dict[str, Any] = dict() with pytest.raises(IOError) as excinfo: m(empty, error_url) assert "No linkTemplates for format (unknown)" in str(excinfo.value) @@ -3001,7 +3001,7 @@ def test_catch_up_from_with_failures_retried( db = overdrive_api_fixture.db class MockAPI: - tries: Dict[str, int] = {} + tries: dict[str, int] = {} def __init__(self, *ignore, **kwignore): self.licensepools = [] @@ -3066,7 +3066,7 @@ def test_catch_up_from_with_failures_all( db = overdrive_api_fixture.db class MockAPI: - tries: Dict[str, int] = {} + tries: dict[str, int] = {} def __init__(self, *ignore, **kwignore): self.licensepools = [] diff --git a/tests/api/test_scripts.py b/tests/api/test_scripts.py index d2811e7e7d..d172241f6f 100644 --- a/tests/api/test_scripts.py +++ b/tests/api/test_scripts.py @@ -4,7 +4,7 @@ import logging from io import StringIO from pathlib import Path -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING from unittest.mock import MagicMock, call, create_autospec, patch import pytest @@ -131,9 +131,7 @@ def __init__(self, db: DatabaseTransactionFixture): db.session, Configuration.BASE_URL_KEY ).value = self.cm_base_url - def integration( - self, library: Optional[Library] = None - ) -> IntegrationConfiguration: + def integration(self, library: Library | None = None) -> IntegrationConfiguration: if library is None: library = self.library @@ -143,7 +141,7 @@ def integration( libraries=[library], ) - def script(self, cmd_args: Optional[list[str]] = None) -> CacheMARCFiles: + def script(self, cmd_args: list[str] | None = None) -> CacheMARCFiles: cmd_args = cmd_args or [] return CacheMARCFiles( self.db.session, @@ -452,7 +450,7 @@ def test_process_collection_skip( self, cache_marc_files: CacheMARCFilesFixture, caplog: LogCaptureFixture, - last_updated: Optional[datetime.datetime], + last_updated: datetime.datetime | None, force: bool, update_frequency: int, run_exporter: bool, diff --git a/tests/api/test_simple_auth.py b/tests/api/test_simple_auth.py index 6e048aa9dc..be13ebdb7a 100644 --- a/tests/api/test_simple_auth.py +++ b/tests/api/test_simple_auth.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from functools import partial -from typing import Callable import pytest diff --git a/tests/api/test_sirsidynix_auth_provider.py b/tests/api/test_sirsidynix_auth_provider.py index 7216cbdcd2..400c4fcfdb 100644 --- a/tests/api/test_sirsidynix_auth_provider.py +++ b/tests/api/test_sirsidynix_auth_provider.py @@ -1,6 +1,7 @@ +from collections.abc import Callable from copy import deepcopy from functools import partial -from typing import Any, Callable, Dict, List, Tuple +from typing import Any from unittest.mock import MagicMock, call, patch import pytest @@ -329,7 +330,7 @@ def test_blocked_patron_status_info( "amountOwed": {"currencyCode": "USD", "amount": "0.00"}, } - statuses: List[Tuple[Dict[str, bool], Any]] = [ + statuses: list[tuple[dict[str, bool], Any]] = [ ({"hasMaxDaysWithFines": True}, PatronData.EXCESSIVE_FINES), ({"hasMaxFines": True}, PatronData.EXCESSIVE_FINES), ({"hasMaxLostItem": True}, PatronData.TOO_MANY_LOST), diff --git a/tests/core/configuration/test_library.py b/tests/core/configuration/test_library.py index ad00aa2860..93289c8507 100644 --- a/tests/core/configuration/test_library.py +++ b/tests/core/configuration/test_library.py @@ -1,5 +1,5 @@ +from collections.abc import Callable from functools import partial -from typing import Callable, List, Optional import pytest @@ -31,8 +31,8 @@ def 
library_settings() -> LibrarySettingsFixture: ], ) def test_validate_language_codes( - languages: Optional[List[str]], - expected: Optional[List[str]], + languages: list[str] | None, + expected: list[str] | None, library_settings: LibrarySettingsFixture, ) -> None: settings = library_settings(large_collection_languages=languages) diff --git a/tests/core/integration/test_settings.py b/tests/core/integration/test_settings.py index f7b858918f..3771681012 100644 --- a/tests/core/integration/test_settings.py +++ b/tests/core/integration/test_settings.py @@ -1,6 +1,5 @@ import dataclasses import logging -from typing import Optional from unittest.mock import MagicMock import pytest @@ -33,7 +32,7 @@ def custom_validator(cls, v): raise SettingsValidationError(mock_problem_detail) return v - test: Optional[str] = FormField( + test: str | None = FormField( "test", form=ConfigurationFormItem(label="Test", description="Test description"), ) diff --git a/tests/core/jobs/test_playtime_entries.py b/tests/core/jobs/test_playtime_entries.py index 4adedbbc2c..fb21c925aa 100644 --- a/tests/core/jobs/test_playtime_entries.py +++ b/tests/core/jobs/test_playtime_entries.py @@ -2,7 +2,6 @@ import re from datetime import datetime, timedelta -from typing import List, Optional from unittest.mock import MagicMock, call, patch import pytest @@ -31,7 +30,7 @@ def create_playtime_entries( collection: Collection, library: Library, *entries: PlaytimeTimeEntry, -) -> List[PlaytimeEntry]: +) -> list[PlaytimeEntry]: all_inserted = [] for entry in entries: inserted = PlaytimeEntry( @@ -452,9 +451,7 @@ def test__isbn_for_identifier( ) for equivalent in equivalents ] - test_identifier: Optional[Identifier] = ( - ids[id_key] if id_key is not None else None - ) + test_identifier: Identifier | None = ids[id_key] if id_key is not None else None if test_identifier is not None: test_identifier.equivalencies = equivalencies diff --git a/tests/core/mock.py b/tests/core/mock.py index 050beb4708..ced72b17c5 100644 --- a/tests/core/mock.py +++ b/tests/core/mock.py @@ -1,6 +1,5 @@ import json import logging -from typing import Optional, Union from core.coverage import ( BibliographicCoverageProvider, @@ -71,7 +70,7 @@ def __getattr__(self, item): class MockCoverageProvider: """Mixin class for mock CoverageProviders that defines common constants.""" - SERVICE_NAME: Optional[str] = "Generic mock CoverageProvider" + SERVICE_NAME: str | None = "Generic mock CoverageProvider" # Whenever a CoverageRecord is created, the data_source of that # record will be Project Gutenberg. @@ -79,11 +78,11 @@ class MockCoverageProvider: # For testing purposes, this CoverageProvider will try to cover # every identifier in the database. - INPUT_IDENTIFIER_TYPES: Union[None, str, object] = None + INPUT_IDENTIFIER_TYPES: None | str | object = None # This CoverageProvider can work with any Collection that supports # the OPDS import protocol (e.g. DatabaseTest._default_collection). 
- PROTOCOL: Optional[str] = ExternalIntegration.OPDS_IMPORT + PROTOCOL: str | None = ExternalIntegration.OPDS_IMPORT class InstrumentedCoverageProvider(MockCoverageProvider, IdentifierCoverageProvider): diff --git a/tests/core/models/test_before_flush_decorator.py b/tests/core/models/test_before_flush_decorator.py index 19c276dbc4..a358931e58 100644 --- a/tests/core/models/test_before_flush_decorator.py +++ b/tests/core/models/test_before_flush_decorator.py @@ -1,4 +1,4 @@ -from typing import Callable, List, Optional +from collections.abc import Callable from unittest.mock import MagicMock, PropertyMock, call import pytest @@ -17,9 +17,9 @@ def before_flush_decorator() -> BeforeFlushListener: @pytest.fixture def create_session() -> Callable[..., Session]: def create( - new: Optional[List[Base]] = None, - deleted: Optional[List[Base]] = None, - dirty: Optional[List[Base]] = None, + new: list[Base] | None = None, + deleted: list[Base] | None = None, + dirty: list[Base] | None = None, ) -> Session: new = new or [] deleted = deleted or [] diff --git a/tests/core/models/test_coverage.py b/tests/core/models/test_coverage.py index 2c1803c5f8..c395594f07 100644 --- a/tests/core/models/test_coverage.py +++ b/tests/core/models/test_coverage.py @@ -1,5 +1,4 @@ import datetime -from typing import List import pytest @@ -587,8 +586,8 @@ def relevant_records(work): class ExampleEquivalencyCoverageRecordFixture: - identifiers: List[Identifier] - equivalencies: List[Equivalency] + identifiers: list[Identifier] + equivalencies: list[Equivalency] transaction: DatabaseTransactionFixture def __init__(self, transaction: DatabaseTransactionFixture): diff --git a/tests/core/models/test_discovery_service_registration.py b/tests/core/models/test_discovery_service_registration.py index f953d01ac5..6b60d8eb30 100644 --- a/tests/core/models/test_discovery_service_registration.py +++ b/tests/core/models/test_discovery_service_registration.py @@ -1,5 +1,3 @@ -from typing import Optional - import pytest from sqlalchemy import select @@ -20,8 +18,8 @@ class RegistrationFixture: def __call__( self, - library: Optional[Library] = None, - integration: Optional[IntegrationConfiguration] = None, + library: Library | None = None, + integration: IntegrationConfiguration | None = None, ) -> DiscoveryServiceRegistration: library = library or self.library_fixture.library() integration = integration or self.integration_fixture( diff --git a/tests/core/models/test_licensing.py b/tests/core/models/test_licensing.py index c152bbdc14..0dbb99b00c 100644 --- a/tests/core/models/test_licensing.py +++ b/tests/core/models/test_licensing.py @@ -1,6 +1,6 @@ import datetime import json -from typing import Callable, Optional +from collections.abc import Callable from unittest.mock import MagicMock, PropertyMock import pytest @@ -1537,10 +1537,10 @@ class TestFormatPriorities: @pytest.fixture def mock_delivery( self, - ) -> Callable[[Optional[str], Optional[str]], DeliveryMechanism]: + ) -> Callable[[str | None, str | None], DeliveryMechanism]: def delivery_mechanism( - drm_scheme: Optional[str] = None, - content_type: Optional[str] = "application/epub+zip", + drm_scheme: str | None = None, + content_type: str | None = "application/epub+zip", ) -> DeliveryMechanism: def _delivery_eq(self, other): return ( @@ -1564,10 +1564,10 @@ def _delivery_repr(self): @pytest.fixture def mock_mechanism( self, mock_delivery - ) -> Callable[[Optional[str], Optional[str]], LicensePoolDeliveryMechanism]: + ) -> Callable[[str | None, str | None], 
LicensePoolDeliveryMechanism]: def mechanism( - drm_scheme: Optional[str] = None, - content_type: Optional[str] = "application/epub+zip", + drm_scheme: str | None = None, + content_type: str | None = "application/epub+zip", ) -> LicensePoolDeliveryMechanism: def _mechanism_eq(self, other): return self.delivery_mechanism == other.delivery_mechanism diff --git a/tests/core/models/test_listeners.py b/tests/core/models/test_listeners.py index da6887c84a..9751621773 100644 --- a/tests/core/models/test_listeners.py +++ b/tests/core/models/test_listeners.py @@ -1,5 +1,6 @@ import functools -from typing import Any, Iterable +from collections.abc import Iterable +from typing import Any import pytest diff --git a/tests/core/search/test_service.py b/tests/core/search/test_service.py index 53ece7748d..4192e51996 100644 --- a/tests/core/search/test_service.py +++ b/tests/core/search/test_service.py @@ -1,4 +1,4 @@ -from typing import Iterable +from collections.abc import Iterable from core.search.document import LONG, SearchMappingDocument from core.search.revision import SearchSchemaRevision diff --git a/tests/core/service/storage/test_s3.py b/tests/core/service/storage/test_s3.py index c68c253acd..1328aa707e 100644 --- a/tests/core/service/storage/test_s3.py +++ b/tests/core/service/storage/test_s3.py @@ -1,8 +1,9 @@ from __future__ import annotations import functools +from collections.abc import Generator from io import BytesIO -from typing import TYPE_CHECKING, Generator, Optional +from typing import TYPE_CHECKING from unittest.mock import MagicMock import pytest @@ -331,7 +332,7 @@ def test_store( key: str, bucket: str, content: bytes | str, - content_type: Optional[str], + content_type: str | None, s3_service_integration_fixture: S3ServiceIntegrationFixture, ): """The S3Service.store method stores content in the bucket.""" @@ -371,7 +372,7 @@ def test_multipart( key: str, bucket: str, content: bytes, - content_type: Optional[str], + content_type: str | None, s3_service_integration_fixture: S3ServiceIntegrationFixture, ): service = getattr(s3_service_integration_fixture, bucket) diff --git a/tests/core/test_app_server.py b/tests/core/test_app_server.py index 9c336084e6..1e1c79a835 100644 --- a/tests/core/test_app_server.py +++ b/tests/core/test_app_server.py @@ -1,8 +1,8 @@ import gzip import json +from collections.abc import Callable, Iterable from functools import partial from io import BytesIO -from typing import Callable, Iterable from unittest.mock import MagicMock, PropertyMock import flask diff --git a/tests/core/test_equivalent_coverage.py b/tests/core/test_equivalent_coverage.py index 60452f3464..119e05b74c 100644 --- a/tests/core/test_equivalent_coverage.py +++ b/tests/core/test_equivalent_coverage.py @@ -1,5 +1,3 @@ -from typing import List - import pytest import sqlalchemy from sqlalchemy import or_ @@ -23,8 +21,8 @@ class EquivalentCoverageFixture: coverage_records: ExampleEquivalencyCoverageRecordFixture provider: EquivalentIdentifiersCoverageProvider transaction: DatabaseTransactionFixture - identifiers: List[Identifier] - equivalencies: List[Equivalency] + identifiers: list[Identifier] + equivalencies: list[Equivalency] @pytest.fixture() diff --git a/tests/core/test_external_search.py b/tests/core/test_external_search.py index 8c82273c42..b92d1b17a6 100644 --- a/tests/core/test_external_search.py +++ b/tests/core/test_external_search.py @@ -2,8 +2,8 @@ import re import time import uuid +from collections.abc import Callable, Collection from datetime import datetime -from typing 
import Callable, Collection, List from unittest.mock import MagicMock import pytest @@ -1632,7 +1632,7 @@ class TestAuthorFilterData: sort_name: Contributor viaf: Contributor lc: Contributor - works: List[Work] + works: list[Work] literary_wonderlands: Work ubik: Work justice: Work @@ -5381,7 +5381,7 @@ class TestExternalSearchJSONQueryData: book_work: Work facets: SearchFacets filter: Filter - random_works: List[Work] + random_works: list[Work] class TestExternalSearchJSONQuery: diff --git a/tests/core/test_http.py b/tests/core/test_http.py index 1ccd4f53af..4a32f125cd 100644 --- a/tests/core/test_http.py +++ b/tests/core/test_http.py @@ -1,6 +1,6 @@ import functools +from collections.abc import Callable from dataclasses import dataclass -from typing import Callable import pytest import requests diff --git a/tests/core/test_lane.py b/tests/core/test_lane.py index 6e588794bc..2676883d20 100644 --- a/tests/core/test_lane.py +++ b/tests/core/test_lane.py @@ -1,7 +1,6 @@ import datetime import logging import random -from typing import List, Tuple from unittest.mock import MagicMock, call import pytest @@ -60,7 +59,7 @@ class MockFacetConfig: but you don't care which EntryPoints are configured. """ - entrypoints: List = [] + entrypoints: list = [] def test_items(self): ep = AudiobooksEntryPoint @@ -714,8 +713,8 @@ def test_from_request_gets_available_facets_through_hook_methods( # available_facets() and default_facets() methods. This gives # subclasses a chance to add extra facets or change defaults. class Mock(Facets): - available_facets_calls: List[Tuple] = [] - default_facet_calls: List[Tuple] = [] + available_facets_calls: list[tuple] = [] + default_facet_calls: list[tuple] = [] # For whatever reason, this faceting object allows only a # single setting for each facet group. diff --git a/tests/core/test_opds2_import.py b/tests/core/test_opds2_import.py index bac2dd61d6..203242d4b0 100644 --- a/tests/core/test_opds2_import.py +++ b/tests/core/test_opds2_import.py @@ -1,5 +1,5 @@ import datetime -from typing import Generator, List, Union +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest @@ -147,7 +147,7 @@ def test_opds2_importer_correctly_imports_valid_opds2_feed( opds2_importer_fixture.transaction.session, ) content_server_feed_text = opds2_files_fixture.sample_text("feed.json") - content_server_feed: Union[str, bytes] + content_server_feed: str | bytes if manifest_type == "bytes": content_server_feed = content_server_feed_text.encode() @@ -405,7 +405,7 @@ def test_opds2_importer_skips_publications_with_unsupported_identifier_types( opds2_importer_fixture: TestOPDS2ImporterFixture, opds2_files_fixture: OPDS2FilesFixture, this_identifier_type, - ignore_identifier_type: List[IdentifierType], + ignore_identifier_type: list[IdentifierType], identifier: str, ) -> None: """Ensure that OPDS2Importer imports only publications having supported identifier types. 
diff --git a/tests/core/test_patron_activity_sync.py b/tests/core/test_patron_activity_sync.py index 0648a8ef48..406edf8c32 100644 --- a/tests/core/test_patron_activity_sync.py +++ b/tests/core/test_patron_activity_sync.py @@ -1,5 +1,5 @@ from datetime import timedelta -from typing import Optional, cast +from typing import cast from unittest.mock import call, patch import pytest @@ -31,12 +31,12 @@ class TestPatronActivitySync: def test_item_query(self, sync_fixture: PatronSyncFixture): db = sync_fixture.db - work: Optional[Work] = db.work( + work: Work | None = db.work( with_license_pool=True, with_open_access_download=True ) assert work is not None - pool: Optional[LicensePool] = work.active_license_pool() + pool: LicensePool | None = work.active_license_pool() assert pool is not None patron1: Patron = db.patron() # 0 loans, holds or tokens diff --git a/tests/core/test_selftest.py b/tests/core/test_selftest.py index a676cb1d63..49ea4cf14d 100644 --- a/tests/core/test_selftest.py +++ b/tests/core/test_selftest.py @@ -6,7 +6,7 @@ """ import datetime -from typing import Generator, Optional +from collections.abc import Generator from unittest.mock import MagicMock from sqlalchemy.orm import Session @@ -104,7 +104,7 @@ def test_run_self_tests(self, db: DatabaseTransactionFixture): """ class Tester(HasSelfTests): - integration: Optional[ExternalIntegration] + integration: ExternalIntegration | None def __init__(self, extra_arg=None): """This constructor works.""" diff --git a/tests/core/util/test_notifications.py b/tests/core/util/test_notifications.py index 2d7b7fe6d2..432a10dd84 100644 --- a/tests/core/util/test_notifications.py +++ b/tests/core/util/test_notifications.py @@ -1,7 +1,8 @@ import logging import re +from collections.abc import Generator from datetime import datetime -from typing import Any, Generator +from typing import Any from unittest import mock from unittest.mock import MagicMock diff --git a/tests/core/util/test_xml_parser.py b/tests/core/util/test_xml_parser.py index d7aad46706..9b077f6c01 100644 --- a/tests/core/util/test_xml_parser.py +++ b/tests/core/util/test_xml_parser.py @@ -1,7 +1,5 @@ from __future__ import annotations -from typing import Dict, Optional - from lxml.etree import _Element from core.util.xmlparser import XMLProcessor @@ -17,9 +15,7 @@ def __init__(self, xpath_expression: str) -> None: def xpath_expression(self) -> str: return self._xpath_expression - def process_one( - self, tag: _Element, namespaces: Optional[Dict[str, str]] - ) -> _Element: + def process_one(self, tag: _Element, namespaces: dict[str, str] | None) -> _Element: return tag diff --git a/tests/customlists/test_explain.py b/tests/customlists/test_explain.py index 8628c42c62..efc92fd06c 100644 --- a/tests/customlists/test_explain.py +++ b/tests/customlists/test_explain.py @@ -1,5 +1,4 @@ from pathlib import Path -from typing import List from customlists.customlist_explain import CustomListImportExplainer @@ -46,8 +45,8 @@ def test_explain_simple_report(self, tmpdir): ] ).execute() - text_expected: List[str] = open(output_path).readlines() - text_received: List[str] = open(tmpdir.join("output.csv")).readlines() + text_expected: list[str] = open(output_path).readlines() + text_received: list[str] = open(tmpdir.join("output.csv")).readlines() assert len(text_expected) == len(text_received) for i in range(0, len(text_expected)): assert text_expected[i] == text_received[i] diff --git a/tests/customlists/test_import.py b/tests/customlists/test_import.py index 406070df58..7c176f97d8 100644 --- 
a/tests/customlists/test_import.py +++ b/tests/customlists/test_import.py @@ -1,6 +1,5 @@ import json from pathlib import Path -from typing import List import pytest @@ -371,10 +370,10 @@ def test_import_cannot_update_custom_list( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 2 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -497,10 +496,10 @@ def test_import_cannot_update_existing_list( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 2 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -609,10 +608,10 @@ def test_import_dry_run(self, mock_web_server: MockAPIServer, tmpdir): schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 1 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -724,10 +723,10 @@ def test_import_error_collection_missing( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 2 == len(problems) assert ( "The collection 'B2' appears to be missing on the importing CM" @@ -845,10 +844,10 @@ def test_import_updates_and_includes_csrf( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 1 == len(problems) assert ( "Book 'Bad Book' (urn:uuid:9c9c1f5c-6742-47d4-b94c-e77f88ca55f7) was excluded from list updates due to a problem on the source CM: Something went wrong on the source CM" @@ -964,10 +963,10 @@ def test_import_updates_with_missing_collection( schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) 
assert 2 == len(problems) assert ( "The collection 'B2' appears to be missing on the importing CM" @@ -1174,10 +1173,10 @@ def test_import_bad_book_identifier(self, mock_web_server: MockAPIServer, tmpdir schema=schema, document=document ) - reports: List[CustomListReport] = list(report_document.reports()) + reports: list[CustomListReport] = list(report_document.reports()) assert 1 == len(reports) report = reports[0] - problems: List[CustomListProblem] = list(report.problems()) + problems: list[CustomListProblem] = list(report.problems()) assert 3 == len(problems) assert ( "The collection 'B2' appears to be missing on the importing CM" diff --git a/tests/fixtures/announcements.py b/tests/fixtures/announcements.py index 61bf5084e7..9d85281e31 100644 --- a/tests/fixtures/announcements.py +++ b/tests/fixtures/announcements.py @@ -1,5 +1,4 @@ import datetime -from typing import Optional import pytest from sqlalchemy.orm import Session @@ -22,10 +21,10 @@ class AnnouncementFixture: def create_announcement( self, db: Session, - start: Optional[datetime.date] = None, - finish: Optional[datetime.date] = None, + start: datetime.date | None = None, + finish: datetime.date | None = None, content: str = "test", - library: Optional[Library] = None, + library: Library | None = None, ) -> Announcement: if start is None: start = self.today @@ -40,7 +39,7 @@ def create_announcement( return announcement def active_announcement( - self, db: Session, library: Optional[Library] = None + self, db: Session, library: Library | None = None ) -> Announcement: # This announcement is active. return self.create_announcement( @@ -52,7 +51,7 @@ def active_announcement( ) def expired_announcement( - self, db: Session, library: Optional[Library] = None + self, db: Session, library: Library | None = None ) -> Announcement: # This announcement expired yesterday. return self.create_announcement( @@ -64,7 +63,7 @@ def expired_announcement( ) def forthcoming_announcement( - self, db: Session, library: Optional[Library] = None + self, db: Session, library: Library | None = None ) -> Announcement: # This announcement should be displayed starting tomorrow. 
return self.create_announcement( diff --git a/tests/fixtures/api_admin.py b/tests/fixtures/api_admin.py index d1e1e3647d..081841cc95 100644 --- a/tests/fixtures/api_admin.py +++ b/tests/fixtures/api_admin.py @@ -1,5 +1,4 @@ from contextlib import contextmanager -from typing import List import flask import pytest @@ -22,7 +21,7 @@ class AdminControllerFixture: admin: Admin manager: CirculationManager - BOOKS: List[WorkSpec] = [] + BOOKS: list[WorkSpec] = [] def __init__(self, controller_fixture: ControllerFixture): self.ctrl = controller_fixture diff --git a/tests/fixtures/api_controller.py b/tests/fixtures/api_controller.py index 207e4026a2..ae4d01ba38 100644 --- a/tests/fixtures/api_controller.py +++ b/tests/fixtures/api_controller.py @@ -2,8 +2,9 @@ import datetime import logging +from collections.abc import Callable from contextlib import contextmanager -from typing import Any, Callable, Optional +from typing import Any import flask import pytest @@ -58,7 +59,7 @@ class ControllerFixture: """A test that requires a functional app server.""" app: PalaceFlask - authdata: Optional[AuthdataUtility] + authdata: AuthdataUtility | None collection: Collection collections: list[Collection] controller: CirculationManagerController diff --git a/tests/fixtures/api_odl.py b/tests/fixtures/api_odl.py index 9bdef0ad8a..00a6decc41 100644 --- a/tests/fixtures/api_odl.py +++ b/tests/fixtures/api_odl.py @@ -3,7 +3,7 @@ import datetime import json import uuid -from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union +from typing import TYPE_CHECKING, Any import pytest from jinja2 import Template @@ -24,10 +24,10 @@ class LicenseHelper: def __init__( self, - identifier: Optional[str] = None, - checkouts: Optional[int] = None, - concurrency: Optional[int] = None, - expires: Optional[Union[datetime.datetime, str]] = None, + identifier: str | None = None, + checkouts: int | None = None, + concurrency: int | None = None, + expires: datetime.datetime | str | None = None, ) -> None: """Initialize a new instance of LicenseHelper class. 
@@ -37,12 +37,12 @@ def __init__( :param expires: Date & time when a license expires """ self.identifier: str = identifier if identifier else f"urn:uuid:{uuid.uuid1()}" - self.checkouts: Optional[int] = checkouts - self.concurrency: Optional[int] = concurrency + self.checkouts: int | None = checkouts + self.concurrency: int | None = concurrency if isinstance(expires, datetime.datetime): self.expires = expires.isoformat() else: - self.expires: Optional[str] = expires # type: ignore + self.expires: str | None = expires # type: ignore class LicenseInfoHelper: @@ -53,12 +53,12 @@ def __init__( license: LicenseHelper, available: int, status: str = "available", - left: Optional[int] = None, + left: int | None = None, ) -> None: """Initialize a new instance of LicenseInfoHelper class.""" self.license: LicenseHelper = license self.status: str = status - self.left: Optional[int] = left + self.left: int | None = left self.available: int = available def __str__(self) -> str: @@ -110,7 +110,7 @@ class MockGet: def __init__(self): self.responses = [] - def get(self, *args: Any, **kwargs: Any) -> Tuple[int, Dict[str, str], bytes]: + def get(self, *args: Any, **kwargs: Any) -> tuple[int, dict[str, str], bytes]: return 200, {}, self.responses.pop(0) def add(self, item: LicenseInfoHelper | str | bytes) -> None: @@ -169,12 +169,12 @@ def __init__( self.feed_template = feed_template def __call__( - self, licenses: List[LicenseInfoHelper] - ) -> Tuple[ - List[Edition], - List[LicensePool], - List[Work], - Dict[str, List[CoverageFailure]], + self, licenses: list[LicenseInfoHelper] + ) -> tuple[ + list[Edition], + list[LicensePool], + list[Work], + dict[str, list[CoverageFailure]], ]: feed_licenses = [l.license for l in licenses] for _license in licenses: @@ -187,7 +187,7 @@ def __call__( return self.importer.import_from_feed(feed) def get_templated_feed( - self, files: APIFilesFixture, filename: str, licenses: List[LicenseHelper] + self, files: APIFilesFixture, filename: str, licenses: list[LicenseHelper] ) -> str: """Get the test ODL feed with specific licensing information. 
diff --git a/tests/fixtures/api_routes.py b/tests/fixtures/api_routes.py index 2dce5f9cdb..e9823dbad5 100644 --- a/tests/fixtures/api_routes.py +++ b/tests/fixtures/api_routes.py @@ -1,5 +1,6 @@ import logging -from typing import Any, Generator, Optional +from collections.abc import Generator +from typing import Any import flask import pytest @@ -136,8 +137,8 @@ def __init__( self.original_app = self.routes.app self.resolver = self.original_app.url_map.bind("", "/") - self.controller: Optional[CirculationManagerController] = None - self.real_controller: Optional[CirculationManagerController] = None + self.controller: CirculationManagerController | None = None + self.real_controller: CirculationManagerController | None = None self.routes.app = app # type: ignore def set_controller_name(self, name: str): diff --git a/tests/fixtures/authenticator.py b/tests/fixtures/authenticator.py index 8794e98658..da97b8cea2 100644 --- a/tests/fixtures/authenticator.py +++ b/tests/fixtures/authenticator.py @@ -1,4 +1,4 @@ -from typing import Dict, Optional, Tuple, Type +from typing import Optional import pytest @@ -20,7 +20,7 @@ IntegrationLibraryConfigurationFixture, ) -AuthProviderFixture = Tuple[ +AuthProviderFixture = tuple[ IntegrationConfiguration, Optional[IntegrationLibraryConfiguration] ] @@ -37,9 +37,9 @@ def __init__( def __call__( self, protocol: str, - library: Optional[Library], - settings_dict: Optional[Dict[str, str]] = None, - library_settings_dict: Optional[Dict[str, str]] = None, + library: Library | None, + settings_dict: dict[str, str] | None = None, + library_settings_dict: dict[str, str] | None = None, ) -> AuthProviderFixture: settings_dict = settings_dict or {} library_settings_dict = library_settings_dict or {} @@ -76,7 +76,7 @@ class AuthProtocolFixture: def __init__(self, registry: PatronAuthRegistry): self.registry = registry - def __call__(self, protocol: Type[AuthenticationProviderType]) -> str: + def __call__(self, protocol: type[AuthenticationProviderType]) -> str: return self.registry.get_protocol(protocol, "") @@ -100,7 +100,7 @@ def __init__( def __call__( self, - library: Optional[Library] = None, + library: Library | None = None, test_identifier: str = "username1", test_password: str = "password1", ) -> AuthProviderFixture: @@ -136,7 +136,7 @@ def __init__( self.get_auth_protocol = get_auth_protocol def __call__( - self, library: Optional[Library] = None, **kwargs: str + self, library: Library | None = None, **kwargs: str ) -> AuthProviderFixture: if "url" not in kwargs: kwargs["url"] = "http://url.com/" @@ -169,7 +169,7 @@ def __init__( self.get_auth_protocol = get_auth_protocol def __call__( - self, library: Optional[Library] = None, **kwargs: str + self, library: Library | None = None, **kwargs: str ) -> AuthProviderFixture: if "url" not in kwargs: kwargs["url"] = "url.com" @@ -202,7 +202,7 @@ def __init__( self.get_auth_protocol = get_auth_protocol def __call__( - self, library: Optional[Library] = None, **kwargs: str + self, library: Library | None = None, **kwargs: str ) -> AuthProviderFixture: if "service_provider_xml_metadata" not in kwargs: kwargs["service_provider_xml_metadata"] = CORRECT_XML_WITH_ONE_SP diff --git a/tests/fixtures/database.py b/tests/fixtures/database.py index 20206bc275..da161daaa8 100644 --- a/tests/fixtures/database.py +++ b/tests/fixtures/database.py @@ -7,8 +7,9 @@ import tempfile import time import uuid +from collections.abc import Generator, Iterable from textwrap import dedent -from typing import Any, Dict, Generator, Iterable, List, 
Optional, Tuple +from typing import Any import pytest import sqlalchemy @@ -98,7 +99,7 @@ def __init__(self, engine: Engine, connection: Connection): self._connection = connection @staticmethod - def _get_database_connection() -> Tuple[Engine, Connection]: + def _get_database_connection() -> tuple[Engine, Connection]: url = Configuration.database_url() engine = SessionManager.engine(url) connection = engine.connect() @@ -139,12 +140,12 @@ class DatabaseTransactionFixture: """A fixture representing a single transaction. The transaction is automatically rolled back.""" _database: DatabaseFixture - _default_library: Optional[Library] - _default_collection: Optional[Collection] + _default_library: Library | None + _default_collection: Collection | None _session: Session _transaction: Transaction _counter: int - _isbns: List[str] + _isbns: list[str] def __init__( self, database: DatabaseFixture, session: Session, transaction: Transaction @@ -240,9 +241,9 @@ def fresh_str(self) -> str: def library( self, - name: Optional[str] = None, - short_name: Optional[str] = None, - settings: Optional[LibrarySettings] = None, + name: str | None = None, + short_name: str | None = None, + settings: LibrarySettings | None = None, ) -> Library: # Just a dummy key used for testing. key_string = """\ @@ -295,7 +296,7 @@ def collection( username=None, password=None, data_source_name=None, - settings: Dict[str, Any] | None = None, + settings: dict[str, Any] | None = None, ) -> Collection: name = name or self.fresh_str() collection, _ = Collection.by_name_and_protocol(self.session, name, protocol) @@ -972,7 +973,7 @@ def __init__(self, db: DatabaseTransactionFixture): self.db = db def __call__( - self, protocol: Optional[str], goal: Goals, settings_dict: Optional[dict] = None + self, protocol: str | None, goal: Goals, settings_dict: dict | None = None ) -> IntegrationConfiguration: integration, _ = create( self.db.session, @@ -985,7 +986,7 @@ def __call__( return integration def discovery_service( - self, protocol: Optional[str] = None, url: Optional[str] = None + self, protocol: str | None = None, url: str | None = None ) -> IntegrationConfiguration: registry = DiscoveryRegistry() if protocol is None: @@ -1019,7 +1020,7 @@ def __call__( self, library: Library, parent: IntegrationConfiguration, - settings_dict: Optional[dict] = None, + settings_dict: dict | None = None, ) -> IntegrationLibraryConfiguration: settings_dict = settings_dict or {} integration, _ = create( diff --git a/tests/fixtures/flask.py b/tests/fixtures/flask.py index 50a53ae427..670b7d908b 100644 --- a/tests/fixtures/flask.py +++ b/tests/fixtures/flask.py @@ -1,4 +1,4 @@ -from typing import Generator +from collections.abc import Generator import pytest from flask.ctx import RequestContext diff --git a/tests/fixtures/library.py b/tests/fixtures/library.py index c17f599669..e4056b49e2 100644 --- a/tests/fixtures/library.py +++ b/tests/fixtures/library.py @@ -1,6 +1,6 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Optional +from typing import TYPE_CHECKING import pytest @@ -38,9 +38,9 @@ def __init__(self, db: DatabaseTransactionFixture) -> None: def library( self, - name: Optional[str] = None, - short_name: Optional[str] = None, - settings: Optional[LibrarySettings] = None, + name: str | None = None, + short_name: str | None = None, + settings: LibrarySettings | None = None, ) -> Library: library = self.db.library(name=name, short_name=short_name, settings=settings) if isinstance(settings, MockLibrarySettings): diff --git 
a/tests/fixtures/odl.py b/tests/fixtures/odl.py index d2732d7b16..ffa3e3dead 100644 --- a/tests/fixtures/odl.py +++ b/tests/fixtures/odl.py @@ -1,6 +1,7 @@ import json import types -from typing import Any, Callable, Optional, Tuple, Type +from collections.abc import Callable +from typing import Any import pytest from _pytest.monkeypatch import MonkeyPatch @@ -52,7 +53,7 @@ def _url_for(patched_self, *args, **kwargs): "&".join([f"{key}={val}" for key, val in list(kwargs.items())]), ) - def __call__(self, api: Type[BaseODLAPI]): + def __call__(self, api: type[BaseODLAPI]): # We monkeypatch the ODLAPI class to intercept HTTP requests and responses # these monkeypatched methods are staticmethods on this class. They take # a patched_self argument, which is the instance of the ODLAPI class that @@ -170,7 +171,7 @@ def checkout( pool: LicensePool, db: DatabaseTransactionFixture, loan_url: str, - ) -> Callable[[], Tuple[LoanInfo, Any]]: + ) -> Callable[[], tuple[LoanInfo, Any]]: """Create a function that, when evaluated, performs a checkout.""" def c(): @@ -231,19 +232,17 @@ def __init__( self.patron = patron self.pool = license.license_pool - def checkin( - self, patron: Optional[Patron] = None, pool: Optional[LicensePool] = None - ): + def checkin(self, patron: Patron | None = None, pool: LicensePool | None = None): patron = patron or self.patron pool = pool or self.pool return self.fixture.checkin(self.api, patron=patron, pool=pool)() def checkout( self, - loan_url: Optional[str] = None, - patron: Optional[Patron] = None, - pool: Optional[LicensePool] = None, - ) -> Tuple[LoanInfo, Any]: + loan_url: str | None = None, + patron: Patron | None = None, + pool: LicensePool | None = None, + ) -> tuple[LoanInfo, Any]: patron = patron or self.patron pool = pool or self.pool loan_url = loan_url or self.db.fresh_url() @@ -278,7 +277,7 @@ def __init__( patched(ODL2API) def collection( - self, library: Library, api_class: Type[ODL2API] = ODL2API + self, library: Library, api_class: type[ODL2API] = ODL2API ) -> Collection: collection = super().collection(library, api_class) collection.integration_configuration.name = "Test ODL2 Collection" diff --git a/tests/fixtures/s3.py b/tests/fixtures/s3.py index b332a44244..fced64664e 100644 --- a/tests/fixtures/s3.py +++ b/tests/fixtures/s3.py @@ -2,7 +2,7 @@ import functools import sys -from typing import TYPE_CHECKING, BinaryIO, List, NamedTuple, Optional, Protocol +from typing import TYPE_CHECKING, BinaryIO, NamedTuple, Protocol from unittest.mock import MagicMock import pytest @@ -22,7 +22,7 @@ class MockS3ServiceUpload(NamedTuple): key: str content: bytes - media_type: Optional[str] + media_type: str | None class MockMultipartS3ContextManager(MultipartS3ContextManager): @@ -32,14 +32,14 @@ def __init__( bucket: str, key: str, url: str, - media_type: Optional[str] = None, + media_type: str | None = None, ) -> None: self.parent = parent self.key = key self.bucket = bucket self.media_type = media_type self.content = b"" - self.content_parts: List[bytes] = [] + self.content_parts: list[bytes] = [] self._complete = False self._url = url self._exception = None @@ -71,20 +71,20 @@ def __init__( url_template: str, ) -> None: super().__init__(client, region, bucket, url_template) - self.uploads: List[MockS3ServiceUpload] = [] - self.mocked_multipart_upload: Optional[MockMultipartS3ContextManager] = None + self.uploads: list[MockS3ServiceUpload] = [] + self.mocked_multipart_upload: MockMultipartS3ContextManager | None = None def store_stream( self, key: str, stream: 
BinaryIO, - content_type: Optional[str] = None, - ) -> Optional[str]: + content_type: str | None = None, + ) -> str | None: self.uploads.append(MockS3ServiceUpload(key, stream.read(), content_type)) return self.generate_url(key) def multipart( - self, key: str, content_type: Optional[str] = None + self, key: str, content_type: str | None = None ) -> MultipartS3ContextManager: self.mocked_multipart_upload = MockMultipartS3ContextManager( self, self.bucket, key, self.generate_url(key), content_type @@ -95,10 +95,10 @@ def multipart( class S3ServiceProtocol(Protocol): def __call__( self, - client: Optional[S3Client] = None, - region: Optional[str] = None, - bucket: Optional[str] = None, - url_template: Optional[str] = None, + client: S3Client | None = None, + region: str | None = None, + bucket: str | None = None, + url_template: str | None = None, ) -> S3Service: ... diff --git a/tests/fixtures/search.py b/tests/fixtures/search.py index 0936d273aa..bb54290942 100644 --- a/tests/fixtures/search.py +++ b/tests/fixtures/search.py @@ -1,6 +1,6 @@ import logging import os -from typing import Iterable, List +from collections.abc import Iterable import pytest from opensearchpy import OpenSearch @@ -24,7 +24,7 @@ class ExternalSearchFixture: integration: ExternalIntegration db: DatabaseTransactionFixture search: OpenSearch - _indexes_created: List[str] + _indexes_created: list[str] def __init__(self): self._indexes_created = [] diff --git a/tests/fixtures/tls_server.py b/tests/fixtures/tls_server.py index 26ca6ce2c6..57adbcf97f 100644 --- a/tests/fixtures/tls_server.py +++ b/tests/fixtures/tls_server.py @@ -3,10 +3,11 @@ import select import ssl from collections import deque +from collections.abc import Generator from concurrent.futures import ThreadPoolExecutor from pathlib import Path from socket import AF_INET, SOCK_STREAM, socket -from typing import Any, Deque, Generator +from typing import Any, Deque import pytest diff --git a/tests/fixtures/webserver.py b/tests/fixtures/webserver.py index 04ac0577a6..b9060b9934 100644 --- a/tests/fixtures/webserver.py +++ b/tests/fixtures/webserver.py @@ -1,6 +1,6 @@ import threading +from collections.abc import Generator from http.server import BaseHTTPRequestHandler, HTTPServer -from typing import Dict, Generator, List, Optional, Tuple import pytest @@ -10,7 +10,7 @@ class MockAPIServerRequest: """A request made to a server.""" - headers: Dict[str, str] + headers: dict[str, str] payload: bytes method: str path: str @@ -27,7 +27,7 @@ class MockAPIServerResponse: status_code: int content: bytes - headers: Dict[str, str] + headers: dict[str, str] close_obnoxiously: bool def __init__(self) -> None: @@ -100,14 +100,14 @@ def do_PUT(self) -> None: def version_string(self) -> str: return "" - def date_time_string(self, timestamp: Optional[int] = 0) -> str: + def date_time_string(self, timestamp: int | None = 0) -> str: return "Sat, 1 January 2000 00:00:00 UTC" class MockAPIInternalServer(HTTPServer): mock_api_server: "MockAPIServer" - def __init__(self, server_address: Tuple[str, int], bind_and_activate: bool): + def __init__(self, server_address: tuple[str, int], bind_and_activate: bool): super().__init__(server_address, MockAPIServerRequestHandler, bind_and_activate) self.allow_reuse_address = True @@ -119,8 +119,8 @@ class MockAPIServer(LoggerMixin): _port: int _server: HTTPServer _server_thread: threading.Thread - _responses: Dict[str, Dict[str, List[MockAPIServerResponse]]] - _requests: List[MockAPIServerRequest] + _responses: dict[str, dict[str, 
list[MockAPIServerResponse]]] + _requests: list[MockAPIServerRequest] def __init__(self, address: str, port: int): self._address = address @@ -156,7 +156,7 @@ def enqueue_response( def dequeue_response( self, request: MockAPIServerRequest - ) -> Optional[MockAPIServerResponse]: + ) -> MockAPIServerResponse | None: self._requests.append(request) _by_method = self._responses.get(request.method) or {} _by_path = _by_method.get(request.path) or [] @@ -173,7 +173,7 @@ def port(self) -> int: def url(self, path: str) -> str: return f"http://{self.address()}:{self.port()}{path}" - def requests(self) -> List[MockAPIServerRequest]: + def requests(self) -> list[MockAPIServerRequest]: return list(self._requests) diff --git a/tests/migration/conftest.py b/tests/migration/conftest.py index a5b2c87869..c537d93b1a 100644 --- a/tests/migration/conftest.py +++ b/tests/migration/conftest.py @@ -3,8 +3,9 @@ import json import random import string +from collections.abc import Generator from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Generator, Optional, Protocol, Union, cast +from typing import TYPE_CHECKING, Any, Protocol, cast import pytest import pytest_alembic @@ -61,7 +62,7 @@ def alembic_engine(database: DatabaseFixture) -> Engine: @pytest.fixture def alembic_runner( - alembic_config: Union[Dict[str, Any], alembic.config.Config, Config], + alembic_config: dict[str, Any] | alembic.config.Config | Config, alembic_engine: Engine, ) -> Generator[MigrationContext, None, None]: """ @@ -75,13 +76,13 @@ def alembic_runner( class RandomName(Protocol): - def __call__(self, length: Optional[int] = None) -> str: + def __call__(self, length: int | None = None) -> str: ... @pytest.fixture def random_name() -> RandomName: - def fixture(length: Optional[int] = None) -> str: + def fixture(length: int | None = None) -> str: if length is None: length = 10 return "".join(random.choices(string.ascii_lowercase, k=length)) @@ -93,8 +94,8 @@ class CreateLibrary(Protocol): def __call__( self, connection: Connection, - name: Optional[str] = None, - short_name: Optional[str] = None, + name: str | None = None, + short_name: str | None = None, ) -> int: ... @@ -103,8 +104,8 @@ def __call__( def create_library(random_name: RandomName) -> CreateLibrary: def fixture( connection: Connection, - name: Optional[str] = None, - short_name: Optional[str] = None, + name: str | None = None, + short_name: str | None = None, ) -> int: if name is None: name = random_name() @@ -142,7 +143,7 @@ class CreateCollection(Protocol): def __call__( self, connection: Connection, - integration_configuration_id: Optional[int] = None, + integration_configuration_id: int | None = None, ) -> int: ... @@ -151,7 +152,7 @@ def __call__( def create_collection(random_name: RandomName) -> CreateCollection: def fixture( connection: Connection, - integration_configuration_id: Optional[int] = None, + integration_configuration_id: int | None = None, ) -> int: collection = connection.execute( "INSERT INTO collections (integration_configuration_id) VALUES (%s) returning id", @@ -168,9 +169,9 @@ class CreateExternalIntegration(Protocol): def __call__( self, connection: Connection, - protocol: Optional[str] = None, - goal: Optional[str] = None, - name: Optional[str] = None, + protocol: str | None = None, + goal: str | None = None, + name: str | None = None, ) -> int: ... 
@@ -179,9 +180,9 @@ def __call__( def create_external_integration(random_name: RandomName) -> CreateExternalIntegration: def fixture( connection: Connection, - protocol: Optional[str] = None, - goal: Optional[str] = None, - name: Optional[str] = None, + protocol: str | None = None, + goal: str | None = None, + name: str | None = None, ) -> int: protocol = protocol or random_name() goal = goal or random_name() @@ -200,10 +201,10 @@ class CreateConfigSetting(Protocol): def __call__( self, connection: Connection, - key: Optional[str] = None, - value: Optional[str] = None, - integration_id: Optional[int] = None, - library_id: Optional[int] = None, + key: str | None = None, + value: str | None = None, + integration_id: int | None = None, + library_id: int | None = None, associate_library: bool = False, ) -> int: ... @@ -213,10 +214,10 @@ def __call__( def create_config_setting() -> CreateConfigSetting: def fixture( connection: Connection, - key: Optional[str] = None, - value: Optional[str] = None, - integration_id: Optional[int] = None, - library_id: Optional[int] = None, + key: str | None = None, + value: str | None = None, + integration_id: int | None = None, + library_id: int | None = None, associate_library: bool = False, ) -> int: if type(value) in (tuple, list, dict): @@ -252,7 +253,7 @@ def __call__( name: str, protocol: str, goal: str, - settings: Optional[Dict[str, Any]] = None, + settings: dict[str, Any] | None = None, ) -> int: ... @@ -264,7 +265,7 @@ def fixture( name: str, protocol: str, goal: str, - settings: Optional[Dict[str, Any]] = None, + settings: dict[str, Any] | None = None, ) -> int: if settings is None: settings = {} @@ -318,8 +319,8 @@ class CreateIdentifier: def __call__( self, connection: Connection, - identifier: Optional[str] = None, - type: Optional[str] = None, + identifier: str | None = None, + type: str | None = None, ) -> int: identifier = identifier or self.random_name() type = type or self.random_name() @@ -346,8 +347,8 @@ def __call__( self, connection: Connection, collection_id: int, - identifier_id: Optional[int] = None, - should_track_playtime: Optional[bool] = False, + identifier_id: int | None = None, + should_track_playtime: bool | None = False, ) -> int: ... 
@@ -357,8 +358,8 @@ def create_license_pool() -> CreateLicensePool: def fixture( connection: Connection, collection_id: int, - identifier_id: Optional[int] = None, - should_track_playtime: Optional[bool] = False, + identifier_id: int | None = None, + should_track_playtime: bool | None = False, ) -> int: licensepool = connection.execute( "INSERT into licensepools (collection_id, identifier_id, should_track_playtime) VALUES (%(id)s, %(identifier_id)s, %(track)s) returning id", @@ -377,7 +378,7 @@ def __call__( self, connection: Connection, library_id: int, - name: Optional[str] = None, + name: str | None = None, priority: int = 0, inherit_parent_restrictions: bool = False, include_self_in_grouped_feed: bool = False, @@ -413,9 +414,9 @@ class CreateCoverageRecord: def __call__( self, connection: Connection, - operation: Optional[str] = None, - identifier_id: Optional[int] = None, - collection_id: Optional[int] = None, + operation: str | None = None, + identifier_id: int | None = None, + collection_id: int | None = None, ) -> int: if identifier_id is None: identifier_id = self.create_identifier(connection) diff --git a/tests/migration/test_20231101_2d72d6876c52.py b/tests/migration/test_20231101_2d72d6876c52.py index 48e27ae8ec..56a803898a 100644 --- a/tests/migration/test_20231101_2d72d6876c52.py +++ b/tests/migration/test_20231101_2d72d6876c52.py @@ -1,4 +1,4 @@ -from typing import Any, Dict, Optional +from typing import Any import pytest from pytest_alembic import MigrationContext @@ -19,7 +19,7 @@ def create_integration_configuration( name: str, protocol: str, goal: str, - settings: Optional[Dict[str, Any]] = None, + settings: dict[str, Any] | None = None, ) -> int: if settings is None: settings = {} @@ -43,7 +43,7 @@ def create_integration_library_configuration( connection: Connection, integration_id: int, library_id: int, - settings: Optional[Dict[str, Any]] = None, + settings: dict[str, Any] | None = None, ) -> None: if settings is None: settings = {} @@ -73,9 +73,9 @@ def create_collection( connection: Connection, name: str, integration_configuration_id: int, - external_account_id: Optional[str] = None, - external_integration_id: Optional[int] = None, - parent_id: Optional[int] = None, + external_account_id: str | None = None, + external_integration_id: int | None = None, + parent_id: int | None = None, ) -> int: collection = connection.execute( "INSERT INTO collections " @@ -135,7 +135,7 @@ def test_migration( "LICENSE_GOAL", settings=integration_2_settings, ) - integration_3_settings: Dict[str, str] = {} + integration_3_settings: dict[str, str] = {} integration_3 = create_integration_configuration( connection, "collection_1", diff --git a/tests/migration/test_20231206_e06f965879ab.py b/tests/migration/test_20231206_e06f965879ab.py index c233bbf3b3..5f0aeee5bb 100644 --- a/tests/migration/test_20231206_e06f965879ab.py +++ b/tests/migration/test_20231206_e06f965879ab.py @@ -1,4 +1,3 @@ -from typing import Optional, Tuple from unittest.mock import MagicMock, call import pytest @@ -21,10 +20,10 @@ class CreateCachedMarcFile: def __call__( self, connection: Connection, - url: Optional[str], - library_id: Optional[int] = None, - lane_id: Optional[int] = None, - ) -> Tuple[int, int]: + url: str | None, + library_id: int | None = None, + lane_id: int | None = None, + ) -> tuple[int, int]: if library_id is None: library_id = self.create_library(connection) @@ -43,7 +42,7 @@ def __call__( return representation_id, file_id - def representation(self, connection: Connection, url: Optional[str]) 
-> int: + def representation(self, connection: Connection, url: str | None) -> int: row = connection.execute( "INSERT INTO representations (media_type, url) " "VALUES ('application/marc', %s) returning id", diff --git a/tests/mocks/search.py b/tests/mocks/search.py index 194ddb557c..eebf8f3992 100644 --- a/tests/mocks/search.py +++ b/tests/mocks/search.py @@ -1,7 +1,7 @@ from __future__ import annotations +from collections.abc import Iterable from enum import Enum -from typing import Dict, Iterable, List, Optional from unittest.mock import MagicMock from opensearch_dsl import MultiSearch, Search @@ -32,29 +32,29 @@ class SearchServiceFailureMode(Enum): class SearchServiceFake(SearchService): """A search service that doesn't speak to a real service.""" - _documents_by_index: Dict[str, List[dict]] + _documents_by_index: dict[str, list[dict]] _failing: SearchServiceFailureMode _search_client: Search _multi_search_client: MultiSearch - _indexes_created: List[str] - _document_submission_attempts: List[dict] + _indexes_created: list[str] + _document_submission_attempts: list[dict] def __init__(self): self.base_name = "test_index" self._failing = SearchServiceFailureMode.NOT_FAILING self._documents_by_index = {} - self._read_pointer: Optional[str] = None - self._write_pointer: Optional[SearchWritePointer] = None + self._read_pointer: str | None = None + self._write_pointer: SearchWritePointer | None = None self._search_client = Search(using=MagicMock()) self._multi_search_client = MultiSearch(using=MagicMock()) self._indexes_created = [] self._document_submission_attempts = [] @property - def document_submission_attempts(self) -> List[dict]: + def document_submission_attempts(self) -> list[dict]: return self._document_submission_attempts - def indexes_created(self) -> List[str]: + def indexes_created(self) -> list[str]: return self._indexes_created def _fail_if_necessary(self): @@ -64,17 +64,17 @@ def _fail_if_necessary(self): def set_failing_mode(self, mode: SearchServiceFailureMode): self._failing = mode - def documents_for_index(self, index_name: str) -> List[dict]: + def documents_for_index(self, index_name: str) -> list[dict]: self._fail_if_necessary() if not (index_name in self._documents_by_index): return [] return self._documents_by_index[index_name] - def documents_all(self) -> List[dict]: + def documents_all(self) -> list[dict]: self._fail_if_necessary() - results: List[dict] = [] + results: list[dict] = [] for documents in self._documents_by_index.values(): for document in documents: results.append(document) @@ -93,11 +93,11 @@ def write_pointer_name(self) -> str: self._fail_if_necessary() return f"{self.base_name}-search-write" - def read_pointer(self) -> Optional[str]: + def read_pointer(self) -> str | None: self._fail_if_necessary() return self._read_pointer - def write_pointer(self) -> Optional[SearchWritePointer]: + def write_pointer(self) -> SearchWritePointer | None: self._fail_if_necessary() return self._write_pointer @@ -131,7 +131,7 @@ def index_set_mapping(self, revision: SearchSchemaRevision) -> None: def index_submit_documents( self, pointer: str, documents: Iterable[dict] - ) -> List[SearchServiceFailedDocument]: + ) -> list[SearchServiceFailedDocument]: self._fail_if_necessary() _should_fail = False @@ -145,7 +145,7 @@ def index_submit_documents( ) if _should_fail: - results: List[SearchServiceFailedDocument] = [] + results: list[SearchServiceFailedDocument] = [] for document in documents: self._document_submission_attempts.append(document) if self._failing == 
SearchServiceFailureMode.FAIL_INDEXING_DOCUMENTS: @@ -208,7 +208,7 @@ def is_pointer_empty(*args): return False -def fake_hits(works: List[Work]): +def fake_hits(works: list[Work]): return [ Hit( { @@ -237,14 +237,14 @@ def __init__( _db, url, test_search_term, revision_directory, version, SearchServiceFake() ) - self._mock_multi_works: List[Dict] = [] + self._mock_multi_works: list[dict] = [] self._mock_count_works = 0 - self._queries: List[tuple] = [] + self._queries: list[tuple] = [] - def mock_query_works(self, works: List[Work]): + def mock_query_works(self, works: list[Work]): self.mock_query_works_multi(works) - def mock_query_works_multi(self, works: List[Work], *args: List[Work]): + def mock_query_works_multi(self, works: list[Work], *args: list[Work]): self._mock_multi_works = [fake_hits(works)] self._mock_multi_works.extend([fake_hits(arg_works) for arg_works in args]) diff --git a/tox.ini b/tox.ini index 34ffa9983f..b95fd8cbe7 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py{38,39,310,311}-{api,core}-docker +envlist = py{310,311}-{api,core}-docker skipsdist = true [testenv] @@ -76,8 +76,6 @@ ports = [gh-actions] python = - 3.8: py38 - 3.9: py39 3.10: py310 3.11: py311 From 9e28d4e340bfbf9fcf4eca37896df842c7424ee3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 19 Dec 2023 08:57:04 -0800 Subject: [PATCH 227/262] Bump freezegun from 1.3.1 to 1.4.0 (#1578) Bumps [freezegun](https://github.com/spulec/freezegun) from 1.3.1 to 1.4.0. - [Release notes](https://github.com/spulec/freezegun/releases) - [Changelog](https://github.com/spulec/freezegun/blob/master/CHANGELOG) - [Commits](https://github.com/spulec/freezegun/compare/1.3.1...1.4.0) --- updated-dependencies: - dependency-name: freezegun dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- poetry.lock | 10 +++++----- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/poetry.lock b/poetry.lock index e42a702ada..17a8a29dc2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. 
[[package]] name = "alembic" @@ -1226,13 +1226,13 @@ flask = ["flask"] [[package]] name = "freezegun" -version = "1.3.1" +version = "1.4.0" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" files = [ - {file = "freezegun-1.3.1-py3-none-any.whl", hash = "sha256:065e77a12624d05531afa87ade12a0b9bdb53495c4573893252a055b545ce3ea"}, - {file = "freezegun-1.3.1.tar.gz", hash = "sha256:48984397b3b58ef5dfc645d6a304b0060f612bcecfdaaf45ce8aff0077a6cb6a"}, + {file = "freezegun-1.4.0-py3-none-any.whl", hash = "sha256:55e0fc3c84ebf0a96a5aa23ff8b53d70246479e9a68863f1fcac5a3e52f19dd6"}, + {file = "freezegun-1.4.0.tar.gz", hash = "sha256:10939b0ba0ff5adaecf3b06a5c2f73071d9678e507c5eaedb23c761d56ac774b"}, ] [package.dependencies] @@ -4457,4 +4457,4 @@ lxml = ">=3.8" [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "d1674a963c91bac0fd32744a848a17fec3069ae529c176a5907d380321469ed5" +content-hash = "b1dd10356c05b41ea3c45ab418ae7c009d4387b1484eff66d2cfae44d000339e" diff --git a/pyproject.toml b/pyproject.toml index 6bcfb46bdd..f75963c8f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -256,7 +256,7 @@ tox-gh-actions = "^3.0" [tool.poetry.group.dev.dependencies] boto3-stubs = {version = "^1.28", extras = ["boto3", "essential", "logs", "s3"]} -freezegun = "~1.3.0" +freezegun = "~1.4.0" Jinja2 = "^3.1.2" mypy = "^1.4.1" psycopg2-binary = "~2.9.5" From d3b9a360e6a7310b3dfd1339c2f64d43edf6521a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 13:49:52 +0000 Subject: [PATCH 228/262] Bump pre-commit from 3.5.0 to 3.6.0 (#1576) --- poetry.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 17a8a29dc2..e5a3494de9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2672,13 +2672,13 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "pre-commit" -version = "3.5.0" +version = "3.6.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "pre_commit-3.5.0-py2.py3-none-any.whl", hash = "sha256:841dc9aef25daba9a0238cd27984041fa0467b4199fc4852e27950664919f660"}, - {file = "pre_commit-3.5.0.tar.gz", hash = "sha256:5804465c675b659b0862f07907f96295d490822a450c4c40e747d0b1c6ebcb32"}, + {file = "pre_commit-3.6.0-py2.py3-none-any.whl", hash = "sha256:c255039ef399049a5544b6ce13d135caba8f2c28c3b4033277a788f434308376"}, + {file = "pre_commit-3.6.0.tar.gz", hash = "sha256:d30bad9abf165f7785c15a21a1f46da7d0677cb00ee7ff4c579fd38922efe15d"}, ] [package.dependencies] From 9b0f124ac84b39c2848c799fcd6b1f7b9e5e0261 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 13:51:28 +0000 Subject: [PATCH 229/262] Bump mypy from 1.7.1 to 1.8.0 (#1581) --- poetry.lock | 56 ++++++++++++++++++++++++++--------------------------- 1 file changed, 28 insertions(+), 28 deletions(-) diff --git a/poetry.lock b/poetry.lock index e5a3494de9..8201c6d94e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2286,38 +2286,38 @@ files = [ [[package]] name = "mypy" -version = "1.7.1" +version = "1.8.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:12cce78e329838d70a204293e7b29af9faa3ab14899aec397798a4b41be7f340"}, - {file = "mypy-1.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1484b8fa2c10adf4474f016e09d7a159602f3239075c7bf9f1627f5acf40ad49"}, - {file = "mypy-1.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31902408f4bf54108bbfb2e35369877c01c95adc6192958684473658c322c8a5"}, - {file = "mypy-1.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f2c2521a8e4d6d769e3234350ba7b65ff5d527137cdcde13ff4d99114b0c8e7d"}, - {file = "mypy-1.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:fcd2572dd4519e8a6642b733cd3a8cfc1ef94bafd0c1ceed9c94fe736cb65b6a"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b901927f16224d0d143b925ce9a4e6b3a758010673eeded9b748f250cf4e8f7"}, - {file = "mypy-1.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2f7f6985d05a4e3ce8255396df363046c28bea790e40617654e91ed580ca7c51"}, - {file = "mypy-1.7.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:944bdc21ebd620eafefc090cdf83158393ec2b1391578359776c00de00e8907a"}, - {file = "mypy-1.7.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9c7ac372232c928fff0645d85f273a726970c014749b924ce5710d7d89763a28"}, - {file = "mypy-1.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:f6efc9bd72258f89a3816e3a98c09d36f079c223aa345c659622f056b760ab42"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:6dbdec441c60699288adf051f51a5d512b0d818526d1dcfff5a41f8cd8b4aaf1"}, - {file = "mypy-1.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4fc3d14ee80cd22367caaaf6e014494415bf440980a3045bf5045b525680ac33"}, - {file = "mypy-1.7.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c6e4464ed5f01dc44dc9821caf67b60a4e5c3b04278286a85c067010653a0eb"}, - {file = "mypy-1.7.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d9b338c19fa2412f76e17525c1b4f2c687a55b156320acb588df79f2e6fa9fea"}, - {file = "mypy-1.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:204e0d6de5fd2317394a4eff62065614c4892d5a4d1a7ee55b765d7a3d9e3f82"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:84860e06ba363d9c0eeabd45ac0fde4b903ad7aa4f93cd8b648385a888e23200"}, - {file = "mypy-1.7.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8c5091ebd294f7628eb25ea554852a52058ac81472c921150e3a61cdd68f75a7"}, - {file = "mypy-1.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40716d1f821b89838589e5b3106ebbc23636ffdef5abc31f7cd0266db936067e"}, - {file = "mypy-1.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5cf3f0c5ac72139797953bd50bc6c95ac13075e62dbfcc923571180bebb662e9"}, - {file = "mypy-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:78e25b2fd6cbb55ddfb8058417df193f0129cad5f4ee75d1502248e588d9e0d7"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:75c4d2a6effd015786c87774e04331b6da863fc3fc4e8adfc3b40aa55ab516fe"}, - {file = "mypy-1.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:2643d145af5292ee956aa0a83c2ce1038a3bdb26e033dadeb2f7066fb0c9abce"}, - {file = "mypy-1.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75aa828610b67462ffe3057d4d8a4112105ed211596b750b53cbfe182f44777a"}, - {file = "mypy-1.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ee5d62d28b854eb61889cde4e1dbc10fbaa5560cb39780c3995f6737f7e82120"}, - {file = "mypy-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:72cf32ce7dd3562373f78bd751f73c96cfb441de147cc2448a92c1a308bd0ca6"}, - {file = "mypy-1.7.1-py3-none-any.whl", hash = "sha256:f7c5d642db47376a0cc130f0de6d055056e010debdaf0707cd2b0fc7e7ef30ea"}, - {file = "mypy-1.7.1.tar.gz", hash = "sha256:fcb6d9afb1b6208b4c712af0dafdc650f518836065df0d4fb1d800f5d6773db2"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, + {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, + {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, + {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, + {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, + {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, + {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, + {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, + {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, + {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, + {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, + {file = 
"mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, + {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, + {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, + {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, + {file = "mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, + {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, + {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, + {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, + {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, + {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, + {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, + {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, ] [package.dependencies] From fd514e9a8913927b64b8572e5d715d4ab4aabdd6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 13:52:12 +0000 Subject: [PATCH 230/262] Bump alembic from 1.13.0 to 1.13.1 (#1582) --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8201c6d94e..06fdc7bfaa 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "alembic" -version = "1.13.0" +version = "1.13.1" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.8" files = [ - {file = "alembic-1.13.0-py3-none-any.whl", hash = "sha256:a23974ea301c3ee52705db809c7413cecd165290c6679b9998dd6c74342ca23a"}, - {file = "alembic-1.13.0.tar.gz", hash = "sha256:ab4b3b94d2e1e5f81e34be8a9b7b7575fc9dd5398fccb0bef351ec9b14872623"}, + {file = "alembic-1.13.1-py3-none-any.whl", hash = "sha256:2edcc97bed0bd3272611ce3a98d98279e9c209e7186e43e75bbb1b2bdfdbcc43"}, + {file = "alembic-1.13.1.tar.gz", hash = "sha256:4932c8558bf68f2ee92b9bbcb8218671c627064d5b08939437af6d77dc05e595"}, ] [package.dependencies] From ce3833915b17c49c141bfde4084a340a37d40e1e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 3 Jan 2024 13:52:39 +0000 Subject: [PATCH 231/262] Bump jwcrypto from 1.5.0 to 1.5.1 (#1583) --- poetry.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 06fdc7bfaa..eabf6a207f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1874,12 +1874,12 @@ referencing = ">=0.28.0" [[package]] name = "jwcrypto" -version = "1.5.0" +version = "1.5.1" description = "Implementation of JOSE Web standards" optional = false python-versions = ">= 3.6" files = [ - {file = "jwcrypto-1.5.0.tar.gz", hash = "sha256:2c1dc51cf8e38ddf324795dfe9426dee9dd46caf47f535ccbc18781fba810b8d"}, + {file = "jwcrypto-1.5.1.tar.gz", hash = "sha256:48bb9bf433777136253579e52b75ffe0f9a4a721d133d01f45a0b91ed5f4f1ae"}, ] [package.dependencies] From 823c6115ff1b7209cebd4874ae7a76ddec9021ef Mon Sep 17 00:00:00 2001 From: Tim DiLauro Date: Wed, 3 Jan 2024 15:39:03 -0500 Subject: [PATCH 232/262] Change hold/loan notification times. (#1587) --- docker/services/cron/cron.d/circulation | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/services/cron/cron.d/circulation b/docker/services/cron/cron.d/circulation index f600cbcd80..46c6bd2f2c 100644 --- a/docker/services/cron/cron.d/circulation +++ b/docker/services/cron/cron.d/circulation @@ -103,8 +103,8 @@ HOME=/var/www/circulation # Notifications # -10 * * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 -15 * * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 +10 */8 * * * root core/bin/run loan_notifications >> /var/log/cron.log 2>&1 +15 */8 * * * root core/bin/run hold_notifications >> /var/log/cron.log 2>&1 0 1 * * * root core/bin/run patron_activity_sync_notifications >> /var/log/cron.log 2>&1 # Audiobook playtimes From 4bc339005e566908f111228521de26d0c3f48636 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Thu, 4 Jan 2024 14:29:35 -0400 Subject: [PATCH 233/262] Fix overdrive advantage collection parent reporting (PP-806) (#1590) * Fix admin UI overdrive issue. 
--- api/admin/controller/collection_settings.py | 7 +++++-- tests/api/admin/controller/test_collections.py | 2 +- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/api/admin/controller/collection_settings.py b/api/admin/controller/collection_settings.py index cb75f7c10e..ee28ac21d6 100644 --- a/api/admin/controller/collection_settings.py +++ b/api/admin/controller/collection_settings.py @@ -43,8 +43,11 @@ def configured_service_info( if service_info: # Add 'marked_for_deletion' to the service info service_info["marked_for_deletion"] = service.collection.marked_for_deletion - - service_info["parent_id"] = service.collection.parent_id + service_info["parent_id"] = ( + service.collection.parent.integration_configuration_id + if service.collection.parent + else None + ) service_info["settings"]["export_marc_records"] = str( service.collection.export_marc_records ).lower() diff --git a/tests/api/admin/controller/test_collections.py b/tests/api/admin/controller/test_collections.py index f5adbff220..6685cbdb87 100644 --- a/tests/api/admin/controller/test_collections.py +++ b/tests/api/admin/controller/test_collections.py @@ -132,7 +132,7 @@ def test_collections_get_collections_with_multiple_collections( "overdrive_client_secret" ] == settings2.get("overdrive_client_secret") - assert c2.id == coll3.get("parent_id") + assert c2.integration_configuration.id == coll3.get("parent_id") coll3_libraries = coll3.get("libraries") assert 2 == len(coll3_libraries) From b25b092885ee0bf2bba29e73ebe01839aff2ac48 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 20:50:05 +0000 Subject: [PATCH 234/262] Bump sqlalchemy from 1.4.50 to 1.4.51 (#1585) --- poetry.lock | 58 ++++++++++++++++++++++++++--------------------------- 1 file changed, 29 insertions(+), 29 deletions(-) diff --git a/poetry.lock b/poetry.lock index eabf6a207f..76cc75bf18 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3878,36 +3878,36 @@ files = [ [[package]] name = "sqlalchemy" -version = "1.4.50" +version = "1.4.51" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85292ff52ddf85a39367057c3d7968a12ee1fb84565331a36a8fead346f08796"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d0fed0f791d78e7767c2db28d34068649dfeea027b83ed18c45a423f741425cb"}, - {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db4db3c08ffbb18582f856545f058a7a5e4ab6f17f75795ca90b3c38ee0a8ba4"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14b0cacdc8a4759a1e1bd47dc3ee3f5db997129eb091330beda1da5a0e9e5bd7"}, - {file = "SQLAlchemy-1.4.50-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1fb9cb60e0f33040e4f4681e6658a7eb03b5cb4643284172f91410d8c493dace"}, - {file = 
"SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4cb501d585aa74a0f86d0ea6263b9c5e1d1463f8f9071392477fd401bd3c7cc"}, - {file = "SQLAlchemy-1.4.50-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a7a66297e46f85a04d68981917c75723e377d2e0599d15fbe7a56abed5e2d75"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1db0221cb26d66294f4ca18c533e427211673ab86c1fbaca8d6d9ff78654293"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7dbe6369677a2bea68fe9812c6e4bbca06ebfa4b5cde257b2b0bf208709131"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a9bddb60566dc45c57fd0a5e14dd2d9e5f106d2241e0a2dc0c1da144f9444516"}, - {file = "SQLAlchemy-1.4.50-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82dd4131d88395df7c318eeeef367ec768c2a6fe5bd69423f7720c4edb79473c"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:273505fcad22e58cc67329cefab2e436006fc68e3c5423056ee0513e6523268a"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3257a6e09626d32b28a0c5b4f1a97bced585e319cfa90b417f9ab0f6145c33c"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d69738d582e3a24125f0c246ed8d712b03bd21e148268421e4a4d09c34f521a5"}, - {file = "SQLAlchemy-1.4.50-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34e1c5d9cd3e6bf3d1ce56971c62a40c06bfc02861728f368dcfec8aeedb2814"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1fcee5a2c859eecb4ed179edac5ffbc7c84ab09a5420219078ccc6edda45436"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e70e0673d7d12fa6cd363453a0d22dac0d9978500aa6b46aa96e22690a55eab"}, - {file = "SQLAlchemy-1.4.50-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b881ac07d15fb3e4f68c5a67aa5cdaf9eb8f09eb5545aaf4b0a5f5f4659be18"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6997da81114daef9203d30aabfa6b218a577fc2bd797c795c9c88c9eb78d49"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdb77e1789e7596b77fd48d99ec1d2108c3349abd20227eea0d48d3f8cf398d9"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:128a948bd40780667114b0297e2cc6d657b71effa942e0a368d8cc24293febb3"}, - {file = "SQLAlchemy-1.4.50-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2d526aeea1bd6a442abc7c9b4b00386fd70253b80d54a0930c0a216230a35be"}, - {file = "SQLAlchemy-1.4.50.tar.gz", hash = "sha256:3b97ddf509fc21e10b09403b5219b06c5b558b27fc2453150274fa4e70707dbf"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2be4e6294c53f2ec8ea36486b56390e3bcaa052bf3a9a47005687ccf376745d1"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca484ca11c65e05639ffe80f20d45e6be81fbec7683d6c9a15cd421e6e8b340"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0535d5b57d014d06ceeaeffd816bb3a6e2dddeb670222570b8c4953e2d2ea678"}, + {file = "SQLAlchemy-1.4.51-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af55cc207865d641a57f7044e98b08b09220da3d1b13a46f26487cc2f898a072"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7deeae5071930abb3669b5185abb6c33ddfd2398f87660fafdb9e6a5fb0f3f2f"}, + {file = "SQLAlchemy-1.4.51-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0892e7ac8bc76da499ad3ee8de8da4d7905a3110b952e2a35a940dab1ffa550e"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cacc0b2dd7d22a918a9642fc89840a5d3cee18a0e1fe41080b1141b23b10916"}, + {file = "SQLAlchemy-1.4.51-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:245c67c88e63f1523e9216cad6ba3107dea2d3ee19adc359597a628afcabfbcb"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ec7a0ed9b32afdf337172678a4a0e6419775ba4e649b66f49415615fa47efbd"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:352df882088a55293f621328ec33b6ffca936ad7f23013b22520542e1ab6ad1b"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:86a22143a4001f53bf58027b044da1fb10d67b62a785fc1390b5c7f089d9838c"}, + {file = "SQLAlchemy-1.4.51-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c37bc677690fd33932182b85d37433845de612962ed080c3e4d92f758d1bd894"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c55040d8ea65414de7c47f1a23823cd9f3fad0dc93e6b6b728fee81230f817b"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38ef80328e3fee2be0a1abe3fe9445d3a2e52a1282ba342d0dab6edf1fef4707"}, + {file = "SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f8cafa6f885a0ff5e39efa9325195217bb47d5929ab0051636610d24aef45ade"}, + {file = 
"SQLAlchemy-1.4.51-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8f2df79a46e130235bc5e1bbef4de0583fb19d481eaa0bffa76e8347ea45ec6"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb18549b770351b54e1ab5da37d22bc530b8bfe2ee31e22b9ebe650640d2ef12"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55e699466106d09f028ab78d3c2e1f621b5ef2c8694598242259e4515715da7c"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2ad16880ccd971ac8e570550fbdef1385e094b022d6fc85ef3ce7df400dddad3"}, + {file = "SQLAlchemy-1.4.51-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b97fd5bb6b7c1a64b7ac0632f7ce389b8ab362e7bd5f60654c2a418496be5d7f"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e646b19f47d655261b22df9976e572f588185279970efba3d45c377127d35349"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3cf56cc36d42908495760b223ca9c2c0f9f0002b4eddc994b24db5fcb86a9e4"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0d661cff58c91726c601cc0ee626bf167b20cc4d7941c93c5f3ac28dc34ddbea"}, + {file = "SQLAlchemy-1.4.51-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3823dda635988e6744d4417e13f2e2b5fe76c4bf29dd67e95f98717e1b094cad"}, + {file = "SQLAlchemy-1.4.51.tar.gz", hash = "sha256:e7908c2025eb18394e32d65dd02d2e37e17d733cdbe7d78231c2b6d7eb20cdb9"}, ] [package.dependencies] @@ -3917,7 +3917,7 @@ sqlalchemy2-stubs = {version = "*", optional = true, markers = "extra == \"mypy\ [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing-extensions (!=3.10.0.1)"] +aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] asyncio = ["greenlet (!=0.4.17)"] asyncmy = ["asyncmy (>=0.2.3,!=0.2.4)", "greenlet (!=0.4.17)"] mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2)"] @@ -3927,14 +3927,14 @@ mssql-pyodbc = ["pyodbc"] mypy = ["mypy (>=0.910)", "sqlalchemy2-stubs"] mysql = ["mysqlclient (>=1.4.0)", "mysqlclient (>=1.4.0,<2)"] mysql-connector = ["mysql-connector-python"] -oracle = ["cx-oracle (>=7)", "cx-oracle (>=7,<8)"] +oracle = ["cx_oracle (>=7)", "cx_oracle (>=7,<8)"] postgresql = ["psycopg2 (>=2.7)"] postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] postgresql-pg8000 = ["pg8000 (>=1.16.6,!=1.29.0)"] postgresql-psycopg2binary = ["psycopg2-binary"] postgresql-psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql", "pymysql (<1)"] -sqlcipher = ["sqlcipher3-binary"] +sqlcipher = ["sqlcipher3_binary"] [[package]] name = "sqlalchemy2-stubs" From 081cd785d340a53cdcbfb07f24a8ff4c503215d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 20:50:22 +0000 Subject: [PATCH 235/262] Bump pillow from 10.1.0 to 10.2.0 (#1586) --- poetry.lock | 128 ++++++++++++++++++++++++++++++---------------------- 1 file 
changed, 73 insertions(+), 55 deletions(-) diff --git a/poetry.lock b/poetry.lock index 76cc75bf18..61afb1d831 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2575,70 +2575,88 @@ uritemplate = ">=4.1,<5.0" [[package]] name = "pillow" -version = "10.1.0" +version = "10.2.0" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1ab05f3db77e98f93964697c8efc49c7954b08dd61cff526b7f2531a22410106"}, - {file = "Pillow-10.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6932a7652464746fcb484f7fc3618e6503d2066d853f68a4bd97193a3996e273"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5f63b5a68daedc54c7c3464508d8c12075e56dcfbd42f8c1bf40169061ae666"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0949b55eb607898e28eaccb525ab104b2d86542a85c74baf3a6dc24002edec2"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:ae88931f93214777c7a3aa0a8f92a683f83ecde27f65a45f95f22d289a69e593"}, - {file = "Pillow-10.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b0eb01ca85b2361b09480784a7931fc648ed8b7836f01fb9241141b968feb1db"}, - {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d27b5997bdd2eb9fb199982bb7eb6164db0426904020dc38c10203187ae2ff2f"}, - {file = "Pillow-10.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7df5608bc38bd37ef585ae9c38c9cd46d7c81498f086915b0f97255ea60c2818"}, - {file = "Pillow-10.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:41f67248d92a5e0a2076d3517d8d4b1e41a97e2df10eb8f93106c89107f38b57"}, - {file = "Pillow-10.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:1fb29c07478e6c06a46b867e43b0bcdb241b44cc52be9bc25ce5944eed4648e7"}, - {file = "Pillow-10.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2cdc65a46e74514ce742c2013cd4a2d12e8553e3a2563c64879f7c7e4d28bce7"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50d08cd0a2ecd2a8657bd3d82c71efd5a58edb04d9308185d66c3a5a5bed9610"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:062a1610e3bc258bff2328ec43f34244fcec972ee0717200cb1425214fe5b839"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:61f1a9d247317fa08a308daaa8ee7b3f760ab1809ca2da14ecc88ae4257d6172"}, - {file = "Pillow-10.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a646e48de237d860c36e0db37ecaecaa3619e6f3e9d5319e527ccbc8151df061"}, - {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:47e5bf85b80abc03be7455c95b6d6e4896a62f6541c1f2ce77a7d2bb832af262"}, - {file = "Pillow-10.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:a92386125e9ee90381c3369f57a2a50fa9e6aa8b1cf1d9c4b200d41a7dd8e992"}, - {file = "Pillow-10.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:0f7c276c05a9767e877a0b4c5050c8bee6a6d960d7f0c11ebda6b99746068c2a"}, - {file = "Pillow-10.1.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:a89b8312d51715b510a4fe9fc13686283f376cfd5abca8cd1c65e4c76e21081b"}, - {file = "Pillow-10.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:00f438bb841382b15d7deb9a05cc946ee0f2c352653c7aa659e75e592f6fa17d"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d929a19f5469b3f4df33a3df2983db070ebb2088a1e145e18facbc28cae5b27"}, - 
{file = "Pillow-10.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a92109192b360634a4489c0c756364c0c3a2992906752165ecb50544c251312"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:0248f86b3ea061e67817c47ecbe82c23f9dd5d5226200eb9090b3873d3ca32de"}, - {file = "Pillow-10.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9882a7451c680c12f232a422730f986a1fcd808da0fd428f08b671237237d651"}, - {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:1c3ac5423c8c1da5928aa12c6e258921956757d976405e9467c5f39d1d577a4b"}, - {file = "Pillow-10.1.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:806abdd8249ba3953c33742506fe414880bad78ac25cc9a9b1c6ae97bedd573f"}, - {file = "Pillow-10.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:eaed6977fa73408b7b8a24e8b14e59e1668cfc0f4c40193ea7ced8e210adf996"}, - {file = "Pillow-10.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:fe1e26e1ffc38be097f0ba1d0d07fcade2bcfd1d023cda5b29935ae8052bd793"}, - {file = "Pillow-10.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7a7e3daa202beb61821c06d2517428e8e7c1aab08943e92ec9e5755c2fc9ba5e"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:24fadc71218ad2b8ffe437b54876c9382b4a29e030a05a9879f615091f42ffc2"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa1d323703cfdac2036af05191b969b910d8f115cf53093125e4058f62012c9a"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:912e3812a1dbbc834da2b32299b124b5ddcb664ed354916fd1ed6f193f0e2d01"}, - {file = "Pillow-10.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:7dbaa3c7de82ef37e7708521be41db5565004258ca76945ad74a8e998c30af8d"}, - {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9d7bc666bd8c5a4225e7ac71f2f9d12466ec555e89092728ea0f5c0c2422ea80"}, - {file = "Pillow-10.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baada14941c83079bf84c037e2d8b7506ce201e92e3d2fa0d1303507a8538212"}, - {file = "Pillow-10.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:2ef6721c97894a7aa77723740a09547197533146fba8355e86d6d9a4a1056b14"}, - {file = "Pillow-10.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0a026c188be3b443916179f5d04548092e253beb0c3e2ee0a4e2cdad72f66099"}, - {file = "Pillow-10.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:04f6f6149f266a100374ca3cc368b67fb27c4af9f1cc8cb6306d849dcdf12616"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb40c011447712d2e19cc261c82655f75f32cb724788df315ed992a4d65696bb"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a8413794b4ad9719346cd9306118450b7b00d9a15846451549314a58ac42219"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c9aeea7b63edb7884b031a35305629a7593272b54f429a9869a4f63a1bf04c34"}, - {file = "Pillow-10.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b4005fee46ed9be0b8fb42be0c20e79411533d1fd58edabebc0dd24626882cfd"}, - {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4d0152565c6aa6ebbfb1e5d8624140a440f2b99bf7afaafbdbf6430426497f28"}, - {file = "Pillow-10.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d921bc90b1defa55c9917ca6b6b71430e4286fc9e44c55ead78ca1a9f9eba5f2"}, - {file = "Pillow-10.1.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:cfe96560c6ce2f4c07d6647af2d0f3c54cc33289894ebd88cfbb3bcd5391e256"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:937bdc5a7f5343d1c97dc98149a0be7eb9704e937fe3dc7140e229ae4fc572a7"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1c25762197144e211efb5f4e8ad656f36c8d214d390585d1d21281f46d556ba"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:afc8eef765d948543a4775f00b7b8c079b3321d6b675dde0d02afa2ee23000b4"}, - {file = "Pillow-10.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:883f216eac8712b83a63f41b76ddfb7b2afab1b74abbb413c5df6680f071a6b9"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b920e4d028f6442bea9a75b7491c063f0b9a3972520731ed26c83e254302eb1e"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c41d960babf951e01a49c9746f92c5a7e0d939d1652d7ba30f6b3090f27e412"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1fafabe50a6977ac70dfe829b2d5735fd54e190ab55259ec8aea4aaea412fa0b"}, - {file = "Pillow-10.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b834f4b16173e5b92ab6566f0473bfb09f939ba14b23b8da1f54fa63e4b623f"}, - {file = "Pillow-10.1.0.tar.gz", hash = "sha256:e6bf8de6c36ed96c86ea3b6e1d5273c53f46ef518a062464cd7ef5dd2cf92e38"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:7823bdd049099efa16e4246bdf15e5a13dbb18a51b68fa06d6c1d4d8b99a796e"}, + {file = "pillow-10.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:83b2021f2ade7d1ed556bc50a399127d7fb245e725aa0113ebd05cfe88aaf588"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fad5ff2f13d69b7e74ce5b4ecd12cc0ec530fcee76356cac6742785ff71c452"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da2b52b37dad6d9ec64e653637a096905b258d2fc2b984c41ae7d08b938a67e4"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:47c0995fc4e7f79b5cfcab1fc437ff2890b770440f7696a3ba065ee0fd496563"}, + {file = "pillow-10.2.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:322bdf3c9b556e9ffb18f93462e5f749d3444ce081290352c6070d014c93feb2"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:51f1a1bffc50e2e9492e87d8e09a17c5eea8409cda8d3f277eb6edc82813c17c"}, + {file = "pillow-10.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:69ffdd6120a4737710a9eee73e1d2e37db89b620f702754b8f6e62594471dee0"}, + {file = "pillow-10.2.0-cp310-cp310-win32.whl", hash = "sha256:c6dafac9e0f2b3c78df97e79af707cdc5ef8e88208d686a4847bab8266870023"}, + {file = "pillow-10.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:aebb6044806f2e16ecc07b2a2637ee1ef67a11840a66752751714a0d924adf72"}, + {file = "pillow-10.2.0-cp310-cp310-win_arm64.whl", hash = "sha256:7049e301399273a0136ff39b84c3678e314f2158f50f517bc50285fb5ec847ad"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:35bb52c37f256f662abdfa49d2dfa6ce5d93281d323a9af377a120e89a9eafb5"}, + {file = "pillow-10.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9c23f307202661071d94b5e384e1e1dc7dfb972a28a2310e4ee16103e66ddb67"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:773efe0603db30c281521a7c0214cad7836c03b8ccff897beae9b47c0b657d61"}, + {file = 
"pillow-10.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11fa2e5984b949b0dd6d7a94d967743d87c577ff0b83392f17cb3990d0d2fd6e"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:716d30ed977be8b37d3ef185fecb9e5a1d62d110dfbdcd1e2a122ab46fddb03f"}, + {file = "pillow-10.2.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a086c2af425c5f62a65e12fbf385f7c9fcb8f107d0849dba5839461a129cf311"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c8de2789052ed501dd829e9cae8d3dcce7acb4777ea4a479c14521c942d395b1"}, + {file = "pillow-10.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:609448742444d9290fd687940ac0b57fb35e6fd92bdb65386e08e99af60bf757"}, + {file = "pillow-10.2.0-cp311-cp311-win32.whl", hash = "sha256:823ef7a27cf86df6597fa0671066c1b596f69eba53efa3d1e1cb8b30f3533068"}, + {file = "pillow-10.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:1da3b2703afd040cf65ec97efea81cfba59cdbed9c11d8efc5ab09df9509fc56"}, + {file = "pillow-10.2.0-cp311-cp311-win_arm64.whl", hash = "sha256:edca80cbfb2b68d7b56930b84a0e45ae1694aeba0541f798e908a49d66b837f1"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:1b5e1b74d1bd1b78bc3477528919414874748dd363e6272efd5abf7654e68bef"}, + {file = "pillow-10.2.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0eae2073305f451d8ecacb5474997c08569fb4eb4ac231ffa4ad7d342fdc25ac"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b7c2286c23cd350b80d2fc9d424fc797575fb16f854b831d16fd47ceec078f2c"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e23412b5c41e58cec602f1135c57dfcf15482013ce6e5f093a86db69646a5aa"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:52a50aa3fb3acb9cf7213573ef55d31d6eca37f5709c69e6858fe3bc04a5c2a2"}, + {file = "pillow-10.2.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:127cee571038f252a552760076407f9cff79761c3d436a12af6000cd182a9d04"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8d12251f02d69d8310b046e82572ed486685c38f02176bd08baf216746eb947f"}, + {file = "pillow-10.2.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:54f1852cd531aa981bc0965b7d609f5f6cc8ce8c41b1139f6ed6b3c54ab82bfb"}, + {file = "pillow-10.2.0-cp312-cp312-win32.whl", hash = "sha256:257d8788df5ca62c980314053197f4d46eefedf4e6175bc9412f14412ec4ea2f"}, + {file = "pillow-10.2.0-cp312-cp312-win_amd64.whl", hash = "sha256:154e939c5f0053a383de4fd3d3da48d9427a7e985f58af8e94d0b3c9fcfcf4f9"}, + {file = "pillow-10.2.0-cp312-cp312-win_arm64.whl", hash = "sha256:f379abd2f1e3dddb2b61bc67977a6b5a0a3f7485538bcc6f39ec76163891ee48"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8373c6c251f7ef8bda6675dd6d2b3a0fcc31edf1201266b5cf608b62a37407f9"}, + {file = "pillow-10.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:870ea1ada0899fd0b79643990809323b389d4d1d46c192f97342eeb6ee0b8483"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4b6b1e20608493548b1f32bce8cca185bf0480983890403d3b8753e44077129"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3031709084b6e7852d00479fd1d310b07d0ba82765f973b543c8af5061cf990e"}, + {file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:3ff074fc97dd4e80543a3e91f69d58889baf2002b6be64347ea8cf5533188213"}, + 
{file = "pillow-10.2.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:cb4c38abeef13c61d6916f264d4845fab99d7b711be96c326b84df9e3e0ff62d"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b1b3020d90c2d8e1dae29cf3ce54f8094f7938460fb5ce8bc5c01450b01fbaf6"}, + {file = "pillow-10.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:170aeb00224ab3dc54230c797f8404507240dd868cf52066f66a41b33169bdbe"}, + {file = "pillow-10.2.0-cp38-cp38-win32.whl", hash = "sha256:c4225f5220f46b2fde568c74fca27ae9771536c2e29d7c04f4fb62c83275ac4e"}, + {file = "pillow-10.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0689b5a8c5288bc0504d9fcee48f61a6a586b9b98514d7d29b840143d6734f39"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b792a349405fbc0163190fde0dc7b3fef3c9268292586cf5645598b48e63dc67"}, + {file = "pillow-10.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c570f24be1e468e3f0ce7ef56a89a60f0e05b30a3669a459e419c6eac2c35364"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8ecd059fdaf60c1963c58ceb8997b32e9dc1b911f5da5307aab614f1ce5c2fb"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c365fd1703040de1ec284b176d6af5abe21b427cb3a5ff68e0759e1e313a5e7e"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:70c61d4c475835a19b3a5aa42492409878bbca7438554a1f89d20d58a7c75c01"}, + {file = "pillow-10.2.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b6f491cdf80ae540738859d9766783e3b3c8e5bd37f5dfa0b76abdecc5081f13"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9d189550615b4948f45252d7f005e53c2040cea1af5b60d6f79491a6e147eef7"}, + {file = "pillow-10.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:49d9ba1ed0ef3e061088cd1e7538a0759aab559e2e0a80a36f9fd9d8c0c21591"}, + {file = "pillow-10.2.0-cp39-cp39-win32.whl", hash = "sha256:babf5acfede515f176833ed6028754cbcd0d206f7f614ea3447d67c33be12516"}, + {file = "pillow-10.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:0304004f8067386b477d20a518b50f3fa658a28d44e4116970abfcd94fac34a8"}, + {file = "pillow-10.2.0-cp39-cp39-win_arm64.whl", hash = "sha256:0fb3e7fc88a14eacd303e90481ad983fd5b69c761e9e6ef94c983f91025da869"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:322209c642aabdd6207517e9739c704dc9f9db943015535783239022002f054a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3eedd52442c0a5ff4f887fab0c1c0bb164d8635b32c894bc1faf4c618dd89df2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cb28c753fd5eb3dd859b4ee95de66cc62af91bcff5db5f2571d32a520baf1f04"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:33870dc4653c5017bf4c8873e5488d8f8d5f8935e2f1fb9a2208c47cdd66efd2"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3c31822339516fb3c82d03f30e22b1d038da87ef27b6a78c9549888f8ceda39a"}, + {file = "pillow-10.2.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a2b56ba36e05f973d450582fb015594aaa78834fefe8dfb8fcd79b93e64ba4c6"}, + {file = "pillow-10.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:d8e6aeb9201e655354b3ad049cb77d19813ad4ece0df1249d3c793de3774f8c7"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = 
"sha256:2247178effb34a77c11c0e8ac355c7a741ceca0a732b27bf11e747bbc950722f"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15587643b9e5eb26c48e49a7b33659790d28f190fc514a322d55da2fb5c2950e"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753cd8f2086b2b80180d9b3010dd4ed147efc167c90d3bf593fe2af21265e5a5"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:7c8f97e8e7a9009bcacbe3766a36175056c12f9a44e6e6f2d5caad06dcfbf03b"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d1b35bcd6c5543b9cb547dee3150c93008f8dd0f1fef78fc0cd2b141c5baf58a"}, + {file = "pillow-10.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe4c15f6c9285dc54ce6553a3ce908ed37c8f3825b5a51a15c91442bb955b868"}, + {file = "pillow-10.2.0.tar.gz", hash = "sha256:e87f0b2c78157e12d7686b27d63c070fd65d994e8ddae6f328e0dcf4a0cd007e"}, ] [package.extras] docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-removed-in", "sphinxext-opengraph"] +fpx = ["olefile"] +mic = ["olefile"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +typing = ["typing-extensions"] +xmp = ["defusedxml"] [[package]] name = "platformdirs" From 5f558203b0d7af114f725d6db0c01a0b1b1bbab7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 20:50:46 +0000 Subject: [PATCH 236/262] Bump pyspellchecker from 0.7.2 to 0.7.3 (#1588) --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 61afb1d831..5bef10dcd2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3221,13 +3221,13 @@ files = [ [[package]] name = "pyspellchecker" -version = "0.7.2" +version = "0.7.3" description = "Pure python spell checker based on work by Peter Norvig" optional = false python-versions = ">=3.6" files = [ - {file = "pyspellchecker-0.7.2-py3-none-any.whl", hash = "sha256:b5ef23437702b8d03626f814b9646779b572d378b325ad252d8a8e616b3d76db"}, - {file = "pyspellchecker-0.7.2.tar.gz", hash = "sha256:bc51ffb2c18ba26eaa1340756ebf96d0d886ed6a31d6f8e7a0094ad49d24550a"}, + {file = "pyspellchecker-0.7.3-py3-none-any.whl", hash = "sha256:5c696008952b87ad300edf05ec84748d1d01c655303a2c737a36f17d120f21f4"}, + {file = "pyspellchecker-0.7.3.tar.gz", hash = "sha256:c34fab476510d9805d6cb8477acde64a3d433e62f4cbc677ea08bfae8804f8dd"}, ] [[package]] @@ -4475,4 +4475,4 @@ lxml = ">=3.8" [metadata] lock-version = "2.0" python-versions = ">=3.10,<4" -content-hash = "b1dd10356c05b41ea3c45ab418ae7c009d4387b1484eff66d2cfae44d000339e" +content-hash = "a7f2bfbfc1fbbc83baf50f4345c3cfe05044c52d9e8667213ce132f0ae6de148" diff --git a/pyproject.toml b/pyproject.toml index f75963c8f0..535ddfca21 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -224,7 +224,7 @@ PyLD = "2.0.3" pymarc = "5.1.0" pyOpenSSL = "^23.1.0" pyparsing = "3.1.1" -pyspellchecker = "0.7.2" +pyspellchecker = "0.7.3" python = ">=3.10,<4" python-dateutil = "2.8.2" python3-saml = "^1.16" # python-saml is required for SAML authentication From 9d9efa27aab78364c9bbb9d8b4c8a0ded8ef293c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 4 Jan 2024 20:51:01 +0000 Subject: [PATCH 237/262] Bump tox-gh-actions from 3.1.3 to 3.2.0 (#1589) 
--- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5bef10dcd2..d45824540d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4038,13 +4038,13 @@ tox = ">=3.0.0,<5.0" [[package]] name = "tox-gh-actions" -version = "3.1.3" +version = "3.2.0" description = "Seamless integration of tox into GitHub Actions" optional = false python-versions = ">=3.7" files = [ - {file = "tox-gh-actions-3.1.3.tar.gz", hash = "sha256:ffd4151fe8b62c6f401a2fc5a01317835d7ab380923f6e0d063c300750308328"}, - {file = "tox_gh_actions-3.1.3-py2.py3-none-any.whl", hash = "sha256:5954766fe2ed0e284f3cdc87535dfdf68d0f803f1011b17ff8cf52ed3156e6c1"}, + {file = "tox-gh-actions-3.2.0.tar.gz", hash = "sha256:ac6fa3b8da51bc90dd77985fd55f09e746c6558c55910c0a93d643045a2b0ccc"}, + {file = "tox_gh_actions-3.2.0-py2.py3-none-any.whl", hash = "sha256:821b66a4751a788fa3e9617bd796d696507b08c6e1d929ee4faefba06b73b694"}, ] [package.dependencies] From 91c9ba0c16fcd6e4bbd0671b06872b207fac68c3 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 10:44:31 -0400 Subject: [PATCH 238/262] Faster migration to add the should_track_playtime column to licensepools table. (#1592) --- ...4_6af9160a578e_licensepools_time_tracking_flag.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py b/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py index 9f7bd5a0b6..50b2ba3d92 100644 --- a/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py +++ b/alembic/versions/20231124_6af9160a578e_licensepools_time_tracking_flag.py @@ -19,11 +19,15 @@ def upgrade() -> None: op.add_column( "licensepools", - sa.Column("should_track_playtime", sa.Boolean(), nullable=True, default=False), + sa.Column( + "should_track_playtime", + sa.Boolean(), + nullable=False, + server_default=sa.sql.false(), + default=False, + ), ) - session = op.get_bind() - session.execute("UPDATE licensepools SET should_track_playtime=false") - op.alter_column("licensepools", "should_track_playtime", nullable=False) + op.alter_column("licensepools", "should_track_playtime", server_default=None) def downgrade() -> None: From c2bcfebe3eb0311fc7123a02520a9195b81f35cf Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 12:43:21 -0400 Subject: [PATCH 239/262] Add ci script to check for outdated db migrations --- docker/ci/test_migration.sh | 40 +++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) create mode 100755 docker/ci/test_migration.sh diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh new file mode 100755 index 0000000000..1f2c3d9de8 --- /dev/null +++ b/docker/ci/test_migration.sh @@ -0,0 +1,40 @@ +#!/bin/bash + +if ! git diff --quiet; then + echo "ERROR: You have uncommitted changes. These changes will be lost if you run this script." + echo " Please commit or stash your changes and try again." 
+ exit 1 +fi + +# Find the currently checked out commit +current_commit=$(git show -s --format=%H) + +echo "Current commit: ${current_commit}" + +# Find the first migration file +first_migration_id=$(alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) +first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") + +echo "First migration file: ${first_migration_file}" + +# Find the git commit before the first migration file was added +before_migration_commit=$(git log --follow --format=%P --reverse "${first_migration_file}" | head -n 1) + +echo "Before migration commit: ${before_migration_commit}" + +# Checkout this commit +git checkout "${before_migration_commit}" + +# Start containers and initialize the database +docker-compose up -d pg +export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@pg:5432/circ" +bin/util/initialize_instance + +# Checkout the current commit +git checkout "${current_commit}" + +# Migrate up to the current commit +alembic upgrade head + +# Now check that the database matches what we would expect +alembic check From 991fb7785d2ba10ae1dd4a7970438b8f8fb2dedb Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 12:46:42 -0400 Subject: [PATCH 240/262] Change DB URL --- docker/ci/test_migration.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index 1f2c3d9de8..ab892dec84 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -23,11 +23,11 @@ before_migration_commit=$(git log --follow --format=%P --reverse "${first_migrat echo "Before migration commit: ${before_migration_commit}" # Checkout this commit -git checkout "${before_migration_commit}" +git checkout -q "${before_migration_commit}" # Start containers and initialize the database docker-compose up -d pg -export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@pg:5432/circ" +export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@localhost:5432/circ" bin/util/initialize_instance # Checkout the current commit From 4fd97c24a9052e75bca9b8f6b8692236af42f3b0 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 12:51:08 -0400 Subject: [PATCH 241/262] Try again --- docker/ci/test_migration.sh | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index ab892dec84..cb71090c43 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -17,16 +17,16 @@ first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") echo "First migration file: ${first_migration_file}" -# Find the git commit before the first migration file was added -before_migration_commit=$(git log --follow --format=%P --reverse "${first_migration_file}" | head -n 1) +# Find the git commit where this migration was introduced +first_migration_commit=$(git log --follow --format=%H --reverse "${first_migration_file}" | head -n 1) -echo "Before migration commit: ${before_migration_commit}" +echo "First migration commit: ${first_migration_commit}" # Checkout this commit -git checkout -q "${before_migration_commit}" +git checkout -q "${first_migration_commit}" # Start containers and initialize the database -docker-compose up -d pg +docker-compose up -d --pull-quiet pg export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@localhost:5432/circ" bin/util/initialize_instance From 219cccf0bedda711622959c09b0d81509fa2089b Mon Sep 17 00:00:00 2001 From: 
Jonathan Green Date: Fri, 5 Jan 2024 12:57:37 -0400 Subject: [PATCH 242/262] save branch --- docker/ci/test_migration.sh | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index cb71090c43..fff0108a6f 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -7,9 +7,9 @@ if ! git diff --quiet; then fi # Find the currently checked out commit -current_commit=$(git show -s --format=%H) +current_branch=$(git symbolic-ref --short HEAD) -echo "Current commit: ${current_commit}" +echo "Current branch: ${current_branch}" # Find the first migration file first_migration_id=$(alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") @@ -31,7 +31,7 @@ export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@localhost:5432/c bin/util/initialize_instance # Checkout the current commit -git checkout "${current_commit}" +git checkout "${current_branch}" # Migrate up to the current commit alembic upgrade head From fd51fd8cc1042b00bd5569e1948665216ad0bb06 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:15:58 -0400 Subject: [PATCH 243/262] Run everything in container --- docker/ci/test_migration.sh | 18 ++++++++++-------- 1 file changed, 10 insertions(+), 8 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index fff0108a6f..9185c198c3 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -1,5 +1,11 @@ #!/bin/bash +run_in_container() +{ + CMD=$1 + docker compose run --quiet-pull --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" +} + if ! git diff --quiet; then echo "ERROR: You have uncommitted changes. These changes will be lost if you run this script." echo "       Please commit or stash your changes and try again."
@@ -26,15 +32,11 @@ echo "First migration commit: ${first_migration_commit}" git checkout -q "${first_migration_commit}" # Start containers and initialize the database -docker-compose up -d --pull-quiet pg -export SIMPLIFIED_PRODUCTION_DATABASE="postgresql://palace:test@localhost:5432/circ" -bin/util/initialize_instance +docker compose up -d --quiet-pull pg +run_in_container "./bin/util/initialize_instance" # Checkout the current commit git checkout "${current_branch}" -# Migrate up to the current commit -alembic upgrade head - -# Now check that the database matches what we would expect -alembic check +# Migrate up to the current commit and check if the database is in sync +run_in_container "alembic upgrade head && alembic check" From e99cb5a5af3fd4de0ed9534e7abf9ffd400e6ad3 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:16:51 -0400 Subject: [PATCH 244/262] Run everything in container --- docker/ci/test_migration.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index 9185c198c3..7cff8e45ce 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -32,6 +32,7 @@ echo "First migration commit: ${first_migration_commit}" git checkout -q "${first_migration_commit}" # Start containers and initialize the database +docker compose down docker compose up -d --quiet-pull pg run_in_container "./bin/util/initialize_instance" From 7cedb315256ff2a0d67037cf159bbfc009cdd457 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:18:34 -0400 Subject: [PATCH 245/262] Foo test --- core/model/collection.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/model/collection.py b/core/model/collection.py index b9876ba4ed..6754d1eb74 100644 --- a/core/model/collection.py +++ b/core/model/collection.py @@ -76,7 +76,7 @@ class Collection(Base, HasSessionCache): # ordinary Overdrive collection. It uses the same access key and # secret as the Overdrive collection, but it has a distinct # external_account_id. - parent_id = Column(Integer, ForeignKey("collections.id"), index=True) + parent_id = Column(Integer, ForeignKey("collections.id"), index=False) parent: Collection = relationship( "Collection", remote_side=[id], back_populates="children" ) From 0603d311ebc79994f53b38a0252c840824794ae0 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:23:48 -0400 Subject: [PATCH 246/262] Make sure containers are stopped. --- docker/ci/test_migration.sh | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index 7cff8e45ce..24e675b719 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -41,3 +41,15 @@ git checkout "${current_branch}" # Migrate up to the current commit and check if the database is in sync run_in_container "alembic upgrade head && alembic check" +exit_code=$? + +if [[ $exit_code -eq 0 ]]; then + echo "Database is in sync." +else + echo "ERROR: Database is out of sync. Please generate an alembic migration."
+fi + +# Stop containers +docker compose down + +exit $exit_code From 8f9fc39cf30d411b690a8aa66da79e75f1a66bdf Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:29:42 -0400 Subject: [PATCH 247/262] Quiet output --- docker/ci/test_migration.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index 24e675b719..f6631672ed 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -3,7 +3,7 @@ run_in_container() { CMD=$1 - docker compose run --quiet-pull --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" + docker compose --log-level ERROR run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" } if ! git diff --quiet; then @@ -12,7 +12,7 @@ if ! git diff --quiet; then exit 1 fi -# Find the currently checked out commit +# Find the currently checked out branch current_branch=$(git symbolic-ref --short HEAD) echo "Current branch: ${current_branch}" @@ -33,7 +33,7 @@ git checkout -q "${first_migration_commit}" # Start containers and initialize the database docker compose down -docker compose up -d --quiet-pull pg +docker compose --log-level ERROR up -d pg run_in_container "./bin/util/initialize_instance" # Checkout the current commit @@ -46,7 +46,7 @@ exit_code=$? if [[ $exit_code -eq 0 ]]; then echo "Database is in sync." else - echo "ERROR: Database is out of sync. Please generate an alembic migration." + echo "ERROR: Database is out of sync. A new migration is required." fi # Stop containers From 2dd70505512636b879170da6f332c0df3fe239d1 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:35:14 -0400 Subject: [PATCH 248/262] quiet output --- docker/ci/test_migration.sh | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index f6631672ed..058f0f5d5d 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -3,7 +3,7 @@ run_in_container() { CMD=$1 - docker compose --log-level ERROR run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" + docker --log-level ERROR compose --progress quiet run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" } if ! git diff --quiet; then @@ -33,7 +33,7 @@ git checkout -q "${first_migration_commit}" # Start containers and initialize the database docker compose down -docker compose --log-level ERROR up -d pg +docker --log-level ERROR compose --progress quiet up -d pg run_in_container "./bin/util/initialize_instance" # Checkout the current commit From 7df5cfbd5123120d25b006d6876d4614c0b4ab73 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:37:35 -0400 Subject: [PATCH 249/262] eAsier to read --- docker/ci/test_migration.sh | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index 058f0f5d5d..d7bbd7abf8 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -1,9 +1,13 @@ #!/bin/bash +compose-cmd() { + docker --log-level ERROR compose --progress quiet "$@" +} + run_in_container() { CMD=$1 - docker --log-level ERROR compose --progress quiet run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" + compose-cmd run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" } if ! 
git diff --quiet; then @@ -32,8 +36,8 @@ echo "First migration commit: ${first_migration_commit}" git checkout -q "${first_migration_commit}" # Start containers and initialize the database -docker compose down -docker --log-level ERROR compose --progress quiet up -d pg +compose-cmd down +compose-cmd up -d pg run_in_container "./bin/util/initialize_instance" # Checkout the current commit @@ -50,6 +54,6 @@ else fi # Stop containers -docker compose down +compose-cmd down exit $exit_code From 9edc159ca07935ee7fd09ab4b10f23b6bf5d4550 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 13:46:20 -0400 Subject: [PATCH 250/262] Test migration --- docker/ci/test_migration.sh | 33 ++++++++++++++++++++------------- 1 file changed, 20 insertions(+), 13 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index d7bbd7abf8..c9c08e4b88 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -26,29 +26,36 @@ first_migration_id=$(alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") echo "First migration file: ${first_migration_file}" +echo "" -# Find the git commit where this migration was introduced -first_migration_commit=$(git log --follow --format=%H --reverse "${first_migration_file}" | head -n 1) +# Find the git commit before this migration file was introduced +first_migration_parent_commit=$(git log --follow --format=%P --reverse "${first_migration_file}" | head -n 1) -echo "First migration commit: ${first_migration_commit}" - -# Checkout this commit -git checkout -q "${first_migration_commit}" - -# Start containers and initialize the database +echo "Starting containers and initializing database at commit ${first_migration_parent_commit}" +git checkout -q "${first_migration_parent_commit}" compose-cmd down compose-cmd up -d pg run_in_container "./bin/util/initialize_instance" - -# Checkout the current commit -git checkout "${current_branch}" +echo "" # Migrate up to the current commit and check if the database is in sync -run_in_container "alembic upgrade head && alembic check" +git checkout -q "${current_branch}" +echo "Running database migrations on branch ${current_branch}" +run_in_container "alembic upgrade head" +exit_code=$? +if [[ $exit_code -ne 0 ]]; then + echo "ERROR: Database migration failed." + exit $exit_code +fi +echo "" + +echo "Checking database status" +run_in_container "alembic check" exit_code=$? +echo "" if [[ $exit_code -eq 0 ]]; then - echo "Database is in sync." + echo "SUCCESS: Database is in sync." else echo "ERROR: Database is out of sync. A new migration is required." 
fi From 40b1c212b409ea4b98bd12acff31a2f84c58e23a Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:10:29 -0400 Subject: [PATCH 251/262] Migrations --- docker/ci/test_migration.sh | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index c9c08e4b88..c56a4282a4 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -28,11 +28,11 @@ first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") echo "First migration file: ${first_migration_file}" echo "" -# Find the git commit before this migration file was introduced -first_migration_parent_commit=$(git log --follow --format=%P --reverse "${first_migration_file}" | head -n 1) +# Find the git commit where the first migration file was added +first_migration_commit=$(git log --follow --format=%H --reverse "${first_migration_file}" | head -n 1) -echo "Starting containers and initializing database at commit ${first_migration_parent_commit}" -git checkout -q "${first_migration_parent_commit}" +echo "Starting containers and initializing database at commit ${first_migration_commit}" +git checkout -q "${first_migration_commit}" compose-cmd down compose-cmd up -d pg run_in_container "./bin/util/initialize_instance" From 6bc25ca6c6b630f2e83c3312603fa890723e1526 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:19:54 -0400 Subject: [PATCH 252/262] Give it a go --- .github/workflows/test-build.yml | 33 ++++++++++++++++++++++++++++++++ docker/ci/test_migration.sh | 12 ++++++------ 2 files changed, 39 insertions(+), 6 deletions(-) diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 0d9f848aa2..898d031fd9 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -110,6 +110,39 @@ jobs: files: ./coverage.xml flags: migration + docker-test-migrations: + name: Docker migration test + runs-on: ubuntu-latest + permissions: + contents: read + + # We want to run on external PRs, but not on our own internal PRs as they'll be run + # by the push to the branch. This prevents duplicated runs on internal PRs. + # Some discussion of this here: + # https://github.community/t/duplicate-checks-on-push-and-pull-request-simultaneous-event/18012 + if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository + + steps: + - uses: actions/checkout@v4 + with: + persist-credentials: false + fetch-depth: 0 + + # See comment here: https://github.com/actions/runner-images/issues/1187#issuecomment-686735760 + - name: Disable network offload + run: sudo ethtool -K eth0 tx off rx off + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Install Poetry + uses: ./.github/actions/poetry + + - name: Poetry install + run: poetry install + env: + POETRY_VIRTUALENVS_CREATE: false + docker-image-build: name: Docker build runs-on: ubuntu-latest diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index c56a4282a4..4efe959623 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -1,13 +1,13 @@ #!/bin/bash -compose-cmd() { +compose_cmd() { docker --log-level ERROR compose --progress quiet "$@" } run_in_container() { CMD=$1 - compose-cmd run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" + compose_cmd run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" } if ! 
git diff --quiet; then @@ -22,7 +22,7 @@ current_branch=$(git symbolic-ref --short HEAD) echo "Current branch: ${current_branch}" # Find the first migration file -first_migration_id=$(alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) +first_migration_id=$(run_in_container alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") echo "First migration file: ${first_migration_file}" @@ -33,8 +33,8 @@ first_migration_commit=$(git log --follow --format=%H --reverse "${first_migrati echo "Starting containers and initializing database at commit ${first_migration_commit}" git checkout -q "${first_migration_commit}" -compose-cmd down -compose-cmd up -d pg +compose_cmd down +compose_cmd up -d pg run_in_container "./bin/util/initialize_instance" echo "" @@ -61,6 +61,6 @@ else fi # Stop containers -compose-cmd down +compose_cmd down exit $exit_code From 1f9f91505ef830b826aa6eb7985a79ceae0690fa Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:22:22 -0400 Subject: [PATCH 253/262] Test --- docker/ci/test_migration.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migration.sh index 4efe959623..e45f5cb720 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migration.sh @@ -22,7 +22,7 @@ current_branch=$(git symbolic-ref --short HEAD) echo "Current branch: ${current_branch}" # Find the first migration file -first_migration_id=$(run_in_container alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) +first_migration_id=$(alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") echo "First migration file: ${first_migration_file}" From 809c4a104ad7fdd1e4d99344279b9d88cbe381a6 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:28:15 -0400 Subject: [PATCH 254/262] Add a bit of error checking --- .github/workflows/test-build.yml | 3 +++ docker/ci/{test_migration.sh => test_migrations.sh} | 9 +++++++++ 2 files changed, 12 insertions(+) rename docker/ci/{test_migration.sh => test_migrations.sh} (89%) diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index 898d031fd9..d5bdd47bc4 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -143,6 +143,9 @@ jobs: env: POETRY_VIRTUALENVS_CREATE: false + - name: Test migrations + run: ./docker/ci/test_migrations.sh + docker-image-build: name: Docker build runs-on: ubuntu-latest diff --git a/docker/ci/test_migration.sh b/docker/ci/test_migrations.sh similarity index 89% rename from docker/ci/test_migration.sh rename to docker/ci/test_migrations.sh index e45f5cb720..72447aa047 100755 --- a/docker/ci/test_migration.sh +++ b/docker/ci/test_migrations.sh @@ -23,7 +23,16 @@ echo "Current branch: ${current_branch}" # Find the first migration file first_migration_id=$(alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) +if [[ -z $first_migration_id ]]; then + echo "ERROR: Could not find first migration." + exit 1 +fi + first_migration_file=$(find alembic/versions -name "*${first_migration_id}*.py") +if [[ -z $first_migration_file ]]; then + echo "ERROR: Could not find first migration file." 
+ exit 1 +fi echo "First migration file: ${first_migration_file}" echo "" From a8c73fa90f6c60279c2427b2c280fb116eca428f Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:33:19 -0400 Subject: [PATCH 255/262] foo --- docker/ci/test_migrations.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh index 72447aa047..82c59b44b2 100755 --- a/docker/ci/test_migrations.sh +++ b/docker/ci/test_migrations.sh @@ -22,7 +22,7 @@ current_branch=$(git symbolic-ref --short HEAD) echo "Current branch: ${current_branch}" # Find the first migration file -first_migration_id=$(alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) +first_migration_id=$(run_in_container alembic history -r'base:base+1' -v | head -n 1 | cut -d ' ' -f2) if [[ -z $first_migration_id ]]; then echo "ERROR: Could not find first migration." exit 1 From cada1b1cb88f05e5be5070823ca9b94d9c2e987e Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:34:28 -0400 Subject: [PATCH 256/262] foo --- docker/ci/test_migrations.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh index 82c59b44b2..19a85028ec 100755 --- a/docker/ci/test_migrations.sh +++ b/docker/ci/test_migrations.sh @@ -1,4 +1,4 @@ -#!/bin/bash +#!/bin/bash -x compose_cmd() { docker --log-level ERROR compose --progress quiet "$@" From c6b442f6f63b64bd6389325d492ce871e6e0cc45 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:36:31 -0400 Subject: [PATCH 257/262] bar --- docker/ci/test_migrations.sh | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh index 19a85028ec..6582a85329 100755 --- a/docker/ci/test_migrations.sh +++ b/docker/ci/test_migrations.sh @@ -6,8 +6,7 @@ compose_cmd() { run_in_container() { - CMD=$1 - compose_cmd run --build --rm webapp /bin/bash -c "source env/bin/activate && $CMD" + compose_cmd run --build --rm webapp /bin/bash -c "source env/bin/activate && $*" } if ! git diff --quiet; then From 05ee1049892d3035912951fe90fafae60bbe018d Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:44:48 -0400 Subject: [PATCH 258/262] Try adding to a CI build. --- .github/workflows/test-build.yml | 8 -------- docker/ci/test_migrations.sh | 17 ++++++++++++++++- 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index d5bdd47bc4..cbc007c0c1 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -135,14 +135,6 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - - name: Install Poetry - uses: ./.github/actions/poetry - - - name: Poetry install - run: poetry install - env: - POETRY_VIRTUALENVS_CREATE: false - - name: Test migrations run: ./docker/ci/test_migrations.sh diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh index 6582a85329..14de5d03e8 100755 --- a/docker/ci/test_migrations.sh +++ b/docker/ci/test_migrations.sh @@ -1,4 +1,19 @@ -#!/bin/bash -x +#!/bin/bash + +# This script makes sure that our database migrations bring the database up to date +# so that the resulting database is the same as if we had initialized a new instance. +# +# This is done by checking out the an older version of our codebase. The commit when +# the first migration was added and initializing a new instance. 
Then we check out +# the current version of our codebase and run the migrations. If the database is in +# sync, then the migrations are up to date. If the database is out of sync, then +# a new migration is required. +# +# This test is cannot be added to the normal migration test suite since it requires +# manipulating the git history and checking out older versions of the codebase. +# +# All of the commands in this script are run inside a docker-compose environment. + compose_cmd() { docker --log-level ERROR compose --progress quiet "$@" From dba634acf071551af5966895868129836a790773 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 14:50:54 -0400 Subject: [PATCH 259/262] dockee migrations --- .github/workflows/test-build.yml | 6 ------ 1 file changed, 6 deletions(-) diff --git a/.github/workflows/test-build.yml b/.github/workflows/test-build.yml index cbc007c0c1..5cf654c0a0 100644 --- a/.github/workflows/test-build.yml +++ b/.github/workflows/test-build.yml @@ -116,12 +116,6 @@ jobs: permissions: contents: read - # We want to run on external PRs, but not on our own internal PRs as they'll be run - # by the push to the branch. This prevents duplicated runs on internal PRs. - # Some discussion of this here: - # https://github.community/t/duplicate-checks-on-push-and-pull-request-simultaneous-event/18012 - if: github.event_name == 'push' || github.event.pull_request.head.repo.full_name != github.repository - steps: - uses: actions/checkout@v4 with: From a85a7820a4a3cf76e8bf01cfbe253a8272ab5570 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 15:00:30 -0400 Subject: [PATCH 260/262] Another test --- docker/ci/test_migrations.sh | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh index 14de5d03e8..3d8c9833a9 100755 --- a/docker/ci/test_migrations.sh +++ b/docker/ci/test_migrations.sh @@ -33,6 +33,14 @@ fi # Find the currently checked out branch current_branch=$(git symbolic-ref --short HEAD) +# If we are not on a branch, then we are in a detached HEAD state, so +# we use the commit hash instead. This happens in CI when being run +# against a PR instead of a branch. +# See: https://stackoverflow.com/questions/69935511/how-do-i-save-the-current-head-so-i-can-check-it-back-out-in-the-same-way-later +if [[ -z $? ]]; then + current_branch=$(git rev-parse HEAD) +fi + echo "Current branch: ${current_branch}" # Find the first migration file From c7ebe4c5b0c22349db459c42fb14a863339f89ae Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 15:03:42 -0400 Subject: [PATCH 261/262] Another test --- docker/ci/test_migrations.sh | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh index 3d8c9833a9..3856bdd86e 100755 --- a/docker/ci/test_migrations.sh +++ b/docker/ci/test_migrations.sh @@ -32,12 +32,13 @@ fi # Find the currently checked out branch current_branch=$(git symbolic-ref --short HEAD) +current_branch_exit_code=$? # If we are not on a branch, then we are in a detached HEAD state, so # we use the commit hash instead. This happens in CI when being run # against a PR instead of a branch. # See: https://stackoverflow.com/questions/69935511/how-do-i-save-the-current-head-so-i-can-check-it-back-out-in-the-same-way-later -if [[ -z $? 
]]; then +if [[ $current_branch_exit_code -ne 0 ]]; then current_branch=$(git rev-parse HEAD) fi From c53edc840a12668d349b1e5fdf7ae30d562c38d4 Mon Sep 17 00:00:00 2001 From: Jonathan Green Date: Fri, 5 Jan 2024 15:05:19 -0400 Subject: [PATCH 262/262] Test for PR --- docker/ci/test_migrations.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docker/ci/test_migrations.sh b/docker/ci/test_migrations.sh index 3856bdd86e..dce2a803b0 100755 --- a/docker/ci/test_migrations.sh +++ b/docker/ci/test_migrations.sh @@ -31,7 +31,7 @@ if ! git diff --quiet; then fi # Find the currently checked out branch -current_branch=$(git symbolic-ref --short HEAD) +current_branch=$(git symbolic-ref -q --short HEAD) current_branch_exit_code=$? # If we are not on a branch, then we are in a detached HEAD state, so