diff --git a/.env.tools b/.env.tools new file mode 100644 index 00000000..5677fcbc --- /dev/null +++ b/.env.tools @@ -0,0 +1 @@ +export TOOLS_BUILDNUMBER=20241115.3 diff --git a/.github/workflows/python-package.yml b/.github/workflows/ci.yml similarity index 94% rename from .github/workflows/python-package.yml rename to .github/workflows/ci.yml index 9d5771ac..060d6235 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/ci.yml @@ -1,16 +1,17 @@ # This workflow will install Python dependencies, run tests and lint with a variety of Python versions # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions -name: Python Quality Control +name: Build and test -on: [pull_request] +on: + workflow_call: jobs: build: strategy: fail-fast: false matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12" ] + python-version: ["3.9", "3.10", "3.11", "3.12", "3.13" ] os: [ubuntu-latest, windows-latest] runs-on: ${{ matrix.os }} steps: @@ -66,7 +67,6 @@ jobs: ./scripts/unittests.sh shell: bash - name: Run type-hint checks - if: ${{ matrix.python-version != '3.12' }} run: | ./scripts/version.sh python3 -m pyright --stats archivist diff --git a/.github/workflows/merge-requires.yml b/.github/workflows/merge-requires.yml new file mode 100644 index 00000000..85c6166c --- /dev/null +++ b/.github/workflows/merge-requires.yml @@ -0,0 +1,35 @@ +# This workflow configures the repository-specific choices of which CI builds +# must pass in order for a build to merge. This allows a single global +# terraform-configured rule to require a "well known" check in each +# repository, whilst granting repository stakeholders the ability to configure +# what workflows are appropriate to satisfy that check. +name: Merge Requires +on: + push: + +jobs: + ci: + if: ${{ github.event_name != 'release' }} + secrets: inherit + uses: ./.github/workflows/ci.yml + release: + if: ${{ github.event_name == 'release' }} + secrets: inherit + uses: ./.github/workflows/package.yml + + merge-checks-ok: + runs-on: ubuntu-latest + needs: [ci] + steps: + - name: ok + run: | + echo "It's working!" 
+ + release-checks-ok: + runs-on: ubuntu-latest + if: ${{ github.event_name == 'release' }} + needs: [ci, release] + steps: + - name: ok + run: | + echo "Houston we have cleared the tower" diff --git a/.github/workflows/python-publish.yml b/.github/workflows/package.yml similarity index 96% rename from .github/workflows/python-publish.yml rename to .github/workflows/package.yml index bc80a04c..aad45459 100644 --- a/.github/workflows/python-publish.yml +++ b/.github/workflows/package.yml @@ -1,14 +1,13 @@ # This workflow will upload a Python Package using Twine when a release is created # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries -name: Upload Python Package +name: Package and Publish on: - release: - types: [created] + workflow_call: jobs: - deploy: + build: runs-on: ubuntu-latest diff --git a/DEVELOPMENT.md b/DEVELOPMENT.md index 0e57efcb..7ac15a84 100644 --- a/DEVELOPMENT.md +++ b/DEVELOPMENT.md @@ -200,11 +200,11 @@ To run the unittests: task unittests ``` -##### Python 3.9 +##### Python 3.10 -To build the docker builder image with Python 3.9: +To build the docker builder image with Python 3.10: ```bash -task builder-3.9 +task builder-3.10 ``` To check the style @@ -217,11 +217,11 @@ To run the unittests: task unittests ``` -##### Python 3.10 +##### Python 3.11 -To build the docker builder image with Python 3.10: +To build the docker builder image with Python 3.11: ```bash -task builder-3.10 +task builder-3.11 ``` To check the style @@ -234,11 +234,11 @@ To run the unittests: task unittests ``` -##### Python 3.11 +##### Python 3.12 -To build the docker builder image with Python 3.11: +To build the docker builder image with Python 3.12: ```bash -task builder-3.11 +task builder-3.12 ``` To check the style @@ -251,11 +251,11 @@ To run the unittests: task unittests ``` -##### Python 3.12 +##### Python 3.13 -To build the docker builder image with Python 3.12: +To build the docker builder image with Python 3.13: ```bash -task builder-3.12 +task builder-3.13 ``` To check the style diff --git a/README.rst b/README.rst index 4cf6ce34..10525804 100644 --- a/README.rst +++ b/README.rst @@ -12,10 +12,10 @@ documented at https://docs.datatrails.ai Support ======= -This package currently is tested against Python versions 3.8,3.9,3.10,3.11 and 3.12. +This package currently is tested against Python versions 3.9,3.10,3.11, 3.12 and 3.13. -The current default version is 3.8 - this means that this package will not -use any features specific to versions 3.9 and later. +The current default version is 3.9 - this means that this package will not +use any features specific to versions 3.10 and later. After End of Life of a particular Python version, support is offered on a best effort basis. 
We may ask you to update your Python version to help solve the problem, diff --git a/Taskfile.yml b/Taskfile.yml index dc86435c..ace86856 100644 --- a/Taskfile.yml +++ b/Taskfile.yml @@ -17,11 +17,6 @@ tasks: - ./scripts/builder.sh pip-audit -r requirements.txt builder: - desc: Build a docker environment with the right dependencies and utilities - cmds: - - ./scripts/build.sh "3.8" - - builder-3.9: desc: Build a docker environment with the right dependencies and utilities cmds: - ./scripts/build.sh "3.9" @@ -41,6 +36,11 @@ tasks: cmds: - ./scripts/build.sh "3.12" + builder-3.13: + desc: Build a docker environment with the right dependencies and utilities + cmds: + - ./scripts/build.sh "3.13" + check: desc: Check the style, bug and quality of the code deps: [about] diff --git a/archivist/notebooks/Checking the Dog's Weight.ipynb b/archivist/notebooks/Checking the Dog's Weight.ipynb index eadd1bfc..2f1fa676 100644 --- a/archivist/notebooks/Checking the Dog's Weight.ipynb +++ b/archivist/notebooks/Checking the Dog's Weight.ipynb @@ -35,7 +35,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Create Artist and Album Release Info.ipynb b/archivist/notebooks/Create Artist and Album Release Info.ipynb index ec11f97f..b579840d 100644 --- a/archivist/notebooks/Create Artist and Album Release Info.ipynb +++ b/archivist/notebooks/Create Artist and Album Release Info.ipynb @@ -34,7 +34,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Feeding the Dog Hourly.ipynb b/archivist/notebooks/Feeding the Dog Hourly.ipynb index 5e796c94..9abe343d 100644 --- a/archivist/notebooks/Feeding the Dog Hourly.ipynb +++ b/archivist/notebooks/Feeding the Dog Hourly.ipynb @@ -35,7 +35,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb b/archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb index f15b6b45..4866b8eb 100644 --- a/archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb +++ b/archivist/notebooks/Feeding the Dog in a Timely Manner.ipynb @@ -35,7 +35,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Feeding the Dog.ipynb b/archivist/notebooks/Feeding the Dog.ipynb index 557dc8bc..f5d13007 100644 --- a/archivist/notebooks/Feeding the Dog.ipynb +++ b/archivist/notebooks/Feeding the Dog.ipynb @@ -35,7 +35,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb b/archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb index e79cb931..7654535f 100644 --- a/archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb +++ b/archivist/notebooks/Playing Fetch Every 5 Minutes.ipynb @@ -35,7 +35,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Sharing Album Release Info with Record Labels.ipynb b/archivist/notebooks/Sharing Album Release Info with Record Labels.ipynb index 71e6df17..81ee17e5 100644 --- a/archivist/notebooks/Sharing Album Release Info with Record Labels.ipynb 
+++ b/archivist/notebooks/Sharing Album Release Info with Record Labels.ipynb @@ -40,7 +40,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Sharing Album Release Info with User.ipynb b/archivist/notebooks/Sharing Album Release Info with User.ipynb index 3ca97268..3b831df2 100644 --- a/archivist/notebooks/Sharing Album Release Info with User.ipynb +++ b/archivist/notebooks/Sharing Album Release Info with User.ipynb @@ -34,7 +34,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Sharing Artist Asset with Record Labels.ipynb b/archivist/notebooks/Sharing Artist Asset with Record Labels.ipynb index fd144b4c..6c98b1e2 100644 --- a/archivist/notebooks/Sharing Artist Asset with Record Labels.ipynb +++ b/archivist/notebooks/Sharing Artist Asset with Record Labels.ipynb @@ -41,7 +41,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/archivist/notebooks/Sharing Artist Asset with User.ipynb b/archivist/notebooks/Sharing Artist Asset with User.ipynb index 3b9a71e6..f97fb258 100644 --- a/archivist/notebooks/Sharing Artist Asset with User.ipynb +++ b/archivist/notebooks/Sharing Artist Asset with User.ipynb @@ -34,7 +34,6 @@ "metadata": {}, "outputs": [], "source": [ - "\n", "from json import dumps as json_dumps\n", "from os import getenv\n", "\n", diff --git a/docs/conf.py b/docs/conf.py index 2b440d83..e438236c 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -35,6 +35,7 @@ "nbsphinx", "sphinx_gallery.load_style", "sphinx_rtd_theme", + "sphinxcontrib.googleanalytics", "sphinxcontrib.spelling", "sphinx.ext.autodoc", "sphinx.ext.autosectionlabel", @@ -62,6 +63,7 @@ source_suffix = [ '.md', '.rst'] +googleanalytics_id = 'G-7K46H3KK7N' # Provided by Google in your dashboard # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -84,7 +86,6 @@ html_theme = 'sphinx_rtd_theme' html_theme_options = { - 'analytics_id': 'G-7K46H3KK7N', # Provided by Google in your dashboard 'logo_only': True, } diff --git a/docs/features.rst b/docs/features.rst index d4f9658b..54a7c340 100644 --- a/docs/features.rst +++ b/docs/features.rst @@ -10,7 +10,7 @@ The definitive guide to the REST API is defined here: https://docs.datatrails.ai This python SDK offers a number of advantages over a simple REST api (in any language): - * versioned package for the python 3.8,3.9,3.10,3.11,3.12 ecosystem. + * versioned package for the python 3.9,3.10,3.11,3.12,3.13 ecosystems. * automatic confirmation of assets and events: just set **confirm=True** when creating the asset or event and a sophisticated retry and exponential backoff algorithm will take care of everything (optional). diff --git a/docs/notebooks/requirements.txt b/docs/notebooks/requirements.txt index 8e59ea40..61f1c130 100644 --- a/docs/notebooks/requirements.txt +++ b/docs/notebooks/requirements.txt @@ -1,16 +1,12 @@ # # jupyter notebooks # -# python 3.12 is unsupported as it reuires pyzmq 25.1.1 which -# is a dependency of notebook. -ipython~=8.12.3; python_version < '3.12' -# pin notebook at version 6. version7 does not support the various -# extensions - upgrade to notebook v7 is a future PR when -# it has stabilised. 
-notebook~=6.4; python_version < '3.12' -jupyter~=1.0; python_version < '3.12' -jupyter-console~=6.6; python_version < '3.12' -jupyter-contrib-nbextensions~=0.7; python_version < '3.12' -jupyter-nbextensions-configurator~=0.6; python_version < '3.12' -python-dotenv[cli]~=1.0; python_version < '3.12' +ipython~=8.13.0; python_version < '3.10' +ipython~=8.20; python_version >= '3.10' +notebook~=7.2 +jupyter~=1.0 +jupyter-console~=6.6 +jupyter-contrib-nbextensions~=0.7 +jupyter-nbextensions-configurator~=0.6 +python-dotenv[cli]~=1.0 diff --git a/docs/requirements.txt b/docs/requirements.txt index 0df4bed0..aad16a5b 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,11 +1,12 @@ # # this is used for pushing to github pages # -# sphinx 7.2 does not support 3.8 -sphinx~=7.1 -sphinx-rtd-theme~=1.3 +sphinx~=7.4; python_version < '3.10' +sphinx~=8.1; python_version >= '3.10' +sphinx-rtd-theme~=3.0 sphinxcontrib-spelling~=8.0 -sphinx-gallery~=0.14 +sphinxcontrib-googleanalytics~=0.4 +sphinx-gallery~=0.18 nbsphinx~=0.9 -r notebooks/requirements.txt diff --git a/functests/execattachments.py b/functests/execattachments.py index 4667394c..082151b1 100644 --- a/functests/execattachments.py +++ b/functests/execattachments.py @@ -175,8 +175,9 @@ def test_attachment_upload_and_download_strict(self): attachment = self.arch.attachments.upload(fd) file_uuid = attachment["identity"] - with open(self.DATATRAILS_IMAGE_DOWNLOAD_PATH, "wb") as fd, self.assertRaises( - ArchivistBadRequestError + with ( + open(self.DATATRAILS_IMAGE_DOWNLOAD_PATH, "wb") as fd, + self.assertRaises(ArchivistBadRequestError), ): attachment = self.arch.attachments.download( file_uuid, fd, params={"strict": "true"} diff --git a/pyproject.toml b/pyproject.toml index 1ef43217..2ef3a0b6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,6 +2,9 @@ requires = [ "setuptools>=61.0", "wheel", "setuptools-git-versioning", ] build-backend = "setuptools.build_meta" +[tool.black] +target-version = ["py39", "py310", "py311", "py312", "py313"] + [tool.coverage.report] fail_under = 100 exclude_lines = [ @@ -260,6 +263,7 @@ valid-metaclass-classmethod-first-arg = ["cls"] # Maximum number of arguments for function / method. max-args = 7 +max-positional-arguments = 6 # Maximum number of attributes for a class (see R0902). 
max-attributes = 7 diff --git a/requirements-dev.txt b/requirements-dev.txt index fa7d06e5..664d65d3 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,23 +1,23 @@ -r requirements.txt # code quality -autopep8~=2.0 -black[jupyter]~=24.1 -coverage[toml]~=7.4 +autopep8~=2.3 +black[jupyter]~=24.10 +coverage[toml]~=7.6 pip-audit~=2.7 -pycodestyle~=2.11 -pylint~=3.0 +pycodestyle~=2.12 +pylint~=3.3 pyright~=1.1 -ruff~=0.2 +ruff~=0.7 unittest-xml-reporting~=3.2 -testbook~=0.3 +testbook~=0.4 # analyze dependencies -pipdeptree~=2.13 +pipdeptree~=2.23 # uploading to pypi -build~=1.0 -twine~=5.0 +build~=1.2 +twine~=5.1 # for sbom.xml file xq~=0.0 diff --git a/requirements.txt b/requirements.txt index 2f48c9fd..de1a02c2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -8,8 +8,8 @@ flatten-dict~=0.4 iso8601~=2.1 Jinja2~=3.1 pyaml-env~=1.2 -requests~=2.32.0 +requests~=2.32 requests-toolbelt~=1.0 rfc3339~=6.2 -xmltodict~=0.13 +xmltodict~=0.14 diff --git a/unittests/testarchivist.py b/unittests/testarchivist.py index a4dc691b..884f4962 100644 --- a/unittests/testarchivist.py +++ b/unittests/testarchivist.py @@ -167,10 +167,13 @@ def test_archivist_token(self): """ Test archivist creation with app registration """ - with Archivist( - "https://app.datatrails.ai", - (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), - ) as arch, mock.patch.object(arch.appidp, "token") as mock_token: + with ( + Archivist( + "https://app.datatrails.ai", + (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), + ) as arch, + mock.patch.object(arch.appidp, "token") as mock_token, + ): mock_token.return_value = RESPONSE self.assertEqual( arch.auth, @@ -192,10 +195,13 @@ def test_archivist_appidp_token(self): """ Test archivist creation with appidp token """ - with Archivist( - "https://app.datatrails.ai", - (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), - ) as arch, mock.patch.object(arch.appidp, "token") as mock_token: + with ( + Archivist( + "https://app.datatrails.ai", + (DATATRAILS_APPREG_CLIENT, DATATRAILS_APPREG_SECRET), + ) as arch, + mock.patch.object(arch.appidp, "token") as mock_token, + ): mock_token.return_value = NONE_RESPONSE with self.assertRaises(ArchivistError): _ = arch.auth diff --git a/unittests/testassets.py b/unittests/testassets.py index 1db523f8..409eec3c 100644 --- a/unittests/testassets.py +++ b/unittests/testassets.py @@ -190,9 +190,10 @@ def test_assets_create_with_confirmation_no_confirmed_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.return_value = MockResponse(200, **RESPONSE_NO_CONFIRMATION) @@ -203,9 +204,10 @@ def test_assets_create_with_confirmation_failed_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.side_effect = [ MockResponse(200, **RESPONSE_PENDING), @@ -218,9 +220,10 @@ def test_assets_create_with_confirmation_always_pending_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, 
"post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.side_effect = [ MockResponse(200, **RESPONSE_PENDING), @@ -244,9 +247,10 @@ def test_assets_create_with_confirmation(self): """ Test asset creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.return_value = MockResponse(200, **RESPONSE) asset = self.arch.assets.create(attrs=ATTRS, confirm=True) @@ -260,9 +264,10 @@ def test_assets_create_with_confirmation_unequivocal(self): """ Test asset creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE_UNEQUIVOCAL) mock_get.return_value = MockResponse(200, **RESPONSE_UNEQUIVOCAL) asset = self.arch.assets.create(attrs=ATTRS, confirm=True) @@ -276,9 +281,10 @@ def test_assets_create_with_explicit_confirmation(self): """ Test asset creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.return_value = MockResponse(200, **RESPONSE) asset = self.arch.assets.create(attrs=ATTRS, confirm=False) @@ -293,9 +299,10 @@ def test_assets_create_with_confirmation_pending_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.side_effect = [ MockResponse(200, **RESPONSE_PENDING), @@ -318,9 +325,10 @@ def test_assets_create_if_not_exists_existing_asset(self): """ Test asset creation """ - with mock.patch.object( - self.arch.session, "post", autospec=True - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post", autospec=True) as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_get.return_value = MockResponse( 200, assets=[ @@ -374,15 +382,14 @@ def common_assets_create_if_not_exists_nonexistent_asset( """ Test asset creation """ - with mock.patch.object( - self.arch.session, "post", autospec=True - ) as mock_post, mock.patch.object( - self.arch.session, "get" - ) as mock_get, mock.patch.object( - self.arch.locations, "create_if_not_exists" - ) as mock_location, mock.patch.object( - self.arch.attachments, "create" - ) as mock_attachments: + with ( + mock.patch.object(self.arch.session, "post", autospec=True) as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + mock.patch.object( + self.arch.locations, "create_if_not_exists" + ) 
as mock_location, + mock.patch.object(self.arch.attachments, "create") as mock_attachments, + ): mock_get.side_effect = ArchivistNotFoundError mock_post.return_value = MockResponse(200, **resp) if loc_resp is not None: diff --git a/unittests/testcompliance.py b/unittests/testcompliance.py index ffcd9d52..67263ab3 100644 --- a/unittests/testcompliance.py +++ b/unittests/testcompliance.py @@ -146,9 +146,10 @@ def test_compliance_with_report(self): """ Test compliance """ - with mock.patch.object(self.arch.session, "get") as mock_get, mock.patch.object( - self.arch.compliance_policies, "read" - ) as mock_read: + with ( + mock.patch.object(self.arch.session, "get") as mock_get, + mock.patch.object(self.arch.compliance_policies, "read") as mock_read, + ): mock_read.return_value = MockResponse(200, **POLICY) mock_response = MockResponse( 200, diff --git a/unittests/testeventsconfirm.py b/unittests/testeventsconfirm.py index 87c12468..d518c916 100644 --- a/unittests/testeventsconfirm.py +++ b/unittests/testeventsconfirm.py @@ -38,9 +38,10 @@ def test_events_create_with_confirmation(self): """ Test event creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.return_value = MockResponse(200, **RESPONSE) @@ -55,9 +56,10 @@ def test_events_create_with_confirmation_pending_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.side_effect = [ MockResponse(200, **RESPONSE_PENDING), diff --git a/unittests/testeventscreate.py b/unittests/testeventscreate.py index b59719e8..0ca9732c 100644 --- a/unittests/testeventscreate.py +++ b/unittests/testeventscreate.py @@ -108,11 +108,12 @@ def test_events_create_with_upload_attachments(self): """ Test event creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object( - self.arch.attachments, "create" - ) as mock_attachments_create: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object( + self.arch.attachments, "create" + ) as mock_attachments_create, + ): mock_post.return_value = MockResponse(200, **RESPONSE_WITH_ATTACHMENTS) mock_attachments_create.return_value = ATTACHMENTS @@ -152,13 +153,13 @@ def test_events_create_with_upload_sbom_as_attachment(self): """ Test event creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object( - self.arch.attachments, "create" - ) as mock_attachments_create, mock.patch( - "archivist.events.sboms_parse" - ) as mock_sboms_parse: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object( + self.arch.attachments, "create" + ) as mock_attachments_create, + mock.patch("archivist.events.sboms_parse") as mock_sboms_parse, + ): mock_post.return_value = MockResponse(200, **RESPONSE_WITH_SBOMATTACHMENT) mock_sboms_parse.return_value = SBOM_RESULT mock_attachments_create.return_value = ATTACHMENTS @@ -199,11 +200,12 @@ def test_events_create_with_location(self): """ Test event creation """ - 
with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object( - self.arch.locations, "create_if_not_exists" - ) as mock_location_create: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object( + self.arch.locations, "create_if_not_exists" + ) as mock_location_create, + ): mock_post.return_value = MockResponse(200, **RESPONSE_WITH_LOCATION) mock_location_create.return_value = LOCATION, True @@ -243,9 +245,10 @@ def test_events_create_with_location_identity(self): """ Test event creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.locations, "create_if_not_exists"): + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.locations, "create_if_not_exists"), + ): mock_post.return_value = MockResponse(200, **RESPONSE_WITH_LOCATION) event = self.arch.events.create_from_data( @@ -327,9 +330,10 @@ def test_events_create_with_explicit_confirmation(self): """ Test event creation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.return_value = MockResponse(200, **RESPONSE) @@ -345,9 +349,10 @@ def test_events_create_with_confirmation_no_confirmed_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.return_value = MockResponse(200, **RESPONSE_NO_CONFIRMATION) @@ -358,9 +363,10 @@ def test_events_create_with_confirmation_failed_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.side_effect = [ MockResponse(200, **RESPONSE_PENDING), @@ -373,9 +379,10 @@ def test_events_create_with_confirmation_always_pending_status(self): """ Test asset confirmation """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post, mock.patch.object(self.arch.session, "get") as mock_get: + with ( + mock.patch.object(self.arch.session, "post") as mock_post, + mock.patch.object(self.arch.session, "get") as mock_get, + ): mock_post.return_value = MockResponse(200, **RESPONSE) mock_get.side_effect = [ MockResponse(200, **RESPONSE_PENDING), diff --git a/unittests/testlocations.py b/unittests/testlocations.py index 86fa0ae0..f60538d5 100644 --- a/unittests/testlocations.py +++ b/unittests/testlocations.py @@ -206,9 +206,10 @@ def test_locations_create_if_not_exists_existing_location(self): """ Test location creation """ - with mock.patch.object(self.arch.session, "get") as mock_get, mock.patch.object( - self.arch.session, "post" - ) as mock_post: + with ( + mock.patch.object(self.arch.session, "get") as mock_get, + mock.patch.object(self.arch.session, "post") as mock_post, + ): mock_get.return_value = MockResponse( 200, locations=[ @@ -254,9 +255,10 
@@ def test_locations_create_if_not_exists_nonexistent_location(self): """ Test location creation """ - with mock.patch.object(self.arch.session, "get") as mock_get, mock.patch.object( - self.arch.session, "post" - ) as mock_post: + with ( + mock.patch.object(self.arch.session, "get") as mock_get, + mock.patch.object(self.arch.session, "post") as mock_post, + ): mock_get.side_effect = ArchivistNotFoundError mock_post.return_value = MockResponse(200, **RESPONSE) location, existed = self.arch.locations.create_if_not_exists( @@ -298,9 +300,10 @@ def test_locations_create_if_not_exists_nonexistent_location_selector_noattribut """ Test location creation """ - with mock.patch.object(self.arch.session, "get") as mock_get, mock.patch.object( - self.arch.session, "post" - ) as mock_post: + with ( + mock.patch.object(self.arch.session, "get") as mock_get, + mock.patch.object(self.arch.session, "post") as mock_post, + ): mock_get.side_effect = ArchivistNotFoundError mock_post.return_value = MockResponse(200, **RESPONSE) location, existed = self.arch.locations.create_if_not_exists( diff --git a/unittests/testrunnerassets.py b/unittests/testrunnerassets.py index 5e1b2b93..d9d31e05 100644 --- a/unittests/testrunnerassets.py +++ b/unittests/testrunnerassets.py @@ -261,11 +261,10 @@ def test_runner_events_list(self, mock_sleep): """ Test runner operation """ - with mock.patch.object( - self.arch.events, "list" - ) as mock_events_list, mock.patch.object( - self.arch.runner, "identity" - ) as mock_identity: + with ( + mock.patch.object(self.arch.events, "list") as mock_events_list, + mock.patch.object(self.arch.runner, "identity") as mock_identity, + ): mock_identity.return_value = EVENTS_LIST_ASSET_ID mock_events_list.return_value = event_generator(2) self.arch.runner( @@ -297,11 +296,10 @@ def test_runner_events_count(self, mock_sleep): """ Test runner operation """ - with mock.patch.object( - self.arch.events, "count" - ) as mock_events_count, mock.patch.object( - self.arch.runner, "identity" - ) as mock_identity: + with ( + mock.patch.object(self.arch.events, "count") as mock_events_count, + mock.patch.object(self.arch.runner, "identity") as mock_identity, + ): mock_identity.return_value = EVENTS_LIST_ASSET_ID mock_events_count.return_value = 2 self.arch.runner( diff --git a/unittests/testrunnercompliance.py b/unittests/testrunnercompliance.py index 023aea45..cd2aea2c 100644 --- a/unittests/testrunnercompliance.py +++ b/unittests/testrunnercompliance.py @@ -113,11 +113,14 @@ def test_runner_compliance_policies_create(self, mock_sleep): """ Test runner operation """ - with mock.patch.object( - self.arch.compliance_policies, "create_from_data" - ) as mock_compliance_policies_create, mock.patch.object( - self.arch.compliance_policies, "delete" - ) as mock_compliance_policies_delete: + with ( + mock.patch.object( + self.arch.compliance_policies, "create_from_data" + ) as mock_compliance_policies_create, + mock.patch.object( + self.arch.compliance_policies, "delete" + ) as mock_compliance_policies_delete, + ): mock_compliance_policies_create.return_value = CompliancePolicy( **COMPLIANCE_POLICIES_RESPONSE ) @@ -156,11 +159,12 @@ def test_runner_compliance_compliant_at(self, mock_sleep): """ Test runner operation """ - with mock.patch.object( - self.arch.compliance, "compliant_at" - ) as mock_compliance_compliant_at, mock.patch.object( - self.arch.runner, "identity" - ) as mock_identity: + with ( + mock.patch.object( + self.arch.compliance, "compliant_at" + ) as mock_compliance_compliant_at, + 
mock.patch.object(self.arch.runner, "identity") as mock_identity, + ): mock_identity.return_value = COMPLIANCE_COMPLIANT_AT_ID mock_compliance_compliant_at.return_value = Compliance( **COMPLIANCE_RESPONSE @@ -198,11 +202,12 @@ def test_runner_compliance_compliant_at_non_compliant(self, mock_sleep): """ Test runner operation """ - with mock.patch.object( - self.arch.compliance, "compliant_at" - ) as mock_compliance_compliant_at, mock.patch.object( - self.arch.runner, "identity" - ) as mock_identity: + with ( + mock.patch.object( + self.arch.compliance, "compliant_at" + ) as mock_compliance_compliant_at, + mock.patch.object(self.arch.runner, "identity") as mock_identity, + ): mock_identity.return_value = COMPLIANCE_COMPLIANT_AT_ID mock_compliance_compliant_at.return_value = Compliance( **COMPLIANCE_FALSE_RESPONSE diff --git a/unittests/testrunnerstep.py b/unittests/testrunnerstep.py index 2dc93ab6..1b889627 100644 --- a/unittests/testrunnerstep.py +++ b/unittests/testrunnerstep.py @@ -57,7 +57,7 @@ def test_runner_step_with_delete_method(self): "description": "Testing runner events list", "asset_label": "Existing Asset", "delete": True, - } + }, ) self.assertEqual( step.action, @@ -96,7 +96,7 @@ def test_runner_step(self): "description": "Testing runner events list", "asset_label": "Existing Asset", "delete": True, - } + }, ) self.assertEqual( step.action, @@ -182,7 +182,7 @@ def test_runner_step_location_label(self): "print_response": True, "description": "Testing runner events list", "location_label": "Existing Location", - } + }, ) self.assertEqual( step.action, diff --git a/unittests/testsubjects.py b/unittests/testsubjects.py index 38e66a3b..7b8b9d2e 100644 --- a/unittests/testsubjects.py +++ b/unittests/testsubjects.py @@ -156,15 +156,12 @@ def test_subjects_share(self): """ Test subject share """ - with mock.patch.object( - self.arch.session, "post" - ) as mock_post1, mock.patch.object( - self.arch.session, "get" - ) as mock_get1, mock.patch.object( - self.arch2.session, "post" - ) as mock_post2, mock.patch.object( - self.arch2.session, "get" - ) as mock_get2: + with ( + mock.patch.object(self.arch.session, "post") as mock_post1, + mock.patch.object(self.arch.session, "get") as mock_get1, + mock.patch.object(self.arch2.session, "post") as mock_post2, + mock.patch.object(self.arch2.session, "get") as mock_get2, + ): mock_post1.return_value = MockResponse(200, **RESPONSE_WITH_CONFIRMATION) mock_get1.return_value = MockResponse(200, **RESPONSE_WITH_CONFIRMATION) mock_post2.return_value = MockResponse(200, **RESPONSE2_WITH_CONFIRMATION)
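Note on the context-manager refactor running through the unit tests above: each change replaces a single `with` statement whose second and later `mock.patch.object(...)` calls were wrapped awkwardly across lines with the parenthesized context-manager grouping, one manager per line. CPython's PEG parser accepts this form from 3.9 onwards and it became an officially documented language feature in 3.10, so dropping Python 3.8 from the support matrix is what makes the refactor safe. A minimal sketch of the before/after shape, using a hypothetical `DemoClient` in place of the SDK's session object:

```python
from unittest import mock


class DemoClient:
    """Hypothetical stand-in for the SDK session patched in the real tests."""

    def post(self, url):
        return "real post"

    def get(self, url):
        return "real get"


client = DemoClient()

# Before: one `with` statement, with the second mock.patch.object call
# split across lines by the formatter - hard to read and hard to extend.
with mock.patch.object(client, "post") as mock_post, mock.patch.object(
    client, "get"
) as mock_get:
    mock_post.return_value = "mocked post"
    assert client.post("/assets") == "mocked post"

# After: the whole group of context managers is wrapped in parentheses,
# one per line, as adopted throughout the refactored tests.
with (
    mock.patch.object(client, "post") as mock_post,
    mock.patch.object(client, "get") as mock_get,
):
    mock_post.return_value = "mocked post"
    mock_get.return_value = "mocked get"
    assert client.post("/assets") == "mocked post"
    assert client.get("/assets") == "mocked get"
```

The requirements change follows the same loosening theme: `requests~=2.32` is a compatible-release pin (>= 2.32, < 3.0), whereas the previous `requests~=2.32.0` held the dependency to the 2.32.x patch series.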