From 5a8422bc6c8e95c1373e2088515b24e2a9cb9acb Mon Sep 17 00:00:00 2001
From: Ziga Luksic
Date: Mon, 29 Jan 2024 12:09:58 +0100
Subject: [PATCH 1/6] adjust test for new versions of pytest and moto

---
 pyproject.toml          | 3 +--
 tests/aws/test_batch.py | 6 +++---
 tests/test_io_utils.py  | 4 +---
 3 files changed, 5 insertions(+), 8 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index b476381c..0c073544 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -73,14 +73,13 @@ dev = [
     "click>=8.0.0",
     "fs",
     "mypy>=0.990",
-    "moto",
+    "moto[s3]",
     "pandas",
     "pre-commit",
     "pylint>=2.14.0",
     "pytest>=4.0.0",
     "pytest-cov",
     "pytest-dependency",
-    "pytest-lazy-fixture",
     "pytest-mock",
     "ray[default]",
     "requests-mock",
diff --git a/tests/aws/test_batch.py b/tests/aws/test_batch.py
index 04c576f0..2cec8965 100644
--- a/tests/aws/test_batch.py
+++ b/tests/aws/test_batch.py
@@ -10,7 +10,7 @@
 
 import boto3
 import pytest
-from moto import mock_s3
+from moto import mock_aws
 from pytest_mock import MockerFixture
 
 from sentinelhub import BatchRequestStatus, BatchStatisticalRequest, SHConfig
@@ -25,7 +25,7 @@ class BatchInputType(Enum):
     OBJECT = "batch_object"
 
 
-@mock_s3
+@mock_aws
 def _create_mocked_bucket_and_upload_data(bucket_name: str, paths: Sequence[str], data: Sequence[JsonDict]) -> str:
     """Creates a new empty mocked s3 bucket. If one such bucket already exists it deletes it first.
@@ -48,7 +48,7 @@ def _create_mocked_bucket_and_upload_data(bucket_name: str, paths: Sequence[str]
     return bucket_name
 
 
-@mock_s3
+@mock_aws
 @pytest.mark.parametrize("batch_input_type", list(BatchInputType))
 @pytest.mark.parametrize("use_feature_ids", [True, False])
 @pytest.mark.parametrize(("config", "show_progress"), [(None, False), (SHConfig(), True)])
diff --git a/tests/test_io_utils.py b/tests/test_io_utils.py
index 999c6db2..ddf53c1b 100644
--- a/tests/test_io_utils.py
+++ b/tests/test_io_utils.py
@@ -6,7 +6,6 @@
 import numpy as np
 import pytest
 from fs.tempfs import TempFS
-from pytest_lazyfixture import lazy_fixture
 
 from sentinelhub import read_data, write_data
@@ -39,7 +38,6 @@ def test_read_tar_with_folder(input_folder: str) -> None:
     assert data == {"tar-folder/simple.json": {"message": "test"}}
 
 
-@pytest.fixture()
 def xml_testcase():
     xml_root = ElementTree.Element("EOPatch")
     xml_data = ElementTree.SubElement(xml_root, "data")
@@ -57,7 +55,7 @@ def xml_testcase():
         ("img-15bit.jp2", BASIC_IMAGE),
         ("img-16bit.jp2", BASIC_IMAGE),
         ("test-string.txt", "sentinelhub-py is often shortened to sh-py"),
-        ("test-xml.xml", lazy_fixture("xml_testcase")),
+        ("test-xml.xml", xml_testcase()),
     ],
 )
 def test_write_read(filename: str, data: str | np.ndarray | ElementTree.ElementTree) -> None:

From 589ccd1f1bc1bda997d344956849dbdf037acde0 Mon Sep 17 00:00:00 2001
From: Ziga Luksic
Date: Mon, 29 Jan 2024 12:12:21 +0100
Subject: [PATCH 2/6] add version fix for moto

---
 pyproject.toml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index 0c073544..9a0cd02c 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -73,7 +73,7 @@ dev = [
     "click>=8.0.0",
     "fs",
     "mypy>=0.990",
-    "moto[s3]",
+    "moto[s3]>=5.0.0",
     "pandas",
     "pre-commit",
     "pylint>=2.14.0",
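Note: patches 1 and 2 track two test-dependency updates — moto 5 replaces the per-service decorators
such as `mock_s3` with a single `mock_aws` decorator, and `pytest-lazy-fixture`, which is incompatible
with newer pytest releases, is dropped in favour of calling the XML helper directly. Below is a minimal
sketch of the moto 5 pattern used in `tests/aws/test_batch.py`; the bucket name and region are
illustrative placeholders, not values taken from the repository.

    import boto3
    from moto import mock_aws  # moto>=5 exposes one decorator covering all mocked AWS services


    @mock_aws
    def create_mocked_bucket(bucket_name: str = "example-bucket") -> str:
        """Create an in-memory S3 bucket; no real credentials or network access are involved."""
        s3 = boto3.client("s3", region_name="eu-central-1")
        s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={"LocationConstraint": "eu-central-1"})
        return bucket_name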
From cb586addbfcf215a7f18054e6595193c410e2e28 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=BDiga=20Luk=C5=A1i=C4=8D?= <31988337+zigaLuksic@users.noreply.github.com>
Date: Fri, 8 Mar 2024 08:17:59 +0100
Subject: [PATCH 3/6] Fix BYOC test (#524)

---
 tests/api/test_byoc.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tests/api/test_byoc.py b/tests/api/test_byoc.py
index 813d5837..c5b509b7 100644
--- a/tests/api/test_byoc.py
+++ b/tests/api/test_byoc.py
@@ -34,7 +34,6 @@ def byoc_fixture(config: SHConfig) -> SentinelHubBYOC:
 def collection_fixture() -> JsonDict:
     return {
         "id": "7453e962-0ee5-4f74-8227-89759fbe9ba9",
-        "userId": "1b639ce6-eb3e-494c-9cb4-2eab3569b121",
         "name": "SI LULC Reference",
         "s3Bucket": "eo-learn.sentinel-hub.com",
         "additionalData": {

From 4dd97ebac72f33f4b4db2c742a965894fefb4ae1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=BDiga=20Luk=C5=A1i=C4=8D?= <31988337+zigaLuksic@users.noreply.github.com>
Date: Mon, 18 Mar 2024 09:07:17 +0100
Subject: [PATCH 4/6] Change collection detail in catalog test (#525)

---
 tests/api/test_catalog.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/api/test_catalog.py b/tests/api/test_catalog.py
index bac4f104..6083a9eb 100644
--- a/tests/api/test_catalog.py
+++ b/tests/api/test_catalog.py
@@ -165,7 +165,7 @@ def test_search_geometry_and_iterator_methods(catalog: SentinelHubCatalog) -> No
         ),
         (
             DataCollection.SENTINEL5P,
-            "S5P_NRTI_L2__AER_AI_20210114T100354_20210114T100854_16869_01_010400_20210114T104450",
+            "S5P_NRTI_L2__SO2____20210114T100354_20210114T100854_16869_01_020104_20210114T105704",
         ),
     ],
 )

From dd27e3608867075b209c2ca997b8041188e3ede7 Mon Sep 17 00:00:00 2001
From: Regan Koopmans
Date: Wed, 24 Apr 2024 12:39:06 +0200
Subject: [PATCH 5/6] Add configuration to limit rate limit retries (#527)

* Add configuration to limit rate limit retries

* Disable pylint check for _SHConfig

* Rename max_rate_limit_retries to max_retries

---------

Co-authored-by: Regan Koopmans
---
 sentinelhub/config.py                      |  4 +++-
 sentinelhub/download/sentinelhub_client.py |  7 ++++++-
 tests/download/test_sentinelhub_client.py  | 18 ++++++++++++++++++
 3 files changed, 27 insertions(+), 2 deletions(-)

diff --git a/sentinelhub/config.py b/sentinelhub/config.py
index a61b6023..b639f714 100644
--- a/sentinelhub/config.py
+++ b/sentinelhub/config.py
@@ -24,7 +24,7 @@
 
 
 @dataclass(repr=False)
-class _SHConfig:
+class _SHConfig:  # pylint: disable=too-many-instance-attributes
     instance_id: str = ""
     sh_client_id: str = ""
     sh_client_secret: str = ""
@@ -46,6 +46,7 @@ class _SHConfig:
     download_sleep_time: float = 5.0
     download_timeout_seconds: float = 120.0
     number_of_download_processes: int = 1
+    max_retries: int | None = None
 
     def __post_init__(self) -> None:
         if self.sh_auth_base_url is not None:
@@ -94,6 +95,7 @@ class SHConfig(_SHConfig):
       attempt this number exponentially increases with factor `3`.
     - `download_timeout_seconds`: Maximum number of seconds before download attempt is canceled.
     - `number_of_download_processes`: Number of download processes, used to calculate rate-limit sleep time.
+    - `max_retries`: Maximum number of retries until an exception is raised.
 
     The location of `config.toml` for manual modification can be found with `SHConfig.get_config_location()`.
""" diff --git a/sentinelhub/download/sentinelhub_client.py b/sentinelhub/download/sentinelhub_client.py index af79fef4..0c2c02b7 100644 --- a/sentinelhub/download/sentinelhub_client.py +++ b/sentinelhub/download/sentinelhub_client.py @@ -15,7 +15,7 @@ from ..config import SHConfig from ..constants import SHConstants -from ..exceptions import SHRateLimitWarning, SHRuntimeWarning +from ..exceptions import OutOfRequestsException, SHRateLimitWarning, SHRuntimeWarning from ..types import JsonDict from .client import DownloadClient from .handlers import fail_user_errors, retry_temporary_errors @@ -75,10 +75,12 @@ def _execute_download(self, request: DownloadRequest) -> DownloadResponse: """ Executes the download with a single thread and uses a rate limit object, which is shared between all threads """ + download_attempts = 0 while True: sleep_time = self._execute_thread_safe(self.rate_limit.register_next) if sleep_time == 0: + download_attempts += 1 LOGGER.debug( "Sending %s request to %s. Hash of sent request is %s", request.request_type.value, @@ -89,6 +91,9 @@ def _execute_download(self, request: DownloadRequest) -> DownloadResponse: if response.status_code == requests.status_codes.codes.TOO_MANY_REQUESTS: warnings.warn("Download rate limit hit", category=SHRateLimitWarning) + if self.config.max_retries is not None and download_attempts >= self.config.max_retries: + raise OutOfRequestsException("Maximum number of download attempts reached") + self._execute_thread_safe(self.rate_limit.update, response.headers, default=self.default_retry_time) continue diff --git a/tests/download/test_sentinelhub_client.py b/tests/download/test_sentinelhub_client.py index 429cde6c..83af3201 100644 --- a/tests/download/test_sentinelhub_client.py +++ b/tests/download/test_sentinelhub_client.py @@ -1,15 +1,18 @@ from __future__ import annotations import pytest +import requests from requests_mock import Mocker from sentinelhub import ( + DownloadRequest, SentinelHubDownloadClient, SentinelHubSession, SentinelHubStatisticalDownloadClient, SHConfig, __version__, ) +from sentinelhub.exceptions import OutOfRequestsException FAST_SH_ENDPOINT = "https://services.sentinel-hub.com/api/v1/catalog/collections" # ruff: noqa: SLF001 @@ -112,3 +115,18 @@ def test_universal_session_caching(session: SentinelHubSession) -> None: SentinelHubDownloadClient.cache_session(session, universal=True) cached_session = client.get_session() assert cached_session is session + + +@pytest.mark.sh_integration() +def test_client_with_max_retries(session: SentinelHubSession) -> None: + blank_config = SHConfig(use_defaults=True) + blank_config.max_retries = 1 + client = SentinelHubDownloadClient(session=session, config=blank_config) + + class MockResponse: + def __init__(self): + self.status_code = requests.status_codes.codes.TOO_MANY_REQUESTS + + client._do_download = lambda _: MockResponse() + with pytest.raises(OutOfRequestsException): + client.download(download_requests=[DownloadRequest()]) From 13a71ab58ad1f9175d9ede304e3cd01b8e54f170 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=BDiga=20Luk=C5=A1i=C4=8D?= <31988337+zigaLuksic@users.noreply.github.com> Date: Wed, 24 Apr 2024 15:16:30 +0200 Subject: [PATCH 6/6] Prepare for release (#528) * update version and changelog * fix failing test * update pre-commit hooks * fixed doc version --- .pre-commit-config.yaml | 8 ++--- CHANGELOG.MD | 5 +++ examples/fis_request.ipynb | 16 +++++---- examples/process_request.ipynb | 16 +++++---- examples/process_request_cdse.ipynb | 16 +++++---- pyproject.toml | 2 
From 13a71ab58ad1f9175d9ede304e3cd01b8e54f170 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C5=BDiga=20Luk=C5=A1i=C4=8D?= <31988337+zigaLuksic@users.noreply.github.com>
Date: Wed, 24 Apr 2024 15:16:30 +0200
Subject: [PATCH 6/6] Prepare for release (#528)

* update version and changelog

* fix failing test

* update pre-commit hooks

* fixed doc version
---
 .pre-commit-config.yaml                    |  8 ++---
 CHANGELOG.MD                               |  5 +++
 examples/fis_request.ipynb                 | 16 +++++----
 examples/process_request.ipynb             | 16 +++++----
 examples/process_request_cdse.ipynb        | 16 +++++----
 pyproject.toml                             |  2 +-
 sentinelhub/_version.py                    |  2 +-
 sentinelhub/api/base_request.py            | 12 ++++---
 sentinelhub/api/batch/process.py           | 24 +++++++------
 sentinelhub/api/byoc.py                    | 12 ++++---
 sentinelhub/api/catalog.py                 | 30 ++++++++--------
 sentinelhub/data_collections_bands.py      |  2 +-
 sentinelhub/download/handlers.py           |  2 +-
 sentinelhub/geometry.py                    |  2 +-
 tests/api/batch/test_process.py            | 28 ++++++++-------
 tests/api/test_byoc.py                     | 34 +++++++++++--------
 tests/api/test_fis.py                      | 16 +++++----
 tests/api/test_ogc.py                      |  4 +--
 tests/api/test_process.py                  |  3 +-
 tests/api/test_process_async.py            |  3 +-
 .../test_sentinelhub_statistical_client.py | 14 ++++----
 21 files changed, 139 insertions(+), 112 deletions(-)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 1948c24e..20864026 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,6 @@
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.5.0
+    rev: v4.6.0
     hooks:
       - id: end-of-file-fixer
       - id: requirements-txt-fixer
@@ -13,18 +13,18 @@
       - id: debug-statements
 
   - repo: https://github.com/psf/black
-    rev: 23.12.1
+    rev: 24.4.0
     hooks:
       - id: black
         language_version: python3
 
   - repo: https://github.com/charliermarsh/ruff-pre-commit
-    rev: "v0.1.11"
+    rev: "v0.4.1"
    hooks:
       - id: ruff
 
   - repo: https://github.com/nbQA-dev/nbQA
-    rev: 1.7.1
+    rev: 1.8.5
     hooks:
       - id: nbqa-black
       - id: nbqa-ruff
diff --git a/CHANGELOG.MD b/CHANGELOG.MD
index b1f1bf5a..78c84994 100644
--- a/CHANGELOG.MD
+++ b/CHANGELOG.MD
@@ -1,3 +1,8 @@
+## [Version 3.10.2] - 2024-24-04
+
+- Added `max_retries` parameter to `SHConfig` class. It controls how many times the client will attempt to re-download before raising `OutOfRequestsException`. It is set to `None` by default, in which case it never stops trying. Contributed by @Regan-Koopmans.
+
+
 ## [Version 3.10.1] - 2024-01-10
 
 - Improved documentation for Copernicus Data Space Ecosystem.
diff --git a/examples/fis_request.ipynb b/examples/fis_request.ipynb
index 0c606755..a679a890 100644
--- a/examples/fis_request.ipynb
+++ b/examples/fis_request.ipynb
@@ -432,13 +432,15 @@
     "\n",
     "geometry1 = Geometry(Polygon([(-5.13, 48), (-5.23, 48.09), (-5.13, 48.17), (-5.03, 48.08), (-5.13, 48)]), CRS.WGS84)\n",
     "geometry2 = Geometry(\n",
-    "    Polygon([\n",
-    "        (1292344.0, 5205055.5),\n",
-    "        (1301479.5, 5195920.0),\n",
-    "        (1310615.0, 5205055.5),\n",
-    "        (1301479.5, 5214191.0),\n",
-    "        (1292344.0, 5205055.5),\n",
-    "    ]),\n",
+    "    Polygon(\n",
+    "        [\n",
+    "            (1292344.0, 5205055.5),\n",
+    "            (1301479.5, 5195920.0),\n",
+    "            (1310615.0, 5205055.5),\n",
+    "            (1301479.5, 5214191.0),\n",
+    "            (1292344.0, 5205055.5),\n",
+    "        ]\n",
+    "    ),\n",
     "    CRS.POP_WEB,\n",
     ")"
    ]
diff --git a/examples/process_request.ipynb b/examples/process_request.ipynb
index bcbe438d..aff6ae6e 100644
--- a/examples/process_request.ipynb
+++ b/examples/process_request.ipynb
@@ -989,13 +989,15 @@
     "request_raw_dict = {\n",
     "    \"input\": {\n",
     "        \"bounds\": {\"properties\": {\"crs\": betsiboka_bbox.crs.opengis_string}, \"bbox\": list(betsiboka_bbox)},\n",
-    "        \"data\": [{\n",
-    "            \"type\": \"S2L1C\",\n",
-    "            \"dataFilter\": {\n",
-    "                \"timeRange\": {\"from\": \"2020-06-01T00:00:00Z\", \"to\": \"2020-06-30T00:00:00Z\"},\n",
-    "                \"mosaickingOrder\": \"leastCC\",\n",
-    "            },\n",
-    "        }],\n",
+    "        \"data\": [\n",
+    "            {\n",
+    "                \"type\": \"S2L1C\",\n",
+    "                \"dataFilter\": {\n",
+    "                    \"timeRange\": {\"from\": \"2020-06-01T00:00:00Z\", \"to\": \"2020-06-30T00:00:00Z\"},\n",
+    "                    \"mosaickingOrder\": \"leastCC\",\n",
+    "                },\n",
+    "            }\n",
+    "        ],\n",
     "    },\n",
     "    \"output\": {\n",
     "        \"width\": betsiboka_size[0],\n",
diff --git a/examples/process_request_cdse.ipynb b/examples/process_request_cdse.ipynb
index 236e30d7..69213b80 100644
--- a/examples/process_request_cdse.ipynb
+++ b/examples/process_request_cdse.ipynb
@@ -941,13 +941,15 @@
     "request_raw_dict = {\n",
     "    \"input\": {\n",
     "        \"bounds\": {\"properties\": {\"crs\": betsiboka_bbox.crs.opengis_string}, \"bbox\": list(betsiboka_bbox)},\n",
-    "        \"data\": [{\n",
-    "            \"type\": \"S2L1C\",\n",
-    "            \"dataFilter\": {\n",
-    "                \"timeRange\": {\"from\": \"2020-06-01T00:00:00Z\", \"to\": \"2020-06-30T00:00:00Z\"},\n",
-    "                \"mosaickingOrder\": \"leastCC\",\n",
-    "            },\n",
-    "        }],\n",
+    "        \"data\": [\n",
+    "            {\n",
+    "                \"type\": \"S2L1C\",\n",
+    "                \"dataFilter\": {\n",
+    "                    \"timeRange\": {\"from\": \"2020-06-01T00:00:00Z\", \"to\": \"2020-06-30T00:00:00Z\"},\n",
+    "                    \"mosaickingOrder\": \"leastCC\",\n",
+    "                },\n",
+    "            }\n",
+    "        ],\n",
     "    },\n",
     "    \"output\": {\n",
     "        \"width\": betsiboka_size[0],\n",
diff --git a/pyproject.toml b/pyproject.toml
index 9a0cd02c..6ddbacc4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -63,7 +63,7 @@ docs = [
     "matplotlib",
     "nbsphinx",
     "sphinx==7.1.2",
-    "sphinx_mdinclude",
+    "sphinx_mdinclude==0.5.4",  # version fixed because 0.6.0 didnt work at release time of 3.10.2
     "sphinx_rtd_theme==1.3.0",
 ]
 dev = [
diff --git a/sentinelhub/_version.py b/sentinelhub/_version.py
index eed9afab..6b0145b6 100644
--- a/sentinelhub/_version.py
+++ b/sentinelhub/_version.py
@@ -1,3 +1,3 @@
 """Version of the sentinelhub package."""
 
-__version__ = "3.10.1"
+__version__ = "3.10.2"
diff --git a/sentinelhub/api/base_request.py b/sentinelhub/api/base_request.py
index 3d3d5273..f558c7be 100644
--- a/sentinelhub/api/base_request.py
+++ b/sentinelhub/api/base_request.py
@@ -134,11 +134,13 @@ def _get_base_url(self) -> str:
         settings from config object.
         In case different collections have different restrictions then `SHConfig.sh_base_url` breaks the tie in case
         it matches one of the data collection URLs.
         """
-        data_collection_urls = tuple({
-            input_data_dict.service_url.rstrip("/")
-            for input_data_dict in self.payload["input"]["data"]
-            if isinstance(input_data_dict, InputDataDict) and input_data_dict.service_url is not None
-        })
+        data_collection_urls = tuple(
+            {
+                input_data_dict.service_url.rstrip("/")
+                for input_data_dict in self.payload["input"]["data"]
+                if isinstance(input_data_dict, InputDataDict) and input_data_dict.service_url is not None
+            }
+        )
         config_base_url = self.config.sh_base_url.rstrip("/")
 
         if not data_collection_urls:
diff --git a/sentinelhub/api/batch/process.py b/sentinelhub/api/batch/process.py
index 5837970c..49e402cf 100644
--- a/sentinelhub/api/batch/process.py
+++ b/sentinelhub/api/batch/process.py
@@ -150,17 +150,19 @@ def output(
         :param kwargs: Any other arguments to be added to a dictionary of parameters
         :return: A dictionary of output parameters
         """
-        return remove_undefined({
-            "defaultTilePath": default_tile_path,
-            "overwrite": overwrite,
-            "skipExisting": skip_existing,
-            "cogOutput": cog_output,
-            "cogParameters": cog_parameters,
-            "createCollection": create_collection,
-            "collectionId": collection_id,
-            "responses": responses,
-            **kwargs,
-        })
+        return remove_undefined(
+            {
+                "defaultTilePath": default_tile_path,
+                "overwrite": overwrite,
+                "skipExisting": skip_existing,
+                "cogOutput": cog_output,
+                "cogParameters": cog_parameters,
+                "createCollection": create_collection,
+                "collectionId": collection_id,
+                "responses": responses,
+                **kwargs,
+            }
+        )
 
     def iter_tiling_grids(self, **kwargs: Any) -> SentinelHubFeatureIterator:
         """An iterator over tiling grids
diff --git a/sentinelhub/api/byoc.py b/sentinelhub/api/byoc.py
index 9c633c0a..6c3d94ee 100644
--- a/sentinelhub/api/byoc.py
+++ b/sentinelhub/api/byoc.py
@@ -220,11 +220,13 @@ def update_tile(self, collection: CollectionType, tile: TileType) -> Json:
         headers = {"Content-Type": MimeType.JSON.get_string()}
 
         _tile = self._to_dict(tile)
-        updates = remove_undefined({
-            "path": _tile["path"],
-            "coverGeometry": _tile.get("coverGeometry"),
-            "sensingTime": _tile.get("sensingTime"),
-        })
+        updates = remove_undefined(
+            {
+                "path": _tile["path"],
+                "coverGeometry": _tile.get("coverGeometry"),
+                "sensingTime": _tile.get("sensingTime"),
+            }
+        )
 
         return self.client.get_json(
             url=url, request_type=RequestType.PUT, post_values=updates, headers=headers, use_session=True
diff --git a/sentinelhub/api/catalog.py b/sentinelhub/api/catalog.py
index 46f2d651..3c41da7a 100644
--- a/sentinelhub/api/catalog.py
+++ b/sentinelhub/api/catalog.py
@@ -140,20 +140,22 @@ def search(
 
         if geometry and geometry.crs is not CRS.WGS84:
             geometry = geometry.transform(CRS.WGS84)
 
-        payload = remove_undefined({
-            "collections": [collection_id],
-            "datetime": f"{start_time}/{end_time}" if time else None,
-            "bbox": list(bbox) if bbox else None,
-            "intersects": geometry.get_geojson(with_crs=False) if geometry else None,
-            "ids": ids,
-            "filter": self._prepare_filters(filter, collection, filter_lang),
-            "filter-lang": filter_lang,
-            "filter-crs": filter_crs,
-            "fields": fields,
-            "distinct": distinct,
-            "limit": limit,
-            **kwargs,
-        })
+        payload = remove_undefined(
+            {
+                "collections": [collection_id],
+                "datetime": f"{start_time}/{end_time}" if time else None,
+                "bbox": list(bbox) if bbox else None,
+                "intersects": geometry.get_geojson(with_crs=False) if geometry else None,
+                "ids": ids,
"filter": self._prepare_filters(filter, collection, filter_lang), + "filter-lang": filter_lang, + "filter-crs": filter_crs, + "fields": fields, + "distinct": distinct, + "limit": limit, + **kwargs, + } + ) return CatalogSearchIterator(self.client, url, payload) diff --git a/sentinelhub/data_collections_bands.py b/sentinelhub/data_collections_bands.py index a2999428..df2bb308 100644 --- a/sentinelhub/data_collections_bands.py +++ b/sentinelhub/data_collections_bands.py @@ -1,4 +1,4 @@ -""" Contains information about data collections used by SH """ +"""Contains information about data collections used by SH""" from __future__ import annotations diff --git a/sentinelhub/download/handlers.py b/sentinelhub/download/handlers.py index 942d3684..09318c3c 100644 --- a/sentinelhub/download/handlers.py +++ b/sentinelhub/download/handlers.py @@ -52,7 +52,7 @@ def new_download_func(self: Self, request: DownloadRequest) -> T: def retry_temporary_errors( - download_func: Callable[[SelfWithConfig, DownloadRequest], T] + download_func: Callable[[SelfWithConfig, DownloadRequest], T], ) -> Callable[[SelfWithConfig, DownloadRequest], T]: """Decorator function for handling server and connection errors""" backoff_coefficient = 3 diff --git a/sentinelhub/geometry.py b/sentinelhub/geometry.py index e5491cf0..8ec1817c 100644 --- a/sentinelhub/geometry.py +++ b/sentinelhub/geometry.py @@ -150,7 +150,7 @@ def _to_tuple(cls, bbox: BBoxInputType) -> tuple[float, float, float, float]: @staticmethod def _tuple_from_list_or_tuple( - bbox: tuple[float, float, float, float] | tuple[tuple[float, float], tuple[float, float]] + bbox: tuple[float, float, float, float] | tuple[tuple[float, float], tuple[float, float]], ) -> tuple[float, float, float, float]: """Converts a list or tuple representation of a bbox into a flat tuple representation. 
diff --git a/tests/api/batch/test_process.py b/tests/api/batch/test_process.py
index 5f00b35b..a1518905 100644
--- a/tests/api/batch/test_process.py
+++ b/tests/api/batch/test_process.py
@@ -67,21 +67,23 @@ def test_create_and_run_batch_request(batch_client: SentinelHubBatch, requests_m
     request_id = "mocked-id"
     requests_mock.post(
         "/api/v1/batch/process",
-        [{
-            "json": {
-                "id": request_id,
-                "processRequest": {
-                    "input": {
-                        "bounds": {
-                            "bbox": list(bbox),
-                            "properties": {"crs": "http://www.opengis.net/def/crs/OGC/1.3/CRS84"},
-                        }
-                    }
-                },
-                "tileCount": 42,
-                "status": "CREATED",
-            }
-        }],
+        [
+            {
+                "json": {
+                    "id": request_id,
+                    "processRequest": {
+                        "input": {
+                            "bounds": {
+                                "bbox": list(bbox),
+                                "properties": {"crs": "http://www.opengis.net/def/crs/OGC/1.3/CRS84"},
+                            }
+                        }
+                    },
+                    "tileCount": 42,
+                    "status": "CREATED",
+                }
+            }
+        ],
     )
 
     batch_request = batch_client.create(
diff --git a/tests/api/test_byoc.py b/tests/api/test_byoc.py
index c5b509b7..3974cd19 100644
--- a/tests/api/test_byoc.py
+++ b/tests/api/test_byoc.py
@@ -41,13 +41,15 @@ def collection_fixture() -> JsonDict:
             "maxMetersPerPixel": 800.0,
             "extent": {
                 "type": "Polygon",
-                "coordinates": [[
-                    [13.293347498, 45.366449953],
-                    [13.293347498, 46.897693758],
-                    [16.575424424, 46.897693758],
-                    [16.575424424, 45.366449953],
-                    [13.293347498, 45.366449953],
-                ]],
+                "coordinates": [
+                    [
+                        [13.293347498, 45.366449953],
+                        [13.293347498, 46.897693758],
+                        [16.575424424, 46.897693758],
+                        [16.575424424, 45.366449953],
+                        [13.293347498, 45.366449953],
+                    ]
+                ],
             },
             "hasSensingTimes": "NO",
         },
@@ -79,15 +81,17 @@ def tile_fixture() -> JsonDict:
         "coverGeometry": {
             "crs": {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::32633"}},
             "type": "MultiPolygon",
-            "coordinates": [(
-                (
-                    (369999.99998228427, 5025000.000464492),
-                    (620000.000010147, 5025000.000464773),
-                    (620000.000012391, 5195000.000585059),
-                    (369999.9999783558, 5195000.00058473),
-                    (369999.99998228427, 5025000.000464492),
-                ),
-            )],
+            "coordinates": [
+                (
+                    (
+                        (369999.99998228427, 5025000.000464492),
+                        (620000.000010147, 5025000.000464773),
+                        (620000.000012391, 5195000.000585059),
+                        (369999.9999783558, 5195000.00058473),
+                        (369999.99998228427, 5025000.000464492),
+                    ),
+                )
+            ],
         },
         "created": "2020-06-22T12:33:36.081000Z",
         "sensingTime": None,
diff --git a/tests/api/test_fis.py b/tests/api/test_fis.py
index ac2814b1..f1d82fd8 100644
--- a/tests/api/test_fis.py
+++ b/tests/api/test_fis.py
@@ -15,13 +15,15 @@
 BBOX = BBox([14.00, 45.00, 14.03, 45.03], crs=CRS.WGS84)
 
 GEOMETRY1 = Geometry(
-    Polygon([
-        (465888.877326859, 5079639.436138632),
-        (465885.3413983975, 5079641.524618266),
-        (465882.9542217017, 5079647.166043535),
-        (465888.8780175466, 5079668.703676634),
-        (465888.877326859, 5079639.436138632),
-    ]),
+    Polygon(
+        [
+            (465888.877326859, 5079639.436138632),
+            (465885.3413983975, 5079641.524618266),
+            (465882.9542217017, 5079647.166043535),
+            (465888.8780175466, 5079668.703676634),
+            (465888.877326859, 5079639.436138632),
+        ]
+    ),
     CRS(32633),
 )
 GEOMETRY2 = Geometry("POLYGON((-5.13 48, -5.23 48.09, -5.13 48.17, -5.03 48.08, -5.13 48))", CRS.WGS84)
diff --git a/tests/api/test_ogc.py b/tests/api/test_ogc.py
index 7a0774a3..db290378 100644
--- a/tests/api/test_ogc.py
+++ b/tests/api/test_ogc.py
@@ -418,9 +418,9 @@ def collect_data(self, request: OgcRequest) -> list:
             result_len=1,
             img_min=0.0,
             img_max=1.0,
-            img_mean=0.34803,
+            img_mean=0.34748,
             img_median=0.02383,
-            img_std=0.46208,
+            img_std=0.46185,
             tile_num=2,
         ),
         OgcTestCase(
diff --git a/tests/api/test_process.py b/tests/api/test_process.py
index f9e09c75..af3fe1d5 100644
--- a/tests/api/test_process.py
+++ b/tests/api/test_process.py
@@ -1,5 +1,4 @@
-""" Tests for the Process API requests
-"""
+"""Tests for the Process API requests"""
 
 from __future__ import annotations
 
diff --git a/tests/api/test_process_async.py b/tests/api/test_process_async.py
index b5d4cdad..6537180b 100644
--- a/tests/api/test_process_async.py
+++ b/tests/api/test_process_async.py
@@ -1,5 +1,4 @@
-""" Tests for the Async Process API requests
-"""
+"""Tests for the Async Process API requests"""
 
 import datetime as dt
 
diff --git a/tests/download/test_sentinelhub_statistical_client.py b/tests/download/test_sentinelhub_statistical_client.py
index e0018f70..1872d37c 100644
--- a/tests/download/test_sentinelhub_statistical_client.py
+++ b/tests/download/test_sentinelhub_statistical_client.py
@@ -69,13 +69,15 @@ def test_statistical_client_runs_out_of_retries(download_request: DownloadReques
 
     requests_mock.post(
         url="/api/v1/statistics",
-        response_list=[{
-            "json": {
-                "data": [
-                    {"interval": {"from": "2020-01-20", "to": "2020-01-20"}, "error": {"type": "EXECUTION_ERROR"}},
-                ]
-            }
-        }],
+        response_list=[
+            {
+                "json": {
+                    "data": [
+                        {"interval": {"from": "2020-01-20", "to": "2020-01-20"}, "error": {"type": "EXECUTION_ERROR"}},
+                    ]
+                }
+            }
+        ],
     )
 
     with pytest.raises(DownloadFailedException) as exception_info:
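Note: most of patch 6 appears to be mechanical restyling that follows the tooling bumps in
.pre-commit-config.yaml (black 23.12.1 -> 24.4.0, ruff v0.1.11 -> v0.4.1): a sole bracketed argument
that previously "hugged" the call parentheses is now expanded onto its own lines, which is why so
many call sites, notebooks, and test fixtures change without any change in behaviour. Illustrative
before/after shapes, simplified from the diff rather than copied verbatim:

    # shape used before this patch
    payload = remove_undefined({
        "collections": [collection_id],
    })

    # shape used after this patch
    payload = remove_undefined(
        {
            "collections": [collection_id],
        }
    )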