diff --git a/docs/source/crs.rst b/docs/source/crs.rst
index d2abc463a..5c299b854 100644
--- a/docs/source/crs.rst
+++ b/docs/source/crs.rst
@@ -30,7 +30,7 @@ Metadata
The conformance class `http://www.opengis.net/spec/ogcapi-features-2/1.0/conf/crs` is present as a `conformsTo` field
in the root landing page response.
-The configured CRSs, or their defaults, `crs` and `storageCRS` and optionally `storageCrsCoordinateEpoch` will be present in the "Describe Collection" response.
+The configured CRSs, or their defaults, `crs` and `storageCrs` and optionally `storageCrsCoordinateEpoch` will be present in the "Describe Collection" response.
Parameters
----------
@@ -95,7 +95,7 @@ Suppose an addresses collection with the following CRS support in its collection
"http://www.opengis.net/def/crs/EPSG/0/28992",
"http://www.opengis.net/def/crs/OGC/1.3/CRS84"
],
- "storageCRS": "http://www.opengis.net/def/crs/OGC/1.3/CRS84"
+ "storageCrs": "http://www.opengis.net/def/crs/OGC/1.3/CRS84"
This allows a `bbox-crs` query using Dutch "RD" coordinates with CRS `http://www.opengis.net/def/crs/EPSG/0/28992` to retrieve
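+
+For illustration, such a request could look as follows (the endpoint and
+collection name are hypothetical):
+
+.. code-block:: python
+
+   import requests
+
+   params = {
+       # minx,miny,maxx,maxy in Dutch RD (EPSG:28992) coordinates
+       'bbox': '120000,480000,125000,485000',
+       'bbox-crs': 'http://www.opengis.net/def/crs/EPSG/0/28992',
+       'f': 'json'
+   }
+   r = requests.get(
+       'http://localhost:5000/collections/addresses/items', params=params)
+   r.raise_for_status()
+   print(r.json()['numberReturned'])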
diff --git a/docs/source/data-publishing/ogcapi-features.rst b/docs/source/data-publishing/ogcapi-features.rst
index 63cbea1ef..200fa5282 100644
--- a/docs/source/data-publishing/ogcapi-features.rst
+++ b/docs/source/data-publishing/ogcapi-features.rst
@@ -538,9 +538,6 @@ PostgreSQL
Must have PostGIS installed.
-.. note::
- Geometry must be using EPSG:4326
-
.. code-block:: yaml
providers:
diff --git a/docs/source/data-publishing/ogcapi-tiles.rst b/docs/source/data-publishing/ogcapi-tiles.rst
index 3f669061a..66ab76c91 100644
--- a/docs/source/data-publishing/ogcapi-tiles.rst
+++ b/docs/source/data-publishing/ogcapi-tiles.rst
@@ -22,6 +22,7 @@ pygeoapi core tile providers are listed below, along with supported features.
`MVT-elastic`_,✅,✅,✅,❌,❌,✅
`MVT-proxy`_,❓,❓,❓,❓,❌,✅
`WMTSFacade`_,✅,❌,✅,✅,✅,❌
+ `MVT-postgresql`_,✅,✅,✅,✅,❌,✅
Below are specific connection examples based on supported providers.
@@ -130,6 +131,47 @@ Following code block shows how to configure pygeoapi to read Mapbox vector tiles
name: pbf
mimetype: application/vnd.mapbox-vector-tile
+MVT-postgresql
+^^^^^^^^^^^^^^
+
+.. note::
+ Requires Python packages sqlalchemy, geoalchemy2 and psycopg2-binary
+
+.. note::
+ Must have PostGIS installed with protobuf-c support
+
+.. note::
+ Geometry must be using EPSG:4326
+
+This provider supports serving tiles generated using `PostgreSQL <https://www.postgresql.org>`_ with `PostGIS <https://postgis.net>`_.
+The tiles are rendered on-the-fly using `ST_AsMVT <https://postgis.net/docs/ST_AsMVT.html>`_ and related methods.
+
+This code block shows how to configure pygeoapi to render Mapbox vector tiles from a PostGIS table.
+
+.. code-block:: yaml
+
+ providers:
+ - type: tile
+ name: MVT-postgresql
+ data:
+ host: 127.0.0.1
+ port: 3010 # Default 5432 if not provided
+ dbname: test
+ user: postgres
+ password: postgres
+ search_path: [osm, public]
+ id_field: osm_id
+ table: hotosm_bdi_waterways
+ geom_field: foo_geom
+ options:
+ zoom:
+ min: 0
+ max: 15
+ format:
+ name: pbf
+ mimetype: application/vnd.mapbox-vector-tile
+
+PostgreSQL-related connection options can also be added to `options`. Please refer to the :ref:`PostgreSQL OGC Features Provider` documentation for more information.
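+
+Once pygeoapi is running, a single tile can be fetched to verify the setup
+(the URL and collection name below follow the example configuration above):
+
+.. code-block:: python
+
+   import requests
+
+   # tileMatrix/tileRow/tileCol = 0/0/0 of the WebMercatorQuad tileset
+   url = ('http://localhost:5000/collections/hotosm_bdi_waterways'
+          '/tiles/WebMercatorQuad/0/0/0')
+   r = requests.get(url, params={'f': 'mvt'})
+   r.raise_for_status()
+   assert r.headers['Content-Type'] == 'application/vnd.mapbox-vector-tile'
+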
WMTSFacade
^^^^^^^^^^
diff --git a/pygeoapi/api/__init__.py b/pygeoapi/api/__init__.py
index f96220080..9b88b796c 100644
--- a/pygeoapi/api/__init__.py
+++ b/pygeoapi/api/__init__.py
@@ -42,7 +42,7 @@
from collections import ChainMap, OrderedDict
from copy import deepcopy
-from datetime import datetime
+from datetime import (datetime, timezone)
from functools import partial
from gzip import compress
from http import HTTPStatus
@@ -165,6 +165,32 @@ def apply_gzip(headers: dict, content: Union[str, bytes]) -> Union[str, bytes]:
return content
+def pre_load_colls(func):
+ """
+ Decorator function that makes sure the loaded collections are updated.
+ This is used when the resources are loaded dynamically, not strictly
+ from the yaml file.
+
+ :param func: decorated function
+
+ :returns: `func`
+ """
+
+ def inner(*args, **kwargs):
+ cls = args[0]
+
+        # Only act when the class instance exposes the reload hook used by
+        # this decorator
+ if hasattr(cls, 'reload_resources_if_necessary'):
+ # Validate the resources are up to date
+ cls.reload_resources_if_necessary()
+
+ # Continue
+ return func(*args, **kwargs)
+
+ return inner
+
+
class APIRequest:
"""
Transforms an incoming server-specific Request into an object
@@ -565,9 +591,74 @@ def __init__(self, config, openapi):
self.tpl_config = deepcopy(self.config)
self.tpl_config['server']['url'] = self.base_url
+        # Now that the basic configuration is read, load the resources.
+        # This call enables the api engine to load resources dynamically.
+        # This pattern allows for loading resources coming from another
+        # source (e.g. a database) rather than from the yaml file.
+        # This, along with the @pre_load_colls decorator, enables
+        # resource management across multiple distributed pygeoapi instances.
+ self.load_resources()
+
self.manager = get_manager(self.config)
LOGGER.info('Process manager plugin loaded')
+ def on_load_resources(self, resources: dict) -> dict:
+ """
+ Overridable function to load the available resources dynamically.
+ By default, this function simply returns the provided resources
+        as-is. This is the native behavior of the API, which expects
+        resources to be configured in the yaml config file.
+
+ :param resources: the resources as currently configured
+ (self.config['resources'])
+ :returns: the resources dictionary that's available in the API.
+ """
+
+ # By default, return the same resources object, unchanged.
+ return resources
+
+ def on_load_resources_check(self, last_loaded_resources: datetime) -> bool: # noqa
+ """
+ Overridable function to check if the resources should be reloaded.
+ Return True in your API implementation when resources should be
+ reloaded. This implementation depends on your environment and
+ messaging broker.
+        Natively, the resources used by the pygeoapi instance are strictly
+        the ones from the yaml configuration file; they are not expected
+        to change on-the-fly. Therefore, False is returned here and the
+        resources are never reloaded.
+
+        :param last_loaded_resources: UTC `datetime` of the last time the
+                                      resources were loaded
+
+        :returns: `True` if the resources should be reloaded, else `False`
+        """
+
+ # By default, return False to not reload the resources.
+ return False
+
+ def load_resources(self) -> None:
+ """
+ Calls on_load_resources and reassigns the resources configuration.
+ """
+
+ # Call on_load_resources sending the current resources configuration.
+ self.config['resources'] = self.on_load_resources(self.config['resources']) # noqa
+
+ # Copy over for the template config also
+ # TODO: Check relevancy of this line
+ self.tpl_config['resources'] = deepcopy(self.config['resources'])
+
+ # Keep track of UTC date of last time resources were loaded
+ self.last_loaded_resources = datetime.now(timezone.utc)
+
+ def reload_resources_if_necessary(self) -> None:
+ """
+ Checks if the resources should be reloaded by calling overridable
+ function 'on_load_resources_check' and then, when necessary, calls
+ 'load_resources'.
+ """
+
+ # If the resources should be reloaded
+ if self.on_load_resources_check(self.last_loaded_resources):
+ # Reload the resources
+ self.load_resources()
+
def get_exception(self, status, headers, format_, code,
description) -> Tuple[dict, int, str]:
"""
@@ -657,7 +748,7 @@ def _create_crs_transform_spec(
if not query_crs_uri:
if storage_crs_uri in DEFAULT_CRS_LIST:
- # Could be that storageCRS is
+ # Could be that storageCrs is
# http://www.opengis.net/def/crs/OGC/1.3/CRS84h
query_crs_uri = storage_crs_uri
else:
@@ -714,7 +805,7 @@ def _set_content_crs_header(
# If empty use default CRS
storage_crs_uri = config.get('storage_crs', DEFAULT_STORAGE_CRS)
if storage_crs_uri in DEFAULT_CRS_LIST:
- # Could be that storageCRS is one of the defaults like
+ # Could be that storageCrs is one of the defaults like
# http://www.opengis.net/def/crs/OGC/1.3/CRS84h
content_crs_uri = storage_crs_uri
else:
@@ -922,6 +1013,7 @@ def conformance(api, request: APIRequest) -> Tuple[dict, int, str]:
@jsonldify
+@pre_load_colls
def describe_collections(api: API, request: APIRequest,
dataset=None) -> Tuple[dict, int, str]:
"""
@@ -1136,7 +1228,7 @@ def describe_collections(api: API, request: APIRequest,
# OAPIF Part 2 - list supported CRSs and StorageCRS
if collection_data_type in ['edr', 'feature']:
collection['crs'] = get_supported_crs_list(collection_data, DEFAULT_CRS_LIST) # noqa
- collection['storageCRS'] = collection_data.get('storage_crs', DEFAULT_STORAGE_CRS) # noqa
+ collection['storageCrs'] = collection_data.get('storage_crs', DEFAULT_STORAGE_CRS) # noqa
if 'storage_crs_coordinate_epoch' in collection_data:
collection['storageCrsCoordinateEpoch'] = collection_data.get('storage_crs_coordinate_epoch') # noqa
@@ -1425,6 +1517,8 @@ def get_collection_schema(api: API, request: Union[APIRequest, Any],
for k, v in p.fields.items():
schema['properties'][k] = v
+ if v['type'] == 'float':
+ schema['properties'][k]['type'] = 'number'
if v.get('format') is None:
schema['properties'][k].pop('format', None)
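
The `on_load_resources` / `on_load_resources_check` hooks added above are
meant to be overridden. A minimal sketch of a subclass, assuming a
hypothetical database-backed resource store (`fetch_resources_from_db` and
`db_last_modified` are illustrative stubs, not pygeoapi APIs):

.. code-block:: python

   from datetime import datetime, timezone

   from pygeoapi.api import API


   def fetch_resources_from_db() -> dict:
       """Hypothetical helper: read resource definitions from a database."""
       return {}


   def db_last_modified() -> datetime:
       """Hypothetical helper: UTC timestamp of the last resource change."""
       return datetime.now(timezone.utc)


   class DynamicAPI(API):
       """Sketch of an API whose resources can change at runtime."""

       def on_load_resources(self, resources: dict) -> dict:
           # Merge dynamically stored resources over the yaml-configured ones
           resources.update(fetch_resources_from_db())
           return resources

       def on_load_resources_check(self, last_loaded_resources: datetime) -> bool:  # noqa
           # Reload whenever the store changed after the last load
           return db_last_modified() > last_loaded_resources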
diff --git a/pygeoapi/api/coverages.py b/pygeoapi/api/coverages.py
index 65f57f450..6205d9f0e 100644
--- a/pygeoapi/api/coverages.py
+++ b/pygeoapi/api/coverages.py
@@ -51,7 +51,7 @@
from . import (
APIRequest, API, F_JSON, SYSTEM_LOCALE, validate_bbox, validate_datetime,
- validate_subset
+ validate_subset, pre_load_colls
)
LOGGER = logging.getLogger(__name__)
@@ -68,6 +68,7 @@
]
+@pre_load_colls
def get_collection_coverage(
api: API, request: APIRequest, dataset) -> Tuple[dict, int, str]:
"""
diff --git a/pygeoapi/api/itemtypes.py b/pygeoapi/api/itemtypes.py
index a7aae3afd..10a3adfe9 100644
--- a/pygeoapi/api/itemtypes.py
+++ b/pygeoapi/api/itemtypes.py
@@ -63,7 +63,7 @@
from . import (
APIRequest, API, SYSTEM_LOCALE, F_JSON, FORMAT_TYPES, F_HTML, F_JSONLD,
- validate_bbox, validate_datetime
+ validate_bbox, validate_datetime, pre_load_colls
)
LOGGER = logging.getLogger(__name__)
@@ -100,6 +100,7 @@
]
+@pre_load_colls
def get_collection_queryables(api: API, request: Union[APIRequest, Any],
dataset=None) -> Tuple[dict, int, str]:
"""
@@ -199,6 +200,8 @@ def get_collection_queryables(api: API, request: Union[APIRequest, Any],
'title': k,
'type': v['type']
}
+ if v['type'] == 'float':
+ queryables['properties'][k]['type'] = 'number'
if v.get('format') is not None:
queryables['properties'][k]['format'] = v['format']
if 'values' in v:
@@ -231,6 +234,7 @@ def get_collection_queryables(api: API, request: Union[APIRequest, Any],
return headers, HTTPStatus.OK, to_json(queryables, api.pretty_print)
+@pre_load_colls
def get_collection_items(
api: API, request: Union[APIRequest, Any],
dataset) -> Tuple[dict, int, str]:
@@ -397,8 +401,8 @@ def get_collection_items(
# bbox but no bbox-crs param: assume bbox is in default CRS
bbox_crs = DEFAULT_CRS
- # Transform bbox to storageCRS
- # when bbox-crs different from storageCRS.
+ # Transform bbox to storageCrs
+ # when bbox-crs different from storageCrs.
if len(bbox) > 0:
try:
# Get a pyproj CRS instance for the Collection's Storage CRS
@@ -580,7 +584,21 @@ def get_collection_items(
'href': f'{uri}?f={F_HTML}{serialized_query_params}'
}])
- if offset > 0:
+ next_link = False
+ prev_link = False
+
+ if 'next' in [link['rel'] for link in content['links']]:
+ LOGGER.debug('Using next link from provider')
+ else:
+ if content.get('numberMatched', -1) > (limit + offset):
+ next_link = True
+ elif len(content['features']) == limit:
+ next_link = True
+
+ if offset > 0:
+ prev_link = True
+
+ if prev_link:
prev = max(0, offset - limit)
content['links'].append(
{
@@ -590,13 +608,6 @@ def get_collection_items(
'href': f'{uri}?offset={prev}{serialized_query_params}'
})
- next_link = False
-
- if content.get('numberMatched', -1) > (limit + offset):
- next_link = True
- elif len(content['features']) == limit:
- next_link = True
-
if next_link:
next_ = offset + limit
next_href = f'{uri}?offset={next_}{serialized_query_params}'
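
The reordered paging logic above reduces to a single rule: keep a
provider-supplied `next` link, otherwise decide from the counts. A condensed
restatement (a sketch, not the code itself):

.. code-block:: python

   def should_add_next_link(links, number_matched, n_features, limit, offset):
       """Mirror of the next-link decision in get_collection_items."""
       if any(link['rel'] == 'next' for link in links):
           return False  # the provider already supplied a next link
       # more matches beyond this page, or a full page with unknown total
       return number_matched > (limit + offset) or n_features == limit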
@@ -688,6 +699,7 @@ def get_collection_items(
return headers, HTTPStatus.OK, to_json(content, api.pretty_print)
+@pre_load_colls
def manage_collection_item(
api: API, request: APIRequest,
action, dataset, identifier=None) -> Tuple[dict, int, str]:
@@ -799,6 +811,7 @@ def manage_collection_item(
return headers, HTTPStatus.OK, ''
+@pre_load_colls
def get_collection_item(api: API, request: APIRequest,
dataset, identifier) -> Tuple[dict, int, str]:
"""
@@ -999,7 +1012,7 @@ def create_crs_transform_spec(
if not query_crs_uri:
if storage_crs_uri in DEFAULT_CRS_LIST:
- # Could be that storageCRS is
+ # Could be that storageCrs is
# http://www.opengis.net/def/crs/OGC/1.3/CRS84h
query_crs_uri = storage_crs_uri
else:
@@ -1056,7 +1069,7 @@ def set_content_crs_header(
# If empty use default CRS
storage_crs_uri = config.get('storage_crs', DEFAULT_STORAGE_CRS)
if storage_crs_uri in DEFAULT_CRS_LIST:
- # Could be that storageCRS is one of the defaults like
+ # Could be that storageCrs is one of the defaults like
# http://www.opengis.net/def/crs/OGC/1.3/CRS84h
content_crs_uri = storage_crs_uri
else:
diff --git a/pygeoapi/api/maps.py b/pygeoapi/api/maps.py
index 728177f52..de91c2abd 100644
--- a/pygeoapi/api/maps.py
+++ b/pygeoapi/api/maps.py
@@ -51,7 +51,7 @@
filter_dict_by_key_value
)
-from . import APIRequest, API, validate_datetime
+from . import APIRequest, API, validate_datetime, pre_load_colls
LOGGER = logging.getLogger(__name__)
@@ -60,6 +60,7 @@
]
+@pre_load_colls
def get_collection_map(api: API, request: APIRequest,
dataset, style=None) -> Tuple[dict, int, str]:
"""
diff --git a/pygeoapi/api/tiles.py b/pygeoapi/api/tiles.py
index 6c456fe0e..0d457e596 100644
--- a/pygeoapi/api/tiles.py
+++ b/pygeoapi/api/tiles.py
@@ -238,7 +238,7 @@ def get_collection_tiles_data(
p = load_plugin('provider', t)
format_ = p.format_type
- headers['Content-Type'] = format_
+ headers['Content-Type'] = t['format']['mimetype']
LOGGER.debug(f'Fetching tileset id {matrix_id} and tile {z_idx}/{y_idx}/{x_idx}') # noqa
content = p.get_tiles(layer=p.get_layer(), tileset=matrix_id,
diff --git a/pygeoapi/plugin.py b/pygeoapi/plugin.py
index 08324cf64..620462a34 100644
--- a/pygeoapi/plugin.py
+++ b/pygeoapi/plugin.py
@@ -53,6 +53,7 @@
'MVT-tippecanoe': 'pygeoapi.provider.mvt_tippecanoe.MVTTippecanoeProvider', # noqa: E501
'MVT-elastic': 'pygeoapi.provider.mvt_elastic.MVTElasticProvider',
'MVT-proxy': 'pygeoapi.provider.mvt_proxy.MVTProxyProvider',
+ 'MVT-postgresql': 'pygeoapi.provider.mvt_postgresql.MVTPostgreSQLProvider', # noqa: E501
'OracleDB': 'pygeoapi.provider.oracle.OracleProvider',
'OGR': 'pygeoapi.provider.ogr.OGRProvider',
'OpenSearch': 'pygeoapi.provider.opensearch_.OpenSearchProvider',
diff --git a/pygeoapi/provider/base_mvt.py b/pygeoapi/provider/base_mvt.py
index 12e10155f..e2d096170 100644
--- a/pygeoapi/provider/base_mvt.py
+++ b/pygeoapi/provider/base_mvt.py
@@ -95,7 +95,8 @@ def get_tiles_service(self, baseurl=None, servicepath=None,
:returns: `dict` of item tile service
"""
- url = urlparse(self.data)
+        # self.data will be a dict when using MVTPostgreSQLProvider
+ url = urlparse(self.data) if isinstance(self.data, str) else urlparse('/') # noqa
baseurl = baseurl or f'{url.scheme}://{url.netloc}'
# @TODO: support multiple types
tile_type = tile_type or self.format_type
diff --git a/pygeoapi/provider/mvt_postgresql.py b/pygeoapi/provider/mvt_postgresql.py
new file mode 100644
index 000000000..61eda81df
--- /dev/null
+++ b/pygeoapi/provider/mvt_postgresql.py
@@ -0,0 +1,264 @@
+# =================================================================
+#
+# Authors: Prajwal Amaravati
+# Tanvi Prasad
+# Bryan Robert
+#
+# Copyright (c) 2025 Prajwal Amaravati
+# Copyright (c) 2025 Tanvi Prasad
+# Copyright (c) 2025 Bryan Robert
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# =================================================================
+
+from copy import deepcopy
+import logging
+
+from sqlalchemy.sql import text
+
+from pygeoapi.models.provider.base import (
+ TileSetMetadata, TileMatrixSetEnum, LinkType)
+from pygeoapi.provider.base import ProviderConnectionError
+from pygeoapi.provider.base_mvt import BaseMVTProvider
+from pygeoapi.provider.postgresql import PostgreSQLProvider
+from pygeoapi.provider.tile import ProviderTileNotFoundError
+from pygeoapi.util import url_join
+
+LOGGER = logging.getLogger(__name__)
+
+
+class MVTPostgreSQLProvider(BaseMVTProvider):
+ """
+ MVT PostgreSQL Provider
+ Provider for serving tiles rendered on-the-fly from
+ feature tables in PostgreSQL
+ """
+
+ def __init__(self, provider_def):
+ """
+ Initialize object
+
+ :param provider_def: provider definition
+
+ :returns: pygeoapi.provider.MVT.MVTPostgreSQLProvider
+ """
+
+ super().__init__(provider_def)
+
+ pg_def = deepcopy(provider_def)
+        # delete the zoom option before initializing the PostgreSQL
+        # provider, which otherwise breaks on the unknown option
+ del pg_def["options"]["zoom"]
+ self.postgres = PostgreSQLProvider(pg_def)
+
+ self.layer_name = provider_def["table"]
+ self.table = provider_def['table']
+ self.id_field = provider_def['id_field']
+ self.geom = provider_def.get('geom_field', 'geom')
+
+ LOGGER.debug(f'DB connection: {repr(self.postgres._engine.url)}')
+
+ def __repr__(self):
+        return f'<MVTPostgreSQLProvider> {self.data}'
+
+ @property
+ def service_url(self):
+ return self._service_url
+
+ @property
+ def service_metadata_url(self):
+ return self._service_metadata_url
+
+ def get_layer(self):
+ """
+        Returns the layer name (the configured table)
+
+ :returns: layer name
+ """
+
+ return self.layer_name
+
+ def get_tiling_schemes(self):
+
+ return [
+ TileMatrixSetEnum.WEBMERCATORQUAD.value,
+ TileMatrixSetEnum.WORLDCRS84QUAD.value
+ ]
+
+ def get_tiles_service(self, baseurl=None, servicepath=None,
+ dirpath=None, tile_type=None):
+ """
+ Gets mvt service description
+
+ :param baseurl: base URL of endpoint
+ :param servicepath: base path of URL
+ :param dirpath: directory basepath (equivalent of URL)
+ :param tile_type: tile format type
+
+ :returns: `dict` of item tile service
+ """
+
+ super().get_tiles_service(baseurl, servicepath,
+ dirpath, tile_type)
+
+ self._service_url = servicepath
+ return self.get_tms_links()
+
+ def get_tiles(self, layer=None, tileset=None,
+ z=None, y=None, x=None, format_=None):
+ """
+ Gets tile
+
+ :param layer: mvt tile layer
+ :param tileset: mvt tileset
+ :param z: z index
+ :param y: y index
+ :param x: x index
+ :param format_: tile format
+
+ :returns: an encoded mvt tile
+ """
+ if format_ == 'mvt':
+ format_ = self.format_type
+
+ fields_arr = self.postgres.get_fields().keys()
+ fields = ', '.join(['"' + f + '"' for f in fields_arr])
+ if len(fields) != 0:
+ fields = ',' + fields
+
+ query = ''
+ if tileset == TileMatrixSetEnum.WEBMERCATORQUAD.value.tileMatrixSet:
+ if not self.is_in_limits(TileMatrixSetEnum.WEBMERCATORQUAD.value, z, x, y): # noqa
+ raise ProviderTileNotFoundError
+
+ query = text("""
+ WITH
+ bounds AS (
+ SELECT ST_TileEnvelope(:z, :x, :y) AS boundgeom
+ ),
+ mvtgeom AS (
+ SELECT ST_AsMVTGeom(ST_Transform(ST_CurveToLine({geom}), 3857), bounds.boundgeom) AS geom {fields}
+ FROM "{table}", bounds
+ WHERE ST_Intersects({geom}, ST_Transform(bounds.boundgeom, 4326))
+ )
+ SELECT ST_AsMVT(mvtgeom, 'default') FROM mvtgeom;
+ """.format(geom=self.geom, table=self.table, fields=fields)) # noqa
+
+ if tileset == TileMatrixSetEnum.WORLDCRS84QUAD.value.tileMatrixSet:
+ if not self.is_in_limits(TileMatrixSetEnum.WORLDCRS84QUAD.value, z, x, y): # noqa
+ raise ProviderTileNotFoundError
+
+ query = text("""
+ WITH
+ bounds AS (
+ SELECT ST_TileEnvelope(:z, :x, :y,
+ 'SRID=4326;POLYGON((-180 -90,-180 90,180 90,180 -90,-180 -90))'::geometry) AS boundgeom
+ ),
+ mvtgeom AS (
+ SELECT ST_AsMVTGeom(ST_CurveToLine({geom}), bounds.boundgeom) AS geom {fields}
+ FROM "{table}", bounds
+ WHERE ST_Intersects({geom}, bounds.boundgeom)
+ )
+ SELECT ST_AsMVT(mvtgeom, 'default') FROM mvtgeom;
+ """.format(geom=self.geom, table=self.table, fields=fields)) # noqa
+
+ with self.postgres._engine.connect() as session:
+ result = session.execute(query, {
+ 'z': z,
+ 'y': y,
+ 'x': x
+ }).fetchone()
+
+ if len(bytes(result[0])) == 0:
+ return None
+ return bytes(result[0])
+
+ def get_html_metadata(self, dataset, server_url, layer, tileset,
+ title, description, keywords, **kwargs):
+
+ service_url = url_join(
+ server_url,
+ f'collections/{dataset}/tiles/{tileset}/{{tileMatrix}}/{{tileRow}}/{{tileCol}}?f=mvt') # noqa
+ metadata_url = url_join(
+ server_url,
+ f'collections/{dataset}/tiles/{tileset}/metadata')
+
+ metadata = dict()
+ metadata['id'] = dataset
+ metadata['title'] = title
+ metadata['tileset'] = tileset
+ metadata['collections_path'] = service_url
+ metadata['json_url'] = f'{metadata_url}?f=json'
+
+ return metadata
+
+ def get_default_metadata(self, dataset, server_url, layer, tileset,
+ title, description, keywords, **kwargs):
+
+ service_url = url_join(
+ server_url,
+ f'collections/{dataset}/tiles/{tileset}/{{tileMatrix}}/{{tileRow}}/{{tileCol}}?f=mvt') # noqa
+
+ content = {}
+ tiling_schemes = self.get_tiling_schemes()
+ # Default values
+ tileMatrixSetURI = tiling_schemes[0].tileMatrixSetURI
+        crs = tiling_schemes[0].crs
+        tiling_scheme = None
+        # Checking the selected matrix in configured tiling_schemes
+ for schema in tiling_schemes:
+ if (schema.tileMatrixSet == tileset):
+ crs = schema.crs
+ tileMatrixSetURI = schema.tileMatrixSetURI
+
+ tiling_scheme_url = url_join(
+ server_url, f'/TileMatrixSets/{schema.tileMatrixSet}')
+ tiling_scheme_url_type = "application/json"
+ tiling_scheme_url_title = f'{schema.tileMatrixSet} tile matrix set definition' # noqa
+
+ tiling_scheme = LinkType(href=tiling_scheme_url,
+ rel="http://www.opengis.net/def/rel/ogc/1.0/tiling-scheme", # noqa
+ type_=tiling_scheme_url_type,
+ title=tiling_scheme_url_title)
+
+ if tiling_scheme is None:
+            msg = 'Could not identify a valid tiling scheme'
+ LOGGER.error(msg)
+ raise ProviderConnectionError(msg)
+
+ content = TileSetMetadata(title=title, description=description,
+ keywords=keywords, crs=crs,
+ tileMatrixSetURI=tileMatrixSetURI)
+
+ links = []
+ service_url_link_type = "application/vnd.mapbox-vector-tile"
+ service_url_link_title = f'{tileset} vector tiles for {layer}'
+ service_url_link = LinkType(href=service_url, rel="item",
+ type_=service_url_link_type,
+ title=service_url_link_title)
+
+ links.append(tiling_scheme)
+ links.append(service_url_link)
+
+ content.links = links
+
+ return content.dict(exclude_none=True)
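
For a quick sanity check, the new provider can also be exercised directly;
the sketch below assumes a reachable PostGIS instance holding the
`hotosm_bdi_waterways` table from the documentation example (connection
details are illustrative):

.. code-block:: python

   from pygeoapi.provider.mvt_postgresql import MVTPostgreSQLProvider

   provider_def = {
       'name': 'MVT-postgresql',
       'type': 'tile',
       'data': {
           'host': '127.0.0.1',
           'dbname': 'test',
           'user': 'postgres',
           'password': 'postgres',
           'search_path': ['osm', 'public']
       },
       'id_field': 'osm_id',
       'table': 'hotosm_bdi_waterways',
       'geom_field': 'foo_geom',
       'options': {'zoom': {'min': 0, 'max': 15}},
       'format': {'name': 'pbf',
                  'mimetype': 'application/vnd.mapbox-vector-tile'}
   }

   p = MVTPostgreSQLProvider(provider_def)
   tile = p.get_tiles(layer=p.get_layer(), tileset='WebMercatorQuad',
                      z=0, y=0, x=0, format_='mvt')  # bytes, or None if empty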
diff --git a/pygeoapi/provider/rasterio_.py b/pygeoapi/provider/rasterio_.py
index 3b0fbc2c7..68b614be2 100644
--- a/pygeoapi/provider/rasterio_.py
+++ b/pygeoapi/provider/rasterio_.py
@@ -2,7 +2,7 @@
#
# Authors: Tom Kralidis
#
-# Copyright (c) 2024 Tom Kralidis
+# Copyright (c) 2025 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
@@ -79,9 +79,11 @@ def get_fields(self):
dtype2 = dtype
if dtype.startswith('float'):
- dtype2 = 'number'
+ dtype2 = 'float'
elif dtype.startswith('int'):
dtype2 = 'integer'
+ elif dtype.startswith('str'):
+ dtype2 = 'string'
self._fields[i2] = {
'title': name,
@@ -306,7 +308,9 @@ def gen_covjson(self, metadata, data):
parameter = {
'type': 'Parameter',
- 'description': pm['description'],
+ 'description': {
+ 'en': pm['description']
+ },
'unit': {
'symbol': pm['unit_label']
},
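
Together with the API-layer change earlier in this diff (mapping provider
`float` fields to JSON Schema `number`), field typing is now two-stage. A
condensed restatement of both stages (a sketch of the mappings, not the
provider code):

.. code-block:: python

   def provider_field_type(dtype_name: str) -> str:
       """Provider-level dtype normalization, as in get_fields above."""
       if dtype_name.startswith('float'):
           return 'float'
       if dtype_name.startswith('int'):
           return 'integer'
       if dtype_name.startswith('str'):
           return 'string'
       return dtype_name

   def json_schema_type(field_type: str) -> str:
       """API-level mapping applied to schemas and queryables."""
       return 'number' if field_type == 'float' else field_type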
diff --git a/pygeoapi/provider/xarray_.py b/pygeoapi/provider/xarray_.py
index 9ed2726b1..dd6f423b5 100644
--- a/pygeoapi/provider/xarray_.py
+++ b/pygeoapi/provider/xarray_.py
@@ -4,7 +4,7 @@
# Authors: Tom Kralidis
#
# Copyright (c) 2020 Gregory Petrochenkov
-# Copyright (c) 2022 Tom Kralidis
+# Copyright (c) 2025 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
@@ -111,9 +111,11 @@ def get_fields(self):
LOGGER.debug('Adding variable')
dtype = value.dtype
if dtype.name.startswith('float'):
- dtype = 'number'
+ dtype = 'float'
elif dtype.name.startswith('int'):
dtype = 'integer'
+ elif dtype.name.startswith('str'):
+ dtype = 'string'
self._fields[key] = {
'type': dtype,
@@ -330,18 +332,35 @@ def gen_covjson(self, metadata, data, fields):
'ranges': {}
}
+ if (data.coords[self.x_field].size == 1 and
+ data.coords[self.y_field].size == 1):
+ LOGGER.debug('Modelling as PointSeries')
+ cj['domain']['axes']['x'] = {
+ 'values': [float(data.coords[self.x_field].values)]
+ }
+ cj['domain']['axes']['y'] = {
+ 'values': [float(data.coords[self.y_field].values)]
+ }
+ cj['domain']['domainType'] = 'PointSeries'
+
if self.time_field is not None:
- mint, maxt = metadata['time']
- cj['domain']['axes'][self.time_field] = {
- 'start': mint,
- 'stop': maxt,
- 'num': metadata['time_steps'],
+ cj['domain']['axes']['t'] = {
+ 'values': [str(v) for v in data[self.time_field].values]
}
+ cj['domain']['referencing'].append({
+ 'coordinates': ['t'],
+ 'system': {
+ 'type': 'TemporalRS',
+ 'calendar': 'Gregorian'
+ }
+ })
for key, value in selected_fields.items():
parameter = {
'type': 'Parameter',
- 'description': value['title'],
+ 'description': {
+ 'en': value['title']
+ },
'unit': {
'symbol': value['x-ogc-unit']
},
@@ -368,12 +387,13 @@ def gen_covjson(self, metadata, data, fields):
'shape': [metadata['height'],
metadata['width']]
}
- cj['ranges'][key]['values'] = data[key].values.flatten().tolist() # noqa
+ cj['ranges'][key]['values'] = [
+ None if np.isnan(v) else v
+ for v in data[key].values.flatten()
+ ]
if self.time_field is not None:
- cj['ranges'][key]['axisNames'].append(
- self._coverage_properties['time_axis_label']
- )
+ cj['ranges'][key]['axisNames'].append('t')
cj['ranges'][key]['shape'].append(metadata['time_steps'])
except IndexError as err:
LOGGER.warning(err)
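
The `NaN` replacement above matters because Python's `json` module would
otherwise serialize `NaN` as a bare `NaN` token, which is not valid JSON. A
self-contained illustration of the conversion:

.. code-block:: python

   import json

   import numpy as np

   values = np.array([1.5, np.nan, 2.5])
   cleaned = [None if np.isnan(v) else float(v) for v in values]
   print(json.dumps(cleaned))  # -> [1.5, null, 2.5]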
diff --git a/pygeoapi/templates/collections/collection.html b/pygeoapi/templates/collections/collection.html
index 65bac7b33..aa128a53f 100644
--- a/pygeoapi/templates/collections/collection.html
+++ b/pygeoapi/templates/collections/collection.html
@@ -125,7 +125,7 @@ {% trans %}Reference Systems{% endtrans %}
{% trans %}Storage CRS{% endtrans %}
-
- {% trans %}CRS{% endtrans %}: {{ data['storageCRS'] }}
+ {% trans %}CRS{% endtrans %}: {{ data['storageCrs'] }}
-
{% trans %}Epoch{% endtrans %}: {{ data['storageCrsCoordinateEpoch'] or '_(not specified)' }}
@@ -148,8 +148,8 @@
{% trans %}Storage CRS{% endtrans %}
{# if this collection has a map representation, add it to the map #}
{% for link in data['links'] %}
- {% if link['rel'] == 'http://www.opengis.net/def/rel/ogc/1.0/map' and link['href'] %}
- L.tileLayer.wms("{{ link['href'] }}", {"opacity": .7, "transparent": true, "crs": L.CRS.EPSG4326}).addTo(map);
+ {% if link['rel'] == 'http://www.opengis.net/def/rel/ogc/1.0/map' and link['href'] %}
+ L.tileLayer.wms("{{ link['href'] }}", {"opacity": .7, "transparent": true, "crs": L.CRS.EPSG4326}).addTo(map);
{% endif %}
{% endfor %}
diff --git a/tests/api/test_api.py b/tests/api/test_api.py
index 90492b613..70f810840 100644
--- a/tests/api/test_api.py
+++ b/tests/api/test_api.py
@@ -614,8 +614,8 @@ def test_describe_collections(config, api_):
]
for crs in crs_set:
assert crs in collection['crs']
- assert collection['storageCRS'] is not None
- assert collection['storageCRS'] == 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' # noqa
+ assert collection['storageCrs'] is not None
+ assert collection['storageCrs'] == 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' # noqa
assert 'storageCrsCoordinateEpoch' not in collection
# French language request
@@ -652,8 +652,8 @@ def test_describe_collections(config, api_):
if crs in default_crs_list:
contains_default = True
assert contains_default
- assert collection['storageCRS'] is not None
- assert collection['storageCRS'] == 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' # noqa
+ assert collection['storageCrs'] is not None
+ assert collection['storageCrs'] == 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' # noqa
assert collection['storageCrsCoordinateEpoch'] == 2017.23
diff --git a/tests/api/test_environmental_data_retrieval.py b/tests/api/test_environmental_data_retrieval.py
index 59232e7f6..230218499 100644
--- a/tests/api/test_environmental_data_retrieval.py
+++ b/tests/api/test_environmental_data_retrieval.py
@@ -5,7 +5,7 @@
# Colin Blackburn
# Bernhard Mallinger
#
-# Copyright (c) 2024 Tom Kralidis
+# Copyright (c) 2025 Tom Kralidis
# Copyright (c) 2022 John A Stevenson and Colin Blackburn
#
# Permission is hereby granted, free of charge, to any person
@@ -87,12 +87,12 @@ def test_get_collection_edr_query(config, api_):
axes = list(data['domain']['axes'].keys())
axes.sort()
assert len(axes) == 3
- assert axes == ['TIME', 'x', 'y']
+ assert axes == ['t', 'x', 'y']
- assert data['domain']['axes']['x']['start'] == 11.0
- assert data['domain']['axes']['x']['stop'] == 11.0
- assert data['domain']['axes']['y']['start'] == 11.0
- assert data['domain']['axes']['y']['stop'] == 11.0
+ assert isinstance(data['domain']['axes']['x'], dict)
+ assert isinstance(data['domain']['axes']['x']['values'], list)
+ assert data['domain']['axes']['x']['values'][0] == 11.0
+ assert data['domain']['axes']['y']['values'][0] == 11.0
parameters = list(data['parameters'].keys())
parameters.sort()
@@ -131,11 +131,19 @@ def test_get_collection_edr_query(config, api_):
assert code == HTTPStatus.OK
data = json.loads(response)
- time_dict = data['domain']['axes']['TIME']
+ time_dict = data['domain']['axes']['t']
+ assert isinstance(time_dict, dict)
+ assert isinstance(time_dict['values'], list)
- assert time_dict['start'] == '2000-02-15T16:29:05.999999999'
- assert time_dict['stop'] == '2000-06-16T10:25:30.000000000'
- assert time_dict['num'] == 5
+ t_values = [
+ '2000-02-15T16:29:05.999999999',
+ '2000-03-17T02:58:12.000000000',
+ '2000-04-16T13:27:18.000000000',
+ '2000-05-16T23:56:24.000000000',
+ '2000-06-16T10:25:30.000000000'
+ ]
+
+ assert sorted(time_dict['values']) == t_values
# unbounded date range - start
req = mock_api_request({
@@ -147,11 +155,20 @@ def test_get_collection_edr_query(config, api_):
assert code == HTTPStatus.OK
data = json.loads(response)
- time_dict = data['domain']['axes']['TIME']
+ time_dict = data['domain']['axes']['t']
+ assert isinstance(time_dict, dict)
+ assert isinstance(time_dict['values'], list)
+
+ t_values = [
+ '2000-01-16T06:00:00.000000000',
+ '2000-02-15T16:29:05.999999999',
+ '2000-03-17T02:58:12.000000000',
+ '2000-04-16T13:27:18.000000000',
+ '2000-05-16T23:56:24.000000000',
+ '2000-06-16T10:25:30.000000000'
+ ]
- assert time_dict['start'] == '2000-01-16T06:00:00.000000000'
- assert time_dict['stop'] == '2000-06-16T10:25:30.000000000'
- assert time_dict['num'] == 6
+ assert sorted(time_dict['values']) == t_values
# unbounded date range - end
req = mock_api_request({
@@ -163,11 +180,21 @@ def test_get_collection_edr_query(config, api_):
assert code == HTTPStatus.OK
data = json.loads(response)
- time_dict = data['domain']['axes']['TIME']
-
- assert time_dict['start'] == '2000-06-16T10:25:30.000000000'
- assert time_dict['stop'] == '2000-12-16T01:20:05.999999996'
- assert time_dict['num'] == 7
+ time_dict = data['domain']['axes']['t']
+ assert isinstance(time_dict, dict)
+ assert isinstance(time_dict['values'], list)
+
+ t_values = [
+ '2000-06-16T10:25:30.000000000',
+ '2000-07-16T20:54:36.000000000',
+ '2000-08-16T07:23:42.000000000',
+ '2000-09-15T17:52:48.000000000',
+ '2000-10-16T04:21:54.000000000',
+ '2000-11-15T14:51:00.000000000',
+ '2000-12-16T01:20:05.999999996'
+ ]
+
+ assert sorted(time_dict['values']) == t_values
# some data
req = mock_api_request({
diff --git a/tests/api/test_itemtypes.py b/tests/api/test_itemtypes.py
index 7b5d9bb29..17ea213ac 100644
--- a/tests/api/test_itemtypes.py
+++ b/tests/api/test_itemtypes.py
@@ -444,7 +444,7 @@ def test_get_collection_items_crs(config, api_):
assert code == HTTPStatus.OK
assert rsp_headers['Content-Crs'] == f'<{crs}>'
- # With CRS query parameter, using storageCRS
+ # With CRS query parameter, using storageCrs
req = mock_api_request({'crs': storage_crs})
rsp_headers, code, response = get_collection_items(
api_, req, 'norway_pop')
diff --git a/tests/conftest.py b/tests/conftest.py
index ddeb3b496..d17a7107d 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -3,6 +3,7 @@
# Authors: Bernhard Mallinger
#
# Copyright (c) 2024 Bernhard Mallinger
+# Copyright (c) 2025 Francesco Bartoli
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
@@ -26,10 +27,14 @@
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
+import io
+import json
+import sys
import pytest
from pygeoapi.api import API
+from pygeoapi.provider.base import BaseProvider, ProviderItemNotFoundError
from pygeoapi.util import yaml_load
from tests.util import get_test_file_path
@@ -50,3 +55,99 @@ def openapi():
@pytest.fixture()
def api_(config, openapi):
return API(config, openapi)
+
+
+@pytest.fixture
+def basic_provider_def():
+ """Basic provider definition for testing."""
+ return {
+ "name": "test_provider",
+ "type": "feature",
+ "data": "/path/to/data.geojson"
+ }
+
+
+@pytest.fixture
+def extended_provider_def():
+ """Extended provider definition with all optional fields."""
+ return {
+ "name": "test_provider",
+ "type": "feature",
+ "data": "/path/to/data.geojson",
+ "editable": True,
+ "options": {"some_option": "value"},
+ "id_field": "feature_id",
+ "uri_field": "uri",
+ "x_field": "longitude",
+ "y_field": "latitude",
+ "time_field": "timestamp",
+ "title_field": "title",
+ "properties": ["prop1", "prop2"],
+ "file_types": [".geojson", ".json"]
+ }
+
+
+@pytest.fixture
+def basic_provider(basic_provider_def):
+ """Basic BaseProvider instance."""
+ return BaseProvider(basic_provider_def)
+
+
+@pytest.fixture
+def extended_provider(extended_provider_def):
+ """Extended BaseProvider instance."""
+ return BaseProvider(extended_provider_def)
+
+
+@pytest.fixture
+def mock_provider_with_get():
+ """Mock provider that implements get() method."""
+ class MockProvider(BaseProvider):
+ def get(self, identifier, **kwargs):
+ if identifier == "mock_id":
+ return {"type": "Feature", "id": identifier}
+ else:
+ raise ProviderItemNotFoundError("Not found")
+
+ provider_def = {
+ "name": "mock_provider",
+ "type": "feature",
+ "data": "/path/to/data.geojson"
+ }
+ return MockProvider(provider_def)
+
+
+@pytest.fixture
+def valid_geojson_item():
+ """Valid GeoJSON item for testing."""
+ return json.dumps({
+ "type": "Feature",
+ "id": "test_id",
+ "geometry": {"type": "Point", "coordinates": [0, 0]},
+ "properties": {"name": "Test Feature"}
+ })
+
+
+@pytest.fixture
+def geojson_item_with_props_id():
+ """GeoJSON item with identifier in properties."""
+ return json.dumps({
+ "type": "Feature",
+ "geometry": {"type": "Point", "coordinates": [0, 0]},
+ "properties": {"identifier": "props_id", "name": "Test"}
+ })
+
+
+@pytest.fixture
+def remove_stdout():
+ """Fixture to remove standard output during tests."""
+ class RemoveStdout:
+ def __enter__(self):
+ self.original_stdout = sys.stdout
+ sys.stdout = io.StringIO()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ sys.stdout = self.original_stdout
+
+ return RemoveStdout
diff --git a/tests/test_base_provider.py b/tests/test_base_provider.py
new file mode 100644
index 000000000..6aaef601d
--- /dev/null
+++ b/tests/test_base_provider.py
@@ -0,0 +1,402 @@
+# =================================================================
+#
+# Authors: Francesco Bartoli
+#
+# Copyright (c) 2025 Francesco Bartoli
+#
+# Permission is hereby granted, free of charge, to any person
+# obtaining a copy of this software and associated documentation
+# files (the "Software"), to deal in the Software without
+# restriction, including without limitation the rights to use,
+# copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the
+# Software is furnished to do so, subject to the following
+# conditions:
+#
+# The above copyright notice and this permission notice shall be
+# included in all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
+# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
+# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
+# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
+# OTHER DEALINGS IN THE SOFTWARE.
+#
+# =================================================================
+
+import pytest
+import json
+from http import HTTPStatus
+
+from pygeoapi.provider.base import (
+ BaseProvider, ProviderTypeError,
+ ProviderItemNotFoundError, ProviderInvalidDataError,
+ ProviderInvalidQueryError, ProviderRequestEntityTooLargeError,
+ ProviderConnectionError, ProviderGenericError, ProviderQueryError,
+ ProviderNoDataError, SchemaType
+)
+
+
+def test_valid_initialization(basic_provider_def):
+ """Test BaseProvider initialization with valid config"""
+ provider = BaseProvider(basic_provider_def)
+
+ # Test required fields
+ assert provider.name == "test_provider"
+ assert provider.type == "feature"
+ assert provider.data == "/path/to/data.geojson"
+
+
+def test_initialization_with_optional_fields(extended_provider_def):
+ """Test BaseProvider initialization with optional fields"""
+ provider = BaseProvider(extended_provider_def)
+
+ # Test required fields
+ assert provider.name == "test_provider"
+ assert provider.type == "feature"
+ assert provider.data == "/path/to/data.geojson"
+
+ # Test optional fields
+ assert provider.editable is True
+ assert provider.options == {"some_option": "value"}
+ assert provider.id_field == "feature_id"
+ assert provider.uri_field == "uri"
+ assert provider.x_field == "longitude"
+ assert provider.y_field == "latitude"
+ assert provider.time_field == "timestamp"
+ assert provider.title_field == "title"
+ assert provider.properties == ["prop1", "prop2"]
+ assert provider.file_types == [".geojson", ".json"]
+
+
+def test_default_values(basic_provider):
+ """Test default values for optional fields"""
+ # Test default values
+ assert basic_provider.editable is False
+ assert basic_provider.options is None
+ assert basic_provider.id_field is None
+ assert basic_provider.uri_field is None
+ assert basic_provider.x_field is None
+ assert basic_provider.y_field is None
+ assert basic_provider.time_field is None
+ assert basic_provider.title_field is None
+ assert basic_provider.properties == []
+ assert basic_provider.file_types == []
+ assert basic_provider._fields == {}
+ assert basic_provider.filename is None
+ assert basic_provider.axes == []
+ assert basic_provider.crs is None
+ assert basic_provider.num_bands is None
+
+
+@pytest.mark.parametrize("missing_field,config", [
+ ("name", {"type": "feature", "data": "/path"}),
+ ("type", {"name": "test", "data": "/path"}),
+ ("data", {"name": "test", "type": "feature"}),
+ ("all", {})
+])
+def test_missing_required_fields(missing_field, config):
+ """Test that missing required fields raise RuntimeError"""
+ with pytest.raises(RuntimeError, match="name/type/data are required"):
+ BaseProvider(config)
+
+
+def test_repr_method(basic_provider):
+ """Test __repr__ method"""
+    assert repr(basic_provider) == "<BaseProvider> feature"
+
+
+# Test Functions for BaseProvider Methods
+
+def test_get_fields_not_implemented(basic_provider):
+ """Test that get_fields raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.get_fields()
+
+
+@pytest.mark.parametrize("schema_type", [
+ None, # Default
+ SchemaType.item,
+ SchemaType.create,
+ SchemaType.update,
+ SchemaType.replace
+])
+def test_get_schema_not_implemented(basic_provider, schema_type):
+ """Test that get_schema raises NotImplementedError."""
+ if schema_type is None:
+ with pytest.raises(NotImplementedError):
+ basic_provider.get_schema()
+ else:
+ with pytest.raises(NotImplementedError):
+ basic_provider.get_schema(schema_type)
+
+
+def test_get_data_path_not_implemented(basic_provider):
+ """Test that get_data_path raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.get_data_path("http://example.com", "/path", "/dir")
+
+
+def test_get_metadata_not_implemented(basic_provider):
+ """Test that get_metadata raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.get_metadata()
+
+
+@pytest.mark.parametrize("properties,current", [
+ ([], False), # Default
+ (["prop1", "prop2"], True),
+ ([], True),
+ (["prop1"], False)
+])
+def test_get_domains_not_implemented(basic_provider, properties, current):
+ """Test that get_domains raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.get_domains(properties, current)
+
+
+def test_query_not_implemented(basic_provider):
+ """Test that query raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.query()
+
+
+@pytest.mark.parametrize("identifier,kwargs", [
+ ("test_id", {}),
+ ("test_id", {"some_param": "value"}),
+ ("another_id", {"param1": "value1", "param2": "value2"})
+])
+def test_get_not_implemented(basic_provider, identifier, kwargs):
+ """Test that get raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.get(identifier, **kwargs)
+
+
+def test_create_not_implemented(basic_provider):
+ """Test that create raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.create({"type": "Feature"})
+
+
+def test_update_not_implemented(basic_provider):
+ """Test that update raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.update("test_id", {"type": "Feature"})
+
+
+def test_delete_not_implemented(basic_provider):
+ """Test that delete raises NotImplementedError."""
+ with pytest.raises(NotImplementedError):
+ basic_provider.delete("test_id")
+
+
+def test_fields_property_with_empty_fields(basic_provider):
+ """Test fields property when _fields is empty."""
+
+ result = basic_provider.fields
+ assert result == {}
+ assert result is basic_provider._fields
+
+
+def test_fields_property_with_populated_fields(basic_provider):
+ """Test fields property when _fields is populated."""
+ # Populate _fields manually
+ test_fields = {
+ 'id': {'type': 'string'},
+ 'name': {'type': 'string'},
+ 'geometry': {'type': 'geometry'}
+ }
+ basic_provider._fields = test_fields
+ assert basic_provider.fields == test_fields
+
+
+def test_fields_property_without_fields_attribute():
+ """Test fields property when _fields attribute doesn't exist."""
+    provider_def = {
+        'name': 'test_provider',
+        'type': 'feature',
+        'data': '/path/to/data.geojson'
+    }
+    provider = BaseProvider(provider_def)
+    # Delete _fields so the fields property falls back to get_fields()
+    delattr(provider, '_fields')
+ assert not hasattr(provider, '_fields')
+
+ # get_fields() gets called and raises NotImplementedError
+ with pytest.raises(NotImplementedError):
+ _ = provider.fields
+
+
+def test_load_and_prepare_item_valid_geojson(
+ mock_provider_with_get, valid_geojson_item, remove_stdout
+):
+ """Test loading valid GeoJSON item."""
+ with remove_stdout():
+ identifier, data = mock_provider_with_get._load_and_prepare_item(
+ valid_geojson_item)
+
+ assert identifier == "test_id"
+ assert data["type"] == "Feature"
+ assert data["id"] == "test_id"
+ assert "geometry" in data
+ assert "properties" in data
+
+
+def test_load_and_prepare_item_identifier_in_properties(
+ mock_provider_with_get, geojson_item_with_props_id, remove_stdout
+):
+ """Test loading item with identifier in properties."""
+ with remove_stdout():
+ identifier, data = mock_provider_with_get._load_and_prepare_item(
+ geojson_item_with_props_id)
+
+ assert identifier == "props_id"
+ assert data["properties"]["identifier"] == "props_id"
+
+
+def test_load_and_prepare_item_invalid_json(
+ mock_provider_with_get, remove_stdout
+):
+ """Test loading invalid JSON."""
+ invalid_json = "{ invalid json }"
+
+ with remove_stdout():
+ with pytest.raises(
+ ProviderInvalidDataError,
+ match="Invalid JSON data"
+ ):
+ mock_provider_with_get._load_and_prepare_item(invalid_json)
+
+
+def test_load_and_prepare_item_invalid_data_type(
+ mock_provider_with_get, remove_stdout
+):
+ """Test loading invalid data type."""
+ with remove_stdout():
+ with pytest.raises(ProviderInvalidDataError, match="Invalid data"):
+ mock_provider_with_get._load_and_prepare_item(123)
+
+
+def test_load_and_prepare_item_missing_identifier(
+ mock_provider_with_get, remove_stdout
+):
+ """Test loading item without identifier."""
+ item_no_id = json.dumps({
+ "type": "Feature",
+ "geometry": {"type": "Point", "coordinates": [0, 0]},
+ "properties": {"name": "Test Feature"}
+ })
+
+ with remove_stdout():
+ with pytest.raises(
+ ProviderInvalidDataError,
+ match="Missing identifier \\(id or properties.identifier\\)"
+ ):
+ mock_provider_with_get._load_and_prepare_item(item_no_id)
+
+
+def test_load_and_prepare_item_accept_missing_identifier(
+ mock_provider_with_get, remove_stdout
+):
+ """Test loading item without identifier when accepting missing."""
+ item_no_id = json.dumps({
+ "type": "Feature",
+ "geometry": {"type": "Point", "coordinates": [0, 0]},
+ "properties": {"name": "Test Feature"}
+ })
+
+ with remove_stdout():
+ identifier, data = mock_provider_with_get._load_and_prepare_item(
+ item_no_id,
+ accept_missing_identifier=True
+ )
+
+ assert identifier is None
+ assert data["type"] == "Feature"
+
+
+@pytest.mark.parametrize("missing_part,item_data", [
+ ("geometry", {
+ "type": "Feature",
+ "id": "test_id",
+ "properties": {"name": "Test Feature"}
+ }),
+ ("properties", {
+ "type": "Feature",
+ "id": "test_id",
+ "geometry": {"type": "Point", "coordinates": [0, 0]}
+ })
+])
+def test_load_and_prepare_item_missing_geojson_parts(
+ mock_provider_with_get, missing_part, item_data, remove_stdout
+):
+ """Test loading item without required GeoJSON parts."""
+ item_json = json.dumps(item_data)
+
+ with remove_stdout():
+ with pytest.raises(
+ ProviderInvalidDataError,
+ match=f"Missing core GeoJSON {missing_part}"
+ ):
+ mock_provider_with_get._load_and_prepare_item(item_json)
+
+
+# Test Functions for Provider Exceptions
+
+@pytest.mark.parametrize("exception_class,expected_msg", [
+ (ProviderGenericError, "generic error (check logs)"),
+ (ProviderConnectionError, "connection error (check logs)"),
+ (ProviderTypeError, "invalid provider type"),
+ (ProviderInvalidQueryError, "query error"),
+ (ProviderQueryError, "query error (check logs)"),
+ (ProviderItemNotFoundError, "identifier not found"),
+ (ProviderNoDataError, "No data found")
+])
+def test_provider_exceptions_default_messages(exception_class, expected_msg):
+ """Test provider exception default messages."""
+ error = exception_class()
+ assert error.default_msg == expected_msg
+
+
+@pytest.mark.parametrize("exception_class,expected_code", [
+ (ProviderTypeError, HTTPStatus.BAD_REQUEST),
+ (ProviderInvalidQueryError, HTTPStatus.BAD_REQUEST),
+ (ProviderItemNotFoundError, HTTPStatus.NOT_FOUND),
+ (ProviderNoDataError, HTTPStatus.NO_CONTENT),
+ (ProviderRequestEntityTooLargeError, HTTPStatus.REQUEST_ENTITY_TOO_LARGE)
+])
+def test_provider_exceptions_http_status_codes(exception_class, expected_code):
+ """Test provider exception HTTP status codes."""
+ error = exception_class()
+ assert error.http_status_code == expected_code
+
+
+@pytest.mark.parametrize("exception_class,expected_code", [
+ (ProviderInvalidQueryError, "InvalidQuery"),
+ (ProviderItemNotFoundError, "NotFound"),
+ (ProviderNoDataError, "InvalidParameterValue")
+])
+def test_provider_exceptions_status_codes(exception_class, expected_code):
+    """Test provider exception OGC exception codes."""
+ error = exception_class()
+ assert error.ogc_exception_code == expected_code
+
+
+def test_provider_request_entity_too_large_error_with_message():
+ """Test ProviderRequestEntityTooLargeError with message."""
+ error = ProviderRequestEntityTooLargeError("Too large")
+ assert error.http_status_code == HTTPStatus.REQUEST_ENTITY_TOO_LARGE
+
+
+@pytest.mark.parametrize("schema_type,expected_value", [
+ (SchemaType.item, "item"),
+ (SchemaType.create, "create"),
+ (SchemaType.update, "update"),
+ (SchemaType.replace, "replace")
+])
+def test_schema_type_values(schema_type, expected_value):
+ """Test SchemaType enum values."""
+ assert schema_type.value == expected_value
diff --git a/tests/test_django.py b/tests/test_django.py
index 06cda4f09..10af13bc2 100644
--- a/tests/test_django.py
+++ b/tests/test_django.py
@@ -38,4 +38,4 @@ def test_django_edr_without_instance_id(django_):
# Validate CoverageJSON is returned
response_json = response.json()
assert response_json["type"] == "Coverage"
- assert response_json["domain"]["domainType"] == "Grid"
+ assert response_json["domain"]["domainType"] == "PointSeries"
diff --git a/tests/test_ogr_gpkg_provider.py b/tests/test_ogr_gpkg_provider.py
index 723e2dca2..87ad4f30f 100644
--- a/tests/test_ogr_gpkg_provider.py
+++ b/tests/test_ogr_gpkg_provider.py
@@ -136,7 +136,7 @@ def config_gpkg_28992():
'http://www.opengis.net/def/crs/OGC/1.3/CRS84',
'http://www.opengis.net/def/crs/EPSG/0/28992'
],
- 'storageCRS': 'http://www.opengis.net/def/crs/EPSG/0/28992',
+ 'storageCrs': 'http://www.opengis.net/def/crs/EPSG/0/28992',
'id_field': 'id',
'layer': 'OGRGeoJSON'
}
diff --git a/tests/test_ogr_wfs_provider_live.py b/tests/test_ogr_wfs_provider_live.py
index 5e212268f..492e4831b 100644
--- a/tests/test_ogr_wfs_provider_live.py
+++ b/tests/test_ogr_wfs_provider_live.py
@@ -140,7 +140,7 @@ def config_geosol_gs_WFS():
'http://www.opengis.net/def/crs/OGC/1.3/CRS84',
'http://www.opengis.net/def/crs/EPSG/0/32632'
],
- 'storageCRS': 'http://www.opengis.net/def/crs/EPSG/0/32632',
+ 'storageCrs': 'http://www.opengis.net/def/crs/EPSG/0/32632',
'id_field': 'gml_id',
'layer': 'unesco:Unesco_point',
}