Queryables landing page and collection links (#267)
**Related Issue(s):**

- #260 

**Description:**

- Adds the `queryables` link defined by the Filter extension to the landing page and to collections.
- The collection-level `queryables` link is only added when the Filter extension is enabled. This is done by passing a list of enabled extension names to the `DatabaseLogic` class, and the same mechanism could be used to gate other behavior on which extensions are enabled or disabled in the app. Please let me know if you have any suggestions for this approach; a minimal sketch of the gating idea follows this list.
- Some improvements to `data_loader.py`.
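
A minimal, runnable sketch of the gating idea (the helper below is hypothetical and only mirrors the check this PR adds; it is not the actual `CollectionLinks` code):

```python
from typing import Any, Dict, List
from urllib.parse import urljoin


def build_collection_links(
    base_url: str, collection_id: str, extensions: List[str]
) -> List[Dict[str, Any]]:
    """Return collection links, adding `queryables` only when the Filter extension is enabled."""
    links: List[Dict[str, Any]] = [
        {
            "rel": "items",
            "href": urljoin(base_url, f"collections/{collection_id}/items"),
        }
    ]
    if "FilterExtension" in extensions:
        links.append(
            {
                "rel": "queryables",
                "href": urljoin(base_url, f"collections/{collection_id}/queryables"),
            }
        )
    return links


# With the Filter extension enabled the queryables link is present...
print(build_collection_links("http://test-server/", "demo", ["FilterExtension"]))
# ...and with it disabled only the items link is returned.
print(build_collection_links("http://test-server/", "demo", []))
```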

**PR Checklist:**

- [x] Code is formatted and linted (run `pre-commit run --all-files`)
- [x] Tests pass (run `make test`)
- [ ] Documentation has been updated to reflect changes, if applicable
- [x] Changes are added to the changelog

---------

Co-authored-by: Jonathan Healy <[email protected]>
jamesfisher-geo and jonhealy1 authored Jun 5, 2024
1 parent 009754e commit a416ec0
Showing 12 changed files with 135 additions and 27 deletions.
3 changes: 3 additions & 0 deletions CHANGELOG.md
@@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.

## [Unreleased]

### Added
- Queryables landing page and collection links when the Filter Extension is enabled [#267](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/267)

### Changed

- Updated stac-fastapi libraries to v3.0.0a1 [#265](https://github.com/stac-utils/stac-fastapi-elasticsearch-opensearch/pull/265)
7 changes: 6 additions & 1 deletion data_loader.py
@@ -22,12 +22,17 @@ def load_collection(base_url, collection_id, data_dir):
collection["id"] = collection_id
try:
resp = requests.post(f"{base_url}/collections", json=collection)
if resp.status_code == 200:
if resp.status_code == 200 or resp.status_code == 201:
click.echo(f"Status code: {resp.status_code}")
click.echo(f"Added collection: {collection['id']}")
elif resp.status_code == 409:
click.echo(f"Status code: {resp.status_code}")
click.echo(f"Collection: {collection['id']} already exists")
else:
click.echo(f"Status code: {resp.status_code}")
click.echo(
f"Error writing {collection['id']} collection. Message: {resp.text}"
)
except requests.ConnectionError:
click.secho("Failed to connect", fg="red", err=True)

1 change: 1 addition & 0 deletions sample_data/collection.json
@@ -1,6 +1,7 @@
{
"id":"sentinel-s2-l2a-cogs-test",
"stac_version":"1.0.0",
"type": "Collection",
"description":"Sentinel-2a and Sentinel-2b imagery, processed to Level 2A (Surface Reflectance) and converted to Cloud-Optimized GeoTIFFs",
"links":[
{"rel":"self","href":"https://earth-search.aws.element84.com/v0/collections/sentinel-s2-l2a-cogs"},
45 changes: 32 additions & 13 deletions stac_fastapi/core/stac_fastapi/core/core.py
@@ -153,6 +153,19 @@ async def landing_page(self, **kwargs) -> stac_types.LandingPage:
conformance_classes=self.conformance_classes(),
extension_schemas=[],
)

if self.extension_is_enabled("FilterExtension"):
landing_page["links"].append(
{
# TODO: replace this with Relations.queryables.value,
"rel": "queryables",
# TODO: replace this with MimeTypes.jsonschema,
"type": "application/schema+json",
"title": "Queryables",
"href": urljoin(base_url, "queryables"),
}
)

collections = await self.all_collections(request=kwargs["request"])
for collection in collections["collections"]:
landing_page["links"].append(
@@ -205,7 +218,7 @@ async def all_collections(self, **kwargs) -> stac_types.Collections:
token = request.query_params.get("token")

collections, next_token = await self.database.get_all_collections(
token=token, limit=limit, base_url=base_url
token=token, limit=limit, request=request
)

links = [
@@ -239,10 +252,12 @@ async def get_collection(
Raises:
NotFoundError: If the collection with the given id cannot be found in the database.
"""
base_url = str(kwargs["request"].base_url)
request = kwargs["request"]
collection = await self.database.find_collection(collection_id=collection_id)
return self.collection_serializer.db_to_stac(
collection=collection, base_url=base_url
collection=collection,
request=request,
extensions=[type(ext).__name__ for ext in self.extensions],
)

async def item_collection(
@@ -748,12 +763,14 @@ async def create_collection(
ConflictError: If the collection already exists.
"""
collection = collection.model_dump(mode="json")
base_url = str(kwargs["request"].base_url)
collection = self.database.collection_serializer.stac_to_db(
collection, base_url
)
request = kwargs["request"]
collection = self.database.collection_serializer.stac_to_db(collection, request)
await self.database.create_collection(collection=collection)
return CollectionSerializer.db_to_stac(collection, base_url)
return CollectionSerializer.db_to_stac(
collection,
request,
extensions=[type(ext).__name__ for ext in self.database.extensions],
)

@overrides
async def update_collection(
@@ -780,16 +797,18 @@
"""
collection = collection.model_dump(mode="json")

base_url = str(kwargs["request"].base_url)
request = kwargs["request"]

collection = self.database.collection_serializer.stac_to_db(
collection, base_url
)
collection = self.database.collection_serializer.stac_to_db(collection, request)
await self.database.update_collection(
collection_id=collection_id, collection=collection
)

return CollectionSerializer.db_to_stac(collection, base_url)
return CollectionSerializer.db_to_stac(
collection,
request,
extensions=[type(ext).__name__ for ext in self.database.extensions],
)

@overrides
async def delete_collection(
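
For reference, the landing-page link added in `landing_page` above can be reproduced standalone. The literal `rel`/`type` strings match the in-code TODOs, which note that `Relations.queryables.value` and `MimeTypes.jsonschema` should eventually be used instead; the base URL below is an assumed example, in the app it is derived from the incoming request.

```python
from urllib.parse import urljoin

base_url = "http://test-server/"  # assumed example; the app derives this from the request
queryables_link = {
    "rel": "queryables",                # TODO in the diff: Relations.queryables.value
    "type": "application/schema+json",  # TODO in the diff: MimeTypes.jsonschema
    "title": "Queryables",
    "href": urljoin(base_url, "queryables"),
}
print(queryables_link["href"])  # -> http://test-server/queryables
```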
33 changes: 33 additions & 0 deletions stac_fastapi/core/stac_fastapi/core/models/links.py
@@ -107,6 +107,39 @@ async def get_links(
return links


@attr.s
class CollectionLinks(BaseLinks):
"""Create inferred links specific to collections."""

collection_id: str = attr.ib()
extensions: List[str] = attr.ib(default=attr.Factory(list))

def link_parent(self) -> Dict[str, Any]:
"""Create the `parent` link."""
return dict(rel=Relations.parent, type=MimeTypes.json.value, href=self.base_url)

def link_items(self) -> Dict[str, Any]:
"""Create the `items` link."""
return dict(
rel="items",
type=MimeTypes.geojson.value,
href=urljoin(self.base_url, f"collections/{self.collection_id}/items"),
)

def link_queryables(self) -> Dict[str, Any]:
"""Create the `queryables` link."""
if "FilterExtension" in self.extensions:
return dict(
rel="queryables",
type=MimeTypes.json.value,
href=urljoin(
self.base_url, f"collections/{self.collection_id}/queryables"
),
)
else:
return None


@attr.s
class PagingLinks(BaseLinks):
"""Create links for paging."""
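
One detail worth noting about the `urljoin` calls in `link_items` and `link_queryables` above: the relative path is only appended cleanly when the base URL ends with a trailing slash. The URLs below are assumed example values, not values from this PR.

```python
from urllib.parse import urljoin

collection_id = "sentinel-s2-l2a-cogs-test"

# With a trailing slash, the relative path is appended to the base URL.
print(urljoin("http://test-server/", f"collections/{collection_id}/queryables"))
# -> http://test-server/collections/sentinel-s2-l2a-cogs-test/queryables

# Without one, the final path segment ("stac") is treated as a document and replaced.
print(urljoin("http://test-server/stac", f"collections/{collection_id}/queryables"))
# -> http://test-server/collections/sentinel-s2-l2a-cogs-test/queryables
```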
25 changes: 16 additions & 9 deletions stac_fastapi/core/stac_fastapi/core/serializers.py
@@ -1,13 +1,15 @@
"""Serializers."""
import abc
from copy import deepcopy
from typing import Any
from typing import Any, List, Optional

import attr
from starlette.requests import Request

from stac_fastapi.core.datetime_utils import now_to_rfc3339_str
from stac_fastapi.core.models.links import CollectionLinks
from stac_fastapi.types import stac as stac_types
from stac_fastapi.types.links import CollectionLinks, ItemLinks, resolve_links
from stac_fastapi.types.links import ItemLinks, resolve_links


@attr.s
@@ -109,29 +111,34 @@ class CollectionSerializer(Serializer):

@classmethod
def stac_to_db(
cls, collection: stac_types.Collection, base_url: str
cls, collection: stac_types.Collection, request: Request
) -> stac_types.Collection:
"""
Transform STAC Collection to database-ready STAC collection.
Args:
stac_data: the STAC Collection object to be transformed
base_url: the base URL for the STAC API
starlette.requests.Request: the API request
Returns:
stac_types.Collection: The database-ready STAC Collection object.
"""
collection = deepcopy(collection)
collection["links"] = resolve_links(collection.get("links", []), base_url)
collection["links"] = resolve_links(
collection.get("links", []), str(request.base_url)
)
return collection

@classmethod
def db_to_stac(cls, collection: dict, base_url: str) -> stac_types.Collection:
def db_to_stac(
cls, collection: dict, request: Request, extensions: Optional[List[str]] = []
) -> stac_types.Collection:
"""Transform database model to STAC collection.
Args:
collection (dict): The collection data in dictionary form, extracted from the database.
base_url (str): The base URL for the collection.
starlette.requests.Request: the API request
extensions: A list of the extension class names (`ext.__name__`) or all enabled STAC API extensions.
Returns:
stac_types.Collection: The STAC collection object.
@@ -157,13 +164,13 @@ def db_to_stac(cls, collection: dict, base_url: str) -> stac_types.Collection:

# Create the collection links using CollectionLinks
collection_links = CollectionLinks(
collection_id=collection_id, base_url=base_url
collection_id=collection_id, request=request, extensions=extensions
).create_links()

# Add any additional links from the collection dictionary
original_links = collection.get("links")
if original_links:
collection_links += resolve_links(original_links, base_url)
collection_links += resolve_links(original_links, str(request.base_url))
collection["links"] = collection_links

# Return the stac_types.Collection object
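
The signature changes above pass the whole `Request` rather than a bare `base_url` string, so link builders can read `request.base_url` (and `request.url`) themselves. A toy stand-in, mirroring the `MockRequest` fixture in the tests rather than the real serializer, shows the same derivation:

```python
from dataclasses import dataclass, field
from typing import Dict


@dataclass
class FakeRequest:
    """Toy stand-in mirroring the MockRequest test fixture, not a real starlette Request."""

    base_url: str = "http://test-server/"
    url: str = "http://test-server/collections/demo"
    query_params: Dict[str, str] = field(default_factory=dict)


def resolve_base_url(request: FakeRequest) -> str:
    # Same str(request.base_url) derivation used in stac_to_db/db_to_stac above.
    return str(request.base_url)


print(resolve_base_url(FakeRequest()))  # -> http://test-server/
```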
2 changes: 2 additions & 0 deletions stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/app.py
@@ -59,6 +59,8 @@
filter_extension,
]

database_logic.extensions = [type(ext).__name__ for ext in extensions]

post_request_model = create_post_request_model(extensions)

api = StacApi(
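
The assignment above stores plain class-name strings, which is what the `"FilterExtension" in self.extensions` checks in `links.py` compare against. The classes below are empty stand-ins for illustration, not the real extension objects:

```python
# Empty stand-in classes for illustration only.
class FilterExtension: ...


class TokenPaginationExtension: ...


extensions = [FilterExtension(), TokenPaginationExtension()]
print([type(ext).__name__ for ext in extensions])
# -> ['FilterExtension', 'TokenPaginationExtension']
```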
stac_fastapi/elasticsearch/stac_fastapi/elasticsearch/database_logic.py
@@ -7,6 +7,7 @@

import attr
from elasticsearch_dsl import Q, Search
from starlette.requests import Request

from elasticsearch import exceptions, helpers # type: ignore
from stac_fastapi.core.extensions import filter
@@ -312,10 +313,12 @@ class DatabaseLogic:
default=CollectionSerializer
)

extensions: List[str] = attr.ib(default=attr.Factory(list))

"""CORE LOGIC"""

async def get_all_collections(
self, token: Optional[str], limit: int, base_url: str
self, token: Optional[str], limit: int, request: Request
) -> Tuple[List[Dict[str, Any]], Optional[str]]:
"""Retrieve a list of all collections from Elasticsearch, supporting pagination.
@@ -342,7 +345,7 @@ async def get_all_collections(
hits = response["hits"]["hits"]
collections = [
self.collection_serializer.db_to_stac(
collection=hit["_source"], base_url=base_url
collection=hit["_source"], request=request, extensions=self.extensions
)
for hit in hits
]
2 changes: 2 additions & 0 deletions stac_fastapi/opensearch/stac_fastapi/opensearch/app.py
@@ -59,6 +59,8 @@
filter_extension,
]

database_logic.extensions = [type(ext).__name__ for ext in extensions]

post_request_model = create_post_request_model(extensions)

api = StacApi(
stac_fastapi/opensearch/stac_fastapi/opensearch/database_logic.py
@@ -10,6 +10,7 @@
from opensearchpy.exceptions import TransportError
from opensearchpy.helpers.query import Q
from opensearchpy.helpers.search import Search
from starlette.requests import Request

from stac_fastapi.core import serializers
from stac_fastapi.core.extensions import filter
@@ -333,10 +334,12 @@ class DatabaseLogic:
default=serializers.CollectionSerializer
)

extensions: List[str] = attr.ib(default=attr.Factory(list))

"""CORE LOGIC"""

async def get_all_collections(
self, token: Optional[str], limit: int, base_url: str
self, token: Optional[str], limit: int, request: Request
) -> Tuple[List[Dict[str, Any]], Optional[str]]:
"""
Retrieve a list of all collections from Opensearch, supporting pagination.
@@ -366,7 +369,7 @@
hits = response["hits"]["hits"]
collections = [
self.collection_serializer.db_to_stac(
collection=hit["_source"], base_url=base_url
collection=hit["_source"], request=request, extensions=self.extensions
)
for hit in hits
]
1 change: 1 addition & 0 deletions stac_fastapi/tests/conftest.py
@@ -58,6 +58,7 @@ def __init__(self, item, collection):

class MockRequest:
base_url = "http://test-server"
url = "http://test-server/test"
query_params = {}

def __init__(
29 changes: 29 additions & 0 deletions stac_fastapi/tests/extensions/test_filter.py
@@ -8,6 +8,35 @@
THIS_DIR = os.path.dirname(os.path.abspath(__file__))


@pytest.mark.asyncio
async def test_filter_extension_landing_page_link(app_client, ctx):
resp = await app_client.get("/")
assert resp.status_code == 200

resp_json = resp.json()
keys = [link["rel"] for link in resp_json["links"]]

assert "queryables" in keys


@pytest.mark.asyncio
async def test_filter_extension_collection_link(app_client, load_test_data):
"""Test creation and deletion of a collection"""
test_collection = load_test_data("test_collection.json")
test_collection["id"] = "test"

resp = await app_client.post("/collections", json=test_collection)
assert resp.status_code == 201

resp = await app_client.get(f"/collections/{test_collection['id']}")
resp_json = resp.json()
keys = [link["rel"] for link in resp_json["links"]]
assert "queryables" in keys

resp = await app_client.delete(f"/collections/{test_collection['id']}")
assert resp.status_code == 204


@pytest.mark.asyncio
async def test_search_filters_post(app_client, ctx):
