Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactor: Query Routes #423

Draft
wants to merge 1 commit into
base: develop
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion app/routes/datasets/downloads.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,7 @@
from ...utils.geostore import get_geostore
from ...utils.path import split_s3_path
from .. import dataset_version_dependency
from .queries import _query_dataset_csv, _query_dataset_json
from ..utils.downloads import _query_dataset_csv, _query_dataset_json

router: APIRouter = APIRouter()

Expand Down
54 changes: 2 additions & 52 deletions app/routes/datasets/queries.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
import csv
import re
from io import StringIO
from typing import Any, Dict, List, Optional, Tuple, Union, cast
from typing import Any, Dict, List, Optional, Tuple, Union
from urllib.parse import unquote
from uuid import UUID, uuid4

Expand All @@ -25,7 +25,6 @@
from ...authentication.token import is_gfwpro_admin_for_query
from ...application import db
# from ...authentication.api_keys import get_api_key
from ...crud import assets
from ...models.enum.assets import AssetType
from ...models.enum.creation_options import Delimiters
from ...models.enum.geostore import GeostoreOrigin
Expand Down Expand Up @@ -76,6 +75,7 @@
from ...utils.aws import invoke_lambda
from ...utils.geostore import get_geostore
from .. import dataset_version_dependency
from ..utils.downloads import _query_dataset_csv, _query_dataset_json

router = APIRouter()

Expand Down Expand Up @@ -305,56 +305,6 @@ async def query_dataset_csv_post(
return CSVStreamingResponse(iter([csv_data.getvalue()]), download=False)


async def _query_dataset_json(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
) -> List[Dict[str, Any]]:
    """Execute *sql* against the dataset/version's default asset and return
    the result rows as a list of dicts.

    Dispatch depends on the default asset's query type: table-backed assets
    are queried via ``_query_table``, raster assets via ``_query_raster``;
    any other asset type results in HTTP 501.
    """
    # Make sure we can query the dataset
    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(default_asset, geostore)
    if query_type == QueryType.table:
        # Table queries take the raw GeoJSON geometry, or None when no
        # geostore (i.e. no spatial filter) was supplied.
        geometry = geostore.geojson if geostore else None
        return await _query_table(dataset, version, sql, geometry)
    elif query_type == QueryType.raster:
        # NOTE(review): the cast assumes _get_query_type only returns
        # QueryType.raster when geostore is not None — confirm upstream.
        geostore = cast(GeostoreCommon, geostore)
        results = await _query_raster(dataset, default_asset, sql, geostore)
        # Raster results are returned wrapped; only the "data" payload is rows.
        return results["data"]
    else:
        raise HTTPException(
            status_code=501,
            detail="This endpoint is not implemented for the given dataset.",
        )


async def _query_dataset_csv(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
    delimiter: Delimiters = Delimiters.comma,
) -> StringIO:
    """Execute *sql* against the dataset/version's default asset and return
    the result as CSV text in a ``StringIO`` buffer.

    Table-backed assets are queried via ``_query_table`` and serialized with
    ``_orm_to_csv``; raster assets are queried via ``_query_raster`` with
    ``QueryFormat.csv`` so the CSV text comes back pre-rendered. Any other
    asset type results in HTTP 501.
    """
    # Make sure we can query the dataset
    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(default_asset, geostore)
    if query_type == QueryType.table:
        # Table queries take the raw GeoJSON geometry, or None when no
        # geostore (i.e. no spatial filter) was supplied.
        geometry = geostore.geojson if geostore else None
        response = await _query_table(dataset, version, sql, geometry)
        return _orm_to_csv(response, delimiter=delimiter)
    elif query_type == QueryType.raster:
        # NOTE(review): the cast assumes _get_query_type only returns
        # QueryType.raster when geostore is not None — confirm upstream.
        geostore = cast(GeostoreCommon, geostore)
        results = await _query_raster(
            dataset, default_asset, sql, geostore, QueryFormat.csv, delimiter
        )
        # The raster backend returns the CSV body as a string under "data".
        return StringIO(results["data"])
    else:
        raise HTTPException(
            status_code=501,
            detail="This endpoint is not implemented for the given dataset.",
        )


def _get_query_type(default_asset: AssetORM, geostore: Optional[GeostoreCommon]):
if default_asset.asset_type in [
AssetType.geo_database_table,
Expand Down
Empty file added app/routes/utils/__init__.py
Empty file.
82 changes: 82 additions & 0 deletions app/routes/utils/queries.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
import csv
from io import StringIO
from typing import Any, Dict, List, Optional, cast

from fastapi import HTTPException

from ...crud import assets
from ...models.enum.creation_options import Delimiters
from ...models.enum.queries import QueryFormat, QueryType
from ...models.orm.assets import Asset as AssetORM
from ...models.pydantic.geostore import GeostoreCommon
from ..datasets.queries import _get_query_type, _query_raster, _query_table


async def _query_dataset_json(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
) -> List[Dict[str, Any]]:
    """Run *sql* against the default asset of *dataset*/*version* and return
    the resulting rows as a list of dicts.

    Table assets are dispatched to ``_query_table``, raster assets to
    ``_query_raster``; any other asset type raises HTTP 501.
    """
    # First verify the dataset exposes a queryable default asset.
    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(default_asset, geostore)

    if query_type == QueryType.table:
        return await _query_table(
            dataset, version, sql, geostore.geojson if geostore else None
        )

    if query_type == QueryType.raster:
        raster_response = await _query_raster(
            dataset, default_asset, sql, cast(GeostoreCommon, geostore)
        )
        # Only the "data" payload of the raster response holds the rows.
        return raster_response["data"]

    raise HTTPException(
        status_code=501,
        detail="This endpoint is not implemented for the given dataset.",
    )


async def _query_dataset_csv(
    dataset: str,
    version: str,
    sql: str,
    geostore: Optional[GeostoreCommon],
    delimiter: Delimiters = Delimiters.comma,
) -> StringIO:
    """Run *sql* against the default asset of *dataset*/*version* and return
    the result as CSV text in a ``StringIO`` buffer.

    Table assets are queried via ``_query_table`` and serialized with
    ``_orm_to_csv``; raster assets are queried with ``QueryFormat.csv`` so
    the backend returns ready-made CSV. Other asset types raise HTTP 501.
    """
    # First verify the dataset exposes a queryable default asset.
    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
    query_type = _get_query_type(default_asset, geostore)

    if query_type == QueryType.table:
        rows = await _query_table(
            dataset, version, sql, geostore.geojson if geostore else None
        )
        return _orm_to_csv(rows, delimiter=delimiter)

    if query_type == QueryType.raster:
        raster_response = await _query_raster(
            dataset,
            default_asset,
            sql,
            cast(GeostoreCommon, geostore),
            QueryFormat.csv,
            delimiter,
        )
        # The raster backend returns the CSV body as a string under "data".
        return StringIO(raster_response["data"])

    raise HTTPException(
        status_code=501,
        detail="This endpoint is not implemented for the given dataset.",
    )


def _orm_to_csv(
    data: List[Dict[str, Any]], delimiter: Delimiters = Delimiters.comma
) -> StringIO:
    """Serialize query result rows to CSV in a ``StringIO`` buffer.

    The header row is taken from the keys of the first record; non-numeric
    values are quoted. An empty input yields an empty buffer. The buffer is
    rewound so callers can stream it directly.
    """
    buffer = StringIO()

    if not data:
        return buffer

    writer = csv.writer(buffer, quoting=csv.QUOTE_NONNUMERIC, delimiter=delimiter)
    writer.writerow(data[0].keys())
    writer.writerows(record.values() for record in data)
    buffer.seek(0)

    return buffer
Loading