From 95b9f352b57b2f4805421b2b1922fc57a7f7e953 Mon Sep 17 00:00:00 2001
From: Gary Tempus Jr
Date: Thu, 7 Sep 2023 13:54:15 -0400
Subject: [PATCH] refactor(routes): move query helper functions to utils

---
 app/routes/datasets/downloads.py |  2 +-
 app/routes/datasets/queries.py   | 56 +---------------------
 app/routes/utils/__init__.py     |  0
 app/routes/utils/queries.py      | 82 ++++++++++++++++++++++++++++++++
 4 files changed, 85 insertions(+), 55 deletions(-)
 create mode 100644 app/routes/utils/__init__.py
 create mode 100644 app/routes/utils/queries.py

diff --git a/app/routes/datasets/downloads.py b/app/routes/datasets/downloads.py
index 1bd8e1a32..3f0ae8f70 100644
--- a/app/routes/datasets/downloads.py
+++ b/app/routes/datasets/downloads.py
@@ -24,7 +24,7 @@
 from ...utils.geostore import get_geostore
 from ...utils.path import split_s3_path
 from .. import dataset_version_dependency
-from .queries import _query_dataset_csv, _query_dataset_json
+from ..utils.queries import _query_dataset_csv, _query_dataset_json
 
 router: APIRouter = APIRouter()
 
diff --git a/app/routes/datasets/queries.py b/app/routes/datasets/queries.py
index 428539d73..187cbd325 100755
--- a/app/routes/datasets/queries.py
+++ b/app/routes/datasets/queries.py
@@ -2,7 +2,7 @@
 import csv
 import re
 from io import StringIO
-from typing import Any, Dict, List, Optional, Tuple, Union, cast
+from typing import Any, Dict, List, Optional, Tuple, Union
 from urllib.parse import unquote
 from uuid import UUID, uuid4
 
@@ -21,12 +21,10 @@
 from pglast.parser import ParseError
 from pglast.printer import RawStream
 from pydantic.tools import parse_obj_as
-from sqlalchemy.sql import and_
 
 from ...application import db
 
 # from ...authentication.api_keys import get_api_key
-from ...crud import assets
 from ...models.enum.assets import AssetType
 from ...models.enum.creation_options import Delimiters
 from ...models.enum.geostore import GeostoreOrigin
@@ -61,7 +59,6 @@
 from ...models.enum.queries import QueryFormat, QueryType
 from ...models.orm.assets import Asset as AssetORM
 from ...models.orm.queries.raster_assets import latest_raster_tile_sets
-from ...models.orm.versions import Version as VersionORM
 from ...models.pydantic.asset_metadata import RasterTable, RasterTableRow
 from ...models.pydantic.creation_options import NoDataType
 from ...models.pydantic.geostore import Geometry, GeostoreCommon
@@ -78,6 +75,7 @@
 from ...utils.aws import invoke_lambda
 from ...utils.geostore import get_geostore
 from .. import dataset_version_dependency
+from ..utils.queries import _query_dataset_csv, _query_dataset_json
 
 router = APIRouter()
 
@@ -291,56 +289,6 @@ async def query_dataset_csv_post(
     return CSVStreamingResponse(iter([csv_data.getvalue()]), download=False)
 
 
-async def _query_dataset_json(
-    dataset: str,
-    version: str,
-    sql: str,
-    geostore: Optional[GeostoreCommon],
-) -> List[Dict[str, Any]]:
-    # Make sure we can query the dataset
-    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
-    query_type = _get_query_type(default_asset, geostore)
-    if query_type == QueryType.table:
-        geometry = geostore.geojson if geostore else None
-        return await _query_table(dataset, version, sql, geometry)
-    elif query_type == QueryType.raster:
-        geostore = cast(GeostoreCommon, geostore)
-        results = await _query_raster(dataset, default_asset, sql, geostore)
-        return results["data"]
-    else:
-        raise HTTPException(
-            status_code=501,
-            detail="This endpoint is not implemented for the given dataset.",
-        )
-
-
-async def _query_dataset_csv(
-    dataset: str,
-    version: str,
-    sql: str,
-    geostore: Optional[GeostoreCommon],
-    delimiter: Delimiters = Delimiters.comma,
-) -> StringIO:
-    # Make sure we can query the dataset
-    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
-    query_type = _get_query_type(default_asset, geostore)
-    if query_type == QueryType.table:
-        geometry = geostore.geojson if geostore else None
-        response = await _query_table(dataset, version, sql, geometry)
-        return _orm_to_csv(response, delimiter=delimiter)
-    elif query_type == QueryType.raster:
-        geostore = cast(GeostoreCommon, geostore)
-        results = await _query_raster(
-            dataset, default_asset, sql, geostore, QueryFormat.csv, delimiter
-        )
-        return StringIO(results["data"])
-    else:
-        raise HTTPException(
-            status_code=501,
-            detail="This endpoint is not implemented for the given dataset.",
-        )
-
-
 def _get_query_type(default_asset: AssetORM, geostore: Optional[GeostoreCommon]):
     if default_asset.asset_type in [
         AssetType.geo_database_table,
diff --git a/app/routes/utils/__init__.py b/app/routes/utils/__init__.py
new file mode 100644
index 000000000..e69de29bb
diff --git a/app/routes/utils/queries.py b/app/routes/utils/queries.py
new file mode 100644
index 000000000..3acb21409
--- /dev/null
+++ b/app/routes/utils/queries.py
@@ -0,0 +1,82 @@
+import csv
+from io import StringIO
+from typing import Any, Dict, List, Optional, cast
+
+from fastapi import HTTPException
+
+from ...crud import assets
+from ...models.enum.creation_options import Delimiters
+from ...models.enum.queries import QueryFormat, QueryType
+from ...models.orm.assets import Asset as AssetORM
+from ...models.pydantic.geostore import GeostoreCommon
+from ..datasets.queries import _get_query_type, _query_raster, _query_table
+
+
+async def _query_dataset_json(
+    dataset: str,
+    version: str,
+    sql: str,
+    geostore: Optional[GeostoreCommon],
+) -> List[Dict[str, Any]]:
+    # Make sure we can query the dataset
+    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
+    query_type = _get_query_type(default_asset, geostore)
+    if query_type == QueryType.table:
+        geometry = geostore.geojson if geostore else None
+        return await _query_table(dataset, version, sql, geometry)
+    elif query_type == QueryType.raster:
+        geostore = cast(GeostoreCommon, geostore)
+        results = await _query_raster(dataset, default_asset, sql, geostore)
+        return results["data"]
+    else:
+        raise HTTPException(
+            status_code=501,
+            detail="This endpoint is not implemented for the given dataset.",
+        )
+
+
+async def _query_dataset_csv(
+    dataset: str,
+    version: str,
+    sql: str,
+    geostore: Optional[GeostoreCommon],
+    delimiter: Delimiters = Delimiters.comma,
+) -> StringIO:
+    # Make sure we can query the dataset
+    default_asset: AssetORM = await assets.get_default_asset(dataset, version)
+    query_type = _get_query_type(default_asset, geostore)
+    if query_type == QueryType.table:
+        geometry = geostore.geojson if geostore else None
+        response = await _query_table(dataset, version, sql, geometry)
+        return _orm_to_csv(response, delimiter=delimiter)
+    elif query_type == QueryType.raster:
+        geostore = cast(GeostoreCommon, geostore)
+        results = await _query_raster(
+            dataset, default_asset, sql, geostore, QueryFormat.csv, delimiter
+        )
+        return StringIO(results["data"])
+    else:
+        raise HTTPException(
+            status_code=501,
+            detail="This endpoint is not implemented for the given dataset.",
+        )
+
+
+def _orm_to_csv(
+    data: List[Dict[str, Any]], delimiter: Delimiters = Delimiters.comma
+) -> StringIO:
+    """Create a new csv file that represents generated data.
+
+    Response will return a temporary redirect to download URL.
+    """
+    csv_file = StringIO()
+
+    if data:
+        wr = csv.writer(csv_file, quoting=csv.QUOTE_NONNUMERIC, delimiter=delimiter)
+        field_names = data[0].keys()
+        wr.writerow(field_names)
+        for row in data:
+            wr.writerow(row.values())
+        csv_file.seek(0)
+
+    return csv_file
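
Usage sketch (not part of the patch): the snippet below is a minimal illustration of how a route module could consume the relocated helpers once they live in app/routes/utils/queries.py. The module path and endpoint names here are hypothetical, chosen only to show the new import location and the call signatures moved above.

# Hypothetical caller, e.g. app/routes/datasets/example.py (illustrative only).
from typing import Any, Dict, List, Optional

from fastapi import APIRouter

from ...models.pydantic.geostore import GeostoreCommon
from ..utils.queries import _query_dataset_csv, _query_dataset_json

router = APIRouter()


@router.get("/{dataset}/{version}/example")
async def example_json(
    dataset: str, version: str, sql: str
) -> List[Dict[str, Any]]:
    # Table assets tolerate a missing geostore (the helper passes geometry=None);
    # raster assets expect one, since the helper casts it before querying.
    geostore: Optional[GeostoreCommon] = None
    return await _query_dataset_json(dataset, version, sql, geostore)


@router.get("/{dataset}/{version}/example.csv")
async def example_csv(dataset: str, version: str, sql: str) -> str:
    # _query_dataset_csv returns a StringIO; a real route would wrap it in a
    # streaming response rather than returning the whole body as a string.
    csv_file = await _query_dataset_csv(dataset, version, sql, geostore=None)
    return csv_file.getvalue()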