
Commit 5de3182

jeremyh authored and omad committed
Revert "[pre-commit.ci] auto fixes from pre-commit.com hooks"
This reverts commit e0aa69f due to issues detected in #581
1 parent c5af35a commit 5de3182

File tree

11 files changed (+167 -163 lines)

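Most of the hunks below undo the same mechanical rewrite: the auto-fixes had replaced dict(...) constructor calls with dict literals (plus a few related changes, such as an f-string in place of str.format and a list comprehension in place of list(map(...))), and this revert restores the earlier spelling. For the cases shown here the two spellings build identical objects; a minimal standalone sketch (illustrative values, not from the repository):

# Equivalent ways of building the same dictionary. The constructor form only
# works when every key is a valid Python identifier; the literal form accepts
# any hashable key.
as_literal = {"type": "FeatureCollection", "features": []}
as_call = dict(type="FeatureCollection", features=[])
assert as_literal == as_call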

cubedash/_api.py

Lines changed: 4 additions & 4 deletions
@@ -33,9 +33,9 @@ def datasets_geojson(
     time = _utils.as_time_range(year, month, day, tzinfo=_model.STORE.grouping_timezone)

     return as_geojson(
-        {
-            "type": "FeatureCollection",
-            "features": [
+        dict(
+            type="FeatureCollection",
+            features=[
                 s.as_geojson()
                 for s in _model.STORE.search_items(
                     product_names=[product_name],
@@ -45,7 +45,7 @@ def datasets_geojson(
                 )
                 if s.geom_geojson is not None
             ],
-        },
+        ),
         downloadable_filename_prefix=_utils.api_path_as_filename_prefix(),
     )


cubedash/_filters.py

Lines changed: 3 additions & 1 deletion
@@ -173,7 +173,9 @@ def _format_albers_area(shape: MultiPolygon):
 @bp.app_template_filter("query_value")
 def _format_query_value(val):
     if isinstance(val, Range):
-        return f"{_format_query_value(val.begin)} to {_format_query_value(val.end)}"
+        return "{} to {}".format(
+            _format_query_value(val.begin), _format_query_value(val.end)
+        )
     if isinstance(val, datetime):
         return _format_datetime(val)
     if val is None:
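The f-string and the str.format call reverted above render the same text; a standalone sketch with hypothetical begin/end values (the real code recurses through _format_query_value):

# Hypothetical stand-ins for the already-formatted range bounds.
begin, end = "2019-01-01", "2019-02-01"
assert f"{begin} to {end}" == "{} to {}".format(begin, end)  # "2019-01-01 to 2019-02-01"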

cubedash/_model.py

Lines changed: 14 additions & 14 deletions
@@ -76,10 +76,10 @@

 # Global defaults
 app.config.from_mapping(
-    {
-        "CUBEDASH_DEFAULT_API_LIMIT": 500,
-        "CUBEDASH_HARD_API_LIMIT": 4000,
-    }
+    dict(
+        CUBEDASH_DEFAULT_API_LIMIT=500,
+        CUBEDASH_HARD_API_LIMIT=4000,
+    )
 )

 cache = Cache(app=app, config=app.config)
@@ -187,15 +187,15 @@ def get_footprint_geojson(
     if not footprint:
         return None

-    return {
-        "type": "Feature",
-        "geometry": footprint.__geo_interface__,
-        "properties": {
-            "dataset_count": period.footprint_count,
-            "product_name": product_name,
-            "time_spec": [year, month, day],
-        },
-    }
+    return dict(
+        type="Feature",
+        geometry=footprint.__geo_interface__,
+        properties=dict(
+            dataset_count=period.footprint_count,
+            product_name=product_name,
+            time_spec=[year, month, day],
+        ),
+    )


 @cache.memoize(timeout=60)
@@ -302,4 +302,4 @@ def internal_server_error(error):
     from prometheus_flask_exporter.multiprocess import GunicornInternalPrometheusMetrics

     metrics = GunicornInternalPrometheusMetrics(app, group_by="endpoint")
-    _LOG.info("Prometheus metrics enabled : {metrics}", extra={"metrics": metrics})
+    _LOG.info("Prometheus metrics enabled : {metrics}", extra=dict(metrics=metrics))

cubedash/_pages.py

Lines changed: 22 additions & 24 deletions
@@ -230,7 +230,7 @@ def search_page( # noqa: C901

     if request_wants_json():
         return as_rich_json(
-            {"datasets": [build_dataset_info(_model.STORE.index, d) for d in datasets]}
+            dict(datasets=[build_dataset_info(_model.STORE.index, d) for d in datasets])
         )

     # For display on the page (and future searches).
@@ -329,12 +329,12 @@ def region_page(
         )
     )

-    same_region_products = [
+    same_region_products = list(
         product.name
         for product in _model.STORE.find_products_for_region(
             region_code, year, month, day, limit=limit + 1, offset=offset
         )
-    ]
+    )

     def url_with_offset(new_offset: int):
         """Currently request url with a different offset."""
@@ -356,7 +356,7 @@ def url_with_offset(new_offset: int):

     if request_wants_json():
         return as_rich_json(
-            {"datasets": [build_dataset_info(_model.STORE.index, d) for d in datasets]}
+            dict(datasets=[build_dataset_info(_model.STORE.index, d) for d in datasets])
         )

     return utils.render(
@@ -404,10 +404,10 @@ def region_geojson(

     geojson = region_info.region(region_code).footprint_geojson
     geojson["properties"].update(
-        {
-            "product_name": product_name,
-            "year_month_day_filter": [year, month, day],
-        }
+        dict(
+            product_name=product_name,
+            year_month_day_filter=[year, month, day],
+        )
     )
     return utils.as_geojson(
         geojson,
@@ -474,27 +474,25 @@ def inject_globals():
     if product_summary:
         last_updated = product_summary.last_successful_summary_time

-    return {
+    return dict(
         # Only the known, summarised products in groups.
-        "grouped_products": _get_grouped_products(),
+        grouped_products=_get_grouped_products(),
         # All products in the datacube, summarised or not.
-        "datacube_products": list(_model.STORE.index.products.get_all()),
-        "hidden_product_list": app.config.get(
-            "CUBEDASH_HIDE_PRODUCTS_BY_NAME_LIST", []
-        ),
-        "datacube_metadata_types": list(_model.STORE.index.metadata_types.get_all()),
-        "current_time": datetime.utcnow(),
-        "datacube_version": datacube.__version__,
-        "app_version": cubedash.__version__,
-        "grouping_timezone": tz.gettz(_model.DEFAULT_GROUPING_TIMEZONE),
-        "last_updated_time": last_updated,
-        "explorer_instance_title": app.config.get(
+        datacube_products=list(_model.STORE.index.products.get_all()),
+        hidden_product_list=app.config.get("CUBEDASH_HIDE_PRODUCTS_BY_NAME_LIST", []),
+        datacube_metadata_types=list(_model.STORE.index.metadata_types.get_all()),
+        current_time=datetime.utcnow(),
+        datacube_version=datacube.__version__,
+        app_version=cubedash.__version__,
+        grouping_timezone=tz.gettz(_model.DEFAULT_GROUPING_TIMEZONE),
+        last_updated_time=last_updated,
+        explorer_instance_title=app.config.get(
             "CUBEDASH_INSTANCE_TITLE",
         )
         or app.config.get("STAC_ENDPOINT_TITLE", ""),
-        "explorer_sister_instances": app.config.get("CUBEDASH_SISTER_SITES", None),
-        "breadcrumb": _get_breadcrumbs(request.path, request.script_root),
-    }
+        explorer_sister_instances=app.config.get("CUBEDASH_SISTER_SITES", None),
+        breadcrumb=_get_breadcrumbs(request.path, request.script_root),
+    )


 HREF = str
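inject_globals returns the mapping of values made available to the templates; a minimal, generic Flask sketch of that pattern (assuming it is registered as a context processor, which this hunk does not show):

from datetime import datetime

from flask import Flask

app = Flask(__name__)


@app.context_processor
def inject_globals():
    # Every key returned here becomes a variable available in all rendered templates.
    return dict(
        current_time=datetime.utcnow(),
        explorer_instance_title=app.config.get("CUBEDASH_INSTANCE_TITLE", ""),
    )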

cubedash/_stac.py

Lines changed: 55 additions & 53 deletions
@@ -414,7 +414,9 @@ def _list_arg(arg: list):
     """
     if isinstance(arg, str):
         arg = list(arg)
-    return [json.loads(a.replace("'", '"')) if isinstance(a, str) else a for a in arg]
+    return list(
+        map(lambda a: json.loads(a.replace("'", '"')) if isinstance(a, str) else a, arg)
+    )


 # Search
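The list comprehension and the list(map(...)) form reverted above yield the same list; a standalone sketch with hypothetical argument values (not from the repository):

import json

# A mix of stringified and already-parsed arguments, as the hunk above appears to handle.
arg = ["{'a': 1}", {"b": 2}]
as_comprehension = [
    json.loads(a.replace("'", '"')) if isinstance(a, str) else a for a in arg
]
as_map = list(
    map(lambda a: json.loads(a.replace("'", '"')) if isinstance(a, str) else a, arg)
)
assert as_comprehension == as_map == [{"a": 1}, {"b": 2}]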
@@ -506,20 +508,20 @@ def next_page_url(next_offset):

     feature_collection.extra_fields["links"].extend(
         (
-            {
-                "href": url_for(".stac_search"),
-                "rel": "search",
-                "title": "Search",
-                "type": "application/geo+json",
-                "method": "GET",
-            },
-            {
-                "href": url_for(".stac_search"),
-                "rel": "search",
-                "title": "Search",
-                "type": "application/geo+json",
-                "method": "POST",
-            },
+            dict(
+                href=url_for(".stac_search"),
+                rel="search",
+                title="Search",
+                type="application/geo+json",
+                method="GET",
+            ),
+            dict(
+                href=url_for(".stac_search"),
+                rel="search",
+                title="Search",
+                type="application/geo+json",
+                method="POST",
+            ),
         )
     )
     return feature_collection
@@ -747,18 +749,18 @@ def search_stac_items(
         page = 0
     if limit != 0:
         page = offset // limit
-    extra_properties = {
-        "links": [],
+    extra_properties = dict(
+        links=[],
         # Stac standard
-        "numberReturned": len(returned),
+        numberReturned=len(returned),
         # Compatibility with older implementation. Was removed from stac-api standard.
         # (page numbers + limits are not ideal as they prevent some big db optimisations.)
-        "context": {
-            "page": page,
-            "limit": limit,
-            "returned": len(returned),
-        },
-    }
+        context=dict(
+            page=page,
+            limit=limit,
+            returned=len(returned),
+        ),
+    )
     if include_total_count:
         count_matching = _model.STORE.get_count(
             product_names=product_names, time=time, bbox=bbox, dataset_ids=dataset_ids
@@ -775,34 +777,34 @@ def search_stac_items(
     result = ItemCollection(items, extra_fields=extra_properties)

     if there_are_more:
-        next_link = {
-            "rel": "next",
-            "title": "Next page of Items",
-            "type": "application/geo+json",
-        }
+        next_link = dict(
+            rel="next",
+            title="Next page of Items",
+            type="application/geo+json",
+        )
         if use_post_request:
             next_link.update(
-                {
-                    "method": "POST",
-                    "merge": True,
+                dict(
+                    method="POST",
+                    merge=True,
                     # Unlike GET requests, we can tell them to repeat their same request args
                     # themselves.
                     #
                     # Same URL:
-                    "href": flask.request.url,
+                    href=flask.request.url,
                     # ... with a new offset.
-                    "body": {
-                        "_o": offset + limit,
-                    },
-                }
+                    body=dict(
+                        _o=offset + limit,
+                    ),
+                )
             )
         else:
             # Otherwise, let the route create the next url.
             next_link.update(
-                {
-                    "method": "GET",
-                    "href": get_next_url(offset + limit),
-                }
+                dict(
+                    method="GET",
+                    href=get_next_url(offset + limit),
+                )
             )

         result.extra_fields["links"].append(next_link)
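For reference, with a hypothetical offset of 0 and limit of 100, the POST branch above would append a "next" link shaped roughly like this (the URL is a placeholder, not the real endpoint):

next_link = dict(
    rel="next",
    title="Next page of Items",
    type="application/geo+json",
    method="POST",
    merge=True,
    # Clients re-POST their own search body, merged with this new offset.
    href="https://explorer.example/stac/search",
    body=dict(_o=100),
)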
@@ -904,10 +906,10 @@ def _geojson_stac_response(doc: Union[STACObject, ItemCollection]) -> flask.Resp

 def stac_endpoint_information() -> Dict:
     config = _model.app.config
-    o = {
-        "id": config.get("STAC_ENDPOINT_ID", "odc-explorer"),
-        "title": config.get("STAC_ENDPOINT_TITLE", "Default ODC Explorer instance"),
-    }
+    o = dict(
+        id=config.get("STAC_ENDPOINT_ID", "odc-explorer"),
+        title=config.get("STAC_ENDPOINT_TITLE", "Default ODC Explorer instance"),
+    )
     description = config.get(
         "STAC_ENDPOINT_DESCRIPTION",
         "Configure stac endpoint information in your Explorer `settings.env.py` file",
@@ -995,7 +997,7 @@ def root():
         "http://www.opengis.net/spec/ogcapi-features-3/1.0/conf/filter",
         "https://api.stacspec.org/v1.0.0-rc.1/collections",
     ]
-    c.extra_fields = {"conformsTo": conformance_classes}
+    c.extra_fields = dict(conformsTo=conformance_classes)

     return _stac_response(c)

@@ -1033,18 +1035,18 @@ def collections():
     an array (instead of just a link to each collection).
     """
     return _utils.as_json(
-        {
-            "links": [
-                {"rel": "self", "type": "application/json", "href": request.url},
-                {"rel": "root", "type": "application/json", "href": url_for(".root")},
-                {"rel": "parent", "type": "application/json", "href": url_for(".root")},
+        dict(
+            links=[
+                dict(rel="self", type="application/json", href=request.url),
+                dict(rel="root", type="application/json", href=url_for(".root")),
+                dict(rel="parent", type="application/json", href=url_for(".root")),
             ],
-            "collections": [
+            collections=[
                 # TODO: This has a root link, right?
                 _stac_collection(product.name).to_dict()
                 for product, product_summary in _model.get_products_with_summaries()
             ],
-        }
+        )
     )

