From 0ec7d69aeaecaa751b2c785d50ab9c9861a0abab Mon Sep 17 00:00:00 2001 From: Daniel Thiex Date: Fri, 7 Jul 2023 15:00:54 +0200 Subject: [PATCH 01/42] add skysat json --- .../commercial_collections/SKYSAT.json | 538 ++++++++++++++++++ .../collection_information/SKYSAT.json | 538 ++++++++++++++++++ tests/test_units.py | 1 + 3 files changed, 1077 insertions(+) create mode 100644 rest/openeo_collections/commercial_collections/SKYSAT.json create mode 100644 tests/fixtures/collection_information/SKYSAT.json diff --git a/rest/openeo_collections/commercial_collections/SKYSAT.json b/rest/openeo_collections/commercial_collections/SKYSAT.json new file mode 100644 index 00000000..1e019b2a --- /dev/null +++ b/rest/openeo_collections/commercial_collections/SKYSAT.json @@ -0,0 +1,538 @@ +{ + "cube:dimensions":{ + "bands":{ + "type":"bands", + "values":[ + "Blue", + "Green", + "Red", + "NIR", + "UDM", + "UDM2_Clear", + "UDM2_Snow", + "UDM2_Shadow", + "UDM2_LightHaze", + "UDM2_HeavyHaze", + "UDM2_Cloud", + "UDM2_Confidence", + "PAN", + "dataMask" + ] + }, + "t":{ + "extent":[ + "2014-01-01T00:00:00Z", + null + ], + "type":"temporal" + }, + "x":{ + "axis":"x", + "extent":[ + -180, + 180 + ], + "reference_system":{ + "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", + "area":"World", + "base_crs":{ + "coordinate_system":{ + "axis":[ + { + "abbreviation":"Lat", + "direction":"north", + "name":"Geodetic latitude", + "unit":"degree" + }, + { + "abbreviation":"Lon", + "direction":"east", + "name":"Geodetic longitude", + "unit":"degree" + } + ], + "subtype":"ellipsoidal" + }, + "datum":{ + "ellipsoid":{ + "inverse_flattening":298.257223563, + "name":"WGS 84", + "semi_major_axis":6378137 + }, + "name":"World Geodetic System 1984", + "type":"GeodeticReferenceFrame" + }, + "name":"WGS 84" + }, + "bbox":{ + "east_longitude":180, + "north_latitude":90, + "south_latitude":-90, + "west_longitude":-180 + }, + "coordinate_system":{ + "axis":[ + { + "abbreviation":"E", + "direction":"east", + "name":"Easting", + "unit":"metre" + }, + { + "abbreviation":"N", + "direction":"north", + "name":"Northing", + "unit":"metre" + } + ], + "subtype":"Cartesian" + }, + "id":{ + "authority":"OGC", + "code":"Auto42001", + "version":"1.3" + }, + "name":"AUTO 42001 (Universal Transverse Mercator)", + "type":"ProjectedCRS" + }, + "type":"spatial" + }, + "y":{ + "axis":"y", + "extent":[ + -90, + 90 + ], + "reference_system":{ + "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", + "area":"World", + "base_crs":{ + "coordinate_system":{ + "axis":[ + { + "abbreviation":"Lat", + "direction":"north", + "name":"Geodetic latitude", + "unit":"degree" + }, + { + "abbreviation":"Lon", + "direction":"east", + "name":"Geodetic longitude", + "unit":"degree" + } + ], + "subtype":"ellipsoidal" + }, + "datum":{ + "ellipsoid":{ + "inverse_flattening":298.257223563, + "name":"WGS 84", + "semi_major_axis":6378137 + }, + "name":"World Geodetic System 1984", + "type":"GeodeticReferenceFrame" + }, + "name":"WGS 84" + }, + "bbox":{ + "east_longitude":180, + "north_latitude":90, + "south_latitude":-90, + "west_longitude":-180 + }, + "coordinate_system":{ + "axis":[ + { + "abbreviation":"E", + "direction":"east", + "name":"Easting", + "unit":"metre" + }, + { + "abbreviation":"N", + "direction":"north", + "name":"Northing", + "unit":"metre" + } + ], + "subtype":"Cartesian" + }, + "id":{ + "authority":"OGC", + "code":"Auto42001", + "version":"1.3" + }, + "name":"AUTO 42001 (Universal Transverse Mercator)", + "type":"ProjectedCRS" + }, + 
"type":"spatial" + } + }, + "datasource_type":"byoc-ID", + "description":"SkySat is one of the satellite constellations operated by Planet. SkySat satellite constellation consists of 21 satellites, which were launched between 2013 and 2020. The satellites are based on a CubeSat concept but are a bit bigger comparing to the PlanetScope's satellites. Because of its rapid revisit time, this data is suitable to monitor fast changes on earth's surface. However, note that the data acquisition must be tasked, data is not acquired systematically.", + "extent":{ + "spatial":{ + "bbox":[ + [ + -180, + -90, + 180, + 90 + ] + ] + }, + "temporal":{ + "interval":[ + [ + "2014-01-01T00:00:00Z", + null + ] + ] + } + }, + "id":"SKYSAT", + "keywords":[ + "sentinel hub", + "SkySat", + "vhr", + "commercial data" + ], + "license":"various", + "links":[ + + ], + "providers":[ + { + "description":"", + "name":"Sentinel Hub", + "roles":[ + "processor" + ], + "url":"https://services.sentinel-hub.com/" + }, + { + "description":"", + "name":"Planet", + "roles":[ + "producer" + ], + "url":"https://www.planet.com/products/planet-imagery/" + } + ], + "sci:citation":"\u00a9 Planet (YYYY), contains SkySat data processed by Sentinel Hub", + "stac_extensions":[ + "datacube" + ], + "stac_version":"0.9.0", + "summaries":{ + "eo:bands":[ + { + "center_wavelength":0.4825, + "common_name":"blue", + "description":"Blue", + "full_width_half_max":0.325, + "name":"Blue", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.545, + "common_name":"green", + "description":"Green", + "full_width_half_max":0.4, + "name":"Green", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.650, + "common_name":"red", + "description":"Red", + "full_width_half_max":0.45, + "name":"Red", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.82, + "common_name":"nir08", + "description":"Near Infrared", + "full_width_half_max":0.8, + "name":"NIR", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Unusable Data Mask", + "name":"UDM", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Clear mask", + "name":"UDM2_Clear", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Snow mask", + "name":"UDM2_Snow", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Shadow mask", + "name":"UDM2_Shadow", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Light haze mask", + "name":"UDM2_LightHaze", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Heavy haze mask", + "name":"UDM2_HeavyHaze", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Cloud mask", + "name":"UDM2_Cloud", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Confidence map", + "name":"UDM2_Confidence", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.675, + "common_name":"nir08", + "description":"Panchromatic", + "full_width_half_max":0.225, + "name":"PAN", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"The mask of data/no data pixels", + 
"name":"dataMask" + } + ] + }, + "crs":[ + "http://www.opengis.net/def/crs/OGC/1.3/CRS84", + "http://www.opengis.net/def/crs/EPSG/0/2154", + "http://www.opengis.net/def/crs/EPSG/0/2180", + "http://www.opengis.net/def/crs/EPSG/0/2193", + "http://www.opengis.net/def/crs/EPSG/0/3003", + "http://www.opengis.net/def/crs/EPSG/0/3004", + "http://www.opengis.net/def/crs/EPSG/0/3031", + "http://www.opengis.net/def/crs/EPSG/0/3035", + "http://www.opengis.net/def/crs/EPSG/0/4326", + "http://www.opengis.net/def/crs/EPSG/0/4346", + "http://www.opengis.net/def/crs/EPSG/0/4416", + "http://www.opengis.net/def/crs/EPSG/0/4765", + "http://www.opengis.net/def/crs/EPSG/0/4794", + "http://www.opengis.net/def/crs/EPSG/0/4844", + "http://www.opengis.net/def/crs/EPSG/0/4857", + "http://www.opengis.net/def/crs/EPSG/0/3912", + "http://www.opengis.net/def/crs/EPSG/0/3995", + "http://www.opengis.net/def/crs/EPSG/0/4026", + "http://www.opengis.net/def/crs/EPSG/0/5514", + "http://www.opengis.net/def/crs/EPSG/0/28992", + "http://www.opengis.net/def/crs/EPSG/0/32601", + "http://www.opengis.net/def/crs/EPSG/0/32602", + "http://www.opengis.net/def/crs/EPSG/0/32603", + "http://www.opengis.net/def/crs/EPSG/0/32604", + "http://www.opengis.net/def/crs/EPSG/0/32605", + "http://www.opengis.net/def/crs/EPSG/0/32606", + "http://www.opengis.net/def/crs/EPSG/0/32607", + "http://www.opengis.net/def/crs/EPSG/0/32608", + "http://www.opengis.net/def/crs/EPSG/0/32609", + "http://www.opengis.net/def/crs/EPSG/0/32610", + "http://www.opengis.net/def/crs/EPSG/0/32611", + "http://www.opengis.net/def/crs/EPSG/0/32612", + "http://www.opengis.net/def/crs/EPSG/0/32613", + "http://www.opengis.net/def/crs/EPSG/0/32614", + "http://www.opengis.net/def/crs/EPSG/0/32615", + "http://www.opengis.net/def/crs/EPSG/0/32616", + "http://www.opengis.net/def/crs/EPSG/0/32617", + "http://www.opengis.net/def/crs/EPSG/0/32618", + "http://www.opengis.net/def/crs/EPSG/0/32619", + "http://www.opengis.net/def/crs/EPSG/0/32620", + "http://www.opengis.net/def/crs/EPSG/0/32621", + "http://www.opengis.net/def/crs/EPSG/0/32622", + "http://www.opengis.net/def/crs/EPSG/0/32623", + "http://www.opengis.net/def/crs/EPSG/0/32624", + "http://www.opengis.net/def/crs/EPSG/0/32625", + "http://www.opengis.net/def/crs/EPSG/0/32626", + "http://www.opengis.net/def/crs/EPSG/0/32627", + "http://www.opengis.net/def/crs/EPSG/0/32628", + "http://www.opengis.net/def/crs/EPSG/0/32629", + "http://www.opengis.net/def/crs/EPSG/0/32630", + "http://www.opengis.net/def/crs/EPSG/0/32631", + "http://www.opengis.net/def/crs/EPSG/0/32632", + "http://www.opengis.net/def/crs/EPSG/0/32633", + "http://www.opengis.net/def/crs/EPSG/0/32634", + "http://www.opengis.net/def/crs/EPSG/0/32635", + "http://www.opengis.net/def/crs/EPSG/0/32636", + "http://www.opengis.net/def/crs/EPSG/0/32637", + "http://www.opengis.net/def/crs/EPSG/0/32638", + "http://www.opengis.net/def/crs/EPSG/0/32639", + "http://www.opengis.net/def/crs/EPSG/0/32640", + "http://www.opengis.net/def/crs/EPSG/0/32641", + "http://www.opengis.net/def/crs/EPSG/0/32642", + "http://www.opengis.net/def/crs/EPSG/0/32643", + "http://www.opengis.net/def/crs/EPSG/0/32644", + "http://www.opengis.net/def/crs/EPSG/0/32645", + "http://www.opengis.net/def/crs/EPSG/0/32646", + "http://www.opengis.net/def/crs/EPSG/0/32647", + "http://www.opengis.net/def/crs/EPSG/0/32648", + "http://www.opengis.net/def/crs/EPSG/0/32649", + "http://www.opengis.net/def/crs/EPSG/0/32650", + "http://www.opengis.net/def/crs/EPSG/0/32651", + "http://www.opengis.net/def/crs/EPSG/0/32652", + 
"http://www.opengis.net/def/crs/EPSG/0/32653", + "http://www.opengis.net/def/crs/EPSG/0/32654", + "http://www.opengis.net/def/crs/EPSG/0/32655", + "http://www.opengis.net/def/crs/EPSG/0/32656", + "http://www.opengis.net/def/crs/EPSG/0/32657", + "http://www.opengis.net/def/crs/EPSG/0/32658", + "http://www.opengis.net/def/crs/EPSG/0/32659", + "http://www.opengis.net/def/crs/EPSG/0/32660", + "http://www.opengis.net/def/crs/EPSG/0/32701", + "http://www.opengis.net/def/crs/EPSG/0/32702", + "http://www.opengis.net/def/crs/EPSG/0/32703", + "http://www.opengis.net/def/crs/EPSG/0/32704", + "http://www.opengis.net/def/crs/EPSG/0/32705", + "http://www.opengis.net/def/crs/EPSG/0/32706", + "http://www.opengis.net/def/crs/EPSG/0/32707", + "http://www.opengis.net/def/crs/EPSG/0/32708", + "http://www.opengis.net/def/crs/EPSG/0/32709", + "http://www.opengis.net/def/crs/EPSG/0/32710", + "http://www.opengis.net/def/crs/EPSG/0/32711", + "http://www.opengis.net/def/crs/EPSG/0/32712", + "http://www.opengis.net/def/crs/EPSG/0/32713", + "http://www.opengis.net/def/crs/EPSG/0/32714", + "http://www.opengis.net/def/crs/EPSG/0/32715", + "http://www.opengis.net/def/crs/EPSG/0/32716", + "http://www.opengis.net/def/crs/EPSG/0/32717", + "http://www.opengis.net/def/crs/EPSG/0/32718", + "http://www.opengis.net/def/crs/EPSG/0/32719", + "http://www.opengis.net/def/crs/EPSG/0/32720", + "http://www.opengis.net/def/crs/EPSG/0/32721", + "http://www.opengis.net/def/crs/EPSG/0/32722", + "http://www.opengis.net/def/crs/EPSG/0/32723", + "http://www.opengis.net/def/crs/EPSG/0/32724", + "http://www.opengis.net/def/crs/EPSG/0/32725", + "http://www.opengis.net/def/crs/EPSG/0/32726", + "http://www.opengis.net/def/crs/EPSG/0/32727", + "http://www.opengis.net/def/crs/EPSG/0/32728", + "http://www.opengis.net/def/crs/EPSG/0/32729", + "http://www.opengis.net/def/crs/EPSG/0/32730", + "http://www.opengis.net/def/crs/EPSG/0/32731", + "http://www.opengis.net/def/crs/EPSG/0/32732", + "http://www.opengis.net/def/crs/EPSG/0/32733", + "http://www.opengis.net/def/crs/EPSG/0/32734", + "http://www.opengis.net/def/crs/EPSG/0/32735", + "http://www.opengis.net/def/crs/EPSG/0/32736", + "http://www.opengis.net/def/crs/EPSG/0/32737", + "http://www.opengis.net/def/crs/EPSG/0/32738", + "http://www.opengis.net/def/crs/EPSG/0/32739", + "http://www.opengis.net/def/crs/EPSG/0/32740", + "http://www.opengis.net/def/crs/EPSG/0/32741", + "http://www.opengis.net/def/crs/EPSG/0/32742", + "http://www.opengis.net/def/crs/EPSG/0/32743", + "http://www.opengis.net/def/crs/EPSG/0/32744", + "http://www.opengis.net/def/crs/EPSG/0/32745", + "http://www.opengis.net/def/crs/EPSG/0/32746", + "http://www.opengis.net/def/crs/EPSG/0/32746", + "http://www.opengis.net/def/crs/EPSG/0/32748", + "http://www.opengis.net/def/crs/EPSG/0/32749", + "http://www.opengis.net/def/crs/EPSG/0/32750", + "http://www.opengis.net/def/crs/EPSG/0/32751", + "http://www.opengis.net/def/crs/EPSG/0/32752", + "http://www.opengis.net/def/crs/EPSG/0/32753", + "http://www.opengis.net/def/crs/EPSG/0/32754", + "http://www.opengis.net/def/crs/EPSG/0/32755", + "http://www.opengis.net/def/crs/EPSG/0/32756", + "http://www.opengis.net/def/crs/EPSG/0/32757", + "http://www.opengis.net/def/crs/EPSG/0/32758", + "http://www.opengis.net/def/crs/EPSG/0/32759", + "http://www.opengis.net/def/crs/EPSG/0/32760", + "http://www.opengis.net/def/crs/SR-ORG/0/98739" + ], + "title":"SkySat" +} \ No newline at end of file diff --git a/tests/fixtures/collection_information/SKYSAT.json b/tests/fixtures/collection_information/SKYSAT.json new 
file mode 100644 index 00000000..1e019b2a --- /dev/null +++ b/tests/fixtures/collection_information/SKYSAT.json @@ -0,0 +1,538 @@ +{ + "cube:dimensions":{ + "bands":{ + "type":"bands", + "values":[ + "Blue", + "Green", + "Red", + "NIR", + "UDM", + "UDM2_Clear", + "UDM2_Snow", + "UDM2_Shadow", + "UDM2_LightHaze", + "UDM2_HeavyHaze", + "UDM2_Cloud", + "UDM2_Confidence", + "PAN", + "dataMask" + ] + }, + "t":{ + "extent":[ + "2014-01-01T00:00:00Z", + null + ], + "type":"temporal" + }, + "x":{ + "axis":"x", + "extent":[ + -180, + 180 + ], + "reference_system":{ + "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", + "area":"World", + "base_crs":{ + "coordinate_system":{ + "axis":[ + { + "abbreviation":"Lat", + "direction":"north", + "name":"Geodetic latitude", + "unit":"degree" + }, + { + "abbreviation":"Lon", + "direction":"east", + "name":"Geodetic longitude", + "unit":"degree" + } + ], + "subtype":"ellipsoidal" + }, + "datum":{ + "ellipsoid":{ + "inverse_flattening":298.257223563, + "name":"WGS 84", + "semi_major_axis":6378137 + }, + "name":"World Geodetic System 1984", + "type":"GeodeticReferenceFrame" + }, + "name":"WGS 84" + }, + "bbox":{ + "east_longitude":180, + "north_latitude":90, + "south_latitude":-90, + "west_longitude":-180 + }, + "coordinate_system":{ + "axis":[ + { + "abbreviation":"E", + "direction":"east", + "name":"Easting", + "unit":"metre" + }, + { + "abbreviation":"N", + "direction":"north", + "name":"Northing", + "unit":"metre" + } + ], + "subtype":"Cartesian" + }, + "id":{ + "authority":"OGC", + "code":"Auto42001", + "version":"1.3" + }, + "name":"AUTO 42001 (Universal Transverse Mercator)", + "type":"ProjectedCRS" + }, + "type":"spatial" + }, + "y":{ + "axis":"y", + "extent":[ + -90, + 90 + ], + "reference_system":{ + "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", + "area":"World", + "base_crs":{ + "coordinate_system":{ + "axis":[ + { + "abbreviation":"Lat", + "direction":"north", + "name":"Geodetic latitude", + "unit":"degree" + }, + { + "abbreviation":"Lon", + "direction":"east", + "name":"Geodetic longitude", + "unit":"degree" + } + ], + "subtype":"ellipsoidal" + }, + "datum":{ + "ellipsoid":{ + "inverse_flattening":298.257223563, + "name":"WGS 84", + "semi_major_axis":6378137 + }, + "name":"World Geodetic System 1984", + "type":"GeodeticReferenceFrame" + }, + "name":"WGS 84" + }, + "bbox":{ + "east_longitude":180, + "north_latitude":90, + "south_latitude":-90, + "west_longitude":-180 + }, + "coordinate_system":{ + "axis":[ + { + "abbreviation":"E", + "direction":"east", + "name":"Easting", + "unit":"metre" + }, + { + "abbreviation":"N", + "direction":"north", + "name":"Northing", + "unit":"metre" + } + ], + "subtype":"Cartesian" + }, + "id":{ + "authority":"OGC", + "code":"Auto42001", + "version":"1.3" + }, + "name":"AUTO 42001 (Universal Transverse Mercator)", + "type":"ProjectedCRS" + }, + "type":"spatial" + } + }, + "datasource_type":"byoc-ID", + "description":"SkySat is one of the satellite constellations operated by Planet. SkySat satellite constellation consists of 21 satellites, which were launched between 2013 and 2020. The satellites are based on a CubeSat concept but are a bit bigger comparing to the PlanetScope's satellites. Because of its rapid revisit time, this data is suitable to monitor fast changes on earth's surface. 
However, note that the data acquisition must be tasked, data is not acquired systematically.", + "extent":{ + "spatial":{ + "bbox":[ + [ + -180, + -90, + 180, + 90 + ] + ] + }, + "temporal":{ + "interval":[ + [ + "2014-01-01T00:00:00Z", + null + ] + ] + } + }, + "id":"SKYSAT", + "keywords":[ + "sentinel hub", + "SkySat", + "vhr", + "commercial data" + ], + "license":"various", + "links":[ + + ], + "providers":[ + { + "description":"", + "name":"Sentinel Hub", + "roles":[ + "processor" + ], + "url":"https://services.sentinel-hub.com/" + }, + { + "description":"", + "name":"Planet", + "roles":[ + "producer" + ], + "url":"https://www.planet.com/products/planet-imagery/" + } + ], + "sci:citation":"\u00a9 Planet (YYYY), contains SkySat data processed by Sentinel Hub", + "stac_extensions":[ + "datacube" + ], + "stac_version":"0.9.0", + "summaries":{ + "eo:bands":[ + { + "center_wavelength":0.4825, + "common_name":"blue", + "description":"Blue", + "full_width_half_max":0.325, + "name":"Blue", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.545, + "common_name":"green", + "description":"Green", + "full_width_half_max":0.4, + "name":"Green", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.650, + "common_name":"red", + "description":"Red", + "full_width_half_max":0.45, + "name":"Red", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.82, + "common_name":"nir08", + "description":"Near Infrared", + "full_width_half_max":0.8, + "name":"NIR", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Unusable Data Mask", + "name":"UDM", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Clear mask", + "name":"UDM2_Clear", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Snow mask", + "name":"UDM2_Snow", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Shadow mask", + "name":"UDM2_Shadow", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Light haze mask", + "name":"UDM2_LightHaze", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Heavy haze mask", + "name":"UDM2_HeavyHaze", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Cloud mask", + "name":"UDM2_Cloud", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"Usable Data mask - Confidence map", + "name":"UDM2_Confidence", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "center_wavelength":0.675, + "common_name":"nir08", + "description":"Panchromatic", + "full_width_half_max":0.225, + "name":"PAN", + "openeo:gsd":{ + "unit":"m", + "value":[ + 0.5, + 0.5 + ] + } + }, + { + "description":"The mask of data/no data pixels", + "name":"dataMask" + } + ] + }, + "crs":[ + "http://www.opengis.net/def/crs/OGC/1.3/CRS84", + "http://www.opengis.net/def/crs/EPSG/0/2154", + "http://www.opengis.net/def/crs/EPSG/0/2180", + "http://www.opengis.net/def/crs/EPSG/0/2193", + "http://www.opengis.net/def/crs/EPSG/0/3003", + "http://www.opengis.net/def/crs/EPSG/0/3004", + "http://www.opengis.net/def/crs/EPSG/0/3031", + "http://www.opengis.net/def/crs/EPSG/0/3035", + 
"http://www.opengis.net/def/crs/EPSG/0/4326", + "http://www.opengis.net/def/crs/EPSG/0/4346", + "http://www.opengis.net/def/crs/EPSG/0/4416", + "http://www.opengis.net/def/crs/EPSG/0/4765", + "http://www.opengis.net/def/crs/EPSG/0/4794", + "http://www.opengis.net/def/crs/EPSG/0/4844", + "http://www.opengis.net/def/crs/EPSG/0/4857", + "http://www.opengis.net/def/crs/EPSG/0/3912", + "http://www.opengis.net/def/crs/EPSG/0/3995", + "http://www.opengis.net/def/crs/EPSG/0/4026", + "http://www.opengis.net/def/crs/EPSG/0/5514", + "http://www.opengis.net/def/crs/EPSG/0/28992", + "http://www.opengis.net/def/crs/EPSG/0/32601", + "http://www.opengis.net/def/crs/EPSG/0/32602", + "http://www.opengis.net/def/crs/EPSG/0/32603", + "http://www.opengis.net/def/crs/EPSG/0/32604", + "http://www.opengis.net/def/crs/EPSG/0/32605", + "http://www.opengis.net/def/crs/EPSG/0/32606", + "http://www.opengis.net/def/crs/EPSG/0/32607", + "http://www.opengis.net/def/crs/EPSG/0/32608", + "http://www.opengis.net/def/crs/EPSG/0/32609", + "http://www.opengis.net/def/crs/EPSG/0/32610", + "http://www.opengis.net/def/crs/EPSG/0/32611", + "http://www.opengis.net/def/crs/EPSG/0/32612", + "http://www.opengis.net/def/crs/EPSG/0/32613", + "http://www.opengis.net/def/crs/EPSG/0/32614", + "http://www.opengis.net/def/crs/EPSG/0/32615", + "http://www.opengis.net/def/crs/EPSG/0/32616", + "http://www.opengis.net/def/crs/EPSG/0/32617", + "http://www.opengis.net/def/crs/EPSG/0/32618", + "http://www.opengis.net/def/crs/EPSG/0/32619", + "http://www.opengis.net/def/crs/EPSG/0/32620", + "http://www.opengis.net/def/crs/EPSG/0/32621", + "http://www.opengis.net/def/crs/EPSG/0/32622", + "http://www.opengis.net/def/crs/EPSG/0/32623", + "http://www.opengis.net/def/crs/EPSG/0/32624", + "http://www.opengis.net/def/crs/EPSG/0/32625", + "http://www.opengis.net/def/crs/EPSG/0/32626", + "http://www.opengis.net/def/crs/EPSG/0/32627", + "http://www.opengis.net/def/crs/EPSG/0/32628", + "http://www.opengis.net/def/crs/EPSG/0/32629", + "http://www.opengis.net/def/crs/EPSG/0/32630", + "http://www.opengis.net/def/crs/EPSG/0/32631", + "http://www.opengis.net/def/crs/EPSG/0/32632", + "http://www.opengis.net/def/crs/EPSG/0/32633", + "http://www.opengis.net/def/crs/EPSG/0/32634", + "http://www.opengis.net/def/crs/EPSG/0/32635", + "http://www.opengis.net/def/crs/EPSG/0/32636", + "http://www.opengis.net/def/crs/EPSG/0/32637", + "http://www.opengis.net/def/crs/EPSG/0/32638", + "http://www.opengis.net/def/crs/EPSG/0/32639", + "http://www.opengis.net/def/crs/EPSG/0/32640", + "http://www.opengis.net/def/crs/EPSG/0/32641", + "http://www.opengis.net/def/crs/EPSG/0/32642", + "http://www.opengis.net/def/crs/EPSG/0/32643", + "http://www.opengis.net/def/crs/EPSG/0/32644", + "http://www.opengis.net/def/crs/EPSG/0/32645", + "http://www.opengis.net/def/crs/EPSG/0/32646", + "http://www.opengis.net/def/crs/EPSG/0/32647", + "http://www.opengis.net/def/crs/EPSG/0/32648", + "http://www.opengis.net/def/crs/EPSG/0/32649", + "http://www.opengis.net/def/crs/EPSG/0/32650", + "http://www.opengis.net/def/crs/EPSG/0/32651", + "http://www.opengis.net/def/crs/EPSG/0/32652", + "http://www.opengis.net/def/crs/EPSG/0/32653", + "http://www.opengis.net/def/crs/EPSG/0/32654", + "http://www.opengis.net/def/crs/EPSG/0/32655", + "http://www.opengis.net/def/crs/EPSG/0/32656", + "http://www.opengis.net/def/crs/EPSG/0/32657", + "http://www.opengis.net/def/crs/EPSG/0/32658", + "http://www.opengis.net/def/crs/EPSG/0/32659", + "http://www.opengis.net/def/crs/EPSG/0/32660", + 
"http://www.opengis.net/def/crs/EPSG/0/32701", + "http://www.opengis.net/def/crs/EPSG/0/32702", + "http://www.opengis.net/def/crs/EPSG/0/32703", + "http://www.opengis.net/def/crs/EPSG/0/32704", + "http://www.opengis.net/def/crs/EPSG/0/32705", + "http://www.opengis.net/def/crs/EPSG/0/32706", + "http://www.opengis.net/def/crs/EPSG/0/32707", + "http://www.opengis.net/def/crs/EPSG/0/32708", + "http://www.opengis.net/def/crs/EPSG/0/32709", + "http://www.opengis.net/def/crs/EPSG/0/32710", + "http://www.opengis.net/def/crs/EPSG/0/32711", + "http://www.opengis.net/def/crs/EPSG/0/32712", + "http://www.opengis.net/def/crs/EPSG/0/32713", + "http://www.opengis.net/def/crs/EPSG/0/32714", + "http://www.opengis.net/def/crs/EPSG/0/32715", + "http://www.opengis.net/def/crs/EPSG/0/32716", + "http://www.opengis.net/def/crs/EPSG/0/32717", + "http://www.opengis.net/def/crs/EPSG/0/32718", + "http://www.opengis.net/def/crs/EPSG/0/32719", + "http://www.opengis.net/def/crs/EPSG/0/32720", + "http://www.opengis.net/def/crs/EPSG/0/32721", + "http://www.opengis.net/def/crs/EPSG/0/32722", + "http://www.opengis.net/def/crs/EPSG/0/32723", + "http://www.opengis.net/def/crs/EPSG/0/32724", + "http://www.opengis.net/def/crs/EPSG/0/32725", + "http://www.opengis.net/def/crs/EPSG/0/32726", + "http://www.opengis.net/def/crs/EPSG/0/32727", + "http://www.opengis.net/def/crs/EPSG/0/32728", + "http://www.opengis.net/def/crs/EPSG/0/32729", + "http://www.opengis.net/def/crs/EPSG/0/32730", + "http://www.opengis.net/def/crs/EPSG/0/32731", + "http://www.opengis.net/def/crs/EPSG/0/32732", + "http://www.opengis.net/def/crs/EPSG/0/32733", + "http://www.opengis.net/def/crs/EPSG/0/32734", + "http://www.opengis.net/def/crs/EPSG/0/32735", + "http://www.opengis.net/def/crs/EPSG/0/32736", + "http://www.opengis.net/def/crs/EPSG/0/32737", + "http://www.opengis.net/def/crs/EPSG/0/32738", + "http://www.opengis.net/def/crs/EPSG/0/32739", + "http://www.opengis.net/def/crs/EPSG/0/32740", + "http://www.opengis.net/def/crs/EPSG/0/32741", + "http://www.opengis.net/def/crs/EPSG/0/32742", + "http://www.opengis.net/def/crs/EPSG/0/32743", + "http://www.opengis.net/def/crs/EPSG/0/32744", + "http://www.opengis.net/def/crs/EPSG/0/32745", + "http://www.opengis.net/def/crs/EPSG/0/32746", + "http://www.opengis.net/def/crs/EPSG/0/32746", + "http://www.opengis.net/def/crs/EPSG/0/32748", + "http://www.opengis.net/def/crs/EPSG/0/32749", + "http://www.opengis.net/def/crs/EPSG/0/32750", + "http://www.opengis.net/def/crs/EPSG/0/32751", + "http://www.opengis.net/def/crs/EPSG/0/32752", + "http://www.opengis.net/def/crs/EPSG/0/32753", + "http://www.opengis.net/def/crs/EPSG/0/32754", + "http://www.opengis.net/def/crs/EPSG/0/32755", + "http://www.opengis.net/def/crs/EPSG/0/32756", + "http://www.opengis.net/def/crs/EPSG/0/32757", + "http://www.opengis.net/def/crs/EPSG/0/32758", + "http://www.opengis.net/def/crs/EPSG/0/32759", + "http://www.opengis.net/def/crs/EPSG/0/32760", + "http://www.opengis.net/def/crs/SR-ORG/0/98739" + ], + "title":"SkySat" +} \ No newline at end of file diff --git a/tests/test_units.py b/tests/test_units.py index b05ba2e6..3f2214ff 100644 --- a/tests/test_units.py +++ b/tests/test_units.py @@ -57,6 +57,7 @@ def test_collections(get_process_graph, collection_id): [ "SPOT", "PLEIADES", + "SKYSAT", "WORLDVIEW", "PLANETSCOPE", "landsat-7-etm+-l2", From c966491057a7504d2b6fd7ac45c5518418722b24 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Fri, 29 Sep 2023 15:10:44 +0200 Subject: [PATCH 02/42] add estimate to metadata --- rest/app.py | 5 +++++ 1 file 
changed, 5 insertions(+) diff --git a/rest/app.py b/rest/app.py index d57bcad3..4c5e7ade 100644 --- a/rest/app.py +++ b/rest/app.py @@ -605,6 +605,9 @@ def add_job_to_queue(job_id): # we can create a /results_metadata.json file here # the contents of the batch job folder in the bucket isn't revealed anywhere else anyway + estimated_pu, _ = get_batch_job_estimate( + job["batch_request_id"], json.loads(job["process"]), job["deployment_endpoint"] + ) metadata_creation_time = datetime.utcnow().strftime(ISO8601_UTC_FORMAT) batch_job_metadata = { "type": "Feature", @@ -614,6 +617,8 @@ def add_job_to_queue(job_id): "https://stac-extensions.github.io/timestamps/v1.1.0/schema.json", ], "id": job_id, + "estimated_sentinel_hub_processing_units": estimated_pu, + "platform_credits": estimated_pu * 0.15, "geometry": None, "properties": { "title": job.get("title", None), From cddd5f33d8eec8f7520c5251c8d975fc5a8484fa Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Tue, 3 Oct 2023 14:03:42 +0200 Subject: [PATCH 03/42] Save estimate to db, so we have it saved until the job is deleted --- rest/app.py | 10 +++------- rest/dynamodb/dynamodb.py | 1 + rest/processing/processing.py | 3 +++ 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/rest/app.py b/rest/app.py index 4c5e7ade..90ddbced 100644 --- a/rest/app.py +++ b/rest/app.py @@ -604,10 +604,6 @@ def add_job_to_queue(job_id): # we can create a /results_metadata.json file here # the contents of the batch job folder in the bucket isn't revealed anywhere else anyway - - estimated_pu, _ = get_batch_job_estimate( - job["batch_request_id"], json.loads(job["process"]), job["deployment_endpoint"] - ) metadata_creation_time = datetime.utcnow().strftime(ISO8601_UTC_FORMAT) batch_job_metadata = { "type": "Feature", @@ -617,8 +613,8 @@ def add_job_to_queue(job_id): "https://stac-extensions.github.io/timestamps/v1.1.0/schema.json", ], "id": job_id, - "estimated_sentinel_hub_processing_units": estimated_pu, - "platform_credits": estimated_pu * 0.15, + "estimated_sentinel_hub_processing_units": float(job["sh_pu_estimate"]), + "platform_credits": float(job["sh_pu_estimate"]) * 0.15, "geometry": None, "properties": { "title": job.get("title", None), @@ -659,7 +655,7 @@ def estimate_job_cost(job_id): job = JobsPersistence.get_by_id(job_id) if job is None: raise JobNotFound() - + estimated_pu, estimated_file_size = get_batch_job_estimate( job["batch_request_id"], json.loads(job["process"]), job["deployment_endpoint"] ) diff --git a/rest/dynamodb/dynamodb.py b/rest/dynamodb/dynamodb.py index 9fbc4429..e69d1042 100644 --- a/rest/dynamodb/dynamodb.py +++ b/rest/dynamodb/dynamodb.py @@ -204,6 +204,7 @@ def create(cls, data): "http_code": {"N": data.get("http_code", "200")}, "results": {"S": json.dumps(data.get("results"))}, "deployment_endpoint": {"S": data.get("deployment_endpoint", "https://services.sentinel-hub.com")}, + "sh_pu_estimate": {"N": data.get("sh_pu_estimate", "0")} } if data.get("title"): item["title"] = {"S": str(data.get("title"))} diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 5a2aa096..a251b4c8 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -9,6 +9,7 @@ from processing.sentinel_hub import SentinelHub from processing.partially_supported_processes import partially_supported_processes from dynamodb.utils import get_user_defined_processes_graphs +from dynamodb import JobsPersistence from const import openEOBatchJobStatus from openeoerrors import Timeout @@ -58,6 +59,7 @@ def 
start_new_batch_job(sentinel_hub, process, job_id): estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(new_batch_request_id) g.user.report_usage(estimated_pu, job_id) + JobsPersistence.update_key(job_id, "sh_pu_estimate", estimated_pu) return new_batch_request_id @@ -88,6 +90,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_pu, job_id) + JobsPersistence.update_key(job_id, "sh_pu_estimate", estimated_pu) elif batch_request_info.status == BatchRequestStatus.PARTIAL: sentinel_hub.restart_batch_job(batch_request_id) elif batch_request_info.status in [ From 283a11b8744c348fb0fafba4d474845140966606 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Tue, 3 Oct 2023 14:10:03 +0200 Subject: [PATCH 04/42] rename variable --- rest/app.py | 4 ++-- rest/dynamodb/dynamodb.py | 2 +- rest/processing/processing.py | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/rest/app.py b/rest/app.py index 90ddbced..0e75ba60 100644 --- a/rest/app.py +++ b/rest/app.py @@ -613,8 +613,8 @@ def add_job_to_queue(job_id): "https://stac-extensions.github.io/timestamps/v1.1.0/schema.json", ], "id": job_id, - "estimated_sentinel_hub_processing_units": float(job["sh_pu_estimate"]), - "platform_credits": float(job["sh_pu_estimate"]) * 0.15, + "estimated_sentinel_hub_processing_units": float(job["estimated_pu"]), + "platform_credits": float(job["estimated_pu"]) * 0.15, "geometry": None, "properties": { "title": job.get("title", None), diff --git a/rest/dynamodb/dynamodb.py b/rest/dynamodb/dynamodb.py index e69d1042..473d9fb0 100644 --- a/rest/dynamodb/dynamodb.py +++ b/rest/dynamodb/dynamodb.py @@ -204,7 +204,7 @@ def create(cls, data): "http_code": {"N": data.get("http_code", "200")}, "results": {"S": json.dumps(data.get("results"))}, "deployment_endpoint": {"S": data.get("deployment_endpoint", "https://services.sentinel-hub.com")}, - "sh_pu_estimate": {"N": data.get("sh_pu_estimate", "0")} + "estimated_pu": {"N": data.get("estimated_pu", "0")} } if data.get("title"): item["title"] = {"S": str(data.get("title"))} diff --git a/rest/processing/processing.py b/rest/processing/processing.py index a251b4c8..71c4f5e4 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -59,7 +59,7 @@ def start_new_batch_job(sentinel_hub, process, job_id): estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(new_batch_request_id) g.user.report_usage(estimated_pu, job_id) - JobsPersistence.update_key(job_id, "sh_pu_estimate", estimated_pu) + JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu) return new_batch_request_id @@ -90,7 +90,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_pu, job_id) - JobsPersistence.update_key(job_id, "sh_pu_estimate", estimated_pu) + JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu) elif batch_request_info.status == BatchRequestStatus.PARTIAL: sentinel_hub.restart_batch_job(batch_request_id) elif batch_request_info.status in [ From 3ad15cfb0103bf5cc767e968771fdcf5e8adba9d Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: 
Tue, 3 Oct 2023 14:22:05 +0200 Subject: [PATCH 05/42] Also add estimated file size to db and on /estimate fetch from db if possible --- rest/app.py | 14 +++++++++++--- rest/dynamodb/dynamodb.py | 3 ++- rest/processing/processing.py | 6 ++++-- 3 files changed, 17 insertions(+), 6 deletions(-) diff --git a/rest/app.py b/rest/app.py index 0e75ba60..c5b09747 100644 --- a/rest/app.py +++ b/rest/app.py @@ -656,9 +656,17 @@ def estimate_job_cost(job_id): if job is None: raise JobNotFound() - estimated_pu, estimated_file_size = get_batch_job_estimate( - job["batch_request_id"], json.loads(job["process"]), job["deployment_endpoint"] - ) + # if estimate == 0, it has not been estimated yet, so do estimate and save it to db + if float(job["estimated_pu"]) == 0 and float(job["estimated_file_size"]) == 0: + estimated_pu, estimated_file_size = get_batch_job_estimate( + job["batch_request_id"], json.loads(job["process"]), job["deployment_endpoint"] + ) + JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu) + JobsPersistence.update_key(job_id, "estimated_file_size", estimated_file_size) + else: + estimated_pu = float(job["estimated_pu"]) + estimated_file_size = float(job["estimated_file_size"]) + return flask.make_response( jsonify(costs=estimated_pu, size=estimated_file_size), 200, diff --git a/rest/dynamodb/dynamodb.py b/rest/dynamodb/dynamodb.py index 473d9fb0..32eb1fd7 100644 --- a/rest/dynamodb/dynamodb.py +++ b/rest/dynamodb/dynamodb.py @@ -204,7 +204,8 @@ def create(cls, data): "http_code": {"N": data.get("http_code", "200")}, "results": {"S": json.dumps(data.get("results"))}, "deployment_endpoint": {"S": data.get("deployment_endpoint", "https://services.sentinel-hub.com")}, - "estimated_pu": {"N": data.get("estimated_pu", "0")} + "estimated_pu": {"N": data.get("estimated_pu", "0")}, + "estimated_file_size": {"N": data.get("estimated_file_size", "0")} } if data.get("title"): item["title"] = {"S": str(data.get("title"))} diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 71c4f5e4..52d6fb6d 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -56,10 +56,11 @@ def create_batch_job(process): def start_new_batch_job(sentinel_hub, process, job_id): new_batch_request_id, deployment_endpoint = create_batch_job(process) - estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint) + estimated_pu, estimated_file_size = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(new_batch_request_id) g.user.report_usage(estimated_pu, job_id) JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu) + JobsPersistence.update_key(job_id, "estimated_file_size", estimated_file_size) return new_batch_request_id @@ -87,10 +88,11 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): if batch_request_info is None: return start_new_batch_job(sentinel_hub, process, job_id) elif batch_request_info.status in [BatchRequestStatus.CREATED, BatchRequestStatus.ANALYSIS_DONE]: - estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) + estimated_pu, estimated_file_size = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_pu, job_id) JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu) + JobsPersistence.update_key(job_id, "estimated_file_size", estimated_file_size) elif batch_request_info.status == 
BatchRequestStatus.PARTIAL:
        sentinel_hub.restart_batch_job(batch_request_id)
    elif batch_request_info.status in [

From d9cb910dfc43149d2ba6ef88982968c40872acdd Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Tue, 3 Oct 2023 14:30:05 +0200
Subject: [PATCH 06/42] Run black to lint

---
 rest/app.py               | 4 ++--
 rest/dynamodb/dynamodb.py | 2 +-
 2 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/rest/app.py b/rest/app.py
index c5b09747..9b1e7005 100644
--- a/rest/app.py
+++ b/rest/app.py
@@ -655,7 +655,7 @@ def estimate_job_cost(job_id):
     job = JobsPersistence.get_by_id(job_id)
     if job is None:
         raise JobNotFound()
-    
+
     # if estimate == 0, it has not been estimated yet, so do estimate and save it to db
     if float(job["estimated_pu"]) == 0 and float(job["estimated_file_size"]) == 0:
         estimated_pu, estimated_file_size = get_batch_job_estimate(
@@ -666,7 +666,7 @@ def estimate_job_cost(job_id):
     else:
         estimated_pu = float(job["estimated_pu"])
         estimated_file_size = float(job["estimated_file_size"])
-    
+
     return flask.make_response(
         jsonify(costs=estimated_pu, size=estimated_file_size),
         200,
diff --git a/rest/dynamodb/dynamodb.py b/rest/dynamodb/dynamodb.py
index 32eb1fd7..685a7d3e 100644
--- a/rest/dynamodb/dynamodb.py
+++ b/rest/dynamodb/dynamodb.py
@@ -205,7 +205,7 @@ def create(cls, data):
             "results": {"S": json.dumps(data.get("results"))},
             "deployment_endpoint": {"S": data.get("deployment_endpoint", "https://services.sentinel-hub.com")},
             "estimated_pu": {"N": data.get("estimated_pu", "0")},
-            "estimated_file_size": {"N": data.get("estimated_file_size", "0")}
+            "estimated_file_size": {"N": data.get("estimated_file_size", "0")},
         }
         if data.get("title"):
             item["title"] = {"S": str(data.get("title"))}

From 47f603c460f7e14b540cd239abac660e4c0a9c1f Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Thu, 5 Oct 2023 10:40:22 +0200
Subject: [PATCH 07/42] Move estimate to properties in metadata

---
 rest/app.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/rest/app.py b/rest/app.py
index 9b1e7005..11662a72 100644
--- a/rest/app.py
+++ b/rest/app.py
@@ -613,13 +613,13 @@ def add_job_to_queue(job_id):
             "https://stac-extensions.github.io/timestamps/v1.1.0/schema.json",
         ],
         "id": job_id,
-        "estimated_sentinel_hub_processing_units": float(job["estimated_pu"]),
-        "platform_credits": float(job["estimated_pu"]) * 0.15,
         "geometry": None,
         "properties": {
             "title": job.get("title", None),
             "datetime": metadata_creation_time,
             "expires": metadata_valid,
+            "estimated_usage_sentinel_hub_processing_units": float(job["estimated_pu"]),
+            "estimated_usage_platform_credits": float(job["estimated_pu"]) * 0.15,
             "processing:expression": {"format": "openeo", "expression": json.loads(job["process"])},
         },
         "links": links,

From 2a1ad083b293f957750d738d89b9e3308db98492 Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Thu, 5 Oct 2023 16:48:29 +0200
Subject: [PATCH 08/42] Add new error

---
 rest/openeoerrors.py | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/rest/openeoerrors.py b/rest/openeoerrors.py
index 5117f689..2a525e6b 100644
--- a/rest/openeoerrors.py
+++ b/rest/openeoerrors.py
@@ -153,3 +153,9 @@ def __init__(self, width, height) -> None:
 
     error_code = "ImageDimensionInvalid"
     http_code = 400
+
+
+class InsufficientCredits(SHOpenEOError):
+    error_code = "InsufficientCredits"
+    http_code = 402
+    message = "You do not have sufficient credits to perform this request. Please visit https://portal.terrascope.be/pages/pricing to find more information on how to buy additional credits."
\ No newline at end of file From 00e6b70254f8a64450433cb8e01810a2eb262c18 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 5 Oct 2023 16:50:20 +0200 Subject: [PATCH 09/42] Create function for getting credits from reporting service --- rest/authentication/user.py | 6 ++++++ rest/usage_reporting/report_usage.py | 12 ++++++++++++ 2 files changed, 18 insertions(+) diff --git a/rest/authentication/user.py b/rest/authentication/user.py index 410e1570..fdd577d5 100644 --- a/rest/authentication/user.py +++ b/rest/authentication/user.py @@ -24,6 +24,9 @@ def get_user_info(self): if self.default_plan: user_info["default_plan"] = self.default_plan.name return user_info + + def get_leftover_credits(self): + pass def report_usage(self, pu_spent, job_id=None): pass @@ -59,6 +62,9 @@ def get_user_info(self): user_info = super().get_user_info() user_info["info"] = {"oidc_userinfo": self.oidc_userinfo} return user_info + + def get_leftover_credits(self): + return usageReporting.get_leftover_credits() def report_usage(self, pu_spent, job_id=None): usageReporting.report_usage(self.user_id, pu_spent, job_id) diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py index b86bb0e7..d9710d3f 100644 --- a/rest/usage_reporting/report_usage.py +++ b/rest/usage_reporting/report_usage.py @@ -57,6 +57,18 @@ def reporting_check_health(self): content = r.json() return r.status_code == 200 and content["status"] == "ok" + + def get_leftover_credits(self): + user_url = f"{self.base_url}user" + reporting_token = self.get_token() + + headers = {"Authorization": f"Bearer {reporting_token['access_token']}"} + + r = requests.get(user_url, headers=headers) + content = r.json() + credits = content["credits"] + + return credits def report_usage(self, user_id, pu_spent, job_id=None, max_tries=5): reporting_token = self.get_token() From 05a37554cdfbb91bdbc7ad4a104dd053410809b7 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 5 Oct 2023 16:59:36 +0200 Subject: [PATCH 10/42] Improve function for fetching credits --- rest/usage_reporting/report_usage.py | 23 ++++++++++++++++++----- 1 file changed, 18 insertions(+), 5 deletions(-) diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py index d9710d3f..f8200f1d 100644 --- a/rest/usage_reporting/report_usage.py +++ b/rest/usage_reporting/report_usage.py @@ -58,17 +58,30 @@ def reporting_check_health(self): return r.status_code == 200 and content["status"] == "ok" - def get_leftover_credits(self): + def get_leftover_credits(self, max_tries=5): user_url = f"{self.base_url}user" reporting_token = self.get_token() headers = {"Authorization": f"Bearer {reporting_token['access_token']}"} - r = requests.get(user_url, headers=headers) - content = r.json() - credits = content["credits"] + if not self.reporting_check_health(): + log(ERROR, "Services for usage reporting are not healthy") + raise Internal("Services for usage reporting are not healthy") + - return credits + for try_number in range(max_tries): + r = requests.get(user_url, headers=headers) + + if r.status_code == 200: + content = r.json() + credits = content["credits"] + + return credits + else: + log(ERROR, f"Error fetching leftover credits on try #{try_number+1}: {r.status_code} {r.text}") + raise Internal(f"Problems during fetching leftover credits on try #{try_number+1}: {r.status_code} {r.text}") + + raise Internal(f"Out of retries. 
Fetching leftover credits failed: {r.status_code} {r.text}") def report_usage(self, user_id, pu_spent, job_id=None, max_tries=5): reporting_token = self.get_token() From 9e7b924a783334fbb5981e5bc01e4624ec27882c Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 5 Oct 2023 17:08:35 +0200 Subject: [PATCH 11/42] Prepare part of code --- rest/processing/processing.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 5a2aa096..e9d3dfb5 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -54,6 +54,10 @@ def create_batch_job(process): def start_new_batch_job(sentinel_hub, process, job_id): + # add check here (after merge MR !318) and raise error if needed + # leftover_credits = g.user.get_leftover_credits() + # if leftover_credits < estimated_pu: + # raise InsufficientCredits() new_batch_request_id, deployment_endpoint = create_batch_job(process) estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(new_batch_request_id) @@ -85,6 +89,10 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): if batch_request_info is None: return start_new_batch_job(sentinel_hub, process, job_id) elif batch_request_info.status in [BatchRequestStatus.CREATED, BatchRequestStatus.ANALYSIS_DONE]: + # add check here (after merge MR !318) and raise error if needed + # leftover_credits = g.user.get_leftover_credits() + # if leftover_credits < estimated_pu: + # raise InsufficientCredits() estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_pu, job_id) From a977a4e99cb211db49aba712ea52ffd851144a00 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 5 Oct 2023 17:16:14 +0200 Subject: [PATCH 12/42] run linting --- rest/authentication/user.py | 4 ++-- rest/openeoerrors.py | 2 +- rest/processing/processing.py | 4 ++-- rest/usage_reporting/report_usage.py | 9 +++++---- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/rest/authentication/user.py b/rest/authentication/user.py index fdd577d5..2b6e300d 100644 --- a/rest/authentication/user.py +++ b/rest/authentication/user.py @@ -24,7 +24,7 @@ def get_user_info(self): if self.default_plan: user_info["default_plan"] = self.default_plan.name return user_info - + def get_leftover_credits(self): pass @@ -62,7 +62,7 @@ def get_user_info(self): user_info = super().get_user_info() user_info["info"] = {"oidc_userinfo": self.oidc_userinfo} return user_info - + def get_leftover_credits(self): return usageReporting.get_leftover_credits() diff --git a/rest/openeoerrors.py b/rest/openeoerrors.py index 2a525e6b..e04b74af 100644 --- a/rest/openeoerrors.py +++ b/rest/openeoerrors.py @@ -158,4 +158,4 @@ def __init__(self, width, height) -> None: class InsufficientCredits(SHOpenEOError): error_code = "InsufficientCredits" http_code = 402 - message = "You do not have sufficient credits to perform this request. Please visit https://portal.terrascope.be/pages/pricing to find more information on how to buy additional credits." \ No newline at end of file + message = "You do not have sufficient credits to perform this request. Please visit https://portal.terrascope.be/pages/pricing to find more information on how to buy additional credits." 
diff --git a/rest/processing/processing.py b/rest/processing/processing.py index e9d3dfb5..29c229b8 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -57,7 +57,7 @@ def start_new_batch_job(sentinel_hub, process, job_id): # add check here (after merge MR !318) and raise error if needed # leftover_credits = g.user.get_leftover_credits() # if leftover_credits < estimated_pu: - # raise InsufficientCredits() + # raise InsufficientCredits() new_batch_request_id, deployment_endpoint = create_batch_job(process) estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(new_batch_request_id) @@ -92,7 +92,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): # add check here (after merge MR !318) and raise error if needed # leftover_credits = g.user.get_leftover_credits() # if leftover_credits < estimated_pu: - # raise InsufficientCredits() + # raise InsufficientCredits() estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_pu, job_id) diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py index f8200f1d..1aaea4de 100644 --- a/rest/usage_reporting/report_usage.py +++ b/rest/usage_reporting/report_usage.py @@ -57,7 +57,7 @@ def reporting_check_health(self): content = r.json() return r.status_code == 200 and content["status"] == "ok" - + def get_leftover_credits(self, max_tries=5): user_url = f"{self.base_url}user" reporting_token = self.get_token() @@ -68,7 +68,6 @@ def get_leftover_credits(self, max_tries=5): log(ERROR, "Services for usage reporting are not healthy") raise Internal("Services for usage reporting are not healthy") - for try_number in range(max_tries): r = requests.get(user_url, headers=headers) @@ -79,8 +78,10 @@ def get_leftover_credits(self, max_tries=5): return credits else: log(ERROR, f"Error fetching leftover credits on try #{try_number+1}: {r.status_code} {r.text}") - raise Internal(f"Problems during fetching leftover credits on try #{try_number+1}: {r.status_code} {r.text}") - + raise Internal( + f"Problems during fetching leftover credits on try #{try_number+1}: {r.status_code} {r.text}" + ) + raise Internal(f"Out of retries. 
Fetching leftover credits failed: {r.status_code} {r.text}") def report_usage(self, user_id, pu_spent, job_id=None, max_tries=5): From 1308a23158b7ec442324a97375862fa16c056758 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Mon, 9 Oct 2023 10:42:49 +0200 Subject: [PATCH 13/42] Add check and raise error if not enough credits --- rest/processing/processing.py | 20 +++++++++++--------- 1 file changed, 11 insertions(+), 9 deletions(-) diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 29c229b8..07bb8795 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -10,7 +10,7 @@ from processing.partially_supported_processes import partially_supported_processes from dynamodb.utils import get_user_defined_processes_graphs from const import openEOBatchJobStatus -from openeoerrors import Timeout +from openeoerrors import InsufficientCredits, Timeout def check_process_graph_conversion_validity(process_graph): @@ -54,12 +54,13 @@ def create_batch_job(process): def start_new_batch_job(sentinel_hub, process, job_id): - # add check here (after merge MR !318) and raise error if needed - # leftover_credits = g.user.get_leftover_credits() - # if leftover_credits < estimated_pu: - # raise InsufficientCredits() new_batch_request_id, deployment_endpoint = create_batch_job(process) estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint) + + leftover_credits = g.user.get_leftover_credits() + if leftover_credits < estimated_pu: + raise InsufficientCredits() + sentinel_hub.start_batch_job(new_batch_request_id) g.user.report_usage(estimated_pu, job_id) return new_batch_request_id @@ -89,11 +90,12 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): if batch_request_info is None: return start_new_batch_job(sentinel_hub, process, job_id) elif batch_request_info.status in [BatchRequestStatus.CREATED, BatchRequestStatus.ANALYSIS_DONE]: - # add check here (after merge MR !318) and raise error if needed - # leftover_credits = g.user.get_leftover_credits() - # if leftover_credits < estimated_pu: - # raise InsufficientCredits() estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) + + leftover_credits = g.user.get_leftover_credits() + if leftover_credits < estimated_pu: + raise InsufficientCredits() + sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_pu, job_id) elif batch_request_info.status == BatchRequestStatus.PARTIAL: From 142d8d189a395083b2a79c2b756f4aa34c8542c4 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Wed, 11 Oct 2023 14:06:04 +0200 Subject: [PATCH 14/42] add missing import --- rest/authentication/authentication.py | 1 + 1 file changed, 1 insertion(+) diff --git a/rest/authentication/authentication.py b/rest/authentication/authentication.py index e4f40d90..a0d69fe1 100644 --- a/rest/authentication/authentication.py +++ b/rest/authentication/authentication.py @@ -15,6 +15,7 @@ Internal, CredentialsInvalid, BillingPlanInvalid, + TokenInvalid, ) from authentication.oidc_providers import oidc_providers from authentication.user import OIDCUser, SHUser From 7729e0892ba9758f5d8a6eef07e13627e2dcc2eb Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Wed, 11 Oct 2023 14:06:20 +0200 Subject: [PATCH 15/42] add header for content type --- rest/usage_reporting/report_usage.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py index 1aaea4de..d3739abe 
100644
--- a/rest/usage_reporting/report_usage.py
+++ b/rest/usage_reporting/report_usage.py
@@ -62,7 +62,7 @@ def get_leftover_credits(self, max_tries=5):
         user_url = f"{self.base_url}user"
         reporting_token = self.get_token()
 
-        headers = {"Authorization": f"Bearer {reporting_token['access_token']}"}
+        headers = {"content-type": "application/json", "Authorization": f"Bearer {reporting_token['access_token']}"}
 
         if not self.reporting_check_health():
             log(ERROR, "Services for usage reporting are not healthy")
@@ -73,7 +73,7 @@ def get_leftover_credits(self, max_tries=5):
 
             if r.status_code == 200:
                 content = r.json()
-                credits = content["credits"]
+                credits = content.get("credits")
 
                 return credits
             else:

From 274ddf423f739e9865f3baeba1c7e49ae13f5995 Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Thu, 12 Oct 2023 09:40:04 +0200
Subject: [PATCH 16/42] add costs and usage to /jobs/job_id endpoint

---
 rest/app.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/rest/app.py b/rest/app.py
index 11662a72..076310dc 100644
--- a/rest/app.py
+++ b/rest/app.py
@@ -489,6 +489,8 @@ def api_batch_job(job_id):
             error=error,
             created=convert_timestamp_to_simpler_format(job["created"]),
             updated=convert_timestamp_to_simpler_format(job["last_updated"]),
+            costs=float(job.get("estimated_pu", 0)) * 0.15,
+            usage={"Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job.get("estimated_pu", 0))}}
         ),
         200,
     )

From 09312ba2d8e7ad415d11f236bc9324d57d3b099d Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Thu, 12 Oct 2023 09:51:58 +0200
Subject: [PATCH 17/42] Estimate immediately on job creation

---
 rest/app.py                   |  4 ++++
 rest/processing/processing.py | 20 ++++++++++++--------
 2 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/rest/app.py b/rest/app.py
index 076310dc..61bfd685 100644
--- a/rest/app.py
+++ b/rest/app.py
@@ -456,9 +456,13 @@ def api_jobs():
 
     batch_request_id, deployment_endpoint = create_batch_job(data["process"])
 
+    estimated_pu, estimated_file_size = get_batch_job_estimate(batch_request_id, data["process"], deployment_endpoint)
+
     data["batch_request_id"] = batch_request_id
     data["user_id"] = g.user.user_id
     data["deployment_endpoint"] = deployment_endpoint
+    data["estimated_pu"] = str(estimated_pu)
+    data["estimated_file_size"] = str(estimated_file_size)
 
     record_id = JobsPersistence.create(data)
 
diff --git a/rest/processing/processing.py b/rest/processing/processing.py
index 52d6fb6d..05a5745d 100644
--- a/rest/processing/processing.py
+++ b/rest/processing/processing.py
@@ -11,7 +11,7 @@
 from dynamodb.utils import get_user_defined_processes_graphs
 from dynamodb import JobsPersistence
 from const import openEOBatchJobStatus
-from openeoerrors import Timeout
+from openeoerrors import JobNotFound, Timeout
 
 
 def check_process_graph_conversion_validity(process_graph):
@@ -55,12 +55,14 @@ def create_batch_job(process):
 
 
 def start_new_batch_job(sentinel_hub, process, job_id):
-    new_batch_request_id, deployment_endpoint = create_batch_job(process)
-    estimated_pu, estimated_file_size = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint)
+    job = JobsPersistence.get_by_id(job_id)
+    if job is None:
+        raise JobNotFound()
+
+    estimated_pu = float(job["estimated_pu"])
+    new_batch_request_id, _ = create_batch_job(process)
     sentinel_hub.start_batch_job(new_batch_request_id)
     g.user.report_usage(estimated_pu, job_id)
-    JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu)
-    JobsPersistence.update_key(job_id, "estimated_file_size", estimated_file_size)
     return
new_batch_request_id @@ -88,11 +90,13 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): if batch_request_info is None: return start_new_batch_job(sentinel_hub, process, job_id) elif batch_request_info.status in [BatchRequestStatus.CREATED, BatchRequestStatus.ANALYSIS_DONE]: - estimated_pu, estimated_file_size = get_batch_job_estimate(batch_request_id, process, deployment_endpoint) + job = JobsPersistence.get_by_id(job_id) + if job is None: + raise JobNotFound() + + estimated_pu = float(job["estimated_pu"]) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_pu, job_id) - JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu) - JobsPersistence.update_key(job_id, "estimated_file_size", estimated_file_size) elif batch_request_info.status == BatchRequestStatus.PARTIAL: sentinel_hub.restart_batch_job(batch_request_id) elif batch_request_info.status in [ From b763b224f295ee649ba6baa9d57cd473d7122768 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 12 Oct 2023 09:52:20 +0200 Subject: [PATCH 18/42] get estimate from db for a job --- rest/app.py | 14 +++----------- 1 file changed, 3 insertions(+), 11 deletions(-) diff --git a/rest/app.py b/rest/app.py index 61bfd685..33841425 100644 --- a/rest/app.py +++ b/rest/app.py @@ -661,17 +661,9 @@ def estimate_job_cost(job_id): job = JobsPersistence.get_by_id(job_id) if job is None: raise JobNotFound() - - # if estimate == 0, it has not been estimated yet, so do estimate and save it to db - if float(job["estimated_pu"]) == 0 and float(job["estimated_file_size"]) == 0: - estimated_pu, estimated_file_size = get_batch_job_estimate( - job["batch_request_id"], json.loads(job["process"]), job["deployment_endpoint"] - ) - JobsPersistence.update_key(job_id, "estimated_pu", estimated_pu) - JobsPersistence.update_key(job_id, "estimated_file_size", estimated_file_size) - else: - estimated_pu = float(job["estimated_pu"]) - estimated_file_size = float(job["estimated_file_size"]) + + estimated_pu = float(job["estimated_pu"]) + estimated_file_size = float(job["estimated_file_size"]) return flask.make_response( jsonify(costs=estimated_pu, size=estimated_file_size), From 1296c835aae344f18cdb1b8776b251f6c208af39 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 12 Oct 2023 09:58:06 +0200 Subject: [PATCH 19/42] add also platform credits to /jobs/job_id endpoint --- rest/app.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/rest/app.py b/rest/app.py index 33841425..1dc32306 100644 --- a/rest/app.py +++ b/rest/app.py @@ -494,7 +494,8 @@ def api_batch_job(job_id): created=convert_timestamp_to_simpler_format(job["created"]), updated=convert_timestamp_to_simpler_format(job["last_updated"]), costs=float(job.get("estimated_pu", 0)) * 0.15, - usage={"Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job.get("estimated_pu", 0))}} + usage={"Platform Credits": {"unit": "credits", "value": float(job.get("estimated_pu", 0)) * 0.15}, + "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job.get("estimated_pu", 0))}} ), 200, ) From 5867e21e31d403b72a7be9ecbccd4f32593bb015 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 12 Oct 2023 10:17:51 +0200 Subject: [PATCH 20/42] add similar usage reporting to metadata as for /jobs/job_id --- rest/app.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/rest/app.py b/rest/app.py index 1dc32306..b64d34b7 100644 --- a/rest/app.py +++ b/rest/app.py @@ -625,8 +625,10 @@ def 
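# The lookup-or-fail step that PATCHes 17/18 introduce in both start paths,
# reduced to a runnable sketch. JobsPersistence is stubbed with a plain dict;
# note that estimates are persisted as strings (DynamoDB numeric attributes
# travel as strings), hence the float() conversion on every read.

class JobNotFound(Exception):
    pass

FAKE_JOBS = {"job-1": {"estimated_pu": "2.5"}}  # stand-in for the jobs table

def load_estimate(job_id):
    job = FAKE_JOBS.get(job_id)
    if job is None:
        raise JobNotFound()
    return float(job["estimated_pu"])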
add_job_to_queue(job_id): "title": job.get("title", None), "datetime": metadata_creation_time, "expires": metadata_valid, - "estimated_usage_sentinel_hub_processing_units": float(job["estimated_pu"]), - "estimated_usage_platform_credits": float(job["estimated_pu"]) * 0.15, + "usage": { + "Platform credits": {"unit": "credits", "value": float(job["estimated_pu"]) * 0.15}, + "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job["estimated_pu"])}, + }, "processing:expression": {"format": "openeo", "expression": json.loads(job["process"])}, }, "links": links, From bcf13e903e813cb9e1d8918f2d7d5afc215fd4f6 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 12 Oct 2023 10:35:38 +0200 Subject: [PATCH 21/42] rename variables, save platform credits to db, round estimate to 3 decimals and also include platform credits to metadata --- rest/app.py | 17 +++++++++-------- rest/dynamodb/dynamodb.py | 3 ++- rest/processing/processing.py | 8 ++++---- 3 files changed, 15 insertions(+), 13 deletions(-) diff --git a/rest/app.py b/rest/app.py index b64d34b7..a27d60a0 100644 --- a/rest/app.py +++ b/rest/app.py @@ -461,7 +461,8 @@ def api_jobs(): data["batch_request_id"] = batch_request_id data["user_id"] = g.user.user_id data["deployment_endpoint"] = deployment_endpoint - data["estimated_pu"] = str(estimated_pu) + data["estimated_sentinelhub_pu"] = str(round(estimated_pu, 3)) + data["estimated_platform_credits"] = str(round(estimated_pu * 0.15, 3)) data["estimated_file_size"] = str(estimated_file_size) record_id = JobsPersistence.create(data) @@ -493,9 +494,9 @@ def api_batch_job(job_id): error=error, created=convert_timestamp_to_simpler_format(job["created"]), updated=convert_timestamp_to_simpler_format(job["last_updated"]), - costs=float(job.get("estimated_pu", 0)) * 0.15, - usage={"Platform Credits": {"unit": "credits", "value": float(job.get("estimated_pu", 0)) * 0.15}, - "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job.get("estimated_pu", 0))}} + costs=float(job.get("estimated_platform_credits", 0)), + usage={"Platform Credits": {"unit": "credits", "value": float(job.get("estimated_platform_credits", 0))}, + "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job.get("estimated_sentinelhub_pu", 0))}} ), 200, ) @@ -626,8 +627,8 @@ def add_job_to_queue(job_id): "datetime": metadata_creation_time, "expires": metadata_valid, "usage": { - "Platform credits": {"unit": "credits", "value": float(job["estimated_pu"]) * 0.15}, - "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job["estimated_pu"])}, + "Platform credits": {"unit": "credits", "value": float(job["estimated_platform_credits"])}, + "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job["estimated_sentinelhub_pu"])}, }, "processing:expression": {"format": "openeo", "expression": json.loads(job["process"])}, }, @@ -665,11 +666,11 @@ def estimate_job_cost(job_id): if job is None: raise JobNotFound() - estimated_pu = float(job["estimated_pu"]) + estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) estimated_file_size = float(job["estimated_file_size"]) return flask.make_response( - jsonify(costs=estimated_pu, size=estimated_file_size), + jsonify(costs=estimated_sentinelhub_pu, size=estimated_file_size), 200, ) diff --git a/rest/dynamodb/dynamodb.py b/rest/dynamodb/dynamodb.py index 685a7d3e..347e1e7d 100644 --- a/rest/dynamodb/dynamodb.py +++ b/rest/dynamodb/dynamodb.py @@ -204,7 +204,8 @@ def create(cls, data): "http_code": {"N": 
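# Sketch of the round-then-stringify step from PATCH 21: both figures are
# rounded to three decimals and stored as strings to match DynamoDB's "N"
# attribute encoding. `persist` is a hypothetical key/value writer standing
# in for JobsPersistence.update_key.
def persist_estimates(persist, estimated_pu):
    persist("estimated_sentinelhub_pu", str(round(estimated_pu, 3)))
    persist("estimated_platform_credits", str(round(estimated_pu * 0.15, 3)))

store = {}
persist_estimates(store.__setitem__, 2.34567)
# store == {"estimated_sentinelhub_pu": "2.346", "estimated_platform_credits": "0.352"}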
data.get("http_code", "200")}, "results": {"S": json.dumps(data.get("results"))}, "deployment_endpoint": {"S": data.get("deployment_endpoint", "https://services.sentinel-hub.com")}, - "estimated_pu": {"N": data.get("estimated_pu", "0")}, + "estimated_sentinelhub_pu": {"N": data.get("estimated_sentinelhub_pu", "0")}, + "estimated_platform_credits": {"N": data.get("estimated_platform_credits", "0")}, "estimated_file_size": {"N": data.get("estimated_file_size", "0")}, } if data.get("title"): diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 05a5745d..90418e3d 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -59,10 +59,10 @@ def start_new_batch_job(sentinel_hub, process, job_id): if job is None: raise JobNotFound() - estimated_pu = float(job["estimated_pu"]) + estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) new_batch_request_id, _ = create_batch_job(process) sentinel_hub.start_batch_job(new_batch_request_id) - g.user.report_usage(estimated_pu, job_id) + g.user.report_usage(estimated_sentinelhub_pu, job_id) return new_batch_request_id @@ -94,9 +94,9 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): if job is None: raise JobNotFound() - estimated_pu = float(job["estimated_pu"]) + estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) sentinel_hub.start_batch_job(batch_request_id) - g.user.report_usage(estimated_pu, job_id) + g.user.report_usage(estimated_sentinelhub_pu, job_id) elif batch_request_info.status == BatchRequestStatus.PARTIAL: sentinel_hub.restart_batch_job(batch_request_id) elif batch_request_info.status in [ From cddd0ea02201e4bf9ef7f86e87074a0b185a6dca Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 12 Oct 2023 10:36:26 +0200 Subject: [PATCH 22/42] run linting --- rest/app.py | 22 ++++++++++++++++------ rest/processing/processing.py | 4 ++-- 2 files changed, 18 insertions(+), 8 deletions(-) diff --git a/rest/app.py b/rest/app.py index a27d60a0..faf3a428 100644 --- a/rest/app.py +++ b/rest/app.py @@ -456,8 +456,10 @@ def api_jobs(): batch_request_id, deployment_endpoint = create_batch_job(data["process"]) - estimated_pu, estimated_file_size = get_batch_job_estimate(batch_request_id, data["process"], deployment_endpoint) - + estimated_pu, estimated_file_size = get_batch_job_estimate( + batch_request_id, data["process"], deployment_endpoint + ) + data["batch_request_id"] = batch_request_id data["user_id"] = g.user.user_id data["deployment_endpoint"] = deployment_endpoint @@ -495,8 +497,13 @@ def api_batch_job(job_id): created=convert_timestamp_to_simpler_format(job["created"]), updated=convert_timestamp_to_simpler_format(job["last_updated"]), costs=float(job.get("estimated_platform_credits", 0)), - usage={"Platform Credits": {"unit": "credits", "value": float(job.get("estimated_platform_credits", 0))}, - "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job.get("estimated_sentinelhub_pu", 0))}} + usage={ + "Platform Credits": {"unit": "credits", "value": float(job.get("estimated_platform_credits", 0))}, + "Sentinel Hub": { + "unit": "sentinelhub_processing_unit", + "value": float(job.get("estimated_sentinelhub_pu", 0)), + }, + }, ), 200, ) @@ -628,7 +635,10 @@ def add_job_to_queue(job_id): "expires": metadata_valid, "usage": { "Platform credits": {"unit": "credits", "value": float(job["estimated_platform_credits"])}, - "Sentinel Hub": {"unit": "sentinelhub_processing_unit", "value": float(job["estimated_sentinelhub_pu"])}, + "Sentinel 
Hub": { + "unit": "sentinelhub_processing_unit", + "value": float(job["estimated_sentinelhub_pu"]), + }, }, "processing:expression": {"format": "openeo", "expression": json.loads(job["process"])}, }, @@ -665,7 +675,7 @@ def estimate_job_cost(job_id): job = JobsPersistence.get_by_id(job_id) if job is None: raise JobNotFound() - + estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) estimated_file_size = float(job["estimated_file_size"]) diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 90418e3d..ad65b7b8 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -58,7 +58,7 @@ def start_new_batch_job(sentinel_hub, process, job_id): job = JobsPersistence.get_by_id(job_id) if job is None: raise JobNotFound() - + estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) new_batch_request_id, _ = create_batch_job(process) sentinel_hub.start_batch_job(new_batch_request_id) @@ -93,7 +93,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): job = JobsPersistence.get_by_id(job_id) if job is None: raise JobNotFound() - + estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_sentinelhub_pu, job_id) From 64086d351ba78c2e4900f272673088616ef42288 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 12 Oct 2023 14:54:56 +0200 Subject: [PATCH 23/42] one more safety check for estimate value --- rest/processing/processing.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/rest/processing/processing.py b/rest/processing/processing.py index ad65b7b8..51133d4e 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -141,7 +141,7 @@ def get_batch_job_estimate(batch_request_id, process, deployment_endpoint): if batch_request.value_estimate is None: analysis_sleep_time_s = 5 total_sleep_time = 0 - MAX_TOTAL_TIME = 29 + MAX_TOTAL_TIME = 39 sentinel_hub.start_batch_job_analysis(batch_request_id) while batch_request.value_estimate is None and batch_request.status in [ @@ -178,7 +178,10 @@ def get_batch_job_estimate(batch_request_id, process, deployment_endpoint): if temporal_interval is None: temporal_interval = default_temporal_interval - estimated_pu = estimate_secure_factor * batch_request.value_estimate * default_temporal_interval / temporal_interval + estimated_batch_request_value = batch_request.value_estimate if batch_request.value_estimate is not None else 0 + estimated_pu = ( + estimate_secure_factor * estimated_batch_request_value * default_temporal_interval / temporal_interval + ) n_pixels = batch_request.tile_count * batch_request.tile_width_px * batch_request.tile_height_px estimated_file_size = p.estimate_file_size(n_pixels=n_pixels) From 4fefaa49711b0a2808266d3791f56e12a37100a0 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 12 Oct 2023 15:00:34 +0200 Subject: [PATCH 24/42] set back max total time for timeout --- rest/processing/processing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 51133d4e..7dde11a2 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -141,7 +141,7 @@ def get_batch_job_estimate(batch_request_id, process, deployment_endpoint): if batch_request.value_estimate is None: analysis_sleep_time_s = 5 total_sleep_time = 0 - MAX_TOTAL_TIME = 39 + MAX_TOTAL_TIME = 29 sentinel_hub.start_batch_job_analysis(batch_request_id) while 
batch_request.value_estimate is None and batch_request.status in [ From df18418c8066724822d878d85b973518dae55150 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Fri, 13 Oct 2023 12:58:54 +0200 Subject: [PATCH 25/42] Remove estimation on job creation and only hide the costs/usage if job has been started --- rest/app.py | 57 +++++++++++++++++------------------ rest/processing/processing.py | 30 +++++++++++++----- 2 files changed, 50 insertions(+), 37 deletions(-) diff --git a/rest/app.py b/rest/app.py index faf3a428..02921e22 100644 --- a/rest/app.py +++ b/rest/app.py @@ -36,8 +36,8 @@ start_batch_job, cancel_batch_job, modify_batch_job, - get_batch_job_estimate, get_batch_job_status, + create_or_get_estimate_values_from_db, ) from processing.utils import inject_variables_in_process_graph, overwrite_spatial_extent_without_parameters from processing.openeo_process_errors import OpenEOProcessError @@ -456,16 +456,9 @@ def api_jobs(): batch_request_id, deployment_endpoint = create_batch_job(data["process"]) - estimated_pu, estimated_file_size = get_batch_job_estimate( - batch_request_id, data["process"], deployment_endpoint - ) - data["batch_request_id"] = batch_request_id data["user_id"] = g.user.user_id data["deployment_endpoint"] = deployment_endpoint - data["estimated_sentinelhub_pu"] = str(round(estimated_pu, 3)) - data["estimated_platform_credits"] = str(round(estimated_pu * 0.15, 3)) - data["estimated_file_size"] = str(estimated_file_size) record_id = JobsPersistence.create(data) @@ -486,25 +479,28 @@ def api_batch_job(job_id): if flask.request.method == "GET": status, error = get_batch_job_status(job["batch_request_id"], job["deployment_endpoint"]) - return flask.make_response( - jsonify( - id=job_id, - title=job.get("title", None), - description=job.get("description", None), - process={"process_graph": json.loads(job["process"])["process_graph"]}, - status=status.value, - error=error, - created=convert_timestamp_to_simpler_format(job["created"]), - updated=convert_timestamp_to_simpler_format(job["last_updated"]), - costs=float(job.get("estimated_platform_credits", 0)), - usage={ - "Platform Credits": {"unit": "credits", "value": float(job.get("estimated_platform_credits", 0))}, - "Sentinel Hub": { - "unit": "sentinelhub_processing_unit", - "value": float(job.get("estimated_sentinelhub_pu", 0)), - }, + data_to_jsonify = { + "id": job_id, + "title": job.get("title", None), + "description": job.get("description", None), + "process": {"process_graph": json.loads(job["process"])["process_graph"]}, + "status": status.value, + "error": error, + "created": convert_timestamp_to_simpler_format(job["created"]), + "updated": convert_timestamp_to_simpler_format(job["last_updated"]), + } + + if status is not openEOBatchJobStatus.CREATED: + data_to_jsonify["costs"] = float(job.get("estimated_platform_credits", 0)) + data_to_jsonify["usage"] = { + "Platform Credits": {"unit": "credits", "value": float(job.get("estimated_platform_credits", 0))}, + "Sentinel Hub": { + "unit": "sentinelhub_processing_unit", + "value": float(job.get("estimated_sentinelhub_pu", 0)), }, - ), + } + return flask.make_response( + jsonify(data_to_jsonify), 200, ) @@ -634,10 +630,10 @@ def add_job_to_queue(job_id): "datetime": metadata_creation_time, "expires": metadata_valid, "usage": { - "Platform credits": {"unit": "credits", "value": float(job["estimated_platform_credits"])}, + "Platform credits": {"unit": "credits", "value": job["estimated_platform_credits"]}, "Sentinel Hub": { "unit": "sentinelhub_processing_unit", - 
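# Sketch of the conditional response assembly in PATCH 25: cost and usage
# fields are attached only after the job has left the CREATED state, so a
# job that was never started exposes no billing figures. The enum and job
# record are stubbed so the snippet runs standalone.
from enum import Enum

class Status(Enum):
    CREATED = "created"
    RUNNING = "running"

def job_response(job, status):
    payload = {"id": job["id"], "status": status.value}
    if status is not Status.CREATED:
        payload["costs"] = float(job.get("estimated_platform_credits", 0))
    return payload

# job_response({"id": "job-1"}, Status.CREATED) carries no "costs" key.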
"value": float(job["estimated_sentinelhub_pu"]), + "value": job["estimated_sentinelhub_pu"], }, }, "processing:expression": {"format": "openeo", "expression": json.loads(job["process"])}, @@ -676,8 +672,9 @@ def estimate_job_cost(job_id): if job is None: raise JobNotFound() - estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) - estimated_file_size = float(job["estimated_file_size"]) + estimated_sentinelhub_pu, _, estimated_file_size = create_or_get_estimate_values_from_db( + job, job["batch_request_id"] + ) return flask.make_response( jsonify(costs=estimated_sentinelhub_pu, size=estimated_file_size), diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 7dde11a2..2e54502f 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -1,3 +1,4 @@ +import json import time from pg_to_evalscript import convert_from_process_graph @@ -55,12 +56,13 @@ def create_batch_job(process): def start_new_batch_job(sentinel_hub, process, job_id): + new_batch_request_id, _ = create_batch_job(process) + job = JobsPersistence.get_by_id(job_id) if job is None: raise JobNotFound() - estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) - new_batch_request_id, _ = create_batch_job(process) + estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, new_batch_request_id) sentinel_hub.start_batch_job(new_batch_request_id) g.user.report_usage(estimated_sentinelhub_pu, job_id) return new_batch_request_id @@ -94,7 +96,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): if job is None: raise JobNotFound() - estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) + estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, job["batch_request_id"]) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_sentinelhub_pu, job_id) elif batch_request_info.status == BatchRequestStatus.PARTIAL: @@ -178,10 +180,7 @@ def get_batch_job_estimate(batch_request_id, process, deployment_endpoint): if temporal_interval is None: temporal_interval = default_temporal_interval - estimated_batch_request_value = batch_request.value_estimate if batch_request.value_estimate is not None else 0 - estimated_pu = ( - estimate_secure_factor * estimated_batch_request_value * default_temporal_interval / temporal_interval - ) + estimated_pu = estimate_secure_factor * batch_request.value_estimate * default_temporal_interval / temporal_interval n_pixels = batch_request.tile_count * batch_request.tile_width_px * batch_request.tile_height_px estimated_file_size = p.estimate_file_size(n_pixels=n_pixels) @@ -200,3 +199,20 @@ def get_batch_job_status(batch_request_id, deployment_endpoint): ) else: return openEOBatchJobStatus.FINISHED, None + + +def create_or_get_estimate_values_from_db(job, batch_request_id): + if float(job["estimated_sentinelhub_pu"]) == 0 and float(job["estimated_file_size"]) == 0: + estimated_sentinelhub_pu, estimated_file_size = get_batch_job_estimate( + batch_request_id, json.loads(job["process"]), job["deployment_endpoint"] + ) + estimated_platform_credits = round(estimated_sentinelhub_pu * 0.15, 3) + JobsPersistence.update_key(job["id"], "estimated_sentinelhub_pu", str(round(estimated_sentinelhub_pu, 3))) + JobsPersistence.update_key(job["id"], "estimated_platform_credits", str(estimated_platform_credits)) + JobsPersistence.update_key(job["id"], "estimated_file_size", str(estimated_file_size)) + else: + estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) 
+ estimated_platform_credits = float(job["estimated_platform_credits"]) + estimated_file_size = float(job["estimated_file_size"]) + + return estimated_sentinelhub_pu, estimated_platform_credits, estimated_file_size From e103825c53faf293c903b622f49d5d4bf88bacf8 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Fri, 13 Oct 2023 14:41:59 +0200 Subject: [PATCH 26/42] only try once to get credits --- rest/usage_reporting/report_usage.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py index d3739abe..017d3735 100644 --- a/rest/usage_reporting/report_usage.py +++ b/rest/usage_reporting/report_usage.py @@ -58,7 +58,7 @@ def reporting_check_health(self): return r.status_code == 200 and content["status"] == "ok" - def get_leftover_credits(self, max_tries=5): + def get_leftover_credits(self): user_url = f"{self.base_url}user" reporting_token = self.get_token() @@ -68,21 +68,19 @@ def get_leftover_credits(self, max_tries=5): log(ERROR, "Services for usage reporting are not healthy") raise Internal("Services for usage reporting are not healthy") - for try_number in range(max_tries): - r = requests.get(user_url, headers=headers) + r = requests.get(user_url, headers=headers) - if r.status_code == 200: - content = r.json() - credits = content.get("credits") + if r.status_code == 200: + content = r.json() + credits = content.get("credits") - return credits - else: - log(ERROR, f"Error fetching leftover credits on try #{try_number+1}: {r.status_code} {r.text}") - raise Internal( - f"Problems during fetching leftover credits on try #{try_number+1}: {r.status_code} {r.text}" + return credits + else: + log(ERROR, f"Error fetching leftover credits: {r.status_code} {r.text}") + raise Internal( + f"Problems during fetching leftover credits: {r.status_code} {r.text}" ) - raise Internal(f"Out of retries. Fetching leftover credits failed: {r.status_code} {r.text}") def report_usage(self, user_id, pu_spent, job_id=None, max_tries=5): reporting_token = self.get_token() From 75495ae62123d2d0661db2078f36b3162e7276ac Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Mon, 16 Oct 2023 15:45:05 +0200 Subject: [PATCH 27/42] Have fallback for getting estimates from job. 
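# A standalone sketch of the single-attempt fetch that PATCH 26 leaves behind
# once the retry loop is removed: one GET, return the credits on 200,
# otherwise fail immediately. `requests` is the real library; the base URL
# and token are placeholders.
import requests

class Internal(Exception):
    pass

def get_leftover_credits(base_url, access_token):
    headers = {
        "content-type": "application/json",
        "Authorization": f"Bearer {access_token}",
    }
    r = requests.get(f"{base_url}user", headers=headers)
    if r.status_code == 200:
        # .get() instead of [] so a payload without "credits" yields None
        # rather than raising KeyError.
        return r.json().get("credits")
    raise Internal(f"Problems during fetching leftover credits: {r.status_code} {r.text}")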
--- rest/app.py | 4 ++-- rest/processing/processing.py | 8 ++++---- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/rest/app.py b/rest/app.py index 02921e22..54e0cee4 100644 --- a/rest/app.py +++ b/rest/app.py @@ -630,10 +630,10 @@ def add_job_to_queue(job_id): "datetime": metadata_creation_time, "expires": metadata_valid, "usage": { - "Platform credits": {"unit": "credits", "value": job["estimated_platform_credits"]}, + "Platform credits": {"unit": "credits", "value": job.get("estimated_platform_credits", 0)}, "Sentinel Hub": { "unit": "sentinelhub_processing_unit", - "value": job["estimated_sentinelhub_pu"], + "value": job.get("estimated_sentinelhub_pu", 0), }, }, "processing:expression": {"format": "openeo", "expression": json.loads(job["process"])}, diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 2e54502f..b8a69ef7 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -202,7 +202,7 @@ def get_batch_job_status(batch_request_id, deployment_endpoint): def create_or_get_estimate_values_from_db(job, batch_request_id): - if float(job["estimated_sentinelhub_pu"]) == 0 and float(job["estimated_file_size"]) == 0: + if float(job.get("estimated_sentinelhub_pu", 0)) == 0 and float(job.get("estimated_file_size", 0)) == 0: estimated_sentinelhub_pu, estimated_file_size = get_batch_job_estimate( batch_request_id, json.loads(job["process"]), job["deployment_endpoint"] ) @@ -211,8 +211,8 @@ def create_or_get_estimate_values_from_db(job, batch_request_id): JobsPersistence.update_key(job["id"], "estimated_platform_credits", str(estimated_platform_credits)) JobsPersistence.update_key(job["id"], "estimated_file_size", str(estimated_file_size)) else: - estimated_sentinelhub_pu = float(job["estimated_sentinelhub_pu"]) - estimated_platform_credits = float(job["estimated_platform_credits"]) - estimated_file_size = float(job["estimated_file_size"]) + estimated_sentinelhub_pu = float(job.get("estimated_sentinelhub_pu", 0)) + estimated_platform_credits = float(job.get("estimated_platform_credits", 0)) + estimated_file_size = float(job.get("estimated_file_size", 0)) return estimated_sentinelhub_pu, estimated_platform_credits, estimated_file_size From f71c99ddfa732c94d6539adaff3d74468220017d Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Tue, 17 Oct 2023 11:30:38 +0200 Subject: [PATCH 28/42] re-estimate if process graph was updated --- rest/app.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/rest/app.py b/rest/app.py index 54e0cee4..efd1796e 100644 --- a/rest/app.py +++ b/rest/app.py @@ -31,6 +31,7 @@ from dynamodb import JobsPersistence, ProcessGraphsPersistence, ServicesPersistence from processing.processing import ( check_process_graph_conversion_validity, + get_batch_job_estimate, process_data_synchronously, create_batch_job, start_batch_job, @@ -523,6 +524,15 @@ def api_batch_job(job_id): update_batch_request_id(job_id, job, new_batch_request_id) data["deployment_endpoint"] = deployment_endpoint + if json.dumps(data.get("process"), sort_keys=True) != json.dumps(json.loads(job.get("process")), sort_keys=True): + estimated_sentinelhub_pu, estimated_file_size = get_batch_job_estimate( + new_batch_request_id, data.get("process"), deployment_endpoint + ) + estimated_platform_credits = round(estimated_sentinelhub_pu * 0.15, 3) + JobsPersistence.update_key(job["id"], "estimated_sentinelhub_pu", str(round(estimated_sentinelhub_pu, 3))) + JobsPersistence.update_key(job["id"], "estimated_platform_credits", 
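# Sketch of the change test in PATCH 28: serialising both process graphs with
# sort_keys=True yields a canonical form, so the comparison is insensitive to
# key order and re-estimation only triggers on a real change to the graph.
import json

def process_changed(new_process, stored_process_json):
    return json.dumps(new_process, sort_keys=True) != json.dumps(
        json.loads(stored_process_json), sort_keys=True
    )

print(process_changed({"a": 1, "b": 2}, '{"b": 2, "a": 1}'))  # False: same graph
print(process_changed({"a": 1, "b": 3}, '{"b": 2, "a": 1}'))  # True: changed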
str(estimated_platform_credits)) + JobsPersistence.update_key(job["id"], "estimated_file_size", str(estimated_file_size)) + for key in data: JobsPersistence.update_key(job_id, key, data[key]) From 2aca5068d27989fe35f06dad1bd97856c73345e4 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Tue, 17 Oct 2023 11:32:02 +0200 Subject: [PATCH 29/42] report SH pu's instead of openeo credits --- rest/app.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rest/app.py b/rest/app.py index efd1796e..177783ae 100644 --- a/rest/app.py +++ b/rest/app.py @@ -492,7 +492,7 @@ def api_batch_job(job_id): } if status is not openEOBatchJobStatus.CREATED: - data_to_jsonify["costs"] = float(job.get("estimated_platform_credits", 0)) + data_to_jsonify["costs"] = float(job.get("estimated_sentinelhub_pu", 0)) data_to_jsonify["usage"] = { "Platform Credits": {"unit": "credits", "value": float(job.get("estimated_platform_credits", 0))}, "Sentinel Hub": { From adb326035ff6d6590d2c8c8f31d1b683fec5eb90 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Tue, 17 Oct 2023 14:26:57 +0200 Subject: [PATCH 30/42] Add sum of all costs that were generated so far --- rest/app.py | 14 +++++++++----- rest/dynamodb/dynamodb.py | 1 + rest/processing/processing.py | 2 ++ 3 files changed, 12 insertions(+), 5 deletions(-) diff --git a/rest/app.py b/rest/app.py index 177783ae..187adec4 100644 --- a/rest/app.py +++ b/rest/app.py @@ -492,12 +492,12 @@ def api_batch_job(job_id): } if status is not openEOBatchJobStatus.CREATED: - data_to_jsonify["costs"] = float(job.get("estimated_sentinelhub_pu", 0)) + data_to_jsonify["costs"] = float(job.get("sum_costs", 0)) data_to_jsonify["usage"] = { - "Platform Credits": {"unit": "credits", "value": float(job.get("estimated_platform_credits", 0))}, + "Platform Credits": {"unit": "credits", "value": round(float(job.get("sum_costs", 0)) * 0.15, 3)}, "Sentinel Hub": { "unit": "sentinelhub_processing_unit", - "value": float(job.get("estimated_sentinelhub_pu", 0)), + "value": float(job.get("sum_costs", 0)), }, } return flask.make_response( @@ -524,12 +524,16 @@ def api_batch_job(job_id): update_batch_request_id(job_id, job, new_batch_request_id) data["deployment_endpoint"] = deployment_endpoint - if json.dumps(data.get("process"), sort_keys=True) != json.dumps(json.loads(job.get("process")), sort_keys=True): + if json.dumps(data.get("process"), sort_keys=True) != json.dumps( + json.loads(job.get("process")), sort_keys=True + ): estimated_sentinelhub_pu, estimated_file_size = get_batch_job_estimate( new_batch_request_id, data.get("process"), deployment_endpoint ) estimated_platform_credits = round(estimated_sentinelhub_pu * 0.15, 3) - JobsPersistence.update_key(job["id"], "estimated_sentinelhub_pu", str(round(estimated_sentinelhub_pu, 3))) + JobsPersistence.update_key( + job["id"], "estimated_sentinelhub_pu", str(round(estimated_sentinelhub_pu, 3)) + ) JobsPersistence.update_key(job["id"], "estimated_platform_credits", str(estimated_platform_credits)) JobsPersistence.update_key(job["id"], "estimated_file_size", str(estimated_file_size)) diff --git a/rest/dynamodb/dynamodb.py b/rest/dynamodb/dynamodb.py index 347e1e7d..04bc297b 100644 --- a/rest/dynamodb/dynamodb.py +++ b/rest/dynamodb/dynamodb.py @@ -207,6 +207,7 @@ def create(cls, data): "estimated_sentinelhub_pu": {"N": data.get("estimated_sentinelhub_pu", "0")}, "estimated_platform_credits": {"N": data.get("estimated_platform_credits", "0")}, "estimated_file_size": {"N": data.get("estimated_file_size", "0")}, + "sum_costs": {"N": 
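# The accumulation step from PATCH 30 as a sketch: every (re)start adds the
# current estimate to a running total, so a job restarted several times
# reports everything spent so far rather than only the latest estimate.
# `save` is a hypothetical stand-in for JobsPersistence.update_key.
def add_to_sum_costs(job, estimated_pu, save):
    new_sum = round(float(job.get("sum_costs", 0)) + estimated_pu, 3)
    save("sum_costs", str(new_sum))
    return new_sum

# add_to_sum_costs({"sum_costs": "1.2"}, 2.5, print) saves and returns 3.7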
data.get("sum_costs", "0")}, } if data.get("title"): item["title"] = {"S": str(data.get("title"))} diff --git a/rest/processing/processing.py b/rest/processing/processing.py index b8a69ef7..6d98a812 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -63,6 +63,7 @@ def start_new_batch_job(sentinel_hub, process, job_id): raise JobNotFound() estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, new_batch_request_id) + JobsPersistence.update_key(job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3))) sentinel_hub.start_batch_job(new_batch_request_id) g.user.report_usage(estimated_sentinelhub_pu, job_id) return new_batch_request_id @@ -97,6 +98,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): raise JobNotFound() estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, job["batch_request_id"]) + JobsPersistence.update_key(job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3))) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_sentinelhub_pu, job_id) elif batch_request_info.status == BatchRequestStatus.PARTIAL: From f0547a9b07d00fd334564296c12428c6611d6bc2 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Tue, 17 Oct 2023 14:27:08 +0200 Subject: [PATCH 31/42] run linting --- rest/processing/processing.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/rest/processing/processing.py b/rest/processing/processing.py index 6d98a812..36a3b66b 100644 --- a/rest/processing/processing.py +++ b/rest/processing/processing.py @@ -63,7 +63,9 @@ def start_new_batch_job(sentinel_hub, process, job_id): raise JobNotFound() estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, new_batch_request_id) - JobsPersistence.update_key(job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3))) + JobsPersistence.update_key( + job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3)) + ) sentinel_hub.start_batch_job(new_batch_request_id) g.user.report_usage(estimated_sentinelhub_pu, job_id) return new_batch_request_id @@ -98,7 +100,9 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id): raise JobNotFound() estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, job["batch_request_id"]) - JobsPersistence.update_key(job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3))) + JobsPersistence.update_key( + job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3)) + ) sentinel_hub.start_batch_job(batch_request_id) g.user.report_usage(estimated_sentinelhub_pu, job_id) elif batch_request_info.status == BatchRequestStatus.PARTIAL: From 87d5bd91b55e3fcbbb308c06e3d8044c2372b0d9 Mon Sep 17 00:00:00 2001 From: grensburg Date: Wed, 18 Oct 2023 08:58:00 +0200 Subject: [PATCH 32/42] fix update job endpoint caused by reserved word "plan" --- rest/dynamodb/dynamodb.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/rest/dynamodb/dynamodb.py b/rest/dynamodb/dynamodb.py index 9fbc4429..7d93cd53 100644 --- a/rest/dynamodb/dynamodb.py +++ b/rest/dynamodb/dynamodb.py @@ -16,6 +16,8 @@ FAKE_AWS_ACCESS_KEY_ID = "AKIAIOSFODNN7EXAMPLE" FAKE_AWS_SECRET_ACCESS_KEY = "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" +USED_RESERVED_WORDS = ["plan"] + class 
DeploymentTypes(Enum): PRODUCTION = "production" @@ -121,12 +123,17 @@ def update_key(cls, record_id, key, new_value): else: new_value = str(new_value) - updated_item = cls.dynamodb.update_item( + kwargs = dict( TableName=cls.TABLE_NAME, Key={"id": {"S": record_id}}, UpdateExpression="SET {} = :new_content".format(key), ExpressionAttributeValues={":new_content": {data_type: new_value}}, ) + if key in USED_RESERVED_WORDS: + kwargs["UpdateExpression"] = "SET #{} = :new_content".format(key) + kwargs["ExpressionAttributeNames"] = {"#{}".format(key): "{}".format(key)} + + updated_item = cls.dynamodb.update_item(**kwargs) return updated_item @classmethod @@ -316,7 +323,6 @@ def create(cls, data): if __name__ == "__main__": - # To create tables, run: # $ pipenv shell # $ DEPLOYMENT_TYPE="production" ./dynamodb.py From 9122914fdaa65c2a33880c25755ae2c02a057fd9 Mon Sep 17 00:00:00 2001 From: Daniel Thiex Date: Wed, 18 Oct 2023 11:07:36 +0200 Subject: [PATCH 33/42] reorder keys to match other collections --- .../commercial_collections/SKYSAT.json | 497 +++++++++--------- .../collection_information/SKYSAT.json | 497 +++++++++--------- 2 files changed, 498 insertions(+), 496 deletions(-) diff --git a/rest/openeo_collections/commercial_collections/SKYSAT.json b/rest/openeo_collections/commercial_collections/SKYSAT.json index 1e019b2a..771ff291 100644 --- a/rest/openeo_collections/commercial_collections/SKYSAT.json +++ b/rest/openeo_collections/commercial_collections/SKYSAT.json @@ -1,8 +1,65 @@ { - "cube:dimensions":{ - "bands":{ - "type":"bands", - "values":[ + "type": "Collection", + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/datacube/v1.0.0/schema.json", + "https://stac-extensions.github.io/scientific/v1.0.0/schema.json", + "https://stac-extensions.github.io/eo/v1.0.0/schema.json" + ], + "id": "SKYSAT", + "datasource_type": "byoc-ID", + "title": "SkySat", + "links": [], + "description": "SkySat is one of the satellite constellations operated by Planet. SkySat satellite constellation consists of 21 satellites, which were launched between 2013 and 2020. The satellites are based on a CubeSat concept but are a bit bigger comparing to the PlanetScope's satellites. Because of its rapid revisit time, this data is suitable to monitor fast changes on earth's surface. 
However, note that the data acquisition must be tasked, data is not acquired systematically.", + "keywords": [ + "sentinel hub", + "SkySat", + "vhr", + "commercial data" + ], + "license": "various", + "providers": [ + { + "description": "", + "name": "Sentinel Hub", + "roles": [ + "processor" + ], + "url": "https://services.sentinel-hub.com/" + }, + { + "description": "", + "name": "Planet", + "roles": [ + "producer" + ], + "url": "https://www.planet.com/products/planet-imagery/" + } + ], + "extent": { + "spatial": { + "bbox": [ + [ + -180, + -90, + 180, + 90 + ] + ] + }, + "temporal": { + "interval": [ + [ + "2014-01-01T00:00:00Z", + null + ] + ] + } + }, + "cube:dimensions": { + "bands": { + "type": "bands", + "values": [ "Blue", "Green", "Red", @@ -19,379 +76,324 @@ "dataMask" ] }, - "t":{ - "extent":[ + "t": { + "extent": [ "2014-01-01T00:00:00Z", null ], - "type":"temporal" + "type": "temporal" }, - "x":{ - "axis":"x", - "extent":[ + "x": { + "axis": "x", + "extent": [ -180, 180 ], - "reference_system":{ - "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", - "area":"World", - "base_crs":{ - "coordinate_system":{ - "axis":[ + "reference_system": { + "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", + "area": "World", + "base_crs": { + "coordinate_system": { + "axis": [ { - "abbreviation":"Lat", - "direction":"north", - "name":"Geodetic latitude", - "unit":"degree" + "abbreviation": "Lat", + "direction": "north", + "name": "Geodetic latitude", + "unit": "degree" }, { - "abbreviation":"Lon", - "direction":"east", - "name":"Geodetic longitude", - "unit":"degree" + "abbreviation": "Lon", + "direction": "east", + "name": "Geodetic longitude", + "unit": "degree" } ], - "subtype":"ellipsoidal" + "subtype": "ellipsoidal" }, - "datum":{ - "ellipsoid":{ - "inverse_flattening":298.257223563, - "name":"WGS 84", - "semi_major_axis":6378137 + "datum": { + "ellipsoid": { + "inverse_flattening": 298.257223563, + "name": "WGS 84", + "semi_major_axis": 6378137 }, - "name":"World Geodetic System 1984", - "type":"GeodeticReferenceFrame" + "name": "World Geodetic System 1984", + "type": "GeodeticReferenceFrame" }, - "name":"WGS 84" + "name": "WGS 84" }, - "bbox":{ - "east_longitude":180, - "north_latitude":90, - "south_latitude":-90, - "west_longitude":-180 + "bbox": { + "east_longitude": 180, + "north_latitude": 90, + "south_latitude": -90, + "west_longitude": -180 }, - "coordinate_system":{ - "axis":[ + "coordinate_system": { + "axis": [ { - "abbreviation":"E", - "direction":"east", - "name":"Easting", - "unit":"metre" + "abbreviation": "E", + "direction": "east", + "name": "Easting", + "unit": "metre" }, { - "abbreviation":"N", - "direction":"north", - "name":"Northing", - "unit":"metre" + "abbreviation": "N", + "direction": "north", + "name": "Northing", + "unit": "metre" } ], - "subtype":"Cartesian" + "subtype": "Cartesian" }, - "id":{ - "authority":"OGC", - "code":"Auto42001", - "version":"1.3" + "id": { + "authority": "OGC", + "code": "Auto42001", + "version": "1.3" }, - "name":"AUTO 42001 (Universal Transverse Mercator)", - "type":"ProjectedCRS" + "name": "AUTO 42001 (Universal Transverse Mercator)", + "type": "ProjectedCRS" }, - "type":"spatial" + "type": "spatial" }, - "y":{ - "axis":"y", - "extent":[ + "y": { + "axis": "y", + "extent": [ -90, 90 ], - "reference_system":{ - "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", - "area":"World", - "base_crs":{ - "coordinate_system":{ - "axis":[ + "reference_system": { + "$schema": 
"https://proj.org/schemas/v0.2/projjson.schema.json", + "area": "World", + "base_crs": { + "coordinate_system": { + "axis": [ { - "abbreviation":"Lat", - "direction":"north", - "name":"Geodetic latitude", - "unit":"degree" + "abbreviation": "Lat", + "direction": "north", + "name": "Geodetic latitude", + "unit": "degree" }, { - "abbreviation":"Lon", - "direction":"east", - "name":"Geodetic longitude", - "unit":"degree" + "abbreviation": "Lon", + "direction": "east", + "name": "Geodetic longitude", + "unit": "degree" } ], - "subtype":"ellipsoidal" + "subtype": "ellipsoidal" }, - "datum":{ - "ellipsoid":{ - "inverse_flattening":298.257223563, - "name":"WGS 84", - "semi_major_axis":6378137 + "datum": { + "ellipsoid": { + "inverse_flattening": 298.257223563, + "name": "WGS 84", + "semi_major_axis": 6378137 }, - "name":"World Geodetic System 1984", - "type":"GeodeticReferenceFrame" + "name": "World Geodetic System 1984", + "type": "GeodeticReferenceFrame" }, - "name":"WGS 84" + "name": "WGS 84" }, - "bbox":{ - "east_longitude":180, - "north_latitude":90, - "south_latitude":-90, - "west_longitude":-180 + "bbox": { + "east_longitude": 180, + "north_latitude": 90, + "south_latitude": -90, + "west_longitude": -180 }, - "coordinate_system":{ - "axis":[ + "coordinate_system": { + "axis": [ { - "abbreviation":"E", - "direction":"east", - "name":"Easting", - "unit":"metre" + "abbreviation": "E", + "direction": "east", + "name": "Easting", + "unit": "metre" }, { - "abbreviation":"N", - "direction":"north", - "name":"Northing", - "unit":"metre" + "abbreviation": "N", + "direction": "north", + "name": "Northing", + "unit": "metre" } ], - "subtype":"Cartesian" + "subtype": "Cartesian" }, - "id":{ - "authority":"OGC", - "code":"Auto42001", - "version":"1.3" + "id": { + "authority": "OGC", + "code": "Auto42001", + "version": "1.3" }, - "name":"AUTO 42001 (Universal Transverse Mercator)", - "type":"ProjectedCRS" + "name": "AUTO 42001 (Universal Transverse Mercator)", + "type": "ProjectedCRS" }, - "type":"spatial" - } - }, - "datasource_type":"byoc-ID", - "description":"SkySat is one of the satellite constellations operated by Planet. SkySat satellite constellation consists of 21 satellites, which were launched between 2013 and 2020. The satellites are based on a CubeSat concept but are a bit bigger comparing to the PlanetScope's satellites. Because of its rapid revisit time, this data is suitable to monitor fast changes on earth's surface. 
However, note that the data acquisition must be tasked, data is not acquired systematically.", - "extent":{ - "spatial":{ - "bbox":[ - [ - -180, - -90, - 180, - 90 - ] - ] - }, - "temporal":{ - "interval":[ - [ - "2014-01-01T00:00:00Z", - null - ] - ] + "type": "spatial" } }, - "id":"SKYSAT", - "keywords":[ - "sentinel hub", - "SkySat", - "vhr", - "commercial data" - ], - "license":"various", - "links":[ - - ], - "providers":[ - { - "description":"", - "name":"Sentinel Hub", - "roles":[ - "processor" - ], - "url":"https://services.sentinel-hub.com/" - }, - { - "description":"", - "name":"Planet", - "roles":[ - "producer" - ], - "url":"https://www.planet.com/products/planet-imagery/" - } - ], - "sci:citation":"\u00a9 Planet (YYYY), contains SkySat data processed by Sentinel Hub", - "stac_extensions":[ - "datacube" - ], - "stac_version":"0.9.0", - "summaries":{ - "eo:bands":[ + "sci:citation": "\u00a9 Planet (YYYY), contains SkySat data processed by Sentinel Hub", + "summaries": { + "eo:bands": [ { - "center_wavelength":0.4825, - "common_name":"blue", - "description":"Blue", - "full_width_half_max":0.325, - "name":"Blue", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.4825, + "common_name": "blue", + "description": "Blue", + "full_width_half_max": 0.325, + "name": "Blue", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.545, - "common_name":"green", - "description":"Green", - "full_width_half_max":0.4, - "name":"Green", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.545, + "common_name": "green", + "description": "Green", + "full_width_half_max": 0.4, + "name": "Green", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.650, - "common_name":"red", - "description":"Red", - "full_width_half_max":0.45, - "name":"Red", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.650, + "common_name": "red", + "description": "Red", + "full_width_half_max": 0.45, + "name": "Red", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.82, - "common_name":"nir08", - "description":"Near Infrared", - "full_width_half_max":0.8, - "name":"NIR", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.82, + "common_name": "nir08", + "description": "Near Infrared", + "full_width_half_max": 0.8, + "name": "NIR", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Unusable Data Mask", - "name":"UDM", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Unusable Data Mask", + "name": "UDM", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Clear mask", - "name":"UDM2_Clear", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Clear mask", + "name": "UDM2_Clear", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Snow mask", - "name":"UDM2_Snow", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Snow mask", + "name": "UDM2_Snow", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Shadow mask", - "name":"UDM2_Shadow", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Shadow mask", + "name": "UDM2_Shadow", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Light haze mask", - "name":"UDM2_LightHaze", - "openeo:gsd":{ - 
"unit":"m", - "value":[ + "description": "Usable Data mask - Light haze mask", + "name": "UDM2_LightHaze", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Heavy haze mask", - "name":"UDM2_HeavyHaze", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Heavy haze mask", + "name": "UDM2_HeavyHaze", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Cloud mask", - "name":"UDM2_Cloud", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Cloud mask", + "name": "UDM2_Cloud", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Confidence map", - "name":"UDM2_Confidence", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Confidence map", + "name": "UDM2_Confidence", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.675, - "common_name":"nir08", - "description":"Panchromatic", - "full_width_half_max":0.225, - "name":"PAN", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.675, + "common_name": "nir08", + "description": "Panchromatic", + "full_width_half_max": 0.225, + "name": "PAN", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"The mask of data/no data pixels", - "name":"dataMask" + "description": "The mask of data/no data pixels", + "name": "dataMask" } ] }, - "crs":[ + "crs": [ "http://www.opengis.net/def/crs/OGC/1.3/CRS84", "http://www.opengis.net/def/crs/EPSG/0/2154", "http://www.opengis.net/def/crs/EPSG/0/2180", @@ -533,6 +535,5 @@ "http://www.opengis.net/def/crs/EPSG/0/32759", "http://www.opengis.net/def/crs/EPSG/0/32760", "http://www.opengis.net/def/crs/SR-ORG/0/98739" - ], - "title":"SkySat" + ] } \ No newline at end of file diff --git a/tests/fixtures/collection_information/SKYSAT.json b/tests/fixtures/collection_information/SKYSAT.json index 1e019b2a..771ff291 100644 --- a/tests/fixtures/collection_information/SKYSAT.json +++ b/tests/fixtures/collection_information/SKYSAT.json @@ -1,8 +1,65 @@ { - "cube:dimensions":{ - "bands":{ - "type":"bands", - "values":[ + "type": "Collection", + "stac_version": "1.0.0", + "stac_extensions": [ + "https://stac-extensions.github.io/datacube/v1.0.0/schema.json", + "https://stac-extensions.github.io/scientific/v1.0.0/schema.json", + "https://stac-extensions.github.io/eo/v1.0.0/schema.json" + ], + "id": "SKYSAT", + "datasource_type": "byoc-ID", + "title": "SkySat", + "links": [], + "description": "SkySat is one of the satellite constellations operated by Planet. SkySat satellite constellation consists of 21 satellites, which were launched between 2013 and 2020. The satellites are based on a CubeSat concept but are a bit bigger comparing to the PlanetScope's satellites. Because of its rapid revisit time, this data is suitable to monitor fast changes on earth's surface. 
However, note that the data acquisition must be tasked, data is not acquired systematically.", + "keywords": [ + "sentinel hub", + "SkySat", + "vhr", + "commercial data" + ], + "license": "various", + "providers": [ + { + "description": "", + "name": "Sentinel Hub", + "roles": [ + "processor" + ], + "url": "https://services.sentinel-hub.com/" + }, + { + "description": "", + "name": "Planet", + "roles": [ + "producer" + ], + "url": "https://www.planet.com/products/planet-imagery/" + } + ], + "extent": { + "spatial": { + "bbox": [ + [ + -180, + -90, + 180, + 90 + ] + ] + }, + "temporal": { + "interval": [ + [ + "2014-01-01T00:00:00Z", + null + ] + ] + } + }, + "cube:dimensions": { + "bands": { + "type": "bands", + "values": [ "Blue", "Green", "Red", @@ -19,379 +76,324 @@ "dataMask" ] }, - "t":{ - "extent":[ + "t": { + "extent": [ "2014-01-01T00:00:00Z", null ], - "type":"temporal" + "type": "temporal" }, - "x":{ - "axis":"x", - "extent":[ + "x": { + "axis": "x", + "extent": [ -180, 180 ], - "reference_system":{ - "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", - "area":"World", - "base_crs":{ - "coordinate_system":{ - "axis":[ + "reference_system": { + "$schema": "https://proj.org/schemas/v0.2/projjson.schema.json", + "area": "World", + "base_crs": { + "coordinate_system": { + "axis": [ { - "abbreviation":"Lat", - "direction":"north", - "name":"Geodetic latitude", - "unit":"degree" + "abbreviation": "Lat", + "direction": "north", + "name": "Geodetic latitude", + "unit": "degree" }, { - "abbreviation":"Lon", - "direction":"east", - "name":"Geodetic longitude", - "unit":"degree" + "abbreviation": "Lon", + "direction": "east", + "name": "Geodetic longitude", + "unit": "degree" } ], - "subtype":"ellipsoidal" + "subtype": "ellipsoidal" }, - "datum":{ - "ellipsoid":{ - "inverse_flattening":298.257223563, - "name":"WGS 84", - "semi_major_axis":6378137 + "datum": { + "ellipsoid": { + "inverse_flattening": 298.257223563, + "name": "WGS 84", + "semi_major_axis": 6378137 }, - "name":"World Geodetic System 1984", - "type":"GeodeticReferenceFrame" + "name": "World Geodetic System 1984", + "type": "GeodeticReferenceFrame" }, - "name":"WGS 84" + "name": "WGS 84" }, - "bbox":{ - "east_longitude":180, - "north_latitude":90, - "south_latitude":-90, - "west_longitude":-180 + "bbox": { + "east_longitude": 180, + "north_latitude": 90, + "south_latitude": -90, + "west_longitude": -180 }, - "coordinate_system":{ - "axis":[ + "coordinate_system": { + "axis": [ { - "abbreviation":"E", - "direction":"east", - "name":"Easting", - "unit":"metre" + "abbreviation": "E", + "direction": "east", + "name": "Easting", + "unit": "metre" }, { - "abbreviation":"N", - "direction":"north", - "name":"Northing", - "unit":"metre" + "abbreviation": "N", + "direction": "north", + "name": "Northing", + "unit": "metre" } ], - "subtype":"Cartesian" + "subtype": "Cartesian" }, - "id":{ - "authority":"OGC", - "code":"Auto42001", - "version":"1.3" + "id": { + "authority": "OGC", + "code": "Auto42001", + "version": "1.3" }, - "name":"AUTO 42001 (Universal Transverse Mercator)", - "type":"ProjectedCRS" + "name": "AUTO 42001 (Universal Transverse Mercator)", + "type": "ProjectedCRS" }, - "type":"spatial" + "type": "spatial" }, - "y":{ - "axis":"y", - "extent":[ + "y": { + "axis": "y", + "extent": [ -90, 90 ], - "reference_system":{ - "$schema":"https://proj.org/schemas/v0.2/projjson.schema.json", - "area":"World", - "base_crs":{ - "coordinate_system":{ - "axis":[ + "reference_system": { + "$schema": 
"https://proj.org/schemas/v0.2/projjson.schema.json", + "area": "World", + "base_crs": { + "coordinate_system": { + "axis": [ { - "abbreviation":"Lat", - "direction":"north", - "name":"Geodetic latitude", - "unit":"degree" + "abbreviation": "Lat", + "direction": "north", + "name": "Geodetic latitude", + "unit": "degree" }, { - "abbreviation":"Lon", - "direction":"east", - "name":"Geodetic longitude", - "unit":"degree" + "abbreviation": "Lon", + "direction": "east", + "name": "Geodetic longitude", + "unit": "degree" } ], - "subtype":"ellipsoidal" + "subtype": "ellipsoidal" }, - "datum":{ - "ellipsoid":{ - "inverse_flattening":298.257223563, - "name":"WGS 84", - "semi_major_axis":6378137 + "datum": { + "ellipsoid": { + "inverse_flattening": 298.257223563, + "name": "WGS 84", + "semi_major_axis": 6378137 }, - "name":"World Geodetic System 1984", - "type":"GeodeticReferenceFrame" + "name": "World Geodetic System 1984", + "type": "GeodeticReferenceFrame" }, - "name":"WGS 84" + "name": "WGS 84" }, - "bbox":{ - "east_longitude":180, - "north_latitude":90, - "south_latitude":-90, - "west_longitude":-180 + "bbox": { + "east_longitude": 180, + "north_latitude": 90, + "south_latitude": -90, + "west_longitude": -180 }, - "coordinate_system":{ - "axis":[ + "coordinate_system": { + "axis": [ { - "abbreviation":"E", - "direction":"east", - "name":"Easting", - "unit":"metre" + "abbreviation": "E", + "direction": "east", + "name": "Easting", + "unit": "metre" }, { - "abbreviation":"N", - "direction":"north", - "name":"Northing", - "unit":"metre" + "abbreviation": "N", + "direction": "north", + "name": "Northing", + "unit": "metre" } ], - "subtype":"Cartesian" + "subtype": "Cartesian" }, - "id":{ - "authority":"OGC", - "code":"Auto42001", - "version":"1.3" + "id": { + "authority": "OGC", + "code": "Auto42001", + "version": "1.3" }, - "name":"AUTO 42001 (Universal Transverse Mercator)", - "type":"ProjectedCRS" + "name": "AUTO 42001 (Universal Transverse Mercator)", + "type": "ProjectedCRS" }, - "type":"spatial" - } - }, - "datasource_type":"byoc-ID", - "description":"SkySat is one of the satellite constellations operated by Planet. SkySat satellite constellation consists of 21 satellites, which were launched between 2013 and 2020. The satellites are based on a CubeSat concept but are a bit bigger comparing to the PlanetScope's satellites. Because of its rapid revisit time, this data is suitable to monitor fast changes on earth's surface. 
However, note that the data acquisition must be tasked, data is not acquired systematically.", - "extent":{ - "spatial":{ - "bbox":[ - [ - -180, - -90, - 180, - 90 - ] - ] - }, - "temporal":{ - "interval":[ - [ - "2014-01-01T00:00:00Z", - null - ] - ] + "type": "spatial" } }, - "id":"SKYSAT", - "keywords":[ - "sentinel hub", - "SkySat", - "vhr", - "commercial data" - ], - "license":"various", - "links":[ - - ], - "providers":[ - { - "description":"", - "name":"Sentinel Hub", - "roles":[ - "processor" - ], - "url":"https://services.sentinel-hub.com/" - }, - { - "description":"", - "name":"Planet", - "roles":[ - "producer" - ], - "url":"https://www.planet.com/products/planet-imagery/" - } - ], - "sci:citation":"\u00a9 Planet (YYYY), contains SkySat data processed by Sentinel Hub", - "stac_extensions":[ - "datacube" - ], - "stac_version":"0.9.0", - "summaries":{ - "eo:bands":[ + "sci:citation": "\u00a9 Planet (YYYY), contains SkySat data processed by Sentinel Hub", + "summaries": { + "eo:bands": [ { - "center_wavelength":0.4825, - "common_name":"blue", - "description":"Blue", - "full_width_half_max":0.325, - "name":"Blue", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.4825, + "common_name": "blue", + "description": "Blue", + "full_width_half_max": 0.325, + "name": "Blue", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.545, - "common_name":"green", - "description":"Green", - "full_width_half_max":0.4, - "name":"Green", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.545, + "common_name": "green", + "description": "Green", + "full_width_half_max": 0.4, + "name": "Green", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.650, - "common_name":"red", - "description":"Red", - "full_width_half_max":0.45, - "name":"Red", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.650, + "common_name": "red", + "description": "Red", + "full_width_half_max": 0.45, + "name": "Red", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.82, - "common_name":"nir08", - "description":"Near Infrared", - "full_width_half_max":0.8, - "name":"NIR", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.82, + "common_name": "nir08", + "description": "Near Infrared", + "full_width_half_max": 0.8, + "name": "NIR", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Unusable Data Mask", - "name":"UDM", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Unusable Data Mask", + "name": "UDM", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Clear mask", - "name":"UDM2_Clear", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Clear mask", + "name": "UDM2_Clear", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Snow mask", - "name":"UDM2_Snow", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Snow mask", + "name": "UDM2_Snow", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Shadow mask", - "name":"UDM2_Shadow", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Shadow mask", + "name": "UDM2_Shadow", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Light haze mask", - "name":"UDM2_LightHaze", - "openeo:gsd":{ - 
"unit":"m", - "value":[ + "description": "Usable Data mask - Light haze mask", + "name": "UDM2_LightHaze", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Heavy haze mask", - "name":"UDM2_HeavyHaze", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Heavy haze mask", + "name": "UDM2_HeavyHaze", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Cloud mask", - "name":"UDM2_Cloud", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Cloud mask", + "name": "UDM2_Cloud", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"Usable Data mask - Confidence map", - "name":"UDM2_Confidence", - "openeo:gsd":{ - "unit":"m", - "value":[ + "description": "Usable Data mask - Confidence map", + "name": "UDM2_Confidence", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "center_wavelength":0.675, - "common_name":"nir08", - "description":"Panchromatic", - "full_width_half_max":0.225, - "name":"PAN", - "openeo:gsd":{ - "unit":"m", - "value":[ + "center_wavelength": 0.675, + "common_name": "nir08", + "description": "Panchromatic", + "full_width_half_max": 0.225, + "name": "PAN", + "openeo:gsd": { + "unit": "m", + "value": [ 0.5, 0.5 ] } }, { - "description":"The mask of data/no data pixels", - "name":"dataMask" + "description": "The mask of data/no data pixels", + "name": "dataMask" } ] }, - "crs":[ + "crs": [ "http://www.opengis.net/def/crs/OGC/1.3/CRS84", "http://www.opengis.net/def/crs/EPSG/0/2154", "http://www.opengis.net/def/crs/EPSG/0/2180", @@ -533,6 +535,5 @@ "http://www.opengis.net/def/crs/EPSG/0/32759", "http://www.opengis.net/def/crs/EPSG/0/32760", "http://www.opengis.net/def/crs/SR-ORG/0/98739" - ], - "title":"SkySat" + ] } \ No newline at end of file From 854b564b9d9a3449a7dbc9a8881c7895c7b5ba11 Mon Sep 17 00:00:00 2001 From: Zan Pecovnik Date: Thu, 19 Oct 2023 10:00:27 +0200 Subject: [PATCH 34/42] add vito access token to oidc user for fetching leftover credits --- rest/authentication/authentication.py | 2 +- rest/authentication/user.py | 5 +++-- rest/usage_reporting/report_usage.py | 5 ++--- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/rest/authentication/authentication.py b/rest/authentication/authentication.py index a0d69fe1..c9f75c97 100644 --- a/rest/authentication/authentication.py +++ b/rest/authentication/authentication.py @@ -61,7 +61,7 @@ def authenticate_user_oidc(self, access_token, oidc_provider_id): user_id = userinfo["sub"] try: - user = OIDCUser(user_id, oidc_userinfo=userinfo) + user = OIDCUser(user_id, oidc_userinfo=userinfo, access_token=access_token) except BillingPlanInvalid: return None diff --git a/rest/authentication/user.py b/rest/authentication/user.py index 2b6e300d..95faebc4 100644 --- a/rest/authentication/user.py +++ b/rest/authentication/user.py @@ -33,7 +33,7 @@ def report_usage(self, pu_spent, job_id=None): class OIDCUser(User): - def __init__(self, user_id=None, oidc_userinfo={}): + def __init__(self, user_id=None, oidc_userinfo={}, access_token=None): super().__init__(user_id) self.entitlements = [ self.convert_entitlement(entitlement) for entitlement in oidc_userinfo.get("eduperson_entitlement", []) @@ -41,6 +41,7 @@ def __init__(self, user_id=None, oidc_userinfo={}): self.oidc_userinfo = oidc_userinfo self.default_plan = OpenEOPBillingPlan.get_billing_plan(self.entitlements) self.session = central_user_sentinelhub_session + self.access_token 
= access_token
 
     def __str__(self):
         return f"{self.__class__.__name__}: {self.user_id}"
@@ -64,7 +65,7 @@ def get_user_info(self):
         return user_info
 
     def get_leftover_credits(self):
-        return usageReporting.get_leftover_credits()
+        return usageReporting.get_leftover_credits_for_user(self.access_token)
 
     def report_usage(self, pu_spent, job_id=None):
         usageReporting.report_usage(self.user_id, pu_spent, job_id)
diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py
index 017d3735..912405f5 100644
--- a/rest/usage_reporting/report_usage.py
+++ b/rest/usage_reporting/report_usage.py
@@ -58,11 +58,10 @@ def reporting_check_health(self):
 
         return r.status_code == 200 and content["status"] == "ok"
 
-    def get_leftover_credits(self):
+    def get_leftover_credits_for_user(self, user_access_token):
         user_url = f"{self.base_url}user"
 
-        reporting_token = self.get_token()
-        headers = {"content-type": "application/json", "Authorization": f"Bearer {reporting_token['access_token']}"}
+        headers = {"content-type": "application/json", "Authorization": f"Bearer {user_access_token}"}
 
         if not self.reporting_check_health():
             log(ERROR, "Services for usage reporting are not healthy")

From 871942eb801b04c5be5950f48721069191b4c848 Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Thu, 19 Oct 2023 10:23:08 +0200
Subject: [PATCH 35/42] run linting

---
 rest/usage_reporting/report_usage.py | 5 +----
 1 file changed, 1 insertion(+), 4 deletions(-)

diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py
index 912405f5..977d836a 100644
--- a/rest/usage_reporting/report_usage.py
+++ b/rest/usage_reporting/report_usage.py
@@ -76,10 +76,7 @@ def get_leftover_credits_for_user(self, user_access_token):
             return credits
         else:
             log(ERROR, f"Error fetching leftover credits: {r.status_code} {r.text}")
-            raise Internal(
-                f"Problems during fetching leftover credits: {r.status_code} {r.text}"
-            )
-
+            raise Internal(f"Problems during fetching leftover credits: {r.status_code} {r.text}")
 
     def report_usage(self, user_id, pu_spent, job_id=None, max_tries=5):
         reporting_token = self.get_token()

From ffc5219e5c83f9c13bf82696763fa1ac7f5eeb17 Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Thu, 19 Oct 2023 11:47:26 +0200
Subject: [PATCH 36/42] Add additional check that leftover_credits even exists,
 since SH users don't have it

---
 rest/processing/processing.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/rest/processing/processing.py b/rest/processing/processing.py
index 07bb8795..ffc1df5c 100644
--- a/rest/processing/processing.py
+++ b/rest/processing/processing.py
@@ -58,7 +58,7 @@ def start_new_batch_job(sentinel_hub, process, job_id):
     estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint)
 
     leftover_credits = g.user.get_leftover_credits()
-    if leftover_credits < estimated_pu:
+    if leftover_credits is not None and leftover_credits < estimated_pu:
         raise InsufficientCredits()
 
     sentinel_hub.start_batch_job(new_batch_request_id)
@@ -93,7 +93,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id):
     estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint)
 
     leftover_credits = g.user.get_leftover_credits()
-    if leftover_credits < estimated_pu:
+    if leftover_credits is not None and leftover_credits < estimated_pu:
         raise InsufficientCredits()
 
     sentinel_hub.start_batch_job(batch_request_id)
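For context on the guard added in PATCH 36/42: plain Sentinel Hub accounts carry no platform credit balance, so get_leftover_credits() can return None, and the check has to let such jobs through. A minimal sketch of that behavior, with names outside the diff chosen purely for illustration:

from typing import Optional

def may_start_job(leftover_credits: Optional[float], estimated_pu: float) -> bool:
    # None means there is no credit balance to enforce (a plain SH user),
    # so the job is allowed to proceed.
    if leftover_credits is None:
        return True
    # Otherwise the remaining balance must cover the estimate.
    return leftover_credits >= estimated_pu

assert may_start_job(None, 100.0)      # SH user: no platform credits tracked
assert not may_start_job(10.0, 100.0)  # platform user with too small a balance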
From c4d4fb7003f36832581413de0a1a39bab7c39c9 Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Thu, 19 Oct 2023 11:48:35 +0200
Subject: [PATCH 37/42] convert from SH PUs to platform credits before checking
 whether there are enough resources

---
 rest/processing/processing.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/rest/processing/processing.py b/rest/processing/processing.py
index ffc1df5c..cf38b66a 100644
--- a/rest/processing/processing.py
+++ b/rest/processing/processing.py
@@ -58,7 +58,7 @@ def start_new_batch_job(sentinel_hub, process, job_id):
     estimated_pu, _ = get_batch_job_estimate(new_batch_request_id, process, deployment_endpoint)
 
     leftover_credits = g.user.get_leftover_credits()
-    if leftover_credits is not None and leftover_credits < estimated_pu:
+    if leftover_credits is not None and leftover_credits < estimated_pu * 0.15:
         raise InsufficientCredits()
 
     sentinel_hub.start_batch_job(new_batch_request_id)
@@ -93,7 +93,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id):
     estimated_pu, _ = get_batch_job_estimate(batch_request_id, process, deployment_endpoint)
 
     leftover_credits = g.user.get_leftover_credits()
-    if leftover_credits is not None and leftover_credits < estimated_pu:
+    if leftover_credits is not None and leftover_credits < estimated_pu * 0.15:
         raise InsufficientCredits()
 
     sentinel_hub.start_batch_job(batch_request_id)

From 2feb4f035f5fec8c8069aee1a13f365004c1c6ee Mon Sep 17 00:00:00 2001
From: Ziga Cernigoj
Date: Tue, 24 Oct 2023 10:17:34 +0200
Subject: [PATCH 38/42] raise error if mandatory env vars for usage reporting
 are missing

---
 rest/usage_reporting/report_usage.py | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py
index b86bb0e7..f576ec84 100644
--- a/rest/usage_reporting/report_usage.py
+++ b/rest/usage_reporting/report_usage.py
@@ -15,6 +15,22 @@ def __init__(self):
         self.auth_client_secret = os.environ.get("USAGE_REPORTING_AUTH_CLIENT_SECRET")
         self.base_url = os.environ.get("USAGE_REPORTING_BASE_URL")
 
+        if self.auth_url is None:
+            log(ERROR, "USAGE_REPORTING_AUTH_URL environment variable is not set")
+            raise Internal("USAGE_REPORTING_AUTH_URL environment variable is not set")
+
+        if self.auth_client_id is None:
+            log(ERROR, "USAGE_REPORTING_AUTH_CLIENT_ID environment variable is not set")
+            raise Internal("USAGE_REPORTING_AUTH_CLIENT_ID environment variable is not set")
+
+        if self.auth_client_secret is None:
+            log(ERROR, "USAGE_REPORTING_AUTH_CLIENT_SECRET environment variable is not set")
+            raise Internal("USAGE_REPORTING_AUTH_CLIENT_SECRET environment variable is not set")
+
+        if self.base_url is None:
+            log(ERROR, "USAGE_REPORTING_BASE_URL environment variable is not set")
+            raise Internal("USAGE_REPORTING_BASE_URL environment variable is not set")
+
         self.authenticate()
 
     def authenticate(self, max_tries=5):
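The four checks added in PATCH 38/42 repeat one pattern per variable. A more compact equivalent, shown only as a sketch (RuntimeError stands in for the service's Internal error), would iterate over the required names:

import os

REQUIRED_ENV_VARS = (
    "USAGE_REPORTING_AUTH_URL",
    "USAGE_REPORTING_AUTH_CLIENT_ID",
    "USAGE_REPORTING_AUTH_CLIENT_SECRET",
    "USAGE_REPORTING_BASE_URL",
)

def read_required_env_vars():
    # Fail fast, with one clear message per missing variable.
    values = {}
    for name in REQUIRED_ENV_VARS:
        value = os.environ.get(name)
        if value is None:
            raise RuntimeError(f"{name} environment variable is not set")
        values[name] = value
    return values

The explicit per-variable version in the patch keeps each attribute assignment next to its check, which is a reasonable trade-off at four variables.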
From f7aefa51860cd982f1b2bc4c8e25e8688d0643e6 Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Tue, 24 Oct 2023 10:19:21 +0200
Subject: [PATCH 39/42] run linting

---
 rest/processing/processing.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/rest/processing/processing.py b/rest/processing/processing.py
index 26a0d850..2ad9e2bb 100644
--- a/rest/processing/processing.py
+++ b/rest/processing/processing.py
@@ -63,11 +63,11 @@ def start_new_batch_job(sentinel_hub, process, job_id):
         raise JobNotFound()
 
     estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, new_batch_request_id)
-    
+
     leftover_credits = g.user.get_leftover_credits()
     if leftover_credits is not None and leftover_credits < estimated_sentinelhub_pu * 0.15:
         raise InsufficientCredits()
-    
+
     JobsPersistence.update_key(
         job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3))
     )
@@ -105,7 +105,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id):
         raise JobNotFound()
 
     estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, job["batch_request_id"])
-    
+
     leftover_credits = g.user.get_leftover_credits()
     if leftover_credits is not None and leftover_credits < estimated_sentinelhub_pu * 0.15:
         raise InsufficientCredits()

From bf4a67edf6b07bcc8add41d8ffb6022173c086ce Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Tue, 24 Oct 2023 14:59:38 +0200
Subject: [PATCH 40/42] extract duplicate code to function

---
 rest/processing/processing.py | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/rest/processing/processing.py b/rest/processing/processing.py
index 2ad9e2bb..ae5c7327 100644
--- a/rest/processing/processing.py
+++ b/rest/processing/processing.py
@@ -64,9 +64,7 @@ def start_new_batch_job(sentinel_hub, process, job_id):
     estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, new_batch_request_id)
 
-    leftover_credits = g.user.get_leftover_credits()
-    if leftover_credits is not None and leftover_credits < estimated_sentinelhub_pu * 0.15:
-        raise InsufficientCredits()
+    check_leftover_credits(estimated_sentinelhub_pu)
 
     JobsPersistence.update_key(
         job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3))
     )
@@ -106,9 +104,7 @@ def start_batch_job(batch_request_id, process, deployment_endpoint, job_id):
     estimated_sentinelhub_pu, _, _ = create_or_get_estimate_values_from_db(job, job["batch_request_id"])
 
-    leftover_credits = g.user.get_leftover_credits()
-    if leftover_credits is not None and leftover_credits < estimated_sentinelhub_pu * 0.15:
-        raise InsufficientCredits()
+    check_leftover_credits(estimated_sentinelhub_pu)
 
     JobsPersistence.update_key(
         job["id"], "sum_costs", str(round(float(job.get("sum_costs", 0)) + estimated_sentinelhub_pu, 3))
     )
@@ -232,3 +228,10 @@ def create_or_get_estimate_values_from_db(job, batch_request_id):
     estimated_file_size = float(job.get("estimated_file_size", 0))
 
     return estimated_sentinelhub_pu, estimated_platform_credits, estimated_file_size
+
+
+def check_leftover_credits(estimated_pu):
+    leftover_credits = g.user.get_leftover_credits()
+    estimated_pu_as_credits = estimated_pu * 0.15 # platform credits === SH PU's * 0.15
+    if leftover_credits is not None and leftover_credits < estimated_pu_as_credits:
+        raise InsufficientCredits()
\ No newline at end of file

From 2e74c7cfbf20a540ea106d13f613052e9eff68cc Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Tue, 24 Oct 2023 15:01:56 +0200
Subject: [PATCH 41/42] rename credits to avoid shadowing Python's built-in
 credits

---
 rest/usage_reporting/report_usage.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/rest/usage_reporting/report_usage.py b/rest/usage_reporting/report_usage.py
index 6160a66a..e85cbf7f 100644
--- a/rest/usage_reporting/report_usage.py
+++ b/rest/usage_reporting/report_usage.py
@@ -87,9 +87,9 @@ def get_leftover_credits_for_user(self, user_access_token):
 
         if r.status_code == 200:
             content = r.json()
-            credits = content.get("credits")
+            platform_credits = content.get("credits")
 
-            return credits
+            return platform_credits
         else:
             log(ERROR, f"Error fetching leftover credits: {r.status_code} {r.text}")
             raise Internal(f"Problems during fetching leftover credits: {r.status_code} {r.text}")
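The rename in PATCH 41/42 is motivated by CPython itself: site.py injects credits into builtins alongside copyright and license, so a local named credits shadows it. A quick check, runnable as-is on a standard CPython (with site initialization enabled):

import builtins

# `credits` is injected by site.py, so it exists as a builtin name.
print(hasattr(builtins, "credits"))  # expected output: True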
From 189521daca98da04ac8b05be48ebdb4fd61dead1 Mon Sep 17 00:00:00 2001
From: Zan Pecovnik
Date: Tue, 24 Oct 2023 15:06:32 +0200
Subject: [PATCH 42/42] run linting

---
 rest/processing/processing.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/rest/processing/processing.py b/rest/processing/processing.py
index ae5c7327..3a49e136 100644
--- a/rest/processing/processing.py
+++ b/rest/processing/processing.py
@@ -232,6 +232,6 @@ def create_or_get_estimate_values_from_db(job, batch_request_id):
 
 def check_leftover_credits(estimated_pu):
     leftover_credits = g.user.get_leftover_credits()
-    estimated_pu_as_credits = estimated_pu * 0.15 # platform credits === SH PU's * 0.15
+    estimated_pu_as_credits = estimated_pu * 0.15  # platform credits === SH PU's * 0.15
     if leftover_credits is not None and leftover_credits < estimated_pu_as_credits:
-        raise InsufficientCredits()
\ No newline at end of file
+        raise InsufficientCredits()
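After PATCH 42/42 the credit gate behaves as follows. This is a self-contained sketch only, with the Flask g.user dependency replaced by a plain callable and InsufficientCredits redefined locally:

PU_TO_CREDITS = 0.15  # platform credits = SH PUs * 0.15 (the factor from PATCH 37/42)

class InsufficientCredits(Exception):
    pass

def check_leftover_credits(estimated_pu, get_leftover_credits):
    # get_leftover_credits stands in for g.user.get_leftover_credits.
    leftover_credits = get_leftover_credits()
    estimated_pu_as_credits = estimated_pu * PU_TO_CREDITS
    if leftover_credits is not None and leftover_credits < estimated_pu_as_credits:
        raise InsufficientCredits()

check_leftover_credits(100.0, lambda: None)  # SH user: passes, no balance tracked
check_leftover_credits(100.0, lambda: 20.0)  # passes: 20 credits >= 15 needed
# check_leftover_credits(100.0, lambda: 10.0) would raise InsufficientCredits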