Skip to content

Commit

Permalink
Merge pull request #324 from EGA-archive/develop
Browse files Browse the repository at this point in the history
Handling errors
  • Loading branch information
costero-e authored May 14, 2024
2 parents c3102c9 + dcef8c4 commit 196c930
Show file tree
Hide file tree
Showing 10 changed files with 398 additions and 283 deletions.
447 changes: 244 additions & 203 deletions beacon/request/handlers.py

Large diffs are not rendered by default.

5 changes: 4 additions & 1 deletion beacon/request/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
from beacon import conf
from humps.main import camelize
from aiohttp.web_request import Request
from aiohttp import web

LOG = logging.getLogger(__name__)

Expand Down Expand Up @@ -97,8 +98,10 @@ def from_request(self, request: Request) -> Self:
self.query.pagination.limit = int(v)
elif k == "includeResultsetResponses":
self.query.include_resultset_responses = IncludeResultsetResponses(v)
else:
elif k == 'filters' or k in ["start", "end", "assemblyId", "referenceName", "referenceBases", "alternateBases", "variantType","variantMinLength","variantMaxLength","geneId","genomicAlleleShortForm","aminoacidChange","clinicalRelevance"]:
self.query.request_parameters[k] = v
else:
raise web.HTTPBadRequest(text='request parameter introduced is not allowed')
return self

def summary(self):
Expand Down
8 changes: 7 additions & 1 deletion beacon/request/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@

from beacon.db import analyses, biosamples, cohorts, datasets, g_variants, individuals, runs, filtering_terms
from beacon.request.handlers import collection_handler, generic_handler, filtering_terms_handler
from beacon.response import framework, info, service_info
from beacon.response import framework, info, service_info, error

routes = [

Expand Down Expand Up @@ -76,6 +76,9 @@
web.get('/api/runs/{id}/g_variants', generic_handler(db_fn=runs.get_variants_of_run)),
web.get('/api/runs/{id}/analyses', generic_handler(db_fn=runs.get_analyses_of_run)),

web.get('/api/{tail:.*}', error.handler),
web.get('/{tail:.*}', error.handler),



########################################
Expand Down Expand Up @@ -136,4 +139,7 @@
web.post('/api/runs/{id}/g_variants', generic_handler(db_fn=runs.get_variants_of_run)),
web.post('/api/runs/{id}/analyses', generic_handler(db_fn=runs.get_analyses_of_run)),

web.post('/api/{tail:.*}', error.handler),
web.post('/{tail:.*}', error.handler)

]
52 changes: 44 additions & 8 deletions beacon/response/build_response.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,14 +15,22 @@ def build_meta(qparams: RequestParams, entity_schema: Optional[DefaultSchemas],
We assume that receivedRequest is the evaluated request (qparams) sent by the user.
"""

meta = {
'beaconId': conf.beacon_id,
'apiVersion': conf.api_version,
'returnedGranularity': returned_granularity,
'receivedRequestSummary': qparams.summary(),
'returnedSchemas': [entity_schema.value] if entity_schema is not None else []
}
try:
meta = {
'beaconId': conf.beacon_id,
'apiVersion': conf.api_version,
'returnedGranularity': returned_granularity,
'receivedRequestSummary': qparams.summary(),
'returnedSchemas': [entity_schema.value] if entity_schema is not None else []
}
except Exception:
meta = {
'beaconId': conf.beacon_id,
'apiVersion': conf.api_version,
'returnedGranularity': returned_granularity,
'receivedRequestSummary': qparams,
'returnedSchemas': [entity_schema.value] if entity_schema is not None else []
}
return meta

def build_response_summary(exists, num_total_results):
Expand Down Expand Up @@ -253,6 +261,22 @@ def build_beacon_info_response(data, qparams, func_response_type, authorized_dat

return beacon_response

########################################
# Error Response
########################################

def build_beacon_error_response(errorCode, qparams, errorMessage):
    """Build a Beacon v2 error envelope.

    Returns a dict with a 'meta' section (built from *qparams*, no schema,
    record granularity) and an 'error' section holding the stringified
    code and message, per the Beacon v2 error response shape.
    """
    # Errors carry no entity schema, hence the None schema argument.
    meta_section = build_meta(qparams, None, Granularity.RECORD)
    error_section = {
        'errorCode': str(errorCode),
        'errorMessage': str(errorMessage)
    }
    return {'meta': meta_section, 'error': error_section}

########################################
# Service Info Response
########################################
Expand Down Expand Up @@ -300,6 +324,18 @@ def build_filtering_terms_response(data,
# TODO: 'extendedInfo': build_extended_info(),
'response': {
'filteringTerms': data,
'resources': [{"id": "hp","name": "Human Phenotype Ontology","url": "https://purl.obolibrary.org/obo/hp.owl","version": "27-03-2020","namespacePrefix": "HP","iriPrefix": "https://purl.obolibrary.org/obo/HP_"},
{"id": "icd10","name": "International Classification of Diseaes 10th edition","version": "17-03-2008","namespacePrefix": "ICD10"},
{"id": "ncit","name": "National Cancer Institute Thesaurus","url": "https://purl.obolibrary.org/obo/ncit.owl","version": "19-10-2023","namespacePrefix": "NCIT","iriPrefix": "https://purl.obolibrary.org/obo/NCIT_"},
{"id": "loinc","name": "Logical Observation Identifiers Names and Codes","url": "https://loinc.org/download/loinc-complete/","version": "19-10-2023","namespacePrefix": "LOINC"},
{"id": "gaz","name": "Gazetteer","url": "https://purl.obolibrary.org/obo/gaz.owl","namespacePrefix": "GAZ","iriPrefix": "https://purl.obolibrary.org/obo/GAZ_"},
{"id": "opcs4","name": "Office of Population Censuses and Surveys 4th revision","version": "01-04-2023","namespacePrefix": "OPCS4"},
{"id": "genepio","name": "Genomic Epidemiology Ontology","version": "19-08-2023","namespacePrefix": "GENEPIO"},
{"id": "obi","name": "Ontology for Biomedical Investigations","url": "http://purl.obolibrary.org/obo/obi.owl","version": "19-01-2024","namespacePrefix": "OBI","iriPrefix": "https://purl.obolibrary.org/obo/OBI_"},
{"id": "efo","name": "Experimental Factor Ontology","version": "15-04-2024","namespacePrefix": "EFO"},
{"id": "uberon","name": "Uber-anatomy ontology","version": "22-03-2024","namespacePrefix": "UBERON"},
{"id": "doid","name": "Human Disease Ontology","url": "http://purl.obolibrary.org/obo/doid.owl","version": "28-03-2024","namespacePrefix": "DOID","iriPrefix": "https://purl.obolibrary.org/obo/DOID_"},
{"id": "geno","name": "GENO ontology","version": "08-10-2023","namespacePrefix": "GENO"}]
},
'beaconHandovers': beacon_handovers(),
}
Expand Down
21 changes: 21 additions & 0 deletions beacon/response/error.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
import subprocess
import logging
import json
from aiohttp.web_request import Request
from beacon.db.datasets import get_datasets
from beacon.request import RequestParams
from beacon.response.build_response import build_beacon_error_response
from beacon.utils.auth import resolve_token
from beacon.utils.stream import json_stream
from bson import json_util

LOG = logging.getLogger(__name__)

async def handler(request: Request):
    """Catch-all route handler returning a Beacon-formatted 404 response.

    Mounted on the wildcard GET/POST routes ('/{tail:.*}') so any request
    to an unknown path yields a spec-shaped JSON error document instead of
    aiohttp's default error page.
    """
    # An unmatched route is an expected client-side condition, so log it
    # at info level rather than error (the original ERROR-level log would
    # flood error monitoring on every bad URL).
    LOG.info('Running an error request')

    # No query parameters are parsed for unknown routes; downstream
    # meta-building falls back to echoing this placeholder.
    qparams = ''

    response_converted = build_beacon_error_response(404, qparams, 'Not Found')
    return await json_stream(request, response_converted)
87 changes: 50 additions & 37 deletions beacon/response/info.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,54 +16,67 @@
from aiohttp.web_request import Request
from beacon.db.datasets import get_datasets
from beacon.request import RequestParams
from beacon.response.build_response import build_beacon_info_response
from beacon.response.build_response import build_beacon_info_response, build_beacon_error_response
from beacon.utils.auth import resolve_token
from beacon.utils.stream import json_stream
from bson import json_util

LOG = logging.getLogger(__name__)

async def handler(request: Request):
LOG.info('Running a GET info request')
try:
LOG.info('Running a GET info request')

# Fetch datasets info
json_body = await request.json() if request.method == "POST" and request.has_body and request.can_read_body else {}
qparams = RequestParams(**json_body).from_request(request)
_, _, datasets = get_datasets(None, qparams)
beacon_datasets = [ r for r in datasets ]

all_datasets = [ r['_id'] for r in beacon_datasets]
specific_datasets = [ r['id'] for r in beacon_datasets]
LOG.debug(specific_datasets)
# Fetch datasets info
json_body = await request.json() if request.method == "POST" and request.has_body and request.can_read_body else {}
qparams = RequestParams(**json_body).from_request(request)
_, _, datasets = get_datasets(None, qparams)
beacon_datasets = [ r for r in datasets ]
all_datasets = [ r['_id'] for r in beacon_datasets]
specific_datasets = [ r['id'] for r in beacon_datasets]
LOG.debug(specific_datasets)

search_datasets = []
authenticated=False
access_token = request.headers.get('Authorization')
LOG.debug(access_token)
if access_token is not None:
access_token = access_token[7:] # cut out 7 characters: len('Bearer ')

authorized_datasets, authenticated = await resolve_token(access_token, search_datasets)
LOG.debug(authorized_datasets)
LOG.debug('all datasets: %s', all_datasets)
LOG.info('resolved datasets: %s', authorized_datasets)
LOG.debug(authenticated)
search_datasets = []
authenticated=False
access_token = request.headers.get('Authorization')
LOG.debug(access_token)
if access_token is not None:
access_token = access_token[7:] # cut out 7 characters: len('Bearer ')
authorized_datasets, authenticated = await resolve_token(access_token, search_datasets)
LOG.debug(authorized_datasets)
LOG.debug('all datasets: %s', all_datasets)
LOG.info('resolved datasets: %s', authorized_datasets)
LOG.debug(authenticated)

specific_datasets_authorized = []
specific_datasets_authorized = []

if not specific_datasets:
for auth_element in authorized_datasets:
specific_datasets_authorized = [ r for r in beacon_datasets if r['id'] == auth_element]
if not specific_datasets:
for auth_element in authorized_datasets:
specific_datasets_authorized = [ r for r in beacon_datasets if r['id'] == auth_element]
else:
for element in specific_datasets:
if element in authorized_datasets:
specific_datasets_authorized = [ r for r in beacon_datasets if r['id'] == element]
else:
for element in specific_datasets:
if element in authorized_datasets:
specific_datasets_authorized = [ r for r in beacon_datasets if r['id'] == element]
else:
specific_datasets_authorized = []
specific_datasets_authorized = []


response_converted = build_beacon_info_response(specific_datasets_authorized,
qparams,
lambda x,y,z: x,
authorized_datasets if authenticated else [])

response_converted = build_beacon_info_response(specific_datasets_authorized,
qparams,
lambda x,y,z: x,
authorized_datasets if authenticated else [])
except Exception as err:
qparams = ''
if str(err) == 'Not Found':
response_converted = build_beacon_error_response(404, qparams, str(err))
elif str(err) == 'Bad Request':
response_converted = build_beacon_error_response(400, qparams, str(err)+':'+str(err.text))
elif str(err) == 'Bad Gateway':
response_converted = build_beacon_error_response(502, qparams, str(err))
elif str(err) == 'Method Not Allowed':
response_converted = build_beacon_error_response(405, qparams, str(err))
else:
response_converted = build_beacon_error_response(500, qparams, str(err))
return await json_stream(request, response_converted)
15 changes: 12 additions & 3 deletions beacon/response/service_info.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,20 @@
import logging
from aiohttp.web_request import Request
from beacon.response.build_response import build_beacon_service_info_response
from beacon.response.build_response import build_beacon_service_info_response, build_beacon_error_response
from beacon.utils.stream import json_stream

LOG = logging.getLogger(__name__)

async def handler(request: Request):
    """Serve the service-info document, converting failures to Beacon errors.

    On success, streams the standard service-info response. If building it
    raises, the exception text is mapped to an HTTP status code ('Not Found'
    -> 404, 'Bad Request' -> 400, anything else -> 500) and a Beacon error
    envelope is streamed instead.
    """
    try:
        LOG.info('Running a GET service info request')
        response_converted = build_beacon_service_info_response()
    except Exception as err:
        # Dispatch on the exception's message; unrecognized messages are
        # reported as an internal server error.
        status_by_message = {'Not Found': 404, 'Bad Request': 400}
        message = str(err)
        qparams = ''
        status = status_by_message.get(message, 500)
        response_converted = build_beacon_error_response(status, qparams, message)
    return await json_stream(request, response_converted)
16 changes: 0 additions & 16 deletions deploy/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -9,22 +9,6 @@ build:
docker exec rimongo /bin/bash -c 'mongo beacon -u root -p example --authenticationDatabase admin --eval "db.genomicVariations.deleteMany({})"'
docker exec rimongo /bin/bash -c 'mongo beacon -u root -p example --authenticationDatabase admin --eval "db.individuals.deleteMany({})"'
docker exec rimongo /bin/bash -c 'mongo beacon -u root -p example --authenticationDatabase admin --eval "db.runs.deleteMany({})"'
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/analyses.json rimongo:tmp/analyses.json
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/biosamples.json rimongo:tmp/biosamples.json
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/cohorts.json rimongo:tmp/cohorts.json
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/datasets.json rimongo:tmp/datasets.json
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/genomicVariations.json rimongo:tmp/genomicVariations.json
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/individuals.json rimongo:tmp/individuals.json
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/runs.json rimongo:tmp/runs.json
docker cp /data/vault/bio-scratch/arnau/beacon/beacon2-ri-tools-v2_test_anot/filtering_terms.json rimongo:tmp/filtering_terms.json
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/datasets.json --collection datasets
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/individuals.json --collection individuals
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/cohorts.json --collection cohorts
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/analyses.json --collection analyses
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/biosamples.json --collection biosamples
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/runs.json --collection runs
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/genomicVariations.json --collection genomicVariations
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/filtering_terms.json --collection filtering_terms
docker cp data/analyses.json rimongo:tmp/analyses.json
docker cp data/biosamples.json rimongo:tmp/biosamples.json
docker cp data/cohorts.json rimongo:tmp/cohorts.json
Expand Down
28 changes: 14 additions & 14 deletions deploy/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -40,23 +40,23 @@ With `mongo-express` we can see the contents of the database at [http://localhos
To load the database we execute the following commands:

```bash
docker cp /path/to/analyses.json deploy_db_1:tmp/analyses.json
docker cp /path/to/biosamples.json deploy_db_1:tmp/biosamples.json
docker cp /path/to/cohorts.json deploy_db_1:tmp/cohorts.json
docker cp /path/to/datasets.json deploy_db_1:tmp/datasets.json
docker cp /path/to/genomicVariations.json deploy_db_1:tmp/genomicVariations.json
docker cp /path/to/individuals.json deploy_db_1:tmp/individuals.json
docker cp /path/to/runs.json deploy_db_1:tmp/runs.json
docker cp /path/to/analyses.json rimongo:tmp/analyses.json
docker cp /path/to/biosamples.json rimongo:tmp/biosamples.json
docker cp /path/to/cohorts.json rimongo:tmp/cohorts.json
docker cp /path/to/datasets.json rimongo:tmp/datasets.json
docker cp /path/to/genomicVariations.json rimongo:tmp/genomicVariations.json
docker cp /path/to/individuals.json rimongo:tmp/individuals.json
docker cp /path/to/runs.json rimongo:tmp/runs.json
```

```bash
docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/datasets.json --collection datasets
docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/analyses.json --collection analyses
docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/biosamples.json --collection biosamples
docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/cohorts.json --collection cohorts
docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/genomicVariations.json --collection genomicVariations
docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/individuals.json --collection individuals
docker exec deploy_db_1 mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/runs.json --collection runs
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/datasets.json --collection datasets
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/analyses.json --collection analyses
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/biosamples.json --collection biosamples
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/cohorts.json --collection cohorts
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/genomicVariations.json --collection genomicVariations
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/individuals.json --collection individuals
docker exec rimongo mongoimport --jsonArray --uri "mongodb://root:[email protected]:27017/beacon?authSource=admin" --file /tmp/runs.json --collection runs
```

This loads the JSON files inside of the `data` folder into the MongoDB database container. Each time you import data you will have to create indexes for the queries to run smoothly. Please, check the next point about how to Create the indexes.
Expand Down
2 changes: 2 additions & 0 deletions deploy/docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,7 @@ services:
db:
image: mongo:5
hostname: mongo
container_name: rimongo
ports:
- 27017:27017
environment:
Expand Down Expand Up @@ -79,6 +80,7 @@ services:
- ./conf.py:/beacon/beacon/conf.py
- ./logger.yml:/beacon/beacon/logger.yml
- ../beacon/request/datasets.yml:/beacon/beacon/request/datasets.yml
- ../beacon/request/cohorts.yml:/beacon/beacon/request/cohorts.yml
- ../beacon/request/response_type.yml:/beacon/beacon/request/response_type.yml
ports:
- "5050:5050"
Expand Down

0 comments on commit 196c930

Please sign in to comment.