diff --git a/.gitignore b/.gitignore index 05b23a22..2732b725 100644 --- a/.gitignore +++ b/.gitignore @@ -302,4 +302,9 @@ src/ *.dump # Celery Worker Tmp files -celery-worker-*.tmp \ No newline at end of file +celery-worker-*.tmp + +# local dev scripts +local*.sh +local*.py +setup/ diff --git a/AGENTS.md b/AGENTS.md new file mode 100644 index 00000000..630345ae --- /dev/null +++ b/AGENTS.md @@ -0,0 +1,23 @@ +# Repository Guidelines + +## Project Structure & Module Organization +`core/` contains the apps (e.g., `core/concepts`, `core/mappings`), plus integration helpers like `core/celery.py` and API wiring in `core/urls.py`. Entrypoints live at repo root (`manage.py`, `startup.sh`), environment tooling sits in `docker-compose*.yml`, and CLI helpers/scripts are under `tools/` (imports, releases, versioning). Tests ship alongside each app in `core/*/tests` with cross-cutting suites in `core/integration_tests/`. + +## Build, Test, and Development Commands +- `docker compose up -d` boots the API, Postgres, Elasticsearch, Redis, and workers for local hacking. +- `docker compose run --rm api python manage.py test --keepdb -v3` runs the Django test suite quickly against the dev database. +- `docker exec -it oclapi2-api-1 pylint -j2 core` enforces lint rules. +- `docker compose -f docker-compose.yml -f docker-compose.ci.yml run --rm api bash coverage.sh` reports coverage and fails under the CI thresholds. +- `docker exec -it oclapi2-api-1 python manage.py search_index --populate -f --parallel` rebuilds Elasticsearch indexes after model or serializer changes. + +## Coding Style & Naming Conventions +Use 4-space indentation, `snake_case` for functions/modules, and `PascalCase` for Django models and serializers. Keep imports sorted (stdlib, third-party, local) and lean on `pylint` to catch regressions. Favor explicit settings toggles by extending `core/toggles/` and document feature flags in docstrings above the flag definition. 
Apply KISS, DRY, and YAGNI: keep views/serializers simple, consolidate helpers per app, and skip speculative features. + +## Testing Guidelines +Target ≥93% coverage by pairing unit tests with integration cases. Name files `test_*.py` and co-locate fixtures inside the app’s `tests/fixtures/`. Smoke Elasticsearch-dependent tests live in `core/integration_tests/` and should be guarded with `@skipUnless(settings.ES_ENABLED, ...)`. Use `python manage.py test app.tests --keepdb` for focused runs and include representative payloads in `core/samples/` when asserting serialization. + +## Commit & Pull Request Guidelines +Structure commit subjects using Conventional Commits (`feat(core): add repoType filter`) or issue references such as `OpenConceptLab/ocl_issues#2252 repo facets`. Keep one logical change per commit so release tooling can tag accurately. Follow GitFlow branches—start work on `feature/<issue-number>-slug` from `develop`, reserve `hotfix/*` for production fixes, and merge back through PRs only. Every PR must describe the motivation, list verification steps (`docker compose run --rm api python manage.py test…`), link the tracked issue, attach evidence when API responses change, and call out migrations, indexing, or env var impacts before requesting review. + +## Security & Configuration Tips +Keep `vm.max_map_count=262144` set before starting Elasticsearch. For SSO, export `OIDC_SERVER_URL`, `OIDC_SERVER_INTERNAL_URL`, and `OIDC_REALM`; omitting them falls back to Django auth. Never commit secrets—use `.env` overrides or Docker compose profiles instead. 
diff --git a/core/graphql/__init__.py b/core/graphql/__init__.py new file mode 100644 index 00000000..040fa6f9 --- /dev/null +++ b/core/graphql/__init__.py @@ -0,0 +1 @@ +"""GraphQL app for OCL.""" diff --git a/core/graphql/apps.py b/core/graphql/apps.py new file mode 100644 index 00000000..4f9c6b72 --- /dev/null +++ b/core/graphql/apps.py @@ -0,0 +1,7 @@ +from django.apps import AppConfig + + +class GraphqlConfig(AppConfig): + name = "core.graphql" + label = "core_graphql" + verbose_name = "GraphQL API" diff --git a/core/graphql/queries.py b/core/graphql/queries.py new file mode 100644 index 00000000..0084758b --- /dev/null +++ b/core/graphql/queries.py @@ -0,0 +1,277 @@ +from __future__ import annotations + +import logging +from typing import Iterable, List, Optional, Sequence + +import strawberry +from asgiref.sync import sync_to_async +from django.db.models import Case, IntegerField, Prefetch, Q, When +from elasticsearch import ConnectionError as ESConnectionError, TransportError +from elasticsearch_dsl import Q as ES_Q +from strawberry.exceptions import GraphQLError + +from core.common.constants import HEAD +from core.concepts.documents import ConceptDocument +from core.concepts.models import Concept +from core.mappings.models import Mapping +from core.sources.models import Source + +from .types import ConceptType, MappingType, ToSourceType + +logger = logging.getLogger(__name__) +ES_MAX_WINDOW = 10_000 + + +@strawberry.type +class ConceptSearchResult: + org: str + source: str + version_resolved: str = strawberry.field(name="versionResolved") + page: Optional[int] + limit: Optional[int] + total_count: int = strawberry.field(name="totalCount") + has_next_page: bool = strawberry.field(name="hasNextPage") + results: List[ConceptType] + + +async def resolve_source_version(org: str, source: str, version: Optional[str]) -> Source: + filters = {'organization__mnemonic': org} + target_version = version or HEAD + instance = await 
sync_to_async(Source.get_version)(source, target_version, filters) + + if not instance and version is None: + instance = await sync_to_async(Source.find_latest_released_version_by)({**filters, 'mnemonic': source}) + + if not instance: + raise GraphQLError( + f"Source '{source}' with version '{version or 'HEAD'}' was not found for org '{org}'." + ) + + return instance + + +def build_base_queryset(source_version: Source): + return source_version.get_concepts_queryset().filter(is_active=True, retired=False) + + +def build_mapping_prefetch(source_version: Source) -> Prefetch: + mapping_qs = ( + Mapping.objects.filter( + sources__id=source_version.id, + from_concept_id__isnull=False, + is_active=True, + retired=False, + ) + .select_related('to_source', 'to_concept', 'to_concept__parent') + .order_by('map_type', 'to_concept_code', 'to_concept__mnemonic') + .distinct() + ) + + return Prefetch('mappings_from', queryset=mapping_qs, to_attr='graphql_mappings') + + +def normalize_pagination(page: Optional[int], limit: Optional[int]) -> Optional[dict]: + if page is None or limit is None: + return None + if page < 1 or limit < 1: + raise GraphQLError('page and limit must be >= 1 when provided.') + start = (page - 1) * limit + end = start + limit + return {'page': page, 'limit': limit, 'start': start, 'end': end} + + +def has_next(total: int, pagination: Optional[dict]) -> bool: + if not pagination: + return False + return total > pagination['end'] + + +def apply_slice(qs, pagination: Optional[dict]): + if not pagination: + return qs + return qs[pagination['start']:pagination['end']] + + +def serialize_mappings(concept: Concept) -> List[MappingType]: + mappings = getattr(concept, 'graphql_mappings', []) or [] + result: List[MappingType] = [] + for mapping in mappings: + result.append( + MappingType( + map_type=str(mapping.map_type), + to_source=ToSourceType( + url=mapping.to_source_url, + name=mapping.to_source_name + ) if mapping.to_source_url or mapping.to_source_name else 
None, + to_code=mapping.get_to_concept_code(), + comment=mapping.comment, + ) + ) + return result + + +def serialize_concepts(concepts: Iterable[Concept]) -> List[ConceptType]: + output: List[ConceptType] = [] + for concept in concepts: + output.append( + ConceptType( + concept_id=concept.mnemonic, + display=concept.display_name, + mappings=serialize_mappings(concept), + ) + ) + return output + + +def concept_ids_from_es( + query: str, + source_version: Source, + pagination: Optional[dict], +) -> Optional[tuple[list[int], int]]: + trimmed = query.strip() + if not trimmed: + return [], 0 + + try: + search = ConceptDocument.search() + search = search.filter('term', source=source_version.mnemonic.lower()) + search = search.filter('term', source_version=source_version.version) + search = search.filter('term', retired=False) + + should_queries = [ + ES_Q('match', id={'query': trimmed, 'boost': 6, 'operator': 'AND'}), + ES_Q('match_phrase_prefix', name={'query': trimmed, 'boost': 4}), + ES_Q('match', synonyms={'query': trimmed, 'boost': 2, 'operator': 'AND'}), + ] + search = search.query(ES_Q('bool', should=should_queries, minimum_should_match=1)) + + if pagination: + search = search[pagination['start']:pagination['end']] + else: + search = search[0:ES_MAX_WINDOW] + + search = search.params(track_total_hits=True) + response = search.execute() + total_meta = getattr(getattr(response.hits, 'total', None), 'value', None) + total = int(total_meta) if total_meta is not None else len(response.hits) + concept_ids = [int(hit.meta.id) for hit in response] + return concept_ids, total + except (TransportError, ESConnectionError) as exc: # pragma: no cover - depends on ES at runtime + logger.warning('Falling back to DB search due to Elasticsearch error: %s', exc) + except Exception as exc: # pragma: no cover - unexpected ES error should not break API + logger.warning('Unexpected Elasticsearch error, falling back to DB search: %s', exc) + return None + + +def 
fallback_db_search(base_qs, query: str): + trimmed = query.strip() + if not trimmed: + return base_qs.none() + return base_qs.filter( + Q(mnemonic__icontains=trimmed) | Q(names__name__icontains=trimmed) + ).distinct() + + +async def concepts_for_ids( + base_qs, + concept_ids: Sequence[str], + pagination: Optional[dict], + mapping_prefetch: Prefetch, +) -> tuple[List[Concept], int]: + unique_ids = list(dict.fromkeys([cid for cid in concept_ids if cid])) + if not unique_ids: + raise GraphQLError('conceptIds must include at least one value when provided.') + + qs = base_qs.filter(mnemonic__in=unique_ids) + total = await sync_to_async(qs.count)() + ordering = Case( + *[When(mnemonic=value, then=pos) for pos, value in enumerate(unique_ids)], + output_field=IntegerField() + ) + qs = qs.order_by(ordering, 'mnemonic') + qs = apply_slice(qs, pagination) + qs = qs.prefetch_related('names', mapping_prefetch) + return await sync_to_async(list)(qs), total + + +async def concepts_for_query( + base_qs, + query: str, + source_version: Source, + pagination: Optional[dict], + mapping_prefetch: Prefetch, +) -> tuple[List[Concept], int]: + es_result = await sync_to_async(concept_ids_from_es)(query, source_version, pagination) + if es_result is not None: + concept_ids, total = es_result + if not concept_ids: + if total == 0: + logger.info( + 'ES returned zero hits for query="%s" in source "%s" version "%s". 
Falling back to DB search.', + query, + source_version.mnemonic, + source_version.version, + ) + else: + return [], total + else: + ordering = Case( + *[When(id=pk, then=pos) for pos, pk in enumerate(concept_ids)], + output_field=IntegerField() + ) + qs = base_qs.filter(id__in=concept_ids).order_by(ordering) + qs = qs.prefetch_related('names', mapping_prefetch) + return await sync_to_async(list)(qs), total + + qs = fallback_db_search(base_qs, query).order_by('mnemonic') + total = await sync_to_async(qs.count)() + qs = apply_slice(qs, pagination) + qs = qs.prefetch_related('names', mapping_prefetch) + return await sync_to_async(list)(qs), total + + +@strawberry.type +class Query: + @strawberry.field(name="conceptsFromSource") + async def concepts_from_source( + self, + info, # pylint: disable=unused-argument + org: str, + source: str, + version: Optional[str] = None, + conceptIds: Optional[List[str]] = None, + query: Optional[str] = None, + page: Optional[int] = None, + limit: Optional[int] = None, + ) -> ConceptSearchResult: + if info.context.auth_status == 'none': + raise GraphQLError('Authentication required') + elif info.context.auth_status == 'invalid': + raise GraphQLError('Authentication failure') + + concept_ids_param = conceptIds or [] + text_query = (query or '').strip() + + if not concept_ids_param and not text_query: + raise GraphQLError('Either conceptIds or query must be provided.') + + pagination = normalize_pagination(page, limit) + source_version = await resolve_source_version(org, source, version) + base_qs = build_base_queryset(source_version) + mapping_prefetch = build_mapping_prefetch(source_version) + + if concept_ids_param: + concepts, total = await concepts_for_ids(base_qs, concept_ids_param, pagination, mapping_prefetch) + else: + concepts, total = await concepts_for_query(base_qs, text_query, source_version, pagination, mapping_prefetch) + + return ConceptSearchResult( + org=org, + source=source, + version_resolved=source_version.version, + 
page=pagination['page'] if pagination else None, + limit=pagination['limit'] if pagination else None, + total_count=total, + has_next_page=has_next(total, pagination), + results=serialize_concepts(concepts), + ) diff --git a/core/graphql/schema.py b/core/graphql/schema.py new file mode 100644 index 00000000..70874634 --- /dev/null +++ b/core/graphql/schema.py @@ -0,0 +1,9 @@ +import strawberry +from strawberry_django.optimizer import DjangoOptimizerExtension + +from .queries import Query + +schema = strawberry.Schema( + query=Query, + extensions=[DjangoOptimizerExtension], +) diff --git a/core/graphql/tests/test_concepts_from_source.py b/core/graphql/tests/test_concepts_from_source.py new file mode 100644 index 00000000..992eada1 --- /dev/null +++ b/core/graphql/tests/test_concepts_from_source.py @@ -0,0 +1,192 @@ +import json +from unittest import mock + +from django.test import TestCase + +from core.common.constants import HEAD +from core.concepts.tests.factories import ConceptFactory, ConceptNameFactory +from core.mappings.tests.factories import MappingFactory +from core.orgs.tests.factories import OrganizationFactory +from core.sources.tests.factories import OrganizationSourceFactory + + +class ConceptsFromSourceQueryTests(TestCase): + maxDiff = None + + def setUp(self): + self.organization = OrganizationFactory(mnemonic='CIEL') + self.source = OrganizationSourceFactory( + organization=self.organization, + mnemonic='CIEL', + name='CIEL', + version=HEAD, + ) + self.concept1 = ConceptFactory(parent=self.source, mnemonic='12345') + ConceptNameFactory(concept=self.concept1, name='Hypertension', locale='en', locale_preferred=True) + self.concept2 = ConceptFactory(parent=self.source, mnemonic='67890') + ConceptNameFactory(concept=self.concept2, name='Diabetes', locale='en', locale_preferred=True) + self.mapping = MappingFactory( + parent=self.source, + from_concept=self.concept1, + to_concept=self.concept2, + map_type='Same As', + comment='primary link' + ) + + 
self.release_version = OrganizationSourceFactory( + organization=self.organization, + mnemonic=self.source.mnemonic, + name=self.source.name, + version='2024.01', + released=True, + is_latest_version=True, + ) + self.concept1.sources.add(self.release_version) + self.concept2.sources.add(self.release_version) + self.mapping.sources.add(self.release_version) + + def _execute(self, query: str, variables: dict): + response = self.client.post( + '/graphql/', + data=json.dumps({'query': query, 'variables': variables}), + content_type='application/json' + ) + payload = response.json() + if 'errors' in payload: + self.fail(payload['errors']) + return response.status_code, payload['data'] + + def test_fetch_concepts_by_ids_with_pagination(self): + query = """ + query ConceptsByIds($org: String!, $source: String!, $conceptIds: [String!], $page: Int, $limit: Int) { + conceptsFromSource(org: $org, source: $source, conceptIds: $conceptIds, page: $page, limit: $limit) { + org + source + versionResolved + page + limit + totalCount + hasNextPage + results { + conceptId + display + mappings { mapType toSource { url name } toCode comment } + } + } + } + """ + status, data = self._execute(query, { + 'org': self.organization.mnemonic, + 'source': self.source.mnemonic, + 'conceptIds': [self.concept1.mnemonic, self.concept2.mnemonic], + 'page': 1, + 'limit': 1, + }) + + self.assertEqual(status, 200) + payload = data['conceptsFromSource'] + self.assertEqual(payload['org'], self.organization.mnemonic) + self.assertEqual(payload['source'], self.source.mnemonic) + self.assertEqual(payload['versionResolved'], HEAD) + self.assertEqual(payload['totalCount'], 2) + self.assertTrue(payload['hasNextPage']) + self.assertEqual(payload['page'], 1) + self.assertEqual(payload['limit'], 1) + self.assertEqual(len(payload['results']), 1) + self.assertEqual(payload['results'][0]['conceptId'], self.concept1.mnemonic) + self.assertEqual(payload['results'][0]['mappings'][0]['toCode'], self.concept2.mnemonic) 
+ + @mock.patch('core.graphql.queries.concept_ids_from_es') + def test_fetch_concepts_by_query_uses_es_ordering(self, mock_es): + mock_es.return_value = ([self.concept2.id, self.concept1.id], 2) + query = """ + query ConceptsByQuery($org: String!, $source: String!, $text: String!) { + conceptsFromSource(org: $org, source: $source, query: $text) { + versionResolved + page + limit + totalCount + hasNextPage + results { conceptId } + } + } + """ + status, data = self._execute(query, { + 'org': self.organization.mnemonic, + 'source': self.source.mnemonic, + 'text': 'concept' + }) + + self.assertEqual(status, 200) + payload = data['conceptsFromSource'] + self.assertEqual(payload['versionResolved'], HEAD) + self.assertIsNone(payload['page']) + self.assertIsNone(payload['limit']) + self.assertFalse(payload['hasNextPage']) + self.assertEqual(payload['totalCount'], 2) + self.assertEqual([item['conceptId'] for item in payload['results']], + [self.concept2.mnemonic, self.concept1.mnemonic]) + + @mock.patch('core.graphql.queries.concept_ids_from_es', return_value=None) + def test_fetch_concepts_by_query_falls_back_to_db(self, _mock_es): + query = """ + query ConceptsByQuery($org: String!, $source: String!, $text: String!) { + conceptsFromSource(org: $org, source: $source, query: $text) { + totalCount + results { conceptId } + } + } + """ + status, data = self._execute(query, { + 'org': self.organization.mnemonic, + 'source': self.source.mnemonic, + 'text': 'hyper' + }) + + self.assertEqual(status, 200) + payload = data['conceptsFromSource'] + self.assertEqual(payload['totalCount'], 1) + self.assertEqual(payload['results'][0]['conceptId'], self.concept1.mnemonic) + + @mock.patch('core.graphql.queries.concept_ids_from_es') + def test_fetch_concepts_by_query_recovers_when_es_returns_zero_hits(self, mock_es): + mock_es.return_value = ([], 0) + query = """ + query ConceptsByQuery($org: String!, $source: String!, $text: String!) 
{ + conceptsFromSource(org: $org, source: $source, query: $text) { + totalCount + results { conceptId } + } + } + """ + status, data = self._execute(query, { + 'org': self.organization.mnemonic, + 'source': self.source.mnemonic, + 'text': 'diabetes' + }) + + self.assertEqual(status, 200) + payload = data['conceptsFromSource'] + self.assertEqual(payload['totalCount'], 1) + self.assertEqual(payload['results'][0]['conceptId'], self.concept2.mnemonic) + + def test_fetch_concepts_for_specific_version(self): + query = """ + query ConceptsByIds($org: String!, $source: String!, $conceptIds: [String!], $version: String) { + conceptsFromSource(org: $org, source: $source, conceptIds: $conceptIds, version: $version) { + versionResolved + results { conceptId } + } + } + """ + status, data = self._execute(query, { + 'org': self.organization.mnemonic, + 'source': self.source.mnemonic, + 'conceptIds': [self.concept1.mnemonic], + 'version': self.release_version.version, + }) + + self.assertEqual(status, 200) + payload = data['conceptsFromSource'] + self.assertEqual(payload['versionResolved'], self.release_version.version) + self.assertEqual(payload['results'][0]['conceptId'], self.concept1.mnemonic) diff --git a/core/graphql/types.py b/core/graphql/types.py new file mode 100644 index 00000000..6736f26e --- /dev/null +++ b/core/graphql/types.py @@ -0,0 +1,26 @@ +from __future__ import annotations + +from typing import List, Optional + +import strawberry + + +@strawberry.type +class ToSourceType: + url: Optional[str] + name: Optional[str] + + +@strawberry.type +class MappingType: + map_type: str = strawberry.field(name="mapType") + to_source: Optional[ToSourceType] = strawberry.field(name="toSource") + to_code: Optional[str] = strawberry.field(name="toCode") + comment: Optional[str] + + +@strawberry.type +class ConceptType: + concept_id: str = strawberry.field(name="conceptId") + display: Optional[str] + mappings: List[MappingType] diff --git a/core/graphql/urls.py 
b/core/graphql/urls.py new file mode 100644 index 00000000..d9ecfe7c --- /dev/null +++ b/core/graphql/urls.py @@ -0,0 +1,11 @@ +from django.urls import path + +from .schema import schema +from .views import AuthenticatedGraphQLView + +graphql_view = AuthenticatedGraphQLView.as_view(schema=schema, graphiql=True) + +urlpatterns = [ + path('graphql/', graphql_view, name='graphql'), + path('graphql', graphql_view), +] diff --git a/core/graphql/views.py new file mode 100644 index 00000000..a2abea18 --- /dev/null +++ b/core/graphql/views.py @@ -0,0 +1,79 @@ +""" +Authenticated GraphQL View + +This view integrates header-based authentication for GraphQL endpoints, +similar to how REST endpoints handle authorization tokens. + +Supports: +- Django Token authentication (Authorization: Token <key>) +- OIDC Bearer tokens (Authorization: Bearer <token>) - basic support, may need extension for full JWT validation + +The authenticated user is passed in the GraphQL context as 'user'. +""" + +from asgiref.sync import sync_to_async +from strawberry.django.views import AsyncGraphQLView +from rest_framework.authentication import get_authorization_header +from rest_framework.authtoken.models import Token +from django.contrib.auth.models import AnonymousUser +from django.conf import settings + + +class AuthenticatedGraphQLView(AsyncGraphQLView): + async def get_context(self, request, response=None): + context = await super().get_context(request, response) + + # First, check if user is authenticated via session (e.g., browser login) + if hasattr(request, 'user') and request.user.is_authenticated: + context.user = request.user + context.auth_status = 'valid' + return context + + # Otherwise, check authorization header + auth = get_authorization_header(request).split() + + if not auth or auth[0].lower() not in [b'token', b'bearer']: + context.user = AnonymousUser() + context.auth_status = 'none' + return context + + if len(auth) == 1: + context.user = AnonymousUser() + 
context.auth_status = 'invalid' + return context + elif len(auth) > 2: + context.user = AnonymousUser() + context.auth_status = 'invalid' + return context + + try: + token = auth[1].decode() + except UnicodeError: + context.user = AnonymousUser() + context.auth_status = 'invalid' + return context + + # Handle Django Token authentication + if auth[0].lower() == b'token': + try: + token_obj = await sync_to_async(Token.objects.select_related('user').get)(key=token) + context.user = token_obj.user + context.auth_status = 'valid' + except Token.DoesNotExist: + context.user = AnonymousUser() + context.auth_status = 'invalid' + # Handle OIDC Bearer tokens + elif auth[0].lower() == b'bearer': + # For OIDC, basic check - in production, implement full JWT validation + # using mozilla_django_oidc or similar + from core.services.auth.core import AuthService + if await sync_to_async(AuthService.is_sso_enabled)(): + # Placeholder: Assume token is valid if present (extend with proper validation) + # TODO: Implement JWT decoding and validation for OIDC + context.user = AnonymousUser() # For now, treat as anonymous + context.auth_status = 'invalid' # Since not implemented + else: + context.user = AnonymousUser() + context.auth_status = 'invalid' + + return context \ No newline at end of file diff --git a/core/mappings/models.py b/core/mappings/models.py index 711de254..9f990c03 100644 --- a/core/mappings/models.py +++ b/core/mappings/models.py @@ -213,7 +213,22 @@ def get_from_source(self): @property def to_source_name(self): - return get(self.get_to_source(), 'mnemonic') + """ + Return the name of the source that this mapping is associated with. + + If the mapping has an associated source, then the mnemonic of that source is returned. + If the mapping has a to_source_url, but no associated source, then the parent uri of that url is returned. + If the mapping has no associated source or to_source_url, then None is returned. 
+ """ + source = self.get_to_source() + if source: + return source.mnemonic + if self.to_source_url: + from core.common.utils import to_parent_uri + parent_uri = to_parent_uri(self.to_source_url) + if parent_uri: + return parent_uri.rstrip('/').split('/')[-1] + return None @property def to_source_owner(self): diff --git a/core/settings.py b/core/settings.py index cce9b438..9f9993e0 100644 --- a/core/settings.py +++ b/core/settings.py @@ -99,6 +99,7 @@ 'ordered_model', 'cid.apps.CidAppConfig', 'django_celery_beat', + 'strawberry.django', 'health_check', # required 'health_check.db', # stock Django health checkers # 'health_check.contrib.celery_ping', # requires celery @@ -117,7 +118,8 @@ 'core.repos', 'core.url_registry', 'core.events', - 'core.map_projects' + 'core.map_projects', + 'core.graphql.apps.GraphqlConfig' ] REST_FRAMEWORK = { 'DEFAULT_PERMISSION_CLASSES': ( diff --git a/core/urls.py b/core/urls.py index 1b815dc4..ab130f26 100644 --- a/core/urls.py +++ b/core/urls.py @@ -68,6 +68,9 @@ path('sources/', include('core.sources.urls'), name='sources_url'), path('repos/', include('core.repos.urls'), name='repos_url'), path('url-registry/', include('core.url_registry.urls'), name='url_registry_url'), + + # GraphQL path + path('', include('core.graphql.urls')), # TODO: require FHIR subdomain path('fhir/', include('core.fhir.urls'), name='fhir_urls'), diff --git a/docker-compose.yml b/docker-compose.yml index ac2d1355..bab2da06 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -10,11 +10,11 @@ services: volumes: - postgres-data:/var/lib/postgresql/data redis: - image: bitnami/redis:7.0.12 + image: bitnami/redis restart: "always" environment: - ALLOW_EMPTY_PASSWORD=yes - command: /opt/bitnami/scripts/redis/run.sh --maxmemory 2048mb + command: /opt/bitnami/scripts/redis/run.sh --maxmemory 4096mb healthcheck: test: "redis-cli ping" volumes: @@ -159,7 +159,7 @@ services: - upload-data:/code/uploads celery_bulk_import_0_1: image: 
openconceptlab/oclapi2:${ENVIRONMENT-production} - command: ["bash", "-c", "CELERY_WORKER_NAME=bulk_import_0_1 ./start_celery_worker.sh -Q bulk_import_0,bulk_import_1 -c 1"] + command: ["bash", "-c", "CELERY_WORKER_NAME=bulk_import_0_1 ./start_celery_worker.sh -Q bulk_import_0,bulk_import_1 -c 7"] restart: "always" healthcheck: test: ["CMD-SHELL", "-c", "CELERY_WORKER_NAME=bulk_import_0_1 ./ping_celery_worker.sh"] diff --git a/requirements.txt b/requirements.txt index 45853f6e..0ff619d1 100644 --- a/requirements.txt +++ b/requirements.txt @@ -46,4 +46,6 @@ pydantic==2.6.4 minio==7.2.9 sentence-transformers==3.3.1 elastic-transport==8.17 -litellm==1.77.0 \ No newline at end of file +litellm==1.77.0 +strawberry-graphql==0.285.0 +strawberry-graphql-django==0.67.0