From f2241c14793d1ce17218d6262b1f60d15113113f Mon Sep 17 00:00:00 2001 From: Joel Klinger Date: Fri, 15 Nov 2024 14:13:47 +0000 Subject: [PATCH 01/12] [feature/PI-528-reset_versions_to_v1] collapse versions and deactivate etl tests --- src/api/createCpmProduct/tests/test_index.py | 4 +- .../createCpmProductForEpr/src/v1/steps.py | 6 +- .../tests/test_index.py | 8 +- src/api/createDevice/src/v1/steps.py | 8 +- src/api/createDevice/tests/test_index.py | 14 +- .../src/v1/steps.py | 14 +- .../tests/test_index.py | 16 +- .../createDeviceReferenceData/src/v1/steps.py | 8 +- .../tests/test_index.py | 14 +- .../src/v1/steps.py | 18 +- .../tests/test_index.py | 16 +- .../src/v1/steps.py | 14 +- .../tests/test_index.py | 16 +- src/api/createProductTeam/src/v1/steps.py | 8 +- src/api/deleteCpmProduct/src/v1/steps.py | 2 +- src/api/deleteCpmProduct/tests/test_index.py | 8 +- src/api/readCpmProduct/tests/test_index.py | 6 +- src/api/readDevice/src/v1/steps.py | 14 +- src/api/readDevice/tests/test_index.py | 8 +- .../readDeviceReferenceData/src/v1/steps.py | 14 +- .../tests/test_index.py | 8 +- src/api/readProductTeam/src/v1/steps.py | 6 +- src/api/readProductTeam/tests/test_index.py | 4 +- src/api/readQuestionnaire/src/v1/steps.py | 6 +- src/api/searchCpmProduct/src/v1/steps.py | 6 +- src/api/searchCpmProduct/tests/test_index.py | 6 +- .../searchDeviceReferenceData/src/v1/steps.py | 12 +- .../tests/test_index.py | 6 +- src/api/searchSdsDevice/src/v1/steps.py | 2 +- .../tests/{test_index.py => _test_index.py} | 51 +- src/api/searchSdsEndpoint/src/v1/steps.py | 2 +- .../tests/{test_index.py => _test_index.py} | 15 +- .../{test_sds_smoke.py => _test_sds_smoke.py} | 0 ...er.py => _test_etl_state_lock_enforcer.py} | 0 ...ponents.py => _test_sds_etl_components.py} | 13 +- ...narios.py => _test_changelog_scenarios.py} | 8 +- src/etl/sds/tests/changelog/utils.py | 2 +- ..._bulk_trigger.py => _test_bulk_trigger.py} | 4 +- ...ual_trigger.py => _test_manual_trigger.py} | 0 ...ate_trigger.py => _test_update_trigger.py} | 0 ...lk_worker.py => _test_load_bulk_worker.py} | 8 +- ...lk_reduce.py => _test_load_bulk_reduce.py} | 0 src/etl/sds/worker/load_update/load_update.py | 2 +- ..._worker.py => _test_load_update_worker.py} | 4 +- ...rker.py => _test_transform_bulk_worker.py} | 0 .../worker/transform_bulk/transform_bulk.py | 2 +- ...er.py => _test_transform_update_worker.py} | 6 +- .../transform_update/transform_update.py | 2 +- src/etl/sds/worker/transform_update/utils.py | 2 +- .../domain/api/common_steps/create_device.py | 8 +- .../domain/api/common_steps/create_product.py | 11 +- .../domain/api/common_steps/read_product.py | 12 +- .../common_steps/tests/test_create_product.py | 6 +- .../common_steps/tests/test_read_product.py | 8 +- .../cpm_product/tests/test_cpm_product_v1.py | 2 +- src/layers/domain/core/cpm_product/v1.py | 8 +- .../tests/test_cpm_system_id_v1.py | 4 +- src/layers/domain/core/cpm_system_id/v1.py | 6 +- src/layers/domain/core/device/__init__.py | 19 +- .../core/device/tests/test_device_v1.py | 305 ++--- .../core/device/tests/test_device_v2.py | 272 ---- .../core/device/tests/test_device_v3.py | 230 ---- src/layers/domain/core/device/v1.py | 517 ++++---- src/layers/domain/core/device/v2.py | 424 ------ src/layers/domain/core/device/v3.py | 409 ------ .../device_key/tests/test_device_key_v1.py | 10 +- .../device_key/tests/test_device_key_v2.py | 38 - src/layers/domain/core/device_key/v1.py | 41 +- src/layers/domain/core/device_key/v2.py | 35 - .../tests/test_device_reference_data_v1.py | 8 +- 
.../domain/core/device_reference_data/v1.py | 6 +- .../tests/test_ods_organisation_v1.py | 2 +- .../tests/test_ods_organisation_v2.py | 26 - .../tests/test_ods_organisation_v3.py | 26 - src/layers/domain/core/ods_organisation/v1.py | 7 +- src/layers/domain/core/ods_organisation/v2.py | 23 - src/layers/domain/core/ods_organisation/v3.py | 22 - src/layers/domain/core/product_key/v1.py | 2 +- .../product_team/tests/test_product_team.py | 82 -- ...uct_team_v3.py => test_product_team_v1.py} | 6 +- src/layers/domain/core/product_team/v1.py | 57 +- src/layers/domain/core/product_team/v2.py | 46 - src/layers/domain/core/product_team/v3.py | 59 - .../tests/test_product_team_key_v1.py | 22 - src/layers/domain/core/product_team_key/v1.py | 28 +- .../domain/core/questionnaire/custom_rules.py | 12 - .../tests/test_questionnaire_v1.py | 1138 +---------------- .../tests/test_questionnaire_v2.py | 10 - .../tests/test_questionnaire_v3.py | 106 -- src/layers/domain/core/questionnaire/v1.py | 354 +---- src/layers/domain/core/questionnaire/v2.py | 99 -- src/layers/domain/core/questionnaire/v3.py | 64 - .../domain/core/root/tests/test_root_v1.py | 4 +- .../domain/core/root/tests/test_root_v2.py | 7 - .../domain/core/root/tests/test_root_v3.py | 7 - src/layers/domain/core/root/v2.py | 13 - src/layers/domain/core/root/v3.py | 13 - .../cpm_product_repository/__init__.py | 1 + .../tests/{v3 => v1}/conftest.py | 4 +- .../test_cpm_product_repository_keys_v1.py} | 8 +- .../test_cpm_product_repository_v1.py} | 10 +- .../test_cpm_product_repository_v1_delete.py} | 4 +- .../cpm_product_repository/{v3.py => v1.py} | 8 +- .../repository/cpm_system_id_repository.py | 8 +- .../tests/conftest.py | 4 +- ...est_device_reference_data_repository_v1.py | 4 +- .../device_reference_data_repository/v1.py | 18 +- .../repository/device_repository/__init__.py | 2 +- .../device_repository/tests/utils.py | 2 +- .../tests/{v3 => v1}/conftest.py | 8 +- .../test_device_repository_keys_v1.py} | 6 +- ..._repository_questionnaire_responses_v1.py} | 8 +- .../test_device_repository_tags_v1.py} | 4 +- .../test_device_repository_v1.py} | 47 +- .../test_device_repository_v1_compression.py} | 4 +- .../device_repository/tests/v2/conftest.py | 63 - .../v2/test_device_repository_keys_v2.py | 54 - ...e_repository_questionnaire_responses_v2.py | 109 -- .../v2/test_device_repository_tags_v2.py | 191 --- .../tests/v2/test_device_repository_v2.py | 333 ----- .../test_device_repository_v2_compression.py | 37 - .../device_repository/{v3.py => v1.py} | 10 +- .../domain/repository/device_repository/v2.py | 582 --------- .../repository/keys/tests/test_keys_v1.py | 25 +- src/layers/domain/repository/keys/v1.py | 24 +- src/layers/domain/repository/keys/v2.py | 9 - src/layers/domain/repository/keys/v3.py | 14 - ....py => test_product_team_repository_v1.py} | 4 +- .../tests/v1/test_product_team_repository.py | 84 -- .../repository/product_team_repository/v1.py | 55 +- .../repository/product_team_repository/v2.py | 37 - .../questionnaire_repository/__init__.py | 3 +- .../test_questionnaire_repository_v1.py} | 6 +- .../questionnaire_repository/v1/__init__.py | 52 - .../v1/deserialisers.py | 30 - .../questionnaire_repository.py} | 2 +- .../v1/questionnaires/__init__.py | 6 +- .../spine_as/field_mapping.json | 0 .../questionnaires/spine_as/v1.json | 0 .../field_mapping.json | 0 .../spine_as_additional_interactions/v1.json | 0 .../v1/questionnaires/spine_device/v1.json | 94 -- .../v1/questionnaires/spine_endpoint/v1.json | 177 --- .../spine_mhs/field_mapping.json | 0 
.../questionnaires/spine_mhs/v1.json | 0 .../spine_mhs_message_sets/field_mapping.json | 0 .../spine_mhs_message_sets/v1.json | 0 .../tests/test_spine_device_questionnaire.py | 86 -- .../test_spine_endpoint_questionnaire.py | 68 - .../tests/test_spine_questionnaires.py | 51 +- .../v1/tests/test_deserialisers.py | 41 - .../v1/tests/test_questionnaire_repository.py | 40 - .../v2/questionnaires/__init__.py | 8 - .../repository/repository/tests/model.py | 82 -- .../tests/{model_v3.py => model_v1.py} | 6 +- .../repository/tests/test_repository_v1.py | 255 +++- .../repository/tests/test_repository_v2.py | 265 ---- .../repository/tests/test_repository_v3.py | 259 ---- src/layers/domain/repository/repository/v1.py | 301 ++++- src/layers/domain/repository/repository/v2.py | 144 --- src/layers/domain/repository/repository/v3.py | 305 ----- .../tests/test_cpm_system_id_repository.py | 4 +- src/layers/domain/request_models/__init__.py | 1 + .../tests/test_parse_cpm_product_params.py | 2 +- .../tests/test_parse_device_params.py | 2 +- .../tests/test_parse_device_reference_data.py | 2 +- ...test_parse_device_reference_data_params.py | 2 +- .../tests/test_parse_product_team_params.py | 2 +- .../tests/test_parse_questionnaire_params.py | 2 +- src/layers/domain/response/response_matrix.py | 2 +- .../sds/cpm_translation/modify_device.py | 4 +- src/layers/sds/cpm_translation/modify_key.py | 372 +++--- ...ranslation.py => _test_cpm_translation.py} | 0 ...odify_device.py => _test_modify_device.py} | 0 ...test_modify_key.py => _test_modify_key.py} | 4 +- ..._translations.py => _test_translations.py} | 0 .../tests/{test_utils.py => _test_utils.py} | 0 .../sds/cpm_translation/translations.py | 464 +++---- src/layers/sds/cpm_translation/utils.py | 2 +- .../sds/domain/nhs_accredited_system.py | 10 - src/layers/sds/domain/nhs_mhs.py | 10 - src/layers/sds/worker/load.py | 2 +- 182 files changed, 1923 insertions(+), 8033 deletions(-) rename src/api/searchSdsDevice/tests/{test_index.py => _test_index.py} (89%) rename src/api/searchSdsEndpoint/tests/{test_index.py => _test_index.py} (96%) rename src/api/tests/smoke_tests/{test_sds_smoke.py => _test_sds_smoke.py} (100%) rename src/etl/sds/etl_state_lock_enforcer/tests/{test_etl_state_lock_enforcer.py => _test_etl_state_lock_enforcer.py} (100%) rename src/etl/sds/tests/{test_sds_etl_components.py => _test_sds_etl_components.py} (96%) rename src/etl/sds/tests/changelog/{test_changelog_scenarios.py => _test_changelog_scenarios.py} (95%) rename src/etl/sds/trigger/bulk/tests/{test_bulk_trigger.py => _test_bulk_trigger.py} (96%) rename src/etl/sds/trigger/manual/tests/{test_manual_trigger.py => _test_manual_trigger.py} (100%) rename src/etl/sds/trigger/update/tests/{test_update_trigger.py => _test_update_trigger.py} (100%) rename src/etl/sds/worker/load_bulk/tests/{test_load_bulk_worker.py => _test_load_bulk_worker.py} (96%) rename src/etl/sds/worker/load_bulk_reduce/tests/{test_load_bulk_reduce.py => _test_load_bulk_reduce.py} (100%) rename src/etl/sds/worker/load_update/tests/{test_load_update_worker.py => _test_load_update_worker.py} (98%) rename src/etl/sds/worker/transform_bulk/tests/{test_transform_bulk_worker.py => _test_transform_bulk_worker.py} (100%) rename src/etl/sds/worker/transform_update/tests/{test_transform_update_worker.py => _test_transform_update_worker.py} (98%) delete mode 100644 src/layers/domain/core/device/tests/test_device_v2.py delete mode 100644 src/layers/domain/core/device/tests/test_device_v3.py delete mode 100644 
src/layers/domain/core/device/v2.py delete mode 100644 src/layers/domain/core/device/v3.py delete mode 100644 src/layers/domain/core/device_key/tests/test_device_key_v2.py delete mode 100644 src/layers/domain/core/device_key/v2.py delete mode 100644 src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v2.py delete mode 100644 src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v3.py delete mode 100644 src/layers/domain/core/ods_organisation/v2.py delete mode 100644 src/layers/domain/core/ods_organisation/v3.py delete mode 100644 src/layers/domain/core/product_team/tests/test_product_team.py rename src/layers/domain/core/product_team/tests/{test_product_team_v3.py => test_product_team_v1.py} (97%) delete mode 100644 src/layers/domain/core/product_team/v2.py delete mode 100644 src/layers/domain/core/product_team/v3.py delete mode 100644 src/layers/domain/core/product_team_key/tests/test_product_team_key_v1.py delete mode 100644 src/layers/domain/core/questionnaire/custom_rules.py delete mode 100644 src/layers/domain/core/questionnaire/tests/test_questionnaire_v2.py delete mode 100644 src/layers/domain/core/questionnaire/tests/test_questionnaire_v3.py delete mode 100644 src/layers/domain/core/questionnaire/v2.py delete mode 100644 src/layers/domain/core/questionnaire/v3.py delete mode 100644 src/layers/domain/core/root/tests/test_root_v2.py delete mode 100644 src/layers/domain/core/root/tests/test_root_v3.py delete mode 100644 src/layers/domain/core/root/v2.py delete mode 100644 src/layers/domain/core/root/v3.py create mode 100644 src/layers/domain/repository/cpm_product_repository/__init__.py rename src/layers/domain/repository/cpm_product_repository/tests/{v3 => v1}/conftest.py (85%) rename src/layers/domain/repository/cpm_product_repository/tests/{v3/test_cpm_product_repository_keys_v3.py => v1/test_cpm_product_repository_keys_v1.py} (90%) rename src/layers/domain/repository/cpm_product_repository/tests/{v3/test_cpm_product_repository_v3.py => v1/test_cpm_product_repository_v1.py} (95%) rename src/layers/domain/repository/cpm_product_repository/tests/{v3/test_cpm_product_repository_v3_delete.py => v1/test_cpm_product_repository_v1_delete.py} (92%) rename src/layers/domain/repository/cpm_product_repository/{v3.py => v1.py} (94%) rename src/layers/domain/repository/device_repository/tests/{v3 => v1}/conftest.py (90%) rename src/layers/domain/repository/device_repository/tests/{v3/test_device_repository_keys_v3.py => v1/test_device_repository_keys_v1.py} (93%) rename src/layers/domain/repository/device_repository/tests/{v3/test_device_repository_questionnaire_responses_v3.py => v1/test_device_repository_questionnaire_responses_v1.py} (93%) rename src/layers/domain/repository/device_repository/tests/{v3/test_device_repository_tags_v3.py => v1/test_device_repository_tags_v1.py} (98%) rename src/layers/domain/repository/device_repository/tests/{v3/test_device_repository_v3.py => v1/test_device_repository_v1.py} (90%) rename src/layers/domain/repository/device_repository/tests/{v3/test_device_repository_v3_compression.py => v1/test_device_repository_v1_compression.py} (91%) delete mode 100644 src/layers/domain/repository/device_repository/tests/v2/conftest.py delete mode 100644 src/layers/domain/repository/device_repository/tests/v2/test_device_repository_keys_v2.py delete mode 100644 src/layers/domain/repository/device_repository/tests/v2/test_device_repository_questionnaire_responses_v2.py delete mode 100644 
src/layers/domain/repository/device_repository/tests/v2/test_device_repository_tags_v2.py delete mode 100644 src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2.py delete mode 100644 src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2_compression.py rename src/layers/domain/repository/device_repository/{v3.py => v1.py} (98%) delete mode 100644 src/layers/domain/repository/device_repository/v2.py delete mode 100644 src/layers/domain/repository/keys/v2.py delete mode 100644 src/layers/domain/repository/keys/v3.py rename src/layers/domain/repository/product_team_repository/tests/{v2/test_product_team_repository.py => test_product_team_repository_v1.py} (95%) delete mode 100644 src/layers/domain/repository/product_team_repository/tests/v1/test_product_team_repository.py delete mode 100644 src/layers/domain/repository/product_team_repository/v2.py rename src/layers/domain/repository/questionnaire_repository/{v2/tests/test_questionnaire_repository_v2.py => tests/test_questionnaire_repository_v1.py} (89%) delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/__init__.py delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/deserialisers.py rename src/layers/domain/repository/questionnaire_repository/{v2/__init__.py => v1/questionnaire_repository.py} (95%) rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_as/field_mapping.json (100%) rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_as/v1.json (100%) rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_as_additional_interactions/field_mapping.json (100%) rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_as_additional_interactions/v1.json (100%) delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_device/v1.json delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_endpoint/v1.json rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_mhs/field_mapping.json (100%) rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_mhs/v1.json (100%) rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_mhs_message_sets/field_mapping.json (100%) rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/spine_mhs_message_sets/v1.json (100%) delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_device_questionnaire.py delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_endpoint_questionnaire.py rename src/layers/domain/repository/questionnaire_repository/{v2 => v1}/questionnaires/tests/test_spine_questionnaires.py (69%) delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/tests/test_deserialisers.py delete mode 100644 src/layers/domain/repository/questionnaire_repository/v1/tests/test_questionnaire_repository.py delete mode 100644 src/layers/domain/repository/questionnaire_repository/v2/questionnaires/__init__.py delete mode 100644 src/layers/domain/repository/repository/tests/model.py rename src/layers/domain/repository/repository/tests/{model_v3.py => model_v1.py} (95%) delete mode 100644 
src/layers/domain/repository/repository/tests/test_repository_v2.py delete mode 100644 src/layers/domain/repository/repository/tests/test_repository_v3.py delete mode 100644 src/layers/domain/repository/repository/v2.py delete mode 100644 src/layers/domain/repository/repository/v3.py create mode 100644 src/layers/domain/request_models/__init__.py rename src/layers/sds/cpm_translation/tests/{test_cpm_translation.py => _test_cpm_translation.py} (100%) rename src/layers/sds/cpm_translation/tests/{test_modify_device.py => _test_modify_device.py} (100%) rename src/layers/sds/cpm_translation/tests/{test_modify_key.py => _test_modify_key.py} (99%) rename src/layers/sds/cpm_translation/tests/{test_translations.py => _test_translations.py} (100%) rename src/layers/sds/cpm_translation/tests/{test_utils.py => _test_utils.py} (100%) diff --git a/src/api/createCpmProduct/tests/test_index.py b/src/api/createCpmProduct/tests/test_index.py index 6b50bdc8..d19e4065 100644 --- a/src/api/createCpmProduct/tests/test_index.py +++ b/src/api/createCpmProduct/tests/test_index.py @@ -3,8 +3,8 @@ from unittest import mock import pytest -from domain.core.root.v3 import Root -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.root import Root +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/createCpmProductForEpr/src/v1/steps.py b/src/api/createCpmProductForEpr/src/v1/steps.py index c4d88582..8a91755d 100644 --- a/src/api/createCpmProductForEpr/src/v1/steps.py +++ b/src/api/createCpmProductForEpr/src/v1/steps.py @@ -7,9 +7,9 @@ read_product_team, ) from domain.core.cpm_product import CpmProduct -from domain.core.cpm_system_id.v1 import PartyKeyId -from domain.core.product_key.v1 import ProductKeyType -from domain.core.product_team.v3 import ProductTeam +from domain.core.cpm_system_id import PartyKeyId +from domain.core.product_key import ProductKeyType +from domain.core.product_team import ProductTeam from domain.repository.cpm_system_id_repository import CpmSystemIdRepository diff --git a/src/api/createCpmProductForEpr/tests/test_index.py b/src/api/createCpmProductForEpr/tests/test_index.py index cb5a1968..43dd0b94 100644 --- a/src/api/createCpmProductForEpr/tests/test_index.py +++ b/src/api/createCpmProductForEpr/tests/test_index.py @@ -4,11 +4,11 @@ from unittest import mock import pytest -from domain.core.cpm_system_id.v1 import PartyKeyId, ProductId +from domain.core.cpm_system_id import PartyKeyId, ProductId from domain.core.enum import Status -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/createDevice/src/v1/steps.py b/src/api/createDevice/src/v1/steps.py index 784a5580..21971c3b 100644 --- a/src/api/createDevice/src/v1/steps.py +++ b/src/api/createDevice/src/v1/steps.py @@ -6,10 +6,10 @@ read_product, read_product_team, ) -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device.v3 import Device -from domain.repository.device_repository.v3 import DeviceRepository -from domain.request_models.v1 
import CreateDeviceIncomingParams +from domain.core.cpm_product import CpmProduct +from domain.core.device import Device +from domain.repository.device_repository import DeviceRepository +from domain.request_models import CreateDeviceIncomingParams from domain.response.validation_errors import mark_validation_errors_as_inbound diff --git a/src/api/createDevice/tests/test_index.py b/src/api/createDevice/tests/test_index.py index d8ad9814..068c2e40 100644 --- a/src/api/createDevice/tests/test_index.py +++ b/src/api/createDevice/tests/test_index.py @@ -7,13 +7,13 @@ from unittest import mock import pytest -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.cpm_system_id.v1 import ProductId -from domain.core.device.v3 import Device -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_repository.v3 import DeviceRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.cpm_product import CpmProduct +from domain.core.cpm_system_id import ProductId +from domain.core.device import Device +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_repository import DeviceRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py b/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py index c307f0f9..adb45fe6 100644 --- a/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py +++ b/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py @@ -7,16 +7,16 @@ read_product, read_product_team, ) -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device.v3 import Device +from domain.core.cpm_product import CpmProduct +from domain.core.device import Device from domain.core.error import InvalidSpineMhsResponse -from domain.core.questionnaire.v3 import Questionnaire, QuestionnaireResponse -from domain.repository.device_repository.v3 import DeviceRepository -from domain.repository.questionnaire_repository.v2 import QuestionnaireRepository -from domain.repository.questionnaire_repository.v2.questionnaires import ( +from domain.core.questionnaire import Questionnaire, QuestionnaireResponse +from domain.repository.device_repository import DeviceRepository +from domain.repository.questionnaire_repository import ( QuestionnaireInstance, + QuestionnaireRepository, ) -from domain.request_models.v1 import CreateMhsDeviceIncomingParams +from domain.request_models import CreateMhsDeviceIncomingParams from domain.response.validation_errors import mark_validation_errors_as_inbound diff --git a/src/api/createDeviceMessageHandlingSystem/tests/test_index.py b/src/api/createDeviceMessageHandlingSystem/tests/test_index.py index 6f8910ef..8614fc01 100644 --- a/src/api/createDeviceMessageHandlingSystem/tests/test_index.py +++ b/src/api/createDeviceMessageHandlingSystem/tests/test_index.py @@ -7,14 +7,14 @@ from unittest import mock import pytest -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.cpm_system_id.v1 import ProductId -from domain.core.device.v3 import Device -from domain.core.product_key.v1 import ProductKeyType -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from 
domain.repository.device_repository.v3 import DeviceRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.cpm_product import CpmProduct +from domain.core.cpm_system_id import ProductId +from domain.core.device import Device +from domain.core.product_key import ProductKeyType +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_repository import DeviceRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/createDeviceReferenceData/src/v1/steps.py b/src/api/createDeviceReferenceData/src/v1/steps.py index 3b264cef..bdca5549 100644 --- a/src/api/createDeviceReferenceData/src/v1/steps.py +++ b/src/api/createDeviceReferenceData/src/v1/steps.py @@ -6,12 +6,12 @@ read_product, read_product_team, ) -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device_reference_data.v1 import DeviceReferenceData -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.cpm_product import CpmProduct +from domain.core.device_reference_data import DeviceReferenceData +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.request_models.v1 import CreateDeviceReferenceDataIncomingParams +from domain.request_models import CreateDeviceReferenceDataIncomingParams from domain.response.validation_errors import mark_validation_errors_as_inbound diff --git a/src/api/createDeviceReferenceData/tests/test_index.py b/src/api/createDeviceReferenceData/tests/test_index.py index 8dee73f0..f0a65b05 100644 --- a/src/api/createDeviceReferenceData/tests/test_index.py +++ b/src/api/createDeviceReferenceData/tests/test_index.py @@ -7,15 +7,15 @@ from unittest import mock import pytest -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.cpm_system_id.v1 import ProductId -from domain.core.device_reference_data.v1 import DeviceReferenceData -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.cpm_product import CpmProduct +from domain.core.cpm_system_id import ProductId +from domain.core.device_reference_data import DeviceReferenceData +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/createDeviceReferenceDataASActions/src/v1/steps.py b/src/api/createDeviceReferenceDataASActions/src/v1/steps.py index 583c915f..b65f1e3d 100644 --- a/src/api/createDeviceReferenceDataASActions/src/v1/steps.py +++ b/src/api/createDeviceReferenceDataASActions/src/v1/steps.py @@ -6,22 +6,20 @@ read_product, read_product_team, ) -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device_reference_data.v1 import DeviceReferenceData +from domain.core.cpm_product import CpmProduct +from domain.core.device_reference_data import DeviceReferenceData from domain.core.error import ConfigurationError -from 
domain.core.product_key.v1 import ProductKeyType -from domain.core.questionnaire.v3 import Questionnaire, QuestionnaireResponse -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.product_key import ProductKeyType +from domain.core.questionnaire import Questionnaire, QuestionnaireResponse +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) from domain.repository.errors import AlreadyExistsError -from domain.repository.questionnaire_repository.v2 import QuestionnaireRepository -from domain.repository.questionnaire_repository.v2.questionnaires import ( +from domain.repository.questionnaire_repository import ( QuestionnaireInstance, + QuestionnaireRepository, ) -from domain.request_models.v1 import ( - CreateDeviceReferenceAdditionalInteractionsDataParams, -) +from domain.request_models import CreateDeviceReferenceAdditionalInteractionsDataParams from domain.response.validation_errors import mark_validation_errors_as_inbound DEVICE_NAME_MARKER = "AS Additional Interactions" diff --git a/src/api/createDeviceReferenceDataASActions/tests/test_index.py b/src/api/createDeviceReferenceDataASActions/tests/test_index.py index 72b3e7a5..a2d0bbf9 100644 --- a/src/api/createDeviceReferenceDataASActions/tests/test_index.py +++ b/src/api/createDeviceReferenceDataASActions/tests/test_index.py @@ -6,16 +6,16 @@ from typing import Any, Generator from unittest import mock -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.cpm_system_id.v1 import PartyKeyId, ProductId -from domain.core.device_reference_data.v1 import DeviceReferenceData -from domain.core.product_key.v1 import ProductKeyType -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.cpm_product import CpmProduct +from domain.core.cpm_system_id import PartyKeyId, ProductId +from domain.core.device_reference_data import DeviceReferenceData +from domain.core.product_key import ProductKeyType +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/createDeviceReferenceDataMessageSet/src/v1/steps.py b/src/api/createDeviceReferenceDataMessageSet/src/v1/steps.py index d910e13c..e49f2a98 100644 --- a/src/api/createDeviceReferenceDataMessageSet/src/v1/steps.py +++ b/src/api/createDeviceReferenceDataMessageSet/src/v1/steps.py @@ -7,18 +7,18 @@ read_product, read_product_team, ) -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device_reference_data.v1 import DeviceReferenceData -from domain.core.questionnaire.v3 import Questionnaire, QuestionnaireResponse -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.cpm_product import CpmProduct +from domain.core.device_reference_data import DeviceReferenceData +from domain.core.questionnaire import Questionnaire, QuestionnaireResponse +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) from domain.repository.errors import AlreadyExistsError -from 
domain.repository.questionnaire_repository.v2 import QuestionnaireRepository -from domain.repository.questionnaire_repository.v2.questionnaires import ( +from domain.repository.questionnaire_repository import ( QuestionnaireInstance, + QuestionnaireRepository, ) -from domain.request_models.v1 import CreateDeviceReferenceMessageSetsDataParams +from domain.request_models import CreateDeviceReferenceMessageSetsDataParams from domain.response.validation_errors import mark_validation_errors_as_inbound DEVICE_NAME_MARKER = "MHS Message Set" diff --git a/src/api/createDeviceReferenceDataMessageSet/tests/test_index.py b/src/api/createDeviceReferenceDataMessageSet/tests/test_index.py index 6a72f845..9eb92315 100644 --- a/src/api/createDeviceReferenceDataMessageSet/tests/test_index.py +++ b/src/api/createDeviceReferenceDataMessageSet/tests/test_index.py @@ -6,16 +6,16 @@ from typing import Any, Generator from unittest import mock -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.cpm_system_id.v1 import PartyKeyId, ProductId -from domain.core.device_reference_data.v1 import DeviceReferenceData -from domain.core.product_key.v1 import ProductKeyType -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.cpm_product import CpmProduct +from domain.core.cpm_system_id import PartyKeyId, ProductId +from domain.core.device_reference_data import DeviceReferenceData +from domain.core.product_key import ProductKeyType +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/createProductTeam/src/v1/steps.py b/src/api/createProductTeam/src/v1/steps.py index f298fed5..ce8ac320 100644 --- a/src/api/createProductTeam/src/v1/steps.py +++ b/src/api/createProductTeam/src/v1/steps.py @@ -1,11 +1,11 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.product_team.v3 import ProductTeam -from domain.core.root.v3 import Root +from domain.core.product_team import ProductTeam +from domain.core.root import Root from domain.ods import validate_ods_code -from domain.repository.product_team_repository.v2 import ProductTeamRepository -from domain.request_models.v1 import CreateProductTeamIncomingParams +from domain.repository.product_team_repository import ProductTeamRepository +from domain.request_models import CreateProductTeamIncomingParams from domain.response.validation_errors import ( mark_json_decode_errors_as_inbound, mark_validation_errors_as_inbound, diff --git a/src/api/deleteCpmProduct/src/v1/steps.py b/src/api/deleteCpmProduct/src/v1/steps.py index 5eba28bc..80f309cc 100644 --- a/src/api/deleteCpmProduct/src/v1/steps.py +++ b/src/api/deleteCpmProduct/src/v1/steps.py @@ -2,7 +2,7 @@ from domain.api.common_steps.read_product import before_steps, read_product from domain.core.cpm_product import CpmProduct -from domain.repository.cpm_product_repository.v3 import CpmProductRepository +from domain.repository.cpm_product_repository import CpmProductRepository def delete_product(data, 
cache) -> CpmProduct: diff --git a/src/api/deleteCpmProduct/tests/test_index.py b/src/api/deleteCpmProduct/tests/test_index.py index 521c738a..352bdfec 100644 --- a/src/api/deleteCpmProduct/tests/test_index.py +++ b/src/api/deleteCpmProduct/tests/test_index.py @@ -4,15 +4,15 @@ from unittest import mock import pytest -from domain.core.cpm_system_id.v1 import ProductId +from domain.core.cpm_system_id import ProductId from domain.core.enum import Status -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import ( +from domain.core.root import Root +from domain.repository.cpm_product_repository import ( CpmProductRepository, InactiveCpmProductRepository, ) from domain.repository.errors import ItemNotFound -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from test_helpers.dynamodb import mock_table from test_helpers.sample_data import CPM_PRODUCT_TEAM_NO_ID diff --git a/src/api/readCpmProduct/tests/test_index.py b/src/api/readCpmProduct/tests/test_index.py index 829afc18..d6accc1e 100644 --- a/src/api/readCpmProduct/tests/test_index.py +++ b/src/api/readCpmProduct/tests/test_index.py @@ -3,9 +3,9 @@ from unittest import mock import pytest -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/readDevice/src/v1/steps.py b/src/api/readDevice/src/v1/steps.py index b3a292d9..c41a95c0 100644 --- a/src/api/readDevice/src/v1/steps.py +++ b/src/api/readDevice/src/v1/steps.py @@ -1,13 +1,13 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device.v3 import Device -from domain.core.product_team.v3 import ProductTeam -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_repository.v3 import DeviceRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository -from domain.request_models.v1 import DevicePathParams +from domain.core.cpm_product import CpmProduct +from domain.core.device import Device +from domain.core.product_team import ProductTeam +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_repository import DeviceRepository +from domain.repository.product_team_repository import ProductTeamRepository +from domain.request_models import DevicePathParams from domain.response.validation_errors import mark_validation_errors_as_inbound from event.step_chain import StepChain diff --git a/src/api/readDevice/tests/test_index.py b/src/api/readDevice/tests/test_index.py index 6a159df4..c24514ef 100644 --- a/src/api/readDevice/tests/test_index.py +++ b/src/api/readDevice/tests/test_index.py @@ -3,10 +3,10 @@ from unittest import mock import pytest -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_repository.v3 import DeviceRepository -from domain.repository.product_team_repository.v2 import 
ProductTeamRepository +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_repository import DeviceRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/readDeviceReferenceData/src/v1/steps.py b/src/api/readDeviceReferenceData/src/v1/steps.py index 6aec9b74..6fc2a5dd 100644 --- a/src/api/readDeviceReferenceData/src/v1/steps.py +++ b/src/api/readDeviceReferenceData/src/v1/steps.py @@ -1,15 +1,15 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device_reference_data.v1 import DeviceReferenceData -from domain.core.product_team.v3 import ProductTeam -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.cpm_product import CpmProduct +from domain.core.device_reference_data import DeviceReferenceData +from domain.core.product_team import ProductTeam +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.repository.product_team_repository.v2 import ProductTeamRepository -from domain.request_models.v1 import DeviceReferenceDataPathParams +from domain.repository.product_team_repository import ProductTeamRepository +from domain.request_models import DeviceReferenceDataPathParams from domain.response.validation_errors import mark_validation_errors_as_inbound from event.step_chain import StepChain diff --git a/src/api/readDeviceReferenceData/tests/test_index.py b/src/api/readDeviceReferenceData/tests/test_index.py index 7157538e..f1f9287d 100644 --- a/src/api/readDeviceReferenceData/tests/test_index.py +++ b/src/api/readDeviceReferenceData/tests/test_index.py @@ -3,12 +3,12 @@ from unittest import mock import pytest -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/readProductTeam/src/v1/steps.py b/src/api/readProductTeam/src/v1/steps.py index ebe59be4..9e3dbf05 100644 --- a/src/api/readProductTeam/src/v1/steps.py +++ b/src/api/readProductTeam/src/v1/steps.py @@ -1,9 +1,9 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.product_team.v3 import ProductTeam -from domain.repository.product_team_repository.v2 import ProductTeamRepository -from domain.request_models.v1 import ProductTeamPathParams +from domain.core.product_team import ProductTeam +from domain.repository.product_team_repository import ProductTeamRepository +from domain.request_models import ProductTeamPathParams from domain.response.validation_errors import mark_validation_errors_as_inbound 
from event.step_chain import StepChain diff --git a/src/api/readProductTeam/tests/test_index.py b/src/api/readProductTeam/tests/test_index.py index 3207f55f..22e1fde8 100644 --- a/src/api/readProductTeam/tests/test_index.py +++ b/src/api/readProductTeam/tests/test_index.py @@ -3,8 +3,8 @@ from unittest import mock import pytest -from domain.core.root.v3 import Root -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.root import Root +from domain.repository.product_team_repository import ProductTeamRepository from event.json import json_loads from test_helpers.dynamodb import mock_table diff --git a/src/api/readQuestionnaire/src/v1/steps.py b/src/api/readQuestionnaire/src/v1/steps.py index 4f76eb60..8b4b13f0 100644 --- a/src/api/readQuestionnaire/src/v1/steps.py +++ b/src/api/readQuestionnaire/src/v1/steps.py @@ -1,9 +1,9 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.questionnaire.v3 import Questionnaire -from domain.repository.questionnaire_repository.v2 import QuestionnaireRepository -from domain.request_models.v1 import QuestionnairePathParams +from domain.core.questionnaire import Questionnaire +from domain.repository.questionnaire_repository import QuestionnaireRepository +from domain.request_models import QuestionnairePathParams from domain.response.validation_errors import mark_validation_errors_as_inbound from event.step_chain import StepChain diff --git a/src/api/searchCpmProduct/src/v1/steps.py b/src/api/searchCpmProduct/src/v1/steps.py index f17bebd0..862c044b 100644 --- a/src/api/searchCpmProduct/src/v1/steps.py +++ b/src/api/searchCpmProduct/src/v1/steps.py @@ -1,9 +1,9 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.product_team.v3 import ProductTeam -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.product_team import ProductTeam +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.product_team_repository import ProductTeamRepository from domain.response.response_models import SearchResponse from event.step_chain import StepChain diff --git a/src/api/searchCpmProduct/tests/test_index.py b/src/api/searchCpmProduct/tests/test_index.py index c5276b07..8ae93896 100644 --- a/src/api/searchCpmProduct/tests/test_index.py +++ b/src/api/searchCpmProduct/tests/test_index.py @@ -4,9 +4,9 @@ import pytest from domain.core.cpm_system_id import ProductId -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.aws.client import dynamodb_client from event.json import json_loads diff --git a/src/api/searchDeviceReferenceData/src/v1/steps.py b/src/api/searchDeviceReferenceData/src/v1/steps.py index ffa71bfe..60153d55 100644 --- a/src/api/searchDeviceReferenceData/src/v1/steps.py +++ b/src/api/searchDeviceReferenceData/src/v1/steps.py @@ -1,14 +1,14 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.cpm_product.v1 
import CpmProduct -from domain.core.product_team.v3 import ProductTeam -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.cpm_product import CpmProduct +from domain.core.product_team import ProductTeam +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.repository.product_team_repository.v2 import ProductTeamRepository -from domain.request_models.v1 import CpmProductPathParams +from domain.repository.product_team_repository import ProductTeamRepository +from domain.request_models import CpmProductPathParams from domain.response.response_models import SearchResponse from event.step_chain import StepChain diff --git a/src/api/searchDeviceReferenceData/tests/test_index.py b/src/api/searchDeviceReferenceData/tests/test_index.py index 82762d6a..f55f4a40 100644 --- a/src/api/searchDeviceReferenceData/tests/test_index.py +++ b/src/api/searchDeviceReferenceData/tests/test_index.py @@ -4,12 +4,12 @@ import pytest from domain.core.cpm_system_id import ProductId -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.aws.client import dynamodb_client from event.json import json_loads diff --git a/src/api/searchSdsDevice/src/v1/steps.py b/src/api/searchSdsDevice/src/v1/steps.py index 977c054e..8832cafb 100644 --- a/src/api/searchSdsDevice/src/v1/steps.py +++ b/src/api/searchSdsDevice/src/v1/steps.py @@ -2,7 +2,7 @@ from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent from domain.api.sds.query import SearchSDSDeviceQueryParams -from domain.repository.device_repository.v2 import DeviceRepository +from domain.repository.device_repository import DeviceRepository from domain.response.validation_errors import mark_validation_errors_as_inbound from event.step_chain import StepChain diff --git a/src/api/searchSdsDevice/tests/test_index.py b/src/api/searchSdsDevice/tests/_test_index.py similarity index 89% rename from src/api/searchSdsDevice/tests/test_index.py rename to src/api/searchSdsDevice/tests/_test_index.py index 05d19e6c..f5b8158a 100644 --- a/src/api/searchSdsDevice/tests/test_index.py +++ b/src/api/searchSdsDevice/tests/_test_index.py @@ -2,17 +2,17 @@ from unittest import mock import pytest -from domain.core.device.v2 import DeviceType as DeviceTypeV2 -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.questionnaire.v2 import Questionnaire -from domain.core.root.v2 import Root -from domain.repository.device_repository.v2 import DeviceRepository +from domain.core.device import Device +from domain.core.device_key import DeviceKeyType +from domain.core.product_team import ProductTeam +from domain.core.questionnaire import Questionnaire +from domain.core.root import Root +from domain.repository.device_repository import DeviceRepository from event.aws.client import dynamodb_client from event.json import json_loads from test_helpers.dynamodb import mock_table from test_helpers.terraform 
import read_terraform_output -from test_helpers.uuid import consistent_uuid from test_helpers.validate_search_response import validate_result_body TABLE_NAME = "hiya" @@ -20,30 +20,28 @@ def _create_org(): org = Root.create_ods_organisation(ods_code="ABC") - product_team = org.create_product_team( - id=consistent_uuid(1), name="product-team-name-a" - ) + product_team = org.create_product_team(name="product-team-name-a") return product_team -def _create_device(device, product_team, params): - cpmdevice = product_team.create_device( - name=device["device_name"], device_type=DeviceTypeV2.PRODUCT - ) - cpmdevice.add_key(key_value=device["device_key"], key_type=DeviceKeyType.PRODUCT_ID) +def _create_device( + device_data: dict, product_team: ProductTeam, questionnaire_data: dict[str, str] +) -> Device: + product = product_team.create_cpm_product(name="my-product") - questionnaire = Questionnaire(name=f"spine_{device['device_name']}", version=1) + device = product.create_device(name=device_data["device_name"]) + device.add_key( + key_value=device_data["device_key"], key_type=DeviceKeyType.PRODUCT_ID + ) - response = [] - for key, value in params.items(): - questionnaire.add_question(name=key, answer_types=(str,), mandatory=True) - response.append({key: [value]}) + questionnaire = Questionnaire(name=f"spine_{device_data['device_name']}", version=1) + response = [{key: [value]} for key, value in questionnaire_data.items()] questionnaire_response = questionnaire.respond(responses=response) - cpmdevice.add_questionnaire_response(questionnaire_response=questionnaire_response) - tag_params = [params] - cpmdevice.add_tags(tags=tag_params) - return cpmdevice + device.add_questionnaire_response(questionnaire_response=questionnaire_response) + tag_params = [questionnaire_data] + device.add_tags(tags=tag_params) + return device @pytest.mark.integration @@ -131,7 +128,9 @@ def test_no_results(params): ) def test_index(params, device): product_team = _create_org() - cpmdevice = _create_device(device=device, product_team=product_team, params=params) + cpmdevice = _create_device( + device_data=device, product_team=product_team, questionnaire_data=params + ) table_name = read_terraform_output("dynamodb_table_name.value") client = dynamodb_client() @@ -225,7 +224,7 @@ def test_multiple_returned(params, devices): product_team = _create_org() for device in devices: cpmdevice = _create_device( - device=device, product_team=product_team, params=params + device_data=device, product_team=product_team, questionnaire_data=params ) with mock.patch.dict( @@ -342,7 +341,7 @@ def test_only_active_returned(params, devices): product_team = _create_org() for index, device in enumerate(devices): cpmdevice = _create_device( - device=device, product_team=product_team, params=params + device_data=device, product_team=product_team, questionnaire_data=params ) with mock.patch.dict( diff --git a/src/api/searchSdsEndpoint/src/v1/steps.py b/src/api/searchSdsEndpoint/src/v1/steps.py index b09786f5..6f3fb2f0 100644 --- a/src/api/searchSdsEndpoint/src/v1/steps.py +++ b/src/api/searchSdsEndpoint/src/v1/steps.py @@ -2,7 +2,7 @@ from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent from domain.api.sds.query import SearchSDSEndpointQueryParams -from domain.repository.device_repository.v2 import DeviceRepository +from domain.repository.device_repository import DeviceRepository from domain.response.validation_errors import mark_validation_errors_as_inbound from event.step_chain import StepChain diff --git a/src/api/searchSdsEndpoint/tests/test_index.py 
b/src/api/searchSdsEndpoint/tests/_test_index.py similarity index 96% rename from src/api/searchSdsEndpoint/tests/test_index.py rename to src/api/searchSdsEndpoint/tests/_test_index.py index c73ff240..c995128c 100644 --- a/src/api/searchSdsEndpoint/tests/test_index.py +++ b/src/api/searchSdsEndpoint/tests/_test_index.py @@ -2,13 +2,12 @@ from unittest import mock import pytest -from domain.core.device.v2 import Device -from domain.core.device.v2 import DeviceType as DeviceTypeV2 -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.product_team.v2 import ProductTeam -from domain.core.questionnaire.v2 import Questionnaire -from domain.core.root.v2 import Root -from domain.repository.device_repository.v2 import DeviceRepository +from domain.core.device import Device, DeviceType +from domain.core.device_key import DeviceKeyType +from domain.core.product_team import ProductTeam +from domain.core.questionnaire import Questionnaire +from domain.core.root import Root +from domain.repository.device_repository import DeviceRepository from event.aws.client import dynamodb_client from event.json import json_loads @@ -30,7 +29,7 @@ def _create_org(): def _create_device(device: Device, product_team: ProductTeam, params: dict): cpm_device = product_team.create_device( - name=device["device_name"], device_type=DeviceTypeV2.ENDPOINT + name=device["device_name"], device_type=DeviceType.ENDPOINT ) cpm_device.add_key( key_value=device["device_key"], key_type=DeviceKeyType.PRODUCT_ID diff --git a/src/api/tests/smoke_tests/test_sds_smoke.py b/src/api/tests/smoke_tests/_test_sds_smoke.py similarity index 100% rename from src/api/tests/smoke_tests/test_sds_smoke.py rename to src/api/tests/smoke_tests/_test_sds_smoke.py diff --git a/src/etl/sds/etl_state_lock_enforcer/tests/test_etl_state_lock_enforcer.py b/src/etl/sds/etl_state_lock_enforcer/tests/_test_etl_state_lock_enforcer.py similarity index 100% rename from src/etl/sds/etl_state_lock_enforcer/tests/test_etl_state_lock_enforcer.py rename to src/etl/sds/etl_state_lock_enforcer/tests/_test_etl_state_lock_enforcer.py diff --git a/src/etl/sds/tests/test_sds_etl_components.py b/src/etl/sds/tests/_test_sds_etl_components.py similarity index 96% rename from src/etl/sds/tests/test_sds_etl_components.py rename to src/etl/sds/tests/_test_sds_etl_components.py index 4fba4918..90fc6376 100644 --- a/src/etl/sds/tests/test_sds_etl_components.py +++ b/src/etl/sds/tests/_test_sds_etl_components.py @@ -5,8 +5,9 @@ import boto3 import pytest from botocore.config import Config -from domain.core.device import DeviceStatus, DeviceType +from domain.core.device import DeviceType from domain.core.device_key import DeviceKeyType +from domain.core.enum import Status from etl.clear_state_inputs import EMPTY_JSON_DATA, EMPTY_LDIF_DATA from etl_utils.constants import CHANGELOG_NUMBER, WorkerKey from etl_utils.io import pkl_dumps_lz4 @@ -21,7 +22,7 @@ ANOTHER_GOOD_SDS_RECORD, GOOD_SDS_RECORD, ) -from etl.sds.worker.load_bulk.tests.test_load_bulk_worker import MockDeviceRepository +from etl.sds.worker.load_bulk.tests._test_load_bulk_worker import MockDeviceRepository from test_helpers.dynamodb import clear_dynamodb_table from test_helpers.pytest_skips import long_running from test_helpers.terraform import read_terraform_output @@ -296,11 +297,11 @@ def test_end_to_end_changelog_delete( # Verify that the device with unique id 000428682512 is now "inactive" _device_000428682512 = repository.read_inactive(device_000428682512.id) - assert _device_000428682512.status == 
DeviceStatus.INACTIVE + assert _device_000428682512.status == Status.INACTIVE # Verify that the other device is still "active" (device_000842065542,) = repository.query_by_tag(unique_identifier="000842065542") - assert device_000842065542.status == DeviceStatus.ACTIVE + assert device_000842065542.status == Status.ACTIVE # Execute another changelog initial state in the ETL put_object(s3_client, key=WorkerKey.EXTRACT, body=DELETION_REQUEST_000842065542) @@ -314,11 +315,11 @@ def test_end_to_end_changelog_delete( # Verify that the device with unique id 000428682512 is still "inactive" __device_000428682512 = repository.read_inactive(_device_000428682512.id) - assert __device_000428682512.status == DeviceStatus.INACTIVE + assert __device_000428682512.status == Status.INACTIVE # Verify that the other device is now "inactive" _device_000842065542 = repository.read_inactive(device_000842065542.id) - assert _device_000842065542.status == DeviceStatus.INACTIVE + assert _device_000842065542.status == Status.INACTIVE # Verify that inactive devices cannot be queried by tag assert repository.query_by_tag(unique_identifier="000842065542") == [] diff --git a/src/etl/sds/tests/changelog/test_changelog_scenarios.py b/src/etl/sds/tests/changelog/_test_changelog_scenarios.py similarity index 95% rename from src/etl/sds/tests/changelog/test_changelog_scenarios.py rename to src/etl/sds/tests/changelog/_test_changelog_scenarios.py index 55ae1112..772743d7 100644 --- a/src/etl/sds/tests/changelog/test_changelog_scenarios.py +++ b/src/etl/sds/tests/changelog/_test_changelog_scenarios.py @@ -2,15 +2,15 @@ from typing import Generator import pytest -from domain.core.device.v2 import Device, DeviceTag -from domain.repository.device_repository.v2 import DeviceRepository -from domain.repository.keys.v2 import TableKey +from domain.core.device import Device, DeviceTag +from domain.repository.device_repository import DeviceRepository +from domain.repository.keys import TableKey from domain.repository.marshall import unmarshall from etl_utils.io import pkl_load_lz4 from event.aws.client import dynamodb_client as get_dynamodb_client from mypy_boto3_s3 import S3Client -from etl.sds.worker.load_bulk.tests.test_load_bulk_worker import MockDeviceRepository +from etl.sds.worker.load_bulk.tests._test_load_bulk_worker import MockDeviceRepository from test_helpers.terraform import read_terraform_output from .conftest import ETL_BUCKET, parametrize_over_scenarios diff --git a/src/etl/sds/tests/changelog/utils.py b/src/etl/sds/tests/changelog/utils.py index b7c81363..a77cac56 100644 --- a/src/etl/sds/tests/changelog/utils.py +++ b/src/etl/sds/tests/changelog/utils.py @@ -2,7 +2,7 @@ from pathlib import Path from typing import Any, Literal, Protocol -from domain.core.device.v2 import Device +from domain.core.device import Device from event.json import json_load, json_loads PATH_TO_HERE = Path(__file__).parent / "changelog_components" diff --git a/src/etl/sds/trigger/bulk/tests/test_bulk_trigger.py b/src/etl/sds/trigger/bulk/tests/_test_bulk_trigger.py similarity index 96% rename from src/etl/sds/trigger/bulk/tests/test_bulk_trigger.py rename to src/etl/sds/trigger/bulk/tests/_test_bulk_trigger.py index 9bc7ec73..292438a6 100644 --- a/src/etl/sds/trigger/bulk/tests/test_bulk_trigger.py +++ b/src/etl/sds/trigger/bulk/tests/_test_bulk_trigger.py @@ -4,14 +4,14 @@ import boto3 import pytest -from domain.core.device.v2 import DeviceType +from domain.core.device import DeviceType from etl_utils.constants import CHANGELOG_NUMBER, 
ETL_STATE_LOCK, WorkerKey from etl_utils.io.test.io_utils import pkl_loads_lz4 from event.aws.client import dynamodb_client from event.json import json_loads from etl.sds.worker.extract.tests.test_extract_worker import GOOD_SDS_RECORD -from etl.sds.worker.load_bulk.tests.test_load_bulk_worker import MockDeviceRepository +from etl.sds.worker.load_bulk.tests._test_load_bulk_worker import MockDeviceRepository from test_helpers.dynamodb import clear_dynamodb_table from test_helpers.s3 import ( _ask_s3, diff --git a/src/etl/sds/trigger/manual/tests/test_manual_trigger.py b/src/etl/sds/trigger/manual/tests/_test_manual_trigger.py similarity index 100% rename from src/etl/sds/trigger/manual/tests/test_manual_trigger.py rename to src/etl/sds/trigger/manual/tests/_test_manual_trigger.py diff --git a/src/etl/sds/trigger/update/tests/test_update_trigger.py b/src/etl/sds/trigger/update/tests/_test_update_trigger.py similarity index 100% rename from src/etl/sds/trigger/update/tests/test_update_trigger.py rename to src/etl/sds/trigger/update/tests/_test_update_trigger.py diff --git a/src/etl/sds/worker/load_bulk/tests/test_load_bulk_worker.py b/src/etl/sds/worker/load_bulk/tests/_test_load_bulk_worker.py similarity index 96% rename from src/etl/sds/worker/load_bulk/tests/test_load_bulk_worker.py rename to src/etl/sds/worker/load_bulk/tests/_test_load_bulk_worker.py index 41e9c50e..8ae30e64 100644 --- a/src/etl/sds/worker/load_bulk/tests/test_load_bulk_worker.py +++ b/src/etl/sds/worker/load_bulk/tests/_test_load_bulk_worker.py @@ -5,11 +5,11 @@ from uuid import UUID import pytest -from domain.core.device.v2 import Device, DeviceCreatedEvent, DeviceType -from domain.core.device_key.v2 import DeviceKeyType +from domain.core.device import Device, DeviceCreatedEvent, DeviceType +from domain.core.device_key import DeviceKeyType from domain.repository.compression import pkl_loads_gzip -from domain.repository.device_repository.v2 import DeviceRepository -from domain.repository.keys.v2 import TableKey +from domain.repository.device_repository import DeviceRepository +from domain.repository.keys import TableKey from domain.repository.marshall import unmarshall from etl_utils.constants import WorkerKey from etl_utils.io import pkl_dumps_lz4 diff --git a/src/etl/sds/worker/load_bulk_reduce/tests/test_load_bulk_reduce.py b/src/etl/sds/worker/load_bulk_reduce/tests/_test_load_bulk_reduce.py similarity index 100% rename from src/etl/sds/worker/load_bulk_reduce/tests/test_load_bulk_reduce.py rename to src/etl/sds/worker/load_bulk_reduce/tests/_test_load_bulk_reduce.py diff --git a/src/etl/sds/worker/load_update/load_update.py b/src/etl/sds/worker/load_update/load_update.py index 3192d5f6..a7471ed0 100644 --- a/src/etl/sds/worker/load_update/load_update.py +++ b/src/etl/sds/worker/load_update/load_update.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING from domain.core.aggregate_root import AggregateRoot -from domain.core.device.v2 import DeviceEventDeserializer +from domain.core.device import DeviceEventDeserializer from etl_utils.constants import WorkerKey from etl_utils.io import pkl_dump_lz4, pkl_load_lz4 from etl_utils.smart_open import smart_open diff --git a/src/etl/sds/worker/load_update/tests/test_load_update_worker.py b/src/etl/sds/worker/load_update/tests/_test_load_update_worker.py similarity index 98% rename from src/etl/sds/worker/load_update/tests/test_load_update_worker.py rename to src/etl/sds/worker/load_update/tests/_test_load_update_worker.py index 0c38e3ee..12e97146 100644 --- 
a/src/etl/sds/worker/load_update/tests/test_load_update_worker.py +++ b/src/etl/sds/worker/load_update/tests/_test_load_update_worker.py @@ -4,14 +4,14 @@ from unittest import mock import pytest -from domain.core.device.v2 import Device +from domain.core.device import Device from etl_utils.constants import WorkerKey from etl_utils.io import pkl_dumps_lz4 from etl_utils.io.test.io_utils import pkl_loads_lz4 from moto import mock_aws from mypy_boto3_s3 import S3Client -from etl.sds.worker.load_bulk.tests.test_load_bulk_worker import ( +from etl.sds.worker.load_bulk.tests._test_load_bulk_worker import ( BUCKET_NAME, TABLE_NAME, MockDeviceRepository, diff --git a/src/etl/sds/worker/transform_bulk/tests/test_transform_bulk_worker.py b/src/etl/sds/worker/transform_bulk/tests/_test_transform_bulk_worker.py similarity index 100% rename from src/etl/sds/worker/transform_bulk/tests/test_transform_bulk_worker.py rename to src/etl/sds/worker/transform_bulk/tests/_test_transform_bulk_worker.py diff --git a/src/etl/sds/worker/transform_bulk/transform_bulk.py b/src/etl/sds/worker/transform_bulk/transform_bulk.py index bef80e09..6a8c9d97 100644 --- a/src/etl/sds/worker/transform_bulk/transform_bulk.py +++ b/src/etl/sds/worker/transform_bulk/transform_bulk.py @@ -3,7 +3,7 @@ from typing import TYPE_CHECKING import boto3 -from domain.core.device.v2 import Device +from domain.core.device import Device from domain.core.event import ExportedEventTypeDef from etl_utils.constants import WorkerKey from etl_utils.io import pkl_dump_lz4, pkl_dumps_lz4, pkl_load_lz4 diff --git a/src/etl/sds/worker/transform_update/tests/test_transform_update_worker.py b/src/etl/sds/worker/transform_update/tests/_test_transform_update_worker.py similarity index 98% rename from src/etl/sds/worker/transform_update/tests/test_transform_update_worker.py rename to src/etl/sds/worker/transform_update/tests/_test_transform_update_worker.py index ead7b743..bc30891a 100644 --- a/src/etl/sds/worker/transform_update/tests/test_transform_update_worker.py +++ b/src/etl/sds/worker/transform_update/tests/_test_transform_update_worker.py @@ -7,15 +7,15 @@ from uuid import uuid4 import pytest -from domain.core.device.v2 import DeviceType -from domain.core.root.v2 import Root +from domain.core.device import DeviceType +from domain.core.root import Root from etl_utils.constants import WorkerKey from etl_utils.io import pkl_dumps_lz4 from etl_utils.io.test.io_utils import pkl_loads_lz4 from moto import mock_aws from mypy_boto3_s3 import S3Client -from etl.sds.worker.transform_bulk.tests.test_transform_bulk_worker import ( +from etl.sds.worker.transform_bulk.tests._test_transform_bulk_worker import ( BAD_SDS_RECORD_AS_JSON, BUCKET_NAME, FATAL_SDS_RECORD_AS_JSON, diff --git a/src/etl/sds/worker/transform_update/transform_update.py b/src/etl/sds/worker/transform_update/transform_update.py index 2433c3fe..c7862816 100644 --- a/src/etl/sds/worker/transform_update/transform_update.py +++ b/src/etl/sds/worker/transform_update/transform_update.py @@ -4,7 +4,7 @@ import boto3 from domain.core.event import ExportedEventTypeDef -from domain.repository.device_repository.v2 import DeviceRepository +from domain.repository.device_repository import DeviceRepository from etl_utils.constants import WorkerKey from etl_utils.io import pkl_dump_lz4, pkl_load_lz4 from etl_utils.smart_open import smart_open diff --git a/src/etl/sds/worker/transform_update/utils.py b/src/etl/sds/worker/transform_update/utils.py index c09d1291..a1784115 100644 --- 
a/src/etl/sds/worker/transform_update/utils.py +++ b/src/etl/sds/worker/transform_update/utils.py @@ -1,6 +1,6 @@ from itertools import chain -from domain.core.device.v2 import Device +from domain.core.device import Device def export_events(devices: list[Device]) -> list[dict]: diff --git a/src/layers/domain/api/common_steps/create_device.py b/src/layers/domain/api/common_steps/create_device.py index 784a5580..21971c3b 100644 --- a/src/layers/domain/api/common_steps/create_device.py +++ b/src/layers/domain/api/common_steps/create_device.py @@ -6,10 +6,10 @@ read_product, read_product_team, ) -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device.v3 import Device -from domain.repository.device_repository.v3 import DeviceRepository -from domain.request_models.v1 import CreateDeviceIncomingParams +from domain.core.cpm_product import CpmProduct +from domain.core.device import Device +from domain.repository.device_repository import DeviceRepository +from domain.request_models import CreateDeviceIncomingParams from domain.response.validation_errors import mark_validation_errors_as_inbound diff --git a/src/layers/domain/api/common_steps/create_product.py b/src/layers/domain/api/common_steps/create_product.py index 77f17c4b..b7045972 100644 --- a/src/layers/domain/api/common_steps/create_product.py +++ b/src/layers/domain/api/common_steps/create_product.py @@ -4,13 +4,10 @@ from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent from domain.api.common_steps.general import parse_event_body from domain.core.cpm_product import CpmProduct -from domain.core.product_team.v3 import ProductTeam -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository -from domain.request_models.v1 import ( - CreateCpmProductIncomingParams, - ProductTeamPathParams, -) +from domain.core.product_team import ProductTeam +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.product_team_repository import ProductTeamRepository +from domain.request_models import CreateCpmProductIncomingParams, ProductTeamPathParams from domain.response.validation_errors import mark_validation_errors_as_inbound from event.step_chain import StepChain diff --git a/src/layers/domain/api/common_steps/read_product.py b/src/layers/domain/api/common_steps/read_product.py index d1513765..9324cd1d 100644 --- a/src/layers/domain/api/common_steps/read_product.py +++ b/src/layers/domain/api/common_steps/read_product.py @@ -1,13 +1,13 @@ from http import HTTPStatus from aws_lambda_powertools.utilities.data_classes import APIGatewayProxyEvent -from domain.core.cpm_product.v1 import CpmProduct +from domain.core.cpm_product import CpmProduct from domain.core.error import NotEprProductError -from domain.core.product_key.v1 import ProductKeyType -from domain.core.product_team.v3 import ProductTeam -from domain.repository.cpm_product_repository.v3 import CpmProductRepository -from domain.repository.product_team_repository.v2 import ProductTeamRepository -from domain.request_models.v1 import CpmProductPathParams +from domain.core.product_key import ProductKeyType +from domain.core.product_team import ProductTeam +from domain.repository.cpm_product_repository import CpmProductRepository +from domain.repository.product_team_repository import ProductTeamRepository +from domain.request_models import CpmProductPathParams from domain.response.validation_errors import 
mark_validation_errors_as_inbound from event.step_chain import StepChain diff --git a/src/layers/domain/api/common_steps/tests/test_create_product.py b/src/layers/domain/api/common_steps/tests/test_create_product.py index fd1a4bc4..4cac7182 100644 --- a/src/layers/domain/api/common_steps/tests/test_create_product.py +++ b/src/layers/domain/api/common_steps/tests/test_create_product.py @@ -2,10 +2,10 @@ import pytest from domain.api.common_steps.create_product import before_steps -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.root.v3 import Root +from domain.core.cpm_product import CpmProduct +from domain.core.root import Root from domain.repository.errors import ItemNotFound -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from domain.response.validation_errors import ( InboundJSONDecodeError, InboundValidationError, diff --git a/src/layers/domain/api/common_steps/tests/test_read_product.py b/src/layers/domain/api/common_steps/tests/test_read_product.py index e9039c99..5c69a842 100644 --- a/src/layers/domain/api/common_steps/tests/test_read_product.py +++ b/src/layers/domain/api/common_steps/tests/test_read_product.py @@ -1,10 +1,10 @@ import pytest from domain.api.common_steps.read_product import before_steps -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository +from domain.core.cpm_product import CpmProduct +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository from domain.repository.errors import ItemNotFound -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from domain.response.validation_errors import InboundValidationError from event.aws.client import dynamodb_client from event.step_chain import StepChain diff --git a/src/layers/domain/core/cpm_product/tests/test_cpm_product_v1.py b/src/layers/domain/core/cpm_product/tests/test_cpm_product_v1.py index 156a9680..38f3a6be 100644 --- a/src/layers/domain/core/cpm_product/tests/test_cpm_product_v1.py +++ b/src/layers/domain/core/cpm_product/tests/test_cpm_product_v1.py @@ -3,7 +3,7 @@ import pytest from domain.core.cpm_product import CpmProduct -from domain.core.cpm_system_id.v1 import ProductId +from domain.core.cpm_system_id import ProductId @pytest.fixture diff --git a/src/layers/domain/core/cpm_product/v1.py b/src/layers/domain/core/cpm_product/v1.py index b8857bb4..3fc8f87c 100644 --- a/src/layers/domain/core/cpm_product/v1.py +++ b/src/layers/domain/core/cpm_product/v1.py @@ -3,16 +3,16 @@ from attr import dataclass from domain.core import event from domain.core.aggregate_root import AggregateRoot -from domain.core.cpm_system_id.v1 import ProductId -from domain.core.device.v3 import UPDATED_ON, Device, DeviceCreatedEvent, event -from domain.core.device_reference_data.v1 import ( +from domain.core.cpm_system_id import ProductId +from domain.core.device import UPDATED_ON, Device, DeviceCreatedEvent, event +from domain.core.device_reference_data import ( DeviceReferenceData, DeviceReferenceDataCreatedEvent, ) from domain.core.enum import Status from domain.core.error import DuplicateError from domain.core.event import Event -from domain.core.product_key.v1 import ProductKey +from domain.core.product_key import ProductKey from 
domain.core.timestamp import now from domain.core.validation import CPM_PRODUCT_NAME_REGEX from pydantic import Field diff --git a/src/layers/domain/core/cpm_system_id/tests/test_cpm_system_id_v1.py b/src/layers/domain/core/cpm_system_id/tests/test_cpm_system_id_v1.py index df62e15e..545c6fb0 100644 --- a/src/layers/domain/core/cpm_system_id/tests/test_cpm_system_id_v1.py +++ b/src/layers/domain/core/cpm_system_id/tests/test_cpm_system_id_v1.py @@ -1,7 +1,5 @@ import pytest -from domain.core.cpm_system_id import AsidId, PartyKeyId - -from src.layers.domain.core.cpm_system_id.v1 import ProductId +from domain.core.cpm_system_id import AsidId, PartyKeyId, ProductId def test_party_key_generator_format_key(): diff --git a/src/layers/domain/core/cpm_system_id/v1.py b/src/layers/domain/core/cpm_system_id/v1.py index 08b51cb9..edadc455 100644 --- a/src/layers/domain/core/cpm_system_id/v1.py +++ b/src/layers/domain/core/cpm_system_id/v1.py @@ -5,9 +5,9 @@ from uuid import uuid4 from domain.core.base import BaseModel -from domain.core.device_key.v1 import validate_key +from domain.core.device_key import validate_key from domain.core.error import InvalidKeyPattern -from domain.core.product_key.v1 import ProductKeyType +from domain.core.product_key import ProductKeyType from pydantic import validator FIRST_ASID = 200000099999 @@ -91,7 +91,7 @@ def create(cls, current_number: int, ods_code: str): def validate_cpm_system_id(cls, cpm_system_id: str) -> bool: """Validate that the party key has the correct format.""" try: - validate_key(key=cpm_system_id, type=ProductKeyType.PARTY_KEY) + validate_key(key_value=cpm_system_id, key_type=ProductKeyType.PARTY_KEY) except InvalidKeyPattern: return False return True diff --git a/src/layers/domain/core/device/__init__.py b/src/layers/domain/core/device/__init__.py index c773f400..e0d08e67 100644 --- a/src/layers/domain/core/device/__init__.py +++ b/src/layers/domain/core/device/__init__.py @@ -1,18 +1 @@ -from .v1 import ( - Device, - DeviceCreatedEvent, - DeviceEventDeserializer, - DeviceIndexAddedEvent, - DeviceKey, - DeviceKeyAddedEvent, - DeviceKeyDeletedEvent, - DeviceKeyType, - DeviceStatus, - DeviceType, - DeviceUpdatedEvent, - QuestionnaireNotFoundError, - QuestionnaireResponseNotFoundError, - QuestionNotFoundError, - _get_questionnaire_responses, - _get_unique_answers, -) +from .v1 import * # noqa diff --git a/src/layers/domain/core/device/tests/test_device_v1.py b/src/layers/domain/core/device/tests/test_device_v1.py index 8933b0ea..9df07e13 100644 --- a/src/layers/domain/core/device/tests/test_device_v1.py +++ b/src/layers/domain/core/device/tests/test_device_v1.py @@ -1,31 +1,25 @@ +import json from datetime import datetime -from itertools import chain import pytest from domain.core.device import ( Device, - DeviceIndexAddedEvent, + DeviceDeletedEvent, DeviceKeyAddedEvent, DeviceKeyDeletedEvent, - DeviceStatus, - DeviceType, + DeviceTag, + DeviceTagAddedEvent, + DeviceTagsAddedEvent, + DeviceTagsClearedEvent, DeviceUpdatedEvent, - QuestionnaireNotFoundError, - QuestionnaireResponseNotFoundError, - QuestionNotFoundError, - _get_questionnaire_responses, - _get_unique_answers, -) -from domain.core.device_key import DeviceKey, DeviceKeyType -from domain.core.error import NotFoundError -from domain.core.questionnaire import ( - Questionnaire, - QuestionnaireInstanceEvent, - QuestionnaireResponse, - QuestionnaireResponseAddedEvent, - QuestionnaireResponseDeletedEvent, + DuplicateQuestionnaireResponse, QuestionnaireResponseUpdatedEvent, ) +from 
domain.core.device_key import DeviceKey, DeviceKeyType +from domain.core.enum import Status +from domain.core.error import DuplicateError, NotFoundError +from domain.core.questionnaire import Questionnaire, QuestionnaireResponse +from domain.core.questionnaire.tests.test_questionnaire_v1 import VALID_SCHEMA @pytest.fixture @@ -34,29 +28,29 @@ def device(): name="Foo", ods_code="ABC123", product_team_id="18934119-5780-4d28-b9be-0e6dff3908ba", - type=DeviceType.PRODUCT, + product_id="P.XXX-YYY", ) @pytest.fixture -def questionnaire_response() -> QuestionnaireResponse: - questionnaire = Questionnaire(name="foo", version=1) - questionnaire.add_question(name="question1") - return questionnaire.respond(responses=[{"question1": ["hi"]}]) +def questionnaire() -> Questionnaire: + return Questionnaire( + name="my-questionnaire", version="1", json_schema=json.dumps(VALID_SCHEMA) + ) @pytest.fixture -def another_good_questionnaire_response() -> QuestionnaireResponse: - questionnaire = Questionnaire(name="foo", version=1) - questionnaire.add_question(name="question1") - return questionnaire.respond(responses=[{"question1": ["bye"]}]) +def questionnaire_response(questionnaire: Questionnaire) -> QuestionnaireResponse: + questionnaire_response = questionnaire.validate({"size": 4, "colour": "white"}) + return questionnaire_response @pytest.fixture -def another_questionnaire_response() -> QuestionnaireResponse: - questionnaire = Questionnaire(name="bar", version=1) - questionnaire.add_question(name="question1") - return questionnaire.respond(responses=[{"question1": ["bye"]}]) +def another_good_questionnaire_response( + questionnaire: Questionnaire, +) -> QuestionnaireResponse: + questionnaire_response = questionnaire.validate({"size": 7, "colour": "black"}) + return questionnaire_response def test_device_created_with_datetime(device: Device): @@ -81,215 +75,152 @@ def test_device_delete(device: Device): device_created_on = device.created_on assert device.deleted_on == None event = device.delete() - assert device.status == DeviceStatus.INACTIVE + assert device.status == Status.INACTIVE + assert device.tags == set() assert device.created_on == device_created_on assert isinstance(device.deleted_on, datetime) assert device.updated_on == device.deleted_on - assert isinstance(event, DeviceUpdatedEvent) + assert isinstance(event, DeviceDeletedEvent) def test_device_add_key(device: Device): - event = device.add_key(type=DeviceKeyType.PRODUCT_ID, key="P.XXX-YYY") - assert device.keys == { - "P.XXX-YYY": DeviceKey(type=DeviceKeyType.PRODUCT_ID, key="P.XXX-YYY") - } + event = device.add_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") + assert device.keys == [ + DeviceKey(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") + ] assert isinstance(event, DeviceKeyAddedEvent) + assert event.updated_on is not None + assert event.updated_on == device.updated_on def test_device_delete_key(device: Device): - device.add_key(type=DeviceKeyType.PRODUCT_ID, key="P.XXX-YYY") - event = device.delete_key(key="P.XXX-YYY") - assert device.keys == {} + device.add_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") + event = device.delete_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") + assert device.keys == [] assert isinstance(event, DeviceKeyDeletedEvent) + assert event.updated_on is not None + assert event.updated_on == device.updated_on def test_device_delete_key_fail(device: Device): with pytest.raises(NotFoundError): - device.delete_key(key="P.XXX-YYY") + 
device.delete_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") def test_device_add_questionnaire_response( - device: Device, questionnaire_response: QuestionnaireResponse -): - events = device.add_questionnaire_response( - questionnaire_response=questionnaire_response - ) - assert device.questionnaire_responses == {"foo/1": [questionnaire_response]} - assert len(events) == 2 - assert isinstance(events[0], QuestionnaireInstanceEvent) - assert isinstance(events[1], QuestionnaireResponseAddedEvent) - - events = device.add_questionnaire_response( - questionnaire_response=questionnaire_response - ) - assert device.questionnaire_responses == { - "foo/1": [questionnaire_response, questionnaire_response] - } - assert len(events) == 1 - assert isinstance(events[0], QuestionnaireResponseAddedEvent) - - -def test_device_update_questionnaire_response( device: Device, questionnaire_response: QuestionnaireResponse, another_good_questionnaire_response: QuestionnaireResponse, ): - device.add_questionnaire_response(questionnaire_response=questionnaire_response) - event = device.update_questionnaire_response( - questionnaire_response=another_good_questionnaire_response, - questionnaire_response_index=0, + event = device.add_questionnaire_response( + questionnaire_response=questionnaire_response ) + original_updated_on = device.updated_on assert device.questionnaire_responses == { - "foo/1": [another_good_questionnaire_response] + "my-questionnaire/1": [questionnaire_response] } assert isinstance(event, QuestionnaireResponseUpdatedEvent) + assert event.updated_on is not None + assert event.updated_on == device.updated_on + event_2 = device.add_questionnaire_response( + questionnaire_response=another_good_questionnaire_response + ) -def test_device_update_questionnaire_response_index_error( - device: Device, questionnaire_response: QuestionnaireResponse -): - device.add_questionnaire_response(questionnaire_response=questionnaire_response) - with pytest.raises(QuestionnaireResponseNotFoundError): - device.update_questionnaire_response( - questionnaire_response=questionnaire_response, - questionnaire_response_index=1, - ) - - -def test_device_update_questionnaire_response_key_error( - device: Device, questionnaire_response: QuestionnaireResponse -): - with pytest.raises(QuestionnaireNotFoundError): - device.update_questionnaire_response( - questionnaire_response=questionnaire_response, - questionnaire_response_index=0, - ) + assert device.questionnaire_responses == { + "my-questionnaire/1": [ + questionnaire_response, + another_good_questionnaire_response, + ] + } + assert device.updated_on == event_2.updated_on + assert device.updated_on > original_updated_on -def test_device_delete_questionnaire_response( - device: Device, questionnaire_response: QuestionnaireResponse -): - device.add_questionnaire_response(questionnaire_response=questionnaire_response) - event = device.delete_questionnaire_response( - questionnaire_id="foo/1", questionnaire_response_index=0 - ) - assert device.questionnaire_responses == {} - assert isinstance(event, QuestionnaireResponseDeletedEvent) + assert isinstance(event_2, QuestionnaireResponseUpdatedEvent) + assert event_2.updated_on is not None + assert event_2.updated_on > event.updated_on + assert event_2.updated_on == device.updated_on -def test_device_delete_questionnaire_response_index_error( +def test_device_cannot_add_same_questionnaire_response_twice( device: Device, questionnaire_response: QuestionnaireResponse ): 
device.add_questionnaire_response(questionnaire_response=questionnaire_response) - with pytest.raises(QuestionnaireResponseNotFoundError): - device.delete_questionnaire_response( - questionnaire_id="foo/1", questionnaire_response_index=1 - ) - - -def test_device_delete_questionnaire_response_key_error(device: Device): - with pytest.raises(QuestionnaireNotFoundError): - device.delete_questionnaire_response( - questionnaire_id="bar/1", questionnaire_response_index=0 - ) - - -def test__get_unique_answers(): - questionnaire = Questionnaire(name="foo", version=1) - questionnaire.add_question(name="question1", multiple=True) - questionnaire_response_1 = questionnaire.respond( - [ - {"question1": ["foo"]}, - {"question1": ["bar"]}, - {"question1": ["foo"]}, - ] - ) + with pytest.raises(DuplicateQuestionnaireResponse): + device.add_questionnaire_response(questionnaire_response=questionnaire_response) - questionnaire_response_2 = questionnaire.respond( - [ - {"question1": ["baz", "BAR"]}, - {"question1": ["foo"]}, - ] - ) - questionnaire_response_3 = questionnaire.respond( - [ - {"question1": ["FOO"]}, - {"question1": ["bar"]}, - {"question1": ["foo"]}, - ] - ) +def test_device_add_tag(device: Device): + event_1 = device.add_tag(foo="first", bar="second") + assert isinstance(event_1, DeviceTagAddedEvent) + assert [tag.value for tag in device.tags] == ["bar=second&foo=first"] + assert event_1.updated_on is not None + assert event_1.updated_on == device.updated_on - unique_answers = _get_unique_answers( - questionnaire_responses=[ - questionnaire_response_1, - questionnaire_response_2, - questionnaire_response_3, - ], - question_name="question1", - ) + event_2 = device.add_tag(foo="first", bar="second", baz="third") + assert event_2.updated_on is not None + assert event_2.updated_on == device.updated_on - assert unique_answers == {"foo", "bar", "FOO", "BAR", "baz"} + with pytest.raises(DuplicateError): + device.add_tag(bar="second", foo="first") - -def test__get_questionnaire_responses(): - questionnaire = Questionnaire(name="foo", version=1) - questionnaire.add_question(name="question1") - questionnaire_response = questionnaire.respond([{"question1": ["foo"]}]) - questionnaire_responses = [questionnaire_response] - assert ( - _get_questionnaire_responses( - questionnaire_responses={questionnaire.id: questionnaire_responses}, - questionnaire_id=questionnaire.id, - ) - == questionnaire_responses + assert sorted(tag.value for tag in device.tags) == sorted( + ["bar=second&foo=first", "bar=second&baz=third&foo=first"] ) + assert event_2.updated_on > event_1.updated_on -def test_device_add_index(device: Device): - questionnaire = Questionnaire(name="foo", version=1) - questionnaire.add_question(name="question1", multiple=True) + event_3 = device.clear_tags() + assert isinstance(event_3, DeviceTagsClearedEvent) + assert event_3.updated_on > event_2.updated_on + assert device.tags == set() - N_QUESTIONNAIRE_RESPONSES = 123 - N_UNIQUE_ANSWERS = 7 - answers = [["a", "b", "c"], ["d"], ["e", "f", "g"], ["a"], ["b", "c"]] - assert len(set(chain.from_iterable(answers))) == N_UNIQUE_ANSWERS +def test_device_add_tags_in_one_go(device: Device): + event = device.add_tags( + tags=[ + dict(foo="first", bar="second"), + dict(foo="first", bar="second", baz="third"), + ] + ) + assert isinstance(event, DeviceTagsAddedEvent) + assert event.updated_on is not None + assert event.updated_on == device.updated_on - for _ in range(N_QUESTIONNAIRE_RESPONSES): - for _answers in answers: - questionnaire_response = 
questionnaire.respond( - responses=[{"question1": _answers}] - ) - device.add_questionnaire_response( - questionnaire_response=questionnaire_response - ) + with pytest.raises(DuplicateError): + device.add_tags([dict(bar="second", foo="first")]) - events = device.add_index(questionnaire_id="foo/1", question_name="question1") - assert len(events) == N_UNIQUE_ANSWERS - assert all(isinstance(event, DeviceIndexAddedEvent) for event in events) + assert sorted(tag.value for tag in device.tags) == sorted( + ["bar=second&foo=first", "bar=second&baz=third&foo=first"] + ) - for answer in ["a", "b", "c", "d", "e", "f", "g"]: - assert (questionnaire.id, "question1", answer) in device.indexes - assert (questionnaire.id, "question1", "foo") not in device.indexes +def test_device_tag_from__root__(): + tag = DeviceTag(foo="bAr", boo="FaR") + assert tag.components == tuple((("boo", "far"), ("foo", "bar"))) # lowercased + tag_as_dict = tag.dict() + reconstituted_tag = DeviceTag(__root__=tag.__root__) -def test_device_add_index_no_such_questionnaire(device: Device): - with pytest.raises(QuestionnaireNotFoundError): - device.add_index(questionnaire_id="foo/1", question_name="question1") + assert tag_as_dict == tuple(tag.__root__) + assert reconstituted_tag == tag + assert reconstituted_tag in {tag} -def test_device_add_index_no_such_questionnaire_response(device: Device): - device.questionnaire_responses["foo/1"] = [] - with pytest.raises(QuestionnaireResponseNotFoundError): - device.add_index(questionnaire_id="foo/1", question_name="question1") +def test_device_tag_from_kwargs(): + tag = DeviceTag(foo="bar", boo="far") + tag_as_dict = tag.dict() + reconstituted_tag = DeviceTag(**{k: v for k, v in tag_as_dict}) + assert tag_as_dict == tuple(tag.components) + assert reconstituted_tag == tag + assert reconstituted_tag in {tag} -def test_device_add_index_no_such_question(device: Device): - questionnaire = Questionnaire(name="foo", version=1) - questionnaire_response = questionnaire.respond(responses=[]) - device.add_questionnaire_response(questionnaire_response=questionnaire_response) - with pytest.raises(QuestionNotFoundError): - device.add_index(questionnaire_id="foo/1", question_name="question1") +def test_device_state_tags(device: Device): + device.add_tag(foo="bar") + (device_tag,) = device.tags + (state_tag,) = device.state()["tags"] + assert isinstance(state_tag, list) + assert state_tag == [list(component) for component in device_tag.components] diff --git a/src/layers/domain/core/device/tests/test_device_v2.py b/src/layers/domain/core/device/tests/test_device_v2.py deleted file mode 100644 index 061fc2b3..00000000 --- a/src/layers/domain/core/device/tests/test_device_v2.py +++ /dev/null @@ -1,272 +0,0 @@ -from datetime import datetime - -import pytest -from domain.core.device.v2 import ( - Device, - DeviceDeletedEvent, - DeviceKeyAddedEvent, - DeviceKeyDeletedEvent, - DeviceTag, - DeviceTagAddedEvent, - DeviceTagsAddedEvent, - DeviceTagsClearedEvent, - DeviceType, - DeviceUpdatedEvent, - DuplicateQuestionnaireResponse, - QuestionnaireNotFoundError, - QuestionnaireResponseNotFoundError, - QuestionnaireResponseUpdatedEvent, -) -from domain.core.device_key.v2 import DeviceKey, DeviceKeyType -from domain.core.enum import Status -from domain.core.error import DuplicateError, NotFoundError -from domain.core.questionnaire.v2 import Questionnaire, QuestionnaireResponse - - -@pytest.fixture -def device_v2(): - return Device( - name="Foo", - ods_code="ABC123", - 
product_team_id="18934119-5780-4d28-b9be-0e6dff3908ba", - device_type=DeviceType.PRODUCT, - ) - - -@pytest.fixture -def questionnaire_response() -> QuestionnaireResponse: - questionnaire = Questionnaire(name="foo", version=2) - questionnaire.add_question(name="question1") - return questionnaire.respond(responses=[{"question1": ["hi"]}]) - - -@pytest.fixture -def another_good_questionnaire_response() -> QuestionnaireResponse: - questionnaire = Questionnaire(name="foo", version=2) - questionnaire.add_question(name="question1") - return questionnaire.respond(responses=[{"question1": ["bye"]}]) - - -@pytest.fixture -def another_questionnaire_response() -> QuestionnaireResponse: - questionnaire = Questionnaire(name="bar", version=2) - questionnaire.add_question(name="question1") - return questionnaire.respond(responses=[{"question1": ["bye"]}]) - - -def test_device_created_with_datetime(device_v2: Device): - assert isinstance(device_v2.created_on, datetime) - assert device_v2.updated_on == None - assert device_v2.deleted_on == None - - -def test_device_update(device_v2: Device): - device_created_on = device_v2.created_on - device_updated_on = device_v2.updated_on - event = device_v2.update(name="bar") - assert device_v2.name == "bar" - assert device_v2.deleted_on == None - assert isinstance(device_v2.updated_on, datetime) - assert device_v2.updated_on != device_updated_on - assert device_v2.created_on == device_created_on - assert isinstance(event, DeviceUpdatedEvent) - - -def test_device_delete(device_v2: Device): - device_created_on = device_v2.created_on - assert device_v2.deleted_on == None - event = device_v2.delete() - assert device_v2.status == Status.INACTIVE - assert device_v2.tags == set() - assert device_v2.created_on == device_created_on - assert isinstance(device_v2.deleted_on, datetime) - assert device_v2.updated_on == device_v2.deleted_on - assert isinstance(event, DeviceDeletedEvent) - - -def test_device_add_key(device_v2: Device): - event = device_v2.add_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - assert device_v2.keys == [ - DeviceKey(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - ] - assert isinstance(event, DeviceKeyAddedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v2.updated_on - - -def test_device_delete_key(device_v2: Device): - device_v2.add_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - event = device_v2.delete_key( - key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY" - ) - assert device_v2.keys == [] - assert isinstance(event, DeviceKeyDeletedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v2.updated_on - - -def test_device_delete_key_fail(device_v2: Device): - with pytest.raises(NotFoundError): - device_v2.delete_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - - -def test_device_add_questionnaire_response( - device_v2: Device, - questionnaire_response: QuestionnaireResponse, - another_good_questionnaire_response: QuestionnaireResponse, -): - event = device_v2.add_questionnaire_response( - questionnaire_response=questionnaire_response - ) - created_on_1 = questionnaire_response.created_on.isoformat() - original_updated_on = device_v2.updated_on - assert device_v2.questionnaire_responses == { - "foo/2": {created_on_1: questionnaire_response} - } - assert isinstance(event, QuestionnaireResponseUpdatedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v2.updated_on - - event_2 = 
device_v2.add_questionnaire_response( - questionnaire_response=another_good_questionnaire_response - ) - created_on_2 = another_good_questionnaire_response.created_on.isoformat() - assert device_v2.questionnaire_responses == { - "foo/2": { - created_on_1: questionnaire_response, - created_on_2: another_good_questionnaire_response, - } - } - - assert device_v2.updated_on == event_2.updated_on - assert device_v2.updated_on > original_updated_on - - assert isinstance(event_2, QuestionnaireResponseUpdatedEvent) - assert event_2.updated_on is not None - assert event_2.updated_on > event.updated_on - assert event_2.updated_on == device_v2.updated_on - - -def test_device_cannot_add_same_questionnaire_response_twice( - device_v2: Device, questionnaire_response: QuestionnaireResponse -): - device_v2.add_questionnaire_response(questionnaire_response=questionnaire_response) - with pytest.raises(DuplicateQuestionnaireResponse): - device_v2.add_questionnaire_response( - questionnaire_response=questionnaire_response - ) - - -def test_device_update_questionnaire_response( - device_v2: Device, - questionnaire_response: QuestionnaireResponse, - another_good_questionnaire_response: QuestionnaireResponse, -): - created_on = questionnaire_response.created_on - another_good_questionnaire_response.created_on = created_on - - device_v2.add_questionnaire_response(questionnaire_response=questionnaire_response) - event = device_v2.update_questionnaire_response( - questionnaire_response=another_good_questionnaire_response - ) - assert device_v2.questionnaire_responses == { - "foo/2": {created_on.isoformat(): another_good_questionnaire_response} - } - assert isinstance(event, QuestionnaireResponseUpdatedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v2.updated_on - - -def test_device_update_questionnaire_response_mismatching_created_on_error( - device_v2: Device, - questionnaire_response: QuestionnaireResponse, - another_good_questionnaire_response: QuestionnaireResponse, -): - device_v2.add_questionnaire_response(questionnaire_response=questionnaire_response) - with pytest.raises(QuestionnaireResponseNotFoundError): - device_v2.update_questionnaire_response( - questionnaire_response=another_good_questionnaire_response - ) - - -def test_device_update_questionnaire_response_key_error( - device_v2: Device, questionnaire_response: QuestionnaireResponse -): - with pytest.raises(QuestionnaireNotFoundError): - device_v2.update_questionnaire_response( - questionnaire_response=questionnaire_response - ) - - -def test_device_add_tag(device_v2: Device): - event_1 = device_v2.add_tag(foo="first", bar="second") - assert isinstance(event_1, DeviceTagAddedEvent) - assert [tag.value for tag in device_v2.tags] == ["bar=second&foo=first"] - assert event_1.updated_on is not None - assert event_1.updated_on == device_v2.updated_on - - event_2 = device_v2.add_tag(foo="first", bar="second", baz="third") - assert event_2.updated_on is not None - assert event_2.updated_on == device_v2.updated_on - - with pytest.raises(DuplicateError): - device_v2.add_tag(bar="second", foo="first") - - assert sorted(tag.value for tag in device_v2.tags) == sorted( - ["bar=second&foo=first", "bar=second&baz=third&foo=first"] - ) - - assert event_2.updated_on > event_1.updated_on - - event_3 = device_v2.clear_tags() - assert isinstance(event_3, DeviceTagsClearedEvent) - assert event_3.updated_on > event_2.updated_on - assert device_v2.tags == set() - - -def test_device_add_tags_in_one_go(device_v2: Device): - event = 
device_v2.add_tags( - tags=[ - dict(foo="first", bar="second"), - dict(foo="first", bar="second", baz="third"), - ] - ) - assert isinstance(event, DeviceTagsAddedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v2.updated_on - - with pytest.raises(DuplicateError): - device_v2.add_tags([dict(bar="second", foo="first")]) - - assert sorted(tag.value for tag in device_v2.tags) == sorted( - ["bar=second&foo=first", "bar=second&baz=third&foo=first"] - ) - - -def test_device_tag_from__root__(): - tag = DeviceTag(foo="bAr", boo="FaR") - assert tag.components == tuple((("boo", "far"), ("foo", "bar"))) # lowercased - tag_as_dict = tag.dict() - reconstituted_tag = DeviceTag(__root__=tag.__root__) - - assert tag_as_dict == tuple(tag.__root__) - assert reconstituted_tag == tag - assert reconstituted_tag in {tag} - - -def test_device_tag_from_kwargs(): - tag = DeviceTag(foo="bar", boo="far") - tag_as_dict = tag.dict() - reconstituted_tag = DeviceTag(**{k: v for k, v in tag_as_dict}) - - assert tag_as_dict == tuple(tag.components) - assert reconstituted_tag == tag - assert reconstituted_tag in {tag} - - -def test_device_state_tags(device_v2: Device): - device_v2.add_tag(foo="bar") - (device_tag,) = device_v2.tags - (state_tag,) = device_v2.state()["tags"] - assert isinstance(state_tag, list) - assert state_tag == [list(component) for component in device_tag.components] diff --git a/src/layers/domain/core/device/tests/test_device_v3.py b/src/layers/domain/core/device/tests/test_device_v3.py deleted file mode 100644 index 244ecdd0..00000000 --- a/src/layers/domain/core/device/tests/test_device_v3.py +++ /dev/null @@ -1,230 +0,0 @@ -import json -from datetime import datetime - -import pytest -from domain.core.device.v3 import ( - Device, - DeviceDeletedEvent, - DeviceKeyAddedEvent, - DeviceKeyDeletedEvent, - DeviceTag, - DeviceTagAddedEvent, - DeviceTagsAddedEvent, - DeviceTagsClearedEvent, - DeviceUpdatedEvent, - DuplicateQuestionnaireResponse, - QuestionnaireResponseUpdatedEvent, -) -from domain.core.device_key.v2 import DeviceKey, DeviceKeyType -from domain.core.enum import Status -from domain.core.error import DuplicateError, NotFoundError -from domain.core.questionnaire.tests.test_questionnaire_v3 import VALID_SCHEMA -from domain.core.questionnaire.v3 import Questionnaire, QuestionnaireResponse - - -@pytest.fixture -def device_v3(): - return Device( - name="Foo", - ods_code="ABC123", - product_team_id="18934119-5780-4d28-b9be-0e6dff3908ba", - product_id="P.XXX-YYY", - ) - - -@pytest.fixture -def questionnaire() -> Questionnaire: - return Questionnaire( - name="my-questionnaire", version="1", json_schema=json.dumps(VALID_SCHEMA) - ) - - -@pytest.fixture -def questionnaire_response(questionnaire: Questionnaire) -> QuestionnaireResponse: - questionnaire_response = questionnaire.validate({"size": 4, "colour": "white"}) - return questionnaire_response - - -@pytest.fixture -def another_good_questionnaire_response( - questionnaire: Questionnaire, -) -> QuestionnaireResponse: - questionnaire_response = questionnaire.validate({"size": 7, "colour": "black"}) - return questionnaire_response - - -def test_device_created_with_datetime(device_v3: Device): - assert isinstance(device_v3.created_on, datetime) - assert device_v3.updated_on == None - assert device_v3.deleted_on == None - - -def test_device_update(device_v3: Device): - device_created_on = device_v3.created_on - device_updated_on = device_v3.updated_on - event = device_v3.update(name="bar") - assert device_v3.name == "bar" - 
assert device_v3.deleted_on == None - assert isinstance(device_v3.updated_on, datetime) - assert device_v3.updated_on != device_updated_on - assert device_v3.created_on == device_created_on - assert isinstance(event, DeviceUpdatedEvent) - - -def test_device_delete(device_v3: Device): - device_created_on = device_v3.created_on - assert device_v3.deleted_on == None - event = device_v3.delete() - assert device_v3.status == Status.INACTIVE - assert device_v3.tags == set() - assert device_v3.created_on == device_created_on - assert isinstance(device_v3.deleted_on, datetime) - assert device_v3.updated_on == device_v3.deleted_on - assert isinstance(event, DeviceDeletedEvent) - - -def test_device_add_key(device_v3: Device): - event = device_v3.add_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - assert device_v3.keys == [ - DeviceKey(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - ] - assert isinstance(event, DeviceKeyAddedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v3.updated_on - - -def test_device_delete_key(device_v3: Device): - device_v3.add_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - event = device_v3.delete_key( - key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY" - ) - assert device_v3.keys == [] - assert isinstance(event, DeviceKeyDeletedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v3.updated_on - - -def test_device_delete_key_fail(device_v3: Device): - with pytest.raises(NotFoundError): - device_v3.delete_key(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.XXX-YYY") - - -def test_device_add_questionnaire_response( - device_v3: Device, - questionnaire_response: QuestionnaireResponse, - another_good_questionnaire_response: QuestionnaireResponse, -): - event = device_v3.add_questionnaire_response( - questionnaire_response=questionnaire_response - ) - original_updated_on = device_v3.updated_on - - assert device_v3.questionnaire_responses == { - "my-questionnaire/1": [questionnaire_response] - } - assert isinstance(event, QuestionnaireResponseUpdatedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v3.updated_on - - event_2 = device_v3.add_questionnaire_response( - questionnaire_response=another_good_questionnaire_response - ) - - assert device_v3.questionnaire_responses == { - "my-questionnaire/1": [ - questionnaire_response, - another_good_questionnaire_response, - ] - } - - assert device_v3.updated_on == event_2.updated_on - assert device_v3.updated_on > original_updated_on - - assert isinstance(event_2, QuestionnaireResponseUpdatedEvent) - assert event_2.updated_on is not None - assert event_2.updated_on > event.updated_on - assert event_2.updated_on == device_v3.updated_on - - -def test_device_cannot_add_same_questionnaire_response_twice( - device_v3: Device, questionnaire_response: QuestionnaireResponse -): - device_v3.add_questionnaire_response(questionnaire_response=questionnaire_response) - with pytest.raises(DuplicateQuestionnaireResponse): - device_v3.add_questionnaire_response( - questionnaire_response=questionnaire_response - ) - - -def test_device_add_tag(device_v3: Device): - event_1 = device_v3.add_tag(foo="first", bar="second") - assert isinstance(event_1, DeviceTagAddedEvent) - assert [tag.value for tag in device_v3.tags] == ["bar=second&foo=first"] - assert event_1.updated_on is not None - assert event_1.updated_on == device_v3.updated_on - - event_2 = device_v3.add_tag(foo="first", bar="second", baz="third") - assert 
event_2.updated_on is not None - assert event_2.updated_on == device_v3.updated_on - - with pytest.raises(DuplicateError): - device_v3.add_tag(bar="second", foo="first") - - assert sorted(tag.value for tag in device_v3.tags) == sorted( - ["bar=second&foo=first", "bar=second&baz=third&foo=first"] - ) - - assert event_2.updated_on > event_1.updated_on - - event_3 = device_v3.clear_tags() - assert isinstance(event_3, DeviceTagsClearedEvent) - assert event_3.updated_on > event_2.updated_on - assert device_v3.tags == set() - - -def test_device_add_tags_in_one_go(device_v3: Device): - event = device_v3.add_tags( - tags=[ - dict(foo="first", bar="second"), - dict(foo="first", bar="second", baz="third"), - ] - ) - assert isinstance(event, DeviceTagsAddedEvent) - assert event.updated_on is not None - assert event.updated_on == device_v3.updated_on - - with pytest.raises(DuplicateError): - device_v3.add_tags([dict(bar="second", foo="first")]) - - assert sorted(tag.value for tag in device_v3.tags) == sorted( - ["bar=second&foo=first", "bar=second&baz=third&foo=first"] - ) - - -def test_device_tag_from__root__(): - tag = DeviceTag(foo="bAr", boo="FaR") - assert tag.components == tuple((("boo", "far"), ("foo", "bar"))) # lowercased - tag_as_dict = tag.dict() - reconstituted_tag = DeviceTag(__root__=tag.__root__) - - assert tag_as_dict == tuple(tag.__root__) - assert reconstituted_tag == tag - assert reconstituted_tag in {tag} - - -def test_device_tag_from_kwargs(): - tag = DeviceTag(foo="bar", boo="far") - tag_as_dict = tag.dict() - reconstituted_tag = DeviceTag(**{k: v for k, v in tag_as_dict}) - - assert tag_as_dict == tuple(tag.components) - assert reconstituted_tag == tag - assert reconstituted_tag in {tag} - - -def test_device_state_tags(device_v3: Device): - device_v3.add_tag(foo="bar") - (device_tag,) = device_v3.tags - (state_tag,) = device_v3.state()["tags"] - assert isinstance(state_tag, list) - assert state_tag == [list(component) for component in device_tag.components] diff --git a/src/layers/domain/core/device/v1.py b/src/layers/domain/core/device/v1.py index b8159760..d6498f55 100644 --- a/src/layers/domain/core/device/v1.py +++ b/src/layers/domain/core/device/v1.py @@ -1,25 +1,25 @@ from collections import defaultdict -from datetime import datetime, timezone -from enum import StrEnum, auto -from itertools import chain -from typing import Any, Optional +from datetime import datetime +from functools import cached_property +from urllib.parse import parse_qs, urlencode from uuid import UUID, uuid4 -from attr import dataclass, field -from domain.core.aggregate_root import AggregateRoot -from domain.core.device_key import DeviceKey, DeviceKeyType +import orjson +from attr import dataclass +from domain.core.aggregate_root import UPDATED_ON, AggregateRoot, event +from domain.core.base import BaseModel +from domain.core.cpm_system_id import ProductId +from domain.core.device_key import DeviceKey from domain.core.enum import Status from domain.core.error import DuplicateError, NotFoundError from domain.core.event import Event, EventDeserializer -from domain.core.questionnaire import ( - QuestionnaireInstanceEvent, - QuestionnaireResponse, - QuestionnaireResponseAddedEvent, - QuestionnaireResponseDeletedEvent, - QuestionnaireResponseUpdatedEvent, -) +from domain.core.questionnaire import QuestionnaireResponse +from domain.core.timestamp import now from domain.core.validation import DEVICE_NAME_REGEX -from pydantic import Field +from pydantic import Field, root_validator + +UPDATED_ON = "updated_on" 
+DEVICE_UPDATED_ON = f"device_{UPDATED_ON}" class QuestionnaireNotFoundError(Exception): @@ -30,7 +30,7 @@ class QuestionnaireResponseNotFoundError(Exception): pass -class QuestionNotFoundError(Exception): +class DuplicateQuestionnaireResponse(Exception): pass @@ -38,102 +38,185 @@ class QuestionNotFoundError(Exception): class DeviceCreatedEvent(Event): id: str name: str - type: "DeviceType" product_team_id: UUID + product_id: ProductId ods_code: str - status: "DeviceStatus" + status: Status created_on: str - updated_on: Optional[str] = None - deleted_on: Optional[str] = None - _trust: bool = field(alias="_trust", default=False) + updated_on: str = None + deleted_on: str = None + keys: list[DeviceKey] + tags: list[str] + questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] @dataclass(kw_only=True, slots=True) class DeviceUpdatedEvent(Event): id: str name: str - type: "DeviceType" product_team_id: UUID + product_id: ProductId + ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + keys: list[DeviceKey] + tags: list[str] + questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] + + +@dataclass(kw_only=True, slots=True) +class DeviceDeletedEvent(Event): + id: str + name: str + product_team_id: UUID + product_id: ProductId ods_code: str - status: "DeviceStatus" + status: Status created_on: str - updated_on: str - deleted_on: Optional[str] = None + updated_on: str = None + deleted_on: str = None + keys: list[DeviceKey] + tags: list[str] + questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] + deleted_tags: list[str] = None @dataclass(kw_only=True, slots=True) class DeviceKeyAddedEvent(Event): + new_key: DeviceKey id: str - key: str - type: DeviceKeyType - _trust: bool = field(alias="_trust", default=False) + name: str + product_team_id: UUID + product_id: ProductId + ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + keys: list[DeviceKey] + tags: list[str] + questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] @dataclass(kw_only=True, slots=True) class DeviceKeyDeletedEvent(Event): + deleted_key: DeviceKey + id: str + keys: list[DeviceKey] + tags: list[str] + updated_on: str = None + + +@dataclass(kw_only=True, slots=True) +class DeviceTagAddedEvent(Event): + new_tag: str id: str - key: str + name: str + product_team_id: UUID + product_id: ProductId + ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + keys: list[DeviceKey] + tags: list[str] + questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] @dataclass(kw_only=True, slots=True) -class DeviceIndexAddedEvent(Event): +class DeviceTagsAddedEvent(Event): + new_tags: list[str] id: str - questionnaire_id: str - question_name: str - value: str + name: str + product_team_id: UUID + product_id: ProductId + ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + keys: list[DeviceKey] + tags: list[str] + questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] -class DeviceType(StrEnum): +@dataclass(kw_only=True, slots=True) +class DeviceTagsClearedEvent(Event): + id: str + keys: list[dict] + deleted_tags: list[str] + updated_on: str = None + + +@dataclass(kw_only=True, slots=True) +class QuestionnaireResponseUpdatedEvent(Event): """ - A Product is to be classified as being one of the following. 
These terms - were provided by Aubyn Crawford in collaboration with Service Now. + This is adding the initial questionnaire response from the event body request. + """ + + id: str + questionnaire_responses: dict[str, list[QuestionnaireResponse]] + keys: list[DeviceKey] + tags: list[str] + updated_on: str = None + - NOTE: - A 'SERVICE' and 'API' is NOT what a developer would expect them to be. - These are terms from the problem domain and relate to how Assurance - is performed. +class DeviceTag(BaseModel): + """ + DeviceTag is a mechanism for indexing Device data. In DynamoDB the intention is for this + to be translated into a duplicated record in the database, so that Devices with the + same DeviceTag can be queried directly, therefore mimicking efficient search-like behaviour. """ - PRODUCT = auto() - ENDPOINT = auto() - # SERVICE = auto() - # API = auto() + __root__: list[tuple[str, str]] + + class Config: + arbitrary_types_allowed = True + keep_untouched = (cached_property,) + + @root_validator(pre=True) + def encode_tag(cls, values: dict): + initialised_with_root = "__root__" in values and len(values) == 1 + item_to_process = values["__root__"] if initialised_with_root else values + + # Case 1: query string is provided (__root__="foo=bar") + if initialised_with_root and isinstance(item_to_process, str): + _components = ((k, v) for k, (v,) in parse_qs(item_to_process).items()) + # Case 2: query components are provided (__root__=("foo", "bar")) + elif initialised_with_root: + _components = ((k, v) for k, v in item_to_process) + # Case 3: query components are directly provided (("foo", "bar")) + else: + _components = ((k, str(v)) for k, v in item_to_process.items()) + + case_insensitive_components = tuple( + sorted((k, v.lower()) for k, v in _components) + ) + return {"__root__": case_insensitive_components} + def dict(self, *args, **kwargs): + return self.components -DeviceStatus = Status + @cached_property + def components(self): + return tuple(self.__root__) + @cached_property + def hash(self): + return hash(self.components) -def _get_unique_answers( - questionnaire_responses: list[QuestionnaireResponse], question_name: str -): - all_responses = chain.from_iterable( - _questionnaire_response.responses - for _questionnaire_response in questionnaire_responses - ) - matching_responses = filter( - lambda response: question_name in response, all_responses - ) - matching_response_answers = ( - answer for responses in matching_responses for answer in responses.values() - ) - unique_answers = set(chain.from_iterable(matching_response_answers)) - return unique_answers - - -def _get_questionnaire_responses( - questionnaire_responses: dict[str, list[QuestionnaireResponse]], - questionnaire_id: str, -) -> list[QuestionnaireResponse]: - _questionnaire_responses = questionnaire_responses.get(questionnaire_id) - if _questionnaire_responses is None: - raise QuestionnaireNotFoundError( - f"This device does not contain a Questionnaire with id '{questionnaire_id}'" - ) - elif not _questionnaire_responses: - raise QuestionnaireResponseNotFoundError( - f"This device does not contain a QuestionnaireResponse for Questionnaire with id '{questionnaire_id}'" - ) - return _questionnaire_responses + @property + def value(self) -> str: + return urlencode(self.components) + + def __hash__(self): + return self.hash + + def __eq__(self, other: "DeviceTag"): + return self.hash == other.hash class Device(AggregateRoot): @@ -151,192 +234,158 @@ class Device(AggregateRoot): id: UUID = Field(default_factory=uuid4,
immutable=True) name: str = Field(regex=DEVICE_NAME_REGEX) - type: DeviceType = Field(immutable=True) - status: DeviceStatus = Field(default=DeviceStatus.ACTIVE) - product_team_id: UUID + status: Status = Field(default=Status.ACTIVE) + product_id: ProductId = Field(immutable=True) + product_team_id: str = Field(immutable=True) ods_code: str - created_on: datetime = Field( - default_factory=lambda: datetime.now(timezone.utc), immutable=True - ) - updated_on: Optional[datetime] = Field(default=None) - deleted_on: Optional[datetime] = Field(default=None) - keys: dict[str, DeviceKey] = Field(default_factory=dict, exclude=True) + created_on: datetime = Field(default_factory=now, immutable=True) + updated_on: datetime = Field(default=None) + deleted_on: datetime = Field(default=None) + keys: list[DeviceKey] = Field(default_factory=list) + tags: set[DeviceTag] | list[DeviceTag] = Field(default_factory=set) questionnaire_responses: dict[str, list[QuestionnaireResponse]] = Field( - default_factory=lambda: defaultdict(list), exclude=True + default_factory=lambda: defaultdict(list) ) - indexes: set[tuple[str, str, Any]] = Field(default_factory=set, exclude=True) + def state_exclude_tags(self) -> dict: + """ + Returns a deepcopy, useful for bulk operations rather than dealing with events. + + Exclude tags as we shouldn't return tags to the user on create. + """ + device_dict = orjson.loads(self.json()) + device_dict.pop("tags", None) + return device_dict + + @event def update(self, **kwargs) -> DeviceUpdatedEvent: - if "updated_on" not in kwargs: - kwargs["updated_on"] = datetime.now(timezone.utc) + kwargs[UPDATED_ON] = now() device_data = self._update(data=kwargs) - event = DeviceUpdatedEvent(**device_data) - return self.add_event(event) - - def delete(self) -> DeviceUpdatedEvent: - deletion_datetime = datetime.now(timezone.utc) - return self.update( - status=DeviceStatus.INACTIVE, - updated_on=deletion_datetime, - deleted_on=deletion_datetime, - ) - - def add_key(self, type: str, key: str, _trust=False) -> DeviceKeyAddedEvent: - if key in self.keys: - raise DuplicateError(f"It is forbidden to supply duplicate keys: '{key}'") - device_key = DeviceKey(key=key, type=type) - self.keys[key] = device_key - event = DeviceKeyAddedEvent(id=self.id, _trust=_trust, **device_key.dict()) - return self.add_event(event) - - def delete_key(self, key: str) -> DeviceKeyDeletedEvent: - try: - device_key = self.keys.pop(key) - except KeyError: - raise NotFoundError(f"This device does not contain key '{key}'") from None - event = DeviceKeyDeletedEvent(id=self.id, key=device_key.key) - return self.add_event(event) - - def add_index( - self, questionnaire_id: str, question_name: str - ) -> list[DeviceIndexAddedEvent]: - questionnaire_responses = _get_questionnaire_responses( - questionnaire_responses=self.questionnaire_responses, - questionnaire_id=questionnaire_id, + return DeviceUpdatedEvent(**device_data) + + @event + def delete(self) -> DeviceDeletedEvent: + deleted_on = now() + deleted_tags = {t.value for t in self.tags} + device_data = self._update( + data=dict( + status=Status.INACTIVE, + updated_on=deleted_on, + deleted_on=deleted_on, + tags=set(), + ) ) - if question_name not in questionnaire_responses[0].questionnaire.questions: - raise QuestionNotFoundError( - f"Questionnaire '{questionnaire_id}' does not " - f"contain question '{question_name}'" + return DeviceDeletedEvent(**device_data, deleted_tags=deleted_tags) + + @event + def add_key(self, key_type: str, key_value: str) -> DeviceKeyAddedEvent: + 
device_key = DeviceKey(key_value=key_value, key_type=key_type) + if device_key in self.keys: + raise DuplicateError( + f"It is forbidden to supply duplicate keys: '{key_type}':'{key_value}'" ) - unique_answers = _get_unique_answers( - questionnaire_responses=questionnaire_responses, - question_name=question_name, + self.keys.append(device_key) + device_data = self.state() + device_data["tags"] = {t.value for t in self.tags} + device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on + return DeviceKeyAddedEvent(new_key=device_key, **device_data) + + @event + def delete_key(self, key_type: str, key_value: str) -> DeviceKeyDeletedEvent: + device_key = DeviceKey(key_value=key_value, key_type=key_type) + if device_key not in self.keys: + raise NotFoundError( + f"This device does not contain key '{key_type}':'{key_value}'" + ) from None + self.keys.remove(device_key) + return DeviceKeyDeletedEvent( + deleted_key=device_key, + id=self.id, + keys=[k.dict() for k in self.keys], + tags=[t.value for t in self.tags], ) - events = [] - for answer in unique_answers: - event = DeviceIndexAddedEvent( - id=self.id, - questionnaire_id=questionnaire_id, - question_name=question_name, - value=answer, + @event + def add_tag(self, **kwargs) -> DeviceTagAddedEvent: + device_tag = DeviceTag(**kwargs) + if device_tag in self.tags: + raise DuplicateError( + f"It is forbidden to supply duplicate tag: '{device_tag.value}'" ) - events.append(event) - self.add_event(event) - self.indexes.add((questionnaire_id, question_name, answer)) - return events - - def add_questionnaire_response( - self, - questionnaire_response: QuestionnaireResponse, - _questionnaire: dict = None, - _trust=False, - ) -> list[QuestionnaireInstanceEvent, QuestionnaireResponseAddedEvent]: - _questionnaire = _questionnaire or questionnaire_response.questionnaire.dict() - - questionnaire_responses = self.questionnaire_responses[ - questionnaire_response.questionnaire.id - ] - questionnaire_response_index = len(questionnaire_responses) - questionnaire_responses.append(questionnaire_response) - questionnaire_used_already = questionnaire_response_index > 0 - - events = [] - if not questionnaire_used_already: - questionnaire_event = QuestionnaireInstanceEvent( - entity_id=self.id, - questionnaire_id=questionnaire_response.questionnaire.id, - **_questionnaire, + self.tags.add(device_tag) + device_data = self.state() + device_data["tags"] = {t.value for t in self.tags} + device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on + return DeviceTagAddedEvent(new_tag=device_tag.value, **device_data) + + @event + def add_tags(self, tags: list[dict]) -> DeviceTagsAddedEvent: + """Optimised bulk equivalent of performing device.add_tag sequentially.""" + new_tags = {DeviceTag(**tag) for tag in tags} + duplicate_tags = self.tags.intersection(new_tags) + if duplicate_tags: + raise DuplicateError( + f"It is forbidden to supply duplicate tags: {[t.value for t in duplicate_tags]}" ) - events.append(questionnaire_event) - self.add_event(questionnaire_event) - - questionnaire_response_event = QuestionnaireResponseAddedEvent( - entity_id=self.id, - questionnaire_response_index=questionnaire_response_index, - questionnaire_id=questionnaire_response.questionnaire.id, - responses=questionnaire_response.responses, - _trust=_trust, + self.tags = self.tags.union(new_tags) + device_data = self.state() + device_data["tags"] = {t.value for t in self.tags} + device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on + return 
DeviceTagsAddedEvent( + new_tags={tag.value for tag in new_tags}, **device_data ) - events.append(questionnaire_response_event) - self.add_event(questionnaire_response_event) - - return events - def update_questionnaire_response( - self, - questionnaire_response: QuestionnaireResponse, - questionnaire_response_index: int, - ) -> QuestionnaireResponseUpdatedEvent: - questionnaire_responses = self.questionnaire_responses.get( - questionnaire_response.questionnaire.id + @event + def clear_tags(self): + deleted_tags = self.tags + self.tags = set() + device_data = self.state() + return DeviceTagsClearedEvent( + id=device_data["id"], + keys=device_data["keys"], + deleted_tags={tag.value for tag in deleted_tags}, ) - if questionnaire_responses is None: - raise QuestionnaireNotFoundError( - "This device does not contain a Questionnaire " - f"with id '{questionnaire_response.questionnaire.id}'" - ) from None - try: - questionnaire_responses[questionnaire_response_index] = ( - questionnaire_response + @event + def add_questionnaire_response( + self, questionnaire_response: QuestionnaireResponse + ) -> QuestionnaireResponseUpdatedEvent: + questionnaire_id = questionnaire_response.questionnaire_id + questionnaire_responses = self.questionnaire_responses[questionnaire_id] + created_on = questionnaire_response.created_on + + current_created_ons = [qr.created_on for qr in questionnaire_responses] + if created_on in current_created_ons: + raise DuplicateQuestionnaireResponse( + "This Device already contains a " + f"response created on {created_on.isoformat()}" + f"for Questionnaire {questionnaire_id}" ) - except IndexError: - raise QuestionnaireResponseNotFoundError( - "This device does not contain a Questionnaire with a " - f"response at index '{questionnaire_response_index}'" - ) from None - - event = QuestionnaireResponseUpdatedEvent( - entity_id=self.id, - questionnaire_id=questionnaire_response.questionnaire.id, - questionnaire_response_index=questionnaire_response_index, - responses=questionnaire_response.responses, - ) - return self.add_event(event) - - def delete_questionnaire_response( - self, questionnaire_id: str, questionnaire_response_index: int - ) -> QuestionnaireResponseDeletedEvent: - questionnaire_responses = self.questionnaire_responses.get(questionnaire_id) - if questionnaire_responses is None: - raise QuestionnaireNotFoundError( - "This device does not contain a Questionnaire " - f"with id '{questionnaire_id}'" - ) from None - - try: - questionnaire_responses.pop(questionnaire_response_index) - except IndexError: - raise QuestionnaireResponseNotFoundError( - "This device does not contain a Questionnaire with a " - f"response at index '{questionnaire_response_index}'" - ) from None - - if len(questionnaire_responses) == 0: - self.questionnaire_responses.pop(questionnaire_id) + questionnaire_responses.append(questionnaire_response) - event = QuestionnaireResponseDeletedEvent( - entity_id=self.id, - questionnaire_id=questionnaire_id, - questionnaire_response_index=questionnaire_response_index, + return QuestionnaireResponseUpdatedEvent( + id=self.id, + keys=[k.dict() for k in self.keys], + tags=[t.value for t in self.tags], + questionnaire_responses={ + qid: [qr.dict() for qr in qrs] + for qid, qrs in self.questionnaire_responses.items() + }, ) - return self.add_event(event) def is_active(self): - return self.status is DeviceStatus.ACTIVE + return self.status is Status.ACTIVE class DeviceEventDeserializer(EventDeserializer): event_types = ( DeviceCreatedEvent, DeviceUpdatedEvent, + 
DeviceDeletedEvent, DeviceKeyAddedEvent, DeviceKeyDeletedEvent, - DeviceIndexAddedEvent, - QuestionnaireResponseAddedEvent, + DeviceTagAddedEvent, + DeviceTagsClearedEvent, + DeviceTagsAddedEvent, QuestionnaireResponseUpdatedEvent, - QuestionnaireResponseDeletedEvent, - QuestionnaireInstanceEvent, ) diff --git a/src/layers/domain/core/device/v2.py b/src/layers/domain/core/device/v2.py deleted file mode 100644 index 7f8e0850..00000000 --- a/src/layers/domain/core/device/v2.py +++ /dev/null @@ -1,424 +0,0 @@ -from collections import defaultdict -from datetime import datetime -from enum import StrEnum, auto -from functools import cached_property -from urllib.parse import urlencode -from uuid import UUID, uuid4 - -from attr import dataclass -from domain.core.aggregate_root import UPDATED_ON, AggregateRoot, event -from domain.core.base import BaseModel -from domain.core.device_key.v2 import DeviceKey -from domain.core.enum import Status -from domain.core.error import DuplicateError, NotFoundError -from domain.core.event import Event, EventDeserializer -from domain.core.questionnaire.v2 import ( - QuestionnaireResponse, - QuestionnaireResponseUpdatedEvent, -) -from domain.core.timestamp import now -from domain.core.validation import DEVICE_NAME_REGEX -from pydantic import Field, root_validator - -UPDATED_ON = "updated_on" -DEVICE_UPDATED_ON = f"device_{UPDATED_ON}" - - -class QuestionnaireNotFoundError(Exception): - pass - - -class QuestionnaireResponseNotFoundError(Exception): - pass - - -class DuplicateQuestionnaireResponse(Exception): - pass - - -class QuestionNotFoundError(Exception): - pass - - -@dataclass(kw_only=True, slots=True) -class DeviceCreatedEvent(Event): - id: str - name: str - device_type: "DeviceType" - product_team_id: UUID - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list["DeviceTag"] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceUpdatedEvent(Event): - id: str - name: str - device_type: "DeviceType" - product_team_id: UUID - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list["DeviceTag"] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceDeletedEvent(Event): - id: str - name: str - device_type: "DeviceType" - product_team_id: UUID - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list["DeviceTag"] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - deleted_tags: list["DeviceTag"] = None - - -@dataclass(kw_only=True, slots=True) -class DeviceDeletedEvent(Event): - id: str - name: str - device_type: "DeviceType" - product_team_id: UUID - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list["DeviceTag"] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - deleted_tags: list["DeviceTag"] = None - - -@dataclass(kw_only=True, slots=True) -class DeviceKeyAddedEvent(Event): - new_key: DeviceKey - id: str - name: str - device_type: "DeviceType" - product_team_id: UUID - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list["DeviceTag"] - questionnaire_responses: dict[str, 
dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceKeyDeletedEvent(Event): - deleted_key: DeviceKey - id: str - keys: list[DeviceKey] - tags: list["DeviceTag"] - updated_on: str = None - - -@dataclass(kw_only=True, slots=True) -class DeviceTagAddedEvent(Event): - new_tag: "DeviceTag" - id: str - name: str - device_type: "DeviceType" - product_team_id: UUID - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list["DeviceTag"] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceTagsAddedEvent(Event): - new_tags: list["DeviceTag"] - id: str - name: str - device_type: "DeviceType" - product_team_id: UUID - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list["DeviceTag"] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceTagsClearedEvent(Event): - id: str - keys: list[dict] - deleted_tags: list["DeviceTag"] - updated_on: str = None - - -class DeviceType(StrEnum): - """ - A Product is to be classified as being one of the following. These terms - were provided by Aubyn Crawford in collaboration with Service Now. - - NOTE: - A 'SERVICE' and 'API' is NOT what a developer would expect them to be. - These are terms from the problem domain and relate to how Assurance - is performed. - """ - - PRODUCT = auto() - ENDPOINT = auto() - # SERVICE = auto() - # API = auto() - - -class DeviceTag(BaseModel): - """ - DeviceTag is a mechanism for indexing Device data. In DynamoDB then intention is for this - to be translated into a duplicated record in the database, so that Devices with with the - same DeviceTag can be queried directly, therefore mimicking efficient search-like behaviour. - """ - - __root__: list[tuple[str, str]] - - class Config: - arbitrary_types_allowed = True - keep_untouched = (cached_property,) - - @root_validator(pre=True) - def encode_tag(cls, values: dict): - initialised_with_root = "__root__" in values and len(values) == 1 - item_to_process = values["__root__"] if initialised_with_root else values - if initialised_with_root: - _components = ((k, v) for k, v in item_to_process) - else: # otherwise initialise directly with key value pairs - _components = sorted((k, str(v)) for k, v in item_to_process.items()) - - case_insensitive_components = tuple((k, v.lower()) for k, v in _components) - return {"__root__": case_insensitive_components} - - def dict(self, *args, **kwargs): - return self.components - - @cached_property - def components(self): - return tuple(self.__root__) - - @cached_property - def hash(self): - return hash(self.components) - - @property - def value(self) -> str: - return urlencode(self.components) - - def __hash__(self): - return self.hash - - def __eq__(self, other: "DeviceTag"): - return self.hash == other.hash - - -class Device(AggregateRoot): - """ - An entity in the database. It could model all sorts of different logical or - physical entities: - e.g. - NRL (SERVICE) - +-- NRL.v2 (API) - | +-- nrl (???) - +-- NRL.v3 (API) - +-- nrl-consumer-api (???) - +-- nrl-producer-api (???) 
- """ - - id: UUID = Field(default_factory=uuid4, immutable=True) - name: str = Field(regex=DEVICE_NAME_REGEX) - device_type: DeviceType = Field(immutable=True) - status: Status = Field(default=Status.ACTIVE) - product_team_id: UUID - ods_code: str - created_on: datetime = Field(default_factory=now, immutable=True) - updated_on: datetime = Field(default=None) - deleted_on: datetime = Field(default=None) - keys: list[DeviceKey] = Field(default_factory=list) - tags: set[DeviceTag] | list[DeviceTag] = Field(default_factory=set) - questionnaire_responses: dict[str, dict[str, QuestionnaireResponse]] = Field( - default_factory=lambda: defaultdict(dict) - ) - - @event - def update(self, **kwargs) -> DeviceUpdatedEvent: - kwargs[UPDATED_ON] = now() - device_data = self._update(data=kwargs) - return DeviceUpdatedEvent(**device_data) - - @event - def delete(self) -> DeviceDeletedEvent: - deleted_on = now() - deleted_tags = {t.dict() for t in self.tags} - device_data = self._update( - data=dict( - status=Status.INACTIVE, - updated_on=deleted_on, - deleted_on=deleted_on, - tags=set(), - ) - ) - return DeviceDeletedEvent(**device_data, deleted_tags=deleted_tags) - - @event - def add_key(self, key_type: str, key_value: str) -> DeviceKeyAddedEvent: - device_key = DeviceKey(key_value=key_value, key_type=key_type) - if device_key in self.keys: - raise DuplicateError( - f"It is forbidden to supply duplicate keys: '{key_type}':'{key_value}'" - ) - self.keys.append(device_key) - device_data = self.state() - device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on - return DeviceKeyAddedEvent(new_key=device_key, **device_data) - - @event - def delete_key(self, key_type: str, key_value: str) -> DeviceKeyDeletedEvent: - device_key = DeviceKey(key_value=key_value, key_type=key_type) - if device_key not in self.keys: - raise NotFoundError( - f"This device does not contain key '{key_type}':'{key_value}'" - ) from None - self.keys.remove(device_key) - return DeviceKeyDeletedEvent( - deleted_key=device_key, - id=self.id, - keys=[k.dict() for k in self.keys], - tags=[t.dict() for t in self.tags], - ) - - @event - def add_tag(self, **kwargs) -> DeviceTagAddedEvent: - device_tag = DeviceTag(**kwargs) - if device_tag in self.tags: - raise DuplicateError( - f"It is forbidden to supply duplicate tag: '{device_tag.value}'" - ) - self.tags.add(device_tag) - device_data = self.state() - device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on - return DeviceTagAddedEvent(new_tag=device_tag, **device_data) - - @event - def add_tags(self, tags: list[dict]) -> DeviceTagsAddedEvent: - """Optimised bulk equivalent of performing device.add_tag sequentially.""" - new_tags = {DeviceTag(**tag) for tag in tags} - duplicate_tags = self.tags.intersection(new_tags) - if duplicate_tags: - raise DuplicateError( - f"It is forbidden to supply duplicate tags: {[t.value for t in duplicate_tags]}" - ) - self.tags = self.tags.union(new_tags) - device_data = self.state() - device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on - return DeviceTagsAddedEvent(new_tags=new_tags, **device_data) - - @event - def clear_tags(self): - deleted_tags = self.tags - self.tags = set() - device_data = self.state() - return DeviceTagsClearedEvent( - id=device_data["id"], keys=device_data["keys"], deleted_tags=deleted_tags - ) - - @event - def add_questionnaire_response( - self, questionnaire_response: QuestionnaireResponse - ) -> QuestionnaireResponseUpdatedEvent: - questionnaire_id = 
questionnaire_response.questionnaire.id - questionnaire_responses = self.questionnaire_responses[questionnaire_id] - - created_on_str = questionnaire_response.created_on.isoformat() - if created_on_str in questionnaire_responses: - raise DuplicateQuestionnaireResponse( - "This Device already contains a " - f"response created on {created_on_str}" - f"for Questionnaire {questionnaire_id}" - ) - questionnaire_responses[created_on_str] = questionnaire_response - - return QuestionnaireResponseUpdatedEvent( - entity_id=self.id, - entity_keys=[k.dict() for k in self.keys], - entity_tags=[t.dict() for t in self.tags], - questionnaire_responses={ - qid: {_created_on: qr.dict() for _created_on, qr in _qr.items()} - for qid, _qr in self.questionnaire_responses.items() - }, - ) - - @event - def update_questionnaire_response( - self, - questionnaire_response: QuestionnaireResponse, - ) -> QuestionnaireResponseUpdatedEvent: - questionnaire_id = questionnaire_response.questionnaire.id - questionnaire_responses = self.questionnaire_responses.get(questionnaire_id) - if questionnaire_responses is None: - raise QuestionnaireNotFoundError( - "This device does not contain a Questionnaire " - f"with id '{questionnaire_id}'" - ) from None - - created_on_str = questionnaire_response.created_on.isoformat() - if created_on_str not in questionnaire_responses: - raise QuestionnaireResponseNotFoundError( - "This device does not contain a Questionnaire with a " - f"response created on '{created_on_str}'" - ) from None - - questionnaire_responses[created_on_str] = questionnaire_response - - return QuestionnaireResponseUpdatedEvent( - entity_id=self.id, - entity_keys=[k.dict() for k in self.keys], - entity_tags=[t.dict() for t in self.tags], - questionnaire_responses={ - qid: {_created_on: qr.dict() for _created_on, qr in _qr.items()} - for qid, _qr in self.questionnaire_responses.items() - }, - ) - - def is_active(self): - return self.status is Status.ACTIVE - - -class DeviceEventDeserializer(EventDeserializer): - event_types = ( - DeviceCreatedEvent, - DeviceUpdatedEvent, - DeviceDeletedEvent, - DeviceKeyAddedEvent, - DeviceKeyDeletedEvent, - DeviceTagAddedEvent, - DeviceTagsClearedEvent, - DeviceTagsAddedEvent, - QuestionnaireResponseUpdatedEvent, - ) diff --git a/src/layers/domain/core/device/v3.py b/src/layers/domain/core/device/v3.py deleted file mode 100644 index 0f892408..00000000 --- a/src/layers/domain/core/device/v3.py +++ /dev/null @@ -1,409 +0,0 @@ -from collections import defaultdict -from datetime import datetime -from enum import StrEnum, auto -from functools import cached_property -from urllib.parse import parse_qs, urlencode -from uuid import UUID, uuid4 - -import orjson -from attr import dataclass -from domain.core.aggregate_root import UPDATED_ON, AggregateRoot, event -from domain.core.base import BaseModel -from domain.core.cpm_system_id.v1 import ProductId -from domain.core.device_key.v2 import DeviceKey -from domain.core.enum import Status -from domain.core.error import DuplicateError, NotFoundError -from domain.core.event import Event, EventDeserializer -from domain.core.questionnaire.v3 import QuestionnaireResponse -from domain.core.timestamp import now -from domain.core.validation import DEVICE_NAME_REGEX -from pydantic import Field, root_validator - -UPDATED_ON = "updated_on" -DEVICE_UPDATED_ON = f"device_{UPDATED_ON}" - - -class QuestionnaireNotFoundError(Exception): - pass - - -class QuestionnaireResponseNotFoundError(Exception): - pass - - -class DuplicateQuestionnaireResponse(Exception): - 
pass - - -@dataclass(kw_only=True, slots=True) -class DeviceCreatedEvent(Event): - id: str - name: str - product_team_id: UUID - product_id: ProductId - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list[str] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceUpdatedEvent(Event): - id: str - name: str - product_team_id: UUID - product_id: ProductId - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list[str] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceDeletedEvent(Event): - id: str - name: str - product_team_id: UUID - product_id: ProductId - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list[str] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - deleted_tags: list[str] = None - - -@dataclass(kw_only=True, slots=True) -class DeviceKeyAddedEvent(Event): - new_key: DeviceKey - id: str - name: str - product_team_id: UUID - product_id: ProductId - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list[str] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceKeyDeletedEvent(Event): - deleted_key: DeviceKey - id: str - keys: list[DeviceKey] - tags: list[str] - updated_on: str = None - - -@dataclass(kw_only=True, slots=True) -class DeviceTagAddedEvent(Event): - new_tag: str - id: str - name: str - product_team_id: UUID - product_id: ProductId - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list[str] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceTagsAddedEvent(Event): - new_tags: list[str] - id: str - name: str - product_team_id: UUID - product_id: ProductId - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[DeviceKey] - tags: list[str] - questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] - - -@dataclass(kw_only=True, slots=True) -class DeviceTagsClearedEvent(Event): - id: str - keys: list[dict] - deleted_tags: list[str] - updated_on: str = None - - -@dataclass(kw_only=True, slots=True) -class QuestionnaireResponseUpdatedEvent(Event): - """ - This is adding the inital questionnaire response from the event body request. - """ - - id: str - questionnaire_responses: dict[str, list[QuestionnaireResponse]] - keys: list[DeviceKey] - tags: list[str] - updated_on: str = None - - -class DeviceType(StrEnum): - """ - A Product is to be classified as being one of the following. These terms - were provided by Aubyn Crawford in collaboration with Service Now. - - NOTE: - A 'SERVICE' and 'API' is NOT what a developer would expect them to be. - These are terms from the problem domain and relate to how Assurance - is performed. - """ - - PRODUCT = auto() - ENDPOINT = auto() - # SERVICE = auto() - # API = auto() - - -class DeviceTag(BaseModel): - """ - DeviceTag is a mechanism for indexing Device data. 
In DynamoDB then intention is for this - to be translated into a duplicated record in the database, so that Devices with with the - same DeviceTag can be queried directly, therefore mimicking efficient search-like behaviour. - """ - - __root__: list[tuple[str, str]] - - class Config: - arbitrary_types_allowed = True - keep_untouched = (cached_property,) - - @root_validator(pre=True) - def encode_tag(cls, values: dict): - initialised_with_root = "__root__" in values and len(values) == 1 - item_to_process = values["__root__"] if initialised_with_root else values - - # Case 1: query string is provided (__root__="foo=bar") - if initialised_with_root and isinstance(item_to_process, str): - _components = ((k, v) for k, (v,) in parse_qs(item_to_process).items()) - # Case 2: query components are provided (__root__=("foo", "bar")) - elif initialised_with_root: - _components = ((k, v) for k, v in item_to_process) - # Case 3: query components are directly provided (("foo", "bar")) - else: - _components = ((k, str(v)) for k, v in item_to_process.items()) - - case_insensitive_components = tuple( - sorted((k, v.lower()) for k, v in _components) - ) - return {"__root__": case_insensitive_components} - - def dict(self, *args, **kwargs): - return self.components - - @cached_property - def components(self): - return tuple(self.__root__) - - @cached_property - def hash(self): - return hash(self.components) - - @property - def value(self) -> str: - return urlencode(self.components) - - def __hash__(self): - return self.hash - - def __eq__(self, other: "DeviceTag"): - return self.hash == other.hash - - -class Device(AggregateRoot): - """ - An entity in the database. It could model all sorts of different logical or - physical entities: - e.g. - NRL (SERVICE) - +-- NRL.v2 (API) - | +-- nrl (???) - +-- NRL.v3 (API) - +-- nrl-consumer-api (???) - +-- nrl-producer-api (???) - """ - - id: UUID = Field(default_factory=uuid4, immutable=True) - name: str = Field(regex=DEVICE_NAME_REGEX) - status: Status = Field(default=Status.ACTIVE) - product_id: ProductId = Field(immutable=True) - product_team_id: str = Field(immutable=True) - ods_code: str - created_on: datetime = Field(default_factory=now, immutable=True) - updated_on: datetime = Field(default=None) - deleted_on: datetime = Field(default=None) - keys: list[DeviceKey] = Field(default_factory=list) - tags: set[DeviceTag] | list[DeviceTag] = Field(default_factory=set) - questionnaire_responses: dict[str, list[QuestionnaireResponse]] = Field( - default_factory=lambda: defaultdict(list) - ) - - def state_exclude_tags(self) -> dict: - """ - Returns a deepcopy, useful for bulk operations rather than dealing with events. - - Exclude tags as we shouldn't return tags to the user on create. 
- """ - device_dict = orjson.loads(self.json()) - device_dict.pop("tags", None) - return device_dict - - @event - def update(self, **kwargs) -> DeviceUpdatedEvent: - kwargs[UPDATED_ON] = now() - device_data = self._update(data=kwargs) - return DeviceUpdatedEvent(**device_data) - - @event - def delete(self) -> DeviceDeletedEvent: - deleted_on = now() - deleted_tags = {t.value for t in self.tags} - device_data = self._update( - data=dict( - status=Status.INACTIVE, - updated_on=deleted_on, - deleted_on=deleted_on, - tags=set(), - ) - ) - return DeviceDeletedEvent(**device_data, deleted_tags=deleted_tags) - - @event - def add_key(self, key_type: str, key_value: str) -> DeviceKeyAddedEvent: - device_key = DeviceKey(key_value=key_value, key_type=key_type) - if device_key in self.keys: - raise DuplicateError( - f"It is forbidden to supply duplicate keys: '{key_type}':'{key_value}'" - ) - self.keys.append(device_key) - device_data = self.state() - device_data["tags"] = {t.value for t in self.tags} - device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on - return DeviceKeyAddedEvent(new_key=device_key, **device_data) - - @event - def delete_key(self, key_type: str, key_value: str) -> DeviceKeyDeletedEvent: - device_key = DeviceKey(key_value=key_value, key_type=key_type) - if device_key not in self.keys: - raise NotFoundError( - f"This device does not contain key '{key_type}':'{key_value}'" - ) from None - self.keys.remove(device_key) - return DeviceKeyDeletedEvent( - deleted_key=device_key, - id=self.id, - keys=[k.dict() for k in self.keys], - tags=[t.value for t in self.tags], - ) - - @event - def add_tag(self, **kwargs) -> DeviceTagAddedEvent: - device_tag = DeviceTag(**kwargs) - if device_tag in self.tags: - raise DuplicateError( - f"It is forbidden to supply duplicate tag: '{device_tag.value}'" - ) - self.tags.add(device_tag) - device_data = self.state() - device_data["tags"] = {t.value for t in self.tags} - device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on - return DeviceTagAddedEvent(new_tag=device_tag.value, **device_data) - - @event - def add_tags(self, tags: list[dict]) -> DeviceTagsAddedEvent: - """Optimised bulk equivalent of performing device.add_tag sequentially.""" - new_tags = {DeviceTag(**tag) for tag in tags} - duplicate_tags = self.tags.intersection(new_tags) - if duplicate_tags: - raise DuplicateError( - f"It is forbidden to supply duplicate tags: {[t.value for t in duplicate_tags]}" - ) - self.tags = self.tags.union(new_tags) - device_data = self.state() - device_data["tags"] = {t.value for t in self.tags} - device_data.pop(UPDATED_ON) # The @event decorator will handle updated_on - return DeviceTagsAddedEvent( - new_tags={tag.value for tag in new_tags}, **device_data - ) - - @event - def clear_tags(self): - deleted_tags = self.tags - self.tags = set() - device_data = self.state() - return DeviceTagsClearedEvent( - id=device_data["id"], - keys=device_data["keys"], - deleted_tags={tag.value for tag in deleted_tags}, - ) - - @event - def add_questionnaire_response( - self, questionnaire_response: QuestionnaireResponse - ) -> QuestionnaireResponseUpdatedEvent: - questionnaire_id = questionnaire_response.questionnaire_id - questionnaire_responses = self.questionnaire_responses[questionnaire_id] - created_on = questionnaire_response.created_on - - current_created_ons = [qr.created_on for qr in questionnaire_responses] - if created_on in current_created_ons: - raise DuplicateQuestionnaireResponse( - "This Device already contains a " - 
f"response created on {created_on.isoformat()}" - f"for Questionnaire {questionnaire_id}" - ) - questionnaire_responses.append(questionnaire_response) - - return QuestionnaireResponseUpdatedEvent( - id=self.id, - keys=[k.dict() for k in self.keys], - tags=[t.value for t in self.tags], - questionnaire_responses={ - qid: [qr.dict() for qr in qrs] - for qid, qrs in self.questionnaire_responses.items() - }, - ) - - def is_active(self): - return self.status is Status.ACTIVE - - -class DeviceEventDeserializer(EventDeserializer): - event_types = ( - DeviceCreatedEvent, - DeviceUpdatedEvent, - DeviceDeletedEvent, - DeviceKeyAddedEvent, - DeviceKeyDeletedEvent, - DeviceTagAddedEvent, - DeviceTagsClearedEvent, - DeviceTagsAddedEvent, - QuestionnaireResponseUpdatedEvent, - ) diff --git a/src/layers/domain/core/device_key/tests/test_device_key_v1.py b/src/layers/domain/core/device_key/tests/test_device_key_v1.py index ddd17832..7510e070 100644 --- a/src/layers/domain/core/device_key/tests/test_device_key_v1.py +++ b/src/layers/domain/core/device_key/tests/test_device_key_v1.py @@ -4,14 +4,14 @@ GOOD_ID_EXAMPLES = { "product_id": "P.XXX-YYY", - "accredited_system_id": "ABC:123456", - "message_handling_system_id": "ABC:ABC-123456:abc123", + "accredited_system_id": "ABC:12345", + "message_handling_system_id": "ABC:ABC-12345:abc123", } @pytest.mark.parametrize(["type", "key"], GOOD_ID_EXAMPLES.items()) def test_validate_key_pass(key, type): - assert validate_key(key=key, type=DeviceKeyType(type)) == key + assert validate_key(key_value=key, key_type=DeviceKeyType(type)) == key @pytest.mark.parametrize("type", GOOD_ID_EXAMPLES.keys()) @@ -21,7 +21,7 @@ def test_validate_key_fail(key, type): pytest.skip("Already covered in 'test_validate_key_pass'") with pytest.raises(InvalidKeyPattern): - validate_key(key=key, type=DeviceKeyType(type)) + validate_key(key_value=key, key_type=DeviceKeyType(type)) @pytest.mark.parametrize("type", GOOD_ID_EXAMPLES.keys()) @@ -35,4 +35,4 @@ def test_validate_key_fail(key, type): ) def test_validate_key_fail_other(key, type): with pytest.raises(InvalidKeyPattern): - validate_key(key=key, type=DeviceKeyType(type)) + validate_key(key_value=key, key_type=DeviceKeyType(type)) diff --git a/src/layers/domain/core/device_key/tests/test_device_key_v2.py b/src/layers/domain/core/device_key/tests/test_device_key_v2.py deleted file mode 100644 index 45acc1a9..00000000 --- a/src/layers/domain/core/device_key/tests/test_device_key_v2.py +++ /dev/null @@ -1,38 +0,0 @@ -import pytest -from domain.core.device_key.v2 import DeviceKeyType, validate_key -from domain.core.error import InvalidKeyPattern - -GOOD_ID_EXAMPLES = { - "product_id": "P.XXX-YYY", - "accredited_system_id": "ABC:12345", - "message_handling_system_id": "ABC:ABC-12345:abc123", -} - - -@pytest.mark.parametrize(["type", "key"], GOOD_ID_EXAMPLES.items()) -def test_validate_key_pass(key, type): - assert validate_key(key_value=key, key_type=DeviceKeyType(type)) == key - - -@pytest.mark.parametrize("type", GOOD_ID_EXAMPLES.keys()) -@pytest.mark.parametrize("key", GOOD_ID_EXAMPLES.values()) -def test_validate_key_fail(key, type): - if (type, key) in GOOD_ID_EXAMPLES.items(): - pytest.skip("Already covered in 'test_validate_key_pass'") - - with pytest.raises(InvalidKeyPattern): - validate_key(key_value=key, key_type=DeviceKeyType(type)) - - -@pytest.mark.parametrize("type", GOOD_ID_EXAMPLES.keys()) -@pytest.mark.parametrize( - "key", - ( - "12345", - "ABC-12345", - "XXX-YYY", - ), -) -def test_validate_key_fail_other(key, type): - with 
pytest.raises(InvalidKeyPattern): - validate_key(key_value=key, key_type=DeviceKeyType(type)) diff --git a/src/layers/domain/core/device_key/v1.py b/src/layers/domain/core/device_key/v1.py index f8853938..8636ab15 100644 --- a/src/layers/domain/core/device_key/v1.py +++ b/src/layers/domain/core/device_key/v1.py @@ -7,6 +7,16 @@ from pydantic import validator +def validate_key(key: str, type: "DeviceKeyType"): + if type and type.pattern.match(key) is None: + raise InvalidKeyPattern( + f"Key '{key}' does not match the expected " + f"pattern '{type.pattern.pattern}' associated with " + f"key type '{type}'" + ) + return key + + class DeviceKeyType(StrEnum): PRODUCT_ID = auto() ACCREDITED_SYSTEM_ID = auto() @@ -30,20 +40,27 @@ class DeviceKey(BaseModel): A Device Key is a secondary way of indexing / retrieving Devices """ - type: DeviceKeyType - key: str + key_type: DeviceKeyType + key_value: str - @validator("key", check_fields=True) - def validate_key(cls, key: str, values: dict): - type: DeviceKeyType = values.get("type") - return validate_key(key=key, type=type) + @validator("key_value", check_fields=True) + def validate_key(cls, key_value: str, values: dict): + key_type: DeviceKeyType = values.get("key_type") + return validate_key(key_value=key_value, key_type=key_type) + @property + def parts(self): + return (self.key_type, self.key_value) -def validate_key(key: str, type: DeviceKeyType): - if type and type.pattern.match(key) is None: + def __hash__(self): + return hash(self.parts) + + +def validate_key(key_value: str, key_type: DeviceKeyType): + if key_type and key_type.pattern.match(key_value) is None: raise InvalidKeyPattern( - f"Key '{key}' does not match the expected " - f"pattern '{type.pattern.pattern}' associated with " - f"key type '{type}'" + f"Key '{key_value}' does not match the expected " + f"pattern '{key_type.pattern.pattern}' associated with " + f"key type '{key_type}'" ) - return key + return key_value diff --git a/src/layers/domain/core/device_key/v2.py b/src/layers/domain/core/device_key/v2.py deleted file mode 100644 index 1362ad39..00000000 --- a/src/layers/domain/core/device_key/v2.py +++ /dev/null @@ -1,35 +0,0 @@ -from domain.core.base import BaseModel -from domain.core.device_key.v1 import DeviceKeyType, validate_key -from domain.core.error import InvalidKeyPattern -from pydantic import validator - - -class DeviceKey(BaseModel): - """ - A Device Key is a secondary way of indexing / retrieving Devices - """ - - key_type: DeviceKeyType - key_value: str - - @validator("key_value", check_fields=True) - def validate_key(cls, key_value: str, values: dict): - key_type: DeviceKeyType = values.get("key_type") - return validate_key(key_value=key_value, key_type=key_type) - - @property - def parts(self): - return (self.key_type, self.key_value) - - def __hash__(self): - return hash(self.parts) - - -def validate_key(key_value: str, key_type: DeviceKeyType): - if key_type and key_type.pattern.match(key_value) is None: - raise InvalidKeyPattern( - f"Key '{key_value}' does not match the expected " - f"pattern '{key_type.pattern.pattern}' associated with " - f"key type '{key_type}'" - ) - return key_value diff --git a/src/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py b/src/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py index af5f3837..4b20a6e7 100644 --- a/src/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py +++ 
b/src/layers/domain/core/device_reference_data/tests/test_device_reference_data_v1.py @@ -1,10 +1,10 @@ import json import pytest -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.device_reference_data.v1 import QuestionnaireResponseUpdatedEvent -from domain.core.questionnaire.tests.test_questionnaire_v3 import VALID_SCHEMA -from domain.core.questionnaire.v3 import Questionnaire +from domain.core.cpm_product import CpmProduct +from domain.core.device_reference_data import QuestionnaireResponseUpdatedEvent +from domain.core.questionnaire import Questionnaire +from domain.core.questionnaire.tests.test_questionnaire_v1 import VALID_SCHEMA from test_helpers.uuid import consistent_uuid diff --git a/src/layers/domain/core/device_reference_data/v1.py b/src/layers/domain/core/device_reference_data/v1.py index 238bf458..9c14898f 100644 --- a/src/layers/domain/core/device_reference_data/v1.py +++ b/src/layers/domain/core/device_reference_data/v1.py @@ -4,10 +4,10 @@ from attr import dataclass from domain.core.aggregate_root import AggregateRoot, event -from domain.core.cpm_system_id.v1 import ProductId -from domain.core.device.v3 import DuplicateQuestionnaireResponse +from domain.core.cpm_system_id import ProductId +from domain.core.device import DuplicateQuestionnaireResponse from domain.core.event import Event -from domain.core.questionnaire.v3 import QuestionnaireResponse +from domain.core.questionnaire import QuestionnaireResponse from domain.core.timestamp import now from domain.core.validation import DEVICE_NAME_REGEX from pydantic import Field diff --git a/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v1.py b/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v1.py index f5f42c5f..638b1fae 100644 --- a/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v1.py +++ b/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v1.py @@ -21,6 +21,6 @@ def test__can_instantiate_ods_organisation(ods_code: str): "!@£$%", ], ) -def test__id_must_be_ods_code(ods_code: str): +def test__id_must_be_valid_ods_code(ods_code: str): with pytest.raises(ValidationError): Root.create_ods_organisation(ods_code=ods_code) diff --git a/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v2.py b/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v2.py deleted file mode 100644 index ff0ee616..00000000 --- a/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v2.py +++ /dev/null @@ -1,26 +0,0 @@ -from contextlib import nullcontext as do_not_raise - -import pytest -from domain.core.root.v2 import Root -from pydantic import ValidationError - - -@pytest.mark.parametrize( - "ods_code", - ["F5H1R", "RTG09", "NLF02", "D82007002"], -) -def test__can_instantiate_ods_organisation(ods_code: str): - with do_not_raise(): - Root.create_ods_organisation(ods_code=ods_code) - - -@pytest.mark.parametrize( - "ods_code", - [ - "ABCDEFGHIJ", - "!@£$%", - ], -) -def test__id_must_be_ods_code(ods_code: str): - with pytest.raises(ValidationError): - Root.create_ods_organisation(ods_code=ods_code) diff --git a/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v3.py b/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v3.py deleted file mode 100644 index ce551489..00000000 --- a/src/layers/domain/core/ods_organisation/tests/test_ods_organisation_v3.py +++ /dev/null @@ -1,26 +0,0 @@ -from contextlib import nullcontext as do_not_raise - -import pytest -from domain.core.root.v3 
import Root -from pydantic import ValidationError - - -@pytest.mark.parametrize( - "ods_code", - ["F5H1R", "RTG09", "NLF02", "D82007002"], -) -def test__can_instantiate_ods_organisation(ods_code: str): - with do_not_raise(): - Root.create_ods_organisation(ods_code=ods_code) - - -@pytest.mark.parametrize( - "ods_code", - [ - "ABCDEFGHIJ", - "!@£$%", - ], -) -def test__id_must_be_valid_ods_code(ods_code: str): - with pytest.raises(ValidationError): - Root.create_ods_organisation(ods_code=ods_code) diff --git a/src/layers/domain/core/ods_organisation/v1.py b/src/layers/domain/core/ods_organisation/v1.py index c2c03824..56251002 100644 --- a/src/layers/domain/core/ods_organisation/v1.py +++ b/src/layers/domain/core/ods_organisation/v1.py @@ -1,5 +1,3 @@ -from uuid import UUID - from domain.core.aggregate_root import AggregateRoot from domain.core.product_team import ProductTeam, ProductTeamCreatedEvent from domain.core.validation import ODS_CODE_REGEX @@ -15,8 +13,9 @@ class OdsOrganisation(AggregateRoot): ods_code: str = Field(regex=ODS_CODE_REGEX) - def create_product_team(self, id: UUID, name: str) -> ProductTeam: - product_team = ProductTeam(id=id, name=name, ods_code=self.ods_code) + def create_product_team(self, name: str, keys: list = None) -> ProductTeam: + keys = keys or [] + product_team = ProductTeam(name=name, ods_code=self.ods_code, keys=keys) event = ProductTeamCreatedEvent(**product_team.dict()) product_team.add_event(event) self.add_event(event=event) diff --git a/src/layers/domain/core/ods_organisation/v2.py b/src/layers/domain/core/ods_organisation/v2.py deleted file mode 100644 index 45681205..00000000 --- a/src/layers/domain/core/ods_organisation/v2.py +++ /dev/null @@ -1,23 +0,0 @@ -from uuid import UUID - -from domain.core.aggregate_root import AggregateRoot -from domain.core.product_team.v2 import ProductTeam, ProductTeamCreatedEvent -from domain.core.validation import ODS_CODE_REGEX -from pydantic import Field - - -class OdsOrganisation(AggregateRoot): - """ - An object that maps onto the Organisational Data Service (ODS) definition - of an "Organisation". We are only interested in a sub-set of the fields - they hold. - """ - - ods_code: str = Field(regex=ODS_CODE_REGEX) - - def create_product_team(self, id: UUID, name: str) -> ProductTeam: - product_team = ProductTeam(id=id, name=name, ods_code=self.ods_code) - event = ProductTeamCreatedEvent(**product_team.dict()) - product_team.add_event(event) - self.add_event(event=event) - return product_team diff --git a/src/layers/domain/core/ods_organisation/v3.py b/src/layers/domain/core/ods_organisation/v3.py deleted file mode 100644 index 80d4545e..00000000 --- a/src/layers/domain/core/ods_organisation/v3.py +++ /dev/null @@ -1,22 +0,0 @@ -from domain.core.aggregate_root import AggregateRoot -from domain.core.product_team.v3 import ProductTeam, ProductTeamCreatedEvent -from domain.core.validation import ODS_CODE_REGEX -from pydantic import Field - - -class OdsOrganisation(AggregateRoot): - """ - An object that maps onto the Organisational Data Service (ODS) definition - of an "Organisation". We are only interested in a sub-set of the fields - they hold. 
- """ - - ods_code: str = Field(regex=ODS_CODE_REGEX) - - def create_product_team(self, name: str, keys: list = None) -> ProductTeam: - keys = keys or [] - product_team = ProductTeam(name=name, ods_code=self.ods_code, keys=keys) - event = ProductTeamCreatedEvent(**product_team.dict()) - product_team.add_event(event) - self.add_event(event=event) - return product_team diff --git a/src/layers/domain/core/product_key/v1.py b/src/layers/domain/core/product_key/v1.py index 6e34bd68..4abaa1aa 100644 --- a/src/layers/domain/core/product_key/v1.py +++ b/src/layers/domain/core/product_key/v1.py @@ -1,7 +1,7 @@ import re from enum import StrEnum, auto -from domain.core.device_key.v2 import DeviceKey +from domain.core.device_key import DeviceKey from domain.core.validation import SdsId diff --git a/src/layers/domain/core/product_team/tests/test_product_team.py b/src/layers/domain/core/product_team/tests/test_product_team.py deleted file mode 100644 index 32ce10ae..00000000 --- a/src/layers/domain/core/product_team/tests/test_product_team.py +++ /dev/null @@ -1,82 +0,0 @@ -from uuid import UUID - -import pytest -from domain.core.product_team.v1 import ProductTeam as ProductTeamV1 -from domain.core.product_team.v1 import ( - ProductTeamCreatedEvent as ProductTeamCreatedEventV1, -) -from domain.core.product_team.v2 import ProductTeam as ProductTeamV2 -from domain.core.product_team.v2 import ( - ProductTeamCreatedEvent as ProductTeamCreatedEventV2, -) -from domain.core.root.v1 import Root as RootV1 -from domain.core.root.v2 import Root as RootV2 -from pydantic import ValidationError - - -@pytest.mark.parametrize( - ["ProductTeam", "ProductTeamCreatedEvent", "Root"], - ( - [ProductTeamV1, ProductTeamCreatedEventV1, RootV1], - [ProductTeamV2, ProductTeamCreatedEventV2, RootV2], - ), -) -@pytest.mark.parametrize( - "id,name", - [ - [UUID("ae28e872-843d-4e2e-9f0b-b5d3c42d441f"), "First"], - [UUID("edf90c3a-f865-4dd9-9ab9-400e6ebc02e0"), "Second"], - [UUID("f9518c12-6c83-4544-97db-d9dd1d64da97"), "Third"], - ], -) -def test__create_product_team( - ProductTeam: type[ProductTeamV2], - ProductTeamCreatedEvent: type[ProductTeamCreatedEventV2], - Root: type[RootV2], - id: str, - name: str, -): - org = Root.create_ods_organisation(ods_code="AB123") - - result = org.create_product_team(id=str(id), name=name) - event = result.events[0] - - assert isinstance(result, ProductTeam) - assert result.id == id - assert result.name == name - assert result.ods_code == org.ods_code - - assert len(result.events) == 1 - assert isinstance(event, ProductTeamCreatedEvent) - assert event.id == id - assert event.name == name - assert event.ods_code == org.ods_code - - -@pytest.mark.parametrize("Root", (RootV1, RootV2)) -@pytest.mark.parametrize( - "id,name", - [ - ["123", "First"], - [" ", "Second"], - ], -) -def test__create_product_team_bad_id(Root: type[RootV2], id: str, name: str): - org = Root.create_ods_organisation(ods_code="AB123") - - with pytest.raises(ValidationError): - org.create_product_team(id=id, name=name) - - -@pytest.mark.parametrize("Root", (RootV1, RootV2)) -@pytest.mark.parametrize( - "id,name", - [ - ["ae28e872-843d-4e2e-9f0b-b5d3c42d441f", " "], - ], -) -def test__create_product_team_bad_name(Root: type[RootV2], id: str, name: str): - org = Root.create_ods_organisation(ods_code="AB123") - - with pytest.raises(ValidationError): - org.create_product_team(id=id, name=name) diff --git a/src/layers/domain/core/product_team/tests/test_product_team_v3.py 
b/src/layers/domain/core/product_team/tests/test_product_team_v1.py similarity index 97% rename from src/layers/domain/core/product_team/tests/test_product_team_v3.py rename to src/layers/domain/core/product_team/tests/test_product_team_v1.py index 51629f0d..3fde8338 100644 --- a/src/layers/domain/core/product_team/tests/test_product_team_v3.py +++ b/src/layers/domain/core/product_team/tests/test_product_team_v1.py @@ -1,9 +1,9 @@ import re import pytest -from domain.core.product_team.v3 import ProductTeam, ProductTeamCreatedEvent -from domain.core.root.v3 import Root -from domain.request_models.v1 import CreateProductTeamIncomingParams +from domain.core.product_team import ProductTeam, ProductTeamCreatedEvent +from domain.core.root import Root +from domain.request_models import CreateProductTeamIncomingParams from pydantic import ValidationError diff --git a/src/layers/domain/core/product_team/v1.py b/src/layers/domain/core/product_team/v1.py index 4db7527a..63ec68bb 100644 --- a/src/layers/domain/core/product_team/v1.py +++ b/src/layers/domain/core/product_team/v1.py @@ -1,18 +1,27 @@ -from uuid import UUID +from datetime import datetime from attr import dataclass from domain.core.aggregate_root import AggregateRoot -from domain.core.device import Device, DeviceCreatedEvent, DeviceStatus, DeviceType +from domain.core.cpm_product import CpmProduct, CpmProductCreatedEvent +from domain.core.cpm_system_id import ProductTeamId +from domain.core.enum import Status from domain.core.event import Event +from domain.core.product_team_key import ProductTeamKey +from domain.core.timestamp import now from domain.core.validation import ENTITY_NAME_REGEX -from pydantic import Field +from pydantic import Field, root_validator @dataclass(kw_only=True, slots=True) class ProductTeamCreatedEvent(Event): - id: UUID + id: str name: str ods_code: str + status: Status + created_on: str + updated_on: str = None + deleted_on: str = None + keys: list[ProductTeamKey] = Field(default_factory=list) class ProductTeam(AggregateRoot): @@ -22,25 +31,29 @@ class ProductTeam(AggregateRoot): ProductTeams, meaning that `ods_code` is not unique amongst ProductTeams. 
""" - id: UUID + id: str = None name: str = Field(regex=ENTITY_NAME_REGEX) ods_code: str + status: Status = Status.ACTIVE + created_on: datetime = Field(default_factory=now, immutable=True) + updated_on: datetime = Field(default=None) + deleted_on: datetime = Field(default=None) + keys: list[ProductTeamKey] = Field(default_factory=list) - def create_device( - self, - name: str, - type: DeviceType, - status: DeviceStatus = DeviceStatus.ACTIVE, - _trust=False, - ) -> Device: - device = Device( - name=name, - type=type, - status=status, - product_team_id=self.id, - ods_code=self.ods_code, + @root_validator(pre=True) + def set_id(cls, values): + ods_code = values.get("ods_code") + if ods_code and not values.get("id"): + product_team = ProductTeamId.create(ods_code=ods_code) + values["id"] = product_team.id + return values + + def create_cpm_product(self, name: str, product_id: str = None) -> CpmProduct: + extra_kwargs = {"id": product_id} if product_id is not None else {} + product = CpmProduct( + product_team_id=self.id, name=name, ods_code=self.ods_code, **extra_kwargs ) - device_created_event = DeviceCreatedEvent(**device.dict(), _trust=_trust) - device.add_event(device_created_event) - self.add_event(device_created_event) - return device + product_created_event = CpmProductCreatedEvent(**product.dict(exclude={"keys"})) + product.add_event(product_created_event) + self.add_event(product_created_event) + return product diff --git a/src/layers/domain/core/product_team/v2.py b/src/layers/domain/core/product_team/v2.py deleted file mode 100644 index a606ce90..00000000 --- a/src/layers/domain/core/product_team/v2.py +++ /dev/null @@ -1,46 +0,0 @@ -from uuid import UUID - -from attr import dataclass -from domain.core.aggregate_root import AggregateRoot -from domain.core.device.v2 import Device, DeviceCreatedEvent, DeviceType -from domain.core.enum import Status -from domain.core.event import Event -from domain.core.validation import ENTITY_NAME_REGEX -from pydantic import Field - - -@dataclass(kw_only=True, slots=True) -class ProductTeamCreatedEvent(Event): - id: UUID - name: str - ods_code: str - - -class ProductTeam(AggregateRoot): - """ - A ProductTeam is the entity that owns Products, and is derived from ODS - Organisations. A single ODS Organisation can be mapped onto multiple - ProductTeams, meaning that `ods_code` is not unique amongst ProductTeams. 
- """ - - id: UUID - name: str = Field(regex=ENTITY_NAME_REGEX) - ods_code: str - - def create_device( - self, - name: str, - device_type: DeviceType, - status: Status = Status.ACTIVE, - ) -> Device: - device = Device( - name=name, - device_type=device_type, - status=status, - product_team_id=self.id, - ods_code=self.ods_code, - ) - device_created_event = DeviceCreatedEvent(**device.dict()) - device.add_event(device_created_event) - self.add_event(device_created_event) - return device diff --git a/src/layers/domain/core/product_team/v3.py b/src/layers/domain/core/product_team/v3.py deleted file mode 100644 index 209d60ec..00000000 --- a/src/layers/domain/core/product_team/v3.py +++ /dev/null @@ -1,59 +0,0 @@ -from datetime import datetime - -from attr import dataclass -from domain.core.aggregate_root import AggregateRoot -from domain.core.cpm_product import CpmProduct, CpmProductCreatedEvent -from domain.core.cpm_system_id.v1 import ProductTeamId -from domain.core.enum import Status -from domain.core.event import Event -from domain.core.product_team_key import ProductTeamKey -from domain.core.timestamp import now -from domain.core.validation import ENTITY_NAME_REGEX -from pydantic import Field, root_validator - - -@dataclass(kw_only=True, slots=True) -class ProductTeamCreatedEvent(Event): - id: str - name: str - ods_code: str - status: Status - created_on: str - updated_on: str = None - deleted_on: str = None - keys: list[ProductTeamKey] = Field(default_factory=list) - - -class ProductTeam(AggregateRoot): - """ - A ProductTeam is the entity that owns Products, and is derived from ODS - Organisations. A single ODS Organisation can be mapped onto multiple - ProductTeams, meaning that `ods_code` is not unique amongst ProductTeams. - """ - - id: str = None - name: str = Field(regex=ENTITY_NAME_REGEX) - ods_code: str - status: Status = Status.ACTIVE - created_on: datetime = Field(default_factory=now, immutable=True) - updated_on: datetime = Field(default=None) - deleted_on: datetime = Field(default=None) - keys: list[ProductTeamKey] = Field(default_factory=list) - - @root_validator(pre=True) - def set_id(cls, values): - ods_code = values.get("ods_code") - if ods_code and not values.get("id"): - product_team = ProductTeamId.create(ods_code=ods_code) - values["id"] = product_team.id - return values - - def create_cpm_product(self, name: str, product_id: str = None) -> CpmProduct: - extra_kwargs = {"id": product_id} if product_id is not None else {} - product = CpmProduct( - product_team_id=self.id, name=name, ods_code=self.ods_code, **extra_kwargs - ) - product_created_event = CpmProductCreatedEvent(**product.dict(exclude={"keys"})) - product.add_event(product_created_event) - self.add_event(product_created_event) - return product diff --git a/src/layers/domain/core/product_team_key/tests/test_product_team_key_v1.py b/src/layers/domain/core/product_team_key/tests/test_product_team_key_v1.py deleted file mode 100644 index a48bb278..00000000 --- a/src/layers/domain/core/product_team_key/tests/test_product_team_key_v1.py +++ /dev/null @@ -1,22 +0,0 @@ -import pytest -from domain.core.product_team_key.v1 import ProductTeamKeyType, validate_key - -GOOD_ID_EXAMPLES = { - "product_team_id_alias": "FOOBAR", -} - -BAD_ID_EXAMPLES = { - "product_team_id": "FOOBAR", - "foo": "BAR", -} - - -@pytest.mark.parametrize(["type", "key"], GOOD_ID_EXAMPLES.items()) -def test_validate_key_pass(key, type): - assert validate_key(key_value=key, key_type=ProductTeamKeyType(type)) == key - - 
-@pytest.mark.parametrize(["type", "key"], BAD_ID_EXAMPLES.items()) -def test_validate_type_fail_other(key, type): - with pytest.raises(ValueError): - validate_key(key_value=key, key_type=ProductTeamKeyType(type)) diff --git a/src/layers/domain/core/product_team_key/v1.py b/src/layers/domain/core/product_team_key/v1.py index 5ea27ae7..d79fc023 100644 --- a/src/layers/domain/core/product_team_key/v1.py +++ b/src/layers/domain/core/product_team_key/v1.py @@ -1,10 +1,8 @@ import re from enum import StrEnum, auto -from domain.core.base import BaseModel -from domain.core.error import InvalidProductTeamKeyError +from domain.core.device_key import DeviceKey from domain.core.validation import CpmId -from pydantic import validator class ProductTeamKeyType(StrEnum): @@ -19,32 +17,10 @@ def pattern(self) -> re.Pattern: raise NotImplementedError(f"No ID validation configured for '{self}'") -class ProductTeamKey(BaseModel): +class ProductTeamKey(DeviceKey): """ A ProductTeam Key is a secondary way of indexing / retrieving Product Teams """ key_type: ProductTeamKeyType key_value: str - - @validator("key_value", check_fields=True) - def validate_key(cls, key_value: str, values: dict): - key_type: ProductTeamKeyType = values.get("key_type") - return validate_key(key_value=key_value, key_type=key_type) - - @property - def parts(self): - return (self.key_type, self.key_value) - - def __hash__(self): - return hash(self.parts) - - -def validate_key(key_value: str, key_type: ProductTeamKeyType): - if key_type and key_type.pattern.match(key_value) is None: - raise InvalidProductTeamKeyError( - f"Key '{key_value}' does not match the expected " - f"pattern '{key_type.pattern.pattern}' associated with " - f"key type '{key_type}'" - ) - return key_value diff --git a/src/layers/domain/core/questionnaire/custom_rules.py b/src/layers/domain/core/questionnaire/custom_rules.py deleted file mode 100644 index e26e4300..00000000 --- a/src/layers/domain/core/questionnaire/custom_rules.py +++ /dev/null @@ -1,12 +0,0 @@ -from urllib.parse import urlparse - - -def url(value): - parsed_url = urlparse(value) - if not parsed_url.scheme or not parsed_url.netloc: - raise ValueError("Invalid URL format") - - -def empty_str(value): - if isinstance(value, str) and len(value) != 0: - raise ValueError("Expected empty string") diff --git a/src/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py b/src/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py index 764fd5e1..a7366075 100644 --- a/src/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py +++ b/src/layers/domain/core/questionnaire/tests/test_questionnaire_v1.py @@ -1,1102 +1,106 @@ -from datetime import date, datetime, time -from types import FunctionType -from typing import Type +import json import pytest -from domain.core.error import DuplicateError, InvalidResponseError from domain.core.questionnaire import ( - InvalidChoiceType, - Question, Questionnaire, - QuestionnaireResponse, - TooManyAnswerTypes, - custom_rules, - validate_mandatory_questions_answered, - validate_response_against_question, + QuestionnaireResponseMissingValue, + QuestionnaireResponseValidationError, ) +from domain.core.timestamp import now from pydantic import ValidationError -QUESTIONNAIRE_NAME = "sample_questionnaire" -VERSION_1 = "1" +VALID_SCHEMA = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "size": { + "type": "number", + "minimum": 1, + "maximum": 14, + }, + "colour": { + "type": "string", + "enum": ["black", 
"white"], + }, + "brand": {"type": "string"}, # not required + }, + "required": ["size", "colour"], + "additionalProperties": False, +} - -@pytest.mark.parametrize( - ["name", "version"], - [[QUESTIONNAIRE_NAME, VERSION_1]], -) -def test_questionnaire_constructor(name: str, version: str): - questionnaire = Questionnaire(name=name, version=version) - - assert questionnaire.name == name - assert questionnaire.version == version - assert questionnaire.questions is not None - - -@pytest.mark.parametrize( - [ - "name", - "human_readable_name", - "answer_types", - "mandatory", - "multiple", - "validation_rules", - "choices", - ], - [ - [ - "Q1", - "question 1", - {str}, - False, - False, - set(), - {"choice1", "choice2", "choice3"}, - ], - ["Q2", "question 2", {int}, False, True, set(), {1, 2, 3}], - ["Q3", "question 3", {bool}, False, False, set(), set()], - ["Q4", "question 4", {datetime}, False, True, set(), set()], - ], -) -def test_question_constructor( - name: str, - human_readable_name: str, - answer_types: Type, - mandatory: bool, - multiple: bool, - validation_rules: set[FunctionType], - choices: set[str], -): - question = Question( - name=name, - human_readable_name=human_readable_name, - answer_types=answer_types, - mandatory=mandatory, - multiple=multiple, - validation_rules=validation_rules, - choices=choices, - ) - - assert question.name == name - assert question.human_readable_name == human_readable_name - assert question.answer_types == answer_types - assert question.mandatory == mandatory - assert question.multiple == multiple - assert question.validation_rules == validation_rules - assert question.choices == choices - - -@pytest.mark.parametrize( - [ - "name", - "human_readable_name", - "answer_types", - "mandatory", - "multiple", - "validation_rules", - "choices", - ], - [ - ["question1", "", {str}, True, False, None, {"choice1", "choice2", "choice3"}], - ["question2", "", {int}, False, True, None, None], - ], -) -def test_add_question( - name: str, - human_readable_name: str, - answer_types: Type, - mandatory: bool, - multiple: bool, - validation_rules: set[FunctionType], - choices: set[str], -): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - - result = questionnaire.add_question( - name=name, - human_readable_name=human_readable_name, - answer_types=answer_types, - mandatory=mandatory, - multiple=multiple, - validation_rules=validation_rules, - choices=choices, - ) - - assert result is not None - assert name in questionnaire.questions - - -@pytest.mark.parametrize("question_name", ["question1", "question2", "question3"]) -def test_cannot_add_duplicate_question(question_name: str): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name=question_name) - - with pytest.raises(DuplicateError) as error: - questionnaire.add_question(name=question_name) - - assert str(error.value) == f"Question '{question_name}' already exists." 
- - -@pytest.mark.parametrize( - [ - "name", - "human_readable_name", - "answer_types", - "mandatory", - "multiple", - "validation_rules", - "choices", - ], - [ - ["question1", "", {list}, False, False, None, None], - ], -) -def test_cannot_add_question_of_wrong_type( - name: str, - human_readable_name: str, - answer_types: Type, - mandatory: bool, - multiple: bool, - validation_rules: set[FunctionType], - choices: set[str], -): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - invalid_answer_types = {list} - - with pytest.raises(ValueError) as error: - questionnaire.add_question( - name=name, - human_readable_name=human_readable_name, - answer_types=answer_types, - mandatory=mandatory, - multiple=multiple, - validation_rules=validation_rules, - choices=choices, - ) - - assert ( - error.value.errors()[0]["msg"] - == f"Answer types {invalid_answer_types} are not allowed." - ) - - -@pytest.mark.parametrize( - [ - "name", - "human_readable_name", - "answer_types", - "mandatory", - "multiple", - "validation_rules", - "choices", - ], - [ - ["question1", "", {dict, list}, False, False, None, None], - ], -) -def test_cannot_add_question_of_wrong_type_2( - name: str, - human_readable_name: str, - answer_types: Type, - mandatory: bool, - multiple: bool, - validation_rules: set[FunctionType], - choices: set[str], -): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - invalid_answer_types = {dict, list} - - with pytest.raises(ValueError) as error: - questionnaire.add_question( - name=name, - human_readable_name=human_readable_name, - answer_types=answer_types, - mandatory=mandatory, - multiple=multiple, - validation_rules=validation_rules, - choices=choices, - ) - - assert ( - error.value.errors()[0]["msg"] - == f"Answer types {invalid_answer_types} are not allowed." 
- ) - - -@pytest.mark.parametrize("name", ["question1", "question2", "question3"]) -def test_has_question(name: str): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name=name) - - result = questionnaire.__contains__(name) - - assert result == True - - -@pytest.mark.parametrize( - [ - "question_name", - "human_readable_name", - "answer_types", - "mandatory", - "multiple", - "validation_rules", - "choices", - ], - [ - ["question", "", {str}, False, False, {"not_custom_rule_function"}, None], - ], -) -def test_invalid_question_validation_rules_type( - question_name: str, - human_readable_name: str, - answer_types: Type, - mandatory: bool, - multiple: bool, - validation_rules: set[FunctionType], - choices: set, -): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - - with pytest.raises(ValidationError) as error: - questionnaire.add_question( - name=question_name, - human_readable_name=human_readable_name, - answer_types=answer_types, - mandatory=mandatory, - multiple=multiple, - validation_rules=validation_rules, - choices=choices, - ) - - assert error.value.errors()[0]["msg"] == "instance of function expected" - - -@pytest.mark.parametrize( - [ - "name", - "human_readable_name", - "answer_types", - "mandatory", - "multiple", - "validation_rules", - "choices", - "expected_error", - ], - [ - [ - "question1", - "", - {str, bool}, - False, - False, - None, - {1, 2, 3}, - InvalidChoiceType, - ], - [ - "question2", - "", - {int}, - False, - True, - None, - {"not_int", "not_int2"}, - InvalidChoiceType, - ], - [ - "question3", - "", - {str, bool}, - False, - False, - None, - {"foo", "bar", True}, - TooManyAnswerTypes, - ], - ], -) -def test_invalid_question_choices_type( - name: str, - human_readable_name: str, - answer_types: Type, - mandatory: bool, - multiple: bool, - validation_rules: set[FunctionType], - choices: set, - expected_error: Exception, -): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - - # can't add question with set of choices that don't match question type - with pytest.raises(expected_error): - questionnaire.add_question( - name=name, - human_readable_name=human_readable_name, - answer_types=answer_types, - mandatory=mandatory, - multiple=multiple, - validation_rules=validation_rules, - choices=choices, - ) - - -@pytest.mark.parametrize( - "response", - [ - [ - {"question1": ["answer_a"]}, - {"not_a_question": [1]}, - {"question3": [True]}, - ], - [ - {"not_a_question": ["answer_c"]}, - {"question2": [1]}, - {"question3": [False]}, - ], - ], -) -def test_incorrect_questionnaire_answered_raises_error(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name="question1") - questionnaire.add_question(name="question2", answer_types={int}) - questionnaire.add_question(name="question3") - - with pytest.raises(ValidationError) as error: - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - - error_message = str(error.value) - question_name = error_message.split("'")[1] - assert ( - error.value.errors()[0]["msg"] - == f"Unexpected answer for the question '{question_name}'. The questionnaire 'sample_questionnaire' does not contain this question." 
- ) - - -@pytest.mark.parametrize( - "response", - [ - [ - {"mandatory_question": ["answer"]}, - {"not_mandatory_question": [1]}, - ], - ], -) -def test_mandatory_questions_answered(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name="mandatory_question", mandatory=True) - questionnaire.add_question(name="not_mandatory_question", answer_types={int}) - mandatory_questions = questionnaire.mandatory_questions - answered_question_names = [ - question_name for (question_name, _), in map(dict.items, response) - ] - - validate_mandatory_questions_answered( - questionnaire_name=questionnaire.name, - mandatory_questions=mandatory_questions, - answered_question_names=answered_question_names, - ) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [ - {"mandatory_question": ["answer"]}, - {"not_mandatory_question": [1]}, - ], - ], -) -def test_mandatory_questions_answered_successfully(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name="mandatory_question", mandatory=True) - questionnaire.add_question(name="not_mandatory_question", answer_types={int}) - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [ - {"not_mandatory_question": [1]}, - ], - ], -) -def test_mandatory_questions_not_answered(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name="mandatory_question", mandatory=True) - questionnaire.add_question(name="not_mandatory_question", answer_types={int}) - mandatory_questions = questionnaire.mandatory_questions - answered_question_names = [ - question_name for (question_name, _), in map(dict.items, response) - ] - with pytest.raises(InvalidResponseError) as error: - validate_mandatory_questions_answered( - questionnaire_name=questionnaire.name, - mandatory_questions=mandatory_questions, - answered_question_names=answered_question_names, - ) - assert ( - str(error.value) - == f"Mandatory question 'mandatory_question' in questionnaire '{questionnaire.name}' has not been answered." - ) - - -@pytest.mark.parametrize( - "response", - [ - [ - {"not_mandatory_question": [1]}, - ], - ], -) -def test_mandatory_questions_not_answered_raises_error(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name="mandatory_question", mandatory=True) - questionnaire.add_question(name="not_mandatory_question", answer_types={int}) - with pytest.raises(ValidationError) as error: - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - - assert ( - error.value.errors()[0]["msg"] - == f"Mandatory question 'mandatory_question' in questionnaire '{questionnaire.name}' has not been answered." 
- ) - - -@pytest.mark.parametrize( - "response", - [ - [ - {"question1": ["answer_a", "answer_b"]}, - {"question2": [1, 2, 3]}, - {"question3": [True, False]}, - ], - [ - {"question1": ["answer_c"]}, - {"question2": [4, 5]}, - {"question3": [False]}, - ], - ], -) -def test_multiple_questions_responses_allowed(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name="question1", multiple=True) - questionnaire.add_question(name="question2", answer_types={int}, multiple=True) - questionnaire.add_question(name="question3", answer_types={bool}, multiple=True) - - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - ["answer_a", "answer_b"], - ["answer_c", "answer_d"], - ], -) -def test_multiple_question_responses_allowed(response: list): - question = Question( - name="Question", - human_readable_name="", - answer_types={str}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [ - {"question1": ["answer_a", "answer_b"]}, - {"question2": [1, 2, 3]}, - {"question3": [True, False]}, - ], - [ - {"question1": ["answer_c", "answer_d"]}, - {"question2": [4, 5]}, - {"question3": [False]}, - ], - ], -) -def test_multiple_questions_responses_not_allowed(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question(name="question1") - questionnaire.add_question(name="question2", answer_types={int}) - questionnaire.add_question(name="question3", answer_types={bool}) - - with pytest.raises(ValidationError) as error: - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - - error_message = str(error.value) - question_name = error_message.split("'")[1] - response_given = error_message.split("Response given: ")[1].split(".")[0].strip() - assert ( - error.value.errors()[0]["msg"] - == f"Question '{question_name}' does not allow multiple responses. Response given: {response_given}." - ) - - -@pytest.mark.parametrize( - "response", - [ - ["answer_a", "answer_b"], - ["answer_c", "answer_d"], - ], -) -def test_multiple_question_responses_not_allowed(response: list): - question = Question( - name="Question", - human_readable_name="", - answer_types={str}, - mandatory=False, - multiple=False, - validation_rules=set(), - choices=set(), - ) - - with pytest.raises(InvalidResponseError) as error: - validate_response_against_question(answers=response, question=question) - - assert ( - str(error.value) - == f"Question 'Question' does not allow multiple responses. Response given: {response}." 
- ) - - -@pytest.mark.parametrize( - "response", - [ - [ - {"String or integer response": ["answer_a", 1]}, - {"Integer or boolean": [1]}, - {"Integer or boolean or string multiple": [True, 1, "string"]}, - ], - ], -) -def test_multiple_answer_types_allowed(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question( - name="String or integer response", answer_types={str, int}, multiple=True - ) - questionnaire.add_question(name="Integer or boolean", answer_types={int, bool}) - questionnaire.add_question( - name="Integer or boolean or string multiple", - answer_types={str, int, bool}, - multiple=True, - ) - - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - # if no error raised, the test has implicitly passed - - -@pytest.fixture -def standard_questionnaire() -> Questionnaire: - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question( - name="String response", answer_types={str}, multiple=True - ) - questionnaire.add_question(name="Integer response", answer_types={int}) - questionnaire.add_question(name="Boolean response", answer_types={bool}) - questionnaire.add_question(name="Date-Time response", answer_types={datetime}) - questionnaire.add_question(name="Decimal response", answer_types={float}) - questionnaire.add_question(name="Date response", answer_types={date}) - questionnaire.add_question(name="Time response", answer_types={time}) - return questionnaire - - -@pytest.mark.parametrize( - "response", - [ - [ - {"String response": ["answer_a", "answer_b"]}, - {"Integer response": [1]}, - {"Boolean response": [True]}, - {"Date-Time response": [datetime(2024, 1, 24, 14, 21, 7, 484991)]}, - {"Decimal response": [1.1]}, - {"Date response": [datetime(2024, 1, 24)]}, - {"Time response": [datetime.strptime("14:21:07", "%H:%M:%S").time()]}, - ], - ], -) -def test_valid_questionnaire_responses_types( - response: list[dict[str, list]], standard_questionnaire: Questionnaire -): - QuestionnaireResponse(questionnaire=standard_questionnaire, responses=response) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [ - {"String response": [1]}, - ], - [ - {"Integer response": ["answer"]}, - ], - [ - {"Boolean response": [1.1]}, - ], - [ - {"Date-Time response": [1]}, - ], - [ - {"Decimal response": [False]}, - ], - [ - {"Date response": ["answer"]}, - ], - [ - {"Time response": [True]}, - ], - ], -) -def test_invalid_questionnaire_responses_types_raises_error( - response: list[dict[str, list]], standard_questionnaire: Questionnaire -): - with pytest.raises(ValidationError) as error: - QuestionnaireResponse(questionnaire=standard_questionnaire, responses=response) - - error_message = str(error.value) - question_name = error_message.split("'")[1] - response_given_str = error_message.split("Response '")[1].split("'")[0] - try: - response_given = eval(response_given_str) # Can't evaluate strings or datetime - except NameError: - response_given = response_given_str - response_type = repr(type(response_given)) - expected_type = standard_questionnaire.questions[question_name].answer_types - - assert ( - error.value.errors()[0]["msg"] - == f"Question '{question_name}' rule 'validate_answer_types' failed validation for response '{response_given}' with error: Question '{question_name}' expects type {expected_type}. Response '{response_given}' is of type '{response_type}'." 
- ) - - -@pytest.mark.parametrize( - "response", - [ - ["answer"], - ], -) -def test_valid_question_response_type_string(response: list): - question = Question( - name="String response", - human_readable_name="", - answer_types={str}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - ["answer_type", "response"], - [ - [str, 1], - [int, "a"], - [bool, 1], - [datetime, "a"], - [float, "1.2"], - [date, "a"], - ], -) -def test_invalid_question_response(answer_type: type, response: any): - question = Question( - name="question", - human_readable_name="", - answer_types={answer_type}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), - ) - - with pytest.raises(InvalidResponseError) as error: - validate_response_against_question(answers=[response], question=question) - - assert ( - str(error.value) - == f"Question '{question.name}' rule 'validate_answer_types' failed validation for response '{response}' with error: Question '{question.name}' expects type {question.answer_types}. Response '{response}' is of type '{type(response)}'." - ) +INVALID_SCHEMA = { + "$schema": "http://json-schema.org/draft-07/schema#", + "type": "object", + "properties": { + "a-field": { + "type": "not-a-type", + } + }, + "required": ["a-field"], +} @pytest.mark.parametrize( - "response", + "data", [ - [1], + {"size": 1, "colour": "black"}, + {"size": 14, "colour": "white"}, + {"size": 7, "colour": "white", "brand": "something"}, ], ) -def test_valid_question_response_type_integer(response: list): - question = Question( - name="Integer response", - human_readable_name="", - answer_types={int}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), +def test_schema_validation_pass(data): + questionnaire = Questionnaire( + name="foo", version="1", json_schema=json.dumps(VALID_SCHEMA) ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed + response = questionnaire.validate(data=data) + assert response.questionnaire_name == "foo" + assert response.questionnaire_version == "1" + assert response.data == data + assert response.created_on.date() == now().date() @pytest.mark.parametrize( - "response", + "data", [ - [True], + {"size": 1, "colour": "red"}, + {"size": "not a number", "colour": "white"}, + { + "size": 7, + "colour": "white", + "brand": "something", + "unknown_field": "foo", + }, ], ) -def test_valid_question_response_type_bool(response: list): - question = Question( - name="Boolean response", - human_readable_name="", - answer_types={bool}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), +def test_schema_validation_fail(data): + questionnaire = Questionnaire( + name="foo", version="1", json_schema=json.dumps(VALID_SCHEMA) ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed + with pytest.raises(QuestionnaireResponseValidationError): + questionnaire.validate(data=data) @pytest.mark.parametrize( - "response", + "data", [ - [datetime(2024, 1, 24, 14, 21, 7, 484991)], + {"size": 1}, + {"colour": "white"}, ], ) -def test_valid_question_response_type_datetime(response: list): - question = Question( - name="Date-Time response", - human_readable_name="", - answer_types={datetime}, - 
mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), +def test_schema_validation_missing_fail(data): + questionnaire = Questionnaire( + name="foo", version="1", json_schema=json.dumps(VALID_SCHEMA) ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed + with pytest.raises(QuestionnaireResponseMissingValue): + questionnaire.validate(data=data) @pytest.mark.parametrize( - "response", + "schema", [ - [1.27], + INVALID_SCHEMA, ], ) -def test_valid_question_response_type_float(response: list): - question = Question( - name="Decimal response", - human_readable_name="", - answer_types={float}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [datetime(2024, 1, 24)], - ], -) -def test_valid_question_response_type_date(response: list): - question = Question( - name="Date response", - human_readable_name="", - answer_types={date}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [datetime.strptime("14:21:07", "%H:%M:%S").time()], - ], -) -def test_valid_question_response_type_time(response: list): - question = Question( - name="Time response", - human_readable_name="", - answer_types={time}, - mandatory=False, - multiple=True, - validation_rules=set(), - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [ - {"question1": ["answer_a", "answer_b"]}, - {"question2": [1, 2, 3]}, - ], - [ - {"question1": ["answer_c"]}, - {"question2": [4, 5]}, - ], - ], -) -def test_valid_questionnaire_responses_choices(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question( - name="question1", multiple=True, choices={"answer_a", "answer_b", "answer_c"} - ) - questionnaire.add_question( - name="question2", - answer_types={int}, - multiple=True, - choices={1, 2, 3, 4, 5, 6, 7}, - ) - - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - ["answer_a"], - ["answer_b"], - ["answer_c"], - ], -) -def test_valid_question_response_choice(response: list): - question = Question( - name="Question", - human_readable_name="", - answer_types={str}, - mandatory=False, - multiple=False, - validation_rules=set(), - choices={"answer_a", "answer_b", "answer_c"}, - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -# Validation rule questions: -EMPTY_STR_QUESTION_NAME = "empty string" -EMPTY_STR_OR_INT_QUESTION_NAME = "empty string or integer" -URL_QUESTION_NAME = "url" - - -@pytest.mark.parametrize( - "response", - [ - [ - {URL_QUESTION_NAME: ["https://www.example.com"]}, - ], - ], -) -def test_valid_questionnaire_responses_rule_url(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question( - 
name=URL_QUESTION_NAME, - validation_rules={custom_rules.url}, - ) - - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [["https://www.example.com"]], -) -def test_valid_question_response_rule_url(response: list): - question = Question( - name=URL_QUESTION_NAME, - human_readable_name="", - answer_types={str}, - mandatory=False, - multiple=False, - validation_rules={custom_rules.url}, - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [ - [ - {EMPTY_STR_QUESTION_NAME: [""]}, - {EMPTY_STR_OR_INT_QUESTION_NAME: ["", 1]}, - ], - ], -) -def test_valid_questionnaire_responses_rule_empty_str(response: list[dict[str, list]]): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question( - name=EMPTY_STR_QUESTION_NAME, - validation_rules={custom_rules.empty_str}, - ) - questionnaire.add_question( - name=EMPTY_STR_OR_INT_QUESTION_NAME, - answer_types={str, int}, - multiple=True, - validation_rules={custom_rules.empty_str}, - ) - - QuestionnaireResponse(questionnaire=questionnaire, responses=response) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [[""]], -) -def test_valid_question_response_rule_empty_str(response: list[dict[str, list]]): - question = Question( - name=EMPTY_STR_QUESTION_NAME, - human_readable_name="", - answer_types={str}, - mandatory=False, - multiple=False, - validation_rules={custom_rules.empty_str}, - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "response", - [["", 1]], -) -def test_valid_question_response_rule_empty_str_2(response: list[dict[str, list]]): - question = Question( - name=EMPTY_STR_QUESTION_NAME, - human_readable_name="", - answer_types={str, int}, - mandatory=False, - multiple=True, - validation_rules={custom_rules.empty_str}, - choices=set(), - ) - - validate_response_against_question(answers=response, question=question) - # if no error raised, the test has implicitly passed - - -@pytest.mark.parametrize( - "responses", - [ - [ - {URL_QUESTION_NAME: ["not_a_url"]}, - {EMPTY_STR_QUESTION_NAME: ["not an empty string"]}, - ], - ], -) -def test_invalid_questionnaire_response_rules_raises_error( - responses: list[dict[str, list]] -): - questionnaire = Questionnaire(name=QUESTIONNAIRE_NAME, version=VERSION_1) - questionnaire.add_question( - name=URL_QUESTION_NAME, - validation_rules={custom_rules.url}, - ) - questionnaire.add_question( - name=EMPTY_STR_QUESTION_NAME, - validation_rules={custom_rules.empty_str}, - ) - - with pytest.raises(ValidationError) as error: - QuestionnaireResponse(questionnaire=questionnaire, responses=responses) - - assert ( - error.value.errors()[0]["msg"] - == f"Question '{URL_QUESTION_NAME}' rule 'url' failed validation for response 'not_a_url' with error: Invalid URL format." - ) - assert ( - error.value.errors()[1]["msg"] - == f"Question '{EMPTY_STR_QUESTION_NAME}' rule 'empty_str' failed validation for response 'not an empty string' with error: Expected empty string." 
- ) - - -@pytest.mark.parametrize( - "response", - [["not_a_url"]], -) -def test_invalid_question_response_rule_url_raises_error(response: list): - question = Question( - name=URL_QUESTION_NAME, - human_readable_name="", - answer_types={str}, - mandatory=False, - multiple=False, - validation_rules={custom_rules.url}, - choices=set(), - ) - - with pytest.raises(InvalidResponseError) as error: - validate_response_against_question(answers=response, question=question) - - assert ( - str(error.value) - == f"Question '{question.name}' rule 'url' failed validation for response '{response[0]}' with error: Invalid URL format." - ) - - -@pytest.mark.parametrize( - "response", - [[1, "not_a_empty_string"]], -) -def test_invalid_question_response_rule_empty_str_raises_error(response: list): - question = Question( - name=EMPTY_STR_QUESTION_NAME, - human_readable_name="", - answer_types={str, int}, - mandatory=False, - multiple=True, - validation_rules={custom_rules.empty_str}, - choices=set(), - ) - - with pytest.raises(InvalidResponseError) as error: - validate_response_against_question(answers=response, question=question) - - assert ( - str(error.value) - == f"Question '{question.name}' rule 'empty_str' failed validation for response '{response[1]}' with error: Expected empty string." - ) +def test_invalid_schema(schema): + with pytest.raises(ValidationError): + Questionnaire(name="name", version="123", json_schema=json.dumps(schema)) diff --git a/src/layers/domain/core/questionnaire/tests/test_questionnaire_v2.py b/src/layers/domain/core/questionnaire/tests/test_questionnaire_v2.py deleted file mode 100644 index b993b9dc..00000000 --- a/src/layers/domain/core/questionnaire/tests/test_questionnaire_v2.py +++ /dev/null @@ -1,10 +0,0 @@ -from datetime import datetime, timezone - -from domain.core.questionnaire.v2 import Questionnaire - - -def test_questionnaire_response_created_on(): - questionnaire = Questionnaire(name="foo", version=1) - questionnaire.add_question(name="bar", answer_types={str}) - response = questionnaire.respond(responses=[{"bar": ["BAR"]}]) - assert response.created_on.date() == datetime.now(timezone.utc).date() diff --git a/src/layers/domain/core/questionnaire/tests/test_questionnaire_v3.py b/src/layers/domain/core/questionnaire/tests/test_questionnaire_v3.py deleted file mode 100644 index 20b24eb5..00000000 --- a/src/layers/domain/core/questionnaire/tests/test_questionnaire_v3.py +++ /dev/null @@ -1,106 +0,0 @@ -import json - -import pytest -from domain.core.questionnaire.v3 import ( - Questionnaire, - QuestionnaireResponseMissingValue, - QuestionnaireResponseValidationError, -) -from domain.core.timestamp import now -from pydantic import ValidationError - -VALID_SCHEMA = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "size": { - "type": "number", - "minimum": 1, - "maximum": 14, - }, - "colour": { - "type": "string", - "enum": ["black", "white"], - }, - "brand": {"type": "string"}, # not required - }, - "required": ["size", "colour"], - "additionalProperties": False, -} - -INVALID_SCHEMA = { - "$schema": "http://json-schema.org/draft-07/schema#", - "type": "object", - "properties": { - "a-field": { - "type": "not-a-type", - } - }, - "required": ["a-field"], -} - - -@pytest.mark.parametrize( - "data", - [ - {"size": 1, "colour": "black"}, - {"size": 14, "colour": "white"}, - {"size": 7, "colour": "white", "brand": "something"}, - ], -) -def test_schema_validation_pass(data): - questionnaire = Questionnaire( - name="foo", 
version="1", json_schema=json.dumps(VALID_SCHEMA) - ) - response = questionnaire.validate(data=data) - assert response.questionnaire_name == "foo" - assert response.questionnaire_version == "1" - assert response.data == data - assert response.created_on.date() == now().date() - - -@pytest.mark.parametrize( - "data", - [ - {"size": 1, "colour": "red"}, - {"size": "not a number", "colour": "white"}, - { - "size": 7, - "colour": "white", - "brand": "something", - "unknown_field": "foo", - }, - ], -) -def test_schema_validation_fail(data): - questionnaire = Questionnaire( - name="foo", version="1", json_schema=json.dumps(VALID_SCHEMA) - ) - with pytest.raises(QuestionnaireResponseValidationError): - questionnaire.validate(data=data) - - -@pytest.mark.parametrize( - "data", - [ - {"size": 1}, - {"colour": "white"}, - ], -) -def test_schema_validation_missing_fail(data): - questionnaire = Questionnaire( - name="foo", version="1", json_schema=json.dumps(VALID_SCHEMA) - ) - with pytest.raises(QuestionnaireResponseMissingValue): - questionnaire.validate(data=data) - - -@pytest.mark.parametrize( - "schema", - [ - INVALID_SCHEMA, - ], -) -def test_invalid_schema(schema): - with pytest.raises(ValidationError): - Questionnaire(name="name", version="123", json_schema=json.dumps(schema)) diff --git a/src/layers/domain/core/questionnaire/v1.py b/src/layers/domain/core/questionnaire/v1.py index f996c4d1..080a2ca0 100644 --- a/src/layers/domain/core/questionnaire/v1.py +++ b/src/layers/domain/core/questionnaire/v1.py @@ -1,334 +1,64 @@ -from datetime import date, datetime, time -from functools import partial -from types import FunctionType -from typing import Self +from datetime import datetime +from uuid import UUID, uuid4 -import orjson -from attr import dataclass, field +import jsonschema from domain.core.aggregate_root import AggregateRoot from domain.core.base import BaseModel -from domain.core.error import DuplicateError, InvalidResponseError -from domain.core.event import Event -from domain.core.validation import ENTITY_NAME_REGEX -from pydantic import Field, validator +from domain.core.timestamp import now +from pydantic import Field, Json, validator +REQUIRED = "required" -class CaseInsensitiveString(str): - pass +class QuestionnaireResponseValidationError(Exception): ... 
-class InvalidChoiceType(ValueError): - pass - -class TooManyAnswerTypes(ValueError): - pass - - -class NoSuchQuestionType(Exception): - pass - - -ALLOWED_ANSWER_TYPES = { - str, - int, - bool, - datetime, - float, - date, - time, - CaseInsensitiveString, -} - -ALLOWED_ANSWER_TYPES_LOOKUP = {_type.__name__: _type for _type in ALLOWED_ANSWER_TYPES} - - -@dataclass(kw_only=True, slots=True) -class _Question: - name: str - human_readable_name: str - answer_types: list[str] - mandatory: bool - multiple: bool - validation_rules: list[str] - choices: list - - -@dataclass(kw_only=True, slots=True) -class QuestionnaireInstanceEvent(Event): - """Event for when a Questionnaire has been responded to by an Entity""" - - entity_id: str - questionnaire_id: str - name: str - version: int - questions: dict[str, _Question] - _trust: bool = field(alias="_trust", default=False) - - -@dataclass(kw_only=True, slots=True) -class QuestionnaireResponseAddedEvent(Event): - entity_id: str - questionnaire_id: str - questionnaire_response_index: int - responses: list[dict[str, list]] - _trust: bool = field(alias="_trust", default=False) - - -@dataclass(kw_only=True, slots=True) -class QuestionnaireResponseUpdatedEvent(Event): - entity_id: str - questionnaire_id: str - questionnaire_response_index: int - responses: list[dict[str, list]] - - -@dataclass(kw_only=True, slots=True) -class QuestionnaireResponseDeletedEvent(Event): - entity_id: str - questionnaire_id: str - questionnaire_response_index: int - - -class Question(BaseModel): - """ - A single Questionnaire Question - """ - - name: str = Field(regex=ENTITY_NAME_REGEX) - human_readable_name: str - answer_types: set[type] - mandatory: bool - multiple: bool - validation_rules: set[FunctionType] - choices: set - - @validator("answer_types") - def validate_question_type(cls, answer_types): - invalid_types = { - item for item in answer_types if item not in ALLOWED_ANSWER_TYPES - } - if invalid_types: - raise ValueError(f"Answer types {invalid_types} are not allowed.") - return answer_types - - def dict(self, **kwargs): - _data = self.json(**kwargs) - return orjson.loads(_data) - - -def choice_type_matches_answer_types(choice, answer_types: set): - """ - Choice either exactly matches the answer type, or the - answer type is a subclass of the choice type - """ - return any( - isinstance(choice, answer_type) - or (isinstance(choice, str) and answer_type is CaseInsensitiveString) - for answer_type in answer_types - ) +class QuestionnaireResponseMissingValue(Exception): ... class Questionnaire(AggregateRoot): - """ - A Questionnaire represents a collection of Questions, in a specific order. - """ - - name: str = Field(regex=ENTITY_NAME_REGEX) + name: str version: str - questions: dict[str, Question] = Field(default_factory=dict) - - @property - def id(self): - return f"{self.name}/{self.version}" - - def __contains__(self, question_name: str) -> bool: - """ - Returns true if the question specified exists within the questionnaire - """ - return question_name in self.questions - - def __hash__(self): - question_names = ".".join(self.questions) - return hash(f"{self.id}.{question_names}") - - def __eq__(self, other: Self): - return self.id == other.id - - def add_question( - self, - name: str, - human_readable_name: str = "", - answer_types: set = None, - mandatory: bool = False, - multiple: bool = False, - validation_rules: set[FunctionType] = None, - choices: set = None, - ): - """ - Adds a new question to the questionnaire. 
- """ - validation_rules = validation_rules or set() - choices = choices or set() - answer_types = answer_types or {str} - - if name in self.questions: - raise DuplicateError(f"Question '{name}' already exists.") - # Validate each choice is one of the allowed answer types - if choices and not all( - choice_type_matches_answer_types(choice=choice, answer_types=answer_types) - for choice in choices - ): - raise InvalidChoiceType( - f"Question '{name}': Choices ({choices}) must be of the same type as the answer types: {answer_types}." + json_schema: Json + + @validator("json_schema") + def validate_json_schema(cls, json_schema): + try: + jsonschema.Draft7Validator.check_schema(json_schema) + except jsonschema.SchemaError as err: + raise ValueError(err.message) + return json_schema + + def validate(self, data) -> "QuestionnaireResponse": + try: + jsonschema.validate(instance=data, schema=self.json_schema) + except jsonschema.ValidationError as error: + *_, variable_name = error.schema_path + exception_type = ( + QuestionnaireResponseMissingValue + if variable_name == REQUIRED + else QuestionnaireResponseValidationError ) - - if choices and len(answer_types) > 1: - raise TooManyAnswerTypes( - f"Question '{name}': There must only be one answer type (provided answer types: '{answer_types}') " - f"if choices are specified (provided choices: '{choices}')" + raise exception_type( + f"Failed to validate data against '{self.id}': {error.message}" ) - if answer_types == {CaseInsensitiveString}: - choices = set(map(str.lower, choices)) - - question = Question( - name=name, - human_readable_name=human_readable_name, - answer_types=answer_types, - mandatory=mandatory, - multiple=multiple, - validation_rules=validation_rules, - choices=choices, + return QuestionnaireResponse( + questionnaire_name=self.name, questionnaire_version=self.version, data=data ) - self.questions[name] = question - return question @property - def mandatory_questions(self) -> list[Question]: - return [q for q in self.questions.values() if q.mandatory] - - def respond(self, responses: list[dict[str, list]]): - return QuestionnaireResponse(questionnaire=self, responses=responses) + def id(self) -> str: + return f"{self.name}/{self.version}" class QuestionnaireResponse(BaseModel): - """ - Validates questionnaire responses against questionnaire questions - Responses is of the form: - [{"question_name": ["answer_1", ..., "answer_n"]}] - - where n > 1 if Question.multiple is true for the Question in Questionnaire - with the matching Question.name - """ - - questionnaire: Questionnaire - responses: list[dict[str, list]] - - @validator("responses") - def validate_mandatory_questions_are_answered( - cls, responses: list[dict[str, list]], values: dict[str, Questionnaire] - ): - questionnaire = values.get("questionnaire") - validate_mandatory_questions_answered( - questionnaire_name=questionnaire.name, - mandatory_questions=( - [] if questionnaire is None else questionnaire.mandatory_questions - ), - answered_question_names=[ - question_name for (question_name, _), in map(dict.items, responses) - ], - ) - return responses - - @validator("responses", each_item=True) - def validate_responses( - cls, response: dict[str, list], values: dict[str, Questionnaire] - ): - questionnaire = values.get("questionnaire") - ((question_name, answers),) = response.items() - if questionnaire is not None: - questionnaire_name = questionnaire.name - question = questionnaire.questions.get(question_name) - if question is None: - raise InvalidResponseError( - 
f"Unexpected answer for the question '{question_name}'. The questionnaire '{questionnaire_name}' does not contain this question." - ) - validate_response_against_question(question=question, answers=answers) - return response - - def get_response(self, question_name) -> list: - for response in self.responses: - value = response.get(question_name) - if value is not None: - return value - return [] - + id: UUID = Field(default_factory=uuid4) + questionnaire_name: str + questionnaire_version: str + data: dict + created_on: datetime = Field(default_factory=now) -def validate_mandatory_questions_answered( - questionnaire_name: str, - mandatory_questions: list[Question], - answered_question_names: list[str], -): - for question in mandatory_questions: - if question.name not in answered_question_names: - raise InvalidResponseError( - f"Mandatory question '{question.name}' in questionnaire '{questionnaire_name}' has not been answered." - ) - return mandatory_questions - - -def validate_answer_types(answer, answer_types, question_name): - if not choice_type_matches_answer_types(choice=answer, answer_types=answer_types): - raise ValueError( - f"Question '{question_name}' expects type {answer_types}. Response '{answer}' is of type '{type(answer)}'" - ) - - -def validate_choices(answer, choices, question_name, answer_types): - if answer_types == {CaseInsensitiveString}: - answer = answer.lower() - - if choices and answer not in choices: - raise ValueError( - f"Question '{question_name}' expects choices {choices}. Response given: {answer}" - ) - - -def named_partial(fn, *args, **kwargs): - _fn = partial(fn, *args, **kwargs) - _fn.__name__ = fn.__name__ - return _fn - - -def validate_response_against_question(answers: list, question: Question): - if not question.multiple and len(answers) > 1: - raise InvalidResponseError( - f"Question '{question.name}' does not allow multiple responses. Response given: {answers}." - ) - errors = [] - answer_types_rule = named_partial( - validate_answer_types, - answer_types=question.answer_types, - question_name=question.name, - ) - choices_rule = named_partial( - validate_choices, - choices=question.choices, - question_name=question.name, - answer_types=question.answer_types, - ) - - for answer in answers: - for validation_rule in question.validation_rules.union( - [answer_types_rule, choices_rule] - ): - try: - validation_rule(answer) - except ValueError as e: - errors.append( - f"Question '{question.name}' rule '{validation_rule.__name__}' failed validation for response '{answer}' with error: {e}." 
- ) - if errors: - raise InvalidResponseError("\n".join(errors)) - return answers + @property + def questionnaire_id(self) -> str: + return f"{self.questionnaire_name}/{self.questionnaire_version}" diff --git a/src/layers/domain/core/questionnaire/v2.py b/src/layers/domain/core/questionnaire/v2.py deleted file mode 100644 index abcec73d..00000000 --- a/src/layers/domain/core/questionnaire/v2.py +++ /dev/null @@ -1,99 +0,0 @@ -from datetime import datetime -from typing import Optional - -from attr import dataclass -from domain.core.enum import Status -from domain.core.error import InvalidResponseError -from domain.core.event import Event -from domain.core.timestamp import now -from pydantic import BaseModel, Field, validator - -from .v1 import Questionnaire as QuestionnaireV1 -from .v1 import ( - validate_mandatory_questions_answered, - validate_response_against_question, -) - - -@dataclass(kw_only=True, slots=True) -class QuestionnaireResponseUpdatedEvent(Event): - entity_id: str - entity_keys: list - entity_tags: list - questionnaire_responses: list["QuestionnaireResponse"] - updated_on: Optional[str] = None - - -class Questionnaire(QuestionnaireV1): - def respond(self, responses: list[dict[str, list]]): - return QuestionnaireResponse( - questionnaire=self, questionnaire_id=self.id, answers=responses - ) - - -class QuestionnaireResponse(BaseModel): - """ - Validates questionnaire responses against questionnaire questions - Answers are of the form: - [{"question_name": ["answer_1", ..., "answer_n"]}] - - where n > 1 if Question.multiple is true for the Question in Questionnaire - with the matching Question.name - """ - - questionnaire: Optional[Questionnaire] = Field(exclude=True, default=None) - questionnaire_id: str - answers: list[dict[str, list]] - created_on: datetime = Field(default_factory=now) - status: Status = Field(default=Status.ACTIVE) - - @validator("answers") - def validate_mandatory_questions_are_answered( - cls, answers: list[dict[str, list]], values: dict[str, Questionnaire] - ): - questionnaire = values.get("questionnaire") - if questionnaire is None: - return answers - validate_mandatory_questions_answered( - questionnaire_name=questionnaire.name, - mandatory_questions=( - [] if questionnaire is None else questionnaire.mandatory_questions - ), - answered_question_names=[ - question_name for (question_name, _), in map(dict.items, answers) - ], - ) - return answers - - @validator("answers", each_item=True) - def validate_responses( - cls, answer: dict[str, list], values: dict[str, Questionnaire] - ): - questionnaire = values.get("questionnaire") - if questionnaire is None: - return answer - ((question_name, answers),) = answer.items() - if questionnaire is not None: - questionnaire_name = questionnaire.name - question = questionnaire.questions.get(question_name) - if question is None: - raise InvalidResponseError( - f"Unexpected answer for the question '{question_name}'. The questionnaire '{questionnaire_name}' does not contain this question." 
- ) - validate_response_against_question(question=question, answers=answers) - return answer - - def get_response(self, question_name) -> list: - for response in self.answers: - value = response.get(question_name) - if value is not None: - return value - return [] - - @property - def flat_answers(self) -> dict[str, any]: - return { - question: (answer[0] if len(answer) == 1 else answer) - for question_answer in self.answers - for question, answer in question_answer.items() - } diff --git a/src/layers/domain/core/questionnaire/v3.py b/src/layers/domain/core/questionnaire/v3.py deleted file mode 100644 index 080a2ca0..00000000 --- a/src/layers/domain/core/questionnaire/v3.py +++ /dev/null @@ -1,64 +0,0 @@ -from datetime import datetime -from uuid import UUID, uuid4 - -import jsonschema -from domain.core.aggregate_root import AggregateRoot -from domain.core.base import BaseModel -from domain.core.timestamp import now -from pydantic import Field, Json, validator - -REQUIRED = "required" - - -class QuestionnaireResponseValidationError(Exception): ... - - -class QuestionnaireResponseMissingValue(Exception): ... - - -class Questionnaire(AggregateRoot): - name: str - version: str - json_schema: Json - - @validator("json_schema") - def validate_json_schema(cls, json_schema): - try: - jsonschema.Draft7Validator.check_schema(json_schema) - except jsonschema.SchemaError as err: - raise ValueError(err.message) - return json_schema - - def validate(self, data) -> "QuestionnaireResponse": - try: - jsonschema.validate(instance=data, schema=self.json_schema) - except jsonschema.ValidationError as error: - *_, variable_name = error.schema_path - exception_type = ( - QuestionnaireResponseMissingValue - if variable_name == REQUIRED - else QuestionnaireResponseValidationError - ) - raise exception_type( - f"Failed to validate data against '{self.id}': {error.message}" - ) - - return QuestionnaireResponse( - questionnaire_name=self.name, questionnaire_version=self.version, data=data - ) - - @property - def id(self) -> str: - return f"{self.name}/{self.version}" - - -class QuestionnaireResponse(BaseModel): - id: UUID = Field(default_factory=uuid4) - questionnaire_name: str - questionnaire_version: str - data: dict - created_on: datetime = Field(default_factory=now) - - @property - def questionnaire_id(self) -> str: - return f"{self.questionnaire_name}/{self.questionnaire_version}" diff --git a/src/layers/domain/core/root/tests/test_root_v1.py b/src/layers/domain/core/root/tests/test_root_v1.py index 7eac5c9a..d4d16323 100644 --- a/src/layers/domain/core/root/tests/test_root_v1.py +++ b/src/layers/domain/core/root/tests/test_root_v1.py @@ -1,7 +1,7 @@ -from domain.core.ods_organisation.v1 import OdsOrganisation as OdsOrganisationV1 +from domain.core.ods_organisation import OdsOrganisation from domain.core.root import Root def test_create_ods_organisation(): org = Root.create_ods_organisation(ods_code="ABC") - assert isinstance(org, OdsOrganisationV1) + assert isinstance(org, OdsOrganisation) diff --git a/src/layers/domain/core/root/tests/test_root_v2.py b/src/layers/domain/core/root/tests/test_root_v2.py deleted file mode 100644 index 6cb89f28..00000000 --- a/src/layers/domain/core/root/tests/test_root_v2.py +++ /dev/null @@ -1,7 +0,0 @@ -from domain.core.ods_organisation.v2 import OdsOrganisation as OdsOrganisationV2 -from domain.core.root.v2 import Root - - -def test_create_ods_organisation(): - org = Root.create_ods_organisation(ods_code="ABC") - assert isinstance(org, OdsOrganisationV2) diff --git 
a/src/layers/domain/core/root/tests/test_root_v3.py b/src/layers/domain/core/root/tests/test_root_v3.py deleted file mode 100644 index ed45643a..00000000 --- a/src/layers/domain/core/root/tests/test_root_v3.py +++ /dev/null @@ -1,7 +0,0 @@ -from domain.core.ods_organisation.v3 import OdsOrganisation as OdsOrganisationV3 -from domain.core.root.v3 import Root - - -def test_create_ods_organisation(): - org = Root.create_ods_organisation(ods_code="ABC") - assert isinstance(org, OdsOrganisationV3) diff --git a/src/layers/domain/core/root/v2.py b/src/layers/domain/core/root/v2.py deleted file mode 100644 index 2959b062..00000000 --- a/src/layers/domain/core/root/v2.py +++ /dev/null @@ -1,13 +0,0 @@ -from domain.core.ods_organisation.v2 import OdsOrganisation - - -class Root: - """ - Domain entities that have no parent are created by this Root entity, in - order to preserve the rule that all Aggregate Roots are created by other - Aggregate Roots. - """ - - @staticmethod - def create_ods_organisation(ods_code: str) -> OdsOrganisation: - return OdsOrganisation(ods_code=ods_code) diff --git a/src/layers/domain/core/root/v3.py b/src/layers/domain/core/root/v3.py deleted file mode 100644 index 67b9fe7f..00000000 --- a/src/layers/domain/core/root/v3.py +++ /dev/null @@ -1,13 +0,0 @@ -from domain.core.ods_organisation.v3 import OdsOrganisation - - -class Root: - """ - Domain entities that have no parent are created by this Root entity, in - order to preserve the rule that all Aggregate Roots are created by other - Aggregate Roots. - """ - - @staticmethod - def create_ods_organisation(ods_code: str) -> OdsOrganisation: - return OdsOrganisation(ods_code=ods_code) diff --git a/src/layers/domain/repository/cpm_product_repository/__init__.py b/src/layers/domain/repository/cpm_product_repository/__init__.py new file mode 100644 index 00000000..e0d08e67 --- /dev/null +++ b/src/layers/domain/repository/cpm_product_repository/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa diff --git a/src/layers/domain/repository/cpm_product_repository/tests/v3/conftest.py b/src/layers/domain/repository/cpm_product_repository/tests/v1/conftest.py similarity index 85% rename from src/layers/domain/repository/cpm_product_repository/tests/v3/conftest.py rename to src/layers/domain/repository/cpm_product_repository/tests/v1/conftest.py index 7e51dc2c..25362a5e 100644 --- a/src/layers/domain/repository/cpm_product_repository/tests/v3/conftest.py +++ b/src/layers/domain/repository/cpm_product_repository/tests/v1/conftest.py @@ -1,8 +1,8 @@ from collections.abc import Generator import pytest -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository from domain.repository.device_repository.tests.utils import repository_fixture diff --git a/src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_keys_v3.py b/src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_keys_v1.py similarity index 90% rename from src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_keys_v3.py rename to src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_keys_v1.py index c6b34021..d2c01573 100644 --- a/src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_keys_v3.py +++ 
b/src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_keys_v1.py @@ -1,8 +1,8 @@ import pytest -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.product_key.v1 import ProductKey, ProductKeyType -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository +from domain.core.cpm_product import CpmProduct +from domain.core.product_key import ProductKey, ProductKeyType +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository from domain.repository.errors import AlreadyExistsError, ItemNotFound from test_helpers.sample_data import CPM_PRODUCT_TEAM_NO_ID diff --git a/src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_v3.py b/src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_v1.py similarity index 95% rename from src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_v3.py rename to src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_v1.py index c85cd299..6af91f50 100644 --- a/src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_v3.py +++ b/src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_v1.py @@ -1,10 +1,10 @@ import pytest -from domain.core.cpm_product.v1 import CpmProduct -from domain.core.cpm_system_id.v1 import ProductId -from domain.core.root.v3 import Root -from domain.repository.cpm_product_repository.v3 import CpmProductRepository +from domain.core.cpm_product import CpmProduct +from domain.core.cpm_system_id import ProductId +from domain.core.root import Root +from domain.repository.cpm_product_repository import CpmProductRepository from domain.repository.errors import AlreadyExistsError, ItemNotFound -from domain.repository.keys.v3 import TableKey +from domain.repository.keys import TableKey from domain.repository.marshall import marshall_value from event.aws.client import dynamodb_client diff --git a/src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_v3_delete.py b/src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_v1_delete.py similarity index 92% rename from src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_v3_delete.py rename to src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_v1_delete.py index cb27b5d2..dfb2b60c 100644 --- a/src/layers/domain/repository/cpm_product_repository/tests/v3/test_cpm_product_repository_v3_delete.py +++ b/src/layers/domain/repository/cpm_product_repository/tests/v1/test_cpm_product_repository_v1_delete.py @@ -1,7 +1,7 @@ import pytest -from domain.core.cpm_product.v1 import CpmProduct +from domain.core.cpm_product import CpmProduct from domain.core.error import NotFoundError -from domain.repository.cpm_product_repository.v3 import CpmProductRepository +from domain.repository.cpm_product_repository import CpmProductRepository from domain.repository.errors import ItemNotFound diff --git a/src/layers/domain/repository/cpm_product_repository/v3.py b/src/layers/domain/repository/cpm_product_repository/v1.py similarity index 94% rename from src/layers/domain/repository/cpm_product_repository/v3.py rename to src/layers/domain/repository/cpm_product_repository/v1.py index 8bb7882d..6d2a91d3 100644 --- 
a/src/layers/domain/repository/cpm_product_repository/v3.py +++ b/src/layers/domain/repository/cpm_product_repository/v1.py @@ -1,13 +1,13 @@ from attr import asdict -from domain.core.cpm_product.v1 import ( +from domain.core.cpm_product import ( CpmProduct, CpmProductCreatedEvent, CpmProductDeletedEvent, CpmProductKeyAddedEvent, ) -from domain.core.product_key.v1 import ProductKey -from domain.repository.keys.v3 import TableKey -from domain.repository.repository.v3 import Repository +from domain.core.product_key import ProductKey +from domain.repository.keys import TableKey +from domain.repository.repository import Repository class CpmProductRepository(Repository[CpmProduct]): diff --git a/src/layers/domain/repository/cpm_system_id_repository.py b/src/layers/domain/repository/cpm_system_id_repository.py index 35305d7c..f83d22eb 100644 --- a/src/layers/domain/repository/cpm_system_id_repository.py +++ b/src/layers/domain/repository/cpm_system_id_repository.py @@ -1,6 +1,6 @@ from domain.core.cpm_system_id import CpmSystemId -from .keys.v3 import TableKey +from .keys import TableKey from .marshall import marshall, marshall_value, unmarshall from .repository import Repository @@ -9,7 +9,11 @@ class CpmSystemIdRepository[T](Repository[T]): def __init__(self, table_name: str, model: type[T], dynamodb_client): super().__init__( - table_name=table_name, model=model, dynamodb_client=dynamodb_client + table_name=table_name, + model=model, + dynamodb_client=dynamodb_client, + parent_table_keys=None, + table_key=None, ) def read(self) -> T: diff --git a/src/layers/domain/repository/device_reference_data_repository/tests/conftest.py b/src/layers/domain/repository/device_reference_data_repository/tests/conftest.py index 33d0d312..5c4442b6 100644 --- a/src/layers/domain/repository/device_reference_data_repository/tests/conftest.py +++ b/src/layers/domain/repository/device_reference_data_repository/tests/conftest.py @@ -1,8 +1,8 @@ from collections.abc import Generator import pytest -from domain.core.root.v3 import Root -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.root import Root +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) from domain.repository.device_repository.tests.utils import repository_fixture diff --git a/src/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py b/src/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py index 495923bf..10d5057d 100644 --- a/src/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py +++ b/src/layers/domain/repository/device_reference_data_repository/tests/test_device_reference_data_repository_v1.py @@ -1,6 +1,6 @@ import pytest -from domain.core.device_reference_data.v1 import DeviceReferenceData -from domain.repository.device_reference_data_repository.v1 import ( +from domain.core.device_reference_data import DeviceReferenceData +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) from domain.repository.errors import AlreadyExistsError, ItemNotFound diff --git a/src/layers/domain/repository/device_reference_data_repository/v1.py b/src/layers/domain/repository/device_reference_data_repository/v1.py index 9d9d1d0d..cf53e3be 100644 --- a/src/layers/domain/repository/device_reference_data_repository/v1.py +++ 
b/src/layers/domain/repository/device_reference_data_repository/v1.py @@ -1,14 +1,13 @@ from enum import StrEnum from attr import asdict -from domain.core.device_reference_data.v1 import ( +from domain.core.device_reference_data import ( DeviceReferenceData, DeviceReferenceDataCreatedEvent, QuestionnaireResponseUpdatedEvent, ) -from domain.repository.device_repository.v2 import create_device_index -from domain.repository.keys.v3 import TableKey -from domain.repository.repository.v3 import Repository +from domain.repository.keys import TableKey +from domain.repository.repository import Repository from domain.repository.transaction import TransactItem @@ -17,17 +16,6 @@ class QueryType(StrEnum): BEGINS_WITH = "begins_with({}, {})" -def create_device_reference_data(table_name: str, id: str, data: dict, root: bool): - return create_device_index( - table_name=table_name, - pk_key_parts=(id,), - pk_table_key=TableKey.DEVICE_REFERENCE_DATA, - sk_table_key=TableKey.DEVICE_REFERENCE_DATA, - device_data=data, - root=root, - ) - - class DeviceReferenceDataRepository(Repository[DeviceReferenceData]): def __init__(self, table_name, dynamodb_client): super().__init__( diff --git a/src/layers/domain/repository/device_repository/__init__.py b/src/layers/domain/repository/device_repository/__init__.py index 8f55f201..e1ddb07c 100644 --- a/src/layers/domain/repository/device_repository/__init__.py +++ b/src/layers/domain/repository/device_repository/__init__.py @@ -1 +1 @@ -from .v2 import * # noqa: F403, F401 +from .v1 import * # noqa: F403, F401 diff --git a/src/layers/domain/repository/device_repository/tests/utils.py b/src/layers/domain/repository/device_repository/tests/utils.py index da36fd87..bcf20749 100644 --- a/src/layers/domain/repository/device_repository/tests/utils.py +++ b/src/layers/domain/repository/device_repository/tests/utils.py @@ -1,7 +1,7 @@ from typing import Generator from domain.core.device import Device -from domain.repository.device_reference_data_repository.v1 import ( +from domain.repository.device_reference_data_repository import ( DeviceReferenceDataRepository, ) from domain.repository.device_repository import DeviceRepository diff --git a/src/layers/domain/repository/device_repository/tests/v3/conftest.py b/src/layers/domain/repository/device_repository/tests/v1/conftest.py similarity index 90% rename from src/layers/domain/repository/device_repository/tests/v3/conftest.py rename to src/layers/domain/repository/device_repository/tests/v1/conftest.py index d0089563..a206147f 100644 --- a/src/layers/domain/repository/device_repository/tests/v3/conftest.py +++ b/src/layers/domain/repository/device_repository/tests/v1/conftest.py @@ -1,11 +1,11 @@ from typing import Generator import pytest -from domain.core.device.v3 import Device -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.root.v3 import Root +from domain.core.device import Device +from domain.core.device_key import DeviceKeyType +from domain.core.root import Root +from domain.repository.device_repository import DeviceRepository from domain.repository.device_repository.tests.utils import repository_fixture -from domain.repository.device_repository.v3 import DeviceRepository @pytest.fixture diff --git a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_keys_v3.py b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_keys_v1.py similarity index 93% rename from src/layers/domain/repository/device_repository/tests/v3/test_device_repository_keys_v3.py 
rename to src/layers/domain/repository/device_repository/tests/v1/test_device_repository_keys_v1.py index 93e15289..cc2fed61 100644 --- a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_keys_v3.py +++ b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_keys_v1.py @@ -1,7 +1,7 @@ import pytest -from domain.core.device.v3 import Device -from domain.core.device_key.v2 import DeviceKey, DeviceKeyType -from domain.repository.device_repository.v3 import DeviceRepository +from domain.core.device import Device +from domain.core.device_key import DeviceKey, DeviceKeyType +from domain.repository.device_repository import DeviceRepository @pytest.mark.integration diff --git a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_questionnaire_responses_v3.py b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_questionnaire_responses_v1.py similarity index 93% rename from src/layers/domain/repository/device_repository/tests/v3/test_device_repository_questionnaire_responses_v3.py rename to src/layers/domain/repository/device_repository/tests/v1/test_device_repository_questionnaire_responses_v1.py index 790ae555..8efc3a11 100644 --- a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_questionnaire_responses_v3.py +++ b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_questionnaire_responses_v1.py @@ -1,10 +1,10 @@ import json import pytest -from domain.core.device.v3 import Device -from domain.core.questionnaire.v3 import Questionnaire -from domain.core.root.v3 import Root -from domain.repository.device_repository.v3 import DeviceRepository +from domain.core.device import Device +from domain.core.questionnaire import Questionnaire +from domain.core.root import Root +from domain.repository.device_repository import DeviceRepository VALID_SHOE_SCHEMA = { "$schema": "http://json-schema.org/draft-07/schema#", diff --git a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_tags_v3.py b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_tags_v1.py similarity index 98% rename from src/layers/domain/repository/device_repository/tests/v3/test_device_repository_tags_v3.py rename to src/layers/domain/repository/device_repository/tests/v1/test_device_repository_tags_v1.py index 37e0994d..4eace642 100644 --- a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_tags_v3.py +++ b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_tags_v1.py @@ -1,9 +1,9 @@ from collections import defaultdict import pytest -from domain.core.device.v3 import Device, DeviceTag +from domain.core.device import Device, DeviceTag from domain.core.enum import Status -from domain.repository.device_repository.v3 import ( +from domain.repository.device_repository import ( CannotDropMandatoryFields, DeviceRepository, ) diff --git a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_v3.py b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py similarity index 90% rename from src/layers/domain/repository/device_repository/tests/v3/test_device_repository_v3.py rename to src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py index d7c641bb..a26be3a1 100644 --- a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_v3.py +++ 
b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py @@ -2,16 +2,13 @@ import pytest from attr import asdict -from domain.core.device.v3 import Device as DeviceV3 -from domain.core.device.v3 import DeviceCreatedEvent -from domain.core.device_key.v2 import DeviceKeyType +from domain.core.device import Device, DeviceCreatedEvent +from domain.core.device_key import DeviceKeyType from domain.core.enum import Status -from domain.core.root.v3 import Root +from domain.core.root import Root from domain.repository.compression import pkl_loads_gzip -from domain.repository.device_repository.v3 import ( - DeviceRepository as DeviceRepositoryV3, -) -from domain.repository.device_repository.v3 import ( +from domain.repository.device_repository import ( + DeviceRepository, InactiveDeviceRepository, compress_device_fields, ) @@ -21,7 +18,7 @@ @pytest.fixture -def device() -> DeviceV3: +def device() -> Device: org = Root.create_ods_organisation(ods_code="AB123") product_team = org.create_product_team(name="Product Team") product = product_team.create_cpm_product(name="Product") @@ -31,7 +28,7 @@ def device() -> DeviceV3: @pytest.fixture -def device_with_tag() -> DeviceV3: +def device_with_tag() -> Device: org = Root.create_ods_organisation(ods_code="AB123") product_team = org.create_product_team(name="Product Team") product = product_team.create_cpm_product(name="Product") @@ -44,7 +41,7 @@ def device_with_tag() -> DeviceV3: @pytest.fixture -def another_device_with_same_key() -> DeviceV3: +def another_device_with_same_key() -> Device: org = Root.create_ods_organisation(ods_code="AB123") product_team = org.create_product_team(name="Product Team") product = product_team.create_cpm_product(name="Product") @@ -54,9 +51,7 @@ def another_device_with_same_key() -> DeviceV3: @pytest.mark.integration -def test__device_repository_read_by_id( - device: DeviceV3, repository: DeviceRepositoryV3 -): +def test__device_repository_read_by_id(device: Device, repository: DeviceRepository): repository.write(device) device_from_db = repository.read( product_team_id=device.product_team_id, @@ -67,9 +62,7 @@ def test__device_repository_read_by_id( @pytest.mark.integration -def test__device_repository_read_by_key( - device: DeviceV3, repository: DeviceRepositoryV3 -): +def test__device_repository_read_by_key(device: Device, repository: DeviceRepository): repository.write(device) device_from_db = repository.read( product_team_id=device.product_team_id, @@ -80,7 +73,7 @@ def test__device_repository_read_by_key( @pytest.mark.integration -def test__device_repository_already_exists(device, repository: DeviceRepositoryV3): +def test__device_repository_already_exists(device, repository: DeviceRepository): repository.write(device) with pytest.raises(AlreadyExistsError): repository.write(device) @@ -88,7 +81,7 @@ def test__device_repository_already_exists(device, repository: DeviceRepositoryV @pytest.mark.integration def test__device_repository_key_already_exists_on_another_device( - device, another_device_with_same_key, repository: DeviceRepositoryV3 + device, another_device_with_same_key, repository: DeviceRepository ): repository.write(device) with pytest.raises(AlreadyExistsError): @@ -96,7 +89,7 @@ def test__device_repository_key_already_exists_on_another_device( def test__device_repository_key_already_exists_on_another_device( - device, another_device_with_same_key, repository: DeviceRepositoryV3 + device, another_device_with_same_key, repository: DeviceRepository ): repository.write(device) with 
pytest.raises(AlreadyExistsError): @@ -104,12 +97,12 @@ def test__device_repository_key_already_exists_on_another_device( @pytest.mark.integration -def test__device_repository__device_does_not_exist(repository: DeviceRepositoryV3): +def test__device_repository__device_does_not_exist(repository: DeviceRepository): with pytest.raises(ItemNotFound): repository.read(product_team_id="foo", product_id="bar", id="123") -def test__device_repository_local(device: DeviceV3, repository: DeviceRepositoryV3): +def test__device_repository_local(device: Device, repository: DeviceRepository): repository.write(device) device_from_db = repository.read( product_team_id=device.product_team_id, @@ -120,14 +113,14 @@ def test__device_repository_local(device: DeviceV3, repository: DeviceRepository def test__device_repository__device_does_not_exist_local( - repository: DeviceRepositoryV3, + repository: DeviceRepository, ): with pytest.raises(ItemNotFound): repository.read(product_team_id="foo", product_id="bar", id="123") @pytest.mark.integration -def test__device_repository__update(device: DeviceV3, repository: DeviceRepositoryV3): +def test__device_repository__update(device: Device, repository: DeviceRepository): repository.write(device) # Retrieve the model and treat this as the initial state @@ -154,7 +147,7 @@ def test__device_repository__update(device: DeviceV3, repository: DeviceReposito @pytest.mark.integration def test__device_repository__delete( - device_with_tag: DeviceV3, repository: DeviceRepositoryV3 + device_with_tag: Device, repository: DeviceRepository ): repository.write(device_with_tag) @@ -189,7 +182,7 @@ def test__device_repository__delete( @pytest.mark.integration def test__device_repository__can_delete_second_device_with_same_key( - repository: DeviceRepositoryV3, + repository: DeviceRepository, ): org = Root.create_ods_organisation(ods_code="AAA") product_team = org.create_product_team(name="MyTeam") @@ -244,7 +237,7 @@ def test__device_repository__can_delete_second_device_with_same_key( @pytest.mark.integration -def test__device_repository__add_key(device: DeviceV3, repository: DeviceRepositoryV3): +def test__device_repository__add_key(device: Device, repository: DeviceRepository): repository.write(device) # Retrieve the model and treat this as the initial state diff --git a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_v3_compression.py b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1_compression.py similarity index 91% rename from src/layers/domain/repository/device_repository/tests/v3/test_device_repository_v3_compression.py rename to src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1_compression.py index cb3cb3f8..c08b4208 100644 --- a/src/layers/domain/repository/device_repository/tests/v3/test_device_repository_v3_compression.py +++ b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1_compression.py @@ -1,6 +1,6 @@ -from domain.core.device.v3 import Device +from domain.core.device import Device from domain.repository.compression import pkl_dumps_gzip -from domain.repository.device_repository.v3 import compress_device_fields +from domain.repository.device_repository import compress_device_fields def test_compress_device_fields_default_is_tags(device: Device): diff --git a/src/layers/domain/repository/device_repository/tests/v2/conftest.py b/src/layers/domain/repository/device_repository/tests/v2/conftest.py deleted file mode 100644 index 31ca001d..00000000 
--- a/src/layers/domain/repository/device_repository/tests/v2/conftest.py +++ /dev/null @@ -1,63 +0,0 @@ -from typing import Generator - -import pytest -from domain.core.device.v2 import Device, DeviceType -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.root.v2 import Root -from domain.repository.device_repository.tests.utils import repository_fixture -from domain.repository.device_repository.v2 import DeviceRepository - - -@pytest.fixture -def repository(request) -> Generator[DeviceRepository, None, None]: - yield from repository_fixture( - is_integration_test=request.node.get_closest_marker("integration"), - repository_class=DeviceRepository, - ) - - -@pytest.fixture -def device() -> Device: - org = Root.create_ods_organisation(ods_code="AB123") - product_team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="Team" - ) - device = product_team.create_device(name="Device-1", device_type=DeviceType.PRODUCT) - device.add_tag(abc="123") - device.add_tag(bar="foo") - device.add_tag(mixed_case="AbC") - device.add_key(key_value="P.WWW-XXX", key_type=DeviceKeyType.PRODUCT_ID) - return device - - -@pytest.fixture -def device_with_asid() -> Device: - org = Root.create_ods_organisation(ods_code="AB123") - product_team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="Team" - ) - device = product_team.create_device(name="Device-1", device_type=DeviceType.PRODUCT) - device.add_tag(foo="bar", abc="123") - device.add_tag(bar="foo") - device.add_key( - key_value="ABC:1234567890", key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID - ) - device.add_key(key_value="P.WWW-CCC", key_type=DeviceKeyType.PRODUCT_ID) - return device - - -@pytest.fixture -def device_with_mhs_id() -> Device: - org = Root.create_ods_organisation(ods_code="AB123") - team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="Team" - ) - device = team.create_device(name="Device-2", device_type=DeviceType.ENDPOINT) - device.add_key(key_value="P.WWW-YYY", key_type=DeviceKeyType.PRODUCT_ID) - device.add_key( - key_value="ABC:DEF-444:4444444444", - key_type=DeviceKeyType.MESSAGE_HANDLING_SYSTEM_ID, - ) - device.add_tag(abc="123") - device.add_tag(bar="foo") - return device diff --git a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_keys_v2.py b/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_keys_v2.py deleted file mode 100644 index 8229e702..00000000 --- a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_keys_v2.py +++ /dev/null @@ -1,54 +0,0 @@ -import pytest -from domain.core.device.v2 import Device -from domain.core.device_key.v2 import DeviceKey, DeviceKeyType -from domain.repository.device_repository.v2 import DeviceRepository - - -@pytest.mark.integration -def test__device_repository__add_two_keys(device: Device, repository: DeviceRepository): - repository.write(device) - second_device = repository.read(device.id) - second_device.add_key( - key_value="ABC:1234567890", key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID - ) - repository.write(second_device) - - assert repository.read(device.id).keys == [ - DeviceKey(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.WWW-XXX"), - DeviceKey( - key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, key_value="ABC:1234567890" - ), - ] - assert repository.read(DeviceKeyType.PRODUCT_ID, "P.WWW-XXX").keys == [ - DeviceKey(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.WWW-XXX"), - DeviceKey( - 
key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, key_value="ABC:1234567890" - ), - ] - assert repository.read( - DeviceKeyType.ACCREDITED_SYSTEM_ID, "ABC:1234567890" - ).keys == [ - DeviceKey(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.WWW-XXX"), - DeviceKey( - key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, key_value="ABC:1234567890" - ), - ] - - -@pytest.mark.integration -def test__device_repository__delete_key( - device_with_asid: Device, repository: DeviceRepository -): - # Persist model before deleting from model - repository.write(device_with_asid) - - # Retrieve the model and treat this as the initial state - intermediate_device = repository.read(device_with_asid.id) - intermediate_device.delete_key( - key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, key_value="ABC:1234567890" - ) - repository.write(intermediate_device) - - assert repository.read(device_with_asid.id).keys == [ - DeviceKey(key_type=DeviceKeyType.PRODUCT_ID, key_value="P.WWW-CCC") - ] diff --git a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_questionnaire_responses_v2.py b/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_questionnaire_responses_v2.py deleted file mode 100644 index 570b3022..00000000 --- a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_questionnaire_responses_v2.py +++ /dev/null @@ -1,109 +0,0 @@ -import pytest -from domain.core.device.v2 import Device, DeviceType -from domain.core.questionnaire.v2 import Questionnaire -from domain.core.root.v2 import Root -from domain.repository.device_repository.tests.utils import devices_exactly_equal -from domain.repository.device_repository.v2 import DeviceRepository - - -@pytest.fixture -def shoe_questionnaire() -> Device: - questionnaire = Questionnaire(name="shoe", version=1) - questionnaire.add_question( - name="foot", answer_types=(str,), mandatory=True, choices={"L", "R"} - ) - questionnaire.add_question(name="shoe-size", answer_types=(int,), mandatory=True) - return questionnaire - - -@pytest.fixture -def health_questionnaire() -> Device: - questionnaire = Questionnaire(name="health", version=1) - questionnaire.add_question(name="weight", answer_types=(int,), mandatory=True) - questionnaire.add_question(name="height", answer_types=(int,), mandatory=True) - return questionnaire - - -@pytest.fixture -def device( - shoe_questionnaire: Questionnaire, health_questionnaire: Questionnaire -) -> Device: - shoe_response_1 = shoe_questionnaire.respond( - responses=[{"foot": ["L"]}, {"shoe-size": [123]}], - ) - shoe_response_2 = shoe_questionnaire.respond( - responses=[{"foot": ["L"]}, {"shoe-size": [345]}], - ) - - health_response = health_questionnaire.respond( - responses=[{"weight": [123]}, {"height": [345]}] - ) - - org = Root.create_ods_organisation(ods_code="AB123") - product_team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="Team" - ) - device = product_team.create_device(name="Device-1", device_type=DeviceType.PRODUCT) - device.add_questionnaire_response(questionnaire_response=shoe_response_1) - device.add_questionnaire_response(questionnaire_response=shoe_response_2) - device.add_questionnaire_response(questionnaire_response=health_response) - return device - - -@pytest.mark.integration -def test__device_repository__with_questionnaires( - device: Device, repository: DeviceRepository -): - repository.write(device) - assert repository.read(device.id) == device - - -@pytest.mark.integration -def 
test__device_repository__with_questionnaires_and_tags( - device: Device, repository: DeviceRepository -): - """ - This test might look specific but it previously raised a bug due - to model/datetime serialisation issues - """ - device.add_tag(foo="bar") - repository.write(device) - assert repository.read(device.id) == device - - -@pytest.mark.integration -def test__device_repository__modify_questionnaire_response_that_has_been_persisted( - device: Device, repository: DeviceRepository, shoe_questionnaire: Questionnaire -): - # Persist model before updating model - repository.write(device) - intermediate_device = repository.read(device.id) - - # Update the model - questionnaire_responses = intermediate_device.questionnaire_responses - assert len(questionnaire_responses["shoe/1"]) == 2 - (_questionnaire_response, _) = questionnaire_responses["shoe/1"].values() - - questionnaire_response = shoe_questionnaire.respond( - responses=[{"foot": ["R"]}, {"shoe-size": [789]}] - ) - questionnaire_response.created_on = _questionnaire_response.created_on - - intermediate_device.update_questionnaire_response( - questionnaire_response=questionnaire_response - ) - - # Persist and verify consistency - repository.write(intermediate_device) - device_from_db = repository.read(intermediate_device.id) - assert devices_exactly_equal(device_from_db, intermediate_device) - assert not devices_exactly_equal(device_from_db, device) - assert device_from_db.questionnaire_responses["shoe/1"][ - _questionnaire_response.created_on.isoformat() - ].answers == [ - {"foot": ["R"]}, - {"shoe-size": [789]}, - ] - - assert device_from_db.created_on == device.created_on - assert device_from_db.updated_on > device.updated_on diff --git a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_tags_v2.py b/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_tags_v2.py deleted file mode 100644 index b8550545..00000000 --- a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_tags_v2.py +++ /dev/null @@ -1,191 +0,0 @@ -from collections import defaultdict - -import pytest -from domain.core.device.v2 import Device, DeviceTag -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.enum import Status -from domain.repository.device_repository.v2 import ( - CannotDropMandatoryFields, - DeviceRepository, -) - -DONT_COMPARE_FIELDS = {"tags"} - - -@pytest.mark.integration -def test__device_repository__tags(device: Device, repository: DeviceRepository): - repository.write(device) - (_device_123,) = repository.query_by_tag(abc=123) - assert _device_123.dict(exclude=DONT_COMPARE_FIELDS) == device.dict( - exclude=DONT_COMPARE_FIELDS - ) - - (_device_bar,) = repository.query_by_tag(bar="foo") - assert _device_bar.dict(exclude=DONT_COMPARE_FIELDS) == device.dict( - exclude=DONT_COMPARE_FIELDS - ) - - for value in ["aBc", "ABC", "abc", "AbC"]: - (_device_abc,) = repository.query_by_tag(mixed_case=value) - assert _device_abc.dict(exclude=DONT_COMPARE_FIELDS) == device.dict( - exclude=DONT_COMPARE_FIELDS - ) - - -@pytest.mark.integration -def test__device_repository__tag_does_not_exist( - device: Device, repository: DeviceRepository -): - repository.write(device) - results = repository.query_by_tag(abc=12) - assert len(results) == 0 - - -@pytest.mark.integration -def test__device_repository__multiple_devices_with_same_tags( - device: Device, - device_with_asid: Device, - device_with_mhs_id: Device, - repository: DeviceRepository, -): - repository.write(device) - 
repository.write(device_with_asid) - repository.write(device_with_mhs_id) - - devices = repository.query_by_tag(bar="foo") - assert len(devices) == 3 - - expected_devices = sorted( - (device, device_with_asid, device_with_mhs_id), key=lambda d: d.id - ) - # Tags are dropped by 'query_by_tag' so re-set these manually for comparison - for d1, d2 in zip(devices, expected_devices): - d1.tags = d2.tags - - assert devices == expected_devices - - -def _test_add_two_tags( - device: Device, second_device: Device, repository: DeviceRepository -): - expected_tags = { - DeviceTag(abc="123"), - DeviceTag(bar="foo"), - DeviceTag(mixed_case="abc"), - DeviceTag(shoe_size="123"), - DeviceTag(shoe_size="456"), - } - - assert repository.read(device.id).tags == expected_tags - assert repository.read(DeviceKeyType.PRODUCT_ID, "P.WWW-XXX").tags == expected_tags - - (_device_123,) = repository.query_by_tag(shoe_size=123) - assert _device_123.dict(exclude=DONT_COMPARE_FIELDS) == second_device.dict( - exclude=DONT_COMPARE_FIELDS - ) - - (_device_456,) = repository.query_by_tag(shoe_size=456) - assert _device_456.dict(exclude=DONT_COMPARE_FIELDS) == second_device.dict( - exclude=DONT_COMPARE_FIELDS - ) - return True - - -@pytest.mark.integration -def test__device_repository__add_two_tags(device: Device, repository: DeviceRepository): - repository.write(device) - second_device = repository.read(device.id) - second_device.add_tag(shoe_size=123) - second_device.add_tag(shoe_size=456) - repository.write(second_device) - - assert _test_add_two_tags( - device=device, second_device=second_device, repository=repository - ) - - -@pytest.mark.integration -def test__device_repository__add_two_tags_at_once( - device: Device, repository: DeviceRepository -): - repository.write(device) - second_device = repository.read(device.id) - second_device.add_tags([dict(shoe_size=123), dict(shoe_size=456)]) - repository.write(second_device) - - assert _test_add_two_tags( - device=device, second_device=second_device, repository=repository - ) - - -@pytest.mark.integration -def test__device_repository__add_two_tags_and_then_clear( - device: Device, repository: DeviceRepository -): - repository.write(device) - second_device = repository.read(device.id) - second_device.add_tags([dict(shoe_size=123), dict(shoe_size=456)]) - repository.write(second_device) - - second_device.clear_events() - second_device.clear_tags() - repository.write(second_device) - - assert repository.read(device.id).tags == set() - assert repository.read(DeviceKeyType.PRODUCT_ID, "P.WWW-XXX").tags == set() - - assert repository.query_by_tag(shoe_size=123) == [] - assert repository.query_by_tag(shoe_size=456) == [] - - -@pytest.mark.integration -@pytest.mark.parametrize( - "field_to_drop, expected_default_value", - [ - (["tags"], set()), # If 'tags' is dropped, it should default to an empty set - (["keys"], []), # If 'keys' is dropped, it should default to an empty list - (["status"], Status.ACTIVE), # 'status' should default to Status.ACTIVE - (["updated_on"], None), # 'updated_on' should default to None - (["deleted_on"], None), # 'deleted_on' should default to None - ( - ["questionnaire_responses"], - defaultdict(dict), - ), # 'questionnaire_responses' defaults to an empty dict - ], -) -def test__device_repository__drop_fields( - device: Device, repository: DeviceRepository, field_to_drop, expected_default_value -): - repository.write(device) - (_device_123,) = repository.query_by_tag(abc=123) - assert _device_123.dict(exclude=DONT_COMPARE_FIELDS) == device.dict( - 
exclude=DONT_COMPARE_FIELDS - ) - - # Query with specific fields to drop - results = repository.query_by_tag(abc=123, fields_to_drop=field_to_drop) - assert len(results) == 1 - - device_result = results[0] - - assert device_result.dict()[field_to_drop[0]] == expected_default_value - assert all(field in device_result.dict() for field in Device.get_mandatory_fields()) - - -@pytest.mark.integration -def test__device_repository__drop_mandatory_fields( - device: Device, repository: DeviceRepository -): - repository.write(device) - (_device_123,) = repository.query_by_tag(abc=123) - assert _device_123.dict(exclude=DONT_COMPARE_FIELDS) == device.dict( - exclude=DONT_COMPARE_FIELDS - ) - - # Query with mandatory fields to drop - fields_to_drop = Device.get_mandatory_fields() - - with pytest.raises( - CannotDropMandatoryFields, match="Cannot drop mandatory fields:" - ): - repository.query_by_tag(abc=123, fields_to_drop=fields_to_drop) diff --git a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2.py b/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2.py deleted file mode 100644 index e0df677b..00000000 --- a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2.py +++ /dev/null @@ -1,333 +0,0 @@ -from copy import deepcopy - -import pytest -from attr import asdict -from domain.core.device.v2 import Device as DeviceV2 -from domain.core.device.v2 import DeviceCreatedEvent, DeviceTag -from domain.core.device.v2 import DeviceType as DeviceTypeV2 -from domain.core.device_key.v2 import DeviceKey as DeviceKeyV2 -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.enum import Status -from domain.core.root.v2 import Root -from domain.repository.compression import pkl_loads_gzip -from domain.repository.device_repository.v2 import ( - DeviceRepository as DeviceRepositoryV2, -) -from domain.repository.device_repository.v2 import ( - _device_non_root_primary_keys, - _device_root_primary_key, - compress_device_fields, -) -from domain.repository.errors import AlreadyExistsError, ItemNotFound - -DEVICE_KEY = "P.WWW-XXX" - - -@pytest.fixture -def device() -> DeviceV2: - org = Root.create_ods_organisation(ods_code="AB123") - product_team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="Team" - ) - device = product_team.create_device( - name="Device-1", device_type=DeviceTypeV2.PRODUCT - ) - device.add_key(key_value=DEVICE_KEY, key_type=DeviceKeyType.PRODUCT_ID) - return device - - -@pytest.fixture -def device_with_tag() -> DeviceV2: - org = Root.create_ods_organisation(ods_code="AB123") - product_team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="Team" - ) - device = product_team.create_device( - name="Device-1", device_type=DeviceTypeV2.PRODUCT - ) - device.add_key(key_value=DEVICE_KEY, key_type=DeviceKeyType.PRODUCT_ID) - device.add_tag( - nhs_as_client="5NR", nhs_as_svc_ia="urn:nhs:names:services:mm:PORX_IN090101UK31" - ) - return device - - -@pytest.fixture -def another_device_with_same_key() -> DeviceV2: - org = Root.create_ods_organisation(ods_code="AB123") - product_team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="Team" - ) - device = product_team.create_device( - name="Device-2", device_type=DeviceTypeV2.PRODUCT - ) - device.add_key(key_value=DEVICE_KEY, key_type=DeviceKeyType.PRODUCT_ID) - return device - - -def test__device_root_primary_key(): - primary_key = _device_root_primary_key(device_id="123") - 
assert primary_key == {"pk": {"S": "D#123"}, "sk": {"S": "D#123"}} - - -def test__device_non_root_primary_keys(): - primary_keys = _device_non_root_primary_keys( - device_id="123", - device_keys=[ - DeviceKeyV2(key_type=DeviceKeyType.PRODUCT_ID, key_value=DEVICE_KEY) - ], - device_tags=[DeviceTag(foo="bar")], - ) - assert primary_keys == [ - { - "pk": {"S": "D#product_id#P.WWW-XXX"}, - "sk": {"S": "D#product_id#P.WWW-XXX"}, - }, - { - "pk": {"S": "DT#foo=bar"}, - "sk": {"S": "D#123"}, - }, - ] - - -@pytest.mark.integration -def test__device_repository_read_by_id( - device: DeviceV2, repository: DeviceRepositoryV2 -): - repository.write(device) - device_from_db = repository.read(device.id) - assert device_from_db.dict() == device.dict() - - -@pytest.mark.integration -def test__device_repository_read_by_key( - device: DeviceV2, repository: DeviceRepositoryV2 -): - repository.write(device) - device_from_db = repository.read(*device.keys[0].parts) - assert device_from_db.dict() == device.dict() - - -@pytest.mark.integration -def test__device_repository_already_exists(device, repository: DeviceRepositoryV2): - repository.write(device) - with pytest.raises(AlreadyExistsError): - repository.write(device) - - -@pytest.mark.integration -def test__device_repository_key_already_exists_on_another_device( - device, another_device_with_same_key, repository: DeviceRepositoryV2 -): - repository.write(device) - with pytest.raises(AlreadyExistsError): - repository.write(another_device_with_same_key) - - -def test__device_repository_key_already_exists_on_another_device( - device, another_device_with_same_key, repository: DeviceRepositoryV2 -): - repository.write(device) - with pytest.raises(AlreadyExistsError): - repository.write(another_device_with_same_key) - - -@pytest.mark.integration -def test__device_repository__device_does_not_exist(repository: DeviceRepositoryV2): - with pytest.raises(ItemNotFound): - repository.read("123") - - -def test__device_repository_local(device: DeviceV2, repository: DeviceRepositoryV2): - repository.write(device) - device_from_db = repository.read(device.id) - assert device_from_db.dict() == device.dict() - - -def test__device_repository__device_does_not_exist_local( - repository: DeviceRepositoryV2, -): - with pytest.raises(ItemNotFound): - repository.read("123") - - -@pytest.mark.integration -def test__device_repository__update(device: DeviceV2, repository: DeviceRepositoryV2): - repository.write(device) - - # Retrieve the model and treat this as the initial state - intermediate_device = repository.read(device.id) - intermediate_device.update(name="foo-bar") - - repository.write(intermediate_device) - - final_device = repository.read(device.id) - - assert final_device.name == "foo-bar" - - assert final_device.created_on == device.created_on - assert final_device.updated_on > device.updated_on - - -@pytest.mark.integration -def test__device_repository__delete( - device_with_tag: DeviceV2, repository: DeviceRepositoryV2 -): - repository.write(device_with_tag) - - # Retrieve the model and treat this as the initial state - device = repository.read(device_with_tag.id) - device.delete() - repository.write(device) - - # Attempt to read the original device, expecting an ItemNotFound error - with pytest.raises(ItemNotFound): - repository.read(device_with_tag.id) - - # Read the deleted device - deleted_device = repository.read_inactive(device_with_tag.id) - - # Assert device is inactive after being deleted - assert deleted_device is not None - assert deleted_device.status is 
Status.INACTIVE - assert deleted_device.tags == set() - assert deleted_device.created_on == device_with_tag.created_on - assert deleted_device.updated_on > device_with_tag.updated_on - - -@pytest.mark.integration -def test__device_repository__can_delete_second_device_with_same_key( - repository: DeviceRepositoryV2, -): - org = Root.create_ods_organisation(ods_code="AAA") - product_team = org.create_product_team( - id="6f8c285e-04a2-4194-a84e-dabeba474ff7", name="MyTeam" - ) - - device = product_team.create_device( - name="OriginalDevice", device_type=DeviceTypeV2.PRODUCT - ) - device.add_key(key_value=DEVICE_KEY, key_type=DeviceKeyType.PRODUCT_ID) - repository.write(device) - repository.read(DeviceKeyType.PRODUCT_ID, DEVICE_KEY) # passes - - device.clear_events() - device.delete() - repository.write(device) - with pytest.raises(ItemNotFound): - repository.read(DeviceKeyType.PRODUCT_ID, DEVICE_KEY) - - deleted_device = repository.read_inactive(device.id) - assert deleted_device.status is Status.INACTIVE - - # Can re-add the same product id Key after a previous device is inactive - for i in range(5): - _device = product_team.create_device( - name=f"Device-{i}", device_type=DeviceTypeV2.PRODUCT - ) - _device.add_key(key_value=DEVICE_KEY, key_type=DeviceKeyType.PRODUCT_ID) - repository.write(_device) - repository.read(DeviceKeyType.PRODUCT_ID, DEVICE_KEY) # passes - - _device.clear_events() - _device.delete() - repository.write(_device) - with pytest.raises(ItemNotFound): - repository.read(DeviceKeyType.PRODUCT_ID, DEVICE_KEY) - - # Assert device is inactive after being deleted - _deleted_device = repository.read_inactive(_device.id) - assert _deleted_device.status is Status.INACTIVE - - -@pytest.mark.integration -def test__device_repository__add_key(device: DeviceV2, repository: DeviceRepositoryV2): - repository.write(device) - - # Retrieve the model and treat this as the initial state - intermediate_device = repository.read(device.id) - assert len(intermediate_device.keys) == 1 - - intermediate_device.add_key( - key_type=DeviceKeyType.PRODUCT_ID, key_value="P.AAA-CCC" - ) - repository.write(intermediate_device) - - # Read the same device multiple times, indexed by key and id - # to verify that they're all the same - root_index = [(intermediate_device.id,)] - non_root_indexes = [k.parts for k in intermediate_device.keys] - - retrieved_devices = [] - for key_parts in root_index + non_root_indexes: - _device = repository.read(*key_parts).dict() - retrieved_devices.append(_device) - - # Assert that there are 2 keys, the device can be retrieved 3 ways from the db, - # and that all 3 are identical - assert len(intermediate_device.keys) == 2 - assert len(retrieved_devices) == 3 - assert [retrieved_devices[0]] * 3 == retrieved_devices - - assert retrieved_devices[0]["created_on"] == device.created_on - assert retrieved_devices[0]["updated_on"] > device.updated_on - - -@pytest.fixture -def device_created_event(): - return DeviceCreatedEvent( - id="123", - name="foo", - device_type="type", - product_team_id="123", - ods_code="abc", - status="good", - created_on="123", - updated_on=None, - deleted_on=None, - keys=[], - tags=["a", "b", "c"], - questionnaire_responses={"foo": "bar"}, - ) - - -def test_serialise_data_with_event(device_created_event): - _serialised_data = compress_device_fields(data=device_created_event) - _serialised_tags = _serialised_data.pop("tags") - - _data = asdict(device_created_event) - _tags = _data.pop("tags") - - assert _data == _serialised_data - assert [pkl_loads_gzip(tag) 
for tag in pkl_loads_gzip(_serialised_tags)] == _tags - - -def test_serialise_data_with_dict(device_created_event): - data = asdict(device_created_event, recurse=False) - _serialised_data = compress_device_fields(data=deepcopy(data)) - _serialised_tags = _serialised_data.pop("tags") - - _tags = data.pop("tags") - - assert data == _serialised_data - assert [pkl_loads_gzip(tag) for tag in pkl_loads_gzip(_serialised_tags)] == _tags - - -def test_serialise_data_with_event_with_other_fields_compressed(device_created_event): - _serialised_data = compress_device_fields( - data=device_created_event, - fields_to_compress=["questionnaire_responses", "status"], - ) - _serialised_tags = _serialised_data.pop("tags") - _serialised_responses = _serialised_data.pop("questionnaire_responses") - _serialised_status = _serialised_data.pop("status") - - _data = asdict(device_created_event) - _tags = _data.pop("tags") - _responses = _data.pop("questionnaire_responses") - _status = _data.pop("status") - - assert _data == _serialised_data - assert [pkl_loads_gzip(tag) for tag in pkl_loads_gzip(_serialised_tags)] == _tags - assert pkl_loads_gzip(_serialised_responses) == _responses - assert pkl_loads_gzip(_serialised_status) == _status diff --git a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2_compression.py b/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2_compression.py deleted file mode 100644 index 32194aa7..00000000 --- a/src/layers/domain/repository/device_repository/tests/v2/test_device_repository_v2_compression.py +++ /dev/null @@ -1,37 +0,0 @@ -from domain.core.device.v2 import Device -from domain.repository.compression import pkl_dumps_gzip -from domain.repository.device_repository.v2 import compress_device_fields - - -def test_compress_device_fields_default_is_tags(device: Device): - device_state = device.state() - device_with_compressed_fields = compress_device_fields(device_state) - original_tags = device_state.pop("tags") - compressed_tags = device_with_compressed_fields.pop("tags") - assert ( - pkl_dumps_gzip([pkl_dumps_gzip(tag) for tag in original_tags]) - == compressed_tags - ) - - -def test_compress_device_fields_with_other_fields(device: Device): - fields_to_compress = ["id", "keys"] - - device_state = device.state() - device_with_compressed_fields = compress_device_fields( - device_state, fields_to_compress=fields_to_compress - ) - - for field in fields_to_compress: - original_value = device_state.pop(field) - compressed_value = device_with_compressed_fields.pop(field) - assert pkl_dumps_gzip(original_value) == compressed_value - - original_tags = device_state.pop("tags") - compressed_tags = device_with_compressed_fields.pop("tags") - assert ( - pkl_dumps_gzip([pkl_dumps_gzip(tag) for tag in original_tags]) - == compressed_tags - ) - - assert device_with_compressed_fields == device_state diff --git a/src/layers/domain/repository/device_repository/v3.py b/src/layers/domain/repository/device_repository/v1.py similarity index 98% rename from src/layers/domain/repository/device_repository/v3.py rename to src/layers/domain/repository/device_repository/v1.py index 95a61eed..c3329df6 100644 --- a/src/layers/domain/repository/device_repository/v3.py +++ b/src/layers/domain/repository/device_repository/v1.py @@ -1,8 +1,8 @@ from copy import copy from attr import asdict -from domain.core.device.v3 import Device as _Device -from domain.core.device.v3 import ( +from domain.core.device import Device as _Device +from domain.core.device 
import ( DeviceCreatedEvent, DeviceDeletedEvent, DeviceKeyAddedEvent, @@ -14,13 +14,13 @@ DeviceUpdatedEvent, QuestionnaireResponseUpdatedEvent, ) -from domain.core.device_key.v2 import DeviceKey +from domain.core.device_key import DeviceKey from domain.core.enum import Status from domain.core.event import Event from domain.repository.compression import pkl_dumps_gzip, pkl_loads_gzip -from domain.repository.keys.v3 import TableKey +from domain.repository.keys import TableKey from domain.repository.marshall import marshall, marshall_value, unmarshall -from domain.repository.repository.v3 import Repository, TooManyResults +from domain.repository.repository import Repository from domain.repository.transaction import ( ConditionExpression, TransactionStatement, diff --git a/src/layers/domain/repository/device_repository/v2.py b/src/layers/domain/repository/device_repository/v2.py deleted file mode 100644 index a3186ff4..00000000 --- a/src/layers/domain/repository/device_repository/v2.py +++ /dev/null @@ -1,582 +0,0 @@ -from copy import copy - -from attr import asdict -from domain.core.device.v2 import ( - Device, - DeviceCreatedEvent, - DeviceDeletedEvent, - DeviceKeyAddedEvent, - DeviceKeyDeletedEvent, - DeviceTag, - DeviceTagAddedEvent, - DeviceTagsAddedEvent, - DeviceTagsClearedEvent, - DeviceUpdatedEvent, -) -from domain.core.device_key.v2 import DeviceKey -from domain.core.enum import Status -from domain.core.event import Event -from domain.core.questionnaire.v2 import QuestionnaireResponseUpdatedEvent -from domain.repository.compression import pkl_dumps_gzip, pkl_loads_gzip -from domain.repository.errors import ItemNotFound -from domain.repository.keys.v2 import TableKey -from domain.repository.marshall import marshall, marshall_value, unmarshall -from domain.repository.repository.v2 import Repository -from domain.repository.transaction import ( - ConditionExpression, - TransactionStatement, - TransactItem, - update_transactions, -) - -TAGS = "tags" -ROOT_FIELDS_TO_COMPRESS = [TAGS] -NON_ROOT_FIELDS_TO_COMPRESS = ["questionnaire_responses"] -BATCH_GET_SIZE = 100 - - -class TooManyResults(Exception): - pass - - -class CannotDropMandatoryFields(Exception): - def __init__(self, bad_fields: set[str]) -> None: - super().__init__(f"Cannot drop mandatory fields: {', '.join(bad_fields)}") - - -def compress_device_fields(data: Event | dict, fields_to_compress=None) -> dict: - _data = copy(data) if isinstance(data, dict) else asdict(data, recurse=False) - - # Pop unknown keys - unknown_keys = _data.keys() - set(Device.__fields__) - for k in unknown_keys: - _data.pop(k) - - # Compress specified keys if they exist in the data - fields_to_compress = (fields_to_compress or []) + ROOT_FIELDS_TO_COMPRESS - fields_to_compress_that_exist = [f for f in fields_to_compress if _data.get(f)] - for field in fields_to_compress_that_exist: - # Only proceed if the field is not empty - if field == TAGS: - # Tags are doubly compressed: first compress each tag in the list - _data[field] = [pkl_dumps_gzip(tag) for tag in _data[field]] - # Compress the entire field (which includes the doubly compressed tags) - _data[field] = pkl_dumps_gzip(_data[field]) - return _data - - -def decompress_device_fields(device: dict): - for field in ROOT_FIELDS_TO_COMPRESS: - if device.get(field): # Check if the field is present and not empty - device[field] = pkl_loads_gzip(device[field]) # First decompression - if field == TAGS: # Tags are doubly compressed. 
- # Second decompression: Decompress each tag in the list - device[field] = [pkl_loads_gzip(tag) for tag in device[field]] - - # Decompress non-root fields if the device is not a root and fields exist - if not device.get("root"): # Use get to handle missing 'root' field - for field in NON_ROOT_FIELDS_TO_COMPRESS: - if device.get(field): # Check if the field is present and non empty - device[field] = pkl_loads_gzip(device[field]) - - return device - - -def _device_root_primary_key(device_id: str) -> dict: - """ - Generates one fully marshalled (i.e. {"pk": {"S": "123"}} DynamoDB - primary key (i.e. pk + sk) for the provided Device, indexed by the Device ID - """ - root_pk = TableKey.DEVICE.key(device_id) - return marshall(pk=root_pk, sk=root_pk) - - -def _device_non_root_primary_keys( - device_id: str, device_keys: list[DeviceKey], device_tags: list[DeviceTag] -) -> list[dict]: - """ - Generates all the fully marshalled (i.e. {"pk": {"S": "123"}} DynamoDB - primary keys (i.e. pk + sk) for the provided Device. This is one primary key - for every value of Device.keys and Device.tags - """ - root_pk = TableKey.DEVICE.key(device_id) - device_key_primary_keys = [ - marshall(pk=pk, sk=pk) - for pk in (TableKey.DEVICE.key(k.key_type, k.key_value) for k in device_keys) - ] - device_tag_primary_keys = [ - marshall(pk=pk, sk=root_pk) - for pk in (TableKey.DEVICE_TAG.key(t.value) for t in device_tags) - ] - return device_key_primary_keys + device_tag_primary_keys - - -def update_device_indexes( - table_name: str, - data: dict | DeviceUpdatedEvent, - id: str, - keys: list[DeviceKey], - tags: list[DeviceTag], -): - # Update the root device without compressing the 'questionnaire_responses' field - root_primary_key = _device_root_primary_key(device_id=id) - update_root_device_transactions = update_transactions( - table_name=table_name, - primary_keys=[root_primary_key], - data=compress_device_fields(data), - ) - # Update non-root devices with compressed 'questionnaire_responses' field - non_root_primary_keys = _device_non_root_primary_keys( - device_id=id, device_keys=keys, device_tags=tags - ) - update_non_root_devices_transactions = update_transactions( - table_name=table_name, - primary_keys=non_root_primary_keys, - data=compress_device_fields( - data, fields_to_compress=NON_ROOT_FIELDS_TO_COMPRESS - ), - ) - return update_root_device_transactions + update_non_root_devices_transactions - - -def create_device_index( - table_name: str, - pk_key_parts: tuple[str], - device_data: dict, - sk_key_parts=None, - pk_table_key: TableKey = TableKey.DEVICE, - sk_table_key: TableKey = TableKey.DEVICE, - root=False, -) -> TransactItem: - pk = pk_table_key.key(*pk_key_parts) - sk = sk_table_key.key(*sk_key_parts) if sk_key_parts else pk - return TransactItem( - Put=TransactionStatement( - TableName=table_name, - Item=marshall(pk=pk, sk=sk, root=root, **device_data), - ConditionExpression=ConditionExpression.MUST_NOT_EXIST, - ) - ) - - -def create_device_index_batch( - pk_key_parts: tuple[str], - device_data: dict, - sk_key_parts=None, - pk_table_key: TableKey = TableKey.DEVICE, - sk_table_key: TableKey = TableKey.DEVICE, - root=False, -) -> dict: - """ - Difference between `create_device_index` and `create_device_index_batch`: - - `create_device_index` is intended for the event-based - handlers (e.g. 
`handle_DeviceCreatedEvent`) which are called by the base - `write` method, which expects `TransactItem`s for use with `client.transact_write_items` - - `create_device_index_batch` is intended the device-based handler - `handle_bulk` which is called by the base method `write_bulk`, which expects - `BatchWriteItem`s which we render as a `dict` for use with `client.batch_write_items` - """ - pk = pk_table_key.key(*pk_key_parts) - sk = sk_table_key.key(*sk_key_parts) if sk_key_parts else pk - return { - "PutRequest": { - "Item": marshall(pk=pk, sk=sk, root=root, **device_data), - }, - } - - -def delete_device_index( - table_name: str, - pk_key_parts: tuple[str], - sk_key_parts=None, - pk_table_key: TableKey = TableKey.DEVICE, - sk_table_key: TableKey = TableKey.DEVICE, -) -> TransactItem: - pk = pk_table_key.key(*pk_key_parts) - sk = sk_table_key.key(*sk_key_parts) if sk_key_parts else pk - return TransactItem( - Delete=TransactionStatement( - TableName=table_name, - Key=marshall(pk=pk, sk=sk), - ConditionExpression=ConditionExpression.MUST_EXIST, - ) - ) - - -class DeviceRepository(Repository[Device]): - def __init__(self, table_name, dynamodb_client): - super().__init__( - table_name=table_name, model=Device, dynamodb_client=dynamodb_client - ) - - def handle_DeviceCreatedEvent(self, event: DeviceCreatedEvent) -> TransactItem: - return create_device_index( - table_name=self.table_name, - pk_key_parts=(event.id,), - device_data=compress_device_fields(event), - root=True, - ) - - def handle_DeviceUpdatedEvent( - self, event: DeviceUpdatedEvent - ) -> list[TransactItem]: - keys = {DeviceKey(**key) for key in event.keys} - tags = {DeviceTag(__root__=tag) for tag in event.tags} - return update_device_indexes( - table_name=self.table_name, data=event, id=event.id, keys=keys, tags=tags - ) - - def handle_DeviceDeletedEvent( - self, event: DeviceDeletedEvent - ) -> list[TransactItem]: - # Inactive Devices have tags removed so that they are - # no longer searchable - delete_transactions = [ - delete_device_index( - table_name=self.table_name, - pk_key_parts=(DeviceTag(__root__=tag).value,), - sk_key_parts=(event.id,), - pk_table_key=TableKey.DEVICE_TAG, - ) - for tag in event.deleted_tags - ] - - # Prepare data for the inactive copies - inactive_data = compress_device_fields(event) - inactive_data["status"] = str(Status.INACTIVE) - - # Collect keys for the original devices - original_keys = {DeviceKey(**key) for key in event.keys} - - # Create copy of original device and indexes with new pk and sk - inactive_root_copy_transactions = [] - inactive_root_copy_transactions.append( - create_device_index( - table_name=self.table_name, - pk_table_key=TableKey.DEVICE_STATUS, - pk_key_parts=(event.status, event.id), - sk_key_parts=(event.id,), - device_data=inactive_data, - root=True, - ) - ) - - inactive_key_indexes_copy_transactions = [] - for key in original_keys: - inactive_key_indexes_copy_transactions.append( - create_device_index( - table_name=self.table_name, - pk_table_key=TableKey.DEVICE_STATUS, - pk_key_parts=(event.status, event.id), - sk_key_parts=key.parts, - device_data=inactive_data, - root=False, - ) - ) - - # Create delete transactions for original device and key indexes - original_root_delete_transactions = [] - original_root_delete_transactions.append( - delete_device_index( - table_name=self.table_name, - pk_key_parts=(event.id,), - pk_table_key=TableKey.DEVICE, - ) - ) - - original_key_indexes_delete_transactions = [] - for key in original_keys: - 
original_key_indexes_delete_transactions.append( - delete_device_index( - table_name=self.table_name, - pk_key_parts=key.parts, - pk_table_key=TableKey.DEVICE, - ) - ) - - return ( - delete_transactions - + inactive_root_copy_transactions - + inactive_key_indexes_copy_transactions - + original_root_delete_transactions - + original_key_indexes_delete_transactions - ) - - def handle_DeviceKeyAddedEvent( - self, event: DeviceKeyAddedEvent - ) -> list[TransactItem]: - # Create a copy of the Device indexed against the new key - create_transaction = create_device_index( - table_name=self.table_name, - pk_key_parts=event.new_key.parts, - device_data=compress_device_fields( - event, fields_to_compress=NON_ROOT_FIELDS_TO_COMPRESS - ), - ) - # Update the value of "keys" on all other copies of this Device - device_keys = {DeviceKey(**key) for key in event.keys} - device_keys_before_update = device_keys - {event.new_key} - device_tags = {DeviceTag(__root__=tag) for tag in event.tags} - update_transactions = update_device_indexes( - table_name=self.table_name, - id=event.id, - keys=device_keys_before_update, - tags=device_tags, - data={ - "keys": event.keys, - "updated_on": event.updated_on, - }, - ) - return [create_transaction] + update_transactions - - def handle_DeviceKeyDeletedEvent( - self, event: DeviceKeyDeletedEvent - ) -> list[TransactItem]: - # Delete the copy of the Device indexed against the deleted key - delete_transaction = delete_device_index( - table_name=self.table_name, pk_key_parts=event.deleted_key.parts - ) - # Update the value of "keys" on all other copies of this Device - device_keys = {DeviceKey(**key) for key in event.keys} - device_keys_before_update = device_keys - {event.deleted_key} - device_tags = {DeviceTag(__root__=tag) for tag in event.tags} - update_transactions = update_device_indexes( - table_name=self.table_name, - id=event.id, - keys=device_keys_before_update, - tags=device_tags, - data={ - "keys": event.keys, - "updated_on": event.updated_on, - }, - ) - return [delete_transaction] + update_transactions - - def handle_DeviceTagAddedEvent( - self, event: DeviceTagAddedEvent - ) -> list[TransactItem]: - # Create a copy of the Device indexed against the new tag - create_transaction = create_device_index( - table_name=self.table_name, - pk_key_parts=(event.new_tag.value,), - sk_key_parts=(event.id,), - pk_table_key=TableKey.DEVICE_TAG, - device_data=compress_device_fields( - event, fields_to_compress=NON_ROOT_FIELDS_TO_COMPRESS - ), - ) - # Update the value of "tags" on all other copies of this Device - device_keys = {DeviceKey(**key) for key in event.keys} - device_tags = {DeviceTag(__root__=tag) for tag in event.tags} - device_tags_before_update = device_tags - {event.new_tag} - update_transactions = update_device_indexes( - table_name=self.table_name, - id=event.id, - keys=device_keys, - tags=device_tags_before_update, - data={"tags": event.tags, "updated_on": event.updated_on}, - ) - return [create_transaction] + update_transactions - - def handle_DeviceTagsAddedEvent(self, event: DeviceTagsAddedEvent): - # Create a copy of the Device indexed against the new tag - device_data = compress_device_fields( - event, fields_to_compress=NON_ROOT_FIELDS_TO_COMPRESS - ) - create_transactions = [ - create_device_index( - table_name=self.table_name, - pk_key_parts=(new_tag.value,), - sk_key_parts=(event.id,), - pk_table_key=TableKey.DEVICE_TAG, - device_data=device_data, - ) - for new_tag in event.new_tags - ] - - # Update the value of "tags" on all other copies of this 
Device - device_keys = {DeviceKey(**key) for key in event.keys} - device_tags = {DeviceTag(__root__=tag) for tag in event.tags} - device_tags_before_update = device_tags - event.new_tags - update_transactions = update_device_indexes( - table_name=self.table_name, - id=event.id, - keys=device_keys, - tags=device_tags_before_update, - data={"tags": event.tags, "updated_on": event.updated_on}, - ) - return create_transactions + update_transactions - - def handle_DeviceTagsClearedEvent(self, event: DeviceTagsClearedEvent): - delete_tags_transactions = [ - delete_device_index( - table_name=self.table_name, - pk_key_parts=(tag.value,), - sk_key_parts=(event.id,), - pk_table_key=TableKey.DEVICE_TAG, - ) - for tag in event.deleted_tags - ] - - keys = {DeviceKey(**key) for key in event.keys} - update_transactions = update_device_indexes( - table_name=self.table_name, - id=event.id, - keys=keys, - tags=[], # tags already deleted in delete_tags_transactions - data={"tags": []}, - ) - return delete_tags_transactions + update_transactions - - def handle_QuestionnaireResponseUpdatedEvent( - self, event: QuestionnaireResponseUpdatedEvent - ): - keys = {DeviceKey(**key) for key in event.entity_keys} - tags = {DeviceTag(__root__=tag) for tag in event.entity_tags} - return update_device_indexes( - table_name=self.table_name, - id=event.entity_id, - keys=keys, - tags=tags, - data={ - "questionnaire_responses": event.questionnaire_responses, - "updated_on": event.updated_on, - }, - ) - - def handle_bulk(self, item: dict) -> list[dict]: - create_device_transaction = create_device_index_batch( - pk_key_parts=(item["id"],), - device_data=compress_device_fields(item), - root=True, - ) - - device_data = compress_device_fields( - item, fields_to_compress=NON_ROOT_FIELDS_TO_COMPRESS - ) - create_keys_transactions = [ - create_device_index_batch( - pk_key_parts=(key["key_type"], key["key_value"]), - device_data=device_data, - ) - for key in item["keys"] - ] - create_tags_transactions = [ - create_device_index_batch( - pk_key_parts=(DeviceTag(__root__=tag).value,), - sk_key_parts=(item["id"],), - pk_table_key=TableKey.DEVICE_TAG, - device_data=device_data, - ) - for tag in item["tags"] - ] - return ( - [create_device_transaction] - + create_keys_transactions - + create_tags_transactions - ) - - def read(self, *key_parts: str) -> Device: - """ - Read the device by either id or key. If calling by id, then do: - - repository.read("123") - - If calling by key then you must include the key type (e.g. 
'product_id'): - - repository.read("product_id", "123") - - """ - key = TableKey.DEVICE.key(*key_parts) - result = self.client.get_item( - TableName=self.table_name, Key=marshall(pk=key, sk=key) - ) - try: - item = result["Item"] - except KeyError: - raise ItemNotFound(*key_parts, item_type=Device) - - _device = unmarshall(item) - return Device(**decompress_device_fields(_device)) - - def read_inactive(self, *key_parts: str) -> Device: - """ - Read the inactive device by id:: - - repository.read("123") - - """ - pk = TableKey.DEVICE_STATUS.key(Status.INACTIVE, *key_parts) - sk = TableKey.DEVICE.key(*key_parts) - - result = self.client.get_item( - TableName=self.table_name, Key=marshall(pk=pk, sk=sk) - ) - try: - item = result["Item"] - except KeyError: - raise ItemNotFound(*key_parts, item_type=Device) - - _device = unmarshall(item) - return Device(**decompress_device_fields(_device)) - - def query_by_tag( - self, - fields_to_drop: list[str] | set[str] = None, - drop_tags_field=True, - **kwargs, - ) -> list[Device]: - """ - Query the device by predefined tags, optionally dropping specific fields from the query result, - noting that 'tags' field is dropped by default. - - Example: - repository.query_by_tag(fields_to_drop=["field1", "field2"], foo="123", bar="456") - """ - fields_to_drop = { - *(fields_to_drop or []), - *(["tags"] if drop_tags_field else []), - } - fields_to_return = Device.get_all_fields() - fields_to_drop - - dropped_mandatory_fields = Device.get_mandatory_fields() & fields_to_drop - if dropped_mandatory_fields: - raise CannotDropMandatoryFields(dropped_mandatory_fields) - - tag_value = DeviceTag(**kwargs).value - pk = TableKey.DEVICE_TAG.key(tag_value) - query_params = { - "ExpressionAttributeValues": {":pk": marshall_value(pk)}, - "KeyConditionExpression": "pk = :pk", - "TableName": self.table_name, - **_dynamodb_projection_expression(fields_to_return), - } - - response = self.client.query(**query_params) - if "LastEvaluatedKey" in response: - raise TooManyResults(f"Too many results for query '{kwargs}'") - - # Convert to Device, sorted by 'pk' - compressed_devices = map(unmarshall, response["Items"]) - devices_as_dict = map(decompress_device_fields, compressed_devices) - return [Device(**d) for d in sorted(devices_as_dict, key=lambda d: d["id"])] - - -def _dynamodb_projection_expression(updated_fields: list[str]): - expression_attribute_names = {} - update_clauses = [] - - for field_name in updated_fields: - field_name_placeholder = f"#{field_name}" - - update_clauses.append(field_name_placeholder) - expression_attribute_names[field_name_placeholder] = field_name - - projection_expression = ", ".join(update_clauses) - - return dict( - ProjectionExpression=projection_expression, - ExpressionAttributeNames=expression_attribute_names, - ) diff --git a/src/layers/domain/repository/keys/tests/test_keys_v1.py b/src/layers/domain/repository/keys/tests/test_keys_v1.py index 325aeddb..51a5d5b8 100644 --- a/src/layers/domain/repository/keys/tests/test_keys_v1.py +++ b/src/layers/domain/repository/keys/tests/test_keys_v1.py @@ -1,12 +1,7 @@ from uuid import uuid4 import pytest -from domain.repository.keys import ( - TableKeys, - group_by_key, - remove_keys, - strip_key_prefix, -) +from domain.repository.keys import TableKey, group_by_key, remove_keys, strip_key_prefix FIXED_UUID = uuid4() @@ -14,22 +9,22 @@ @pytest.mark.parametrize( ("table_key", "args", "expected"), [ - (TableKeys.DEVICE, (FIXED_UUID, "foo", 1), f"D#{FIXED_UUID}#foo#1"), - (TableKeys.PRODUCT_TEAM, ("foo",), 
"PT#foo"), + (TableKey.DEVICE, (FIXED_UUID, "foo", 1), f"D#{FIXED_UUID}#foo#1"), + (TableKey.PRODUCT_TEAM, ("foo",), "PT#foo"), ], ) -def test_TableKeys_key(table_key: TableKeys, args, expected): +def test_TableKeys_key(table_key: TableKey, args, expected): assert table_key.key(*args) == expected @pytest.mark.parametrize( ("table_key", "expected"), [ - (TableKeys.DEVICE, [{"key": "D#foo"}, {"key": "D#bar"}]), - (TableKeys.PRODUCT_TEAM, [{"key": "PT#foo"}]), + (TableKey.DEVICE, [{"key": "D#foo"}, {"key": "D#bar"}]), + (TableKey.PRODUCT_TEAM, [{"key": "PT#foo"}]), ], ) -def test_TableKeys_filter(table_key: TableKeys, expected): +def test_TableKeys_filter(table_key: TableKey, expected): iterable = [{"key": "D#foo"}, {"key": "PT#foo"}, {"key": "D#bar"}] assert list(table_key.filter(iterable=iterable, key="key")) == expected @@ -38,16 +33,16 @@ def test_TableKeys_filter(table_key: TableKeys, expected): ("table_key", "expected"), [ ( - TableKeys.DEVICE, + TableKey.DEVICE, [("foo", {"other_data": "FOO"}), ("bar", {"other_data": "BAR"})], ), ( - TableKeys.PRODUCT_TEAM, + TableKey.PRODUCT_TEAM, [("baz", {"other_data": "BAZ"})], ), ], ) -def test_TableKeys_filter_and_group(table_key: TableKeys, expected): +def test_TableKeys_filter_and_group(table_key: TableKey, expected): iterable = [ {"pk_read": "D#foo", "other_data": "FOO"}, {"pk_read": "PT#baz", "other_data": "BAZ"}, diff --git a/src/layers/domain/repository/keys/v1.py b/src/layers/domain/repository/keys/v1.py index 1b376d60..68884f26 100644 --- a/src/layers/domain/repository/keys/v1.py +++ b/src/layers/domain/repository/keys/v1.py @@ -24,15 +24,15 @@ def filter_and_group( ) -class TableKeys(TableKeyAction, StrEnum): - DEVICE = "D" - DEVICE_KEY = "DK" - DEVICE_TYPE = "DT" - DEVICE_KEY_TYPE = "DKT" - DEVICE_INDEX = "DI" +class TableKey(TableKeyAction, StrEnum): PRODUCT_TEAM = "PT" - QUESTIONNAIRE = "Q" - QUESTIONNAIRE_RESPONSE = "QR" + CPM_SYSTEM_ID = "CSI" + CPM_PRODUCT = "P" + CPM_PRODUCT_STATUS = "PS" + DEVICE_REFERENCE_DATA = "DRD" + DEVICE = "D" + DEVICE_TAG = "DT" + DEVICE_STATUS = "DS" def group_by_key( @@ -53,11 +53,5 @@ def strip_key_prefix(key: str): return tail -def remove_keys( - pk=None, - sk=None, - pk_read=None, - sk_read=None, - **values, -): +def remove_keys(pk=None, sk=None, pk_read=None, sk_read=None, **values): return values diff --git a/src/layers/domain/repository/keys/v2.py b/src/layers/domain/repository/keys/v2.py deleted file mode 100644 index 2a31b25a..00000000 --- a/src/layers/domain/repository/keys/v2.py +++ /dev/null @@ -1,9 +0,0 @@ -from enum import StrEnum - -from .v1 import TableKeyAction - - -class TableKey(TableKeyAction, StrEnum): - DEVICE = "D" - DEVICE_TAG = "DT" - DEVICE_STATUS = "DS" diff --git a/src/layers/domain/repository/keys/v3.py b/src/layers/domain/repository/keys/v3.py deleted file mode 100644 index 42cebb72..00000000 --- a/src/layers/domain/repository/keys/v3.py +++ /dev/null @@ -1,14 +0,0 @@ -from enum import StrEnum - -from .v1 import TableKeyAction - - -class TableKey(TableKeyAction, StrEnum): - PRODUCT_TEAM = "PT" - CPM_SYSTEM_ID = "CSI" - CPM_PRODUCT = "P" - CPM_PRODUCT_STATUS = "PS" - DEVICE_REFERENCE_DATA = "DRD" - DEVICE = "D" - DEVICE_TAG = "DT" - DEVICE_STATUS = "DS" diff --git a/src/layers/domain/repository/product_team_repository/tests/v2/test_product_team_repository.py b/src/layers/domain/repository/product_team_repository/tests/test_product_team_repository_v1.py similarity index 95% rename from 
src/layers/domain/repository/product_team_repository/tests/v2/test_product_team_repository.py rename to src/layers/domain/repository/product_team_repository/tests/test_product_team_repository_v1.py index 43ea0882..9450db9f 100644 --- a/src/layers/domain/repository/product_team_repository/tests/v2/test_product_team_repository.py +++ b/src/layers/domain/repository/product_team_repository/tests/test_product_team_repository_v1.py @@ -1,7 +1,7 @@ import pytest -from domain.core.root.v3 import Root +from domain.core.root import Root from domain.repository.errors import AlreadyExistsError, ItemNotFound -from domain.repository.product_team_repository.v2 import ProductTeamRepository +from domain.repository.product_team_repository import ProductTeamRepository from event.aws.client import dynamodb_client from test_helpers.dynamodb import mock_table diff --git a/src/layers/domain/repository/product_team_repository/tests/v1/test_product_team_repository.py b/src/layers/domain/repository/product_team_repository/tests/v1/test_product_team_repository.py deleted file mode 100644 index e7df4422..00000000 --- a/src/layers/domain/repository/product_team_repository/tests/v1/test_product_team_repository.py +++ /dev/null @@ -1,84 +0,0 @@ -import pytest -from domain.core.root import Root -from domain.repository.errors import AlreadyExistsError, ItemNotFound -from domain.repository.product_team_repository.v1 import ProductTeamRepository -from event.aws.client import dynamodb_client - -from test_helpers.dynamodb import mock_table -from test_helpers.terraform import read_terraform_output - - -@pytest.mark.integration -def test__product_team_repository(): - team_id = "359e28eb-6e2c-409c-a3ab-a4868ab5c2df" - table_name = read_terraform_output("dynamodb_table_name.value") - - org = Root.create_ods_organisation(ods_code="AB123") - team = org.create_product_team(id=team_id, name="Test Team") - - repo = ProductTeamRepository( - table_name=table_name, - dynamodb_client=dynamodb_client(), - ) - - repo.write(team) - result = repo.read(team_id) - assert result == team - - -@pytest.mark.integration -def test__product_team_repository_already_exists(): - team_id = "359e28eb-6e2c-409c-a3ab-a4868ab5c2df" - table_name = read_terraform_output("dynamodb_table_name.value") - - org = Root.create_ods_organisation(ods_code="AB123") - team = org.create_product_team(id=team_id, name="Test Team") - repo = ProductTeamRepository( - table_name=table_name, - dynamodb_client=dynamodb_client(), - ) - - repo.write(team) - with pytest.raises(AlreadyExistsError): - repo.write(team) - - -@pytest.mark.integration -def test__product_team_repository__product_team_does_not_exist(): - team_id = "359e28eb-6e2c-409c-a3ab-a4868ab5c2df" - table_name = read_terraform_output("dynamodb_table_name.value") - repo = ProductTeamRepository( - table_name=table_name, - dynamodb_client=dynamodb_client(), - ) - with pytest.raises(ItemNotFound): - repo.read(team_id) - - -def test__product_team_repository_local(): - team_id = "359e28eb-6e2c-409c-a3ab-a4868ab5c2df" - - org = Root.create_ods_organisation(ods_code="AB123") - team = org.create_product_team(id=team_id, name="Test Team") - - with mock_table("my_table") as client: - repo = ProductTeamRepository( - table_name="my_table", - dynamodb_client=client, - ) - - repo.write(team) - result = repo.read(team_id) - assert result == team - - -def test__product_team_repository__product_team_does_not_exist_local(): - team_id = "359e28eb-6e2c-409c-a3ab-a4868ab5c2df" - - with mock_table("my_table") as client: - repo = 
ProductTeamRepository( - table_name="my_table", - dynamodb_client=client, - ) - with pytest.raises(ItemNotFound): - repo.read(team_id) diff --git a/src/layers/domain/repository/product_team_repository/v1.py b/src/layers/domain/repository/product_team_repository/v1.py index afe8621c..90a12264 100644 --- a/src/layers/domain/repository/product_team_repository/v1.py +++ b/src/layers/domain/repository/product_team_repository/v1.py @@ -1,46 +1,37 @@ from attr import asdict from domain.core.product_team import ProductTeam, ProductTeamCreatedEvent -from domain.repository.errors import ItemNotFound -from domain.repository.keys import TableKeys -from domain.repository.marshall import marshall, marshall_value, unmarshall +from domain.core.product_team_key import ProductTeamKey +from domain.repository.keys import TableKey from domain.repository.repository import Repository -from domain.repository.transaction import ( - ConditionExpression, - TransactionStatement, - TransactItem, -) class ProductTeamRepository(Repository[ProductTeam]): def __init__(self, table_name: str, dynamodb_client): super().__init__( - table_name=table_name, model=ProductTeam, dynamodb_client=dynamodb_client + table_name=table_name, + model=ProductTeam, + dynamodb_client=dynamodb_client, + table_key=TableKey.PRODUCT_TEAM, + parent_table_keys=(TableKey.PRODUCT_TEAM,), ) + def read(self, id: str) -> ProductTeam: + return super()._read(parent_ids=(), id=id) + def handle_ProductTeamCreatedEvent(self, event: ProductTeamCreatedEvent): - pk = TableKeys.PRODUCT_TEAM.key(event.id) - return TransactItem( - Put=TransactionStatement( - TableName=self.table_name, - Item=marshall(pk=pk, sk=pk, **asdict(event)), - ConditionExpression=ConditionExpression.MUST_NOT_EXIST, - ) + create_root_transaction = self.create_index( + id=event.id, parent_key_parts=(event.id,), data=asdict(event), root=True ) - def read(self, id) -> ProductTeam: - pk = TableKeys.PRODUCT_TEAM.key(id) - args = { - "TableName": self.table_name, - "KeyConditionExpression": "pk = :pk AND sk = :sk", - "ExpressionAttributeValues": { - ":pk": marshall_value(pk), - ":sk": marshall_value(pk), - }, - } - result = self.client.query(**args) - items = [unmarshall(i) for i in result["Items"]] - if len(items) == 0: - raise ItemNotFound(id, item_type=ProductTeam) - (item,) = items + keys = {ProductTeamKey(**key) for key in event.keys} + create_key_transactions = [ + self.create_index( + id=key.key_value, + parent_key_parts=(key.key_value,), + data=asdict(event), + root=True, + ) + for key in keys + ] - return ProductTeam(**item) + return [create_root_transaction] + create_key_transactions diff --git a/src/layers/domain/repository/product_team_repository/v2.py b/src/layers/domain/repository/product_team_repository/v2.py deleted file mode 100644 index dd75b58a..00000000 --- a/src/layers/domain/repository/product_team_repository/v2.py +++ /dev/null @@ -1,37 +0,0 @@ -from attr import asdict -from domain.core.product_team.v3 import ProductTeam, ProductTeamCreatedEvent -from domain.core.product_team_key import ProductTeamKey -from domain.repository.keys.v3 import TableKey -from domain.repository.repository.v3 import Repository - - -class ProductTeamRepository(Repository[ProductTeam]): - def __init__(self, table_name: str, dynamodb_client): - super().__init__( - table_name=table_name, - model=ProductTeam, - dynamodb_client=dynamodb_client, - table_key=TableKey.PRODUCT_TEAM, - parent_table_keys=(TableKey.PRODUCT_TEAM,), - ) - - def read(self, id: str) -> ProductTeam: - return 
super()._read(parent_ids=(), id=id) - - def handle_ProductTeamCreatedEvent(self, event: ProductTeamCreatedEvent): - create_root_transaction = self.create_index( - id=event.id, parent_key_parts=(event.id,), data=asdict(event), root=True - ) - - keys = {ProductTeamKey(**key) for key in event.keys} - create_key_transactions = [ - self.create_index( - id=key.key_value, - parent_key_parts=(key.key_value,), - data=asdict(event), - root=True, - ) - for key in keys - ] - - return [create_root_transaction] + create_key_transactions diff --git a/src/layers/domain/repository/questionnaire_repository/__init__.py b/src/layers/domain/repository/questionnaire_repository/__init__.py index e0d08e67..99f8aed3 100644 --- a/src/layers/domain/repository/questionnaire_repository/__init__.py +++ b/src/layers/domain/repository/questionnaire_repository/__init__.py @@ -1 +1,2 @@ -from .v1 import * # noqa +from .v1.questionnaire_repository import * # noqa +from .v1.questionnaires import * # noqa diff --git a/src/layers/domain/repository/questionnaire_repository/v2/tests/test_questionnaire_repository_v2.py b/src/layers/domain/repository/questionnaire_repository/tests/test_questionnaire_repository_v1.py similarity index 89% rename from src/layers/domain/repository/questionnaire_repository/v2/tests/test_questionnaire_repository_v2.py rename to src/layers/domain/repository/questionnaire_repository/tests/test_questionnaire_repository_v1.py index 6a5768ea..da8b52c5 100644 --- a/src/layers/domain/repository/questionnaire_repository/v2/tests/test_questionnaire_repository_v2.py +++ b/src/layers/domain/repository/questionnaire_repository/tests/test_questionnaire_repository_v1.py @@ -2,12 +2,10 @@ import pytest from domain.repository.errors import ItemNotFound -from domain.repository.questionnaire_repository.v2 import ( +from domain.repository.questionnaire_repository import ( PATH_TO_QUESTIONNAIRES, - QuestionnaireRepository, -) -from domain.repository.questionnaire_repository.v2.questionnaires import ( QuestionnaireInstance, + QuestionnaireRepository, ) diff --git a/src/layers/domain/repository/questionnaire_repository/v1/__init__.py b/src/layers/domain/repository/questionnaire_repository/v1/__init__.py deleted file mode 100644 index f2431887..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/__init__.py +++ /dev/null @@ -1,52 +0,0 @@ -from pathlib import Path - -from domain.core.questionnaire.v2 import Questionnaire -from domain.repository.errors import ItemNotFound -from event.json import json_load - -from .deserialisers import QUESTION_DESERIALISERS - -PATH_TO_QUESTIONNAIRES = Path(__file__).parent / "questionnaires" - - -def deserialise_question(question: dict) -> dict: - for field, deserialiser in QUESTION_DESERIALISERS.items(): - value = question.get(field) - if value: - question[field] = deserialiser(value) - return question - - -def version_from_file_path(file_path: Path) -> int: - return int(file_path.stem.lstrip("v")) - - -def get_latest_questions_by_name(name: str) -> Path | None: - possible_paths = PATH_TO_QUESTIONNAIRES.glob(f"{name}/v*.json") - paths_sorted_by_version = sorted(possible_paths, key=version_from_file_path) - try: - path = paths_sorted_by_version[-1] - except IndexError: - path = None - return path - - -def read_questions(path: Path): - with open(path, "r") as fp: - raw_questions = json_load(fp) - return list(map(deserialise_question, raw_questions)) - - -class QuestionnaireRepository: - - def read(self, name: str) -> Questionnaire: - path = 
get_latest_questions_by_name(name=name) - if not path: - raise ItemNotFound(name, item_type=Questionnaire) - - version = version_from_file_path(path) - questions = read_questions(path=path) - questionnaire = Questionnaire(name=name, version=version) - for question in questions: - questionnaire.add_question(**question) - return questionnaire diff --git a/src/layers/domain/repository/questionnaire_repository/v1/deserialisers.py b/src/layers/domain/repository/questionnaire_repository/v1/deserialisers.py deleted file mode 100644 index ff2440a4..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/deserialisers.py +++ /dev/null @@ -1,30 +0,0 @@ -from domain.core.questionnaire import custom_rules -from domain.core.questionnaire.v1 import ALLOWED_ANSWER_TYPES_LOOKUP, NoSuchQuestionType - - -def _deserialise_answer_type( - answer_type_name: str, - allowed_answer_types_lookup: dict = ALLOWED_ANSWER_TYPES_LOOKUP, -) -> type: - try: - return allowed_answer_types_lookup[answer_type_name] - except KeyError: - raise NoSuchQuestionType(answer_type_name) - - -def _deserialise_answer_types(answer_type_names: list[str]) -> set[type]: - return set(map(_deserialise_answer_type, answer_type_names)) - - -def _deserialise_rule(rule_name: str, rules=custom_rules): - return getattr(rules, rule_name) - - -def _deserialise_rules(rule_names: list[str]): - return list(map(_deserialise_rule, rule_names)) - - -QUESTION_DESERIALISERS = { - "answer_types": _deserialise_answer_types, - "validation_rules": _deserialise_rules, -} diff --git a/src/layers/domain/repository/questionnaire_repository/v2/__init__.py b/src/layers/domain/repository/questionnaire_repository/v1/questionnaire_repository.py similarity index 95% rename from src/layers/domain/repository/questionnaire_repository/v2/__init__.py rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaire_repository.py index 522bd6a1..366f89de 100644 --- a/src/layers/domain/repository/questionnaire_repository/v2/__init__.py +++ b/src/layers/domain/repository/questionnaire_repository/v1/questionnaire_repository.py @@ -1,6 +1,6 @@ from pathlib import Path -from domain.core.questionnaire.v3 import Questionnaire +from domain.core.questionnaire import Questionnaire from domain.repository.errors import ItemNotFound PATH_TO_QUESTIONNAIRES = Path(__file__).parent / "questionnaires" diff --git a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py index dded7fd2..7490d617 100644 --- a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py +++ b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/__init__.py @@ -2,5 +2,7 @@ class QuestionnaireInstance(StrEnum): - SPINE_DEVICE = auto() - SPINE_ENDPOINT = auto() + SPINE_AS = auto() + SPINE_MHS = auto() + SPINE_AS_ADDITIONAL_INTERACTIONS = auto() + SPINE_MHS_MESSAGE_SETS = auto() diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as/field_mapping.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as/field_mapping.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as/field_mapping.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as/field_mapping.json diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as/v1.json 
b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as/v1.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as/v1.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as/v1.json diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as_additional_interactions/field_mapping.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as_additional_interactions/field_mapping.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as_additional_interactions/field_mapping.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as_additional_interactions/field_mapping.json diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as_additional_interactions/v1.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as_additional_interactions/v1.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_as_additional_interactions/v1.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_as_additional_interactions/v1.json diff --git a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_device/v1.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_device/v1.json deleted file mode 100644 index 8d93c7b1..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_device/v1.json +++ /dev/null @@ -1,94 +0,0 @@ -[ - { - "name": "nhs_mhs_manufacturer_org", - "human_readable_name": "Manufacturer Ods Code", - "answer_types": ["str"] - }, - { - "name": "nhs_as_svc_ia", - "human_readable_name": "Interaction Ids", - "answer_types": ["str"], - "mandatory": true, - "multiple": true - }, - { - "name": "nhs_mhs_party_key", - "human_readable_name": "Party Key", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_id_code", - "human_readable_name": "Owner", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_product_name", - "answer_types": ["str"] - }, - { - "name": "unique_identifier", - "human_readable_name": "ASID", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_as_client", - "human_readable_name": "Owner", - "answer_types": ["str"] - }, - { - "name": "object_class", - "answer_types": ["CaseInsensitiveString"], - "mandatory": true, - "choices": ["nhsas"] - }, - { - "name": "nhs_approver_urp", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_date_approved", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_requestor_urp", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_date_requested", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_product_key", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_product_version", - "answer_types": ["str"] - }, - { - "name": "nhs_as_acf", - "answer_types": ["str"], - "multiple": true - }, - { - "name": "nhs_temp_uid", - "answer_types": ["str"] - }, - { - "name": "description", - "answer_types": ["str"] - }, - { - "name": "nhs_as_category_bag", - "answer_types": ["str"], - "multiple": true - } -] diff --git 
a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_endpoint/v1.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_endpoint/v1.json deleted file mode 100644 index 7738c5df..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_endpoint/v1.json +++ /dev/null @@ -1,177 +0,0 @@ -[ - { - "name": "nhs_mhs_end_point", - "human_readable_name": "Address", - "answer_types": ["str"], - "mandatory": true, - "validation_rules": ["url"] - }, - { - "name": "unique_identifier", - "human_readable_name": "ASID", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_id_code", - "human_readable_name": "Managing Organization", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_mhs_party_key", - "human_readable_name": "Mhs Party key", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_mhs_cpa_id", - "human_readable_name": "Mhs CPA ID", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_mhs_actor", - "human_readable_name": "Reliability Configuration Actor", - "answer_types": ["str"], - "choices": [ - "urn:oasis:names:tc:ebxml-msg:actor:topartymsh", - "urn:oasis:names:tc:ebxml-msg:actor:nextmsh", - "IGNORED" - ] - }, - { - "name": "nhs_mhs_sync_reply_mode", - "human_readable_name": "Reliability Configuration Reply Mode", - "answer_types": ["CaseInsensitiveString"], - "choices": ["MSHSIGNALSONLY", "NEVER", "NONE", "SIGNALSANDRESPONSE"] - }, - { - "name": "nhs_mhs_retry_interval", - "human_readable_name": "Reliability Configuration Retry Interval", - "answer_types": ["str"] - }, - { - "name": "nhs_mhs_retries", - "human_readable_name": "Reliability Configuration Retries", - "answer_types": ["str", "int"], - "validation_rules": ["empty_str"] - }, - { - "name": "nhs_mhs_persist_duration", - "human_readable_name": "Reliability Configuration Persist Duration", - "answer_types": ["str"] - }, - { - "name": "nhs_mhs_duplicate_elimination", - "human_readable_name": "Reliability Configuration Duplication Elimination", - "answer_types": ["CaseInsensitiveString"], - "choices": ["ALWAYS", "NEVER"] - }, - { - "name": "nhs_mhs_ack_requested", - "human_readable_name": "Reliability Configuration Ack Requested", - "answer_types": ["CaseInsensitiveString"], - "choices": ["ALWAYS", "NEVER"] - }, - { - "name": "nhs_mhs_svc_ia", - "human_readable_name": "Interaction ID", - "answer_types": ["str"] - }, - { - "name": "object_class", - "answer_types": ["CaseInsensitiveString"], - "mandatory": true, - "choices": ["nhsmhs"] - }, - { - "name": "nhs_approver_urp", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_contract_property_template_key", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_date_approved", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_date_dns_approved", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_date_requested", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_dns_approver", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_ep_interaction_type", - "answer_types": ["CaseInsensitiveString"], - "mandatory": true, - "choices": ["FHIR", "HL7", "EBXML", "N/A", "MSHSIGNALSONLY"] - }, - { - "name": "nhs_mhs_fqdn", - "human_readable_name": "Mhs FQDN", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_mhs_in", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_mhs_ip_address", - 
"answer_types": ["str"] - }, - { - "name": "nhs_mhs_is_authenticated", - "answer_types": ["CaseInsensitiveString"], - "mandatory": true, - "choices": ["NONE", "TRANSIENT", "PERSISTENT"] - }, - { - "name": "nhs_mhs_sn", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_product_key", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_product_name", - "answer_types": ["str"] - }, - { - "name": "nhs_product_version", - "answer_types": ["str"] - }, - { - "name": "nhs_requestor_urp", - "answer_types": ["str"], - "mandatory": true - }, - { - "name": "nhs_mhs_service_description", - "answer_types": ["str"] - }, - { - "name": "nhs_mhs_manufacturer_org", - "human_readable_name": "Manufacturer ODS code", - "answer_types": ["str"] - } -] diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs/field_mapping.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs/field_mapping.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs/field_mapping.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs/field_mapping.json diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs/v1.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs/v1.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs/v1.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs/v1.json diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs_message_sets/field_mapping.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs_message_sets/field_mapping.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs_message_sets/field_mapping.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs_message_sets/field_mapping.json diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs_message_sets/v1.json b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs_message_sets/v1.json similarity index 100% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/spine_mhs_message_sets/v1.json rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/spine_mhs_message_sets/v1.json diff --git a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_device_questionnaire.py b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_device_questionnaire.py deleted file mode 100644 index 0924cd32..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_device_questionnaire.py +++ /dev/null @@ -1,86 +0,0 @@ -import pytest -from domain.core.questionnaire.v2 import Questionnaire -from domain.repository.questionnaire_repository.v1 import QuestionnaireRepository -from domain.repository.questionnaire_repository.v1.questionnaires import ( - QuestionnaireInstance, -) -from event.json import json_load -from hypothesis import assume, given, settings -from sds.cpm_translation.tests.test_cpm_translation import ( - NHS_ACCREDITED_SYSTEM_STRATEGY, -) -from 
sds.cpm_translation.translations import update_in_list_of_dict -from sds.domain.nhs_accredited_system import NhsAccreditedSystem - -from etl.sds.tests.constants import EtlTestDataPath - - -@pytest.fixture -def spine_device_questionnaire_v1() -> Questionnaire: - repo = QuestionnaireRepository() - return repo.read(name=QuestionnaireInstance.SPINE_DEVICE) - - -def _is_accredited_system(obj: dict[str, str]) -> bool: - return obj["object_class"].lower() == NhsAccreditedSystem.OBJECT_CLASS - - -def _test_spine_device_questionnaire_v1( - nhs_accredited_system: NhsAccreditedSystem, questionnaire: Questionnaire -): - assert nhs_accredited_system.questionnaire() == questionnaire - count_mandatory_questions = len(questionnaire.mandatory_questions) - questionnaire_response_responses = ( - nhs_accredited_system.as_questionnaire_response_answers() - ) - - count_accredited_systems = 0 - ods_codes = nhs_accredited_system.nhs_as_client or ["ABC"] - for ods_code in ods_codes: - count_accredited_systems += 1 - update_in_list_of_dict( - obj=questionnaire_response_responses, key="nhs_as_client", value=[ods_code] - ) - _questionnaire_response = questionnaire.respond( - responses=questionnaire_response_responses - ) - assert ( - _questionnaire_response.questionnaire.id - == f"{QuestionnaireInstance.SPINE_DEVICE}/1" - ) - assert len(_questionnaire_response.answers) >= count_mandatory_questions - - assert count_accredited_systems > 0 - assert count_accredited_systems == len(ods_codes) - return True - - -@settings(deadline=1500) -@given(nhs_accredited_system=NHS_ACCREDITED_SYSTEM_STRATEGY) -def test_spine_device_questionnaire_v1_local( - nhs_accredited_system: NhsAccreditedSystem, -): - assume(len(nhs_accredited_system.nhs_as_client) > 0) - repo = QuestionnaireRepository() - spine_device_questionnaire_v1 = repo.read(name=QuestionnaireInstance.SPINE_DEVICE) - - assert _test_spine_device_questionnaire_v1( - nhs_accredited_system=nhs_accredited_system, - questionnaire=spine_device_questionnaire_v1, - ) - - -@pytest.mark.s3(EtlTestDataPath.FULL_JSON) -def test_spine_device_questionnaire_v1_integration( - spine_device_questionnaire_v1: Questionnaire, test_data_paths -): - (path,) = test_data_paths - with open(path) as f: - data: list[dict] = json_load(f) - - for accredited_system in filter(_is_accredited_system, data): - nhs_accredited_system = NhsAccreditedSystem.construct(**accredited_system) - assert _test_spine_device_questionnaire_v1( - nhs_accredited_system=nhs_accredited_system, - questionnaire=spine_device_questionnaire_v1, - ) diff --git a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_endpoint_questionnaire.py b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_endpoint_questionnaire.py deleted file mode 100644 index f4b895b5..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_endpoint_questionnaire.py +++ /dev/null @@ -1,68 +0,0 @@ -import pytest -from domain.core.questionnaire.v2 import Questionnaire -from domain.repository.questionnaire_repository import QuestionnaireRepository -from domain.repository.questionnaire_repository.v1.questionnaires import ( - QuestionnaireInstance, -) -from event.json import json_load -from hypothesis import given, settings -from sds.cpm_translation.tests.test_cpm_translation import NHS_MHS_STRATEGY -from sds.domain.nhs_mhs import NhsMhs - -from etl.sds.tests.constants import EtlTestDataPath - - -@pytest.fixture -def 
spine_endpoint_questionnaire_v1() -> Questionnaire: - repo = QuestionnaireRepository() - return repo.read(name=QuestionnaireInstance.SPINE_ENDPOINT) - - -def _mhs(obj: dict[str, str]) -> bool: - return obj["object_class"].lower() == NhsMhs.OBJECT_CLASS - - -def _test_spine_endpoint_questionnaire_v1( - nhs_mhs: NhsMhs, questionnaire: Questionnaire -): - assert nhs_mhs.questionnaire() == questionnaire - count_mandatory_questions = len(questionnaire.mandatory_questions) - questionnaire_response_answers = nhs_mhs.as_questionnaire_response_answers() - - _questionnaire_response = questionnaire.respond( - responses=questionnaire_response_answers - ) - assert ( - _questionnaire_response.questionnaire.id - == f"{QuestionnaireInstance.SPINE_ENDPOINT}/1" - ) - assert len(_questionnaire_response.answers) >= count_mandatory_questions - return True - - -@settings(deadline=1500) -@given(nhs_mhs=NHS_MHS_STRATEGY) -def test_spine_endpoint_questionnaire_v1_local(nhs_mhs: NhsMhs): - repo = QuestionnaireRepository() - spine_endpoint_questionnaire_v1 = repo.read( - name=QuestionnaireInstance.SPINE_ENDPOINT - ) - - assert _test_spine_endpoint_questionnaire_v1( - nhs_mhs=nhs_mhs, questionnaire=spine_endpoint_questionnaire_v1 - ) - - -@pytest.mark.s3(EtlTestDataPath.FULL_JSON) -def test_spine_endpoint_questionnaire_v1_integration( - spine_endpoint_questionnaire_v1: Questionnaire, test_data_paths -): - (path,) = test_data_paths - with open(path) as f: - data: list[dict] = json_load(f) - - for mhs in filter(_mhs, data): - nhs_mhs = NhsMhs.construct(**mhs) - assert _test_spine_endpoint_questionnaire_v1( - nhs_mhs=nhs_mhs, questionnaire=spine_endpoint_questionnaire_v1 - ) diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/tests/test_spine_questionnaires.py b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_questionnaires.py similarity index 69% rename from src/layers/domain/repository/questionnaire_repository/v2/questionnaires/tests/test_spine_questionnaires.py rename to src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_questionnaires.py index 70556b11..beec55d9 100644 --- a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/tests/test_spine_questionnaires.py +++ b/src/layers/domain/repository/questionnaire_repository/v1/questionnaires/tests/test_spine_questionnaires.py @@ -1,19 +1,54 @@ -from domain.repository.questionnaire_repository.v2 import ( +from string import ascii_letters, digits + +from domain.core.validation import ODS_CODE_REGEX, SdsId +from domain.repository.questionnaire_repository import ( PATH_TO_QUESTIONNAIRES, - QuestionnaireRepository, -) -from domain.repository.questionnaire_repository.v2.questionnaires import ( QuestionnaireInstance, + QuestionnaireRepository, ) +from etl_utils.ldif.model import DistinguishedName from event.json import json_load from hypothesis import given, settings -from sds.cpm_translation.tests.test_cpm_translation import ( - NHS_ACCREDITED_SYSTEM_STRATEGY, - NHS_MHS_STRATEGY, -) +from hypothesis.provisional import urls +from hypothesis.strategies import builds, from_regex, just, sets, text from sds.domain.nhs_accredited_system import NhsAccreditedSystem from sds.domain.nhs_mhs import NhsMhs +DUMMY_DISTINGUISHED_NAME = DistinguishedName( + parts=(("ou", "services"), ("uniqueidentifier", "foobar"), ("o", "nhs")) +) + +NHS_MHS_STRATEGY = builds( + NhsMhs, + _distinguished_name=just(DUMMY_DISTINGUISHED_NAME), + objectclass=just( + { + "nhsmhs", + 
} + ), + nhsidcode=from_regex(ODS_CODE_REGEX, fullmatch=True), + nhsproductname=text(alphabet=ascii_letters + digits + " -_", min_size=1), + nhsmhspartykey=from_regex( + SdsId.MessageHandlingSystem.PARTY_KEY_REGEX, fullmatch=True + ), + nhsmhssvcia=text(alphabet=ascii_letters + digits + ":", min_size=1), + nhsmhsendpoint=urls(), +) + +NHS_ACCREDITED_SYSTEM_STRATEGY = builds( + NhsAccreditedSystem, + _distinguished_name=just(DUMMY_DISTINGUISHED_NAME), + objectclass=just( + { + "nhsas", + } + ), + nhsproductname=text(alphabet=ascii_letters + digits + " -_", min_size=1), + nhsasclient=sets(from_regex(ODS_CODE_REGEX, fullmatch=True), min_size=1), + nhsassvcia=sets(text(alphabet="abc", min_size=1, max_size=1), min_size=1), + uniqueidentifier=text(alphabet=digits, min_size=1, max_size=8), +) + def _apply_field_mapping(name: str, data: dict) -> dict: with open(PATH_TO_QUESTIONNAIRES / name / "field_mapping.json") as f: diff --git a/src/layers/domain/repository/questionnaire_repository/v1/tests/test_deserialisers.py b/src/layers/domain/repository/questionnaire_repository/v1/tests/test_deserialisers.py deleted file mode 100644 index 3c0f93e3..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/tests/test_deserialisers.py +++ /dev/null @@ -1,41 +0,0 @@ -import pytest -from domain.core.questionnaire.v1 import NoSuchQuestionType -from domain.repository.questionnaire_repository.v1.deserialisers import ( - _deserialise_answer_type, - _deserialise_rule, -) - - -def test__deserialise_rule(): - def _my_rule(value): - return value - - class Rules: - my_rule = _my_rule - - _rule = _deserialise_rule("my_rule", rules=Rules) - assert _rule is _my_rule - - -@pytest.mark.parametrize( - ["type_name", "_type"], [("str", str), ("int", int), ("bool", bool)] -) -def test__deserialise_answer_type(type_name: str, _type: type): - assert ( - _deserialise_answer_type( - answer_type_name=type_name, - allowed_answer_types_lookup={"str": str, "int": int, "bool": bool}, - ) - is _type - ) - - -@pytest.mark.parametrize( - "type_name", - ["str", "int", "bool"], -) -def test__deserialise_question_types_not_found(type_name: str): - with pytest.raises(NoSuchQuestionType): - _deserialise_answer_type( - answer_type_name=type_name, allowed_answer_types_lookup={} - ) diff --git a/src/layers/domain/repository/questionnaire_repository/v1/tests/test_questionnaire_repository.py b/src/layers/domain/repository/questionnaire_repository/v1/tests/test_questionnaire_repository.py deleted file mode 100644 index 648846bc..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v1/tests/test_questionnaire_repository.py +++ /dev/null @@ -1,40 +0,0 @@ -from collections import defaultdict - -import pytest -from domain.repository.errors import ItemNotFound -from domain.repository.questionnaire_repository.v1 import ( - PATH_TO_QUESTIONNAIRES, - QuestionnaireRepository, -) -from domain.repository.questionnaire_repository.v1.questionnaires import ( - QuestionnaireInstance, -) - - -def test_no_zombie_questionnaires(): - possible_paths = PATH_TO_QUESTIONNAIRES.glob("*/v*.json") - - questionnaires = defaultdict(list) - for path in possible_paths: - questionnaires[path.parent.name].append(path.stem) - - questionnaire_names_in_repo = sorted(questionnaires.keys()) - questionnaire_names_in_enum = sorted(QuestionnaireInstance._member_map_.values()) - assert questionnaire_names_in_repo == questionnaire_names_in_enum - - -@pytest.mark.parametrize( - "questionnaire_name", QuestionnaireInstance._member_map_.values() -) -def 
test_questionnaire_repository_read(questionnaire_name: str): - repo = QuestionnaireRepository() - questionnaire = repo.read(name=questionnaire_name) - assert questionnaire.name == questionnaire_name - assert questionnaire.version == "1" - assert len(questionnaire.questions) > 0 - - -def test_questionnaire_repository_read_not_found(): - repo = QuestionnaireRepository() - with pytest.raises(ItemNotFound): - repo.read(name="oops") diff --git a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/__init__.py b/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/__init__.py deleted file mode 100644 index 7490d617..00000000 --- a/src/layers/domain/repository/questionnaire_repository/v2/questionnaires/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -from enum import StrEnum, auto - - -class QuestionnaireInstance(StrEnum): - SPINE_AS = auto() - SPINE_MHS = auto() - SPINE_AS_ADDITIONAL_INTERACTIONS = auto() - SPINE_MHS_MESSAGE_SETS = auto() diff --git a/src/layers/domain/repository/repository/tests/model.py b/src/layers/domain/repository/repository/tests/model.py deleted file mode 100644 index 47292243..00000000 --- a/src/layers/domain/repository/repository/tests/model.py +++ /dev/null @@ -1,82 +0,0 @@ -from attr import asdict, dataclass -from domain.repository.marshall import marshall, marshall_value, unmarshall -from domain.repository.transaction import ( - ConditionExpression, - TransactionStatement, - TransactItem, -) -from pydantic import BaseModel, Field - - -@dataclass -class MyEventAdd: - field: str - - -@dataclass -class MyOtherEventAdd: - field: str - - -@dataclass -class MyEventDelete: - field: str - - -class MyModel(BaseModel): - field: str - events: list[MyEventAdd | MyOtherEventAdd | MyEventDelete] = Field( - default_factory=list, exclude=True - ) - - class Config: - arbitrary_types_allowed = True - - -class _NotFoundError(Exception): - pass - - -class MyRepositoryMixin: - def handle_MyEventAdd(self, event: MyEventAdd): - # This event will raise a transaction error on duplicates - return TransactItem( - Put=TransactionStatement( - TableName=self.table_name, - Item=marshall(pk=event.field, sk=event.field, **asdict(event)), - ConditionExpression=ConditionExpression.MUST_NOT_EXIST, - ) - ) - - def handle_MyOtherEventAdd(self, event: MyOtherEventAdd): - # This event will never raise a transaction error - key = "prefix:" + event.field - return TransactItem( - Put=TransactionStatement( - TableName=self.table_name, - Item=marshall(pk=key, sk=key, **asdict(event)), - ) - ) - - def handle_MyEventDelete(self, event: MyEventDelete): - # This event will never raise a transaction error - key = event.field - return TransactItem( - Delete=TransactionStatement( - TableName=self.table_name, - Key=marshall(pk=key, sk=key), - ConditionExpression=ConditionExpression.MUST_EXIST, - ) - ) - - def read(self, pk) -> MyModel: - args = { - "TableName": self.table_name, - "KeyConditionExpression": "pk = :pk", - "ExpressionAttributeValues": {":pk": marshall_value(pk)}, - } - result = self.client.query(**args) - items = [unmarshall(i) for i in result["Items"]] - if len(items) == 0: - raise _NotFoundError() - return MyModel(**items[0]) diff --git a/src/layers/domain/repository/repository/tests/model_v3.py b/src/layers/domain/repository/repository/tests/model_v1.py similarity index 95% rename from src/layers/domain/repository/repository/tests/model_v3.py rename to src/layers/domain/repository/repository/tests/model_v1.py index a9d18d24..ab073e6f 100644 --- 
a/src/layers/domain/repository/repository/tests/model_v3.py +++ b/src/layers/domain/repository/repository/tests/model_v1.py @@ -1,9 +1,9 @@ from enum import StrEnum from attr import asdict, dataclass -from domain.repository.keys.v1 import TableKeyAction +from domain.repository.keys import TableKeyAction from domain.repository.marshall import marshall -from domain.repository.repository.v3 import Repository +from domain.repository.repository import Repository from domain.repository.transaction import ( ConditionExpression, TransactionStatement, @@ -45,7 +45,7 @@ class Config: arbitrary_types_allowed = True -class MyRepositoryV3(Repository[MyModel]): +class MyRepository(Repository[MyModel]): def __init__(self): table_name = read_terraform_output("dynamodb_table_name.value") super().__init__( diff --git a/src/layers/domain/repository/repository/tests/test_repository_v1.py b/src/layers/domain/repository/repository/tests/test_repository_v1.py index 2a6605cc..a1466e49 100644 --- a/src/layers/domain/repository/repository/tests/test_repository_v1.py +++ b/src/layers/domain/repository/repository/tests/test_repository_v1.py @@ -1,40 +1,91 @@ import pytest -from domain.repository.errors import AlreadyExistsError, UnhandledTransaction -from domain.repository.repository import Repository -from event.aws.client import dynamodb_client - -from test_helpers.terraform import read_terraform_output +from domain.repository.errors import AlreadyExistsError, ItemNotFound +from domain.repository.repository import ( + exponential_backoff_with_jitter, + retry_with_jitter, +) -from .model import ( +from .model_v1 import ( MyEventAdd, MyEventDelete, MyModel, MyOtherEventAdd, - MyRepositoryMixin, - _NotFoundError, + MyRepository, + MyTableKey, ) @pytest.fixture def repository() -> "MyRepository": - table_name = read_terraform_output("dynamodb_table_name.value") - return MyRepository( - table_name=table_name, - model=MyModel, - dynamodb_client=dynamodb_client(), - ) - - -class MyRepository(Repository[MyModel], MyRepositoryMixin): - pass + return MyRepository() @pytest.mark.integration -def test_repository_write(repository: MyRepository): +def test_single_repository_write(repository: MyRepository): value = "123" my_item = MyModel(field=value, events=[MyEventAdd(field=value)]) repository.write(my_item) - assert repository.read(pk=value).dict() == my_item.dict() + assert repository.read(id=value).dict() == my_item.dict() + + +@pytest.mark.integration +def test_writes_to_same_key_split_over_batches_repository_write( + repository: MyRepository, +): + first_value = "123" + second_value = "abc" + third_value = "xyz" + + my_item = MyModel( + field=first_value, + events=[ + MyEventAdd(field=first_value), + MyEventAdd(field=second_value), + MyEventAdd(field=third_value), + # batch split should occur here since MyEventDelete requires + # MyEventAdd to have occurred first + MyEventDelete(field=first_value), + MyEventDelete(field=second_value), + MyEventDelete(field=third_value), + ], + ) + db_responses = repository.write(my_item) + batch_count = len(db_responses) + + with pytest.raises(ItemNotFound): + repository.read(id=first_value) + + with pytest.raises(ItemNotFound): + repository.read(id=second_value) + + with pytest.raises(ItemNotFound): + repository.read(id=third_value) + + assert batch_count == 2 + + +@pytest.mark.integration +@pytest.mark.parametrize( + ["number_of_adds", "number_of_batches"], + [ + (12, 1), + (100, 1), + (101, 2), + (150, 2), + (200, 2), + (201, 3), + ], +) +def 
test_writes_to_different_keys_split_over_batches_repository_write( + repository: MyRepository, number_of_adds: int, number_of_batches: int +): + my_item = MyModel( + field="abc", + events=[MyEventAdd(field=str(i)) for i in range(number_of_adds)], + ) + db_responses = repository.write(my_item) + batch_count = len(db_responses) + assert batch_count == number_of_batches @pytest.mark.integration @@ -60,43 +111,149 @@ def test_repository_raise_already_exists_multiple_events(repository: MyRepositor repository.write(my_item) # Should cause AlreadyExistsError -@pytest.mark.integration -def test_repository_raise_already_exists_from_single_transaction( - repository: MyRepository, -): - my_item = MyModel( - field="123", - events=[ - MyOtherEventAdd(field="456"), - MyEventAdd(field="123"), - MyOtherEventAdd(field="345"), - MyEventAdd(field="123"), - ], - ) - with pytest.raises(UnhandledTransaction) as exc: - repository.write(my_item) - assert str(exc.value) == "\n".join( - ( - "ValidationException: Transaction request cannot include multiple operations on one item", - f'{{"Put": {{"TableName": "{repository.table_name}", "Item": {{"pk": {{"S": "prefix:456"}}, "sk": {{"S": "prefix:456"}}, "field": {{"S": "456"}}}}}}}}', - f'{{"Put": {{"TableName": "{repository.table_name}", "Item": {{"pk": {{"S": "123"}}, "sk": {{"S": "123"}}, "field": {{"S": "123"}}}}, "ConditionExpression": "attribute_not_exists(pk)"}}}}', - f'{{"Put": {{"TableName": "{repository.table_name}", "Item": {{"pk": {{"S": "prefix:345"}}, "sk": {{"S": "prefix:345"}}, "field": {{"S": "345"}}}}}}}}', - f'{{"Put": {{"TableName": "{repository.table_name}", "Item": {{"pk": {{"S": "123"}}, "sk": {{"S": "123"}}, "field": {{"S": "123"}}}}, "ConditionExpression": "attribute_not_exists(pk)"}}}}', - ) - ) - - @pytest.mark.integration def test_repository_add_and_delete_separate_transactions(repository: MyRepository): value = "123" my_item = MyModel(field=value, events=[MyEventAdd(field=value)]) repository.write(my_item) - intermediate_item = repository.read(pk=value) + intermediate_item = repository.read(id=value) assert intermediate_item == my_item intermediate_item.events.append(MyEventDelete(field=value)) repository.write(intermediate_item) - with pytest.raises(_NotFoundError): - repository.read(pk=value) + with pytest.raises(ItemNotFound): + repository.read(id=value) + + +@pytest.mark.integration +def test_repository_write_bulk(repository: MyRepository): + responses = repository.write_bulk( + [ + { + "pk": str(i), + "sk": str(i), + "pk_read": MyTableKey.FOO.key(str(i)), + "sk_read": MyTableKey.FOO.key(str(i)), + "field": f"boo-{i}", + } + for i in range(51) + ], + batch_size=25, + ) + assert len(responses) >= 3 # 51/25 + + for i in range(51): + assert repository.read(id=str(i)).field == f"boo-{i}" + + +def test_exponential_backoff_with_jitter(): + base_delay = 0.1 + max_delay = 5 + min_delay = 0.05 + n_samples = 1000 + + delays = [] + for retry in range(n_samples): + delay = exponential_backoff_with_jitter( + n_retries=retry, + base_delay=base_delay, + min_delay=min_delay, + max_delay=max_delay, + ) + assert max_delay >= delay >= min_delay + delays.append(delay) + assert len(set(delays)) == n_samples # all delays should be unique + assert sum(delays[n_samples:]) < sum( + delays[:n_samples] + ) # final delays should be larger than first delays + + +@pytest.mark.parametrize( + "error_code", + [ + "ProvisionedThroughputExceededException", + "ThrottlingException", + "InternalServerError", + ], +) +def test_retry_with_jitter_all_fail(error_code: str): + 
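The two `*_split_over_batches_*` tests above pin down the batching behaviour of `Repository.write`: transactions are chunked into batches of at most 100 items, and a batch is closed early whenever an operation touches a key already written in that batch (for example a delete that depends on a preceding put of the same item). Below is a rough, self-contained sketch of that splitting rule, written against plain `(key, item)` pairs rather than the repository's real `TransactItem` type; it illustrates the contract the tests assert, not the actual implementation.

    def split_into_batches(keyed_items, batch_size=100):
        # keyed_items: iterable of (key, transact_item) pairs, where `key`
        # identifies the DynamoDB item the operation touches.
        # Yields batches of at most `batch_size` items in which no key repeats,
        # mirroring the splits asserted by the tests above.
        batch, seen_keys = [], set()
        for key, item in keyed_items:
            if key in seen_keys or len(batch) == batch_size:
                yield batch
                batch, seen_keys = [], set()
            batch.append(item)
            seen_keys.add(key)
        if batch:
            yield batch


    # 101 distinct keys -> 2 batches; put then delete of the same key -> 2 batches
    assert len(list(split_into_batches((str(i), {}) for i in range(101)))) == 2
    assert len(list(split_into_batches([("123", "PUT"), ("123", "DELETE")]))) == 2
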
class MockException(Exception): + def __init__(self, error_code): + self.response = {"Error": {"Code": error_code}} + + max_retries = 3 + + @retry_with_jitter(max_retries=max_retries, error=MockException) + def throw(error_code): + raise MockException(error_code=error_code) + + with pytest.raises(ExceptionGroup) as exception_info: + throw(error_code=error_code) + + assert ( + exception_info.value.message + == f"Failed to put item after {max_retries} retries" + ) + assert len(exception_info.value.exceptions) == max_retries + assert all( + isinstance(exc, MockException) for exc in exception_info.value.exceptions + ) + + +@pytest.mark.parametrize( + "error_code", + [ + "ProvisionedThroughputExceededException", + "ThrottlingException", + "InternalServerError", + ], +) +def test_retry_with_jitter_third_passes(error_code: str): + class MockException(Exception): + retries = 0 + + def __init__(self, error_code): + self.response = {"Error": {"Code": error_code}} + + max_retries = 3 + + @retry_with_jitter(max_retries=max_retries, error=MockException) + def throw(error_code): + if MockException.retries == max_retries - 1: + return "foo" + MockException.retries += 1 + raise MockException(error_code=error_code) + + assert throw(error_code=error_code) == "foo" + + +@pytest.mark.parametrize( + "error_code", + [ + "SomeOtherError", + ], +) +def test_retry_with_jitter_other_code(error_code: str): + class MockException(Exception): + def __init__(self, error_code): + self.response = {"Error": {"Code": error_code}} + + @retry_with_jitter(max_retries=3, error=MockException) + def throw(error_code): + raise MockException(error_code=error_code) + + with pytest.raises(MockException) as exception_info: + throw(error_code=error_code) + + assert exception_info.value.response == {"Error": {"Code": error_code}} + + +def test_retry_with_jitter_other_exception(): + @retry_with_jitter(max_retries=3, error=ValueError) + def throw(): + raise TypeError() + + with pytest.raises(TypeError): + throw() diff --git a/src/layers/domain/repository/repository/tests/test_repository_v2.py b/src/layers/domain/repository/repository/tests/test_repository_v2.py deleted file mode 100644 index a202f00d..00000000 --- a/src/layers/domain/repository/repository/tests/test_repository_v2.py +++ /dev/null @@ -1,265 +0,0 @@ -import pytest -from domain.repository.errors import AlreadyExistsError -from domain.repository.marshall import marshall -from domain.repository.repository.v2 import Repository as RepositoryV2 -from domain.repository.repository.v2 import ( - exponential_backoff_with_jitter, - retry_with_jitter, -) -from event.aws.client import dynamodb_client - -from test_helpers.terraform import read_terraform_output - -from .model import ( - MyEventAdd, - MyEventDelete, - MyModel, - MyOtherEventAdd, - MyRepositoryMixin, - _NotFoundError, -) - - -@pytest.fixture -def repository() -> "MyRepositoryV2": - table_name = read_terraform_output("dynamodb_table_name.value") - return MyRepositoryV2( - table_name=table_name, - model=MyModel, - dynamodb_client=dynamodb_client(), - ) - - -class MyRepositoryV2(RepositoryV2[MyModel], MyRepositoryMixin): - def handle_bulk(self, item): - return [{"PutRequest": {"Item": marshall(**item)}}] - - -@pytest.mark.integration -def test_single_repository_write(repository: MyRepositoryV2): - value = "123" - my_item = MyModel(field=value, events=[MyEventAdd(field=value)]) - repository.write(my_item) - assert repository.read(pk=value).dict() == my_item.dict() - - -@pytest.mark.integration -def 
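The `retry_with_jitter` and `exponential_backoff_with_jitter` tests carried over above describe the retry contract: only throttling-style DynamoDB error codes are retried, delays grow with the retry count but stay inside `[min_delay, max_delay]` with random jitter, other error codes and unrelated exception types propagate unchanged, and exhausting the retries raises an `ExceptionGroup`. The sketch below reconstructs that contract under those assumptions; the function names and defaults mirror the tests, not the actual implementation in `domain.repository.repository`.

    import random
    import time
    from functools import wraps

    RETRYABLE_ERROR_CODES = {
        "ProvisionedThroughputExceededException",
        "ThrottlingException",
        "InternalServerError",
    }


    def exponential_backoff_with_jitter(
        n_retries, base_delay=0.1, min_delay=0.05, max_delay=5.0
    ):
        # Exponentially growing ceiling, capped at max_delay, with full jitter
        # so concurrent writers do not retry in lock-step.
        ceiling = min(max_delay, base_delay * (2**n_retries))
        return random.uniform(min_delay, max(min_delay, ceiling))


    def retry_with_jitter(max_retries=3, error=Exception):
        def decorator(fn):
            @wraps(fn)
            def wrapper(*args, **kwargs):
                caught = []
                for n_retries in range(max_retries):
                    try:
                        return fn(*args, **kwargs)
                    except error as exc:
                        code = exc.response["Error"]["Code"]
                        if code not in RETRYABLE_ERROR_CODES:
                            raise  # non-retryable codes propagate unchanged
                        caught.append(exc)
                        time.sleep(exponential_backoff_with_jitter(n_retries=n_retries))
                raise ExceptionGroup(
                    f"Failed to put item after {max_retries} retries", caught
                )

            return wrapper

        return decorator
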
test_writes_to_same_key_split_over_batches_repository_write( - repository: MyRepositoryV2, -): - first_value = "123" - second_value = "abc" - third_value = "xyz" - - my_item = MyModel( - field=first_value, - events=[ - MyEventAdd(field=first_value), - MyEventAdd(field=second_value), - MyEventAdd(field=third_value), - # batch split should occur here since MyEventDelete requires - # MyEventAdd to have occurred first - MyEventDelete(field=first_value), - MyEventDelete(field=second_value), - MyEventDelete(field=third_value), - ], - ) - db_responses = repository.write(my_item) - batch_count = len(db_responses) - - with pytest.raises(_NotFoundError): - repository.read(pk=first_value) - - with pytest.raises(_NotFoundError): - repository.read(pk=second_value) - - with pytest.raises(_NotFoundError): - repository.read(pk=third_value) - - assert batch_count == 2 - - -@pytest.mark.integration -@pytest.mark.parametrize( - ["number_of_adds", "number_of_batches"], - [ - (12, 1), - (100, 1), - (101, 2), - (150, 2), - (200, 2), - (201, 3), - ], -) -def test_writes_to_different_keys_split_over_batches_repository_write( - repository: MyRepositoryV2, number_of_adds: int, number_of_batches: int -): - my_item = MyModel( - field="abc", - events=[MyEventAdd(field=str(i)) for i in range(number_of_adds)], - ) - db_responses = repository.write(my_item) - batch_count = len(db_responses) - assert batch_count == number_of_batches - - -@pytest.mark.integration -def test_repository_raise_already_exists(repository: MyRepositoryV2): - my_item = MyModel(field="123", events=[MyEventAdd(field="123")]) - repository.write(my_item) - with pytest.raises(AlreadyExistsError): - repository.write(my_item) - - -@pytest.mark.integration -def test_repository_raise_already_exists_multiple_events(repository: MyRepositoryV2): - my_item = MyModel( - field="123", - events=[ - MyOtherEventAdd(field="456"), - MyEventAdd(field="123"), - MyOtherEventAdd(field="345"), - ], - ) - repository.write(my_item) - with pytest.raises(AlreadyExistsError): - repository.write(my_item) # Should cause AlreadyExistsError - - -@pytest.mark.integration -def test_repository_add_and_delete_separate_transactions(repository: MyRepositoryV2): - value = "123" - my_item = MyModel(field=value, events=[MyEventAdd(field=value)]) - repository.write(my_item) - intermediate_item = repository.read(pk=value) - - assert intermediate_item == my_item - - intermediate_item.events.append(MyEventDelete(field=value)) - repository.write(intermediate_item) - - with pytest.raises(_NotFoundError): - repository.read(pk=value) - - -@pytest.mark.integration -def test_repository_write_bulk(repository: MyRepositoryV2): - responses = repository.write_bulk( - [{"pk": str(i), "sk": str(i), "field": f"boo-{i}"} for i in range(51)], - batch_size=25, - ) - assert len(responses) >= 3 # 51/25 - - for i in range(51): - assert repository.read(pk=str(i)).field == f"boo-{i}" - - -def test_exponential_backoff_with_jitter(): - base_delay = 0.1 - max_delay = 5 - min_delay = 0.05 - n_samples = 1000 - - delays = [] - for retry in range(n_samples): - delay = exponential_backoff_with_jitter( - n_retries=retry, - base_delay=base_delay, - min_delay=min_delay, - max_delay=max_delay, - ) - assert max_delay >= delay >= min_delay - delays.append(delay) - assert len(set(delays)) == n_samples # all delays should be unique - assert sum(delays[n_samples:]) < sum( - delays[:n_samples] - ) # final delays should be larger than first delays - - -@pytest.mark.parametrize( - "error_code", - [ - 
"ProvisionedThroughputExceededException", - "ThrottlingException", - "InternalServerError", - ], -) -def test_retry_with_jitter_all_fail(error_code: str): - class MockException(Exception): - def __init__(self, error_code): - self.response = {"Error": {"Code": error_code}} - - max_retries = 3 - - @retry_with_jitter(max_retries=max_retries, error=MockException) - def throw(error_code): - raise MockException(error_code=error_code) - - with pytest.raises(ExceptionGroup) as exception_info: - throw(error_code=error_code) - - assert ( - exception_info.value.message - == f"Failed to put item after {max_retries} retries" - ) - assert len(exception_info.value.exceptions) == max_retries - assert all( - isinstance(exc, MockException) for exc in exception_info.value.exceptions - ) - - -@pytest.mark.parametrize( - "error_code", - [ - "ProvisionedThroughputExceededException", - "ThrottlingException", - "InternalServerError", - ], -) -def test_retry_with_jitter_third_passes(error_code: str): - class MockException(Exception): - retries = 0 - - def __init__(self, error_code): - self.response = {"Error": {"Code": error_code}} - - max_retries = 3 - - @retry_with_jitter(max_retries=max_retries, error=MockException) - def throw(error_code): - if MockException.retries == max_retries - 1: - return "foo" - MockException.retries += 1 - raise MockException(error_code=error_code) - - assert throw(error_code=error_code) == "foo" - - -@pytest.mark.parametrize( - "error_code", - [ - "SomeOtherError", - ], -) -def test_retry_with_jitter_other_code(error_code: str): - class MockException(Exception): - def __init__(self, error_code): - self.response = {"Error": {"Code": error_code}} - - @retry_with_jitter(max_retries=3, error=MockException) - def throw(error_code): - raise MockException(error_code=error_code) - - with pytest.raises(MockException) as exception_info: - throw(error_code=error_code) - - assert exception_info.value.response == {"Error": {"Code": error_code}} - - -def test_retry_with_jitter_other_exception(): - @retry_with_jitter(max_retries=3, error=ValueError) - def throw(): - raise TypeError() - - with pytest.raises(TypeError): - throw() diff --git a/src/layers/domain/repository/repository/tests/test_repository_v3.py b/src/layers/domain/repository/repository/tests/test_repository_v3.py deleted file mode 100644 index 9512e759..00000000 --- a/src/layers/domain/repository/repository/tests/test_repository_v3.py +++ /dev/null @@ -1,259 +0,0 @@ -import pytest -from domain.repository.errors import AlreadyExistsError, ItemNotFound -from domain.repository.repository.v3 import ( - exponential_backoff_with_jitter, - retry_with_jitter, -) - -from .model_v3 import ( - MyEventAdd, - MyEventDelete, - MyModel, - MyOtherEventAdd, - MyRepositoryV3, - MyTableKey, -) - - -@pytest.fixture -def repository() -> "MyRepositoryV3": - return MyRepositoryV3() - - -@pytest.mark.integration -def test_single_repository_write(repository: MyRepositoryV3): - value = "123" - my_item = MyModel(field=value, events=[MyEventAdd(field=value)]) - repository.write(my_item) - assert repository.read(id=value).dict() == my_item.dict() - - -@pytest.mark.integration -def test_writes_to_same_key_split_over_batches_repository_write( - repository: MyRepositoryV3, -): - first_value = "123" - second_value = "abc" - third_value = "xyz" - - my_item = MyModel( - field=first_value, - events=[ - MyEventAdd(field=first_value), - MyEventAdd(field=second_value), - MyEventAdd(field=third_value), - # batch split should occur here since MyEventDelete requires - # 
MyEventAdd to have occurred first - MyEventDelete(field=first_value), - MyEventDelete(field=second_value), - MyEventDelete(field=third_value), - ], - ) - db_responses = repository.write(my_item) - batch_count = len(db_responses) - - with pytest.raises(ItemNotFound): - repository.read(id=first_value) - - with pytest.raises(ItemNotFound): - repository.read(id=second_value) - - with pytest.raises(ItemNotFound): - repository.read(id=third_value) - - assert batch_count == 2 - - -@pytest.mark.integration -@pytest.mark.parametrize( - ["number_of_adds", "number_of_batches"], - [ - (12, 1), - (100, 1), - (101, 2), - (150, 2), - (200, 2), - (201, 3), - ], -) -def test_writes_to_different_keys_split_over_batches_repository_write( - repository: MyRepositoryV3, number_of_adds: int, number_of_batches: int -): - my_item = MyModel( - field="abc", - events=[MyEventAdd(field=str(i)) for i in range(number_of_adds)], - ) - db_responses = repository.write(my_item) - batch_count = len(db_responses) - assert batch_count == number_of_batches - - -@pytest.mark.integration -def test_repository_raise_already_exists(repository: MyRepositoryV3): - my_item = MyModel(field="123", events=[MyEventAdd(field="123")]) - repository.write(my_item) - with pytest.raises(AlreadyExistsError): - repository.write(my_item) - - -@pytest.mark.integration -def test_repository_raise_already_exists_multiple_events(repository: MyRepositoryV3): - my_item = MyModel( - field="123", - events=[ - MyOtherEventAdd(field="456"), - MyEventAdd(field="123"), - MyOtherEventAdd(field="345"), - ], - ) - repository.write(my_item) - with pytest.raises(AlreadyExistsError): - repository.write(my_item) # Should cause AlreadyExistsError - - -@pytest.mark.integration -def test_repository_add_and_delete_separate_transactions(repository: MyRepositoryV3): - value = "123" - my_item = MyModel(field=value, events=[MyEventAdd(field=value)]) - repository.write(my_item) - intermediate_item = repository.read(id=value) - - assert intermediate_item == my_item - - intermediate_item.events.append(MyEventDelete(field=value)) - repository.write(intermediate_item) - - with pytest.raises(ItemNotFound): - repository.read(id=value) - - -@pytest.mark.integration -def test_repository_write_bulk(repository: MyRepositoryV3): - responses = repository.write_bulk( - [ - { - "pk": str(i), - "sk": str(i), - "pk_read": MyTableKey.FOO.key(str(i)), - "sk_read": MyTableKey.FOO.key(str(i)), - "field": f"boo-{i}", - } - for i in range(51) - ], - batch_size=25, - ) - assert len(responses) >= 3 # 51/25 - - for i in range(51): - assert repository.read(id=str(i)).field == f"boo-{i}" - - -def test_exponential_backoff_with_jitter(): - base_delay = 0.1 - max_delay = 5 - min_delay = 0.05 - n_samples = 1000 - - delays = [] - for retry in range(n_samples): - delay = exponential_backoff_with_jitter( - n_retries=retry, - base_delay=base_delay, - min_delay=min_delay, - max_delay=max_delay, - ) - assert max_delay >= delay >= min_delay - delays.append(delay) - assert len(set(delays)) == n_samples # all delays should be unique - assert sum(delays[n_samples:]) < sum( - delays[:n_samples] - ) # final delays should be larger than first delays - - -@pytest.mark.parametrize( - "error_code", - [ - "ProvisionedThroughputExceededException", - "ThrottlingException", - "InternalServerError", - ], -) -def test_retry_with_jitter_all_fail(error_code: str): - class MockException(Exception): - def __init__(self, error_code): - self.response = {"Error": {"Code": error_code}} - - max_retries = 3 - - 
@retry_with_jitter(max_retries=max_retries, error=MockException) - def throw(error_code): - raise MockException(error_code=error_code) - - with pytest.raises(ExceptionGroup) as exception_info: - throw(error_code=error_code) - - assert ( - exception_info.value.message - == f"Failed to put item after {max_retries} retries" - ) - assert len(exception_info.value.exceptions) == max_retries - assert all( - isinstance(exc, MockException) for exc in exception_info.value.exceptions - ) - - -@pytest.mark.parametrize( - "error_code", - [ - "ProvisionedThroughputExceededException", - "ThrottlingException", - "InternalServerError", - ], -) -def test_retry_with_jitter_third_passes(error_code: str): - class MockException(Exception): - retries = 0 - - def __init__(self, error_code): - self.response = {"Error": {"Code": error_code}} - - max_retries = 3 - - @retry_with_jitter(max_retries=max_retries, error=MockException) - def throw(error_code): - if MockException.retries == max_retries - 1: - return "foo" - MockException.retries += 1 - raise MockException(error_code=error_code) - - assert throw(error_code=error_code) == "foo" - - -@pytest.mark.parametrize( - "error_code", - [ - "SomeOtherError", - ], -) -def test_retry_with_jitter_other_code(error_code: str): - class MockException(Exception): - def __init__(self, error_code): - self.response = {"Error": {"Code": error_code}} - - @retry_with_jitter(max_retries=3, error=MockException) - def throw(error_code): - raise MockException(error_code=error_code) - - with pytest.raises(MockException) as exception_info: - throw(error_code=error_code) - - assert exception_info.value.response == {"Error": {"Code": error_code}} - - -def test_retry_with_jitter_other_exception(): - @retry_with_jitter(max_retries=3, error=ValueError) - def throw(): - raise TypeError() - - with pytest.raises(TypeError): - throw() diff --git a/src/layers/domain/repository/repository/v1.py b/src/layers/domain/repository/repository/v1.py index b34b270d..0ba06016 100644 --- a/src/layers/domain/repository/repository/v1.py +++ b/src/layers/domain/repository/repository/v1.py @@ -1,39 +1,304 @@ -from itertools import batched -from typing import TYPE_CHECKING +import random +import time +from abc import abstractmethod +from enum import StrEnum +from functools import wraps +from itertools import batched, chain +from typing import TYPE_CHECKING, Generator, Iterable +from botocore.exceptions import ClientError from domain.core.aggregate_root import AggregateRoot -from domain.repository.transaction import ( +from domain.repository.errors import ItemNotFound +from domain.repository.keys import KEY_SEPARATOR, TableKey +from domain.repository.marshall import marshall, unmarshall +from domain.repository.transaction import ( # TransactItem, + ConditionExpression, Transaction, + TransactionStatement, TransactItem, handle_client_errors, + update_transactions, ) if TYPE_CHECKING: from mypy_boto3_dynamodb import DynamoDBClient + from mypy_boto3_dynamodb.type_defs import ( + BatchWriteItemOutputTypeDef, + TransactWriteItemsOutputTypeDef, + ) BATCH_SIZE = 100 +MAX_BATCH_WRITE_SIZE = 10 +RETRY_ERRORS = [ + "ProvisionedThroughputExceededException", + "ThrottlingException", + "InternalServerError", +] + + +class TooManyResults(Exception): + pass + + +class QueryType(StrEnum): + EQUALS = "{} = {}" + BEGINS_WITH = "begins_with({}, {})" + + +def exponential_backoff_with_jitter( + n_retries, base_delay=0.1, min_delay=0.05, max_delay=5 +): + """Calculate the delay with exponential backoff and jitter.""" + delay = 
min(base_delay * (2**n_retries), max_delay) + return random.uniform(min_delay, delay) + + +def retry_with_jitter(max_retries=5, error=ClientError): + def wrapper(func): + @wraps(func) + def wrapped(*args, **kwargs): + exceptions = [] + while len(exceptions) < max_retries: + try: + return func(*args, **kwargs) + except error as e: + error_code = e.response["Error"]["Code"] + if error_code not in RETRY_ERRORS: + raise + exceptions.append(e) + delay = exponential_backoff_with_jitter(n_retries=len(exceptions)) + time.sleep(delay) + raise ExceptionGroup( + f"Failed to put item after {max_retries} retries", exceptions + ) + + return wrapped + + return wrapper + + +def _split_transactions_by_key( + transact_items: Iterable[TransactItem], n_max: int +) -> Generator[list[TransactItem], None, None]: + buffer, keys = [], set() + for transact_item in transact_items: + transaction_statement = ( + transact_item.Put or transact_item.Delete or transact_item.Update + ) + item = transaction_statement.Key or transaction_statement.Item + key = (item["pk"]["S"], item["sk"]["S"]) + if key in keys: + yield from batched(buffer, n=n_max) + buffer, keys = [], set() + buffer.append(transact_item) + keys.add(key) + yield from batched(buffer, n=n_max) + + +def transact_write_chunk( + client: "DynamoDBClient", chunk: list[TransactItem] +) -> "TransactWriteItemsOutputTypeDef": + transaction = Transaction(TransactItems=chunk) + with handle_client_errors(commands=chunk): + _response = client.transact_write_items(**transaction.dict(exclude_none=True)) + return _response + + +@retry_with_jitter() +def batch_write_chunk( + client: "DynamoDBClient", table_name: str, chunk: list[dict] +) -> "BatchWriteItemOutputTypeDef": + while chunk: + _response = client.batch_write_item(RequestItems={table_name: chunk}) + chunk = _response["UnprocessedItems"].get(table_name) + return _response class Repository[ModelType: AggregateRoot]: - def __init__(self, table_name, model: type[ModelType], dynamodb_client): + + def __init__( + self, + table_name, + model: type[ModelType], + dynamodb_client, + parent_table_keys: tuple[TableKey], + table_key: TableKey, + ): self.table_name = table_name self.model = model self.client: "DynamoDBClient" = dynamodb_client + self.batch_size = BATCH_SIZE + self.parent_table_keys = parent_table_keys + self.table_key = table_key + + @abstractmethod + def handle_bulk(self, item): ... 
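To make the retry machinery above concrete, here is a minimal usage sketch. It is illustrative only and not part of the patch: the client, table name and item are placeholders, and the import path simply assumes the module touched by this change. `retry_with_jitter` re-invokes the wrapped call after a jittered, exponentially growing sleep whenever the boto3 error code is listed in RETRY_ERRORS, and raises an ExceptionGroup once `max_retries` attempts have failed.

from unittest.mock import MagicMock

from botocore.exceptions import ClientError
from domain.repository.repository.v1 import retry_with_jitter

dynamodb = MagicMock()  # stand-in for a real DynamoDBClient
dynamodb.put_item.side_effect = [
    ClientError({"Error": {"Code": "ThrottlingException"}}, "PutItem"),  # retryable: swallowed, then retried after a jittered sleep
    {"ResponseMetadata": {"HTTPStatusCode": 200}},  # second attempt succeeds
]


@retry_with_jitter(max_retries=3)  # error codes outside RETRY_ERRORS are re-raised immediately
def put_item(item: dict):
    return dynamodb.put_item(TableName="example-table", Item=item)


put_item({"pk": {"S": "P.XXX"}, "sk": {"S": "P.XXX"}})  # returns on the second attempt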
- def write(self, entity: ModelType, batch_size=BATCH_SIZE): - def generate_transaction_statements(event) -> TransactItem: + def write(self, entity: ModelType, batch_size=None): + batch_size = batch_size or self.batch_size + + def generate_transaction_statements(event): handler_name = f"handle_{type(event).__name__}" handler = getattr(self, handler_name) - return handler(event=event) - - responses = [] - for events in batched(entity.events, n=batch_size): - transact_items = list(map(generate_transaction_statements, events)) - transaction = Transaction(TransactItems=transact_items) - - with handle_client_errors(commands=transact_items): - _response = self.client.transact_write_items( - **transaction.dict(exclude_none=True) - ) - responses.append(_response) + transact_items = handler(event=event) + + if not isinstance(transact_items, list): + transact_items = [transact_items] + return transact_items + + transact_items = chain.from_iterable( + (generate_transaction_statements(event) for event in entity.events) + ) + + responses = [ + transact_write_chunk(client=self.client, chunk=transact_item_chunk) + for transact_item_chunk in _split_transactions_by_key( + transact_items, batch_size + ) + ] return responses + + def write_bulk(self, entities: list[ModelType], batch_size=None): + batch_size = batch_size or MAX_BATCH_WRITE_SIZE + batch_write_items = list(chain.from_iterable(map(self.handle_bulk, entities))) + responses = [ + batch_write_chunk( + client=self.client, table_name=self.table_name, chunk=chunk + ) + for chunk in batched(batch_write_items, batch_size) + ] + return responses + + def create_index( + self, + id: str, + parent_key_parts: tuple[str], + data: dict, + root: bool, + table_key: TableKey = None, + parent_table_keys: tuple[TableKey] = None, + ) -> TransactItem: + if table_key is None: + table_key = self.table_key + if parent_table_keys is None: + parent_table_keys = self.parent_table_keys + + if len(parent_table_keys) != len(parent_key_parts): + raise ValueError( + f"Expected {len(parent_table_keys)} parent key parts, got {len(parent_key_parts)}" + ) + + write_key = table_key.key(id) + read_key = KEY_SEPARATOR.join( + table_key.key(_id) + for table_key, _id in zip(parent_table_keys, parent_key_parts) + ) + + return TransactItem( + Put=TransactionStatement( + TableName=self.table_name, + Item=marshall( + pk=write_key, + sk=write_key, + pk_read=read_key, + sk_read=write_key, + root=root, + **data, + ), + ConditionExpression=ConditionExpression.MUST_NOT_EXIST, + ) + ) + + def create_index_batch( + self, + id: str, + parent_key_parts: tuple[str], + data: dict, + root: bool, + table_key: TableKey = None, + parent_table_keys: tuple[TableKey] = None, + ) -> TransactItem: + """ + Difference between `create_index` and `create_index_batch`: + + `create_index` is intended for the event-based + handlers (e.g. 
`handle_XyzCreatedEvent`) which are called by the base + `write` method, which expects `TransactItem`s for use with `client.transact_write_items` + + `create_index_batch` is intended for the entity-based handler + `handle_bulk` which is called by the base method `write_bulk`, which expects + `BatchWriteItem`s which we render as a `dict` for use with `client.batch_write_item` + """ + + if table_key is None: + table_key = self.table_key + if parent_table_keys is None: + parent_table_keys = self.parent_table_keys + + write_key = table_key.key(id) + read_key = KEY_SEPARATOR.join( + table_key.key(_id) + for table_key, _id in zip(parent_table_keys, parent_key_parts) + ) + + return { + "PutRequest": { + "Item": marshall( + pk=write_key, + sk=write_key, + pk_read=read_key, + sk_read=write_key, + root=root, + **data, + ), + }, + } + + def update_indexes(self, id: str, keys: list[str], data: dict): + primary_keys = [ + marshall(pk=pk, sk=pk) for pk in map(self.table_key.key, [id, *keys]) + ] + return update_transactions( + table_name=self.table_name, primary_keys=primary_keys, data=data + ) + + def delete_index(self, id: str): + pk = self.table_key.key(id) + return TransactItem( + Delete=TransactionStatement( + TableName=self.table_name, + Key=marshall(pk=pk, sk=pk), + ConditionExpression=ConditionExpression.MUST_EXIST, + ) + ) + + def _query(self, parent_ids: tuple[str], id: str = None) -> list[ModelType]: + pk_read = KEY_SEPARATOR.join( + table_key.key(_id) + for table_key, _id in zip(self.parent_table_keys, parent_ids) + ) + sk_read = self.table_key.key(id or "") + + sk_query_type = QueryType.BEGINS_WITH if id is None else QueryType.EQUALS + sk_condition = sk_query_type.format("sk_read", ":sk_read") + + args = { + "TableName": self.table_name, + "IndexName": "idx_gsi_read", + "KeyConditionExpression": f"pk_read = :pk_read AND {sk_condition}", + "ExpressionAttributeValues": marshall( + **{":pk_read": pk_read, ":sk_read": sk_read} + ), + } + result = self.client.query(**args) + if "LastEvaluatedKey" in result: + raise TooManyResults(f"Too many results for query ({(*parent_ids, id)})") + return [self.model(**item) for item in map(unmarshall, result["Items"])] + + def _read(self, parent_ids: tuple[str], id: str) -> ModelType: + items = self._query(parent_ids=parent_ids or (id,), id=id) + try: + (item,) = items + except ValueError: + raise ItemNotFound(*parent_ids, id, item_type=self.model) + return item diff --git a/src/layers/domain/repository/repository/v2.py b/src/layers/domain/repository/repository/v2.py deleted file mode 100644 index 368fdf76..00000000 --- a/src/layers/domain/repository/repository/v2.py +++ /dev/null @@ -1,144 +0,0 @@ -import random -import time -from abc import abstractmethod -from functools import wraps -from itertools import batched, chain -from typing import TYPE_CHECKING, Generator, Iterable - -from botocore.exceptions import ClientError -from domain.core.aggregate_root import AggregateRoot -from domain.repository.transaction import ( # TransactItem, - Transaction, - TransactItem, - handle_client_errors, -) - -if TYPE_CHECKING: - from mypy_boto3_dynamodb import DynamoDBClient - from mypy_boto3_dynamodb.type_defs import ( - BatchWriteItemOutputTypeDef, - TransactWriteItemsOutputTypeDef, - ) - -BATCH_SIZE = 100 -MAX_BATCH_WRITE_SIZE = 10 -RETRY_ERRORS = [ - "ProvisionedThroughputExceededException", - "ThrottlingException", - "InternalServerError", -] - - -def exponential_backoff_with_jitter( - n_retries, base_delay=0.1, min_delay=0.05, max_delay=5 -): - """Calculate the delay 
with exponential backoff and jitter.""" - delay = min(base_delay * (2**n_retries), max_delay) - return random.uniform(min_delay, delay) - - -def retry_with_jitter(max_retries=5, error=ClientError): - def wrapper(func): - @wraps(func) - def wrapped(*args, **kwargs): - exceptions = [] - while len(exceptions) < max_retries: - try: - return func(*args, **kwargs) - except error as e: - error_code = e.response["Error"]["Code"] - if error_code not in RETRY_ERRORS: - raise - exceptions.append(e) - delay = exponential_backoff_with_jitter(n_retries=len(exceptions)) - time.sleep(delay) - raise ExceptionGroup( - f"Failed to put item after {max_retries} retries", exceptions - ) - - return wrapped - - return wrapper - - -def _split_transactions_by_key( - transact_items: Iterable[TransactItem], n_max: int -) -> Generator[list[TransactItem], None, None]: - buffer, keys = [], set() - for transact_item in transact_items: - transaction_statement = ( - transact_item.Put or transact_item.Delete or transact_item.Update - ) - item = transaction_statement.Key or transaction_statement.Item - key = (item["pk"]["S"], item["sk"]["S"]) - if key in keys: - yield from batched(buffer, n=n_max) - buffer, keys = [], set() - buffer.append(transact_item) - keys.add(key) - yield from batched(buffer, n=n_max) - - -def transact_write_chunk( - client: "DynamoDBClient", chunk: list[TransactItem] -) -> "TransactWriteItemsOutputTypeDef": - transaction = Transaction(TransactItems=chunk) - with handle_client_errors(commands=chunk): - _response = client.transact_write_items(**transaction.dict(exclude_none=True)) - return _response - - -@retry_with_jitter() -def batch_write_chunk( - client: "DynamoDBClient", table_name: str, chunk: list[dict] -) -> "BatchWriteItemOutputTypeDef": - while chunk: - _response = client.batch_write_item(RequestItems={table_name: chunk}) - chunk = _response["UnprocessedItems"].get(table_name) - return _response - - -class Repository[ModelType: AggregateRoot]: - def __init__(self, table_name, model: type[ModelType], dynamodb_client): - self.table_name = table_name - self.model = model - self.client: "DynamoDBClient" = dynamodb_client - self.batch_size = BATCH_SIZE - - @abstractmethod - def handle_bulk(self, item): ... 
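The contrast drawn in the `create_index` / `create_index_batch` docstring in repository/v1.py above comes down to the payload shape each path produces. The sketch below is illustrative only: the table name, key strings and "#" separator are placeholders rather than the real TableKey members and KEY_SEPARATOR.

# Event path (Repository.write): one conditional Put per event, executed via
# client.transact_write_items, so re-creating an existing item fails the transaction.
transact_item = {
    "Put": {
        "TableName": "example-table",
        "Item": {
            "pk": {"S": "PT#123"},  # table_key.key(id)
            "sk": {"S": "PT#123"},
            "pk_read": {"S": "ORG#AAA"},  # parent keys joined with the key separator
            "sk_read": {"S": "PT#123"},
            "root": {"BOOL": True},
            "name": {"S": "demo"},
        },
        "ConditionExpression": "attribute_not_exists(pk)",
    }
}

# Bulk path (Repository.write_bulk): unconditional PutRequests sent in chunks via
# client.batch_write_item, so existing items are overwritten rather than rejected.
batch_write_request = {"PutRequest": {"Item": transact_item["Put"]["Item"]}}

# Read path (Repository._query): the "idx_gsi_read" GSI is queried on pk_read, with an
# exact sk_read match to read one child or begins_with to search all children of a parent.
key_condition = "pk_read = :pk_read AND begins_with(sk_read, :sk_read)"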
- - def write(self, entity: ModelType, batch_size=None): - batch_size = batch_size or self.batch_size - - def generate_transaction_statements(event): - handler_name = f"handle_{type(event).__name__}" - handler = getattr(self, handler_name) - transact_items = handler(event=event) - - if not isinstance(transact_items, list): - transact_items = [transact_items] - return transact_items - - transact_items = chain.from_iterable( - (generate_transaction_statements(event) for event in entity.events) - ) - - responses = [ - transact_write_chunk(client=self.client, chunk=transact_item_chunk) - for transact_item_chunk in _split_transactions_by_key( - transact_items, batch_size - ) - ] - return responses - - def write_bulk(self, entities: list[ModelType], batch_size=None): - batch_size = batch_size or MAX_BATCH_WRITE_SIZE - batch_write_items = list(chain.from_iterable(map(self.handle_bulk, entities))) - responses = [ - batch_write_chunk( - client=self.client, table_name=self.table_name, chunk=chunk - ) - for chunk in batched(batch_write_items, batch_size) - ] - return responses diff --git a/src/layers/domain/repository/repository/v3.py b/src/layers/domain/repository/repository/v3.py deleted file mode 100644 index 9434d918..00000000 --- a/src/layers/domain/repository/repository/v3.py +++ /dev/null @@ -1,305 +0,0 @@ -import random -import time -from abc import abstractmethod -from enum import StrEnum -from functools import wraps -from itertools import batched, chain -from typing import TYPE_CHECKING, Generator, Iterable - -from botocore.exceptions import ClientError -from domain.core.aggregate_root import AggregateRoot -from domain.repository.errors import ItemNotFound -from domain.repository.keys.v1 import KEY_SEPARATOR -from domain.repository.keys.v3 import TableKey -from domain.repository.marshall import marshall, unmarshall -from domain.repository.transaction import ( # TransactItem, - ConditionExpression, - Transaction, - TransactionStatement, - TransactItem, - handle_client_errors, - update_transactions, -) - -if TYPE_CHECKING: - from mypy_boto3_dynamodb import DynamoDBClient - from mypy_boto3_dynamodb.type_defs import ( - BatchWriteItemOutputTypeDef, - TransactWriteItemsOutputTypeDef, - ) - -BATCH_SIZE = 100 -MAX_BATCH_WRITE_SIZE = 10 -RETRY_ERRORS = [ - "ProvisionedThroughputExceededException", - "ThrottlingException", - "InternalServerError", -] - - -class TooManyResults(Exception): - pass - - -class QueryType(StrEnum): - EQUALS = "{} = {}" - BEGINS_WITH = "begins_with({}, {})" - - -def exponential_backoff_with_jitter( - n_retries, base_delay=0.1, min_delay=0.05, max_delay=5 -): - """Calculate the delay with exponential backoff and jitter.""" - delay = min(base_delay * (2**n_retries), max_delay) - return random.uniform(min_delay, delay) - - -def retry_with_jitter(max_retries=5, error=ClientError): - def wrapper(func): - @wraps(func) - def wrapped(*args, **kwargs): - exceptions = [] - while len(exceptions) < max_retries: - try: - return func(*args, **kwargs) - except error as e: - error_code = e.response["Error"]["Code"] - if error_code not in RETRY_ERRORS: - raise - exceptions.append(e) - delay = exponential_backoff_with_jitter(n_retries=len(exceptions)) - time.sleep(delay) - raise ExceptionGroup( - f"Failed to put item after {max_retries} retries", exceptions - ) - - return wrapped - - return wrapper - - -def _split_transactions_by_key( - transact_items: Iterable[TransactItem], n_max: int -) -> Generator[list[TransactItem], None, None]: - buffer, keys = [], set() - for transact_item in 
transact_items: - transaction_statement = ( - transact_item.Put or transact_item.Delete or transact_item.Update - ) - item = transaction_statement.Key or transaction_statement.Item - key = (item["pk"]["S"], item["sk"]["S"]) - if key in keys: - yield from batched(buffer, n=n_max) - buffer, keys = [], set() - buffer.append(transact_item) - keys.add(key) - yield from batched(buffer, n=n_max) - - -def transact_write_chunk( - client: "DynamoDBClient", chunk: list[TransactItem] -) -> "TransactWriteItemsOutputTypeDef": - transaction = Transaction(TransactItems=chunk) - with handle_client_errors(commands=chunk): - _response = client.transact_write_items(**transaction.dict(exclude_none=True)) - return _response - - -@retry_with_jitter() -def batch_write_chunk( - client: "DynamoDBClient", table_name: str, chunk: list[dict] -) -> "BatchWriteItemOutputTypeDef": - while chunk: - _response = client.batch_write_item(RequestItems={table_name: chunk}) - chunk = _response["UnprocessedItems"].get(table_name) - return _response - - -class Repository[ModelType: AggregateRoot]: - - def __init__( - self, - table_name, - model: type[ModelType], - dynamodb_client, - parent_table_keys: tuple[TableKey], - table_key: TableKey, - ): - self.table_name = table_name - self.model = model - self.client: "DynamoDBClient" = dynamodb_client - self.batch_size = BATCH_SIZE - self.parent_table_keys = parent_table_keys - self.table_key = table_key - - @abstractmethod - def handle_bulk(self, item): ... - - def write(self, entity: ModelType, batch_size=None): - batch_size = batch_size or self.batch_size - - def generate_transaction_statements(event): - handler_name = f"handle_{type(event).__name__}" - handler = getattr(self, handler_name) - transact_items = handler(event=event) - - if not isinstance(transact_items, list): - transact_items = [transact_items] - return transact_items - - transact_items = chain.from_iterable( - (generate_transaction_statements(event) for event in entity.events) - ) - - responses = [ - transact_write_chunk(client=self.client, chunk=transact_item_chunk) - for transact_item_chunk in _split_transactions_by_key( - transact_items, batch_size - ) - ] - return responses - - def write_bulk(self, entities: list[ModelType], batch_size=None): - batch_size = batch_size or MAX_BATCH_WRITE_SIZE - batch_write_items = list(chain.from_iterable(map(self.handle_bulk, entities))) - responses = [ - batch_write_chunk( - client=self.client, table_name=self.table_name, chunk=chunk - ) - for chunk in batched(batch_write_items, batch_size) - ] - return responses - - def create_index( - self, - id: str, - parent_key_parts: tuple[str], - data: dict, - root: bool, - table_key: TableKey = None, - parent_table_keys: tuple[TableKey] = None, - ) -> TransactItem: - if table_key is None: - table_key = self.table_key - if parent_table_keys is None: - parent_table_keys = self.parent_table_keys - - if len(parent_table_keys) != len(parent_key_parts): - raise ValueError( - f"Expected provide {len(parent_table_keys)} parent key parts, got {len(parent_key_parts)}" - ) - - write_key = table_key.key(id) - read_key = KEY_SEPARATOR.join( - table_key.key(_id) - for table_key, _id in zip(parent_table_keys, parent_key_parts) - ) - - return TransactItem( - Put=TransactionStatement( - TableName=self.table_name, - Item=marshall( - pk=write_key, - sk=write_key, - pk_read=read_key, - sk_read=write_key, - root=root, - **data, - ), - ConditionExpression=ConditionExpression.MUST_NOT_EXIST, - ) - ) - - def create_index_batch( - self, - id: str, - 
parent_key_parts: tuple[str], - data: dict, - root: bool, - table_key: TableKey = None, - parent_table_keys: tuple[TableKey] = None, - ) -> TransactItem: - """ - Difference between `create_index` and `create_index_batch`: - - `create_index` is intended for the event-based - handlers (e.g. `handle_XyzCreatedEvent`) which are called by the base - `write` method, which expects `TransactItem`s for use with `client.transact_write_items` - - `create_index_batch` is intended for the entity-based handler - `handle_bulk` which is called by the base method `write_bulk`, which expects - `BatchWriteItem`s which we render as a `dict` for use with `client.batch_write_items` - """ - - if table_key is None: - table_key = self.table_key - if parent_key_parts is None: - parent_table_keys = self.parent_table_keys - - write_key = table_key.key(id) - read_key = KEY_SEPARATOR.join( - table_key.key(_id) - for table_key, _id in zip(parent_table_keys, parent_key_parts) - ) - - return { - "PutRequest": { - "Item": marshall( - pk=write_key, - sk=write_key, - pk_read=read_key, - sk_read=write_key, - root=root, - **data, - ), - }, - } - - def update_indexes(self, id: str, keys: list[str], data: dict): - primary_keys = [ - marshall(pk=pk, sk=pk) for pk in map(self.table_key.key, [id, *keys]) - ] - return update_transactions( - table_name=self.table_name, primary_keys=primary_keys, data=data - ) - - def delete_index(self, id: str): - pk = self.table_key.key(id) - return TransactItem( - Delete=TransactionStatement( - TableName=self.table_name, - Key=marshall(pk=pk, sk=pk), - ConditionExpression=ConditionExpression.MUST_EXIST, - ) - ) - - def _query(self, parent_ids: tuple[str], id: str = None) -> list[ModelType]: - pk_read = KEY_SEPARATOR.join( - table_key.key(_id) - for table_key, _id in zip(self.parent_table_keys, parent_ids) - ) - sk_read = self.table_key.key(id or "") - - sk_query_type = QueryType.BEGINS_WITH if id is None else QueryType.EQUALS - sk_condition = sk_query_type.format("sk_read", ":sk_read") - - args = { - "TableName": self.table_name, - "IndexName": "idx_gsi_read", - "KeyConditionExpression": f"pk_read = :pk_read AND {sk_condition}", - "ExpressionAttributeValues": marshall( - **{":pk_read": pk_read, ":sk_read": sk_read} - ), - } - result = self.client.query(**args) - if "LastEvaluatedKey" in result: - raise TooManyResults(f"Too many results for query ({(*parent_ids, id)})") - return [self.model(**item) for item in map(unmarshall, result["Items"])] - - def _read(self, parent_ids: tuple[str], id: str) -> ModelType: - items = self._query(parent_ids=parent_ids or (id,), id=id) - try: - (item,) = items - except ValueError: - raise ItemNotFound(*parent_ids, id, item_type=self.model) - return item diff --git a/src/layers/domain/repository/tests/test_cpm_system_id_repository.py b/src/layers/domain/repository/tests/test_cpm_system_id_repository.py index d1ad548d..b40b24fc 100644 --- a/src/layers/domain/repository/tests/test_cpm_system_id_repository.py +++ b/src/layers/domain/repository/tests/test_cpm_system_id_repository.py @@ -1,7 +1,7 @@ import pytest -from domain.core.cpm_system_id.v1 import AsidId, PartyKeyId +from domain.core.cpm_system_id import AsidId, PartyKeyId from domain.repository.cpm_system_id_repository import CpmSystemIdRepository -from domain.repository.keys.v3 import TableKey +from domain.repository.keys import TableKey from domain.repository.marshall import marshall_value from event.aws.client import dynamodb_client diff --git a/src/layers/domain/request_models/__init__.py 
b/src/layers/domain/request_models/__init__.py new file mode 100644 index 00000000..e0d08e67 --- /dev/null +++ b/src/layers/domain/request_models/__init__.py @@ -0,0 +1 @@ +from .v1 import * # noqa diff --git a/src/layers/domain/request_models/tests/test_parse_cpm_product_params.py b/src/layers/domain/request_models/tests/test_parse_cpm_product_params.py index e489d329..881175bf 100644 --- a/src/layers/domain/request_models/tests/test_parse_cpm_product_params.py +++ b/src/layers/domain/request_models/tests/test_parse_cpm_product_params.py @@ -1,5 +1,5 @@ import pytest -from domain.request_models.v1 import CreateCpmProductIncomingParams +from domain.request_models import CreateCpmProductIncomingParams from pydantic import ValidationError from test_helpers.sample_data import CPM_PRODUCT, CPM_PRODUCT_EXTRA_PARAMS diff --git a/src/layers/domain/request_models/tests/test_parse_device_params.py b/src/layers/domain/request_models/tests/test_parse_device_params.py index 700ecc36..57fbe458 100644 --- a/src/layers/domain/request_models/tests/test_parse_device_params.py +++ b/src/layers/domain/request_models/tests/test_parse_device_params.py @@ -1,5 +1,5 @@ import pytest -from domain.request_models.v1 import CreateDeviceIncomingParams +from domain.request_models import CreateDeviceIncomingParams from pydantic import ValidationError from test_helpers.sample_data import CPM_DEVICE, CPM_DEVICE_EXTRA_PARAMS diff --git a/src/layers/domain/request_models/tests/test_parse_device_reference_data.py b/src/layers/domain/request_models/tests/test_parse_device_reference_data.py index 6a8b6e9d..05dc2a4a 100644 --- a/src/layers/domain/request_models/tests/test_parse_device_reference_data.py +++ b/src/layers/domain/request_models/tests/test_parse_device_reference_data.py @@ -1,5 +1,5 @@ import pytest -from domain.request_models.v1 import CreateDeviceReferenceMessageSetsDataParams +from domain.request_models import CreateDeviceReferenceMessageSetsDataParams from pydantic import ValidationError diff --git a/src/layers/domain/request_models/tests/test_parse_device_reference_data_params.py b/src/layers/domain/request_models/tests/test_parse_device_reference_data_params.py index cf717825..2a8fb0c1 100644 --- a/src/layers/domain/request_models/tests/test_parse_device_reference_data_params.py +++ b/src/layers/domain/request_models/tests/test_parse_device_reference_data_params.py @@ -1,5 +1,5 @@ import pytest -from domain.request_models.v1 import CreateDeviceReferenceDataIncomingParams +from domain.request_models import CreateDeviceReferenceDataIncomingParams from pydantic import ValidationError from test_helpers.sample_data import ( diff --git a/src/layers/domain/request_models/tests/test_parse_product_team_params.py b/src/layers/domain/request_models/tests/test_parse_product_team_params.py index efd71a2d..90543556 100644 --- a/src/layers/domain/request_models/tests/test_parse_product_team_params.py +++ b/src/layers/domain/request_models/tests/test_parse_product_team_params.py @@ -1,5 +1,5 @@ import pytest -from domain.request_models.v1 import CreateProductTeamIncomingParams +from domain.request_models import CreateProductTeamIncomingParams from pydantic import ValidationError from test_helpers.sample_data import ( diff --git a/src/layers/domain/request_models/tests/test_parse_questionnaire_params.py b/src/layers/domain/request_models/tests/test_parse_questionnaire_params.py index b828760a..4fafdd95 100644 --- a/src/layers/domain/request_models/tests/test_parse_questionnaire_params.py +++ 
b/src/layers/domain/request_models/tests/test_parse_questionnaire_params.py @@ -1,5 +1,5 @@ import pytest -from domain.request_models.v1 import QuestionnairePathParams +from domain.request_models import QuestionnairePathParams from pydantic import ValidationError diff --git a/src/layers/domain/response/response_matrix.py b/src/layers/domain/response/response_matrix.py index d8f3c59c..eb372b2e 100644 --- a/src/layers/domain/response/response_matrix.py +++ b/src/layers/domain/response/response_matrix.py @@ -6,7 +6,7 @@ InvalidSpineMhsResponse, NotEprProductError, ) -from domain.core.questionnaire.v3 import ( +from domain.core.questionnaire import ( QuestionnaireResponseMissingValue, QuestionnaireResponseValidationError, ) diff --git a/src/layers/sds/cpm_translation/modify_device.py b/src/layers/sds/cpm_translation/modify_device.py index 578ee60a..85316102 100644 --- a/src/layers/sds/cpm_translation/modify_device.py +++ b/src/layers/sds/cpm_translation/modify_device.py @@ -1,5 +1,5 @@ -from domain.core.device.v2 import Device -from domain.core.questionnaire.v2 import QuestionnaireResponse +from domain.core.device import Device +from domain.core.questionnaire import QuestionnaireResponse from sds.domain.constants import ModificationType from sds.domain.nhs_accredited_system import NhsAccreditedSystem from sds.domain.nhs_mhs import NhsMhs diff --git a/src/layers/sds/cpm_translation/modify_key.py b/src/layers/sds/cpm_translation/modify_key.py index 84603b14..c5f0d14a 100644 --- a/src/layers/sds/cpm_translation/modify_key.py +++ b/src/layers/sds/cpm_translation/modify_key.py @@ -1,186 +1,186 @@ -from dataclasses import astuple -from typing import Callable, Generator - -import sds.domain -from domain.api.sds.query import ( - SearchSDSDeviceQueryParams, - SearchSDSEndpointQueryParams, -) -from domain.core.device.v2 import Device -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.product_team.v2 import ProductTeam -from domain.core.validation import DEVICE_KEY_SEPARATOR -from sds.cpm_translation.utils import get_in_list_of_dict, set_device_tags -from sds.domain.constants import ModificationType -from sds.domain.nhs_accredited_system import NhsAccreditedSystem -from sds.domain.nhs_mhs import MessageHandlingSystemKey, NhsMhs - -from .constants import DEFAULT_PRODUCT_TEAM, UNIQUE_IDENTIFIER -from .modify_device import new_questionnaire_response_from_template - - -class AccreditedSystemAlreadyExists(Exception): ... - - -class NotAnSdsKey(Exception): ... - - -class InvalidModificationRequest(Exception): ... 
- - -def get_modify_key_function( - model: type[NhsMhs] | type[NhsAccreditedSystem], - modification_type: ModificationType, - field_name: str, -) -> Callable[[list[Device], str, any], Generator[Device, None, None]]: - """Returns a function which yields deleted and created Devices""" - if not model.is_key_field(field_name): - raise NotAnSdsKey(field_name) - - match (model, modification_type): - case (sds.domain.NhsAccreditedSystem, ModificationType.ADD): - return copy_new_accredited_system_from_sibling_device - case (sds.domain.NhsAccreditedSystem, ModificationType.REPLACE): - return replace_accredited_systems - case (sds.domain.NhsMhs, ModificationType.REPLACE): - return replace_msg_handling_system - case _: - raise InvalidModificationRequest( - f"Forbidden to {modification_type} {model.__name__}.{field_name}", - ) - - -def copy_new_accredited_system_from_sibling_device( - devices: list[Device], field_name: str, value: str -) -> Generator[Device, None, None]: - (ods_code,) = NhsAccreditedSystem.parse_and_validate_field( - field=field_name, value=value - ) - - current_ods_codes = {device.ods_code for device in devices} - if ods_code in current_ods_codes: - raise AccreditedSystemAlreadyExists( - f"Accredited System with ODS code '{ods_code}' already exists" - ) - - _device = devices[0] # could copy any sibling, but pick the 0th one as convention - - (questionnaire_response_by_datetime,) = _device.questionnaire_responses.values() - (questionnaire_response,) = questionnaire_response_by_datetime.values() - questionnaire_response.questionnaire = NhsAccreditedSystem.questionnaire() - - new_questionnaire_response = new_questionnaire_response_from_template( - questionnaire_response=questionnaire_response, - field_to_update=field_name, - value=ods_code, - ) - (unique_identifier,) = get_in_list_of_dict( - obj=questionnaire_response.answers, key=UNIQUE_IDENTIFIER - ) - new_accredited_system_id = DEVICE_KEY_SEPARATOR.join((ods_code, unique_identifier)) - - product_team = ProductTeam( - id=_device.product_team_id, - ods_code=ods_code, - name=DEFAULT_PRODUCT_TEAM["name"], - ) - new_device = product_team.create_device( - name=_device.name, device_type=_device.device_type - ) - new_device.add_questionnaire_response( - questionnaire_response=new_questionnaire_response - ) - new_device.add_key( - key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, key_value=new_accredited_system_id - ) - set_device_tags( - device=new_device, - data=new_questionnaire_response.flat_answers, - model=SearchSDSDeviceQueryParams, - ) - - # "yield" to match the pattern of the functions returned by 'get_modify_key_function' - # which may in general yield multiple deleted / added Devices - for device in devices: - yield device - yield new_device - - -def replace_accredited_systems( - devices: list[Device], field_name: str, value: str -) -> Generator[Device, None, None]: - current_ods_codes = {device.ods_code for device in devices} - final_ods_codes = NhsAccreditedSystem.parse_and_validate_field( - field=field_name, value=value - ) - removed_ods_codes = current_ods_codes - final_ods_codes - for device in devices: - if device.ods_code in removed_ods_codes: - device.delete() - yield device - - for new_ods_code in final_ods_codes - current_ods_codes: - *_, new_device = copy_new_accredited_system_from_sibling_device( - devices=devices, field_name=field_name, value=[new_ods_code] - ) - yield new_device - - -def _get_msg_handling_system_key(responses: list[dict]) -> MessageHandlingSystemKey: - """Construct the MHS scoped party key from 
questionnaire responses""" - return MessageHandlingSystemKey( - **{ - key: values - for key in NhsMhs.key_fields() - for values in get_in_list_of_dict(obj=responses, key=key) - } - ) - - -def replace_msg_handling_system( - devices: list[Device], field_name: str, value: str -) -> Generator[Device, None, None]: - (device,) = devices - - (questionnaire_response_by_datetime,) = device.questionnaire_responses.values() - (_questionnaire_response,) = questionnaire_response_by_datetime.values() - _questionnaire_response.questionnaire = NhsMhs.questionnaire() - - new_value = NhsMhs.parse_and_validate_field(field=field_name, value=value) - - # Nothing to do if replacing with itself - old_value = _questionnaire_response.flat_answers[field_name] - if new_value == old_value: - return - device.delete() - yield device - - questionnaire_response = new_questionnaire_response_from_template( - questionnaire_response=_questionnaire_response, - field_to_update=field_name, - value=new_value, - ) - msg_handling_system_key = _get_msg_handling_system_key( - responses=_questionnaire_response.answers - ) - new_scoped_party_key = DEVICE_KEY_SEPARATOR.join(astuple(msg_handling_system_key)) - product_team = ProductTeam( - id=device.product_team_id, - ods_code=msg_handling_system_key.nhs_id_code, - name=DEFAULT_PRODUCT_TEAM["name"], - ) - new_device = product_team.create_device( - name=device.name, device_type=device.device_type - ) - new_device.add_questionnaire_response(questionnaire_response=questionnaire_response) - new_device.add_key( - key_type=DeviceKeyType.MESSAGE_HANDLING_SYSTEM_ID, - key_value=new_scoped_party_key, - ) - set_device_tags( - device=new_device, - data=questionnaire_response.flat_answers, - model=SearchSDSEndpointQueryParams, - ) - - yield new_device +# from dataclasses import astuple +# from typing import Callable, Generator + +# import sds.domain +# from domain.api.sds.query import ( +# SearchSDSDeviceQueryParams, +# SearchSDSEndpointQueryParams, +# ) +# from domain.core.device.v2 import Device +# from domain.core.device_key.v1 import DeviceKeyType +# from domain.core.product_team.v2 import ProductTeam +# from domain.core.validation import DEVICE_KEY_SEPARATOR +# from sds.cpm_translation.utils import get_in_list_of_dict, set_device_tags +# from sds.domain.constants import ModificationType +# from sds.domain.nhs_accredited_system import NhsAccreditedSystem +# from sds.domain.nhs_mhs import MessageHandlingSystemKey, NhsMhs + +# from .constants import DEFAULT_PRODUCT_TEAM, UNIQUE_IDENTIFIER +# from .modify_device import new_questionnaire_response_from_template + + +# class AccreditedSystemAlreadyExists(Exception): ... + + +# class NotAnSdsKey(Exception): ... + + +# class InvalidModificationRequest(Exception): ... 
+ + +# def get_modify_key_function( +# model: type[NhsMhs] | type[NhsAccreditedSystem], +# modification_type: ModificationType, +# field_name: str, +# ) -> Callable[[list[Device], str, any], Generator[Device, None, None]]: +# """Returns a function which yields deleted and created Devices""" +# if not model.is_key_field(field_name): +# raise NotAnSdsKey(field_name) + +# match (model, modification_type): +# case (sds.domain.NhsAccreditedSystem, ModificationType.ADD): +# return copy_new_accredited_system_from_sibling_device +# case (sds.domain.NhsAccreditedSystem, ModificationType.REPLACE): +# return replace_accredited_systems +# case (sds.domain.NhsMhs, ModificationType.REPLACE): +# return replace_msg_handling_system +# case _: +# raise InvalidModificationRequest( +# f"Forbidden to {modification_type} {model.__name__}.{field_name}", +# ) + + +# def copy_new_accredited_system_from_sibling_device( +# devices: list[Device], field_name: str, value: str +# ) -> Generator[Device, None, None]: +# (ods_code,) = NhsAccreditedSystem.parse_and_validate_field( +# field=field_name, value=value +# ) + +# current_ods_codes = {device.ods_code for device in devices} +# if ods_code in current_ods_codes: +# raise AccreditedSystemAlreadyExists( +# f"Accredited System with ODS code '{ods_code}' already exists" +# ) + +# _device = devices[0] # could copy any sibling, but pick the 0th one as convention + +# (questionnaire_response_by_datetime,) = _device.questionnaire_responses.values() +# (questionnaire_response,) = questionnaire_response_by_datetime.values() +# questionnaire_response.questionnaire = NhsAccreditedSystem.questionnaire() + +# new_questionnaire_response = new_questionnaire_response_from_template( +# questionnaire_response=questionnaire_response, +# field_to_update=field_name, +# value=ods_code, +# ) +# (unique_identifier,) = get_in_list_of_dict( +# obj=questionnaire_response.answers, key=UNIQUE_IDENTIFIER +# ) +# new_accredited_system_id = DEVICE_KEY_SEPARATOR.join((ods_code, unique_identifier)) + +# product_team = ProductTeam( +# id=_device.product_team_id, +# ods_code=ods_code, +# name=DEFAULT_PRODUCT_TEAM["name"], +# ) +# new_device = product_team.create_device( +# name=_device.name, device_type=_device.device_type +# ) +# new_device.add_questionnaire_response( +# questionnaire_response=new_questionnaire_response +# ) +# new_device.add_key( +# key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, key_value=new_accredited_system_id +# ) +# set_device_tags( +# device=new_device, +# data=new_questionnaire_response.flat_answers, +# model=SearchSDSDeviceQueryParams, +# ) + +# # "yield" to match the pattern of the functions returned by 'get_modify_key_function' +# # which may in general yield multiple deleted / added Devices +# for device in devices: +# yield device +# yield new_device + + +# def replace_accredited_systems( +# devices: list[Device], field_name: str, value: str +# ) -> Generator[Device, None, None]: +# current_ods_codes = {device.ods_code for device in devices} +# final_ods_codes = NhsAccreditedSystem.parse_and_validate_field( +# field=field_name, value=value +# ) +# removed_ods_codes = current_ods_codes - final_ods_codes +# for device in devices: +# if device.ods_code in removed_ods_codes: +# device.delete() +# yield device + +# for new_ods_code in final_ods_codes - current_ods_codes: +# *_, new_device = copy_new_accredited_system_from_sibling_device( +# devices=devices, field_name=field_name, value=[new_ods_code] +# ) +# yield new_device + + +# def _get_msg_handling_system_key(responses: 
list[dict]) -> MessageHandlingSystemKey: +# """Construct the MHS scoped party key from questionnaire responses""" +# return MessageHandlingSystemKey( +# **{ +# key: values +# for key in NhsMhs.key_fields() +# for values in get_in_list_of_dict(obj=responses, key=key) +# } +# ) + + +# def replace_msg_handling_system( +# devices: list[Device], field_name: str, value: str +# ) -> Generator[Device, None, None]: +# (device,) = devices + +# (questionnaire_response_by_datetime,) = device.questionnaire_responses.values() +# (_questionnaire_response,) = questionnaire_response_by_datetime.values() +# _questionnaire_response.questionnaire = NhsMhs.questionnaire() + +# new_value = NhsMhs.parse_and_validate_field(field=field_name, value=value) + +# # Nothing to do if replacing with itself +# old_value = _questionnaire_response.flat_answers[field_name] +# if new_value == old_value: +# return +# device.delete() +# yield device + +# questionnaire_response = new_questionnaire_response_from_template( +# questionnaire_response=_questionnaire_response, +# field_to_update=field_name, +# value=new_value, +# ) +# msg_handling_system_key = _get_msg_handling_system_key( +# responses=_questionnaire_response.answers +# ) +# new_scoped_party_key = DEVICE_KEY_SEPARATOR.join(astuple(msg_handling_system_key)) +# product_team = ProductTeam( +# id=device.product_team_id, +# ods_code=msg_handling_system_key.nhs_id_code, +# name=DEFAULT_PRODUCT_TEAM["name"], +# ) +# new_device = product_team.create_device( +# name=device.name, device_type=device.device_type +# ) +# new_device.add_questionnaire_response(questionnaire_response=questionnaire_response) +# new_device.add_key( +# key_type=DeviceKeyType.MESSAGE_HANDLING_SYSTEM_ID, +# key_value=new_scoped_party_key, +# ) +# set_device_tags( +# device=new_device, +# data=questionnaire_response.flat_answers, +# model=SearchSDSEndpointQueryParams, +# ) + +# yield new_device diff --git a/src/layers/sds/cpm_translation/tests/test_cpm_translation.py b/src/layers/sds/cpm_translation/tests/_test_cpm_translation.py similarity index 100% rename from src/layers/sds/cpm_translation/tests/test_cpm_translation.py rename to src/layers/sds/cpm_translation/tests/_test_cpm_translation.py diff --git a/src/layers/sds/cpm_translation/tests/test_modify_device.py b/src/layers/sds/cpm_translation/tests/_test_modify_device.py similarity index 100% rename from src/layers/sds/cpm_translation/tests/test_modify_device.py rename to src/layers/sds/cpm_translation/tests/_test_modify_device.py diff --git a/src/layers/sds/cpm_translation/tests/test_modify_key.py b/src/layers/sds/cpm_translation/tests/_test_modify_key.py similarity index 99% rename from src/layers/sds/cpm_translation/tests/test_modify_key.py rename to src/layers/sds/cpm_translation/tests/_test_modify_key.py index be34c6ca..4f5020e1 100644 --- a/src/layers/sds/cpm_translation/tests/test_modify_key.py +++ b/src/layers/sds/cpm_translation/tests/_test_modify_key.py @@ -1,7 +1,7 @@ from dataclasses import astuple import pytest -from domain.core.device.v2 import Device, DeviceTag, Status +from domain.core.device import Device, DeviceTag, Status from hypothesis import given, settings from sds.cpm_translation.modify_key import ( InvalidModificationRequest, @@ -12,7 +12,7 @@ replace_accredited_systems, replace_msg_handling_system, ) -from sds.cpm_translation.tests.test_cpm_translation import ( +from sds.cpm_translation.tests._test_cpm_translation import ( NHS_ACCREDITED_SYSTEM_STRATEGY, NHS_MHS_STRATEGY, ) diff --git 
a/src/layers/sds/cpm_translation/tests/test_translations.py b/src/layers/sds/cpm_translation/tests/_test_translations.py similarity index 100% rename from src/layers/sds/cpm_translation/tests/test_translations.py rename to src/layers/sds/cpm_translation/tests/_test_translations.py diff --git a/src/layers/sds/cpm_translation/tests/test_utils.py b/src/layers/sds/cpm_translation/tests/_test_utils.py similarity index 100% rename from src/layers/sds/cpm_translation/tests/test_utils.py rename to src/layers/sds/cpm_translation/tests/_test_utils.py diff --git a/src/layers/sds/cpm_translation/translations.py b/src/layers/sds/cpm_translation/translations.py index b47982e1..7e9d330d 100644 --- a/src/layers/sds/cpm_translation/translations.py +++ b/src/layers/sds/cpm_translation/translations.py @@ -1,232 +1,232 @@ -from functools import partial -from itertools import filterfalse -from typing import Generator - -import orjson -from domain.api.sds.query import ( - SearchSDSDeviceQueryParams, - SearchSDSEndpointQueryParams, -) -from domain.core.device.v2 import Device, DeviceType -from domain.core.device_key.v2 import DeviceKeyType -from domain.core.product_team.v2 import ProductTeam -from domain.core.root.v2 import Root -from domain.core.validation import DEVICE_KEY_SEPARATOR -from domain.repository.device_repository.v2 import DeviceRepository -from sds.domain.nhs_accredited_system import NhsAccreditedSystem -from sds.domain.nhs_mhs import NhsMhs -from sds.domain.parse import UnknownSdsModel -from sds.domain.sds_deletion_request import SdsDeletionRequest -from sds.domain.sds_modification_request import SdsModificationRequest - -from .constants import ( - BAD_UNIQUE_IDENTIFIERS, - DEFAULT_ORGANISATION, - DEFAULT_PRODUCT_TEAM, - EXCEPTIONAL_ODS_CODES, -) -from .modify_device import update_device_metadata -from .modify_key import NotAnSdsKey, get_modify_key_function -from .utils import set_device_tags, set_device_tags_bulk, update_in_list_of_dict - - -def accredited_system_ids( - nhs_accredited_system: NhsAccreditedSystem, -) -> Generator[tuple[str, str], None, None]: - for ods_code in nhs_accredited_system.nhs_as_client or [DEFAULT_ORGANISATION]: - yield ods_code, DEVICE_KEY_SEPARATOR.join( - (ods_code, nhs_accredited_system.unique_identifier) - ) - - -def scoped_party_key(nhs_mhs: NhsMhs) -> str: - return DEVICE_KEY_SEPARATOR.join( - map( - str.strip, - (getattr(nhs_mhs, key) for key in NhsMhs.key_fields()), - ) - ) - - -def create_product_team(ods_code: str) -> ProductTeam: - if ods_code in EXCEPTIONAL_ODS_CODES: - product_team = ProductTeam(**DEFAULT_PRODUCT_TEAM, ods_code=ods_code) - else: - organisation = Root.create_ods_organisation(ods_code=ods_code) - product_team = organisation.create_product_team(**DEFAULT_PRODUCT_TEAM) - return product_team - - -def nhs_accredited_system_to_cpm_devices( - nhs_accredited_system: NhsAccreditedSystem, bulk: bool -) -> Generator[Device, None, None]: - unique_identifier = nhs_accredited_system.unique_identifier - product_name = nhs_accredited_system.nhs_product_name or unique_identifier - raw_questionnaire_response_answers = nhs_accredited_system.export() - questionnaire_response_answers = ( - nhs_accredited_system.as_questionnaire_response_answers( - data=raw_questionnaire_response_answers - ) - ) - - questionnaire = NhsAccreditedSystem.questionnaire() - - for ( - ods_code, - accredited_system_id, - ) in accredited_system_ids(nhs_accredited_system): - update_in_list_of_dict( - obj=questionnaire_response_answers, - key="nhs_as_client", - value=[ods_code], - ) - 
_questionnaire_response = questionnaire.respond( - responses=questionnaire_response_answers - ) - _organisation = Root.create_ods_organisation(ods_code=ods_code) - _product_team = _organisation.create_product_team(**DEFAULT_PRODUCT_TEAM) - _device = _product_team.create_device( - name=product_name, device_type=DeviceType.PRODUCT - ) - _device.add_key( - key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, - key_value=accredited_system_id, - ) - _device.add_questionnaire_response( - questionnaire_response=_questionnaire_response - ) - if bulk: - set_device_tags_bulk( - device=_device, - data=raw_questionnaire_response_answers, - model=SearchSDSDeviceQueryParams, - ) - else: - set_device_tags( - device=_device, - data=raw_questionnaire_response_answers, - model=SearchSDSDeviceQueryParams, - ) - yield _device - - -def nhs_mhs_to_cpm_device(nhs_mhs: NhsMhs, bulk: bool) -> Device: - ods_code = nhs_mhs.nhs_id_code - _scoped_party_key = scoped_party_key(nhs_mhs) - product_name = nhs_mhs.nhs_product_name or _scoped_party_key - raw_questionnaire_response_answers = orjson.loads( - nhs_mhs.json(exclude_none=True, exclude={"change_type"}) - ) - questionnaire_response_answers = nhs_mhs.as_questionnaire_response_answers( - data=raw_questionnaire_response_answers - ) - - questionnaire = NhsMhs.questionnaire() - questionnaire_response = questionnaire.respond( - responses=questionnaire_response_answers - ) - - product_team = create_product_team(ods_code=ods_code) - device = product_team.create_device( - name=product_name, device_type=DeviceType.ENDPOINT - ) - device.add_key( - key_type=DeviceKeyType.MESSAGE_HANDLING_SYSTEM_ID, - key_value=_scoped_party_key, - ) - device.add_questionnaire_response(questionnaire_response=questionnaire_response) - if bulk: - set_device_tags_bulk( - device=device, - data=raw_questionnaire_response_answers, - model=SearchSDSEndpointQueryParams, - ) - else: - set_device_tags( - device=device, - data=raw_questionnaire_response_answers, - model=SearchSDSEndpointQueryParams, - ) - return device - - -def modify_devices( - modification_request: SdsModificationRequest, repository: DeviceRepository -) -> Generator[Device, None, None]: - devices = repository.query_by_tag( - unique_identifier=modification_request.unique_identifier, drop_tags_field=False - ) - - # Only apply modifications if there are devices to modify - modifications = modification_request.modifications if devices else [] - - _devices = devices - for modification_type, field, new_values in modifications: - device_type = _devices[0].device_type - model = NhsAccreditedSystem if device_type is DeviceType.PRODUCT else NhsMhs - field_name = model.get_field_name_for_alias(alias=field) - - try: - modify_key = get_modify_key_function( - model=model, - field_name=field_name, - modification_type=modification_type, - ) - _devices = list( - modify_key(devices=_devices, field_name=field_name, value=new_values) - ) - except NotAnSdsKey: - update_metadata = partial( - update_device_metadata, - model=model, - modification_type=modification_type, - field_alias=field, - new_values=new_values, - ) - _active_devices = list(filter(Device.is_active, _devices)) - _inactive_devices = list(filterfalse(Device.is_active, _devices)) - _devices = [*map(update_metadata, _active_devices), *_inactive_devices] - yield from _devices - - -def delete_devices( - deletion_request: SdsDeletionRequest, repository: DeviceRepository -) -> list[Device]: - devices = [] - for _device in repository.query_by_tag( - unique_identifier=deletion_request.unique_identifier, 
drop_tags_field=False - ): - _device.delete() - devices.append(_device) - return devices - - -def translate(obj: dict[str, str], repository: DeviceRepository, bulk: bool): - if obj.get("unique_identifier") in BAD_UNIQUE_IDENTIFIERS: - return [] - - object_class = obj["object_class"].lower() - if object_class == NhsAccreditedSystem.OBJECT_CLASS: - nhs_accredited_system = NhsAccreditedSystem.construct(**obj) - devices = nhs_accredited_system_to_cpm_devices( - nhs_accredited_system=nhs_accredited_system, bulk=bulk - ) - elif object_class == NhsMhs.OBJECT_CLASS: - nhs_mhs = NhsMhs.construct(**obj) - device = nhs_mhs_to_cpm_device(nhs_mhs=nhs_mhs, bulk=bulk) - devices = [device] - elif object_class == SdsDeletionRequest.OBJECT_CLASS: - deletion_request = SdsDeletionRequest.construct(**obj) - devices = delete_devices( - deletion_request=deletion_request, repository=repository - ) - elif object_class == SdsModificationRequest.OBJECT_CLASS: - modification_request = SdsModificationRequest.construct(**obj) - devices = modify_devices( - modification_request=modification_request, repository=repository - ) - else: - raise UnknownSdsModel( - f"No translation available for models with object class '{object_class}'" - ) - return devices +# from functools import partial +# from itertools import filterfalse +# from typing import Generator + +# import orjson +# from domain.api.sds.query import ( +# SearchSDSDeviceQueryParams, +# SearchSDSEndpointQueryParams, +# ) +# from domain.core.device.v2 import Device, DeviceType +# from domain.core.device_key.v1 import DeviceKeyType +# from domain.core.product_team.v2 import ProductTeam +# from domain.core.root.v2 import Root +# from domain.core.validation import DEVICE_KEY_SEPARATOR +# from domain.repository.device_repository.v2 import DeviceRepository +# from sds.domain.nhs_accredited_system import NhsAccreditedSystem +# from sds.domain.nhs_mhs import NhsMhs +# from sds.domain.parse import UnknownSdsModel +# from sds.domain.sds_deletion_request import SdsDeletionRequest +# from sds.domain.sds_modification_request import SdsModificationRequest + +# from .constants import ( +# BAD_UNIQUE_IDENTIFIERS, +# DEFAULT_ORGANISATION, +# DEFAULT_PRODUCT_TEAM, +# EXCEPTIONAL_ODS_CODES, +# ) +# from .modify_device import update_device_metadata +# from .modify_key import NotAnSdsKey, get_modify_key_function +# from .utils import set_device_tags, set_device_tags_bulk, update_in_list_of_dict + + +# def accredited_system_ids( +# nhs_accredited_system: NhsAccreditedSystem, +# ) -> Generator[tuple[str, str], None, None]: +# for ods_code in nhs_accredited_system.nhs_as_client or [DEFAULT_ORGANISATION]: +# yield ods_code, DEVICE_KEY_SEPARATOR.join( +# (ods_code, nhs_accredited_system.unique_identifier) +# ) + + +# def scoped_party_key(nhs_mhs: NhsMhs) -> str: +# return DEVICE_KEY_SEPARATOR.join( +# map( +# str.strip, +# (getattr(nhs_mhs, key) for key in NhsMhs.key_fields()), +# ) +# ) + + +# def create_product_team(ods_code: str) -> ProductTeam: +# if ods_code in EXCEPTIONAL_ODS_CODES: +# product_team = ProductTeam(**DEFAULT_PRODUCT_TEAM, ods_code=ods_code) +# else: +# organisation = Root.create_ods_organisation(ods_code=ods_code) +# product_team = organisation.create_product_team(**DEFAULT_PRODUCT_TEAM) +# return product_team + + +# def nhs_accredited_system_to_cpm_devices( +# nhs_accredited_system: NhsAccreditedSystem, bulk: bool +# ) -> Generator[Device, None, None]: +# unique_identifier = nhs_accredited_system.unique_identifier +# product_name = nhs_accredited_system.nhs_product_name 
or unique_identifier +# raw_questionnaire_response_answers = nhs_accredited_system.export() +# questionnaire_response_answers = ( +# nhs_accredited_system.as_questionnaire_response_answers( +# data=raw_questionnaire_response_answers +# ) +# ) + +# questionnaire = NhsAccreditedSystem.questionnaire() + +# for ( +# ods_code, +# accredited_system_id, +# ) in accredited_system_ids(nhs_accredited_system): +# update_in_list_of_dict( +# obj=questionnaire_response_answers, +# key="nhs_as_client", +# value=[ods_code], +# ) +# _questionnaire_response = questionnaire.respond( +# responses=questionnaire_response_answers +# ) +# _organisation = Root.create_ods_organisation(ods_code=ods_code) +# _product_team = _organisation.create_product_team(**DEFAULT_PRODUCT_TEAM) +# _device = _product_team.create_device( +# name=product_name, device_type=DeviceType.PRODUCT +# ) +# _device.add_key( +# key_type=DeviceKeyType.ACCREDITED_SYSTEM_ID, +# key_value=accredited_system_id, +# ) +# _device.add_questionnaire_response( +# questionnaire_response=_questionnaire_response +# ) +# if bulk: +# set_device_tags_bulk( +# device=_device, +# data=raw_questionnaire_response_answers, +# model=SearchSDSDeviceQueryParams, +# ) +# else: +# set_device_tags( +# device=_device, +# data=raw_questionnaire_response_answers, +# model=SearchSDSDeviceQueryParams, +# ) +# yield _device + + +# def nhs_mhs_to_cpm_device(nhs_mhs: NhsMhs, bulk: bool) -> Device: +# ods_code = nhs_mhs.nhs_id_code +# _scoped_party_key = scoped_party_key(nhs_mhs) +# product_name = nhs_mhs.nhs_product_name or _scoped_party_key +# raw_questionnaire_response_answers = orjson.loads( +# nhs_mhs.json(exclude_none=True, exclude={"change_type"}) +# ) +# questionnaire_response_answers = nhs_mhs.as_questionnaire_response_answers( +# data=raw_questionnaire_response_answers +# ) + +# questionnaire = NhsMhs.questionnaire() +# questionnaire_response = questionnaire.respond( +# responses=questionnaire_response_answers +# ) + +# product_team = create_product_team(ods_code=ods_code) +# device = product_team.create_device( +# name=product_name, device_type=DeviceType.ENDPOINT +# ) +# device.add_key( +# key_type=DeviceKeyType.MESSAGE_HANDLING_SYSTEM_ID, +# key_value=_scoped_party_key, +# ) +# device.add_questionnaire_response(questionnaire_response=questionnaire_response) +# if bulk: +# set_device_tags_bulk( +# device=device, +# data=raw_questionnaire_response_answers, +# model=SearchSDSEndpointQueryParams, +# ) +# else: +# set_device_tags( +# device=device, +# data=raw_questionnaire_response_answers, +# model=SearchSDSEndpointQueryParams, +# ) +# return device + + +# def modify_devices( +# modification_request: SdsModificationRequest, repository: DeviceRepository +# ) -> Generator[Device, None, None]: +# devices = repository.query_by_tag( +# unique_identifier=modification_request.unique_identifier, drop_tags_field=False +# ) + +# # Only apply modifications if there are devices to modify +# modifications = modification_request.modifications if devices else [] + +# _devices = devices +# for modification_type, field, new_values in modifications: +# device_type = _devices[0].device_type +# model = NhsAccreditedSystem if device_type is DeviceType.PRODUCT else NhsMhs +# field_name = model.get_field_name_for_alias(alias=field) + +# try: +# modify_key = get_modify_key_function( +# model=model, +# field_name=field_name, +# modification_type=modification_type, +# ) +# _devices = list( +# modify_key(devices=_devices, field_name=field_name, value=new_values) +# ) +# except NotAnSdsKey: +# 
update_metadata = partial( +# update_device_metadata, +# model=model, +# modification_type=modification_type, +# field_alias=field, +# new_values=new_values, +# ) +# _active_devices = list(filter(Device.is_active, _devices)) +# _inactive_devices = list(filterfalse(Device.is_active, _devices)) +# _devices = [*map(update_metadata, _active_devices), *_inactive_devices] +# yield from _devices + + +# def delete_devices( +# deletion_request: SdsDeletionRequest, repository: DeviceRepository +# ) -> list[Device]: +# devices = [] +# for _device in repository.query_by_tag( +# unique_identifier=deletion_request.unique_identifier, drop_tags_field=False +# ): +# _device.delete() +# devices.append(_device) +# return devices + + +# def translate(obj: dict[str, str], repository: DeviceRepository, bulk: bool): +# if obj.get("unique_identifier") in BAD_UNIQUE_IDENTIFIERS: +# return [] + +# object_class = obj["object_class"].lower() +# if object_class == NhsAccreditedSystem.OBJECT_CLASS: +# nhs_accredited_system = NhsAccreditedSystem.construct(**obj) +# devices = nhs_accredited_system_to_cpm_devices( +# nhs_accredited_system=nhs_accredited_system, bulk=bulk +# ) +# elif object_class == NhsMhs.OBJECT_CLASS: +# nhs_mhs = NhsMhs.construct(**obj) +# device = nhs_mhs_to_cpm_device(nhs_mhs=nhs_mhs, bulk=bulk) +# devices = [device] +# elif object_class == SdsDeletionRequest.OBJECT_CLASS: +# deletion_request = SdsDeletionRequest.construct(**obj) +# devices = delete_devices( +# deletion_request=deletion_request, repository=repository +# ) +# elif object_class == SdsModificationRequest.OBJECT_CLASS: +# modification_request = SdsModificationRequest.construct(**obj) +# devices = modify_devices( +# modification_request=modification_request, repository=repository +# ) +# else: +# raise UnknownSdsModel( +# f"No translation available for models with object class '{object_class}'" +# ) +# return devices diff --git a/src/layers/sds/cpm_translation/utils.py b/src/layers/sds/cpm_translation/utils.py index 46a1bcf0..58635318 100644 --- a/src/layers/sds/cpm_translation/utils.py +++ b/src/layers/sds/cpm_translation/utils.py @@ -2,7 +2,7 @@ from typing import Iterable from domain.api.sds.query import SearchSDSQueryParams -from domain.core.device.v2 import Device, DeviceTag +from domain.core.device import Device, DeviceTag from sds.cpm_translation.constants import UNIQUE_IDENTIFIER diff --git a/src/layers/sds/domain/nhs_accredited_system.py b/src/layers/sds/domain/nhs_accredited_system.py index 819e7351..7d304df5 100644 --- a/src/layers/sds/domain/nhs_accredited_system.py +++ b/src/layers/sds/domain/nhs_accredited_system.py @@ -1,11 +1,6 @@ from typing import ClassVar, Literal, Optional from domain.api.sds.query import SearchSDSDeviceQueryParams -from domain.core.questionnaire.v1 import Questionnaire -from domain.repository.questionnaire_repository import QuestionnaireRepository -from domain.repository.questionnaire_repository.v1.questionnaires import ( - QuestionnaireInstance, -) from pydantic import Field from .base import OBJECT_CLASS_FIELD_NAME, SdsBaseModel @@ -42,11 +37,6 @@ class NhsAccreditedSystem(SdsBaseModel): def key_fields(cls) -> tuple[str, ...]: return ACCREDITED_SYSTEM_KEY_FIELDS - @classmethod - def questionnaire(cls) -> Questionnaire: - repo = QuestionnaireRepository() - return repo.read(name=QuestionnaireInstance.SPINE_DEVICE) - @classmethod def query_params_model(cls) -> type[SearchSDSDeviceQueryParams]: return SearchSDSDeviceQueryParams diff --git a/src/layers/sds/domain/nhs_mhs.py 
b/src/layers/sds/domain/nhs_mhs.py index f2292c55..3b68f1bc 100644 --- a/src/layers/sds/domain/nhs_mhs.py +++ b/src/layers/sds/domain/nhs_mhs.py @@ -2,11 +2,6 @@ from typing import ClassVar, Literal, Optional from domain.api.sds.query import SearchSDSEndpointQueryParams -from domain.core.questionnaire.v2 import Questionnaire -from domain.repository.questionnaire_repository import QuestionnaireRepository -from domain.repository.questionnaire_repository.v1.questionnaires import ( - QuestionnaireInstance, -) from pydantic import Field from sds.domain.constants import ( Authentication, @@ -78,11 +73,6 @@ class NhsMhs(SdsBaseModel): def key_fields(cls) -> tuple[str, ...]: return KEY_FIELDS - @classmethod - def questionnaire(cls) -> Questionnaire: - repo = QuestionnaireRepository() - return repo.read(name=QuestionnaireInstance.SPINE_ENDPOINT) - @classmethod def query_params_model(cls) -> type[SearchSDSEndpointQueryParams]: return SearchSDSEndpointQueryParams diff --git a/src/layers/sds/worker/load.py b/src/layers/sds/worker/load.py index 317aa8a3..324ccda4 100644 --- a/src/layers/sds/worker/load.py +++ b/src/layers/sds/worker/load.py @@ -4,7 +4,7 @@ """ import boto3 -from domain.repository.device_repository.v2 import DeviceRepository +from domain.repository.device_repository import DeviceRepository from event.aws.client import dynamodb_client from event.environment import BaseEnvironment From bcaef1f6e106263ae97a40bb78e2a604d5e8d71d Mon Sep 17 00:00:00 2001 From: Joel Klinger Date: Tue, 19 Nov 2024 11:04:27 +0000 Subject: [PATCH 02/12] [feature/PI-528-reset_versions_to_v1] delete duplicated fn --- src/layers/domain/core/device_key/v1.py | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/src/layers/domain/core/device_key/v1.py b/src/layers/domain/core/device_key/v1.py index 8636ab15..160bcb39 100644 --- a/src/layers/domain/core/device_key/v1.py +++ b/src/layers/domain/core/device_key/v1.py @@ -7,16 +7,6 @@ from pydantic import validator -def validate_key(key: str, type: "DeviceKeyType"): - if type and type.pattern.match(key) is None: - raise InvalidKeyPattern( - f"Key '{key}' does not match the expected " - f"pattern '{type.pattern.pattern}' associated with " - f"key type '{type}'" - ) - return key - - class DeviceKeyType(StrEnum): PRODUCT_ID = auto() ACCREDITED_SYSTEM_ID = auto() From 08c87cc022950a8e5d506f9b6dbd26cc33c642c3 Mon Sep 17 00:00:00 2001 From: Joel Klinger Date: Tue, 19 Nov 2024 11:27:36 +0000 Subject: [PATCH 03/12] [release/2024-11-19-a] create release --- CHANGELOG.md | 4 ++++ VERSION | 2 +- changelog/2024-11-19-a.md | 2 ++ pyproject.toml | 2 +- 4 files changed, 8 insertions(+), 2 deletions(-) create mode 100644 changelog/2024-11-19-a.md diff --git a/CHANGELOG.md b/CHANGELOG.md index f745df20..f8b0c8d7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,6 +3,10 @@ ## 2024-11-19 - [PI-601] Workspace destroy, use main branch as fallback +## 2024-11-19-a +- [PI-528] Collapse versioning to v1 +- [PI-581] MHS Device with Device Reference Data + ## 2024-11-18 - [PI-601] Workspace destroy, use main branch if branch no longer exists diff --git a/VERSION b/VERSION index 92c5f411..fdc45e84 100644 --- a/VERSION +++ b/VERSION @@ -1 +1 @@ -2024.11.19 +2024.11.19.a diff --git a/changelog/2024-11-19-a.md b/changelog/2024-11-19-a.md new file mode 100644 index 00000000..3c330890 --- /dev/null +++ b/changelog/2024-11-19-a.md @@ -0,0 +1,2 @@ +- [PI-528] Collapse versioning to v1 +- [PI-581] MHS Device with Device Reference Data diff --git a/pyproject.toml b/pyproject.toml index 
58aba087..f7f039e4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "connecting-party-manager" -version = "2024.11.19" +version = "2024.11.19.a" description = "Repository for the Connecting Party Manager API and related services" authors = ["NHS England"] license = "LICENSE.md" From 6633a17beb242e5fd6fc9ea00dee98d78ce1cbcd Mon Sep 17 00:00:00 2001 From: Joel Klinger Date: Tue, 19 Nov 2024 11:45:15 +0000 Subject: [PATCH 04/12] [release/2024-11-19-a] Merge branch feature/PI-581-create_mhs_device_with_drd into release/2024-11-19-a --- src/api/createDevice/src/v1/steps.py | 2 +- .../src/v1/steps.py | 134 ++++++++++++++---- .../tests/test_index.py | 111 ++++++++++++++- .../createDeviceReferenceData/src/v1/steps.py | 4 +- src/api/readDevice/src/v1/steps.py | 2 +- .../features/createDevice.success.feature | 12 +- ...eviceMessageHandlingSystem.failure.feature | 118 +++++++++++++-- ...eviceMessageHandlingSystem.success.feature | 72 ++++++---- ...DataAdditionalInteractions.failure.feature | 8 +- ...iceReferenceDataMessageSet.failure.feature | 10 +- .../features/readDevice.success.feature | 6 +- .../domain/api/common_steps/read_product.py | 2 +- src/layers/domain/core/device/v1.py | 30 +++- src/layers/domain/core/device_key/v1.py | 3 + .../tests/v1/test_device_repository_v1.py | 1 + .../domain/repository/device_repository/v1.py | 39 +++-- src/layers/domain/request_models/v1.py | 20 ++- 17 files changed, 452 insertions(+), 122 deletions(-) diff --git a/src/api/createDevice/src/v1/steps.py b/src/api/createDevice/src/v1/steps.py index 21971c3b..43d669c5 100644 --- a/src/api/createDevice/src/v1/steps.py +++ b/src/api/createDevice/src/v1/steps.py @@ -14,7 +14,7 @@ @mark_validation_errors_as_inbound -def parse_device_payload(data, cache) -> Device: +def parse_device_payload(data, cache) -> CreateDeviceIncomingParams: payload: dict = data[parse_event_body] return CreateDeviceIncomingParams(**payload) diff --git a/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py b/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py index adb45fe6..dc4505e3 100644 --- a/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py +++ b/src/api/createDeviceMessageHandlingSystem/src/v1/steps.py @@ -8,24 +8,77 @@ read_product_team, ) from domain.core.cpm_product import CpmProduct -from domain.core.device import Device -from domain.core.error import InvalidSpineMhsResponse +from domain.core.device import ( + MHS_DEVICE_NAME, + Device, + DeviceKeyAddedEvent, + DeviceReferenceDataIdAddedEvent, + DeviceTagAddedEvent, + QuestionnaireResponseUpdatedEvent, +) +from domain.core.device_key import DeviceKeyType +from domain.core.device_reference_data import DeviceReferenceData +from domain.core.error import ConfigurationError +from domain.core.product_team import ProductTeam from domain.core.questionnaire import Questionnaire, QuestionnaireResponse +from domain.repository.device_reference_data_repository import ( + DeviceReferenceDataRepository, +) from domain.repository.device_repository import DeviceRepository from domain.repository.questionnaire_repository import ( QuestionnaireInstance, QuestionnaireRepository, ) -from domain.request_models import CreateMhsDeviceIncomingParams +from domain.request_models import CpmProductPathParams, CreateMhsDeviceIncomingParams from domain.response.validation_errors import mark_validation_errors_as_inbound @mark_validation_errors_as_inbound -def parse_mhs_device_payload(data, cache) -> Device: +def parse_mhs_device_payload(data, cache) -> 
CreateMhsDeviceIncomingParams: payload: dict = data[parse_event_body] return CreateMhsDeviceIncomingParams(**payload) +def check_for_existing_mhs(data, cache): + product_team: ProductTeam = data[read_product_team] + product: CpmProduct = data[read_product] + + device_repo = DeviceRepository( + table_name=cache["DYNAMODB_TABLE"], dynamodb_client=cache["DYNAMODB_CLIENT"] + ) + + devices = device_repo.search(product_team_id=product_team.id, product_id=product.id) + if any(device.name == MHS_DEVICE_NAME for device in devices): + raise ConfigurationError( + "There is already an existing MHS Device for this Product" + ) + + +def read_device_reference_data(data, cache) -> DeviceReferenceData: + path_params: CpmProductPathParams = data[parse_path_params] + drd_repo = DeviceReferenceDataRepository( + table_name=cache["DYNAMODB_TABLE"], dynamodb_client=cache["DYNAMODB_CLIENT"] + ) + device_reference_datas = drd_repo.search( + product_id=path_params.product_id, + product_team_id=path_params.product_team_id, + ) + + party_key: str = data[get_party_key] + # use {QuestionnaireInstance.SPINE_MHS_MESSAGE_SETS} + mhs_message_set_drd_name = f"{party_key} - MHS Message Set" + + try: + (device_reference_data,) = filter( + lambda drd: drd.name == mhs_message_set_drd_name, device_reference_datas + ) + except ValueError: + raise ConfigurationError( + "You must configure exactly one MessageSet Device Reference Data before creating an MHS Device" + ) + return device_reference_data + + def read_spine_mhs_questionnaire(data, cache) -> Questionnaire: return QuestionnaireRepository().read(QuestionnaireInstance.SPINE_MHS) @@ -33,25 +86,13 @@ def read_spine_mhs_questionnaire(data, cache) -> Questionnaire: def validate_spine_mhs_questionnaire_response(data, cache) -> QuestionnaireResponse: spine_mhs_questionnaire: Questionnaire = data[read_spine_mhs_questionnaire] payload: CreateMhsDeviceIncomingParams = data[parse_mhs_device_payload] - questionnaire_responses = payload.questionnaire_responses - # Ensure there's a questionnaire named 'spine_mhs' in the responses - if QuestionnaireInstance.SPINE_MHS not in questionnaire_responses: - raise InvalidSpineMhsResponse( - "Require a 'spine_mhs' questionnaire response to create a MHS Device" - ) - - raw_spine_mhs_questionnaire_response = payload.questionnaire_responses[ + spine_mhs_questionnaire_response = payload.questionnaire_responses[ QuestionnaireInstance.SPINE_MHS ] - # Ensure there's only one response to 'spine_mhs' - if len(raw_spine_mhs_questionnaire_response) != 1: - raise InvalidSpineMhsResponse( - "Expected only one response for the 'spine_mhs' questionnaire" - ) return spine_mhs_questionnaire.validate( - data=raw_spine_mhs_questionnaire_response[0] + data=spine_mhs_questionnaire_response.__root__[0] ) @@ -64,32 +105,63 @@ def create_mhs_device(data, cache) -> Device: return product.create_device(**device_payload) -def create_party_key_tag(data, cache): +def create_party_key_tag(data, cache) -> DeviceTagAddedEvent: + mhs_device: Device = data[create_mhs_device] + return mhs_device.add_tag(party_key=data[get_party_key]) + + +def create_cpa_id_keys(data, cache) -> DeviceKeyAddedEvent: mhs_device: Device = data[create_mhs_device] - mhs_device.add_tag(party_key=data[get_party_key]) + party_key = data[get_party_key] + drd: DeviceReferenceData = data[read_device_reference_data] + interaction_ids = [] + + # Extract Interaction IDs from questionnaire responses + questionnaire_responses = drd.questionnaire_responses.get( + f"{QuestionnaireInstance.SPINE_MHS_MESSAGE_SETS}/1", 
[]
+    )
+    for response in questionnaire_responses:
+        interaction_ids.append(response.data.get("Interaction ID"))
+
+    # Use cpa_id in future
+    for id in interaction_ids:
+        mhs_device.add_key(
+            key_type=DeviceKeyType.INTERACTION_ID, key_value=f"{party_key}:{id}"
+        )
+
     return mhs_device
 
 
-def add_spine_mhs_questionnaire_response(data, cache) -> list[QuestionnaireResponse]:
+def add_device_reference_data_id(data, cache) -> DeviceReferenceDataIdAddedEvent:
+    mhs_device: Device = data[create_mhs_device]
+    drd: DeviceReferenceData = data[read_device_reference_data]
+    return mhs_device.add_device_reference_data_id(
+        device_reference_data_id=str(drd.id), path_to_data=["*"]
+    )
+
+
+def add_spine_mhs_questionnaire_response(
+    data, cache
+) -> QuestionnaireResponseUpdatedEvent:
     spine_mhs_questionnaire_response: QuestionnaireResponse = data[
         validate_spine_mhs_questionnaire_response
     ]
-    mhs_device: Device = data[create_party_key_tag]
-    mhs_device.add_questionnaire_response(spine_mhs_questionnaire_response)
-    return mhs_device
+    mhs_device: Device = data[create_mhs_device]
+
+    return mhs_device.add_questionnaire_response(spine_mhs_questionnaire_response)
 
 
-def write_device(data: dict[str, CpmProduct], cache) -> CpmProduct:
-    mhs_device: Device = data[add_spine_mhs_questionnaire_response]
+def write_device(data: dict[str, Device], cache) -> Device:
+    mhs_device: Device = data[create_mhs_device]
     repo = DeviceRepository(
         table_name=cache["DYNAMODB_TABLE"], dynamodb_client=cache["DYNAMODB_CLIENT"]
     )
     return repo.write(mhs_device)
 
 
-def set_http_status(data, cache) -> tuple[HTTPStatus, str]:
-    device: Device = data[create_mhs_device]
-    return HTTPStatus.CREATED, device.state_exclude_tags()
+def set_http_status(data, cache) -> tuple[HTTPStatus, dict]:
+    mhs_device: Device = data[create_mhs_device]
+    return HTTPStatus.CREATED, mhs_device.state_exclude_tags()
 
 
 steps = [
@@ -99,10 +171,14 @@ def set_http_status(data, cache) -> tuple[HTTPStatus, str]:
     read_product_team,
     read_product,
     get_party_key,
+    check_for_existing_mhs,
+    read_device_reference_data,
     read_spine_mhs_questionnaire,
     validate_spine_mhs_questionnaire_response,
     create_mhs_device,
     create_party_key_tag,
+    create_cpa_id_keys,
+    add_device_reference_data_id,
     add_spine_mhs_questionnaire_response,
     write_device,
     set_http_status,
diff --git a/src/api/createDeviceMessageHandlingSystem/tests/test_index.py b/src/api/createDeviceMessageHandlingSystem/tests/test_index.py
index 8614fc01..5bade299 100644
--- a/src/api/createDeviceMessageHandlingSystem/tests/test_index.py
+++ b/src/api/createDeviceMessageHandlingSystem/tests/test_index.py
@@ -13,8 +13,15 @@
 from domain.core.product_key import ProductKeyType
 from domain.core.root import Root
 from domain.repository.cpm_product_repository import CpmProductRepository
+from domain.repository.device_reference_data_repository import (
+    DeviceReferenceDataRepository,
+)
 from domain.repository.device_repository import DeviceRepository
 from domain.repository.product_team_repository import ProductTeamRepository
+from domain.repository.questionnaire_repository import (
+    QuestionnaireInstance,
+    QuestionnaireRepository,
+)
 from event.json import json_loads
 from test_helpers.dynamodb import mock_table
 
@@ -49,7 +56,9 @@
 
 
 @contextmanager
-def mock_epr_product() -> Generator[tuple[ModuleType, CpmProduct], Any, None]:
+def mock_epr_product_with_message_set_drd() -> (
+    Generator[tuple[ModuleType, CpmProduct], Any, None]
+):
     org = Root.create_ods_organisation(ods_code=ODS_CODE)
     product_team = 
org.create_product_team(name=PRODUCT_TEAM_NAME) @@ -72,6 +81,37 @@ def mock_epr_product() -> Generator[tuple[ModuleType, CpmProduct], Any, None]: ) product_repo.write(entity=product) + # set up questionnaire response + mhs_message_set_questionnaire = QuestionnaireRepository().read( + QuestionnaireInstance.SPINE_MHS_MESSAGE_SETS + ) + questionnaire_response = mhs_message_set_questionnaire.validate( + data={ + "Interaction ID": "urn:foo", + "MHS SN": "bar", + "MHS IN": "baz", + } + ) + + questionnaire_response_2 = mhs_message_set_questionnaire.validate( + data={ + "Interaction ID": "urn:foo2", + "MHS SN": "bar2", + "MHS IN": "baz2", + }, + ) + + # Set up DeviceReferenceData in DB + device_reference_data = product.create_device_reference_data( + name="ABC1234-987654 - MHS Message Set" + ) + device_reference_data.add_questionnaire_response(questionnaire_response) + device_reference_data.add_questionnaire_response(questionnaire_response_2) + device_reference_data_repo = DeviceReferenceDataRepository( + table_name=TABLE_NAME, dynamodb_client=client + ) + device_reference_data_repo.write(device_reference_data) + import api.createDeviceMessageHandlingSystem.index as index index.cache["DYNAMODB_CLIENT"] = client @@ -109,8 +149,41 @@ def mock_not_epr_product() -> Generator[tuple[ModuleType, CpmProduct], Any, None yield index, product +@contextmanager +def mock_epr_product_without_message_set_drd() -> ( + Generator[tuple[ModuleType, CpmProduct], Any, None] +): + org = Root.create_ods_organisation(ods_code=ODS_CODE) + product_team = org.create_product_team(name=PRODUCT_TEAM_NAME) + + with mock_table(table_name=TABLE_NAME) as client, mock.patch.dict( + os.environ, + {"DYNAMODB_TABLE": TABLE_NAME, "AWS_DEFAULT_REGION": "eu-west-2"}, + clear=True, + ): + product_team_repo = ProductTeamRepository( + table_name=TABLE_NAME, dynamodb_client=client + ) + product_team_repo.write(entity=product_team) + + product = product_team.create_cpm_product( + name=PRODUCT_NAME, product_id=PRODUCT_ID + ) + product.add_key(key_type=ProductKeyType.PARTY_KEY, key_value="ABC1234-987654") + product_repo = CpmProductRepository( + table_name=TABLE_NAME, dynamodb_client=client + ) + product_repo.write(entity=product) + + import api.createDeviceMessageHandlingSystem.index as index + + index.cache["DYNAMODB_CLIENT"] = client + + yield index, product + + def test_index() -> None: - with mock_epr_product() as (index, product): + with mock_epr_product_with_message_set_drd() as (index, product): # Execute the lambda response = index.handler( event={ @@ -194,7 +267,7 @@ def test_index() -> None: ], ) def test_incoming_errors(body, path_parameters, error_code, status_code): - with mock_epr_product() as (index, _): + with mock_epr_product_with_message_set_drd() as (index, _): # Execute the lambda response = index.handler( event={ @@ -219,7 +292,7 @@ def test_incoming_errors(body, path_parameters, error_code, status_code): }, }, "VALIDATION_ERROR", - "Expected only one response for the 'spine_mhs' questionnaire", + "CreateMhsDeviceIncomingParams.questionnaire_responses.spine_mhs.__root__: ensure this value has at most 1 items", 400, ), ( @@ -237,7 +310,7 @@ def test_incoming_errors(body, path_parameters, error_code, status_code): def test_questionnaire_response_validation_errors( body, error_code, error_message, status_code ): - with mock_epr_product() as (index, product): + with mock_epr_product_with_message_set_drd() as (index, product): # Execute the lambda response = index.handler( event={ @@ -274,6 +347,32 @@ def 
test_not_epr_product(): assert response["statusCode"] == 400 expected_error_code = "VALIDATION_ERROR" - expected_message_code = "Not an EPR Product: Cannot create MHS device for product without exactly one Party Key" + expected_message_code = "Not an EPR Product: Cannot create MHS Device for product without exactly one Party Key" assert expected_error_code in response["body"] assert expected_message_code in response["body"] + + +def test_no_existing_message_set_drd(): + with mock_epr_product_without_message_set_drd() as (index, product): + # Execute the lambda + response = index.handler( + event={ + "headers": {"version": VERSION}, + "body": json.dumps( + {"questionnaire_responses": {"spine_mhs": [QUESTIONNAIRE_DATA]}} + ), + "pathParameters": { + "product_team_id": str(product.product_team_id), + "product_id": str(product.id), + }, + } + ) + + assert response["statusCode"] == 400 + expected_error_code = "VALIDATION_ERROR" + expected_message_code = "You must configure exactly one MessageSet Device Reference Data before creating an MHS Device" + assert expected_error_code in response["body"] + assert expected_message_code in response["body"] + + +# add test for already existing mhs device? diff --git a/src/api/createDeviceReferenceData/src/v1/steps.py b/src/api/createDeviceReferenceData/src/v1/steps.py index bdca5549..acf4e94e 100644 --- a/src/api/createDeviceReferenceData/src/v1/steps.py +++ b/src/api/createDeviceReferenceData/src/v1/steps.py @@ -16,7 +16,9 @@ @mark_validation_errors_as_inbound -def parse_device_reference_data_payload(data, cache) -> DeviceReferenceData: +def parse_device_reference_data_payload( + data, cache +) -> CreateDeviceReferenceDataIncomingParams: payload: dict = data[parse_event_body] return CreateDeviceReferenceDataIncomingParams(**payload) diff --git a/src/api/readDevice/src/v1/steps.py b/src/api/readDevice/src/v1/steps.py index c41a95c0..6ec2fe73 100644 --- a/src/api/readDevice/src/v1/steps.py +++ b/src/api/readDevice/src/v1/steps.py @@ -34,7 +34,7 @@ def read_product(data, cache) -> CpmProduct: table_name=cache["DYNAMODB_TABLE"], dynamodb_client=cache["DYNAMODB_CLIENT"] ) cpm_product = product_repo.read( - product_team_id=product_team.id, id=path_params.product_id + id=path_params.product_id, product_team_id=product_team.id ) return cpm_product diff --git a/src/api/tests/feature_tests/features/createDevice.success.feature b/src/api/tests/feature_tests/features/createDevice.success.feature index 0526b2de..80afa69a 100644 --- a/src/api/tests/feature_tests/features/createDevice.success.feature +++ b/src/api/tests/feature_tests/features/createDevice.success.feature @@ -36,10 +36,11 @@ Feature: Create Device - success scenarios | keys | [] | | tags | [] | | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 345 | + | Content-Length | 374 | And I note the response field "$.id" as "device_id" When I make a "GET" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/${ note(device_id) }" Then I receive a status code "200" with body @@ -56,10 +57,11 @@ Feature: Create Device - success scenarios | keys | [] | | tags | [] | | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 345 | + | Content-Length | 374 | Examples: | product_team_id | 
@@ -96,10 +98,11 @@ Feature: Create Device - success scenarios | keys | [] | | tags | [] | | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 345 | + | Content-Length | 374 | And I note the response field "$.id" as "device_id" When I make a "GET" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/${ note(device_id) }" Then I receive a status code "200" with body @@ -116,10 +119,11 @@ Feature: Create Device - success scenarios | keys | [] | | tags | [] | | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 345 | + | Content-Length | 374 | Examples: | product_team_id | product_id | diff --git a/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature b/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature index 50d4a813..c75740b4 100644 --- a/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature +++ b/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature @@ -101,12 +101,49 @@ Feature: Create MHS Device - failure scenarios Then I receive a status code "400" with body | path | value | | errors.0.code | VALIDATION_ERROR | - | errors.0.message | Not an EPR Product: Cannot create MHS device for product without exactly one Party Key | + | errors.0.message | Not an EPR Product: Cannot create MHS Device for product without exactly one Party Key | And the response headers contain: | name | value | | Content-Type | application/json | | Content-Length | 143 | + Scenario: Cannot create a MHS Device for a Product that does not have a MessageSet Device Reference Data + Given I have already made a "POST" request with "default" headers to "ProductTeam" with body: + | path | value | + | name | My Great Product Team | + | ods_code | F5H1R | + And I note the response field "$.id" as "product_team_id" + And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/Epr" with body: + | path | value | + | name | My Great Product | + And I note the response field "$.id" as "product_id" + When I make a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/MessageHandlingSystem" with body: + | path | value | + | questionnaire_responses.spine_mhs.0.Address | http://example.com | + | questionnaire_responses.spine_mhs.0.Unique Identifier | 123456 | + | questionnaire_responses.spine_mhs.0.Managing Organization | Example Org | + | questionnaire_responses.spine_mhs.0.MHS Party key | party-key-001 | + | questionnaire_responses.spine_mhs.0.MHS CPA ID | cpa-id-001 | + | questionnaire_responses.spine_mhs.0.Approver URP | approver-123 | + | questionnaire_responses.spine_mhs.0.Contract Property Template Key | contract-key-001 | + | questionnaire_responses.spine_mhs.0.Date Approved | 2024-01-01 | + | questionnaire_responses.spine_mhs.0.Date DNS Approved | 2024-01-02 | + | questionnaire_responses.spine_mhs.0.Date Requested | 2024-01-03 | + | questionnaire_responses.spine_mhs.0.DNS Approver | dns-approver-456 | + | questionnaire_responses.spine_mhs.0.Interaction Type | FHIR | + | questionnaire_responses.spine_mhs.0.MHS FQDN | mhs.example.com | + | 
questionnaire_responses.spine_mhs.0.MHS Is Authenticated | PERSISTENT | + | questionnaire_responses.spine_mhs.0.Product Key | product-key-001 | + | questionnaire_responses.spine_mhs.0.Requestor URP | requestor-789 | + Then I receive a status code "400" with body + | path | value | + | errors.0.code | VALIDATION_ERROR | + | errors.0.message | You must configure exactly one MessageSet Device Reference Data before creating an MHS Device | + And the response headers contain: + | name | value | + | Content-Type | application/json | + | Content-Length | 150 | + Scenario: Cannot create a MHS Device with a Device body that has no questionnaire responses for 'spine_mhs' Given I have already made a "POST" request with "default" headers to "ProductTeam" with body: | path | value | @@ -117,17 +154,18 @@ Feature: Create MHS Device - failure scenarios | path | value | | name | My Great Product | And I note the response field "$.id" as "product_id" + And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/DeviceReferenceData/MhsMessageSet" When I make a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/MessageHandlingSystem" with body: | path | value | | questionnaire_responses.not_spine_mhs.0.Question | Answer | Then I receive a status code "400" with body - | path | value | - | errors.0.code | VALIDATION_ERROR | - | errors.0.message | Require a 'spine_mhs' questionnaire response to create a MHS Device | + | path | value | + | errors.0.code | VALIDATION_ERROR | + | errors.0.message | CreateMhsDeviceIncomingParams.questionnaire_responses.__key__: unexpected value; permitted: | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 124 | + | Content-Length | 195 | Scenario: Cannot create a MHS Device with a Device body that has multiple questionnaire responses for 'spine_mhs' Given I have already made a "POST" request with "default" headers to "ProductTeam" with body: @@ -139,6 +177,7 @@ Feature: Create MHS Device - failure scenarios | path | value | | name | My Great Product | And I note the response field "$.id" as "product_id" + And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/DeviceReferenceData/MhsMessageSet" When I make a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/MessageHandlingSystem" with body: | path | value | | questionnaire_responses.spine_mhs.0.Address | http://example.com | @@ -174,13 +213,13 @@ Feature: Create MHS Device - failure scenarios | questionnaire_responses.spine_mhs.1.Product Key | product-key-001 | | questionnaire_responses.spine_mhs.1.Requestor URP | requestor-789 | Then I receive a status code "400" with body - | path | value | - | errors.0.code | VALIDATION_ERROR | - | errors.0.message | Expected only one response for the 'spine_mhs' questionnaire | + | path | value | + | errors.0.code | VALIDATION_ERROR | + | errors.0.message | CreateMhsDeviceIncomingParams.questionnaire_responses.spine_mhs.__root__: ensure this value has at most 1 items | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 117 | + | Content-Length | 168 | Scenario: Cannot create a MHS Device with a Device body that has an invalid questionnaire responses for the questionnaire 'spine_mhs' Given I 
have already made a "POST" request with "default" headers to "ProductTeam" with body: @@ -192,6 +231,7 @@ Feature: Create MHS Device - failure scenarios | path | value | | name | My Great Product | And I note the response field "$.id" as "product_id" + And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/DeviceReferenceData/MhsMessageSet" When I make a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/MessageHandlingSystem" with body: | path | value | | questionnaire_responses.spine_mhs.0.Address | http://example.com | @@ -204,3 +244,63 @@ Feature: Create MHS Device - failure scenarios | name | value | | Content-Type | application/json | | Content-Length | 147 | + + Scenario: Cannot create a MHS Device with a Product that already has an MHS Device + Given I have already made a "POST" request with "default" headers to "ProductTeam" with body: + | path | value | + | name | My Great Product Team | + | ods_code | F5H1R | + And I note the response field "$.id" as "product_team_id" + And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/Epr" with body: + | path | value | + | name | My Great Product | + And I note the response field "$.id" as "product_id" + And I note the response field "$.keys.0.key_type" as "party_key_tag" + And I note the response field "$.keys.0.key_value" as "party_key_value" + And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/DeviceReferenceData/MhsMessageSet" + And I note the response field "$.id" as "message_set_drd_id" + And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/MessageHandlingSystem" with body: + | path | value | + | questionnaire_responses.spine_mhs.0.Address | http://example.com | + | questionnaire_responses.spine_mhs.0.Unique Identifier | 123456 | + | questionnaire_responses.spine_mhs.0.Managing Organization | Example Org | + | questionnaire_responses.spine_mhs.0.MHS Party key | party-key-001 | + | questionnaire_responses.spine_mhs.0.MHS CPA ID | cpa-id-001 | + | questionnaire_responses.spine_mhs.0.Approver URP | approver-123 | + | questionnaire_responses.spine_mhs.0.Contract Property Template Key | contract-key-001 | + | questionnaire_responses.spine_mhs.0.Date Approved | 2024-01-01 | + | questionnaire_responses.spine_mhs.0.Date DNS Approved | 2024-01-02 | + | questionnaire_responses.spine_mhs.0.Date Requested | 2024-01-03 | + | questionnaire_responses.spine_mhs.0.DNS Approver | dns-approver-456 | + | questionnaire_responses.spine_mhs.0.Interaction Type | FHIR | + | questionnaire_responses.spine_mhs.0.MHS FQDN | mhs.example.com | + | questionnaire_responses.spine_mhs.0.MHS Is Authenticated | PERSISTENT | + | questionnaire_responses.spine_mhs.0.Product Key | product-key-001 | + | questionnaire_responses.spine_mhs.0.Requestor URP | requestor-789 | + And I note the response field "$.id" as "device_id" + When I make a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/MessageHandlingSystem" with body: + | path | value | + | questionnaire_responses.spine_mhs.0.Address | http://example.com | + | questionnaire_responses.spine_mhs.0.Unique Identifier | 123457 | + | questionnaire_responses.spine_mhs.0.Managing 
Organization | Example Org | + | questionnaire_responses.spine_mhs.0.MHS Party key | party-key-003 | + | questionnaire_responses.spine_mhs.0.MHS CPA ID | cpa-id-001 | + | questionnaire_responses.spine_mhs.0.Approver URP | approver-123 | + | questionnaire_responses.spine_mhs.0.Contract Property Template Key | contract-key-001 | + | questionnaire_responses.spine_mhs.0.Date Approved | 2024-01-01 | + | questionnaire_responses.spine_mhs.0.Date DNS Approved | 2024-01-02 | + | questionnaire_responses.spine_mhs.0.Date Requested | 2024-01-03 | + | questionnaire_responses.spine_mhs.0.DNS Approver | dns-approver-456 | + | questionnaire_responses.spine_mhs.0.Interaction Type | FHIR | + | questionnaire_responses.spine_mhs.0.MHS FQDN | mhs.example.com | + | questionnaire_responses.spine_mhs.0.MHS Is Authenticated | PERSISTENT | + | questionnaire_responses.spine_mhs.0.Product Key | product-key-001 | + | questionnaire_responses.spine_mhs.0.Requestor URP | requestor-789 | + Then I receive a status code "400" with body + | path | value | + | errors.0.code | VALIDATION_ERROR | + | errors.0.message | There is already an existing MHS Device for this Product | + And the response headers contain: + | name | value | + | Content-Type | application/json | + | Content-Length | 113 | diff --git a/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.success.feature b/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.success.feature index 6c415d57..a93fd01b 100644 --- a/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.success.feature +++ b/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.success.feature @@ -15,12 +15,19 @@ Feature: Create MHS Device - success scenarios | keys.0.key_type | product_team_id_alias | | keys.0.key_value | FOOBAR | And I note the response field "$.id" as "product_team_id" - And I have already made a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/Epr" with body: + And I have already made a "POST" request with "default" headers to "ProductTeam//Product/Epr" with body: | path | value | | name | My Great Product | And I note the response field "$.id" as "product_id" - And I note the response field "$.keys.0.key_value" as "party_key" - When I make a "POST" request with "default" headers to "ProductTeam//Product//Device/MessageHandlingSystem" with body: + And I note the response field "$.keys.0.key_type" as "party_key_tag" + And I note the response field "$.keys.0.key_value" as "party_key_value" + And I have already made a "POST" request with "default" headers to "ProductTeam//Product/${ note(product_id) }/DeviceReferenceData/MhsMessageSet" with body: + | path | value | + | questionnaire_responses.spine_mhs_message_sets.0.Interaction ID | urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V001 | + | questionnaire_responses.spine_mhs_message_sets.0.MHS SN | urn:nhs:names:services:ers | + | questionnaire_responses.spine_mhs_message_sets.0.MHS IN | READ_PRACTITIONER_ROLE_R4_V001 | + And I note the response field "$.id" as "message_set_drd_id" + When I make a "POST" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/MessageHandlingSystem" with body: | path | value | | questionnaire_responses.spine_mhs.0.Address | http://example.com | | questionnaire_responses.spine_mhs.0.Unique Identifier | 123456 | @@ -39,42 +46,47 @@ Feature: Create MHS Device - success scenarios | questionnaire_responses.spine_mhs.0.Product Key | 
product-key-001 | | questionnaire_responses.spine_mhs.0.Requestor URP | requestor-789 | Then I receive a status code "201" with body - | path | value | - | id | << ignore >> | - | name | Product-MHS | - | status | active | - | product_id | ${ note(product_id) } | - | product_team_id | ${ note(product_team_id) } | - | ods_code | F5H1R | - | created_on | << ignore >> | - | updated_on | << ignore >> | - | deleted_on | << ignore >> | - | keys | [] | - | questionnaire_responses | << ignore >> | + | path | value | + | id | << ignore >> | + | name | Product-MHS | + | status | active | + | product_id | ${ note(product_id) } | + | product_team_id | ${ note(product_team_id) } | + | ods_code | F5H1R | + | created_on | << ignore >> | + | updated_on | << ignore >> | + | deleted_on | << ignore >> | + | keys.0.key_type | interaction_id | + | keys.0.key_value | F5H1R-850000:urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V001 | + | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 1104 | + | Content-Length | 1295 | And I note the response field "$.id" as "device_id" When I make a "GET" request with "default" headers to "ProductTeam/${ note(product_team_id) }/Product/${ note(product_id) }/Device/${ note(device_id) }" Then I receive a status code "200" with body - | path | value | - | id | ${ note(device_id) } | - | name | Product-MHS | - | status | active | - | product_id | ${ note(product_id) } | - | product_team_id | ${ note(product_team_id) } | - | ods_code | F5H1R | - | created_on | << ignore >> | - | updated_on | << ignore >> | - | deleted_on | << ignore >> | - | keys | [] | - | tags | << ignore >> | - | questionnaire_responses | << ignore >> | + | path | value | + | id | ${ note(device_id) } | + | name | Product-MHS | + | status | active | + | product_id | ${ note(product_id) } | + | product_team_id | ${ note(product_team_id) } | + | ods_code | F5H1R | + | created_on | << ignore >> | + | updated_on | << ignore >> | + | deleted_on | << ignore >> | + | keys.0.key_type | interaction_id | + | keys.0.key_value | F5H1R-850000:urn:nhs:names:services:ers:READ_PRACTITIONER_ROLE_R4_V001 | + | tags.0.0.0 | ${ note(party_key_tag) } | + | tags.0.0.1 | ${ note(party_key_value) } | + | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 1147 | + | Content-Length | 1338 | Examples: | product_team_id | product_id | diff --git a/src/api/tests/feature_tests/features/createDeviceReferenceDataAdditionalInteractions.failure.feature b/src/api/tests/feature_tests/features/createDeviceReferenceDataAdditionalInteractions.failure.feature index d1a48fe5..e0d3ac03 100644 --- a/src/api/tests/feature_tests/features/createDeviceReferenceDataAdditionalInteractions.failure.feature +++ b/src/api/tests/feature_tests/features/createDeviceReferenceDataAdditionalInteractions.failure.feature @@ -88,13 +88,13 @@ Feature: Create "Additional Interactions" Device Reference Data - failure scenar | path | value | | questionnaire_responses.bad_questionnaire_name.0.some_value | 123 | Then I receive a status code "400" with body - | path | value | - | errors.0.code | VALIDATION_ERROR | - | errors.0.message | CreateDeviceReferenceAdditionalInteractionsDataParams.questionnaire_responses.__key__: unexpected value; permitted: 'spine_as_additional_interactions' | + | path 
| value | + | errors.0.code | VALIDATION_ERROR | + | errors.0.message | CreateDeviceReferenceAdditionalInteractionsDataParams.questionnaire_responses.__key__: unexpected value; permitted: | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 207 | + | Content-Length | 265 | Scenario: Fail to create a second "AS Additional Interactions" Device Reference Data in the same EPR Product Given I have already made a "POST" request with "default" headers to "ProductTeam" with body: diff --git a/src/api/tests/feature_tests/features/createDeviceReferenceDataMessageSet.failure.feature b/src/api/tests/feature_tests/features/createDeviceReferenceDataMessageSet.failure.feature index b2599be3..0f008112 100644 --- a/src/api/tests/feature_tests/features/createDeviceReferenceDataMessageSet.failure.feature +++ b/src/api/tests/feature_tests/features/createDeviceReferenceDataMessageSet.failure.feature @@ -93,13 +93,13 @@ Feature: Create "Message Set" Device Reference Data - failure scenarios | path | value | | questionnaire_responses.bad_questionnaire_name.0.some_value | 123 | Then I receive a status code "400" with body - | path | value | - | errors.0.code | VALIDATION_ERROR | - | errors.0.message | CreateDeviceReferenceMessageSetsDataParams.questionnaire_responses.__key__: unexpected value; permitted: 'spine_mhs_message_sets' | + | path | value | + | errors.0.code | VALIDATION_ERROR | + | errors.0.message | CreateDeviceReferenceMessageSetsDataParams.questionnaire_responses.__key__: unexpected value; permitted: | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 186 | + | Content-Length | 234 | Scenario: Fail to create a second "MHS Message Set" Device Reference Data in the same EPR Product Given I have already made a "POST" request with "default" headers to "ProductTeam" with body: @@ -137,7 +137,7 @@ Feature: Create "Message Set" Device Reference Data - failure scenarios Then I receive a status code "400" with body | path | value | | errors.0.code | VALIDATION_ERROR | - | errors.0.message | Not an EPR Product: Cannot create MHS device for product without exactly one Party Key | + | errors.0.message | Not an EPR Product: Cannot create MHS Device for product without exactly one Party Key | And the response headers contain: | name | value | | Content-Type | application/json | diff --git a/src/api/tests/feature_tests/features/readDevice.success.feature b/src/api/tests/feature_tests/features/readDevice.success.feature index 37dcdd5b..25fbdf5b 100644 --- a/src/api/tests/feature_tests/features/readDevice.success.feature +++ b/src/api/tests/feature_tests/features/readDevice.success.feature @@ -38,10 +38,11 @@ Feature: Read Device - success scenarios | keys | [] | | tags | [] | | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 345 | + | Content-Length | 374 | Examples: | product_team_id | @@ -80,10 +81,11 @@ Feature: Read Device - success scenarios | keys | [] | | tags | [] | | questionnaire_responses | << ignore >> | + | device_reference_data | << ignore >> | And the response headers contain: | name | value | | Content-Type | application/json | - | Content-Length | 345 | + | Content-Length | 374 | Examples: | product_team_id | product_id | diff --git a/src/layers/domain/api/common_steps/read_product.py b/src/layers/domain/api/common_steps/read_product.py index 
9324cd1d..1b2d76b9 100644 --- a/src/layers/domain/api/common_steps/read_product.py +++ b/src/layers/domain/api/common_steps/read_product.py @@ -50,7 +50,7 @@ def get_party_key(data, cache) -> str: (party_key,) = party_keys except ValueError: raise NotEprProductError( - "Not an EPR Product: Cannot create MHS device for product without exactly one Party Key" + "Not an EPR Product: Cannot create MHS Device for product without exactly one Party Key" ) return party_key diff --git a/src/layers/domain/core/device/v1.py b/src/layers/domain/core/device/v1.py index d6498f55..363d6d13 100644 --- a/src/layers/domain/core/device/v1.py +++ b/src/layers/domain/core/device/v1.py @@ -20,6 +20,7 @@ UPDATED_ON = "updated_on" DEVICE_UPDATED_ON = f"device_{UPDATED_ON}" +MHS_DEVICE_NAME = "Product-MHS" class QuestionnaireNotFoundError(Exception): @@ -48,6 +49,7 @@ class DeviceCreatedEvent(Event): keys: list[DeviceKey] tags: list[str] questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] + device_reference_data: dict[str, list[str]] @dataclass(kw_only=True, slots=True) @@ -64,6 +66,7 @@ class DeviceUpdatedEvent(Event): keys: list[DeviceKey] tags: list[str] questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] + device_reference_data: dict[str, list[str]] @dataclass(kw_only=True, slots=True) @@ -81,6 +84,7 @@ class DeviceDeletedEvent(Event): tags: list[str] questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] deleted_tags: list[str] = None + device_reference_data: dict[str, list[str]] @dataclass(kw_only=True, slots=True) @@ -98,6 +102,7 @@ class DeviceKeyAddedEvent(Event): keys: list[DeviceKey] tags: list[str] questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] + device_reference_data: dict[str, list[str]] @dataclass(kw_only=True, slots=True) @@ -124,6 +129,7 @@ class DeviceTagAddedEvent(Event): keys: list[DeviceKey] tags: list[str] questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] + device_reference_data: dict[str, list[str]] @dataclass(kw_only=True, slots=True) @@ -141,6 +147,7 @@ class DeviceTagsAddedEvent(Event): keys: list[DeviceKey] tags: list[str] questionnaire_responses: dict[str, dict[str, "QuestionnaireResponse"]] + device_reference_data: dict[str, list[str]] @dataclass(kw_only=True, slots=True) @@ -154,7 +161,7 @@ class DeviceTagsClearedEvent(Event): @dataclass(kw_only=True, slots=True) class QuestionnaireResponseUpdatedEvent(Event): """ - This is adding the inital questionnaire response from the event body request. + This is adding the initial questionnaire response from the event body request. """ id: str @@ -164,6 +171,13 @@ class QuestionnaireResponseUpdatedEvent(Event): updated_on: str = None +@dataclass(kw_only=True, slots=True) +class DeviceReferenceDataIdAddedEvent(Event): + id: str + device_reference_data: dict[str, list[str]] + updated_on: str = None + + class DeviceTag(BaseModel): """ DeviceTag is a mechanism for indexing Device data. 
     In DynamoDB then intention is for this
@@ -246,6 +260,9 @@ class Device(AggregateRoot):
     questionnaire_responses: dict[str, list[QuestionnaireResponse]] = Field(
         default_factory=lambda: defaultdict(list)
     )
+    device_reference_data: dict[str, list[str]] = Field(
+        default_factory=lambda: defaultdict(list)
+    )
     def state_exclude_tags(self) -> dict:
         """
@@ -373,6 +390,16 @@ def add_questionnaire_response(
             },
         )
+    @event
+    def add_device_reference_data_id(
+        self, device_reference_data_id: str, path_to_data: list[str]
+    ) -> DeviceReferenceDataIdAddedEvent:
+        self.device_reference_data[device_reference_data_id] = path_to_data
+
+        return DeviceReferenceDataIdAddedEvent(
+            id=self.id, device_reference_data=self.device_reference_data
+        )
+
     def is_active(self):
         return self.status is Status.ACTIVE
@@ -387,5 +414,6 @@ class DeviceEventDeserializer(EventDeserializer):
         DeviceTagAddedEvent,
         DeviceTagsClearedEvent,
         DeviceTagsAddedEvent,
+        DeviceReferenceDataIdAddedEvent,
         QuestionnaireResponseUpdatedEvent,
     )

diff --git a/src/layers/domain/core/device_key/v1.py b/src/layers/domain/core/device_key/v1.py
index 160bcb39..0cc25769 100644
--- a/src/layers/domain/core/device_key/v1.py
+++ b/src/layers/domain/core/device_key/v1.py
@@ -11,6 +11,7 @@ class DeviceKeyType(StrEnum):
     PRODUCT_ID = auto()
     ACCREDITED_SYSTEM_ID = auto()
     MESSAGE_HANDLING_SYSTEM_ID = auto()
+    INTERACTION_ID = auto()
     @property
     def pattern(self) -> re.Pattern:
@@ -21,6 +22,8 @@ def pattern(self) -> re.Pattern:
             return SdsId.AccreditedSystem.ID_PATTERN
         case DeviceKeyType.MESSAGE_HANDLING_SYSTEM_ID:
             return SdsId.MessageHandlingSystem.ID_PATTERN
+        case DeviceKeyType.INTERACTION_ID:
+            return re.compile(rf"{SdsId.PartyKey.PARTY_KEY_REGEX[:-1]}:urn:[\w:]+$")
         case _:
             raise NotImplementedError(f"No ID validation configured for '{self}'")

diff --git a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py
index a26be3a1..8f7ac344 100644
--- a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py
+++ b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py
@@ -292,6 +292,7 @@ def device_created_event():
         keys=[],
         tags=["a", "b", "c"],
         questionnaire_responses={"foo": "bar"},
+        device_reference_data={},
     )

diff --git a/src/layers/domain/repository/device_repository/v1.py b/src/layers/domain/repository/device_repository/v1.py
index c3329df6..e5a33624 100644
--- a/src/layers/domain/repository/device_repository/v1.py
+++ b/src/layers/domain/repository/device_repository/v1.py
@@ -7,6 +7,7 @@
     DeviceDeletedEvent,
     DeviceKeyAddedEvent,
     DeviceKeyDeletedEvent,
+    DeviceReferenceDataIdAddedEvent,
     DeviceTag,
     DeviceTagAddedEvent,
     DeviceTagsAddedEvent,
@@ -20,7 +21,7 @@
 from domain.repository.compression import pkl_dumps_gzip, pkl_loads_gzip
 from domain.repository.keys import TableKey
 from domain.repository.marshall import marshall, marshall_value, unmarshall
-from domain.repository.repository import Repository
+from domain.repository.repository import Repository, TooManyResults
 from domain.repository.transaction import (
     ConditionExpression,
     TransactionStatement,
@@ -160,7 +161,7 @@ def __init__(self, table_name, dynamodb_client):
     def read(self, product_team_id: str, product_id: str, id: str):
         return super()._read(parent_ids=(product_team_id, product_id), id=id)
-    def search(self, product_team_id: str, product_id: str, id: str):
+    def search(self, product_team_id: str, product_id: str):
         return super()._query(parent_ids=(product_team_id, product_id))
     def handle_DeviceCreatedEvent(self, event: DeviceCreatedEvent) -> TransactItem:
@@ -394,28 +395,22 @@ def handle_DeviceTagsClearedEvent(self, event: DeviceTagsClearedEvent):
         )
         return delete_tags_transactions + update_transactions
-    def handle_QuestionnaireResponseUpdatedEvent(
-        self, event: QuestionnaireResponseUpdatedEvent
-    ):
-        data = {
-            "questionnaire_responses": event.questionnaire_responses,
-            "updated_on": event.updated_on,
-        }
-
-        # Update "questionnaire_responses" on the root and key-indexed Devices
-        keys = {DeviceKey(**key).key_value for key in event.keys}
-        update_root_and_key_transactions = self.update_indexes(
-            id=event.id, keys=keys, data=data
+    def handle_DeviceReferenceDataIdAddedEvent(
+        self, event: DeviceReferenceDataIdAddedEvent
+    ) -> TransactItem:
+        pk = TableKey.DEVICE.key(event.id)
+        data = asdict(event)
+        data.pop("id")
+        return update_transactions(
+            table_name=self.table_name, primary_keys=[marshall(pk=pk, sk=pk)], data=data
         )
-        # Update "questionnaire_responses" on the tag-indexed Devices
-        update_tag_transactions = update_tag_indexes(
-            table_name=self.table_name,
-            device_id=event.id,
-            tag_values=event.tags,
-            data=data,
-        )
-        return update_root_and_key_transactions + update_tag_transactions
+    def handle_QuestionnaireResponseUpdatedEvent(
+        self, event: QuestionnaireResponseUpdatedEvent
+    ) -> TransactItem:
+        data = asdict(event)
+        data.pop("id")
+        return self.update_indexes(id=event.id, keys=[], data=data)
     def handle_bulk(self, item: dict) -> list[dict]:
         parent_key = (item["product_team_id"], item["product_id"])

diff --git a/src/layers/domain/request_models/v1.py b/src/layers/domain/request_models/v1.py
index 0434134b..c1dcf16a 100644
--- a/src/layers/domain/request_models/v1.py
+++ b/src/layers/domain/request_models/v1.py
@@ -1,7 +1,9 @@
 from collections import defaultdict
 from typing import Literal
+from domain.core.device import MHS_DEVICE_NAME
 from domain.core.product_team_key import ProductTeamKey
+from domain.repository.questionnaire_repository import QuestionnaireInstance
 from pydantic import BaseModel, Extra, Field
 ALPHANUMERIC_SPACES_AND_UNDERSCORES = r"^[a-zA-Z0-9 _]*$"
@@ -38,16 +40,16 @@ class CreateDeviceReferenceDataIncomingParams(BaseModel, extra=Extra.forbid):
 class CreateDeviceReferenceMessageSetsDataParams(BaseModel, extra=Extra.forbid):
-    questionnaire_responses: dict[Literal["spine_mhs_message_sets"], list[dict]] = (
-        Field(default_factory=lambda: defaultdict(list))
-    )
+    questionnaire_responses: dict[
+        Literal[QuestionnaireInstance.SPINE_MHS_MESSAGE_SETS], list[dict]
+    ] = Field(default_factory=lambda: defaultdict(list))
 class CreateDeviceReferenceAdditionalInteractionsDataParams(
     BaseModel, extra=Extra.forbid
 ):
     questionnaire_responses: dict[
-        Literal["spine_as_additional_interactions"], list[dict]
+        Literal[QuestionnaireInstance.SPINE_AS_ADDITIONAL_INTERACTIONS], list[dict]
     ] = Field(default_factory=lambda: defaultdict(list))
@@ -67,9 +69,15 @@ class CreateDeviceIncomingParams(BaseModel, extra=Extra.forbid):
     name: str = Field(...)
+class SpineMhsQuestionnaireRsponse(BaseModel):
+    __root__: list[dict] = Field(min_items=1, max_items=1)
+
+
 class CreateMhsDeviceIncomingParams(BaseModel, extra=Extra.forbid):
-    name: str = "Product-MHS"
-    questionnaire_responses: dict[str, list[dict]] = Field(...)
+    name: str = MHS_DEVICE_NAME
+    questionnaire_responses: dict[
+        Literal[QuestionnaireInstance.SPINE_MHS], SpineMhsQuestionnaireRsponse
+    ] = Field(...)
 class DevicePathParams(BaseModel, extra=Extra.forbid):

From 6e1be8e8b750a10c4d844c1dbdd95dfaaf49afdf Mon Sep 17 00:00:00 2001
From: Megan
Date: Tue, 19 Nov 2024 12:33:59 +0000
Subject: [PATCH 05/12] release/2024-11-19-a Re-trigger pipeline commit

---
 .../features/createDeviceMessageHandlingSystem.failure.feature | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature b/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature
index c75740b4..62afa72c 100644
--- a/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature
+++ b/src/api/tests/feature_tests/features/createDeviceMessageHandlingSystem.failure.feature
@@ -7,7 +7,7 @@ Feature: Create MHS Device - failure scenarios
 | version | 1 |
 | Authorization | letmein |
- Scenario: Cannot create a MHS Device with a Device body that is missing fields (no questionnaire_responses) and has extra param
+ Scenario: Cannot create a MHS Device with a body that is missing fields (no questionnaire_responses) and has extra param
 Given I have already made a "POST" request with "default" headers to "ProductTeam" with body:
 | path | value |
 | name | My Great Product Team |

From 50ff245ad48ccedadab26cce3ddbabba5e92e13b Mon Sep 17 00:00:00 2001
From: Megan
Date: Tue, 19 Nov 2024 12:58:10 +0000
Subject: [PATCH 06/12] release/2024-11-19-a Shorten lambda name

---
 infrastructure/terraform/per_workspace/main.tf | 3 ++-
 .../per_workspace/modules/api_entrypoint/api_gateway/locals.tf | 2 +-
 2 files changed, 3 insertions(+), 2 deletions(-)

diff --git a/infrastructure/terraform/per_workspace/main.tf b/infrastructure/terraform/per_workspace/main.tf
index 5b94404f..216f5551 100644
--- a/infrastructure/terraform/per_workspace/main.tf
+++ b/infrastructure/terraform/per_workspace/main.tf
@@ -80,7 +80,8 @@ module "lambdas" {
   source = "./modules/api_worker/api_lambda"
   python_version = var.python_version
   name = each.key
-  lambda_name = "${local.project}--${replace(terraform.workspace, "_", "-")}--${replace(replace(replace(each.key, "_", "-"), "DeviceReferenceData", "DeviceRefData"), "MessageHandlingSystem", "MHS")}"
+  lambda_name = "${local.project}--${replace(terraform.workspace, "_", "-")}--${replace(replace(replace(replace(each.key, "_", "-"), "DeviceReferenceData", "DeviceRefData"), "MessageHandlingSystem", "MHS"), "MessageSet", "MsgSet")}"
+  //Compact will remove all nulls from the list and create a new one - this is because TF throws an error if there is a null item in the list.
   layers = concat(
     compact([for instance in module.layers : contains(var.api_lambda_layers, instance.name) ? instance.layer_arn : null]),

diff --git a/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/locals.tf b/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/locals.tf
index 03f82eb6..5d50c10f 100644
--- a/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/locals.tf
+++ b/infrastructure/terraform/per_workspace/modules/api_entrypoint/api_gateway/locals.tf
@@ -5,7 +5,7 @@ locals {
   }
   methods = [
     for lambda_alias in setsubtract(var.lambdas, ["authoriser"]) :
-    { "method_${lambda_alias}" = "${local.apigateway_lambda_arn_prefix}:${var.assume_account}:function:${var.project}--${replace(terraform.workspace, "_", "-")}--${replace(replace(replace(lambda_alias, "_", "-"), "DeviceReferenceData", "DeviceRefData"), "MessageHandlingSystem", "MHS")}/invocations" }
+    { "method_${lambda_alias}" = "${local.apigateway_lambda_arn_prefix}:${var.assume_account}:function:${var.project}--${replace(terraform.workspace, "_", "-")}--${replace(replace(replace(replace(lambda_alias, "_", "-"), "DeviceReferenceData", "DeviceRefData"), "MessageHandlingSystem", "MHS"), "MessageSet", "MsgSet")}/invocations" }
   ]
   swagger_file = templatefile("${path.root}/../../swagger/dist/aws/swagger.yaml", merge({
     lambda_invoke_arn = var.authoriser_metadata.lambda_invoke_arn,

From a1413a6bd81610c3d570992afb5b4b266a552f34 Mon Sep 17 00:00:00 2001
From: Joel Klinger
Date: Wed, 20 Nov 2024 09:10:04 +0000
Subject: [PATCH 07/12] [release/2024-11-20] create release

---
 CHANGELOG.md | 8 ++++----
 VERSION | 2 +-
 changelog/{2024-11-19-a.md => 2024-11-20.md} | 0
 pyproject.toml | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)
 rename changelog/{2024-11-19-a.md => 2024-11-20.md} (100%)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index f8b0c8d7..93926277 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,12 +1,12 @@
 # Changelog
-## 2024-11-19
-- [PI-601] Workspace destroy, use main branch as fallback
-
-## 2024-11-19-a
+## 2024-11-20
 - [PI-528] Collapse versioning to v1
 - [PI-581] MHS Device with Device Reference Data
+## 2024-11-19
+- [PI-601] Workspace destroy, use main branch as fallback
+
 ## 2024-11-18
 - [PI-601] Workspace destroy, use main branch if branch no longer exists

diff --git a/VERSION b/VERSION
index fdc45e84..94e466cc 100644
--- a/VERSION
+++ b/VERSION
@@ -1 +1 @@
-2024.11.19.a
+2024.11.20

diff --git a/changelog/2024-11-19-a.md b/changelog/2024-11-20.md
similarity index 100%
rename from changelog/2024-11-19-a.md
rename to changelog/2024-11-20.md

diff --git a/pyproject.toml b/pyproject.toml
index f7f039e4..546f0e2f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "connecting-party-manager"
-version = "2024.11.19.a"
+version = "2024.11.20"
 description = "Repository for the Connecting Party Manager API and related services"
 authors = ["NHS England"]
 license = "LICENSE.md"

From e7464f44a01daa5a22b5f2eafc13fe970a281953 Mon Sep 17 00:00:00 2001
From: Megan
Date: Thu, 21 Nov 2024 16:31:28 +0000
Subject: [PATCH 08/12] release/2024-11-20 Update QuestionnaireResponseUpdatedEvent

---
 src/layers/domain/repository/device_repository/v1.py | 4 +---
 1 file changed, 1 insertion(+), 3 deletions(-)

diff --git a/src/layers/domain/repository/device_repository/v1.py b/src/layers/domain/repository/device_repository/v1.py
index e5a33624..eeaa2fff 100644
--- a/src/layers/domain/repository/device_repository/v1.py
+++ b/src/layers/domain/repository/device_repository/v1.py
@@ -408,9 +408,7 @@ def handle_DeviceReferenceDataIdAddedEvent(
     def handle_QuestionnaireResponseUpdatedEvent(
         self, event: QuestionnaireResponseUpdatedEvent
     ) -> TransactItem:
-        data = asdict(event)
-        data.pop("id")
-        return self.update_indexes(id=event.id, keys=[], data=data)
+        return self.handle_DeviceUpdatedEvent(event=event)
     def handle_bulk(self, item: dict) -> list[dict]:
         parent_key = (item["product_team_id"], item["product_id"])

From 075fe4f5839ab569cc873c888c02daf88f2b905b Mon Sep 17 00:00:00 2001
From: Joel Klinger
Date: Fri, 22 Nov 2024 12:11:59 +0000
Subject: [PATCH 09/12] [release/2024-11-20] fix forward: can't deploy to int workspace

---
 .github/workflows/deploy-nonprod-workspace.yml | 2 +-
 .../tests/v1/test_device_repository_v1.py | 18 +++++++++++++++++-
 src/test_helpers/dynamodb.py | 4 ++++
 3 files changed, 22 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/deploy-nonprod-workspace.yml b/.github/workflows/deploy-nonprod-workspace.yml
index dc329d63..8a350c31 100644
--- a/.github/workflows/deploy-nonprod-workspace.yml
+++ b/.github/workflows/deploy-nonprod-workspace.yml
@@ -11,7 +11,7 @@ on:
         options:
           - dev
           - qa
-          - int
+          # - int
           - ref
     sandbox:
       description: Do you want to deploy the sandbox version?

diff --git a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py
index 8f7ac344..2dffaf3b 100644
--- a/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py
+++ b/src/layers/domain/repository/device_repository/tests/v1/test_device_repository_v1.py
@@ -121,6 +121,7 @@ def test__device_repository__device_does_not_exist_local(
 @pytest.mark.integration
 def test__device_repository__update(device: Device, repository: DeviceRepository):
+    print(f"--> Writing device {device}") # noqa
     repository.write(device)
     # Retrieve the model and treat this as the initial state
@@ -129,8 +130,16 @@ def test__device_repository__update(device: Device, repository: DeviceRepository
         product_id=device.product_id,
         id=device.id,
     )
-    intermediate_device.update(name="foo-bar")
+    intermediate_device_by_key = repository.read(
+        product_team_id=device.product_team_id,
+        product_id=device.product_id,
+        id=DEVICE_KEY,
+    )
+    print(f"--> Got intermediate device {intermediate_device}") # noqa
+    print(f"--> Got intermediate device by key {intermediate_device_by_key}") # noqa
+    intermediate_device.update(name="foo-bar")
+    print(f"--> Updated intermediate device {intermediate_device}") # noqa
     repository.write(intermediate_device)
     final_device = repository.read(
@@ -138,6 +147,13 @@ def test__device_repository__update(device: Device, repository: DeviceRepository
         product_id=device.product_id,
         id=device.id,
     )
+    final_device_by_key = repository.read(
+        product_team_id=device.product_team_id,
+        product_id=device.product_id,
+        id=DEVICE_KEY,
+    )
+    print(f"--> Got final device {final_device}") # noqa
+    print(f"--> Got final device by key {final_device_by_key}") # noqa
     assert final_device.name == "foo-bar"

diff --git a/src/test_helpers/dynamodb.py b/src/test_helpers/dynamodb.py
index a96bd72c..ddfbbd63 100644
--- a/src/test_helpers/dynamodb.py
+++ b/src/test_helpers/dynamodb.py
@@ -56,6 +56,10 @@ def clear_dynamodb_table(
     if transact_items:
         client.transact_write_items(TransactItems=transact_items)
+    ### DELETE ME
+    items_in_db = _scan(client=client, table_name=table_name)
+    print(f"==> There are now {len(items_in_db)} items in the db") # noqa
+
 @contextmanager
 def patch_dynamodb_client(client: DynamoDBClient):
From aeb3ddfda4edbf2eda5b02cbc364e87f5b0491f2 Mon Sep 17 00:00:00 2001
From: Megan
Date: Fri, 22 Nov 2024 12:38:38 +0000
Subject: [PATCH 10/12] release/2024-11-20 Debugging to be deleted

---
 src/test_helpers/dynamodb.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/test_helpers/dynamodb.py b/src/test_helpers/dynamodb.py
index ddfbbd63..358a1da5 100644
--- a/src/test_helpers/dynamodb.py
+++ b/src/test_helpers/dynamodb.py
@@ -57,7 +57,7 @@ def clear_dynamodb_table(
         client.transact_write_items(TransactItems=transact_items)
     ### DELETE ME
-    items_in_db = _scan(client=client, table_name=table_name)
+    items_in_db = list(_scan(client=client, table_name=table_name))
     print(f"==> There are now {len(items_in_db)} items in the db") # noqa

From 01302a6aacdeaa63dad0ee06bd2a7ced2b00d000 Mon Sep 17 00:00:00 2001
From: Joel Klinger
Date: Fri, 22 Nov 2024 13:53:02 +0000
Subject: [PATCH 11/12] [release/2024-11-20] more debugging

---
 src/layers/domain/repository/repository/v1.py | 5 ++++-
 src/layers/domain/repository/transaction.py | 1 +
 2 files changed, 5 insertions(+), 1 deletion(-)

diff --git a/src/layers/domain/repository/repository/v1.py b/src/layers/domain/repository/repository/v1.py
index 0ba06016..5266663b 100644
--- a/src/layers/domain/repository/repository/v1.py
+++ b/src/layers/domain/repository/repository/v1.py
@@ -88,10 +88,12 @@ def _split_transactions_by_key(
         item = transaction_statement.Key or transaction_statement.Item
         key = (item["pk"]["S"], item["sk"]["S"])
         if key in keys:
+            print("ooo> Batch writing keys", keys) # noqa
            yield from batched(buffer, n=n_max)
            buffer, keys = [], set()
        buffer.append(transact_item)
        keys.add(key)
+    print("ooo> Batch writing keys", keys) # noqa
    yield from batched(buffer, n=n_max)
@@ -101,6 +103,7 @@ def transact_write_chunk(
    transaction = Transaction(TransactItems=chunk)
    with handle_client_errors(commands=chunk):
        _response = client.transact_write_items(**transaction.dict(exclude_none=True))
+    print("ooo> DB Response", _response) # noqa
    return _response
@@ -141,7 +144,7 @@ def generate_transaction_statements(event):
        handler_name = f"handle_{type(event).__name__}"
        handler = getattr(self, handler_name)
        transact_items = handler(event=event)
-
+        print(f"ooo> Handler {handler_name} --> {transact_items}") # noqa
        if not isinstance(transact_items, list):
            transact_items = [transact_items]
        return transact_items

diff --git a/src/layers/domain/repository/transaction.py b/src/layers/domain/repository/transaction.py
index a0a50649..83bb1a00 100644
--- a/src/layers/domain/repository/transaction.py
+++ b/src/layers/domain/repository/transaction.py
@@ -123,6 +123,7 @@ def update_transactions(
    transact_items = [
        TransactItem(Update=update_statement(Key=key)) for key in primary_keys
    ]
+    print(f"ooo> Issuing transactions {transact_items}") # noqa
    return transact_items

From 46c9127296ce4d2b376413acade1f79753e7a4cc Mon Sep 17 00:00:00 2001
From: Joel Klinger
Date: Fri, 22 Nov 2024 14:37:35 +0000
Subject: [PATCH 12/12] [release/2024-11-20] exit on first failure

---
 .github/workflows/pull-requests.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/pull-requests.yml b/.github/workflows/pull-requests.yml
index 11aec8fc..bac3599f 100644
--- a/.github/workflows/pull-requests.yml
+++ b/.github/workflows/pull-requests.yml
@@ -249,7 +249,7 @@ jobs:
           ref: ${{ env.BRANCH_NAME }}
       - uses: ./.github/actions/make/
         with:
-          command: test--feature--integration
+          command: test--feature--integration PYTEST_FLAGS=-xvv
           requires-aws: true
   test--smoke: