diff --git a/app/model/osdu_model.py b/app/model/osdu_model.py index aa6e3c4b51345ce85994b88fb718f21c7dc2cd97..e68744453127c912f1d23b3ac00e135b5ce60061 100644 --- a/app/model/osdu_model.py +++ b/app/model/osdu_model.py @@ -1243,12 +1243,15 @@ class WellData(AbstractCommonResources100, AbstractMaster100, AbstractFacility10 ExtensionProperties: Optional[Dict[str, Any]] = None +WellId = constr(regex=r'^[\w\-\.]+:master-data\-\-Well:[\w\-\.\:\%]+$') + + class Well(DDMSBaseModel): """ The origin of a set of wellbores. """ - id: Optional[constr(regex=r'^[\w\-\.]+:master-data\-\-Well:[\w\-\.\:\%]+$')] = Field( + id: Optional[WellId] = Field( None, description='Previously called ResourceID or SRN which identifies this OSDU resource object without version.', example='namespace:master-data--Well:6c60ceb0-3521-57b7-9bd8-e1d7c9f66230', @@ -1384,14 +1387,15 @@ class WellBoreData(AbstractCommonResources100, AbstractMaster100, AbstractFacili ExtensionProperties: Optional[Dict[str, Any]] = None +WellboreId = constr(regex=r'^[\w\-\.]+:master-data\-\-Wellbore:[\w\-\.\:\%]+$') + + class Wellbore(DDMSBaseModel): """ A hole in the ground extending from a point at the earth's surface to the maximum point of penetration. """ - id: Optional[ - constr(regex=r'^[\w\-\.]+:master-data\-\-Wellbore:[\w\-\.\:\%]+$') - ] = Field( + id: Optional[WellboreId] = Field( None, description='Previously called ResourceID or SRN which identifies this OSDU resource object without version.', example='namespace:master-data--Wellbore:c7c421a7-f496-5aef-8093-298c32bfdea9', diff --git a/app/model_examples/well_v2.json b/app/model_examples/well_v2.json index 4d0b2b49e605153eb034afb9304e6c0d1e778446..73d8a4e087e96a0d959d78fa6d0cc9d8c8d29f37 100644 --- a/app/model_examples/well_v2.json +++ b/app/model_examples/well_v2.json @@ -2,10 +2,10 @@ { "acl": { "viewers": [ - "data.default.viewers@{{datapartitionid}}.{{domain}}.com" + "data.default.viewers@{{datapartitionid}}.{{domain}}" ], "owners": [ - "data.default.owners@{{datapartitionid}}.{{domain}}.com" + "data.default.owners@{{datapartitionid}}.{{domain}}" ] }, "data": { @@ -25,4 +25,4 @@ "status": "compliant" } } -] \ No newline at end of file +] diff --git a/app/model_examples/well_v3.json b/app/model_examples/well_v3.json index efb46d31cb801ff08a25293477052708efb7e9b7..4506dd0516958722981639f6a616e48e8d85e02d 100644 --- a/app/model_examples/well_v3.json +++ b/app/model_examples/well_v3.json @@ -13,7 +13,7 @@ "FacilityNameAliases": [ { "AliasName": "20-000-00000-00", - "AliasNameTypeID": "{{datapartitionid}}:osdu:reference-data--AliasNameType:UniqueIdentifier:" + "AliasNameTypeID": "{{datapartitionid}}:reference-data--AliasNameType:UniqueIdentifier:" } ], "ExtensionProperties": {} @@ -31,4 +31,4 @@ "status": "compliant" } } -] \ No newline at end of file +] diff --git a/app/model_examples/wellbore_v2.json b/app/model_examples/wellbore_v2.json index 49646687c976da15f16d6f7f10b81355c8857200..36450e38f23c64e21865e9f836a9606f2ab49efe 100644 --- a/app/model_examples/wellbore_v2.json +++ b/app/model_examples/wellbore_v2.json @@ -2,10 +2,10 @@ { "acl": { "viewers": [ - "data.default.viewers@{{datapartitionid}}.{{domain}}.com" + "data.default.viewers@{{datapartitionid}}.{{domain}}" ], "owners": [ - "data.default.owners@{{datapartitionid}}.{{domain}}.com" + "data.default.owners@{{datapartitionid}}.{{domain}}" ] }, "data": { @@ -130,4 +130,4 @@ "status": "compliant" } } -] \ No newline at end of file +] diff --git a/app/model_examples/wellbore_v3.json b/app/model_examples/wellbore_v3.json index 
ec902b6a7a8abc9692afb29957c8efbf654868ab..f41ad109f1e56ba1347d37c1b2534eb6f921e89b 100644 --- a/app/model_examples/wellbore_v3.json +++ b/app/model_examples/wellbore_v3.json @@ -19,7 +19,7 @@ }, "data": { "ExtensionProperties": {}, - "Geocontexts": [ + "GeoContexts": [ { "GeoTypeID": "datapartition:osdu:reference-data--GeoPoliticalEntityType:Country:", "GeoPoliticalEntityID": "{{datapartitionid}}:osdu:master-data--GeoPoliticalEntity:USA:" @@ -33,7 +33,7 @@ "FacilityNameAliases": [ { "AliasName": "33-089-00300-00-01", - "AliasNameTypeID": "{{datapartitionid}}:osdu:reference-data--AliasNameType:UniqueIdentifier:" + "AliasNameTypeID": "{{datapartitionid}}:reference-data--AliasNameType:UniqueIdentifier:" } ], "SpatialLocation": { @@ -96,4 +96,4 @@ } ] } -] \ No newline at end of file +] diff --git a/app/routers/ddms_v2/well_ddms_v2.py b/app/routers/ddms_v2/well_ddms_v2.py index 632214cda99dd54b4588f5dfa38b4a735abdb9ff..40b1dbeb4374734dfd318a99402d976ae5b40892 100644 --- a/app/routers/ddms_v2/well_ddms_v2.py +++ b/app/routers/ddms_v2/well_ddms_v2.py @@ -125,6 +125,7 @@ async def get_well_version( data_partition_id=ctx.partition_id) return from_record(well, well_record) + @router.post('/wells', response_model=CreateUpdateRecordsResponse, summary="Create or update the Wells using wks:well:1.0.2 schema", diff --git a/app/routers/ddms_v3/ddms_v3_utils.py b/app/routers/ddms_v3/ddms_v3_utils.py index c87f7ebb3a9cd9eb6bf18fe287b71c1630986d06..0f969a0a23f22a7f457a8103f1a61fba2b6e4aa7 100644 --- a/app/routers/ddms_v3/ddms_v3_utils.py +++ b/app/routers/ddms_v3/ddms_v3_utils.py @@ -26,10 +26,12 @@ from app.routers.record_utils import fetch_record from app.utils import Context, get_ctx OSDU_WELL_VERSION_REGEX = re.compile(r"^([\w\-\.]+:master-data\-\-Well:[\w\-\.\:\%]+):([0-9]*)$") -OSDU_WELL_REGEX = re.compile(r"^[\w\-\.]+:master-data\-\-Well:[\w\-\.\:\%]+$") +# Now done via osdu_model +# OSDU_WELL_REGEX = re.compile(r"^[\w\-\.]+:master-data\-\-Well:[\w\-\.\:\%]+$") OSDU_WELLBORE_VERSION_REGEX = re.compile(r"^([\w\-\.]+:master-data\-\-Wellbore:[\w\-\.\:\%]+):([0-9]*)$") -OSDU_WELLBORE_REGEX = re.compile(r"^[\w\-\.]+:master-data\-\-Wellbore:[\w\-\.\:\%]+$") +# Now done via osdu_model +# OSDU_WELLBORE_REGEX = re.compile(r"^[\w\-\.]+:master-data\-\-Wellbore:[\w\-\.\:\%]+$") OSDU_WELLLOG_VERSION_REGEX = re.compile(r"^([\w\-\.]+:work-product-component\-\-WellLog:[\w\-\.\:\%]+):([0-9]*)$") OSDU_WELLLOG_REGEX = re.compile(r"^[\w\-\.]+:work-product-component\-\-WellLog:[\w\-\.\:\%]+$") @@ -54,13 +56,6 @@ entity_names = { class DMSV3RouterUtils: - @staticmethod - def is_osdu_wellbore_id(entity_id: str) -> bool: - return OSDU_WELLBORE_REGEX.match(entity_id) is not None - - @staticmethod - def is_osdu_well_id(entity_id: str) -> bool: - return OSDU_WELL_REGEX.match(entity_id) is not None @staticmethod def is_osdu_versioned_entity_id(entity_regexp, entity_id: str) -> Tuple[bool, str, str]: diff --git a/app/routers/ddms_v3/well_ddms_v3.py b/app/routers/ddms_v3/well_ddms_v3.py index 2d174d94bb6044b9c76af8c774eeec752d750bcc..af319ec6f5a3bd4e0807faff5762f915f543b4a1 100644 --- a/app/routers/ddms_v3/well_ddms_v3.py +++ b/app/routers/ddms_v3/well_ddms_v3.py @@ -26,6 +26,7 @@ from ..common_parameters import REQUIRED_ROLES_READ, REQUIRED_ROLES_WRITE from app.utils import Context from app.utils import get_ctx from app.utils import load_schema_example +from app.model.osdu_model import WellId from app.model.model_utils import to_record, from_record from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils from 
app.routers.record_utils import fetch_record @@ -54,15 +55,13 @@ async def get_osdu_well(wellid: str, ctx: Context) -> Well: }, ) async def get_well_osdu( - wellid: str, ctx: Context = Depends(get_ctx) + wellid: WellId, ctx: Context = Depends(get_ctx) ) -> Well: is_osdu_versioned, osdu_id, version = DMSV3RouterUtils.is_osdu_versioned_well_id(wellid) if is_osdu_versioned: return await get_osdu_well(osdu_id, ctx) - if DMSV3RouterUtils.is_osdu_well_id(wellid): + else: return await get_osdu_well(wellid, ctx) - raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Id is not OSDU Well") - @router.delete( @@ -80,7 +79,7 @@ async def get_well_osdu( }, }, ) -async def del_osdu_well(wellid: str, ctx: Context = Depends(get_ctx)): +async def del_osdu_well(wellid: WellId, ctx: Context = Depends(get_ctx)): storage_client = await get_storage_record_service(ctx) await storage_client.delete_record( id=wellid, data_partition_id=ctx.partition_id @@ -98,7 +97,7 @@ async def del_osdu_well(wellid: str, ctx: Context = Depends(get_ctx)): }, ) async def get_osdu_well_versions( - wellid: str, request: Request, ctx: Context = Depends(get_ctx) + wellid: WellId, request: Request, ctx: Context = Depends(get_ctx) ) -> RecordVersions: record = await fetch_record(ctx, wellid) DMSV3RouterUtils.raise_if_not_osdu_right_entity_kind(record, request.state) @@ -120,7 +119,7 @@ async def get_osdu_well_versions( response_model_exclude_unset=True, ) async def get_osdu_well_version( - wellid: str, version: int, request: Request, ctx: Context = Depends(get_ctx) + wellid: WellId, version: int, request: Request, ctx: Context = Depends(get_ctx) ) -> Well: storage_client = await get_storage_record_service(ctx) well_record = await storage_client.get_record_version( diff --git a/app/routers/ddms_v3/wellbore_ddms_v3.py b/app/routers/ddms_v3/wellbore_ddms_v3.py index 4e20be753b5e39a86cd7a5bd267bc84489acbf4a..4beee2c8cd79bfa38f60753968113ea8e3557924 100644 --- a/app/routers/ddms_v3/wellbore_ddms_v3.py +++ b/app/routers/ddms_v3/wellbore_ddms_v3.py @@ -18,7 +18,7 @@ from starlette.requests import Request from app.clients.storage_service_client import get_storage_record_service from app.model.model_utils import from_record, to_record -from app.model.osdu_model import Wellbore +from app.model.osdu_model import Wellbore, WellboreId from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils from app.routers.record_utils import fetch_record from app.utils import Context, get_ctx, load_schema_example @@ -49,14 +49,13 @@ async def get_osdu_wellbore(wellboreid: str, ctx: Context) -> Wellbore: }, ) async def get_wellbore_osdu( - wellboreid: str, ctx: Context = Depends(get_ctx) + wellboreid: WellboreId, ctx: Context = Depends(get_ctx) ) -> Wellbore: is_osdu_versioned, osdu_id, version = DMSV3RouterUtils.is_osdu_versioned_wellbore_id(wellboreid) if is_osdu_versioned: return await get_osdu_wellbore(osdu_id, ctx) - if DMSV3RouterUtils.is_osdu_wellbore_id(wellboreid): + else: return await get_osdu_wellbore(wellboreid, ctx) - raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail="Id is not OSDU Wellbore") @router.delete( @@ -74,7 +73,7 @@ async def get_wellbore_osdu( }, }, ) -async def del_osdu_wellbore(wellboreid: str, ctx: Context = Depends(get_ctx)): +async def del_osdu_wellbore(wellboreid: WellboreId, ctx: Context = Depends(get_ctx)): storage_client = await get_storage_record_service(ctx) await storage_client.delete_record( id=wellboreid, data_partition_id=ctx.partition_id @@ -92,7 +91,7 @@ async def 
del_osdu_wellbore(wellboreid: str, ctx: Context = Depends(get_ctx)): }, ) async def get_osdu_wellbore_versions( - wellboreid: str, request: Request, ctx: Context = Depends(get_ctx) + wellboreid: WellboreId, request: Request, ctx: Context = Depends(get_ctx) ) -> RecordVersions: record = await fetch_record(ctx, wellboreid) DMSV3RouterUtils.raise_if_not_osdu_right_entity_kind(record, request.state) @@ -114,7 +113,7 @@ async def get_osdu_wellbore_versions( response_model_exclude_unset=True, ) async def get_osdu_wellbore_version( - wellboreid: str, version: int, request: Request, ctx: Context = Depends(get_ctx) + wellboreid: WellboreId, version: int, request: Request, ctx: Context = Depends(get_ctx) ) -> Wellbore: storage_client = await get_storage_record_service(ctx) wellbore_record = await storage_client.get_record_version( diff --git a/app/wdms_app.py b/app/wdms_app.py index 896c5363473b0596fe29ba4e41dce969b2d29a89..33354b9cb98bdfabca1def70c14e1ff47da7f1fe 100644 --- a/app/wdms_app.py +++ b/app/wdms_app.py @@ -153,12 +153,12 @@ async def startup_event(): async def shutdown_event(): # clients close storage_client = await app_injector.get(StorageRecordServiceClient) - if storage_client is not None: + if storage_client is not None and hasattr(storage_client, 'api_client'): await storage_client.api_client.close() search_client = await app_injector.get(SearchServiceClient) - if search_client is not None: - await storage_client.api_client.close() + if search_client is not None and hasattr(search_client, 'api_client'): + await search_client.api_client.close() await get_http_client_session().close() await DaskClient.close() diff --git a/requirements_dev.in b/requirements_dev.in index 50d69bb0c72e66559089b8cf2cf593584b2e5fc7..04857ead595a29b2631a7a5061a7cd0c9268be92 100644 --- a/requirements_dev.in +++ b/requirements_dev.in @@ -12,6 +12,7 @@ numpy pandas pyarrow openapi-spec-validator +hypothesis mockito~=1.2 # Note since 3.8 includes Mock 4.0+. 
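The model and router changes above all follow one pattern: a single pydantic `constr` type (`WellId`, `WellboreId`) shared between the record model and the FastAPI path parameter, so malformed ids are rejected by framework validation with HTTP 422 and the regex is surfaced as a `pattern` in the generated OpenAPI schema, replacing the manual 400 check previously done in the handlers. Below is a minimal, self-contained sketch of that pattern; it assumes pydantic v1 style `constr(regex=...)` and is not the service's actual router (no auth, storage client or context dependency).

```python
from fastapi import FastAPI
from fastapi.testclient import TestClient
from pydantic import constr

# Same regex as the WellId type added in app/model/osdu_model.py, repeated here for illustration only.
WellId = constr(regex=r'^[\w\-\.]+:master-data\-\-Well:[\w\-\.\:\%]+$')

app = FastAPI()


@app.get("/wells/{wellid}")
async def get_well(wellid: WellId):
    # Reaching this point means `wellid` already matched the regex;
    # anything else was answered with a 422 by FastAPI/pydantic validation.
    return {"id": wellid}


if __name__ == "__main__":
    client = TestClient(app)
    assert client.get("/wells/namespace:master-data--Well:1234").status_code == 200
    # no explicit `raise HTTPException(status_code=400, ...)` needed in the handler
    assert client.get("/wells/not-an-osdu-well-id").status_code == 422
```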
diff --git a/spec/generated/openapi.json b/spec/generated/openapi.json index 2d0d00f46701eefb3dcff74b89c3c9d71909b0f4..1189e8ba230779420323947b6e8ad27b620c7d79 100644 --- a/spec/generated/openapi.json +++ b/spec/generated/openapi.json @@ -15057,10 +15057,10 @@ { "acl": { "owners": [ - "data.default.owners@{{datapartitionid}}.{{domain}}.com" + "data.default.owners@{{datapartitionid}}.{{domain}}" ], "viewers": [ - "data.default.viewers@{{datapartitionid}}.{{domain}}.com" + "data.default.viewers@{{datapartitionid}}.{{domain}}" ] }, "data": { @@ -15529,10 +15529,10 @@ { "acl": { "owners": [ - "data.default.owners@{{datapartitionid}}.{{domain}}.com" + "data.default.owners@{{datapartitionid}}.{{domain}}" ], "viewers": [ - "data.default.viewers@{{datapartitionid}}.{{domain}}.com" + "data.default.viewers@{{datapartitionid}}.{{domain}}" ] }, "data": { @@ -16975,7 +16975,7 @@ "FacilityNameAliases": [ { "AliasName": "33-089-00300-00-01", - "AliasNameTypeID": "{{datapartitionid}}:osdu:reference-data--AliasNameType:UniqueIdentifier:" + "AliasNameTypeID": "{{datapartitionid}}:reference-data--AliasNameType:UniqueIdentifier:" } ], "FacilityOperators": [ @@ -16983,7 +16983,7 @@ "FacilityOperatorID": "Francois Vinyes" } ], - "Geocontexts": [ + "GeoContexts": [ { "GeoPoliticalEntityID": "{{datapartitionid}}:osdu:master-data--GeoPoliticalEntity:USA:", "GeoTypeID": "datapartition:osdu:reference-data--GeoPoliticalEntityType:Country:" @@ -17122,6 +17122,7 @@ "name": "wellboreid", "required": true, "schema": { + "pattern": "^[\\w\\-\\.]+:master-data\\-\\-Wellbore:[\\w\\-\\.\\:\\%]+$", "title": "Wellboreid", "type": "string" } @@ -17176,6 +17177,7 @@ "name": "wellboreid", "required": true, "schema": { + "pattern": "^[\\w\\-\\.]+:master-data\\-\\-Wellbore:[\\w\\-\\.\\:\\%]+$", "title": "Wellboreid", "type": "string" } @@ -17239,6 +17241,7 @@ "name": "wellboreid", "required": true, "schema": { + "pattern": "^[\\w\\-\\.]+:master-data\\-\\-Wellbore:[\\w\\-\\.\\:\\%]+$", "title": "Wellboreid", "type": "string" } @@ -17302,6 +17305,7 @@ "name": "wellboreid", "required": true, "schema": { + "pattern": "^[\\w\\-\\.]+:master-data\\-\\-Wellbore:[\\w\\-\\.\\:\\%]+$", "title": "Wellboreid", "type": "string" } @@ -17401,7 +17405,7 @@ "FacilityNameAliases": [ { "AliasName": "33-089-00300-00-01", - "AliasNameTypeID": "{{datapartitionid}}:osdu:reference-data--AliasNameType:UniqueIdentifier:" + "AliasNameTypeID": "{{datapartitionid}}:reference-data--AliasNameType:UniqueIdentifier:" } ], "FacilityOperators": [ @@ -17409,7 +17413,7 @@ "FacilityOperatorID": "Francois Vinyes" } ], - "Geocontexts": [ + "GeoContexts": [ { "GeoPoliticalEntityID": "{{datapartitionid}}:osdu:master-data--GeoPoliticalEntity:USA:", "GeoTypeID": "datapartition:osdu:reference-data--GeoPoliticalEntityType:Country:" @@ -19719,7 +19723,7 @@ "FacilityNameAliases": [ { "AliasName": "20-000-00000-00", - "AliasNameTypeID": "{{datapartitionid}}:osdu:reference-data--AliasNameType:UniqueIdentifier:" + "AliasNameTypeID": "{{datapartitionid}}:reference-data--AliasNameType:UniqueIdentifier:" } ] }, @@ -19794,6 +19798,7 @@ "name": "wellid", "required": true, "schema": { + "pattern": "^[\\w\\-\\.]+:master-data\\-\\-Well:[\\w\\-\\.\\:\\%]+$", "title": "Wellid", "type": "string" } @@ -19848,6 +19853,7 @@ "name": "wellid", "required": true, "schema": { + "pattern": "^[\\w\\-\\.]+:master-data\\-\\-Well:[\\w\\-\\.\\:\\%]+$", "title": "Wellid", "type": "string" } @@ -19911,6 +19917,7 @@ "name": "wellid", "required": true, "schema": { + "pattern": 
"^[\\w\\-\\.]+:master-data\\-\\-Well:[\\w\\-\\.\\:\\%]+$", "title": "Wellid", "type": "string" } @@ -19974,6 +19981,7 @@ "name": "wellid", "required": true, "schema": { + "pattern": "^[\\w\\-\\.]+:master-data\\-\\-Well:[\\w\\-\\.\\:\\%]+$", "title": "Wellid", "type": "string" } diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py index 90975193c12be5fa3fffb4f66e165313671d0bfc..b8cdd697fa1a279a2b675c3883d6d5b44bdaa656 100644 --- a/tests/unit/conftest.py +++ b/tests/unit/conftest.py @@ -22,6 +22,21 @@ from app.utils import Context, DaskClient from fastapi import Header +from .data import ( + well_v2_file_contents, well_v3_file_contents, wellbore_v2_file_contents, wellbore_v3_file_contents, + domain, data_partition, legal_tags, + well_v2_record_list, well_v3_record_list, wellbore_v2_record_list, wellbore_v3_record_list, + well_wks_record, well_wks_mini_record, wellbore_wks_record, wellbore_wks_mini_record +) + +from .fixtures import ( + local_dev_config, + app_initialized_with_testclient, + app_configurable_with_testclient, + mock_storage_client_holding_data +) + + @pytest.fixture(autouse=True) def top_fixture(monkeypatch): """ diff --git a/tests/unit/converter/converter_test.py b/tests/unit/converter/converter_test.py index 77ced9dd92a154f606dd263eed90638773d90379..222bd0b8f10736213bf946f2bf0fb0c3d873174f 100644 --- a/tests/unit/converter/converter_test.py +++ b/tests/unit/converter/converter_test.py @@ -1,5 +1,6 @@ import json import os +from pprint import pprint import pytest from odes_storage.models import Record @@ -112,34 +113,39 @@ def test_record_conversion(ConverterClass, input_record: dict): assert record == res_record -OSDU_TYPES_CONVERTER = [ - ("wellbore_wks.json", Wellbore, WellboreConverter), - ("wellbore_wks_mini.json", Wellbore, WellboreConverter), - ("../../../app/model_examples/wellbore_v2.json", Wellbore, WellboreConverter), - ("well_wks.json", Well, WellConverter), - ("well_wks_mini.json", Well, WellConverter), - ("../../../app/model_examples/well_v2.json", Well, WellConverter), -] +def _conversion(record, object_class, converter_class): -def replace_template(source_obj_str: str) -> str: - source_obj_str = source_obj_str.replace("{{datapartitionid}}", "datapartitionid")\ - .replace("{datapartitionid}", "datapartitionid").replace("{{domain}}", "domain") - return source_obj_str - -@pytest.mark.parametrize("input_file, ObjectClass, ConverterClass", OSDU_TYPES_CONVERTER) -def test_conversion(input_file, ObjectClass, ConverterClass): - dir_path = os.path.dirname(os.path.realpath(__file__)) - with open(os.path.join(dir_path, input_file)) as f: - source_obj_str = replace_template(f.read()) - source_obj_dict = json.loads(source_obj_str) - if isinstance(source_obj_dict, list): - source_obj_dict = source_obj_dict[0] - source_obj: Record = Record.parse_obj(source_obj_dict) - - res: dict = ConverterClass.convert_delfi_to_osdu( - source_obj.dict(by_alias=True, exclude_none=True, exclude_unset=True), + res: dict = converter_class.convert_delfi_to_osdu( + record.dict(by_alias=True, exclude_none=True, exclude_unset=True), context={"namespace": "test_namespace"}) # Uncomment those lines to dump the actual result of the conversion # with open("dumpsresdict.json", 'w') as fp: # json.dump(res, fp, indent=2, default=str) - ObjectClass.validate(res) + try: + object_class.validate(res) + except Exception as exc: + pprint(res) + raise exc + +def test_conversion_well(well_wks_record): + _conversion(well_wks_record, Well, WellConverter) + + +def 
test_conversion_well_mini(well_wks_mini_record): + _conversion(well_wks_mini_record, Well, WellConverter) + +def test_conversion_well_v2(well_v2_record_list): + for well_v2_record in well_v2_record_list: + _conversion(well_v2_record, Well, WellConverter) + + +def test_conversion_wellbore(wellbore_wks_record): + _conversion(wellbore_wks_record, Wellbore, WellboreConverter) + + +def test_conversion_wellbore_mini(wellbore_wks_mini_record): + _conversion(wellbore_wks_mini_record, Wellbore, WellboreConverter) + +def test_conversion_wellbore_v2(wellbore_v2_record_list): + for wellbore_v2_record in wellbore_v2_record_list: + _conversion(wellbore_v2_record, Wellbore, WellboreConverter) diff --git a/tests/unit/data/__init__.py b/tests/unit/data/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..cf5c052b7f088d33ffcdb7345494354d0ed5c069 --- /dev/null +++ b/tests/unit/data/__init__.py @@ -0,0 +1,58 @@ +from typing import List + +import pytest +import os +import json + +from odes_storage.models import Record + +from .model_examples import (well_v2_file_contents, well_v3_file_contents, wellbore_v2_file_contents, wellbore_v3_file_contents, domain, data_partition, legal_tags, + well_v2_record_list, well_v3_record_list, wellbore_v2_record_list, wellbore_v3_record_list) + + +@pytest.fixture(scope="session") +def well_wks_record() -> Record: + with open( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "well_wks.json" + ) + ) as f: + file_content = json.load(f) + + return Record.parse_obj(file_content) + + +@pytest.fixture(scope="session") +def well_wks_mini_record() -> Record: + with open( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "well_wks_mini.json" + ) + ) as f: + file_content = json.load(f) + + return Record.parse_obj(file_content) + + +@pytest.fixture(scope="session") +def wellbore_wks_record() -> Record: + with open( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "wellbore_wks.json" + ) + ) as f: + file_content = json.load(f) + + return Record.parse_obj(file_content) + + +@pytest.fixture(scope="session") +def wellbore_wks_mini_record() -> Record: + with open( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "wellbore_wks_mini.json" + ) + ) as f: + file_content = json.load(f) + + return Record.parse_obj(file_content) diff --git a/tests/unit/data/model_examples.py b/tests/unit/data/model_examples.py new file mode 100644 index 0000000000000000000000000000000000000000..35ffd930f375d7feabfe8d41cc3267985166a617 --- /dev/null +++ b/tests/unit/data/model_examples.py @@ -0,0 +1,125 @@ +import uuid +from typing import List + +import pytest +import os +import json + +from odes_storage.models import Record + +from wdms_client.variables import Variables + + +@pytest.fixture(scope="session") +def well_v2_file_contents() -> str: + + with open( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "../../../app/model_examples/well_v2.json" + ) + ) as f: + + return json.load(f) + + +@pytest.fixture(scope="session") +def well_v3_file_contents() -> str: + + with open( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "../../../app/model_examples/well_v3.json" + ) + ) as f: + return json.load(f) + + +@pytest.fixture(scope="session") +def wellbore_v2_file_contents() -> str: + + with open( + os.path.join( + os.path.dirname(os.path.realpath(__file__)), "../../../app/model_examples/wellbore_v2.json" + ) + ) as f: + return json.load(f) + + +@pytest.fixture(scope="session") +def wellbore_v3_file_contents() -> str: + + with open( +
os.path.join( + os.path.dirname(os.path.realpath(__file__)), "../../../app/model_examples/wellbore_v3.json" + ) + ) as f: + return json.load(f) + + +@pytest.fixture +def domain(): + return 'test-domain.com' + + +@pytest.fixture +def data_partition(): + return 'test-data-partition' + + +@pytest.fixture +def legal_tags(): + return 'test-legal-tag1, test-legal-tag2' + + +@pytest.fixture +def well_v2_record_list(well_v2_file_contents, domain, data_partition, legal_tags) -> List[Record]: + + vars_to_replace = Variables.from_dict({ + "domain": domain, + "datapartitionid": data_partition, + "legaltags": legal_tags, + }) + + return [Record.parse_obj(vars_to_replace.resolve(file_content)) for file_content in well_v2_file_contents] + + +@pytest.fixture +def well_v3_record_list(well_v3_file_contents, domain, data_partition, legal_tags) -> List[Record]: + + vars_to_replace = Variables.from_dict({ + "domain": domain, + "datapartitionid": data_partition, + "legaltags": legal_tags, + # to replace missing data in example + "wellName": "my-example-well", + "wellId": "my-well-v3-example", + }) + + return [Record.parse_obj(vars_to_replace.resolve(file_content)) for file_content in well_v3_file_contents] + + +@pytest.fixture +def wellbore_v2_record_list(wellbore_v2_file_contents, domain, data_partition, legal_tags) -> List[Record]: + + vars_to_replace = Variables.from_dict({ + "domain": domain, + "datapartitionid": data_partition, + "legaltags": legal_tags, + }) + + return [Record.parse_obj(vars_to_replace.resolve(file_content)) for file_content in wellbore_v2_file_contents] + + +@pytest.fixture +def wellbore_v3_record_list(wellbore_v3_file_contents, domain, data_partition, legal_tags) -> List[Record]: + + vars_to_replace = Variables.from_dict({ + "domain": domain, + "datapartitionid": data_partition, + "legaltags": legal_tags, + # to replace missing data in example + "wellboreName": "my-example-wellbore", + "wellboreId": "my-wellbore-v3-example", + "wellId": "my-well-v3-example" + }) + + return [Record.parse_obj(vars_to_replace.resolve(file_content)) for file_content in wellbore_v3_file_contents] diff --git a/tests/unit/converter/well_wks.json b/tests/unit/data/well_wks.json similarity index 100% rename from tests/unit/converter/well_wks.json rename to tests/unit/data/well_wks.json diff --git a/tests/unit/converter/well_wks_mini.json b/tests/unit/data/well_wks_mini.json similarity index 100% rename from tests/unit/converter/well_wks_mini.json rename to tests/unit/data/well_wks_mini.json diff --git a/tests/unit/converter/wellbore_wks.json b/tests/unit/data/wellbore_wks.json similarity index 100% rename from tests/unit/converter/wellbore_wks.json rename to tests/unit/data/wellbore_wks.json diff --git a/tests/unit/converter/wellbore_wks_mini.json b/tests/unit/data/wellbore_wks_mini.json similarity index 100% rename from tests/unit/converter/wellbore_wks_mini.json rename to tests/unit/data/wellbore_wks_mini.json diff --git a/tests/unit/fixtures.py b/tests/unit/fixtures.py new file mode 100644 index 0000000000000000000000000000000000000000..72a31ac6a94a477d630e3df5d25c66ab17d20547 --- /dev/null +++ b/tests/unit/fixtures.py @@ -0,0 +1,214 @@ +import asyncio +import copy +import types +from typing import List + +import httpx +import odes_storage +import pytest +from mock import mock +from mock.mock import AsyncMock, create_autospec +from starlette.testclient import TestClient + +from app.auth.auth import require_opendes_authorized_user +from app.middleware.basic_context_middleware import require_data_partition_id 
+from app.clients import SearchServiceClient, StorageRecordServiceClient, make_storage_record_client +from app.helper.traces import CombinedExporter +from app.injector.app_injector import WithLifeTime +from app.wdms_app import base_app, wdms_app, app_injector + + +@pytest.fixture(scope="module") +def local_dev_config(): + # local import + from app.conf import Config + + # set config to a local dev config (assumption for running unit tests) + Config.dev_mode.value = True + Config.cloud_provider.value = "local" + Config.service_host_search.value = "https://test-endpoint/api/search" + Config.service_host_storage.value = "https://test-endpoint/api/storage" + Config.modules.value = "fluids.routers.fluid_service,trajectory_service.routers.trajectory_service,search.routers.search,common.routers.common" + # This one is necessary as long as we have can_run() in modules dependending on it + Config.environment_name.value = "evd" + + # patching Config in app.conf module, so it is found by other modules + with mock.patch('app.conf') as app_conf: + app_conf.Config = Config + + yield Config + + +@pytest.fixture(scope="module") +def app_initialized_with_testclient(local_dev_config, request): + """ + Fixture providing wdms_app started, along with a test client + """ + global base_app, wdms_app + + # this app, initialized, and as part of a hierarchy of apps + with TestClient( + base_app + ): # TOFIX: currently necessary because base_app and wdms_app are interdependent + with TestClient(wdms_app) as client: + yield wdms_app, client + + +@pytest.fixture +def mock_storage_client_holding_data(local_dev_config): + """ + Fixture mocking the Storage Client, except for a specific record that we want to return when requested. + The data we want the Client to hold and return as the service would normally do is passed as an argument. 
+ + For usage examples, see fixtures_test.py in this directory + """ + + def setup_data_for_mock(data): + template_client = make_storage_record_client( + local_dev_config.service_host_storage + ) + + # Note: we want to be able to modify the mock to handle get_record and get_record_version specifically + mock = create_autospec(spec=template_client, instance=True) + + # override api_client to use an async mock (needed on shutdown when we call api_client.close()) + mock.api_client = AsyncMock(spec_set=template_client.api_client) + + async def mocked_get_record(self, + id: str, + data_partition_id: str = None, + attribute: List[str] = None, + appkey: str = None, + token: str = None) -> odes_storage.models.Record: + # return the latest + return await self.get_record_version(id, None, data_partition_id, appkey, token) + + async def mocked_get_record_version(self, + id: str, + version: int, + data_partition_id: str = None, + appkey: str = None, + token: str = None) -> odes_storage.models.Record: + for d in data: + # CAREFUL: id might be optional in the model (not set on write) + # Also storage seems to have problematic behavior with id ending in ':' + if id is not None and (id == d.id or id + ":" == d.id): + if version is None or version == d.version: # Note: version None means latest + return d + + # if not found, attempt to emulate behavior of the actual client + raise odes_storage.UnexpectedResponse( + status_code=404, + reason_phrase="Item not found", + # not sure what to put here at this time + content="".encode(encoding="utf-8"), + headers=httpx.Headers(), + ) + + # override get_record method on the instance to return sample data + mock.get_record = types.MethodType(mocked_get_record, mock) + mock.get_record_version = types.MethodType(mocked_get_record_version, mock) + + return mock + + return setup_data_for_mock + + +@pytest.fixture +def app_configurable_with_testclient(app_initialized_with_testclient): + """ + Fixture to configure wdms_app after it has been started. + It returns a function to be called from the test to configure the app, + and it will return the configured app, along with its client. + + By default, everything will be mocked with mock.AsyncMock() instances and an authorized opendes user. 
+ + For example usage, check fixtures_test.py + """ + + app, client = app_initialized_with_testclient + + # saving app state for reset later on + original_trace_exporter = app.trace_exporter + original_dependency_overrides = copy.copy(app.dependency_overrides) + + loop = asyncio.get_event_loop() + original_storage_client = loop.run_until_complete(app_injector.get(StorageRecordServiceClient)) + original_search_client = loop.run_until_complete(app_injector.get(SearchServiceClient)) + + # setup safe defaults for tests + default_storage_mock = AsyncMock(spec=StorageRecordServiceClient) + # override api_client to use an async mock (needed on shutdown when we call api_client.close()) + default_storage_mock.api_client = AsyncMock() + + default_search_mock = AsyncMock(spec=SearchServiceClient) + # override api_client to use an async mock (needed on shutdown when we call api_client.close()) + default_search_mock.api_client = AsyncMock() + + def injection_coro_builder(*, return_value): + # because of our app_injector design + async def injection_coro( + *args, **kwargs + ): + print(f"configure {return_value} in app_injector") + return return_value + return injection_coro + + def configure_app( + *, + search_client_mock=default_search_mock, + storage_client_mock=default_storage_mock, + trace_exporter=create_autospec(CombinedExporter, spec_set=True, instance=True), + fake_opendes_authorized_user: bool = True, + fake_data_partition_id: bool = False + ): + """builder generator that output an app mocked by default, and cleanup properly after use. + If None is passed as a mock, then the original implementation is used. + """ + nonlocal app, client + + ## configure app_injector -- needs to be reset after fixture execution ## + if storage_client_mock is not None: + app_injector.register( + StorageRecordServiceClient, injection_coro_builder(return_value=storage_client_mock), + WithLifeTime.Singleton() + ) + + if search_client_mock is not None: + app_injector.register(SearchServiceClient, injection_coro_builder(return_value=search_client_mock), + WithLifeTime.Singleton()) + + ## configure app -- needs to be reset after fixture execution ## + app.trace_exporter = trace_exporter + + async def opendes_authorized_user_mock_depend(): + pass + + app.dependency_overrides[ + require_opendes_authorized_user + ] = opendes_authorized_user_mock_depend if fake_opendes_authorized_user else require_opendes_authorized_user + + async def require_data_partition_id_mock_depend(): + pass + + app.dependency_overrides[ + require_data_partition_id + ] = require_data_partition_id_mock_depend if fake_data_partition_id else require_data_partition_id + + # return the app, ready to be started along with the client + return app, client + + yield configure_app + + # reset app for reuse (we always cleanup without recreating the app - it would be too slow) + app_injector.register( + StorageRecordServiceClient, injection_coro_builder(return_value=original_storage_client), + WithLifeTime.Singleton()) + app_injector.register( + SearchServiceClient, injection_coro_builder(return_value=original_search_client), + WithLifeTime.Singleton()) + + app.trace_exporter = original_trace_exporter + + app.dependency_overrides = original_dependency_overrides + diff --git a/tests/unit/fixtures_test.py b/tests/unit/fixtures_test.py new file mode 100644 index 0000000000000000000000000000000000000000..afb2dc8dce2a575aec9e4f10b81000ce9ed54986 --- /dev/null +++ b/tests/unit/fixtures_test.py @@ -0,0 +1,194 @@ +import asyncio + +from fastapi import Depends +from 
mock.mock import AsyncMock + +from app.clients.search_service_client import get_search_service +from app.clients.storage_service_client import get_storage_record_service +from app.utils import Context, get_ctx + + +def test_mock_storage_client_holding_well_v2_record_data( + mock_storage_client_holding_data, well_v2_record_list +): + """Test the mock_storage_client_holding_data behavior, along with the well_v2_record data itself""" + storage_client = mock_storage_client_holding_data(well_v2_record_list) + + # grab current eventloop if we already have one, otherwise creates it + loop = asyncio.get_event_loop() + + w2ids = [w2.id for w2 in well_v2_record_list] + for w2id in w2ids: + assert ( + loop.run_until_complete( + storage_client.get_record(w2id, "fake_data_partition_id") + ) + == [w for w in well_v2_record_list if w.id == w2id][0] + ) + + +def test_mock_storage_client_holding_well_v3_record_data( + mock_storage_client_holding_data, well_v3_record_list +): + """Test the mock_storage_client_holding_data behavior, along with the well_v2_record data itself""" + storage_client = mock_storage_client_holding_data(well_v3_record_list) + + # grab current eventloop if we already have one, otherwise creates it + loop = asyncio.get_event_loop() + + w3ids = [w3.id for w3 in well_v3_record_list] + for w3id in w3ids: + assert ( + loop.run_until_complete( + storage_client.get_record(w3id, "fake_data_partition_id") + ) + == [w for w in well_v3_record_list if w.id == w3id][0] + ) + + +def test_mock_storage_client_holding_wellbore_v2_record_data( + mock_storage_client_holding_data, wellbore_v2_record_list +): + """Test the mock_storage_client_holding_data behavior, along with the well_v2_record data itself""" + storage_client = mock_storage_client_holding_data(wellbore_v2_record_list) + + # grab current eventloop if we already have one, otherwise creates it + loop = asyncio.get_event_loop() + + w2ids = [w2.id for w2 in wellbore_v2_record_list] + for w2id in w2ids: + assert ( + loop.run_until_complete( + storage_client.get_record(w2id, "fake_data_partition_id") + ) + == [w for w in wellbore_v2_record_list if w.id == w2id][0] + ) + + +def test_mock_storage_client_holding_wellbore_v3_record_data( + mock_storage_client_holding_data, wellbore_v3_record_list +): + """Test the mock_storage_client_holding_data behavior, along with the well_v2_record data itself""" + storage_client = mock_storage_client_holding_data(wellbore_v3_record_list) + + # grab current eventloop if we already have one, otherwise creates it + loop = asyncio.get_event_loop() + + w3ids = [w3.id for w3 in wellbore_v3_record_list] + for w3id in w3ids: + assert ( + loop.run_until_complete( + storage_client.get_record(w3id, "fake_data_partition_id") + ) + == [w for w in wellbore_v3_record_list if w.id == w3id][0] + ) + + +def test_app_configurable_with_and_without_data_partition( + app_configurable_with_testclient, mock_storage_client_holding_data, well_v3_record_list +): + """Test the app configuration""" + storage_client = mock_storage_client_holding_data(well_v3_record_list) + app, client = app_configurable_with_testclient( + storage_client_mock=storage_client, + fake_data_partition_id=False, + ) + + # no partition needed + assert client.get("/about").status_code == 200 + # no partition needed but authentication ok + response = client.get("/version") + assert response + # partition needed for any data retrieval + assert client.post(f"/wells/{well_v3_record_list[0].id}").status_code == 404 + assert client.get(f"/wellbores/123").status_code == 
404 + assert client.get(f"/logsets/123").status_code == 404 + assert client.get(f"/trajectories/123").status_code == 404 + assert client.get(f"/logs/123").status_code == 404 + + app, client = app_configurable_with_testclient( + storage_client_mock=storage_client, + fake_data_partition_id=True + ) + + # no partition needed + assert client.get("/about").status_code == 200 + # no partition needed but authentication ok + assert client.get("/version").status_code == 200 + # partition needed for any data retrieval + assert client.get(f"/wells/{well_v3_record_list[0].id}").status_code == 404 + assert client.get(f"/wellbores/123").status_code == 404 + assert client.get(f"/logsets/123").status_code == 404 + assert client.get(f"/trajectories/123").status_code == 404 + assert client.get(f"/logs/123").status_code == 404 + + +def test_app_configurable_with_unauthorized_client( + app_configurable_with_testclient, +): + """Test the app configuration""" + + app, client = app_configurable_with_testclient( + fake_opendes_authorized_user=False + ) + + # anonymous ok + assert client.get("/about").status_code == 200 + # not authorized + assert client.get("/version").status_code == 403 + + app, client = app_configurable_with_testclient( + fake_opendes_authorized_user=True + ) + + # anonymous ok + assert client.get("/about").status_code == 200 + # authorized + assert client.get("/version").status_code == 200 + + +def test_app_configurable_with_client_and_mocks( + app_configurable_with_testclient, +): + """Test the app configuration""" + + # custom mocks for testing the fixture itself + class StorageClientMock(AsyncMock): + def mock_name(self): + return "MyStorageClientMock" + + storage_client_mock = StorageClientMock() + + class SearchClientMock(AsyncMock): + def mock_name(self): + return "MySearchClientMock" + + search_client_mock = SearchClientMock() + + app, client = app_configurable_with_testclient( + search_client_mock=search_client_mock, + storage_client_mock=storage_client_mock, + ) + + # create a handler that returns the name of the mocks, for validating the app configuration + async def inside_out_handler(ctx: Context = Depends(get_ctx)): + return { + "search": (await get_search_service(ctx)).mock_name(), + "storage": (await get_storage_record_service(ctx)).mock_name(), + } + + try: + # setup the route with the handler + app.router.add_api_route("/inside_out", inside_out_handler) + + # do the request + response = client.get("/inside_out").json() + + assert response["search"] == "MySearchClientMock" + assert response["storage"] == "MyStorageClientMock" + + finally: + # remove the route we added to not mess with other tests + app.router.routes = [r for r in app.routes if r.name != inside_out_handler.__name__] + + diff --git a/tests/unit/middleware/client_test.py b/tests/unit/middleware/client_test.py index 0156c5d36385a3662ca24e96a590fae247fe7688..b22df5a05adf51826eb2cb387a9cd4d5d1349a8d 100755 --- a/tests/unit/middleware/client_test.py +++ b/tests/unit/middleware/client_test.py @@ -49,6 +49,7 @@ async def wdms_app_mocked(): from app.wdms_app import wdms_app, app_injector from app.clients import StorageRecordServiceClient + conf.Config.dev_mode.value = False conf.Config.service_host_search.value = "http://localhost:8888" conf.Config.service_host_storage.value = "http://localhost:9999" diff --git a/tests/unit/middleware/traces_middleware_test.py b/tests/unit/middleware/traces_middleware_test.py index b18fbc5757cabbf9bf91e5ca51f7f499f06b5d59..042d5056f071c457cb03d0c63cb5a0d53d04b2ca 100644 --- 
a/tests/unit/middleware/traces_middleware_test.py +++ b/tests/unit/middleware/traces_middleware_test.py @@ -44,93 +44,70 @@ class ExporterInTest(base_exporter.Exporter): return sd - - -@pytest.fixture -def client(ctx_fixture): - with TestClient(wdms_app) as client: - yield client - - -@pytest.fixture -def client_after_startup(ctx_fixture): - # using base_app client to trigger startup event. - with TestClient(base_app): - - async def build_mock_storage(): - return AsyncMock() - - async def build_mock_search(): - return AsyncMock() - - app_injector.register(StorageRecordServiceClient, build_mock_storage) - app_injector.register(SearchServiceClient, build_mock_search) - - with TestClient(wdms_app) as client: - yield client - - def build_url(path: str): return DDMS_V2_PATH + path -def test_about_call_creates_correlation_id_if_absent(client: TestClient): - - # Initialize traces exporter in app, like it is in app's startup_event - wdms_app.trace_exporter = ExporterInTest() +def test_about_call_creates_correlation_id_if_absent(app_configurable_with_testclient): + app, client_after_startup = app_configurable_with_testclient( + trace_exporter=ExporterInTest(), + fake_opendes_authorized_user=False + ) # no header -> works fine - response = client.get(build_url("/about")) + response = client_after_startup.get(build_url("/about")) assert response.status_code == 200 # one call was exported, with correlation-id - assert len(wdms_app.trace_exporter.exported) == 1 # one call => one export - spandata = wdms_app.trace_exporter.exported[0] + assert len(app.trace_exporter.exported) == 1 # one call => one export + spandata = app.trace_exporter.exported[0] assert "correlation-id" in spandata.attributes.keys() assert spandata.attributes["correlation-id"] is not None -def test_about_call_traces_existing_correlation_id(client: TestClient): - - # Initialize traces exporter in app, like it is in app's startup_event - wdms_app.trace_exporter = ExporterInTest() +def test_about_call_traces_existing_correlation_id(app_configurable_with_testclient): + app, client_after_startup = app_configurable_with_testclient( + trace_exporter=ExporterInTest(), + fake_opendes_authorized_user=False + ) # no header -> works fine - response = client.get( + response = client_after_startup.get( build_url("/about"), headers={"correlation-id": "some correlation id"} ) assert response.status_code == 200 # one call was exported, with correlation-id - assert len(wdms_app.trace_exporter.exported) == 1 # one call => one export - spandata = wdms_app.trace_exporter.exported[0] + assert len(app.trace_exporter.exported) == 1 # one call => one export + spandata = app.trace_exporter.exported[0] assert "correlation-id" in spandata.attributes.keys() assert spandata.attributes["correlation-id"] == "some correlation id" @pytest.mark.parametrize("header_name", ["x-app-id", "data-partition-id"]) -def test_about_call_traces_request_header(header_name, client: TestClient): - - # Initialize traces exporter in app, like it is in app's startup_event - wdms_app.trace_exporter = ExporterInTest() +def test_about_call_traces_request_header(app_configurable_with_testclient, header_name): + app, client_after_startup = app_configurable_with_testclient( + trace_exporter=ExporterInTest(), + fake_opendes_authorized_user=False + ) # no header -> works fine - response = client.get(build_url("/about")) + response = client_after_startup.get(build_url("/about")) assert response.status_code == 200 # one call was exported, without header - assert 
len(wdms_app.trace_exporter.exported) == 1 # one call => one export - spandata = wdms_app.trace_exporter.exported[0] + assert len(app.trace_exporter.exported) == 1 # one call => one export + spandata = app.trace_exporter.exported[0] assert header_name in spandata.attributes.keys() assert spandata.attributes[header_name] is None # with header -> works as well - client.get(build_url("/about"), headers={header_name: "some value"}) + client_after_startup.get(build_url("/about"), headers={header_name: "some value"}) assert response.status_code == 200 # a second call was exported, with header - assert len(wdms_app.trace_exporter.exported) == 2 # one call => one export - spandata = wdms_app.trace_exporter.exported[1] + assert len(app.trace_exporter.exported) == 2 # one call => one export + spandata = app.trace_exporter.exported[1] assert header_name in spandata.attributes.keys() assert spandata.attributes[header_name] == "some value" @@ -150,12 +127,35 @@ def gen_all_routes_request(rtr: Router, prefix: Optional[str] = None): RuntimeError(f"{route} routes retrieval not implemented") -def test_call_trace_url(client_after_startup: TestClient): - # Initialize traces exporter in app, like it is in app's startup_event - client_after_startup.app.trace_exporter = ExporterInTest() +def test_call_trace_url(app_configurable_with_testclient, mock_storage_client_holding_data, well_v2_record_list, well_v3_record_list): + # empty storage client mock required because we use get_record result in route. + storage_client_mock = mock_storage_client_holding_data(data=[]) + + app, client_after_startup = app_configurable_with_testclient( + storage_client_mock=storage_client_mock, + trace_exporter=ExporterInTest(), + fake_opendes_authorized_user=False + ) path_var_rgx = re.compile(r"/{[\w:]*}") + # special id case + well_v2_var_rgx = re.compile(r"/v2/wells/{wellid[:\w]?}") + well_v3_var_rgx = re.compile(r"/v3/wells/{wellid[:\w]?}") + + def path_sub(path): + path_with_id = path + # modify on regex match, or noop + + # special id case -> pick one id from example data + path_with_id = re.sub(well_v2_var_rgx, f"/v2/wells/{well_v2_record_list[0].id}", path_with_id) + path_with_id = re.sub(well_v3_var_rgx, f"/v3/wells/{well_v3_record_list[0].id}", path_with_id) + + # other cases... + path_with_id = re.sub(path_var_rgx, r"/123456", path_with_id) + + return path_with_id + call_count = 0 # startup event has been called (client has been called in a contest), so all routers should be mounted @@ -171,13 +171,12 @@ def test_call_trace_url(client_after_startup: TestClient): ]: continue - # replace variable in path with fake id... 
- fake_path = re.sub(path_var_rgx, r"/123456", path) - print(fake_path) - + path_with_id = path_sub(path) call_count += 1 - client_after_startup.request(method=method, url=fake_path) + # Note : most of these will fail because of authentication -> we do not need to mock complex behaviour + resp = client_after_startup.request(method=method, url=path_with_id) + print(f"{path_with_id} -> {resp.status_code}") # one call was exported assert ( @@ -186,5 +185,5 @@ def test_call_trace_url(client_after_startup: TestClient): spandata = client_after_startup.app.trace_exporter.exported[call_count - 1] # with expected name and route - assert spandata.name == fake_path + assert spandata.name == path_with_id assert spandata.attributes["http.route"] == path diff --git a/tests/unit/routers/ddms_v2/common_ddms_v2_test.py b/tests/unit/routers/ddms_v2/common_ddms_v2_test.py index bed0af2e65c887ae2d683867d8c72a9e6aa1ada2..2dee5871cd97bb7467c2d3a65c9fab5d8e369a6d 100644 --- a/tests/unit/routers/ddms_v2/common_ddms_v2_test.py +++ b/tests/unit/routers/ddms_v2/common_ddms_v2_test.py @@ -13,7 +13,7 @@ # limitations under the License. import json - +import uuid import mock import pytest @@ -50,8 +50,8 @@ tests_parameters = [ data=markerData(md=ValueWithUnit(value=1.0, unitKey='m'), name='name'), id='123456')), ('/ddms/v2/trajectories', trajectory(id='123456', data={})), - ('/ddms/v2/wellbores', wellbore(id='123456', data={})), - ('/ddms/v2/wells', well(id='123456', data={})), + ('/ddms/v2/wellbores', wellbore(id=f'namespace:master-data--Wellbore:{uuid.uuid4()}', data={})), + ('/ddms/v2/wells', well(id=f'namespace:master-data--Well:{uuid.uuid4()}', data={})), ('/ddms/v3/wellbores', Wellbore( id=r"namespace:master-data--Wellbore:c7c421a7-f496-5aef-8093-298c32bfdea9", kind="namespace:osdu:master-data--Wellbore:1.0.0", @@ -132,7 +132,7 @@ tests_parameters_for_recursive = [ ('/ddms/v2/logsets', logset(id='123456', data={})), ('/ddms/v2/dipsets', dipset(id="123456", data={})), ('/ddms/v2/wellbores', wellbore(id='123456', data={})), - ('/ddms/v2/wells', well(id='123456', data={})) + ('/ddms/v2/wells', well(id=f'namespace:master-data--Well:{uuid.uuid4()}', data={})) ] StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) @@ -191,6 +191,7 @@ def test_get_record_success(client, base_url, record_obj): # assert it validates the input object schema record_obj.validate(response.json()) + @pytest.mark.parametrize('base_url, record_obj', tests_errors_422) def test_get_record_422(client, base_url, record_obj): record_id = record_obj.id @@ -314,25 +315,25 @@ def test_delete_recursive_record_with_recursive_true_successful_delete_multiple_ assert moc_storage_delete_record.call_count == 3 -@pytest.mark.parametrize('base_url, sub_entity_list', [ - ('/ddms/v2/logsets', [Entity.LOG]), - ('/ddms/v2/dipsets', [Entity.LOG]), - ('/ddms/v2/wellbores', [Entity.LOGSET, +@pytest.mark.parametrize('url, sub_entity_list', [ + ('/ddms/v2/logsets/123', [Entity.LOG]), + ('/ddms/v2/dipsets/123', [Entity.LOG]), + ('/ddms/v2/wellbores/123', [Entity.LOGSET, Entity.LOG, Entity.MARKER]), - ('/ddms/v2/wells', [Entity.WELLBORE, + (f'/ddms/v2/wells/123', [Entity.WELLBORE, Entity.LOGSET, Entity.LOG, Entity.MARKER, Entity.TRAJECTORY, Entity.DIPSET]) ]) -def test_delete_recursive_check_sub_deleted_type(client, base_url, sub_entity_list): +def test_delete_recursive_check_sub_deleted_type(client, url, sub_entity_list): with mock.patch( 'app.routers.ddms_v2.storage_helper.StorageHelper.delete_recursively', return_value=None ) as 
moc_delete_recursively: - client.delete(f'{base_url}/123', + client.delete(url, headers={'data-partition-id': 'dp'}, params={'recursive': True}) assert set(moc_delete_recursively.call_args.kwargs['entity_list']) == set(sub_entity_list) diff --git a/tests/unit/routers/ddms_v3/common_ddms_v3_test.py b/tests/unit/routers/ddms_v3/common_ddms_v3_test.py index 603378d6b8ad7b525cc53cfcf0920ee75ca49da5..91329d9282bdf6a114f9986e82f772ae20e7149a 100644 --- a/tests/unit/routers/ddms_v3/common_ddms_v3_test.py +++ b/tests/unit/routers/ddms_v3/common_ddms_v3_test.py @@ -148,22 +148,22 @@ getas_parameters = [ ( Wellbore, "/ddms/v3/wellbores", - r"../../converter/wellbore_wks.json", + r"../../data/wellbore_wks.json", "opendes:wellbore:12345", - status.HTTP_400_BAD_REQUEST, + status.HTTP_422_UNPROCESSABLE_ENTITY, ), ( Wellbore, "/ddms/v3/wellbores", - r"../../converter/wellbore_wks.json", + r"../../data/wellbore_wks.json", "opendes:master-data--Wellbore:6f70656e6465733a646f633a3132333435:", status.HTTP_422_UNPROCESSABLE_ENTITY, ), - (Well, "/ddms/v3/wells", r"../../converter/well_wks.json", "opendes:well:12345", status.HTTP_400_BAD_REQUEST), + (Well, "/ddms/v3/wells", r"../../data/well_wks.json", "opendes:well:12345", status.HTTP_422_UNPROCESSABLE_ENTITY), ( Well, "/ddms/v3/wells", - r"../../converter/well_wks.json", + r"../../data/well_wks.json", "opendes:master-data--Well:6f70656e6465733a646f633a3132333435:", status.HTTP_422_UNPROCESSABLE_ENTITY, ), @@ -221,7 +221,7 @@ def test_get_record_incorrect_id(dasked_test_app_with_mocked_core_service, entit f"{base_url}/{record_id}", headers={"data-partition-id": "testing_partition"}, ) - assert response.status_code == status.HTTP_400_BAD_REQUEST + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY @pytest.mark.parametrize("base_url, id, record_obj", tests_parameters) @@ -287,7 +287,14 @@ def validation_test_restricted_record_id( record_id, record_id_to_test, response, ok_response=status.HTTP_200_OK, error_response=status.HTTP_400_BAD_REQUEST ): if record_id != record_id_to_test: - assert response.status_code == error_response + # TMP: special case for well and wellbores since ids are checked by fastapi + if record_id in [ + r"namespace:master-data--Well:c7c421a7-f496-5aef-8093-298c32gtrfd9", + r"namespace:master-data--Wellbore:c7c421a7-f496-5aef-8093-298c32qwer9" + ]: + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + else: + assert response.status_code == error_response else: assert response.status_code == ok_response diff --git a/tests/unit/routers/ddms_v3/well_ddms_v3_test.py b/tests/unit/routers/ddms_v3/well_ddms_v3_test.py new file mode 100644 index 0000000000000000000000000000000000000000..4bd98672b552ff87a9587e4156457075082b0479 --- /dev/null +++ b/tests/unit/routers/ddms_v3/well_ddms_v3_test.py @@ -0,0 +1,64 @@ +from fastapi.testclient import TestClient +import pytest +from odes_storage.models import Record + +from app.model.model_utils import from_record +from app.model.osdu_model import WellId, Well +from app.wdms_app import DDMS_V3_PATH + + +@pytest.mark.parametrize( + "method, url", + [ + # examples of string that are expected to fail because of id not matching regex + ("GET", DDMS_V3_PATH + "/wells/some_random_string"), + ("GET", DDMS_V3_PATH + "/wells/some_random_string/versions"), + ("GET", DDMS_V3_PATH + "/wells/some_random_string/versions/42"), + ("DELETE", DDMS_V3_PATH + "/wells/some_random_string"), + ], +) +def test_routes_refuse_incorrect_record_id( + app_configurable_with_testclient, method, url +): + 
app, client = app_configurable_with_testclient() + + response = client.request(method=method, url=url) + assert response.status_code == 422 + assert "string does not match regex" in response.json()["detail"][0]["msg"] + + +def test_routes_accept_correct_well_v3_id( + app_configurable_with_testclient, + mock_storage_client_holding_data, + well_v3_record_list, + data_partition +): + mock_storage = mock_storage_client_holding_data(data=well_v3_record_list) + app, client = app_configurable_with_testclient(storage_client_mock=mock_storage) + + for wr in well_v3_record_list: + # retrieve the record using its id, via slb.app fluid router + # We indirectly retrieve from the mock storage client. + response = client.get(DDMS_V3_PATH + f"/wells/{wr.id}") + assert response.status_code == 200 + record_data = response.json() + + # assert that we got the expected record by parsing it into the model + retrieved_wr = Well.parse_obj(record_data) + assert retrieved_wr == Well(**wr.dict(exclude_unset=True, by_alias=True)) + + # TODO : check endpoints with version + + # attempt delete and check mock + client.delete(DDMS_V3_PATH + f"/wells/{wr.id}") + mock_storage.delete_record.assert_awaited_once_with( + id=wr.id, data_partition_id=None + ) + + # unknown id test case + import uuid + + fake_id: WellId = f"{data_partition}:master-data--Well:" + str(uuid.uuid4()) + + response = client.get(DDMS_V3_PATH + f"/wells/{fake_id}") + assert response.status_code == 404 diff --git a/tests/unit/routers/ddms_v3/wellbore_ddms_v3_test.py b/tests/unit/routers/ddms_v3/wellbore_ddms_v3_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b20777b33471cbe668705a684f6c1afb5c0aac23 --- /dev/null +++ b/tests/unit/routers/ddms_v3/wellbore_ddms_v3_test.py @@ -0,0 +1,64 @@ +from fastapi.testclient import TestClient +import pytest +from odes_storage.models import Record + +from app.model.model_utils import from_record +from app.model.osdu_model import WellId, Well, Wellbore +from app.wdms_app import DDMS_V3_PATH + + +@pytest.mark.parametrize( + "method, url", + [ + # examples of string that are expected to fail because of id not matching regex + ("GET", DDMS_V3_PATH + "/wellbores/some_random_string"), + ("GET", DDMS_V3_PATH + "/wellbores/some_random_string/versions"), + ("GET", DDMS_V3_PATH + "/wellbores/some_random_string/versions/42"), + ("DELETE", DDMS_V3_PATH + "/wellbores/some_random_string"), + ], +) +def test_routes_refuse_incorrect_record_id( + app_configurable_with_testclient, method, url +): + app, client = app_configurable_with_testclient() + + response = client.request(method=method, url=url) + assert response.status_code == 422 + assert "string does not match regex" in response.json()["detail"][0]["msg"] + + +def test_routes_accept_correct_wellbore_v3_id( + app_configurable_with_testclient, + mock_storage_client_holding_data, + wellbore_v3_record_list, + data_partition +): + mock_storage = mock_storage_client_holding_data(data=wellbore_v3_record_list) + app, client = app_configurable_with_testclient(storage_client_mock=mock_storage) + + for wb in wellbore_v3_record_list: + # retrieve the record using its id, via slb.app fluid router + # We indirectly retrieve from the mock storage client. 
diff --git a/tests/unit/routers/ddms_v3/wellbore_ddms_v3_test.py b/tests/unit/routers/ddms_v3/wellbore_ddms_v3_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..b20777b33471cbe668705a684f6c1afb5c0aac23
--- /dev/null
+++ b/tests/unit/routers/ddms_v3/wellbore_ddms_v3_test.py
@@ -0,0 +1,64 @@
+from fastapi.testclient import TestClient
+import pytest
+from odes_storage.models import Record
+
+from app.model.model_utils import from_record
+from app.model.osdu_model import WellboreId, Wellbore
+from app.wdms_app import DDMS_V3_PATH
+
+
+@pytest.mark.parametrize(
+    "method, url",
+    [
+        # example ids expected to fail because they do not match the Wellbore id regex
+        ("GET", DDMS_V3_PATH + "/wellbores/some_random_string"),
+        ("GET", DDMS_V3_PATH + "/wellbores/some_random_string/versions"),
+        ("GET", DDMS_V3_PATH + "/wellbores/some_random_string/versions/42"),
+        ("DELETE", DDMS_V3_PATH + "/wellbores/some_random_string"),
+    ],
+)
+def test_routes_refuse_incorrect_record_id(
+    app_configurable_with_testclient, method, url
+):
+    app, client = app_configurable_with_testclient()
+
+    response = client.request(method=method, url=url)
+    assert response.status_code == 422
+    assert "string does not match regex" in response.json()["detail"][0]["msg"]
+
+
+def test_routes_accept_correct_wellbore_v3_id(
+    app_configurable_with_testclient,
+    mock_storage_client_holding_data,
+    wellbore_v3_record_list,
+    data_partition
+):
+    mock_storage = mock_storage_client_holding_data(data=wellbore_v3_record_list)
+    app, client = app_configurable_with_testclient(storage_client_mock=mock_storage)
+
+    for wb in wellbore_v3_record_list:
+        # retrieve the record by id through the DDMS v3 wellbore router;
+        # it is actually served by the mocked storage client.
+        response = client.get(DDMS_V3_PATH + f"/wellbores/{wb.id}")
+        assert response.status_code == 200
+        record_data = response.json()
+
+        # assert that we got the expected record by parsing it into the model
+        retrieved_wb = Wellbore.parse_obj(record_data)
+        assert retrieved_wb == Wellbore(**wb.dict(exclude_unset=True, by_alias=True))
+
+        # TODO: also check the versioned endpoints
+
+        # attempt a delete and check the call made to the storage mock
+        client.delete(DDMS_V3_PATH + f"/wellbores/{wb.id}")
+        mock_storage.delete_record.assert_awaited_once_with(
+            id=wb.id, data_partition_id=None
+        )
+
+    # unknown id test case
+    import uuid
+
+    fake_id: WellboreId = f"{data_partition}:master-data--Wellbore:" + str(uuid.uuid4())
+
+    response = client.get(DDMS_V3_PATH + f"/wellbores/{fake_id}")
+    assert response.status_code == 404
diff --git a/tests/unit/spec/api_spec_test.py b/tests/unit/spec/api_spec_test.py
index aad997fd25c62fd99990733c2c53cb2bef9ac345..fe24f1d352fa46b0981fce04a2b3cce2af4d8855 100644
--- a/tests/unit/spec/api_spec_test.py
+++ b/tests/unit/spec/api_spec_test.py
@@ -27,30 +27,21 @@ from tests.unit.test_utils import ctx_fixture, format_routes
 from fastapi.testclient import TestClient
 from openapi_spec_validator import validate_spec
 
-from app.helper import traces
-from app.wdms_app import wdms_app
-
-# Initialize traces exporter in app, like it is in app's startup decorator
-wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms')
-
-# Initialize route filters for documentation
-prefix = os.environ.get('OPENAPI_FILTER_PREFIX')
-tags = os.environ.get('OPENAPI_FILTER_TAGS')
-# Filter and reformat routes only if a prefix is provided
-if prefix:
-    # Make a tags list from the comma separated env var if needed
-    if tags:
-        tags = tags.split(',')
-    format_routes(wdms_app, prefix, tags)
 
 
 @pytest.fixture
-def client(ctx_fixture):
-    yield TestClient(wdms_app)
-    wdms_app.dependency_overrides = {}
+def openapi_json(app_configurable_with_testclient):
+    app, client = app_configurable_with_testclient()
+    # Initialize route filters for documentation
+    prefix = os.environ.get('OPENAPI_FILTER_PREFIX')
+    tags = os.environ.get('OPENAPI_FILTER_TAGS')
+    # Filter and reformat routes only if a prefix is provided
+    if prefix:
+        # Make a tags list from the comma separated env var if needed
+        if tags:
+            tags = tags.split(',')
+        format_routes(app, prefix, tags)
 
-
-@pytest.fixture
-def openapi_json(client):
     # get the openapi spec
     response = client.get("/openapi.json")
     assert response.status_code == 200
diff --git a/wdms_client/tests/__init__.py b/wdms_client/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
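The new variables_test.py below is property-based: a dictionary of variables is drawn first, and flatmap then feeds it into a composite strategy that produces (replace_dict, original, expected) triples. A stripped-down illustration of that draw-then-flatmap pattern, with purely illustrative names (not project code):

    from hypothesis import given
    import hypothesis.strategies as st

    @st.composite
    def dict_and_one_key(draw, d):
        # the dict is already drawn by the outer strategy; only one of its keys is drawn here
        return d, draw(st.sampled_from(sorted(d)))

    @given(st.dictionaries(st.text(min_size=1), st.integers(), min_size=1).flatmap(dict_and_one_key))
    def test_drawn_key_belongs_to_dict(dict_and_key):
        d, key = dict_and_key
        assert key in d

The same shape is used below, with the composite strategy building both the string containing {{placeholders}} and its expected resolution from the drawn dictionary.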
diff --git a/wdms_client/tests/variables_test.py b/wdms_client/tests/variables_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..a632fe7ae453506a51614f471bd4c349a08b5d7c
--- /dev/null
+++ b/wdms_client/tests/variables_test.py
@@ -0,0 +1,134 @@
+import functools
+from string import printable
+
+from numpy import nan
+
+from ..variables import Variables
+
+from hypothesis import given, settings, Verbosity
+import hypothesis.strategies as st
+
+
+@st.composite
+def generate_original_and_expected(
+    draw,
+    replace_dict: dict,
+    # default to the possible JSON scalar values,
+    # preventing nan generation as it would fail the equality test: nan != nan
+    elem_strat=st.none() | st.booleans() | st.floats(allow_nan=False) | st.text(printable),
+):
+    # draw a candidate element (not necessarily a string)
+    original_elem = draw(elem_strat)
+
+    # replacement only applies if the drawn element is a string
+    if isinstance(original_elem, str):
+        # pick a list of indexes at which placeholders will be inserted
+        replace_indexes = draw(
+            st.lists(
+                elements=st.integers(min_value=0, max_value=len(original_elem)),
+                max_size=len(replace_dict),
+            )
+        )
+
+        # insert starting from the end of the string so earlier indexes are not shifted.
+        # We do need two lists to avoid mutating the same one.
+        string_to_resolve = list(original_elem)
+        string_resolved = list(original_elem)
+        for di, si in enumerate(sorted(replace_indexes, reverse=True)):
+            k = list(replace_dict.keys())[di]  # Note: dicts preserve insertion order since Python 3.7
+
+            # build the string to resolve and its already-resolved counterpart
+            string_to_resolve.insert(si, "{{" + k + "}}")
+            string_resolved.insert(si, replace_dict[k])
+
+        # return the replacement dict along with the joined strings
+        return replace_dict, "".join(string_to_resolve), "".join(string_resolved)
+    else:
+        return replace_dict, original_elem, original_elem  # no modification if it is not a string
+
+
+@given(
+    replace_original_expected=st.dictionaries(
+        keys=st.text(printable, min_size=1), values=st.text(printable)
+    ).flatmap(generate_original_and_expected),
+)
+# @settings(verbosity=Verbosity.verbose)
+def test_variables_resolve_in_string(replace_original_expected):
+
+    # unpack the replacement dict together with the original and expected values
+    replace_dict, original, expected = replace_original_expected
+
+    # build the Variables instance
+    var_values = Variables.from_dict(replace_dict)
+
+    # check the resolver behaves as we expect
+    assert var_values.resolve(original) == expected
+
+
+def generate_collections_of_original_and_expected(
+    replace_dict: dict,
+    elem_strat=st.none() | st.booleans() | st.floats(allow_nan=False) | st.text(printable),
+    min_collec_size=1
+) -> st.SearchStrategy:
+
+    collec_strat = st.recursive(
+        generate_original_and_expected(
+            replace_dict=replace_dict, elem_strat=elem_strat
+        ),
+        lambda strat: st.one_of(
+            # recurse into lists and rebuild the (dict, original, expected) tuple
+            st.lists(elements=strat, min_size=min_collec_size).map(
+                lambda collec: (
+                    replace_dict,
+                    [c[1] for c in collec],
+                    [c[2] for c in collec])
+            ),
+            # recurse into dicts and rebuild the (dict, original, expected) tuple
+            st.dictionaries(keys=st.text(printable, min_size=1), values=strat, min_size=min_collec_size).map(
+                lambda collec: (
+                    replace_dict,
+                    {k: v[1] for k, v in collec.items()},
+                    {k: v[2] for k, v in collec.items()},
+                )
+            ),
+        ),
+        max_leaves=5,
+    )
+
+    return collec_strat
+
+
+@given(
+    replace_original_expected=st.dictionaries(
+        keys=st.text(printable, min_size=1), values=st.text(printable)
+    ).flatmap(functools.partial(
+        generate_collections_of_original_and_expected,
+        elem_strat=st.text(printable)  # to test only relevant use cases
+    )),
+)
+# @settings(verbosity=Verbosity.verbose)
+def test_variables_resolve_in_collections_of_string(replace_original_expected):
+    # unpack the replacement dict together with the original and expected values
+    replace_dict, original, expected = replace_original_expected
+
+    # build the Variables instance
+    var_values = Variables.from_dict(replace_dict)
+
+    assert var_values.resolve(original) == expected
+
+
+if __name__ == "__main__":
+
+    replace_dict, original, expected = st.dictionaries(
+        keys=st.text(printable, min_size=1), values=st.text(printable),
+        min_size=1  # to check only relevant use cases
+    ).flatmap(
+        # min_size and min_collec_size keep the printed example non-trivial
+        functools.partial(generate_collections_of_original_and_expected,
+                          elem_strat=st.text(printable),  # to check only relevant use cases
+                          min_collec_size=5)
+    ).example()
+
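The collection test above builds nested inputs with st.recursive, wrapping the scalar strategy into lists and dictionaries of arbitrary depth. A minimal standalone example of that building block (illustrative only, not project code):

    import hypothesis.strategies as st

    # JSON-like values: scalars at the leaves, lists/dicts as the recursive step
    json_like = st.recursive(
        st.none() | st.booleans() | st.floats(allow_nan=False) | st.text(),
        lambda children: st.lists(children) | st.dictionaries(st.text(min_size=1), children),
        max_leaves=5,
    )

    print(json_like.example())  # e.g. {'a': [None, True], 'b': 0.0}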
print(f"replacement: {replace_dict}") + print(f"original: {original}") + print(f"expected: {expected}") diff --git a/wdms_client/variables.py b/wdms_client/variables.py index a6fba087ccd11178e41ae4fdbfb711fe8b4c93b9..f79f5169ab16a0d12daa22de1314ff61485ea94c 100644 --- a/wdms_client/variables.py +++ b/wdms_client/variables.py @@ -11,8 +11,8 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. - -from typing import Optional +import re +from typing import Optional, Pattern from contextlib import contextmanager import json @@ -48,7 +48,8 @@ class Variables: def from_dict(cls, variables_dict): inst = cls() for k, v in variables_dict.items(): - inst.set(k, v) + if len(k) > 0: # avoiding empty string issues + inst.set(k, v) return inst def update(self, **kwargs): @@ -68,34 +69,30 @@ class Variables: inst._variables[var_data['key']] = var_data['value'] return inst - def resolve(self, d): + def resolve(self, d, regex: Optional[Pattern] = None): + + if regex is None: + varmatches = r'|'.join(re.escape(k) for k in self._variables.keys()) + regex = re.compile(r'{{[\s]*(?P' + varmatches + r')[\s]*}}') + if isinstance(d, dict): - return {k: self.resolve(v) for k, v in d.items()} + return {k: self.resolve(v, regex=regex) for k, v in d.items()} + + if isinstance(d, list): + return [self.resolve(e, regex=regex) for e in d] if isinstance(d, str): - return self._resolve_value(d) + resolved = list(d) + # find all matches (reversely sorted by start index) and replace with corresponding value + for matched in sorted(re.finditer(regex, d), key=lambda m: m.start(), reverse=True): + # Ref : https://docs.python.org/3/library/re.html#match-objects + key = matched.groupdict()["var_key"] + # put in resolved in the proper slice + resolved[matched.start():matched.end()] = self._variables[key] + return ''.join(resolved) return d - def _resolve_value(self, value: Optional[str]) -> Optional[str]: - if not value or not isinstance(value, str): - return value - - idx = value.find('{{', 0) - while idx >= 0: - idx_end = value.find('}}', idx) - if idx_end > idx + 2: - nested_var = value[idx+2: idx_end] - if nested_var in self: - nested_value = self.get(nested_var) - if isinstance(nested_value, dict): - nested_value = json.dumps(nested_value, indent=0) - value = value.replace('{{' + str(nested_var) + '}}', str(nested_value)) - idx = 0 - - idx = value.find('{{', idx + 2) - return value - def get(self, key: str, default=None) -> Optional: if key not in self._variables: return self.resolve(default)