Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • osdu/platform/domain-data-mgmt-services/wellbore/wellbore-domain-services
  • Vkamani/vkamani-wellbore-domain-services
  • Yan_Sushchynski/wellbore-domain-services-comm-impl
3 results
Show changes
Commits on Source (104)
Showing
with 21316 additions and 19783 deletions
from .welllog_consistency import welllog_consistency_check, DuplicatedCurveIdException, ReferenceCurveIdNotFoundException
from app.model.osdu_model import WellLog110 as WellLog
class DuplicatedCurveIdException(RuntimeError):
    """Raised if the CurveID values of a WellLog's curves are not all unique."""
    pass


class ReferenceCurveIdNotFoundException(RuntimeError):
    """Raised when no curve has a CurveID equal to the WellLog's ReferenceCurveID."""
    pass


def welllog_consistency_check(wl: "WellLog"):
    """Check that a WellLog record is internally consistent.

    Two rules are enforced:
      * every curve in ``data.Curves`` must have a unique ``CurveID``;
      * if ``data.ReferenceCurveID`` is set, one of the curves must carry
        that id.

    Args:
        wl ("WellLog"): wellLog object to be verified (forward reference so
            the module stays importable without the model import).

    Returns:
        None. A WellLog without ``data`` is considered consistent.

    Raises:
        DuplicatedCurveIdException: two or more curves share the same CurveID.
        ReferenceCurveIdNotFoundException: no curve's CurveID equals the
            ReferenceCurveID value.
    """
    if not wl.data:
        return

    curve_ids = set()
    # Check all curve ids are unique. An explicit loop replaces the previous
    # side-effecting generator expression: it fails fast on the first
    # duplicate while collecting the ids seen so far.
    if wl.data.Curves:
        for curve in wl.data.Curves:
            if curve.CurveID in curve_ids:
                raise DuplicatedCurveIdException()
            curve_ids.add(curve.CurveID)

    # Check there is a curve with curveID == referenceCurveID
    if wl.data.ReferenceCurveID and wl.data.ReferenceCurveID not in curve_ids:
        raise ReferenceCurveIdNotFoundException()
...@@ -31,46 +31,46 @@ class Tags(BaseModel): ...@@ -31,46 +31,46 @@ class Tags(BaseModel):
__root__: str __root__: str
class LogServiceDateInterval(DDMSBaseModel): class AvailableMarkerProperty(DDMSBaseModel):
"""
An interval built from two nested values : StartDate and EndDate. It applies to the whole log services and may apply to composite logs as [start of the first run job] and [end of the last run job]Log Service Date
"""
StartDate: Optional[datetime] = None
EndDate: Optional[datetime] = None
class AvailableTrajectoryStationProperty(DDMSBaseModel):
""" """
A set of properties describing a trajectory station property which is available for this instance of a WellboreTrajectory. A set of properties describing a marker property which is available for this instance of a WellboreMarkerSet.
""" """
TrajectoryStationPropertyTypeID: Optional[ MarkerPropertyTypeID: Optional[
constr( constr(
regex=r'^[\w\-\.]+:reference-data\-\-TrajectoryStationPropertyType:[\w\-\.\:\%]+:[0-9]*$' regex=r'^[\w\-\.]+:reference-data\-\-MarkerPropertyType:[\w\-\.\:\%]+:[0-9]*$'
) )
] = Field( ] = Field(
None, None,
description='The reference to a trajectory station property type - of if interpreted as channels, the curve or channel name type, identifying e.g. MD, Inclination, Azimuth. This is a relationship to a reference-data--TrajectoryStationPropertyType record id.', description="The reference to a marker property type - or if interpreted as CSV columns, the 'well-known column type. It is a relationship to a reference-data--MarkerPropertyType record id.",
example='partition-id:reference-data--TrajectoryStationPropertyType:AzimuthTN:', example='partition-id:reference-data--MarkerPropertyType:MissingThickness:',
title='Trajectory Station Property Type ID', title='Marker Property Type ID',
) )
StationPropertyUnitID: Optional[ MarkerPropertyUnitID: Optional[
constr(regex=r'^[\w\-\.]+:reference-data\-\-UnitOfMeasure:[\w\-\.\:\%]+:[0-9]*$') constr(regex=r'^[\w\-\.]+:reference-data\-\-UnitOfMeasure:[\w\-\.\:\%]+:[0-9]*$')
] = Field( ] = Field(
None, None,
description='Unit of Measure for the station properties of type TrajectoryStationPropertyType.', description='Unit of Measure for the marker properties of type MarkerPropertyType.',
example='partition-id:reference-data--UnitOfMeasure:dega:', example='partition-id:reference-data--UnitOfMeasure:ft:',
title='Station Property Unit ID', title='Marker Property Unit ID',
) )
Name: Optional[str] = Field( Name: Optional[str] = Field(
None, None,
description='The name of the curve (e.g. column in a CSV document) as originally found. If absent The name of the TrajectoryCurveType is intended to be used.', description='The name of the marker property (e.g. column in a CSV document) as originally found. If absent The name of the MarkerPropertyType is intended to be used.',
example='AzimuthTN', example='MissingThickness',
title='Name', title='Name',
) )
class LogServiceDateInterval(DDMSBaseModel):
"""
An interval built from two nested values : StartDate and EndDate. It applies to the whole log services and may apply to composite logs as [start of the first run job] and [end of the last run job]Log Service Date
"""
StartDate: Optional[datetime] = None
EndDate: Optional[datetime] = None
class Owner(DDMSBaseModel): class Owner(DDMSBaseModel):
__root__: constr( __root__: constr(
regex=r'^[a-zA-Z0-9_+&*-]+(?:\.[a-zA-Z0-9_+&*-]+)*@(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,7}$' regex=r'^[a-zA-Z0-9_+&*-]+(?:\.[a-zA-Z0-9_+&*-]+)*@(?:[a-zA-Z0-9-]+\.)+[a-zA-Z]{2,7}$'
...@@ -1150,7 +1150,7 @@ class Curve110(DDMSBaseModel): ...@@ -1150,7 +1150,7 @@ class Curve110(DDMSBaseModel):
NullValue: Optional[bool] = Field( NullValue: Optional[bool] = Field(
None, description='Indicates that there is no measurement within the curve' None, description='Indicates that there is no measurement within the curve'
) )
DepthCoding: Optional[constr(regex='^REGULAR|DISCRETE$')] = Field( DepthCoding: Optional[constr(regex=r'^REGULAR|DISCRETE$')] = Field(
None, None,
description='DEPRECATED: Replaced by boolean data.IsRegular. The Coding of the depth.', description='DEPRECATED: Replaced by boolean data.IsRegular. The Coding of the depth.',
) )
...@@ -1166,34 +1166,34 @@ class Curve110(DDMSBaseModel): ...@@ -1166,34 +1166,34 @@ class Curve110(DDMSBaseModel):
description='DEPRECATED: populate data.SamplingStart or data.SamplingStop instead, for informational purposes data.BottomMeasuredDepth. SamplingStart and SamplingStop support other domains than depth.', description='DEPRECATED: populate data.SamplingStart or data.SamplingStop instead, for informational purposes data.BottomMeasuredDepth. SamplingStart and SamplingStop support other domains than depth.',
) )
DepthUnit: Optional[ DepthUnit: Optional[
constr(regex='^[\w\-\.]+:reference-data\-\-UnitOfMeasure:[\w\-\.\:\%]+:[0-9]*$') constr(regex=r'^[\w\-\.]+:reference-data\-\-UnitOfMeasure:[\w\-\.\:\%]+:[0-9]*$')
] = Field( ] = Field(
None, None,
description='DEPRECATED: use the meta[] to specify the sampling UnitOfMeasure (AbstractMetaItem of kind "Unit"; propertyNames["TopMeasuredDepth", "BottomMeasuredDepth"]). Originally: Unit of Measure for Top and Base depth.', description='DEPRECATED: use the meta[] to specify the sampling UnitOfMeasure (AbstractMetaItem of kind "Unit"; propertyNames["TopMeasuredDepth", "BottomMeasuredDepth"]). Originally: Unit of Measure for Top and Base depth.',
) )
CurveUnit: Optional[ CurveUnit: Optional[
constr(regex='^[\w\-\.]+:reference-data\-\-UnitOfMeasure:[\w\-\.\:\%]+:[0-9]*$') constr(regex=r'^[\w\-\.]+:reference-data\-\-UnitOfMeasure:[\w\-\.\:\%]+:[0-9]*$')
] = Field(None, description='Unit of Measure for the Log Curve') ] = Field(None, description='Unit of Measure for the Log Curve')
Mnemonic: Optional[str] = Field( Mnemonic: Optional[str] = Field(
None, None,
description='The Mnemonic of the Log Curve is the value as received either from Raw Providers or from Internal Processing team', description='The Mnemonic of the Log Curve is the value as received either from Raw Providers or from Internal Processing team',
) )
LogCurveTypeID: Optional[ LogCurveTypeID: Optional[
constr(regex='^[\w\-\.]+:reference-data\-\-LogCurveType:[\w\-\.\:\%]+:[0-9]*$') constr(regex=r'^[\w\-\.]+:reference-data\-\-LogCurveType:[\w\-\.\:\%]+:[0-9]*$')
] = Field( ] = Field(
None, None,
description='The related record id of the Log Curve Type - which is the standard mnemonic chosen by the company - OSDU provides an initial list', description='The related record id of the Log Curve Type - which is the standard mnemonic chosen by the company - OSDU provides an initial list',
) )
LogCurveBusinessValueID: Optional[ LogCurveBusinessValueID: Optional[
constr( constr(
regex='^[\w\-\.]+:reference-data\-\-LogCurveBusinessValue:[\w\-\.\:\%]+:[0-9]*$' regex=r'^[\w\-\.]+:reference-data\-\-LogCurveBusinessValue:[\w\-\.\:\%]+:[0-9]*$'
) )
] = Field( ] = Field(
None, description='The related record id of the Log Curve Business Value Type.' None, description='The related record id of the Log Curve Business Value Type.'
) )
LogCurveMainFamilyID: Optional[ LogCurveMainFamilyID: Optional[
constr( constr(
regex='^[\w\-\.]+:reference-data\-\-LogCurveMainFamily:[\w\-\.\:\%]+:[0-9]*$' regex=r'^[\w\-\.]+:reference-data\-\-LogCurveMainFamily:[\w\-\.\:\%]+:[0-9]*$'
) )
] = Field( ] = Field(
None, None,
...@@ -1201,7 +1201,7 @@ class Curve110(DDMSBaseModel): ...@@ -1201,7 +1201,7 @@ class Curve110(DDMSBaseModel):
) )
LogCurveFamilyID: Optional[ LogCurveFamilyID: Optional[
constr( constr(
regex='^[\w\-\.]+:reference-data\-\-LogCurveFamily:[\w\-\.\:\%]+:[0-9]*$' regex=r'^[\w\-\.]+:reference-data\-\-LogCurveFamily:[\w\-\.\:\%]+:[0-9]*$'
) )
] = Field( ] = Field(
None, None,
...@@ -1618,7 +1618,7 @@ class WellLogData110( ...@@ -1618,7 +1618,7 @@ class WellLogData110(
None, description='Type of mud at time of logging (oil, water based,...)' None, description='Type of mud at time of logging (oil, water based,...)'
) )
HoleTypeLogging: Optional[ HoleTypeLogging: Optional[
constr(regex='^OPENHOLE|CASEDHOLE|CEMENTEDHOLE$') constr(regex=r'^OPENHOLE|CASEDHOLE|CEMENTEDHOLE$')
] = Field( ] = Field(
None, None,
description='Description of the hole related type of logging - POSSIBLE VALUE : OpenHole / CasedHole / CementedHole', description='Description of the hole related type of logging - POSSIBLE VALUE : OpenHole / CasedHole / CementedHole',
...@@ -2343,6 +2343,61 @@ class Marker(DDMSBaseModel): ...@@ -2343,6 +2343,61 @@ class Marker(DDMSBaseModel):
GeologicalAge: Optional[str] = Field(None, description='Associated geological age') GeologicalAge: Optional[str] = Field(None, description='Associated geological age')
class Marker110(DDMSBaseModel):
    """Meta data of a single wellbore marker observation.

    Describes one marker pick: its name, depths, interpretation details,
    type/feature references and dip information. Presumably the v1.1.0
    revision of the marker schema (the ``110`` suffix follows the file's
    versioned-model naming) — confirm against the published OSDU schema.
    """

    MarkerName: Optional[str] = Field(None, description='Name of the Marker')
    MarkerMeasuredDepth: Optional[float] = Field(
        None, description='The depth at which the Marker was noted.'
    )
    MarkerSubSeaVerticalDepth: Optional[float] = Field(
        None,
        description="The Marker's TVD converted to a Sub-Sea Vertical depth, i.e., below Mean Sea Level. Note that TVD values above MSL are negative. This is the same as true vertical depth referenced to the vertical CRS MSL depth.",
    )
    MarkerDate: Optional[datetime] = Field(
        None,
        description='Timestamp of the date and time when the when the Marker was interpreted.',
    )
    MarkerObservationNumber: Optional[float] = Field(
        None,
        description='Any observation number that distinguishes a Marker observation from others with same Marker name, date.',
    )
    MarkerInterpreter: Optional[str] = Field(
        None,
        description='The name of the Marker interpreter (could be a person or vendor).',
    )
    # Reference-data relationship ids below are validated against the
    # '{partition}:reference-data--<Type>:{name}:{version}' record-id pattern.
    MarkerTypeID: Optional[
        constr(regex=r'^[\w\-\.]+:reference-data\-\-MarkerType:[\w\-\.\:\%]+:[0-9]*$')
    ] = Field(
        None,
        description='Marker Type Reference Type. Possible values - Biostratigraphy, Lithostratigraphy, seismic, depth of well, sequence, flow unit',
    )
    FeatureTypeID: Optional[
        constr(regex=r'^[\w\-\.]+:reference-data\-\-FeatureType:[\w\-\.\:\%]+:[0-9]*$')
    ] = Field(
        None,
        description='Feature Type Reference Type. Possible values - Base, top, fault, salt, reef, sea floor',
    )
    FeatureName: Optional[str] = Field(
        None, description='Name of the feature the marker is characterizing'
    )
    PositiveVerticalDelta: Optional[float] = Field(
        None,
        description='The distance vertically above the Marker position that marks the limit of the high confidence range for the Marker pick.',
    )
    NegativeVerticalDelta: Optional[float] = Field(
        None,
        description='The distance vertically below the Marker position that marks the limit of the high confidence range for the Marker pick.',
    )
    SurfaceDipAngle: Optional[float] = Field(
        None, description='Dip angle for the Wellbore Marker.'
    )
    SurfaceDipAzimuth: Optional[float] = Field(
        None, description='Dip azimuth for the Wellbore Marker.'
    )
    # NOTE(review): 'Missing' has no description in the source schema here;
    # semantics unclear from this file — confirm against the OSDU schema.
    Missing: Optional[str] = None
    GeologicalAge: Optional[str] = Field(None, description='Associated geological age')
class WellboreMarkerSetData( class WellboreMarkerSetData(
AbstractCommonResources100, AbstractCommonResources100,
AbstractWPCGroupType100, AbstractWPCGroupType100,
...@@ -2361,6 +2416,32 @@ class WellboreMarkerSetData( ...@@ -2361,6 +2416,32 @@ class WellboreMarkerSetData(
ExtensionProperties: Optional[Dict[str, Any]] = None ExtensionProperties: Optional[Dict[str, Any]] = None
class WellboreMarkerSetData110(
    AbstractCommonResources100, AbstractWPCGroupType100, AbstractWorkProductComponent100
):
    """The ``data`` payload of a WellboreMarkerSet record (110 variant).

    Ties the marker set to its wellbore, describes which marker properties
    are available, and carries the array of marker meta data.
    """

    # Relationship to the owning master-data--Wellbore record.
    WellboreID: Optional[
        constr(regex=r'^[\w\-\.]+:master-data\-\-Wellbore:[\w\-\.\:\%]+:[0-9]*$')
    ] = Field(
        None,
        description='The Wellbore ID, to which the markers in this set belong.',
        title='Wellbore ID',
    )
    VerticalMeasurement: Optional[AbstractFacilityVerticalMeasurement100] = Field(
        None,
        description='References an entry in the Vertical Measurement array for the Wellbore identified by WellboreID, which defines the vertical reference datum for all marker measured depths of the Wellbore Marker Set Markers array.',
        title='Vertical Measurement',
    )
    AvailableMarkerProperties: Optional[List[AvailableMarkerProperty]] = Field(
        None,
        description='The array of MarkerProperty definitions describing the available properties for this instance of WellboreMarkerSet.',
        title='Available Marker Properties',
    )
    Markers: Optional[List[Marker110]] = Field(
        None, description='The array of marker meta data in this set.', title='Markers'
    )
    # Free-form extension bag; no schema is imposed on its contents here.
    ExtensionProperties: Optional[Dict[str, Any]] = None
class WellboreMarkerSet(DDMSBaseModel): class WellboreMarkerSet(DDMSBaseModel):
""" """
Wellbore Markers identify the depth in a wellbore, measured below a reference elevation, at which a person or an automated process identifies a noteworthy observation, which is usually a change in the rock that intersects that wellbore. Formation Marker data includes attributes/properties that put these depths in context. Formation Markers are sometimes known as picks or formation tops. Wellbore Markers identify the depth in a wellbore, measured below a reference elevation, at which a person or an automated process identifies a noteworthy observation, which is usually a change in the rock that intersects that wellbore. Formation Marker data includes attributes/properties that put these depths in context. Formation Markers are sometimes known as picks or formation tops.
...@@ -2439,3 +2520,83 @@ class WellboreMarkerSet(DDMSBaseModel): ...@@ -2439,3 +2520,83 @@ class WellboreMarkerSet(DDMSBaseModel):
title='Frame of Reference Meta Data', title='Frame of Reference Meta Data',
) )
data: Optional[WellboreMarkerSetData] = None data: Optional[WellboreMarkerSetData] = None
class WellboreMarkerSet110(DDMSBaseModel):
    """
    Wellbore Markers identify the depth in a wellbore, measured below a reference elevation, at which a person or an automated process identifies a noteworthy observation, which is usually a change in the rock that intersects that wellbore. Formation Marker data includes attributes/properties that put these depths in context. Formation Markers are sometimes known as picks or formation tops.
    """

    id: Optional[
        constr(
            regex=r'^[\w\-\.]+:work-product-component\-\-WellboreMarkerSet:[\w\-\.\:\%]+$'
        )
    ] = Field(
        None,
        description='Previously called ResourceID or SRN which identifies this OSDU resource object without version.',
        example='namespace:work-product-component--WellboreMarkerSet:d5303b79-7904-5bfe-9c44-9a3ff41b6d6c',
        title='Entity ID',
    )
    # FIX: the '.' separators of the semantic version were previously
    # unescaped regex metacharacters (matching any character, so e.g.
    # '1x2y3' was accepted); they now match a literal dot as intended.
    kind: constr(regex=r'^[\w\-\.]+:[\w\-\.]+:[\w\-\.]+:[0-9]+\.[0-9]+\.[0-9]+$') = Field(
        ...,
        description='The schema identification for the OSDU resource object following the pattern {Namespace}:{Source}:{Type}:{VersionMajor}.{VersionMinor}.{VersionPatch}. The versioning scheme follows the semantic versioning, https://semver.org/.',
        example='osdu:wks:work-product-component--WellboreMarkerSet:1.1.0',
        title='Entity Kind',
    )
    version: Optional[int] = Field(
        None,
        description='The version number of this OSDU resource; set by the framework.',
        example=1562066009929332,
        title='Version Number',
    )
    acl: AbstractAccessControlList100 = Field(
        ...,
        description='The access control tags associated with this entity.',
        title='Access Control List',
    )
    legal: AbstractLegalTags100 = Field(
        ...,
        description="The entity's legal tags and compliance status. The actual contents associated with the legal tags is managed by the Compliance Service.",
        title='Legal Tags',
    )
    tags: Optional[Dict[str, Tags]] = Field(
        None,
        description='A generic dictionary of string keys mapping to string value. Only strings are permitted as keys and values.',
        example={'NameOfKey': 'String value'},
        title='Tag Dictionary',
    )
    createTime: Optional[datetime] = Field(
        None,
        description='Timestamp of the time at which initial version of this OSDU resource object was created. Set by the System. The value is a combined date-time string in ISO-8601 given in UTC.',
        example='2020-12-16T11:46:20.163Z',
        title='Resource Object Creation DateTime',
    )
    createUser: Optional[str] = Field(
        None,
        description='The user reference, which created the first version of this resource object. Set by the System.',
        example='some-user@some-company-cloud.com',
        title='Resource Object Creation User Reference',
    )
    modifyTime: Optional[datetime] = Field(
        None,
        description='Timestamp of the time at which this version of the OSDU resource object was created. Set by the System. The value is a combined date-time string in ISO-8601 given in UTC.',
        example='2020-12-16T11:52:24.477Z',
        title='Resource Object Version Creation DateTime',
    )
    modifyUser: Optional[str] = Field(
        None,
        description='The user reference, which created this version of this resource object. Set by the System.',
        example='some-user@some-company-cloud.com',
        title='Resource Object Version Creation User Reference',
    )
    ancestry: Optional[AbstractLegalParentList100] = Field(
        None,
        description='The links to data, which constitute the inputs.',
        title='Ancestry',
    )
    meta: Optional[List[Any]] = Field(
        None,
        description='The Frame of Reference meta data section linking the named properties to self-contained definitions.',
        title='Frame of Reference Meta Data',
    )
    data: Optional[WellboreMarkerSetData110] = None
# Copyright 2021 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import List
from fastapi import APIRouter, Depends, Response, status
from app.clients.storage_service_client import get_storage_record_service
from ..common_parameters import REQUIRED_ROLES_WRITE
from app.utils import Context
from app.utils import get_ctx
router = APIRouter()
@router.post(
    "/records/delete",
    summary="Delete records. The API performs a logical deletion of the given records. ",
    description="{}".format(REQUIRED_ROLES_WRITE),
    operation_id="post_del_multiple_osdu_records",
    status_code=status.HTTP_204_NO_CONTENT,
    response_class=Response,
    responses={
        status.HTTP_404_NOT_FOUND: {"description": "record/s not found"},
        status.HTTP_204_NO_CONTENT: {
            "description": "Records deleted successfully"
        },
    },
)
async def post_del_multiple_osdu_records(record_ids: List[str], ctx: Context = Depends(get_ctx)):
    """Logically delete every record whose id appears in ``record_ids``."""
    # Resolve the storage client for this request context, then forward the
    # whole id list in a single batch call scoped to the caller's partition.
    storage = await get_storage_record_service(ctx)
    await storage.delete_records(
        data_partition_id=ctx.partition_id,
        request_body=record_ids,
    )
...@@ -22,7 +22,7 @@ from starlette.requests import Request ...@@ -22,7 +22,7 @@ from starlette.requests import Request
from app.clients.storage_service_client import get_storage_record_service from app.clients.storage_service_client import get_storage_record_service
from app.model.model_utils import to_record, from_record from app.model.model_utils import to_record, from_record
from app.model.osdu_model import WellboreMarkerSet from app.model.osdu_model import WellboreMarkerSet110 as WellboreMarkerSet
from app.routers.common_parameters import REQUIRED_ROLES_READ, REQUIRED_ROLES_WRITE from app.routers.common_parameters import REQUIRED_ROLES_READ, REQUIRED_ROLES_WRITE
from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils, OSDU_WELLBOREMARKERSET_VERSION_REGEX from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils, OSDU_WELLBOREMARKERSET_VERSION_REGEX
from app.routers.record_utils import fetch_record from app.routers.record_utils import fetch_record
......
...@@ -23,9 +23,11 @@ from starlette.requests import Request ...@@ -23,9 +23,11 @@ from starlette.requests import Request
from app.clients.storage_service_client import get_storage_record_service from app.clients.storage_service_client import get_storage_record_service
from app.model.osdu_model import WellboreTrajectory110 as WellboreTrajectory from app.model.osdu_model import WellboreTrajectory110 as WellboreTrajectory
from app.routers.bulk.bulk_uri_dependencies import BulkIdAccess, get_bulk_id_access
from app.routers.common_parameters import REQUIRED_ROLES_READ, REQUIRED_ROLES_WRITE from app.routers.common_parameters import REQUIRED_ROLES_READ, REQUIRED_ROLES_WRITE
from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils, OSDU_WELLBORETRAJECTORY_VERSION_REGEX from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils, OSDU_WELLBORETRAJECTORY_VERSION_REGEX
from app.routers.record_utils import fetch_record from app.routers.record_utils import fetch_record
from app.routers.delete.delete_bulk_data import delete_record
from app.utils import Context from app.utils import Context
from app.utils import get_ctx from app.utils import get_ctx
from app.model.model_utils import to_record, from_record from app.model.model_utils import to_record, from_record
...@@ -75,13 +77,17 @@ async def get_wellbore_trajectory_osdu( ...@@ -75,13 +77,17 @@ async def get_wellbore_trajectory_osdu(
}, },
}, },
) )
async def del_osdu_wellboreTrajectory(wellboretrajectoryid: str, ctx: Context = Depends(get_ctx)): async def del_osdu_wellboreTrajectory(wellboretrajectoryid: str,
storage_client = await get_storage_record_service(ctx) purge: bool = False,
ctx: Context = Depends(get_ctx),
bulk_uri_access: BulkIdAccess = Depends(get_bulk_id_access)):
wellboretrajectoryid = DMSV3RouterUtils.get_id_without_version(OSDU_WELLBORETRAJECTORY_VERSION_REGEX, wellboretrajectoryid = DMSV3RouterUtils.get_id_without_version(OSDU_WELLBORETRAJECTORY_VERSION_REGEX,
wellboretrajectoryid) wellboretrajectoryid)
await storage_client.delete_record( await delete_record(record_id=wellboretrajectoryid,
id=wellboretrajectoryid, data_partition_id=ctx.partition_id purge=purge,
) ctx=ctx,
bulk_uri_access=bulk_uri_access)
@router.get( @router.get(
......
...@@ -14,10 +14,11 @@ ...@@ -14,10 +14,11 @@
from fastapi import ( from fastapi import (
APIRouter, APIRouter,
Body, Depends, Body,
Depends,
Response, Response,
status) status,
HTTPException)
from odes_storage.models import (CreateUpdateRecordsResponse, List, from odes_storage.models import (CreateUpdateRecordsResponse, List,
RecordVersions) RecordVersions)
...@@ -29,9 +30,13 @@ from app.model.osdu_model import WellLog110 as WellLog ...@@ -29,9 +30,13 @@ from app.model.osdu_model import WellLog110 as WellLog
from app.utils import Context, get_ctx, load_schema_example from app.utils import Context, get_ctx, load_schema_example
from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils, OSDU_WELLLOG_VERSION_REGEX from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils, OSDU_WELLLOG_VERSION_REGEX
from app.routers.bulk.bulk_uri_dependencies import BulkIdAccess, get_bulk_id_access
from app.routers.common_parameters import REQUIRED_ROLES_READ, REQUIRED_ROLES_WRITE
from app.routers.record_utils import fetch_record from app.routers.record_utils import fetch_record
from app.routers.common_parameters import REQUIRED_ROLES_READ, REQUIRED_ROLES_WRITE
from app.routers.delete.delete_bulk_data import delete_record
from app.consistency import welllog_consistency_check, DuplicatedCurveIdException, ReferenceCurveIdNotFoundException
router = APIRouter() router = APIRouter()
...@@ -77,13 +82,16 @@ async def get_welllog_osdu( ...@@ -77,13 +82,16 @@ async def get_welllog_osdu(
}, },
}, },
) )
async def del_osdu_welllog(welllogid: str, ctx: Context = Depends(get_ctx)): async def del_osdu_welllog(welllogid: str,
storage_client = await get_storage_record_service(ctx) purge: bool = False,
ctx: Context = Depends(get_ctx),
bulk_uri_access: BulkIdAccess = Depends(get_bulk_id_access)):
welllogid = DMSV3RouterUtils.get_id_without_version(OSDU_WELLLOG_VERSION_REGEX, welllogid = DMSV3RouterUtils.get_id_without_version(OSDU_WELLLOG_VERSION_REGEX,
welllogid) welllogid)
await storage_client.delete_record( await delete_record(record_id=welllogid,
id=welllogid, data_partition_id=ctx.partition_id purge=purge,
) ctx=ctx,
bulk_uri_access=bulk_uri_access)
@router.get( @router.get(
...@@ -149,6 +157,21 @@ async def post_welllog_osdu( ...@@ -149,6 +157,21 @@ async def post_welllog_osdu(
welllogs: List[WellLog] = Body(..., example=load_schema_example("wellLog_v3.json")), welllogs: List[WellLog] = Body(..., example=load_schema_example("wellLog_v3.json")),
ctx: Context = Depends(get_ctx) ctx: Context = Depends(get_ctx)
) -> CreateUpdateRecordsResponse: ) -> CreateUpdateRecordsResponse:
for idx, w in enumerate(welllogs):
try:
welllog_consistency_check(w)
except DuplicatedCurveIdException:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"All CurveID in WellLog[{idx}] should be unique"
)
except ReferenceCurveIdNotFoundException:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"WellLog[{idx}] should have a curve with a curveID value equal to the ReferenceCurveID value: '{w.data.ReferenceCurveID}'"
)
storage_client = await get_storage_record_service(ctx) storage_client = await get_storage_record_service(ctx)
return await storage_client.create_or_update_records( return await storage_client.create_or_update_records(
......
# Copyright 2021 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2021 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from app.bulk_persistence import resolve_tenant
from osdu.core.api.storage.blob_storage_base import BlobStorageBase
import asyncio
from app.bulk_persistence.dask.storage_path_builder import hash_record_id
from app.clients import StorageRecordServiceClient
from app.clients.storage_service_client import get_storage_record_service
from app.routers.bulk.bulk_uri_dependencies import BulkIdAccess
from app.routers.record_utils import fetch_record
from app.utils import Context, get_ctx
async def _get_bulk_uri_from_version(ctx: Context, bulk_uri_access: BulkIdAccess, record_id: str, index: int,
                                     record_versions):
    """Return the encoded bulk URI of the record version at ``index``.

    Fetches the record at that version and extracts its bulk URI; yields
    None implicitly when the extracted URI is not valid.
    """
    selected_version = record_versions.versions[index]
    versioned_record = await fetch_record(ctx, record_id, selected_version)
    bulk_uri = bulk_uri_access.get_bulk_uri(record=versioned_record)
    if not bulk_uri.is_valid():
        return None
    return bulk_uri.encode()
async def _get_bulk_uris_of_versions_from_record_id(ctx: Context,
                                                    bulk_uri_access: BulkIdAccess,
                                                    storage_client: StorageRecordServiceClient,
                                                    record_id: str):
    """Collect the encoded bulk URIs of every version of the given record.

    Versions without a valid bulk URI are skipped, and so are versions whose
    fetch/extraction failed: ``gather`` runs with ``return_exceptions=True``
    so one failing version does not abort the whole batch.
    """
    record_versions = await storage_client.get_all_record_versions(id=record_id, data_partition_id=ctx.partition_id)
    results = await asyncio.gather(*[
        _get_bulk_uri_from_version(ctx, bulk_uri_access, record_id, i, record_versions)
        for i in range(len(record_versions.versions))
    ], return_exceptions=True)
    # FIX: with return_exceptions=True, failures come back as exception
    # *objects*; they are not None, so the previous filter let them leak into
    # the result and later broke the caller's 'bulk_id in file_name' check
    # with a TypeError. Keep only real (non-None, non-exception) URIs.
    return [uri for uri in results
            if uri is not None and not isinstance(uri, BaseException)]
async def delete_record(
        record_id: str,
        purge: bool,
        ctx: Context,
        bulk_uri_access: BulkIdAccess):
    """Delete a record, either logically (default) or permanently.

    With ``purge=False`` the record is soft-deleted through the storage
    service. With ``purge=True`` the record meta data is purged and the bulk
    data files of all record versions are deleted from blob storage,
    fire-and-forget: deletion failures are logged, not raised.
    """
    storage_client = await get_storage_record_service(ctx)
    if not purge:
        await storage_client.delete_record(id=record_id, data_partition_id=ctx.partition_id)
        return

    # Resolve the bulk URIs of all versions *before* purging the meta data,
    # since the records are unreadable afterwards.
    record_bulk_uris = await _get_bulk_uris_of_versions_from_record_id(ctx, bulk_uri_access, storage_client,
                                                                      record_id)
    # Delete meta data
    await storage_client.purge_record(id=record_id, data_partition_id=ctx.partition_id)

    tenant = await resolve_tenant(ctx.partition_id)
    blob_storage: BlobStorageBase = await ctx.app_injector.get(BlobStorageBase)
    encoded_record_id = hash_record_id(record_id)
    bulk_file_names = await blob_storage.list_objects(tenant=tenant,
                                                      prefix=encoded_record_id)

    def _log_if_failed(future_result):
        # Completion callback shared by all deletion tasks.
        exc = future_result.exception()
        if exc:
            # FIX: not every exception carries a '.detail' attribute (only
            # HTTP-style errors do); accessing it unconditionally raised
            # AttributeError inside the callback. Fall back to the exception.
            get_ctx().logger.exception(
                f"Exception on bulk versions deletion: {getattr(exc, 'detail', exc)}")

    # Fire-and-forget deletion of every bulk file belonging to one of the
    # record's versions. Each file is scheduled at most once, even when it
    # matches several bulk ids (the previous nested comprehension could
    # schedule duplicate deletions of the same object).
    for bulk_file_name in bulk_file_names:
        if any(bulk_id in bulk_file_name for bulk_id in record_bulk_uris):
            # create_task => ensure_future
            delete_task = asyncio.ensure_future(
                blob_storage.delete(tenant=tenant, object_name=bulk_file_name))
            delete_task.add_done_callback(_log_if_failed)
...@@ -45,7 +45,8 @@ from app.routers.ddms_v3 import ( ...@@ -45,7 +45,8 @@ from app.routers.ddms_v3 import (
well_ddms_v3, well_ddms_v3,
welllog_ddms_v3, welllog_ddms_v3,
wellbore_trajectory_ddms_v3, wellbore_trajectory_ddms_v3,
markerset_ddms_v3) markerset_ddms_v3,
delete_v3)
from app.routers.bulk import bulk_routes from app.routers.bulk import bulk_routes
from app.routers.trajectory import trajectory_ddms_v2 from app.routers.trajectory import trajectory_ddms_v2
from app.routers.dipset import dipset_ddms_v2, dip_ddms_v2 from app.routers.dipset import dipset_ddms_v2, dip_ddms_v2
...@@ -191,19 +192,30 @@ for v2_api, tag, entity_type in ddms_v2_routes_groups: ...@@ -191,19 +192,30 @@ for v2_api, tag, entity_type in ddms_v2_routes_groups:
tags=[tag], tags=[tag],
dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(entity_type, "V2"))]) dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(entity_type, "V2"))])
ddms_v3_routes_groups = [ ddms_v3_routes_groups_without_bulk = [
(wellbore_ddms_v3, "Wellbore", Entity.WELLBORE), (wellbore_ddms_v3, "Wellbore", Entity.WELLBORE),
(well_ddms_v3, "Well", Entity.WELL), (well_ddms_v3, "Well", Entity.WELL),
(markerset_ddms_v3, "Marker", Entity.MARKER)
]
ddms_v3_routes_groups_with_bulk = [
(welllog_ddms_v3, "WellLog", Entity.WELL_LOG), (welllog_ddms_v3, "WellLog", Entity.WELL_LOG),
(wellbore_trajectory_ddms_v3, "Trajectory v3", Entity.TRAJECTORY), (wellbore_trajectory_ddms_v3, "Trajectory v3", Entity.TRAJECTORY)
(markerset_ddms_v3, "Marker", Entity.MARKER),
] ]
for v3_api, tag, entity_type in ddms_v3_routes_groups:
for v3_api, tag, entity_type in ddms_v3_routes_groups_without_bulk:
wdms_app.include_router(v3_api.router, wdms_app.include_router(v3_api.router,
prefix=DDMS_V3_PATH, prefix=DDMS_V3_PATH,
tags=[tag], tags=[tag],
dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(entity_type, "V3"))]) dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(entity_type, "V3"))])
v3_bulk_dependencies = [*basic_dependencies, Depends(set_v3_input_dataframe_check), Depends(set_osdu_bulk_id_access)]
for v3_api, tag, entity_type in ddms_v3_routes_groups_with_bulk:
wdms_app.include_router(v3_api.router,
prefix=DDMS_V3_PATH,
tags=[tag],
dependencies=[*v3_bulk_dependencies, Depends(make_entity_type_dependency(entity_type, "V3"))])
wdms_app.include_router(search.router, prefix='/ddms', tags=['search'], dependencies=basic_dependencies) wdms_app.include_router(search.router, prefix='/ddms', tags=['search'], dependencies=basic_dependencies)
wdms_app.include_router(fast_search.router, prefix='/ddms', tags=['fast-search'], dependencies=basic_dependencies) wdms_app.include_router(fast_search.router, prefix='/ddms', tags=['fast-search'], dependencies=basic_dependencies)
...@@ -214,8 +226,9 @@ wdms_app.include_router(search_v3_alpha.router, prefix=ALPHA_APIS_PREFIX + DDMS_ ...@@ -214,8 +226,9 @@ wdms_app.include_router(search_v3_alpha.router, prefix=ALPHA_APIS_PREFIX + DDMS_
tags=['ALPHA feature: search v3'], tags=['ALPHA feature: search v3'],
dependencies=basic_dependencies) dependencies=basic_dependencies)
wdms_app.include_router(delete_v3.router, prefix=DDMS_V3_PATH, tags=["Delete records V3"], dependencies=basic_dependencies)
alpha_tags = ['ALPHA feature: bulk data chunking'] alpha_tags = ['ALPHA feature: bulk data chunking']
v3_bulk_dependencies = [*basic_dependencies, Depends(set_v3_input_dataframe_check), Depends(set_osdu_bulk_id_access)]
for bulk_prefix, bulk_tags, is_visible in [(ALPHA_APIS_PREFIX + DDMS_V3_PATH, alpha_tags, False), for bulk_prefix, bulk_tags, is_visible in [(ALPHA_APIS_PREFIX + DDMS_V3_PATH, alpha_tags, False),
(DDMS_V3_PATH, [], True) (DDMS_V3_PATH, [], True)
......
...@@ -33,7 +33,7 @@ https://community.opengroup.org/api/v4/projects/465/packages/pypi/simple/ ...@@ -33,7 +33,7 @@ https://community.opengroup.org/api/v4/projects/465/packages/pypi/simple/
osdu-log-recognition-lib>=0.0.9 osdu-log-recognition-lib>=0.0.9
osdu-data-ecosystem-storage~=1.2.0 osdu-data-ecosystem-storage~=1.3.0
osdu-data-ecosystem-search>=0.3.2, <0.4 osdu-data-ecosystem-search>=0.3.2, <0.4
osdu-core-lib-python-ibm==1.3.0 osdu-core-lib-python-ibm==1.3.0
......
...@@ -273,7 +273,7 @@ osdu-core-lib-python-ibm==1.3.0 ...@@ -273,7 +273,7 @@ osdu-core-lib-python-ibm==1.3.0
# via -r requirements.in # via -r requirements.in
osdu-data-ecosystem-search==0.3.3 osdu-data-ecosystem-search==0.3.3
# via -r requirements.in # via -r requirements.in
osdu-data-ecosystem-storage==1.2.0 osdu-data-ecosystem-storage==1.3.0
# via -r requirements.in # via -r requirements.in
osdu-log-recognition-lib==0.0.9 osdu-log-recognition-lib==0.0.9
# via -r requirements.in # via -r requirements.in
......
This diff is collapsed.
...@@ -24,7 +24,8 @@ ...@@ -24,7 +24,8 @@
}, },
"work-product-component--WellboreMarkerSet": { "work-product-component--WellboreMarkerSet": {
"versions": [ "versions": [
"1.0.0" "1.0.0",
"1.1.0"
] ]
} }
}, },
......
...@@ -11,12 +11,14 @@ ...@@ -11,12 +11,14 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and # See the License for the specific language governing permissions and
# limitations under the License. # limitations under the License.
import json
import pytest import pytest
from wdms_client.request_builders.wdms.delete_records import build_request_delete_osdu_records
from .fixtures import with_wdms_env from .fixtures import with_wdms_env
from wdms_client.request_builders import build_request, get_cleaned_ref_and_res from wdms_client.request_builders import build_request, get_cleaned_ref_and_res
kind_list = ['osdu_wellbore', 'osdu_well', 'osdu_welllog', 'osdu_wellboretrajectory', 'osdu_wellboremarkerset'] kind_list = ['osdu_wellbore', 'osdu_well', 'osdu_welllog', 'osdu_wellboretrajectory', 'osdu_wellboremarkerset']
# parametrize of kind + dependency on the create_record # parametrize of kind + dependency on the create_record
...@@ -63,7 +65,7 @@ def test_crud_record_versions(with_wdms_env, kind): ...@@ -63,7 +65,7 @@ def test_crud_record_versions(with_wdms_env, kind):
# get specific version of the record # get specific version of the record
result = build_request(f'crud.{kind}.get_{kind}_specific_version').call( result = build_request(f'crud.{kind}.get_{kind}_specific_version').call(
with_wdms_env, with_wdms_env,
**{f'{kind}_record_version': resobj.versions[len(resobj.versions)-1]} # set/pass version to fetch **{f'{kind}_record_version': resobj.versions[len(resobj.versions) - 1]} # set/pass version to fetch
) )
result.assert_ok() result.assert_ok()
...@@ -109,3 +111,21 @@ def test_crud_get_as_record(delfi_id, kind, with_wdms_env): ...@@ -109,3 +111,21 @@ def test_crud_get_as_record(delfi_id, kind, with_wdms_env):
# Get it as osdu wellbore with delfi id # Get it as osdu wellbore with delfi id
result = build_request(f'crud.osdu_{kind}.get_osdu_{kind}').call(with_wdms_env) result = build_request(f'crud.osdu_{kind}.get_osdu_{kind}').call(with_wdms_env)
result.assert_status_code(400) result.assert_status_code(400)
@pytest.mark.tag('basic', 'crud', 'smoke')
def test_crud_delete_records(with_wdms_env):
    """Create one record per kind, bulk-delete them all, then verify each is gone (404)."""
    record_ids = []
    for kind in kind_list:
        result = build_request(f'crud.{kind}.create_{kind}').call(with_wdms_env)
        resobj = result.get_response_obj()
        with_wdms_env.set(f'{kind}_record_id', resobj.recordIds[0])
        record_ids.append(resobj.recordIds[0])

        # sanity check: the record must exist before deletion
        result = build_request(f'crud.{kind}.get_{kind}').call(with_wdms_env)
        result.assert_status_code(200)

    # fix: 'record_ids' was a spurious f-string (no placeholders)
    with_wdms_env.set('record_ids', json.dumps(record_ids))  # stored the record ids for the delete request

    result = build_request_delete_osdu_records().call(with_wdms_env)
    result.assert_status_code(204)

    # every deleted record must now be unreachable
    for kind in kind_list:
        result = build_request(f'crud.{kind}.get_{kind}').call(with_wdms_env)
        result.assert_status_code(404)
\ No newline at end of file
# Copyright 2021 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from wdms_client.request_builders.wdms.crud.osdu_wellboretrajectory import build_request_create_osdu_wellboretrajectory
from wdms_client.request_builders.wdms.crud.osdu_welllog import build_request_create_osdu_welllog
from .fixtures import with_wdms_env
from .test_chunking import ParquetSerializer, JsonSerializer, generate_df, build_request
from wdms_client.request_builders.wdms.delete import build_request_delete_purge_record, build_request_get_record
from wdms_client.request_runner import RequestRunner
# Maps the singular entity type (used to pick the create request builder)
# to the plural path segment used by the /ddms/v3 API routes.
entity_type_dict = {
    "welllog": "welllogs",
    "wellboretrajectory": "wellboretrajectories"
}
def build_request_post_data(entity_type: str, record_id: str, payload) -> RequestRunner:
    """Build a POST request runner targeting the v3 bulk-data endpoint of *record_id*."""
    entity_path = entity_type_dict[entity_type]
    url = '{{base_url}}/ddms/v3/' + entity_path + f'/{record_id}/data'
    return build_request(f'{entity_type} post data', 'POST', url, payload=payload)
def create_record_with_data(with_wdms_env, entity_type, serializer, nb_version):
    """Create a record of *entity_type* and upload *nb_version* bulk data versions.

    Stores the created record id in the env under 'osdu_{entity_type}_record_id'
    for the following tests.

    Raises:
        ValueError: if *entity_type* is not a supported kind (previously this
            fell through and crashed with a NameError on 'result').
    """
    if entity_type == 'welllog':
        result = build_request_create_osdu_welllog(False).call(with_wdms_env)
    elif entity_type == 'wellboretrajectory':
        result = build_request_create_osdu_wellboretrajectory(False).call(with_wdms_env)
    else:
        raise ValueError(f"unsupported entity_type: {entity_type!r}")
    resobj = result.get_response_obj()

    data = generate_df(['MD', 'X'], range(8))
    data_to_send = serializer.dump(data)
    headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}

    # DATA: post the same frame nb_version times to create multiple bulk versions
    for _ in range(nb_version):
        build_request_post_data(entity_type, resobj.recordIds[0], data_to_send).call(with_wdms_env,
                                                                                     headers=headers).assert_ok()

    assert resobj.recordCount == 1
    assert len(resobj.recordIds) == 1
    with_wdms_env.set(f'osdu_{entity_type}_record_id',
                      resobj.recordIds[0])  # stored the record id for the following tests
@pytest.mark.tag('basic', 'smoke')
@pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
@pytest.mark.parametrize('entity_type', list(entity_type_dict))
def test_hard_delete_purge_record(with_wdms_env, entity_type, serializer):
    """Hard delete (purge=true): the record must be unreachable (404) afterwards."""
    create_record_with_data(with_wdms_env, entity_type, serializer, 20)
    record_id = with_wdms_env.get(f'osdu_{entity_type}_record_id')

    entity_path = entity_type_dict[entity_type]
    delete_result = build_request_delete_purge_record(record_id, entity_path, 'true').call(with_wdms_env)
    delete_result.assert_status_code(204)

    record_url = '{{base_url}}/ddms/v3/' + entity_path
    get_result = build_request_get_record(record_url, record_id).call(with_wdms_env)
    get_result.assert_status_code(404)
@pytest.mark.tag('basic', 'smoke')
@pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
@pytest.mark.parametrize('entity_type', list(entity_type_dict))
def test_soft_delete_purge_record(with_wdms_env, entity_type, serializer):
    """Soft delete (purge=false): the record must no longer be returned (404)."""
    create_record_with_data(with_wdms_env, entity_type, serializer, 20)
    record_id = with_wdms_env.get(f'osdu_{entity_type}_record_id')

    entity_path = entity_type_dict[entity_type]
    delete_result = build_request_delete_purge_record(record_id, entity_path, 'false').call(with_wdms_env)
    delete_result.assert_status_code(204)

    record_url = '{{base_url}}/ddms/v3/' + entity_path
    get_result = build_request_get_record(record_url, record_id).call(with_wdms_env)
    get_result.assert_status_code(404)
import pytest
from app.model.osdu_model import WellLog110, AbstractLegalTags100, AbstractAccessControlList100, WellLogData110, \
Curve110
from app.consistency import welllog_consistency_check, DuplicatedCurveIdException, ReferenceCurveIdNotFoundException
# Minimal valid kind/legal/acl fixtures shared by every consistency-check test below.
KIND = "osdu:wks:work-product-component--WellLog:1.0.0"
LEGAL = AbstractLegalTags100(legaltags=["legal_tag"], otherRelevantDataCountries=["FR"], status="compliant")
ACL = AbstractAccessControlList100(owners=["data.default.owners@opendes.slb.com"],
                                   viewers=["data.default.viewers@opendes.slb.com"])
# Payloads that must pass the consistency check: unique CurveIDs, and a
# ReferenceCurveID (when present) that matches an existing curve.
_CONSISTENT_WELLLOG_DATA = [
    WellLogData110(
        ReferenceCurveID="MD",
        Curves=[
            Curve110(CurveID="MD"),
            Curve110(CurveID="ZONE_NAME"),
        ],
    ),
    WellLogData110(
        Curves=[
            Curve110(CurveID="MD"),
            Curve110(CurveID="ZONE_NAME"),
        ],
    ),
    WellLogData110(Curves=[]),
]


@pytest.mark.parametrize("data", _CONSISTENT_WELLLOG_DATA)
def test_consistency_check(data):
    """A consistent WellLog must pass the check without raising."""
    welllog = WellLog110(kind=KIND, legal=LEGAL, acl=ACL, data=data)
    welllog_consistency_check(welllog)
@pytest.mark.parametrize("data", [
    WellLogData110(
        ReferenceCurveID="MD",
        Curves=[
            Curve110(CurveID="ZONE_NAME"),
            Curve110(CurveID="ZONE_NAME"),
            Curve110(CurveID="MD")
        ]
    ),
    WellLogData110(
        Curves=[
            Curve110(CurveID="ZONE_NAME"),
            Curve110(CurveID="ZONE_NAME")
        ]
    )
])
def test_consistency_inconsistent_curves_welllog(data):
    """Duplicate CurveID values must raise DuplicatedCurveIdException."""
    # fix: dropped the unused 'as excinfo' binding
    with pytest.raises(DuplicatedCurveIdException):
        welllog_consistency_check(
            WellLog110(kind=KIND, legal=LEGAL, acl=ACL, data=data)
        )
@pytest.mark.parametrize("data", [
    WellLogData110(
        ReferenceCurveID="MD",
        Curves=[
            Curve110(CurveID="ZONE_NAME"),
            Curve110(CurveID="A")
        ]
    ),
    WellLogData110(
        ReferenceCurveID="MD",
        Curves=[
            Curve110(CurveID="A"),
            Curve110(CurveID="B")
        ]
    ),
    WellLogData110(
        ReferenceCurveID="MD"
    )
])
def test_consistency_inconsistent_reference_id_welllog(data):
    """A ReferenceCurveID without a matching curve must raise ReferenceCurveIdNotFoundException."""
    # fix: dropped the unused 'as excinfo' binding
    with pytest.raises(ReferenceCurveIdNotFoundException):
        welllog_consistency_check(
            WellLog110(kind=KIND, legal=LEGAL, acl=ACL, data=data)
        )
...@@ -28,7 +28,7 @@ from app.helper import traces ...@@ -28,7 +28,7 @@ from app.helper import traces
from app.middleware import require_data_partition_id from app.middleware import require_data_partition_id
from app.model.entity_utils import Entity from app.model.entity_utils import Entity
from app.model.model_curated import * from app.model.model_curated import *
from app.model.osdu_model import Wellbore, Well, WellLog, WellboreTrajectory, WellboreMarkerSet from app.model.osdu_model import Wellbore, Well, WellLog, WellboreTrajectory, WellboreMarkerSet, WellboreMarkerSet110
from app.routers.ddms_v2.storage_helper import StorageHelper from app.routers.ddms_v2.storage_helper import StorageHelper
from app.routers.search.search_wrapper import SearchWrapper from app.routers.search.search_wrapper import SearchWrapper
from app.utils import Context from app.utils import Context
...@@ -83,7 +83,19 @@ tests_parameters = [ ...@@ -83,7 +83,19 @@ tests_parameters = [
)), )),
('/ddms/v3/wellboremarkersets', WellboreMarkerSet( ('/ddms/v3/wellboremarkersets', WellboreMarkerSet(
id=r"namespace:work-product-component--WellboreMarkerSet:c7c421a7-f496-5aef-8093-298c32bfdea9", id=r"namespace:work-product-component--WellboreMarkerSet:c7c421a7-f496-5aef-8093-298c32bfdea9",
kind="namespace:osdu:work-product-component--WellboreMarkerSet:1.0.0", kind="namespace:osdu:work-product-component--WellboreMarkerSet:1.1.0",
acl={"owners": ["me@osdu.org"], "viewers": ["ze@osdu.org"]},
legal={
"legaltags": ["string"],
"otherRelevantDataCountries": ["FR"],
},
data={
"WellboreID": r"namespace:master-data--Wellbore:c7c421a7-f496-5aef-8093-298c32bfdea9:456",
},
)),
('/ddms/v3/wellboremarkersets', WellboreMarkerSet110(
id=r"namespace:work-product-component--WellboreMarkerSet:c7c421a7-f496-5aef-8093-298c32bfdea9",
kind="namespace:osdu:work-product-component--WellboreMarkerSet:1.1.0",
acl={"owners": ["me@osdu.org"], "viewers": ["ze@osdu.org"]}, acl={"owners": ["me@osdu.org"], "viewers": ["ze@osdu.org"]},
legal={ legal={
"legaltags": ["string"], "legaltags": ["string"],
......
This diff is collapsed.
# Copyright 2021 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import mock
from fastapi.testclient import TestClient
from fastapi import Header, status, HTTPException
from opencensus.trace import base_exporter
from osdu.core.api.storage.blob_storage_base import BlobStorageBase
from app.bulk_persistence.dask.dask_bulk_storage import DaskBulkStorage
from app.clients import StorageRecordServiceClient
from odes_storage.models import RecordVersions
from app.helper import traces
from app.middleware import require_data_partition_id
from app.auth.auth import require_opendes_authorized_user
from app.routers.delete import delete_bulk_data
from app.utils import Context
from app.wdms_app import wdms_app, app_injector
from tests.unit.test_utils import create_mock_class, nope_logger_fixture
# Shared service mocks registered in the app injector by the client_delete
# fixture; individual methods are patched per-test with mock.patch.object.
StorageRecordServiceClientMock = mock.AsyncMock()
BlobStorageMock = mock.AsyncMock()
@pytest.fixture
def logger_fixture():
    """Swap the application logger for a MagicMock and yield it to the test."""
    from app.helper import logger

    mocked_logger = mock.MagicMock()
    logger._LOGGER = mocked_logger
    yield mocked_logger
@pytest.fixture
def client_delete(logger_fixture):
    """TestClient with auth bypassed, a default partition set, and the
    storage/blob/dask services replaced with mocks.

    Yields:
        fastapi.testclient.TestClient bound to wdms_app.
    """
    async def bypass_authorization():
        # no-op: skip the opendes authorized-user check
        pass

    async def set_default_partition(data_partition_id: str = Header("opendes")):
        Context.set_current_with_value(partition_id=data_partition_id)

    async def build_mock_storage():
        return StorageRecordServiceClientMock

    async def build_mock_blob_storage():
        return BlobStorageMock

    async def build_mock_dask_bulk_storage():
        return mock.AsyncMock()

    app_injector.register(StorageRecordServiceClient, build_mock_storage)
    app_injector.register(BlobStorageBase, build_mock_blob_storage)
    app_injector.register(DaskBulkStorage, build_mock_dask_bulk_storage)

    # override authentication dependency
    # fix: snapshot-copy the dict — the original captured a reference to the
    # very dict mutated below, so restoring it in 'finally' was a no-op and
    # the overrides leaked into other tests.
    previous_overrides = dict(wdms_app.dependency_overrides)
    try:
        wdms_app.dependency_overrides[
            require_opendes_authorized_user
        ] = bypass_authorization
        wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition
        client = TestClient(wdms_app)
        yield client
    finally:
        wdms_app.dependency_overrides = previous_overrides  # clean up
# Initialize traces exporter in app, like it is in app's startup decorator
# (TestClient does not trigger the startup event by default at import time).
wdms_app.trace_exporter = traces.CombinedExporter(service_name="tested-ddms")
# Fake bulk URIs resolved per record version; None entries simulate versions
# that have no bulk data attached (only 4 versions carry a bulk id).
record_bulk_uris = ['59c1ab7b-3bc9-4963-976d-815952bc8ddc', None, None, '87be6134-1b8f-43c0-a7f6-384a6a323f60', None,
                    '356eb799-ba19-49ea-814c-cdd8cf87553b', None, 'a764776c-a389-415b-a92c-af8366ce6901']
# Blob object names as blob storage would list them for the bulk ids above.
list_objects = ['bulk/59c1ab7b-3bc9-4963-976d-815952bc8ddc/data/part.0.parquet',
                'bulk/87be6134-1b8f-43c0-a7f6-384a6a323f60/data/part.0.parquet',
                'bulk/356eb799-ba19-49ea-814c-cdd8cf87553b/data/part.0.parquet',
                'bulk/a764776c-a389-415b-a92c-af8366ce6901/data/part.0.parquet']
# Arbitrary record version numbers, one per entry in record_bulk_uris.
versions = [1972724675421999416275969243854301388, 1972724691719041369425630371084748387,
            1972724692685381136056789571749686596, 1972724693418066907321024541915238319,
            1972724692685381136056789571458786590, 1972724691719041369425637411084748854,
            1972724691719041369425637411084487596, 1972724691719041369425637411257894562]
v3_entities = ["welllogs", "wellboretrajectories"]
@pytest.mark.parametrize("v3_entity", v3_entities)
def test_delete_purge_record(client_delete, logger_fixture, v3_entity):
    """Purge-delete a record; every failed bulk-blob deletion must be logged."""
    record_id = f'opendes:work-product-component--{v3_entity}:00001234'
    all_record_versions = RecordVersions(record_id=record_id, versions=versions)

    # Storage/blob mocks: blob deletions "fail" with a 404 HTTPException so the
    # fire-and-forget error-logging path is exercised.
    delete_record_mock = mock.AsyncMock(return_value=status.HTTP_204_NO_CONTENT,
                                        side_effect=status.HTTP_404_NOT_FOUND)
    blob_delete_mock = mock.AsyncMock(return_value=status.HTTP_204_NO_CONTENT,
                                      side_effect=HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                                                                detail="Error 404 not found"))
    list_objects_mock = mock.AsyncMock(return_value=list_objects)
    bulk_uri_mock = mock.AsyncMock(side_effect=record_bulk_uris)
    all_versions_mock = mock.AsyncMock(return_value=all_record_versions)

    with mock.patch.object(StorageRecordServiceClientMock, "delete_record", delete_record_mock), \
            mock.patch.object(StorageRecordServiceClientMock, "get_all_record_versions", all_versions_mock), \
            mock.patch.object(BlobStorageMock, "list_objects", list_objects_mock), \
            mock.patch.object(BlobStorageMock, "delete", blob_delete_mock), \
            mock.patch.object(delete_bulk_data, "_get_bulk_uri_from_version", bulk_uri_mock):
        response = client_delete.delete(
            f"/ddms/v3/{v3_entity}/{record_id}?purge=true",
            headers={"data-partition-id": "testing_partition"},
        )

        assert response.status_code == status.HTTP_204_NO_CONTENT
        # one logged exception per non-None bulk uri (4 of them)
        for i in range(4):
            logged_message = logger_fixture.exception.mock_calls[i].args[0]
            assert logged_message == "Exception on bulk versions deletion: Error 404 not found"