Compare revisions: osdu/platform/domain-data-mgmt-services/wellbore/wellbore-domain-services

Changes are shown as if the source revision was being merged into the target revision.

Showing changes with 423 additions and 215 deletions
......@@ -35,7 +35,7 @@ from app.model.model_curated import (
)
from app.model.model_utils import from_record, to_record
from app.routers.dipset.dip_model import Dip
from app.bulk_persistence import get_dataframe, create_and_store_dataframe, BulkId
from app.bulk_persistence import get_dataframe, create_and_store_dataframe, BulkURI
async def create_missing_logs(ctx, my_dipset: dipset):
"""
......@@ -246,9 +246,9 @@ def df_to_dips(dataframe: pd.DataFrame) -> List[Dip]:
#TODO refactor duplicate with trajectory
async def write_bulk(ctx, dataframe: pd.DataFrame) -> str:
async def write_bulk(ctx, dataframe: pd.DataFrame) -> BulkURI:
bulk_id = await create_and_store_dataframe(ctx, dataframe)
return BulkId.bulk_urn_encode(bulk_id)
return BulkURI.from_bulk_storage_V0(bulk_id)
async def write_dipset_data(ctx, dataframe: pd.DataFrame, ds: Union[dipset, str]) -> dipset:
......@@ -260,7 +260,8 @@ async def write_dipset_data(ctx, dataframe: pd.DataFrame, ds: Union[dipset, str]
dataframe.sort_values(by=["reference", "azimuth"], inplace=True, ignore_index=True)
# Write data in storage and update dipset bulk URI
my_dipset.data.bulkURI = await write_bulk(ctx, dataframe)
bulk_uri = await write_bulk(ctx, dataframe)
my_dipset.data.bulkURI = bulk_uri.encode()
# Create or update logs
await create_missing_logs(ctx, my_dipset)
......@@ -294,9 +295,9 @@ async def read_dipset_data(ctx, ds: Union[dipset, str]) -> Tuple[dipset, pd.Data
return my_dipset, pd.DataFrame()
# Fetch data
bulk_uri, _prefix = BulkId.bulk_urn_decode(my_dipset.data.bulkURI)
bulk_uri = BulkURI.decode(my_dipset.data.bulkURI)
# TODO use prefix to know how to read the bulk
df = await get_dataframe(ctx, bulk_uri)
df = await get_dataframe(ctx, bulk_uri.bulk_id)
return my_dipset, df
......
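This comparison's recurring change: the string-based `BulkId.bulk_urn_encode` / `bulk_urn_decode` helpers give way to a `BulkURI` object that carries the bulk id together with its storage version, so readers no longer have to parse the URN prefix by hand (the `# TODO use prefix to know how to read the bulk` comments become branchable checks such as `is_bulk_storage_V0()`). The class body itself is not part of this comparison; the sketch below is a minimal reconstruction inferred from the call sites and the `BulkURI` tests near the end of this diff. The V1 prefix value and the internal layout are assumptions.

```python
# Hypothetical sketch of the BulkURI surface used across this diff.
# Inferred from call sites and tests; the real implementation may differ.
import uuid
from typing import NamedTuple, Optional


class BulkStorageVersion(NamedTuple):
    version_id: str
    uri_prefix: Optional[str]


BulkStorageVersion_Invalid = BulkStorageVersion('invalid', None)
BulkStorageVersion_V0 = BulkStorageVersion('0', None)          # plain 'urn:uuid:<id>'
BulkStorageVersion_V1 = BulkStorageVersion('1', 'wdms-1')      # prefix value is an assumption


class BulkURI:
    def __init__(self, bulk_id: str, version: Optional[BulkStorageVersion]):
        # Any missing piece degrades to the invalid URI, as the tests below expect.
        if not bulk_id or not version or version == BulkStorageVersion_Invalid:
            bulk_id, version = '', BulkStorageVersion_Invalid
        self._bulk_id, self._version = bulk_id, version

    @classmethod
    def from_bulk_storage_V0(cls, bulk_id: str) -> 'BulkURI':
        return cls(bulk_id, BulkStorageVersion_V0)

    @classmethod
    def decode(cls, uri: str) -> 'BulkURI':
        # Accepts 'urn:uuid:<uuid>' (V0) or 'urn:<prefix>:uuid:<uuid>' (V1).
        parts = uri.split(':')
        if parts[0] != 'urn' or parts[-2] != 'uuid':
            raise ValueError(f'invalid bulk URI: {uri}')
        bulk_id = str(uuid.UUID(parts[-1]))  # raises ValueError on a malformed UUID
        if len(parts) == 3:
            return cls(bulk_id, BulkStorageVersion_V0)
        if len(parts) == 4 and parts[1] == BulkStorageVersion_V1.uri_prefix:
            return cls(bulk_id, BulkStorageVersion_V1)
        raise ValueError(f'unknown bulk URI prefix: {uri}')

    def encode(self) -> str:
        if not self.is_valid():
            raise ValueError('cannot encode an invalid BulkURI')
        if self._version.uri_prefix:
            return f'urn:{self._version.uri_prefix}:uuid:{self._bulk_id}'
        return f'urn:uuid:{self._bulk_id}'

    @property
    def bulk_id(self) -> str:
        return self._bulk_id

    @property
    def storage_version(self) -> BulkStorageVersion:
        return self._version

    def is_bulk_storage_V0(self) -> bool:
        return self._version == BulkStorageVersion_V0

    def is_valid(self) -> bool:
        return self._version != BulkStorageVersion_Invalid
```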
......@@ -3,12 +3,16 @@ from enum import Enum
from typing import Dict, Optional, Union, List
from asyncio import gather
from starlette.requests import Request
from app.bulk_persistence import resolve_tenant
from app.clients import StorageRecordServiceClient
from app.persistence.sessions_storage import (Session,
SessionInternal,
SessionsStorage,
SessionUpdateMode)
from app.routers.ddms_v3.ddms_v3_utils import DMSV3RouterUtils
from app.routers.record_utils import fetch_record
from app.utils import Context
from fastapi import APIRouter, Depends
from fastapi.responses import Response
......@@ -116,6 +120,7 @@ async def get_session_dependencies():
response_model=Session
)
async def create_session(record_id: str,
request: Request,
create_rq: CreateDataSessionRequest = None,
with_storages: WithSessionStorages = Depends(get_session_dependencies)) -> Session:
"""
......@@ -124,9 +129,10 @@ async def create_session(record_id: str,
check that version exists if fromVersion is passed
The user should be able to pass record metadata (data.curves) to be patched at the end.
"""
# fetch latest version
record = await with_storages.storage_service_client.get_record(record_id, with_storages.ctx.partition_id)
DMSV3RouterUtils.raise_if_not_osdu_right_entity_kind(record, request.state)
if create_rq.fromVersion == 0:
# fetch latest version
record = await with_storages.storage_service_client.get_record(record_id, with_storages.ctx.partition_id)
create_rq.fromVersion = record.version
else:
# check version exists
......@@ -157,7 +163,11 @@ async def create_session(record_id: str,
)
async def get_session(record_id: str,
session_id: str,
request: Request,
with_storages: WithSessionStorages = Depends(get_session_dependencies)) -> Session:
if hasattr(request.state, 'version') and request.state.version != "V2":
record = await with_storages.storage_service_client.get_record(record_id, with_storages.ctx.partition_id)
DMSV3RouterUtils.raise_if_not_osdu_right_entity_kind(record, request.state)
i_session = await with_storages.get_session(record_id, session_id)
return i_session.session
......@@ -181,7 +191,11 @@ async def delete_session(record_id: str,
response_model=List[Session]
)
async def list_session(record_id: str,
request: Request,
with_storages: WithSessionStorages = Depends(get_session_dependencies)) -> List[Session]:
if hasattr(request.state, 'version') and request.state.version != "V2":
record = await with_storages.storage_service_client.get_record(record_id, with_storages.ctx.partition_id)
DMSV3RouterUtils.raise_if_not_osdu_right_entity_kind(record, request.state)
session_ids = await with_storages.sessions_storage.list_sessions(with_storages.tenant, record_id)
get_session_tasks = [
......
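Each session route above now receives the raw `Request`, fetches the record, and delegates to `DMSV3RouterUtils.raise_if_not_osdu_right_entity_kind` whenever the call did not come through a V2 route, so a WellLog session URL cannot be replayed against, say, a Trajectory record. The helper's body is not shown in this comparison; below is a hypothetical sketch of such a guard, assuming it compares the record kind against the entity type stamped on `request.state` by `make_entity_type_dependency` (see the application wiring later in this diff). Field names like `record.kind` and the 404 status are assumptions.

```python
# Hypothetical sketch only: the real raise_if_not_osdu_right_entity_kind is not in this diff.
from fastapi import HTTPException, status


class DMSV3RouterUtils:
    @staticmethod
    def raise_if_not_osdu_right_entity_kind(record, request_state) -> None:
        expected = request_state.entity_type  # set by make_entity_type_dependency
        # OSDU kinds look like 'osdu:wks:work-product-component--WellLog:1.1.0';
        # a substring check against the entity name is assumed here.
        if expected.value.lower() not in record.kind.lower():
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f'record {record.id} does not match the entity kind of this route',
            )
```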
......@@ -13,7 +13,7 @@
# limitations under the License.
import pandas as pd
from app.bulk_persistence import BulkId, NoBulkException, UnknownChannelsException, InvalidBulkException
from app.bulk_persistence import BulkURI, NoBulkException, UnknownChannelsException, InvalidBulkException
from app.model.model_curated import trajectory as Trajectory
from app.bulk_persistence import get_dataframe, create_and_store_dataframe
......@@ -46,11 +46,11 @@ class Persistence:
raise NoBulkException
try:
bulkid, _prefix = BulkId.bulk_urn_decode(record.data.bulkURI)
bulk_uri = BulkURI.decode(record.data.bulkURI)
# TODO use prefix to know how to read the bulk
df = await get_dataframe(ctx, bulkid)
df = await get_dataframe(ctx, bulk_uri.bulk_id)
except Exception as ex:
raise InvalidBulkException(ex)
raise InvalidBulkException(ex) from ex
if not channels:
return df
......@@ -60,8 +60,7 @@ class Persistence:
except KeyError as key_error: # unknown channels
raise UnknownChannelsException(key_error)
@classmethod
async def write_bulk(cls, ctx, dataframe: pd.DataFrame) -> str:
async def write_bulk(cls, ctx, dataframe: pd.DataFrame) -> BulkURI:
bulk_id = await create_and_store_dataframe(ctx, dataframe)
return BulkId.bulk_urn_encode(bulk_id)
return BulkURI.from_bulk_storage_V0(bulk_id)
......@@ -236,7 +236,8 @@ async def post_traj_data(
trajectory_record = await fetch_trajectory_record(ctx, trajectoryid)
record = from_record(Trajectory, trajectory_record)
record.data.bulkURI = await persistence.write_bulk(ctx, df)
bulk_uri = await persistence.write_bulk(ctx, df)
record.data.bulkURI = bulk_uri.encode()
# update record's channels
if not record.data.channels:
......@@ -297,7 +298,7 @@ async def _get_trajectory_data(
except UnknownChannelsException as key_error: # unknown channels
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(key_error)) from key_error
except InvalidBulkException as ex:
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(ex))
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str("Bulk is invalid")) from ex
content = await DataframeSerializerAsync().to_json(df, orient=orient)
return Response(content=content, media_type=MimeTypes.JSON.type)
......
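Two smaller improvements in this file: exceptions are re-raised with `from ex`, preserving the original error as `__cause__` in the traceback chain, and the HTTP 500 detail now carries a generic "Bulk is invalid" message instead of echoing internal exception text to the client. A minimal, self-contained illustration of the chaining semantics:

```python
# Plain-Python illustration of 'raise ... from ex' as adopted in this diff.
class InvalidBulkException(Exception):
    pass


def load_bulk():
    try:
        raise KeyError('missing column')          # stand-in for a real read failure
    except Exception as ex:
        # 'from ex' records the original error as __cause__, keeping the full
        # chain for server-side logs while callers only see InvalidBulkException.
        raise InvalidBulkException('Bulk is invalid') from ex


try:
    load_bulk()
except InvalidBulkException as ex:
    assert isinstance(ex.__cause__, KeyError)
```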
......@@ -202,7 +202,8 @@ class Context:
'_api_key',
'_user',
'_app_injector',
'_attr_dict'
'_attr_dict',
'_x_user_id'
]
def __init__(self,
......@@ -217,6 +218,7 @@ class Context:
api_key: Optional[str] = None,
user: Optional[User] = None,
app_injector: Optional[AppInjector] = None,
x_user_id: Optional[str] = None,
**keys):
self._tracer = tracer
......@@ -230,6 +232,7 @@ class Context:
self._api_key = api_key
self._user = user
self._app_injector = app_injector
self._x_user_id = x_user_id
# pass
self._attr_dict = keys or {}
......@@ -250,7 +253,7 @@ class Context:
@classmethod
def set_current_with_value(cls, tracer=None, logger=None, correlation_id=None, request_id=None, auth=None,
partition_id=None, app_key=None, api_key=None, user=None, app_injector=None,
dev_mode=Config.dev_mode.value,
dev_mode=Config.dev_mode.value, x_user_id=None,
**keys) -> 'Context':
"""
clone the current context with the given values, set the new ctx as current and returns it
......@@ -269,6 +272,7 @@ class Context:
user=user,
app_injector=app_injector,
dev_mode=dev_mode,
x_user_id=x_user_id,
**keys)
new_ctx.set_current()
return new_ctx
......@@ -301,6 +305,7 @@ class Context:
api_key=self._api_key,
user=self._user,
app_injector=self._app_injector,
x_user_id=self._x_user_id,
**self._attr_dict)
def with_correlation_id(self, correlation_id):
......@@ -323,6 +328,11 @@ class Context:
clone._partition_id = partition_id
return clone
def with_x_user_id(self, x_user_id):
clone = self.__copy__()
clone._x_user_id = x_user_id
return clone
def with_user(self, user):
clone = self.__copy__()
clone._user = user
......@@ -345,7 +355,7 @@ class Context:
def with_value(self, tracer=None, logger=None, correlation_id=None, request_id=None, auth=None,
partition_id=None, app_key=None, api_key=None, user=None, app_injector=None,
dev_mode=Config.dev_mode.value, **keys) -> 'Context':
dev_mode=Config.dev_mode.value, x_user_id=None, **keys) -> 'Context':
""" Clone context, adding all keys in future logs """
cloned = self.__class__(
tracer=tracer or self._tracer,
......@@ -359,6 +369,7 @@ class Context:
api_key=api_key or self._api_key,
user=user or self._user,
app_injector=app_injector or self._app_injector,
x_user_id=x_user_id or self._x_user_id,
**self._attr_dict)
if keys is not None:
......@@ -409,6 +420,11 @@ class Context:
def app_injector(self) -> Optional[AppInjector]:
return self._app_injector
@property
def x_user_id(self) -> Optional[str]:
return self._x_user_id
def __dict__(self):
return {
"tracer": self.tracer,
......@@ -418,7 +434,8 @@ class Context:
"dev_mode": self.dev_mode,
"partition_id": self.partition_id,
"app_key": self.app_key,
"api_key": self.api_key
"api_key": self.api_key,
"x_user_id": self.x_user_id,
}
def __repr__(self):
......
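The `Context` changes thread a new optional `x_user_id` (presumably taken from an `x-user-id` request header, though the middleware wiring is not in this comparison) through the slots, the constructor, the clone helpers, a read-only property, and `__dict__` serialization. A short usage sketch of the API as extended here, assuming the default arguments suffice outside the request pipeline:

```python
# Usage sketch of the x_user_id plumbing added to Context in this diff.
from app.utils import Context

ctx = Context.set_current_with_value(partition_id='opendes', x_user_id='user-123')
assert ctx.x_user_id == 'user-123'

# Like the other with_* helpers, with_x_user_id clones instead of mutating,
# so contexts already captured by concurrent tasks are unaffected.
ctx2 = ctx.with_x_user_id('user-456')
assert ctx.x_user_id == 'user-123' and ctx2.x_user_id == 'user-456'
```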
......@@ -16,13 +16,14 @@ from os import getpid
import asyncio
from time import sleep
from fastapi import FastAPI, Depends
from fastapi import FastAPI, Depends, Request
from fastapi.openapi.utils import get_openapi
from app import __version__, __build_number__, __app_name__
from app.auth.auth import require_opendes_authorized_user
from app.conf import Config, check_environment
from app.errors.exception_handlers import add_exception_handlers, create_custom_http_exception_handler
from app.model.entity_utils import Entity
from app.modules import discoverer
from app.helper import traces, logger
......@@ -113,6 +114,13 @@ def executor_startup_task():
sleep(0.2) # to keep executor "busy"
def make_entity_type_dependency(entity_type: Entity, version: str):
def _set_entity_type(request: Request):
request.state.entity_type = entity_type
request.state.version = version
return _set_entity_type
@base_app.on_event("startup")
async def startup_event():
service_name = Config.service_name.value
......@@ -134,9 +142,6 @@ async def startup_event():
for _ in range(POOL_EXECUTOR_MAX_WORKER):
asyncio.create_task(run_in_pool_executor(executor_startup_task))
if Config.alpha_feature_enabled.value:
enable_alpha_feature()
add_modules_routers()
......@@ -171,34 +176,44 @@ wdms_app.include_router(about.router, prefix=DDMS_V2_PATH, tags=["Wellbore DDMS"
wdms_app.include_router(ddms_v2.router, prefix=DDMS_V2_PATH, tags=["Wellbore DDMS"], include_in_schema=False)
ddms_v2_routes_groups = [
(well_ddms_v2, "Well"),
(wellbore_ddms_v2, "Wellbore"),
(logset_ddms_v2, "Logset"),
(trajectory_ddms_v2, "Trajectory"),
(marker_ddms_v2, "Marker"),
(log_ddms_v2, "Log"),
(dipset_ddms_v2, "Dipset"),
(dip_ddms_v2, "Dips"),
(well_ddms_v2, "Well", Entity.WELL),
(wellbore_ddms_v2, "Wellbore", Entity.WELLBORE),
(logset_ddms_v2, "Logset", Entity.LOGSET),
(trajectory_ddms_v2, "Trajectory", Entity.TRAJECTORY),
(marker_ddms_v2, "Marker", Entity.MARKER),
(log_ddms_v2, "Log", Entity.LOG),
(dipset_ddms_v2, "Dipset", Entity.DIPSET),
(dip_ddms_v2, "Dips", Entity.DIP),
]
for v2_api, tag in ddms_v2_routes_groups:
for v2_api, tag, entity_type in ddms_v2_routes_groups:
wdms_app.include_router(v2_api.router,
prefix=DDMS_V2_PATH,
tags=[tag],
dependencies=basic_dependencies)
dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(entity_type, "V2"))])
ddms_v3_routes_groups = [
(wellbore_ddms_v3, "Wellbore"),
(well_ddms_v3, "Well"),
(welllog_ddms_v3, "WellLog"),
(wellbore_trajectory_ddms_v3, "Trajectory v3"),
(markerset_ddms_v3, "Marker"),
ddms_v3_routes_groups_without_bulk = [
(wellbore_ddms_v3, "Wellbore", Entity.WELLBORE),
(well_ddms_v3, "Well", Entity.WELL),
(markerset_ddms_v3, "Marker", Entity.MARKER)
]
ddms_v3_routes_groups_with_bulk = [
(welllog_ddms_v3, "WellLog", Entity.WELL_LOG),
(wellbore_trajectory_ddms_v3, "Trajectory v3", Entity.TRAJECTORY)
]
for v3_api, tag in ddms_v3_routes_groups:
for v3_api, tag, entity_type in ddms_v3_routes_groups_without_bulk:
wdms_app.include_router(v3_api.router,
prefix=DDMS_V3_PATH,
tags=[tag],
dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(entity_type, "V3"))])
v3_bulk_dependencies = [*basic_dependencies, Depends(set_v3_input_dataframe_check), Depends(set_osdu_bulk_id_access)]
for v3_api, tag, entity_type in ddms_v3_routes_groups_with_bulk:
wdms_app.include_router(v3_api.router,
prefix=DDMS_V3_PATH,
tags=[tag],
dependencies=basic_dependencies)
dependencies=[*v3_bulk_dependencies, Depends(make_entity_type_dependency(entity_type, "V3"))])
wdms_app.include_router(search.router, prefix='/ddms', tags=['search'], dependencies=basic_dependencies)
wdms_app.include_router(fast_search.router, prefix='/ddms', tags=['fast-search'], dependencies=basic_dependencies)
......@@ -206,12 +221,11 @@ wdms_app.include_router(fast_search.router, prefix='/ddms', tags=['fast-search']
wdms_app.include_router(search_v3.router, prefix=DDMS_V3_PATH, tags=['search v3'], dependencies=basic_dependencies)
wdms_app.include_router(fast_search_v3.router, prefix=DDMS_V3_PATH, tags=['fast-search v3'],
dependencies=basic_dependencies)
wdms_app.include_router(search_v3_alpha.router, prefix=ALPHA_APIS_PREFIX + DDMS_V3_PATH, tags=['ALPHA feature: search v3'],
wdms_app.include_router(search_v3_alpha.router, prefix=ALPHA_APIS_PREFIX + DDMS_V3_PATH,
tags=['ALPHA feature: search v3'],
dependencies=basic_dependencies)
alpha_tags = ['ALPHA feature: bulk data chunking']
v3_bulk_dependencies = [*basic_dependencies, Depends(set_v3_input_dataframe_check), Depends(set_osdu_bulk_id_access)]
for bulk_prefix, bulk_tags, is_visible in [(ALPHA_APIS_PREFIX + DDMS_V3_PATH, alpha_tags, False),
(DDMS_V3_PATH, [], True)
......@@ -221,14 +235,14 @@ for bulk_prefix, bulk_tags, is_visible in [(ALPHA_APIS_PREFIX + DDMS_V3_PATH, al
sessions.router,
prefix=bulk_prefix + welllog_ddms_v3.WELL_LOGS_API_BASE_PATH,
tags=bulk_tags if bulk_tags else ["WellLog"],
dependencies=basic_dependencies,
dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(Entity.WELL_LOG, "V3"))],
include_in_schema=is_visible)
wdms_app.include_router(
bulk_routes.router,
prefix=bulk_prefix + welllog_ddms_v3.WELL_LOGS_API_BASE_PATH,
tags=bulk_tags if bulk_tags else ["WellLog"],
dependencies=v3_bulk_dependencies,
dependencies=[*v3_bulk_dependencies, Depends(make_entity_type_dependency(Entity.WELL_LOG, "V3"))],
include_in_schema=is_visible)
# wellbore trajectory bulk v3 APIs
......@@ -236,14 +250,14 @@ for bulk_prefix, bulk_tags, is_visible in [(ALPHA_APIS_PREFIX + DDMS_V3_PATH, al
sessions.router,
prefix=bulk_prefix + wellbore_trajectory_ddms_v3.WELLBORE_TRAJECTORIES_API_BASE_PATH,
tags=bulk_tags if bulk_tags else ["Trajectory v3"],
dependencies=basic_dependencies,
dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(Entity.TRAJECTORY, "V3"))],
include_in_schema=is_visible)
wdms_app.include_router(
bulk_routes.router,
prefix=bulk_prefix + wellbore_trajectory_ddms_v3.WELLBORE_TRAJECTORIES_API_BASE_PATH,
tags=bulk_tags if bulk_tags else ["Trajectory v3"],
dependencies=v3_bulk_dependencies,
dependencies=[*v3_bulk_dependencies, Depends(make_entity_type_dependency(Entity.TRAJECTORY, "V3"))],
include_in_schema=is_visible)
# log bulk v2 APIs
......@@ -251,25 +265,17 @@ wdms_app.include_router(
sessions.router,
prefix=ALPHA_APIS_PREFIX + DDMS_V2_PATH + log_ddms_v2.LOGS_API_BASE_PATH,
tags=alpha_tags,
dependencies=basic_dependencies)
dependencies=[*basic_dependencies, Depends(make_entity_type_dependency(Entity.LOG, "V2"))])
wdms_app.include_router(
bulk_routes.router,
prefix=ALPHA_APIS_PREFIX + DDMS_V2_PATH + log_ddms_v2.LOGS_API_BASE_PATH,
tags=alpha_tags,
dependencies=[*basic_dependencies, Depends(set_legacy_input_dataframe_check), Depends(set_log_bulk_id_access)])
dependencies=[*basic_dependencies, Depends(set_legacy_input_dataframe_check), Depends(set_log_bulk_id_access), Depends(make_entity_type_dependency(Entity.LOG, "V2"))])
# The multiple instantiations of the bulk_utils router create duplicate operation_ids
update_operation_ids(wdms_app)
# ------------- add alpha feature: ONLY MOUNTED IN DEV AND DA ENVs
def enable_alpha_feature():
""" must be called to enable and activate alpha feature"""
# logger.get_logger().warning("Enabling alpha feature")
# include alpha routers down below #
pass
# order is last executed first
wdms_app.add_middleware(TracingMiddleware)
wdms_app.add_middleware(CreateBasicContextMiddleware, injector=app_injector)
......
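The wiring above is the heart of the entity-kind enforcement: each route-group tuple now carries an `Entity` value, the V3 routes are split into with-bulk and without-bulk groups so the bulk dependencies (`set_v3_input_dataframe_check`, `set_osdu_bulk_id_access`) apply only to data endpoints, and every group gets `make_entity_type_dependency(entity_type, version)` so handlers can read the entity type and API version back from `request.state`. A condensed, runnable sketch of the pattern with a stand-in `Entity` enum:

```python
# Condensed sketch of the make_entity_type_dependency pattern used above.
from enum import Enum

from fastapi import APIRouter, Depends, FastAPI, Request


class Entity(Enum):  # stand-in for app.model.entity_utils.Entity
    WELLBORE = 'wellbore'


def make_entity_type_dependency(entity_type: Entity, version: str):
    def _set_entity_type(request: Request):
        request.state.entity_type = entity_type
        request.state.version = version
    return _set_entity_type


app = FastAPI()
router = APIRouter()


@router.get('/wellbores/{record_id}')
async def get_wellbore(record_id: str, request: Request):
    # Handlers (and guards such as raise_if_not_osdu_right_entity_kind)
    # can branch on what the dependency stamped onto the request.
    return {'entity': request.state.entity_type.value, 'version': request.state.version}


app.include_router(
    router,
    prefix='/ddms/v3',
    dependencies=[Depends(make_entity_type_dependency(Entity.WELLBORE, 'V3'))],
)
```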
......@@ -12,7 +12,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
FROM python:3.7-slim-buster
# Using Debian 11 - bullseye, slim version as base OS
FROM python:3.7-slim-bullseye
COPY requirements.txt ./
......
......@@ -23,6 +23,7 @@ parameters:
centralAzureSubscription: '' #Subscription to a central ACR from which to pull the container
sourceACRName: ''
imageTag: ''
appName: 'os-wellbore-ddms'
steps:
- task: AzureCLI@2
......@@ -40,5 +41,5 @@ steps:
echo "az acr login -n ${{ parameters.sourceACRName }}"
az acr login -n ${{ parameters.sourceACRName }}
echo "docker pull ${{ parameters.sourceACRName}}.azurecr.io/os-wellbore-ddms:${{ parameters.imageTag }}"
docker pull ${{ parameters.sourceACRName}}.azurecr.io/os-wellbore-ddms:${{ parameters.imageTag }}
echo "docker pull ${{ parameters.sourceACRName}}.azurecr.io/${{ parameters.appName }}:${{ parameters.imageTag }}"
docker pull ${{ parameters.sourceACRName}}.azurecr.io/${{ parameters.appName }}:${{ parameters.imageTag }}
......@@ -28,11 +28,14 @@ parameters:
type: string
- name: acr_names
type: object #List of string
- name: app_name
type: string
default: 'os-wellbore-ddms'
steps:
- ${{ each acr_name in parameters.acr_names }}:
- task: AzureCLI@2
displayName: 'push image to acr "${{ acr_name }}""'
displayName: 'push to "${{ acr_name }}"'
inputs:
azureSubscription: ${{ parameters.azureSubscription }}
scriptType: bash
......@@ -40,5 +43,5 @@ steps:
inlineScript: |
set -e
az acr login --name ${{ acr_name }}
docker tag wdms-osdu:$(tag_name) ${{ acr_name }}.azurecr.io/os-wellbore-ddms:$(tag_name)
docker push ${{ acr_name }}.azurecr.io/os-wellbore-ddms:$(tag_name)
\ No newline at end of file
docker tag $(app):$(tag_name) ${{ acr_name }}.azurecr.io/${{ parameters.app_name }}:$(tag_name)
docker push ${{ acr_name }}.azurecr.io/${{ parameters.app_name }}:$(tag_name)
......@@ -62,7 +62,7 @@ steps:
done
- task: Bash@3
displayName: 'build image'
displayName: 'build $(app) image'
inputs:
targetType: 'inline'
script: |
......@@ -70,8 +70,8 @@ steps:
echo $current_utc_date
echo ----- BUILD IMAGE ------------
docker build -t=wdms-osdu:$(tag_name) \
-t=wdms-osdu:latest \
docker build -t=$(app):$(tag_name) \
-t=$(app):latest \
--rm . -f ./build/Dockerfile \
--build-arg PIP_WHEEL_DIR=python-packages \
--build-arg build_date="$current_utc_date" \
......
......@@ -87,7 +87,6 @@ steps:
set -e
insecure_flag=""
test_alpha_feature_flag=""
checkCert="True"
# Depending on the value of 'skipCertValidation' we add the '--insecure' option to newman
......@@ -99,21 +98,12 @@ steps:
insecure_flag="--insecure"
checkCert=""
# in that case it means dev env, in that case enable test on alpha feature
echo "skipCertValidation case: adding --test-alpha-feature flag to e2e tests to enable test on alpha features."
test_alpha_feature_flag="--test-alpha-feature"
fi
# Depending on the value of environmentName, activate test on alpha feature
envName="${{ parameters.environmentName }}"
if [ "$envName" = "evd" ] || [ "$envName" = "dev" ] || [ "$envName" = "qa" ];
then
echo "Env = $envName: adding --test-alpha-feature flag to e2e tests to enable test on alpha features."
test_alpha_feature_flag="--test-alpha-feature"
fi
echo "##vso[task.setvariable variable=insecure_flag]${insecure_flag}"
echo "##vso[task.setvariable variable=test_alpha_feature_flag]${test_alpha_feature_flag}"
echo "##vso[task.setvariable variable=checkCert]${checkCert}"
displayName: 'Create test variables'
......@@ -152,7 +142,6 @@ steps:
-o junit_suite_name="wdms_integration_$(testReportBaseName)" --environment=generated/postman_environment.json \
--retry-on-error=502 \
$(insecure_flag) \
$(test_alpha_feature_flag) \
--param=ENVIRONMENT_NAME:$ENVIRONMENT_NAME
deactivate
displayName: 'RUN INTEGRATION-E2E'
......
......@@ -20,7 +20,7 @@ opencensus-ext-ocagent
opencensus-ext-logging
# for chunking feature
dask[distributed]==2021.7.2
dask[distributed]==2021.10.0
fsspec
python-ulid
s3fs
......
......@@ -120,7 +120,7 @@ cryptography==3.4.7
# osdu-core-lib-python
# osdu-core-lib-python-gcp
# pyjwt
dask[distributed]==2021.7.2
dask[distributed]==2021.10.0
# via
# -r requirements.in
# distributed
......@@ -128,7 +128,7 @@ decorator==5.0.9
# via
# gcsfs
# jsonpath-ng
distributed==2021.07.2
distributed==2021.10.0
# via dask
fastapi==0.68.1
# via
......@@ -198,6 +198,8 @@ importlib-metadata==4.8.1
# jsonschema
isodate==0.6.0
# via msrest
jinja2==3.0.3
# via distributed
jmespath==0.10.0
# via
# boto3
......@@ -210,6 +212,8 @@ jsonschema==3.2.0
# via osdu-api
locket==0.2.1
# via partd
markupsafe==2.0.1
# via jinja2
msal==1.12.0
# via
# azure-identity
......
......@@ -14,6 +14,18 @@ The `spec` directory contains the OpenAPI specification files for Wellbore DMS.
Under `spec/generated`, the OpenAPI in JSON format is saved as-is.
## Generating a partial OpenAPI for documentation purposes
The OpenAPI specification in the `spec/generated` folder is re-created on every API change as part of the spec unit tests.
These unit tests can also be used to generate a partial OpenAPI spec by setting the `OPENAPI_FILTER_PREFIX` and `OPENAPI_FILTER_TAGS` environment variables.
Example:
```sh
export OPENAPI_FILTER_PREFIX='/ddms/v3'
export OPENAPI_FILTER_TAGS='Wellbore,WellLog'
python -m pytest ./tests/unit/spec/
```
## Publishing to community documentation repo
A sanitized version of the OpenAPI specification, including only the OSDU v3 APIs, is published
in the [platform/api/Wellbore-DDMS](https://community.opengroup.org/osdu/documentation/-/tree/master/platform/api/Wellbore-DDMS) folder of the community documentation repository.
......
This diff is collapsed.
......@@ -33,7 +33,6 @@ from .tests.fixtures import WDMS_Variables
FILTER_IN_TAGS = set()
FILTER_OUT_TAGS = set()
TEST_ALPHA_FEATURE = False
def pytest_addoption(parser):
......@@ -53,9 +52,6 @@ def pytest_addoption(parser):
parser.addoption('--insecure', action='store_true',
help='Disables SSL validations')
parser.addoption('--test-alpha-feature', action='store_true',
help='Enable tests tagged with "alpha-feature"')
parser.addoption(
'--retry-on-error', default='',
help='retry up to 4 times on the specific error code (>=500). Separate multiple code by "|"')
......@@ -127,7 +123,6 @@ def pytest_configure(config):
set_environment_from_config(config, WDMS_Variables)
config.addinivalue_line("markers", "tag: add tags to a test to extend filtering capability")
config.addinivalue_line("markers", "alpha_feature: mark a test against an alpha feature")
if CmdLineSpecialVar.get_disable_ssl_validation(WDMS_Variables):
# filter warnings when disabling ssl validation in order to not be spammed by warnings and still spot the real ones
......@@ -145,14 +140,8 @@ def pytest_configure(config):
else:
FILTER_IN_TAGS.add(tag.lower())
global TEST_ALPHA_FEATURE
TEST_ALPHA_FEATURE = config.getoption('test_alpha_feature', default=False)
def pytest_runtest_setup(item):
if list(item.iter_markers(name="alpha_feature")) and not TEST_ALPHA_FEATURE:
pytest.skip('test on alpha feature disabled')
if FILTER_IN_TAGS or FILTER_OUT_TAGS:
item_tags = set()
for mark in item.iter_markers(name="tag"):
......
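With the alpha-feature switch removed, test selection in this conftest rests solely on the `tag` marker and the `FILTER_IN_TAGS` / `FILTER_OUT_TAGS` sets. The remaining filter body is elided from the hunk above; the sketch below shows the general shape of such a `pytest_runtest_setup` tag filter as an assumption, not the repository's exact code:

```python
# Assumed shape of the remaining tag filter; the actual body is elided above.
import pytest

FILTER_IN_TAGS = set()   # populated from command-line options in pytest_configure
FILTER_OUT_TAGS = set()


def pytest_runtest_setup(item):
    if FILTER_IN_TAGS or FILTER_OUT_TAGS:
        item_tags = set()
        for mark in item.iter_markers(name="tag"):
            item_tags.update(tag.lower() for tag in mark.args)
        if item_tags & FILTER_OUT_TAGS:
            pytest.skip('test filtered out by tag')
        if FILTER_IN_TAGS and not item_tags & FILTER_IN_TAGS:
            pytest.skip('test does not match any selected tag')
```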
# Copyright 2021 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from wdms_client.request_builders.wdms.crud.osdu_wellboretrajectory import build_request_create_osdu_wellboretrajectory
from wdms_client.request_builders.wdms.crud.osdu_welllog import build_request_create_osdu_welllog
from .fixtures import with_wdms_env
from .test_chunking import ParquetSerializer, JsonSerializer, generate_df, build_request
from wdms_client.request_builders.wdms.delete import build_request_delete_purge_record, build_request_get_record
from wdms_client.request_runner import RequestRunner
entity_type_dict = {
"welllog": "welllogs",
"wellboretrajectory": "wellboretrajectories"
}
def build_request_post_data(entity_type: str, record_id: str, payload) -> RequestRunner:
url = '{{base_url}}/ddms/v3/' + entity_type_dict[entity_type] + f'/{record_id}/data'
return build_request(f'{entity_type} post data', 'POST', url, payload=payload)
def create_record_with_data(with_wdms_env, entity_type, serializer, nb_version):
if entity_type == 'welllog':
result = build_request_create_osdu_welllog(False).call(with_wdms_env)
elif entity_type == 'wellboretrajectory':
result = build_request_create_osdu_wellboretrajectory(False).call(with_wdms_env)
resobj = result.get_response_obj()
data = generate_df(['MD', 'X'], range(8))
data_to_send = serializer.dump(data)
headers = {'Content-Type': serializer.mime_type, 'Accept': serializer.mime_type}
# DATA
for i in range(nb_version):
build_request_post_data(entity_type, resobj.recordIds[0], data_to_send).call(with_wdms_env,
headers=headers).assert_ok()
assert resobj.recordCount == 1
assert len(resobj.recordIds) == 1
with_wdms_env.set(f'osdu_{entity_type}_record_id',
resobj.recordIds[0]) # stored the record id for the following tests
@pytest.mark.tag('basic', 'smoke')
@pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
@pytest.mark.parametrize('entity_type', [entity_type for entity_type in entity_type_dict.keys()])
def test_hard_delete_purge_record(with_wdms_env, entity_type, serializer):
create_record_with_data(with_wdms_env, entity_type, serializer, 20)
record_id = with_wdms_env.get(f'osdu_{entity_type}_record_id')
purge = 'true'
v3_entity = entity_type_dict[entity_type]
result = build_request_delete_purge_record(record_id, v3_entity, purge).call(with_wdms_env)
result.assert_status_code(204)
base_url_v3_record = '{{base_url}}/ddms/v3/' + entity_type_dict[entity_type]
result = build_request_get_record(base_url_v3_record, record_id).call(with_wdms_env)
result.assert_status_code(404)
@pytest.mark.tag('basic', 'smoke')
@pytest.mark.parametrize('serializer', [ParquetSerializer(), JsonSerializer()])
@pytest.mark.parametrize('entity_type', [entity_type for entity_type in entity_type_dict.keys()])
def test_soft_delete_purge_record(with_wdms_env, entity_type, serializer):
create_record_with_data(with_wdms_env, entity_type, serializer, 20)
record_id = with_wdms_env.get(f'osdu_{entity_type}_record_id')
purge = 'false'
v3_entity = entity_type_dict[entity_type]
result = build_request_delete_purge_record(record_id, v3_entity, purge).call(with_wdms_env)
result.assert_status_code(204)
base_url_v3_record = '{{base_url}}/ddms/v3/' + entity_type_dict[entity_type]
result = build_request_get_record(base_url_v3_record, record_id).call(with_wdms_env)
result.assert_status_code(404)
......@@ -13,24 +13,9 @@
# limitations under the License.
import pytest
from app.bulk_persistence import BulkId
from app.bulk_persistence.bulk_id import new_bulk_id
import uuid
def test_bulk_id_is_an_uuid():
uuid.UUID(BulkId.new_bulk_id())
# urn decode test
def test_decode_urn_no_prefix():
uuid, prefix = BulkId.bulk_urn_decode("urn:uuid:489768d2-eee1-4a8f-ae95-7b0c30b0dcd8")
assert uuid == "489768d2-eee1-4a8f-ae95-7b0c30b0dcd8"
assert prefix is None
def test_decode_urn_with_prefix():
uuid, prefix = BulkId.bulk_urn_decode("urn:myprefix:uuid:489768d2-eee1-4a8f-ae95-7b0c30b0dcd8")
assert uuid == "489768d2-eee1-4a8f-ae95-7b0c30b0dcd8"
assert prefix == 'myprefix'
def test_decode_urn_none():
with pytest.raises(ValueError):
uuid, prefix = BulkId.bulk_urn_decode(None)
uuid.UUID(new_bulk_id())
# Copyright 2021 Schlumberger
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from app.bulk_persistence.bulk_uri import BulkURI
from app.bulk_persistence.bulk_storage_version import (
BulkStorageVersion_V0, BulkStorageVersion_V1, BulkStorageVersion_Invalid)
# urn decode test
def test_from_uri_without_prefix():
uri_str = 'urn:uuid:489768d2-eee1-4a8f-ae95-7b0c30b0dcd8'
bulk_uri = BulkURI.decode(uri_str)
assert bulk_uri.bulk_id == '489768d2-eee1-4a8f-ae95-7b0c30b0dcd8'
assert bulk_uri.is_bulk_storage_V0()
assert bulk_uri.storage_version == BulkStorageVersion_V0
assert bulk_uri.storage_version.uri_prefix is None
assert bulk_uri.is_valid()
# should encode back to the same uri
assert bulk_uri.encode() == uri_str
def test_decode_urn_with_prefix():
uri_str = f'urn:{BulkStorageVersion_V1.uri_prefix}:uuid:489768d2-eee1-4a8f-ae95-7b0c30b0dcd8'
bulk_uri = BulkURI.decode(uri_str)
assert bulk_uri.bulk_id == '489768d2-eee1-4a8f-ae95-7b0c30b0dcd8'
assert not bulk_uri.is_bulk_storage_V0()
assert bulk_uri.storage_version == BulkStorageVersion_V1
assert bulk_uri.storage_version.uri_prefix == BulkStorageVersion_V1.uri_prefix
assert bulk_uri.is_valid()
# should encode back to the same uri
assert bulk_uri.encode() == uri_str
@pytest.mark.parametrize("bulk_id, version", [
('489768d2-eee1-4a8f-ae95-7b0c30b0dcd8', None),
('', BulkStorageVersion_Invalid),
('489768d2-eee1-4a8f-ae95-7b0c30b0dcd8', BulkStorageVersion_Invalid),
('', None),
('', BulkStorageVersion_V1),
])
def test_invalid_uri(bulk_id, version):
invalid_uri = BulkURI(bulk_id, version)
assert not invalid_uri.is_valid()
assert not invalid_uri.bulk_id
assert invalid_uri.storage_version == BulkStorageVersion_Invalid
# explicit encode raises
with pytest.raises(ValueError):
invalid_uri.encode()
def test_decode_urn_invalid_input_should_throw():
# bad formed urn format
with pytest.raises(ValueError):
BulkURI.decode('invalid_urn_uri')
# bulk_id not a valid UUID
with pytest.raises(ValueError):
BulkURI.decode('urn:uuid:invalid_uuid')
# unknown prefix
with pytest.raises(ValueError):
BulkURI.decode('urn:UNKNOWN_PREFIX:uuid:489768d2-eee1-4a8f-ae95-7b0c30b0dcd8')
......@@ -123,7 +123,7 @@ async def test_get_df_from_request_json(basic_dataframe):
request = RequestMock({"Content-Type": "application/json"},
basic_dataframe.to_json(orient='split'))
actual_df = await get_df_from_request(request, orient='split')
actual_df = await get_df_from_request(request)
assert_frame_equal(basic_dataframe, actual_df)
......@@ -136,6 +136,6 @@ async def test_get_df_from_request_json(basic_dataframe):
async def test_get_df_from_request_invalid_raise(content_type, status):
request = RequestMock({"Content-Type": content_type}, b'some invalid data')
with pytest.raises(HTTPException) as ex_info:
await get_df_from_request(request, orient='split')
await get_df_from_request(request)
exception = ex_info.value
assert exception.status_code == status
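These last two hunks drop the `orient` argument from `get_df_from_request`, suggesting the function now resolves the parse mode internally from the request's `Content-Type` (with the orient fixed or negotiated for JSON). A hypothetical sketch of such a content-type-dispatching reader, with the status codes and the `'split'` default purely as assumptions:

```python
# Hypothetical sketch of a content-type-driven get_df_from_request;
# only the signature change is visible in the tests above, the body is assumed.
import io

import pandas as pd
from fastapi import HTTPException, Request, status


async def get_df_from_request(request: Request) -> pd.DataFrame:
    content_type = request.headers.get('Content-Type', '')
    body = await request.body()
    try:
        if content_type == 'application/json':
            return pd.read_json(io.BytesIO(body), orient='split')
        if content_type == 'application/x-parquet':
            return pd.read_parquet(io.BytesIO(body))
    except Exception as ex:
        raise HTTPException(status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
                            detail='invalid input data') from ex
    raise HTTPException(status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
                        detail=f'unsupported content type: {content_type}')
```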