diff --git a/.dockerignore b/.dockerignore new file mode 100644 index 0000000000000000000000000000000000000000..e33a093ab76be7aab8e9749455c6dbbbdd4db8cc --- /dev/null +++ b/.dockerignore @@ -0,0 +1,2 @@ +media +docs diff --git a/.fossa.yml b/.fossa.yml new file mode 100644 index 0000000000000000000000000000000000000000..f751c7f092b9e2fed08dd4660d5e33d437d91a51 --- /dev/null +++ b/.fossa.yml @@ -0,0 +1,28 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by FOSSA CLI (https://github.com/fossas/fossa-cli) +# Visit https://fossa.com to learn more + +version: 2 +cli: + server: https://app.fossa.com + fetcher: custom + project: Wellbore Domain Services +analyze: + modules: + - name: . + type: pip + target: . + path: . diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000000000000000000000000000000000000..88a236c9cd0dd744104e55d87ebe72abfcbac4b2 --- /dev/null +++ b/.gitignore @@ -0,0 +1,43 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# IDE +.idea/ + +# Distribution / packaging +.Python +develop-eggs/ +dist/ +eggs/ +.eggs/ +sdist/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +python-packages + + +# Unit test / coverage reports +.pytest_cache/ +htmlcov +.coverage* +coverage.xml +unit_tests_report.xml + +# Environments +.env +.venv* +env/ +venv/ +venv*/ +ENV/ +env.bak/ +venv.bak/ +.envs/ +secrets/ + +**/.DS_Store + diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml new file mode 100644 index 0000000000000000000000000000000000000000..99ba9363c44b227c33fca8243341388701c6679c --- /dev/null +++ b/.gitlab-ci.yml @@ -0,0 +1,89 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +variables: + PIP_REQUIREMENTS: "requirements.txt requirements_dev.txt" + + OSDU_GCP_APPLICATION_NAME: wellbore-ddms + OSDU_GCP_SERVICE: wellbore-ddms + OSDU_GCP_ENV_VARS: CLOUD_PROVIDER=gcp,OS_WELLBORE_DDMS_DATA_PROJECT_ID=$OSDU_GCP_PROJECT,SERVICE_HOST_ENTITLEMENTS=$OSDU_GCP_ENTITLEMENTS_URL,SERVICE_HOST_SEARCH=$OSDU_GCP_SEARCH_HOST,SERVICE_HOST_STORAGE=$OSDU_GCP_STORAGE_URL + OSDU_GCP_CLOUD_RUN_PARAMETERS: "--port 8097" + OSDU_GCP_VENDOR: gcp + +include: + - project: "osdu/platform/ci-cd-pipelines" + file: "standard-setup.yml" + + - project: "osdu/platform/ci-cd-pipelines" + file: "build/python.yml" + + - project: "osdu/platform/ci-cd-pipelines" + file: "scanners/fossa-python.yml" + + - project: "osdu/platform/ci-cd-pipelines" + file: "scanners/gitlab-ultimate.yml" + + - project: 'osdu/platform/ci-cd-pipelines' + file: 'cloud-providers/osdu-gcp-cloudrun.yml' +# -------------------------------------------------------------------------------- + +containerize: + extends: + - .skipForTriggeringMergeRequests + + stage: containerize + image: docker:19.03 + tags: ['osdu-medium'] + variables: + IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA + + script: + - echo ---- ---- ---- SYSTEM DEPENDENCIES ---- ---- ---- + - apk update + - apk add git + - echo ---- ---- ---- BUILD IMAGE ---- ---- ---- + - commit_id=$(git log -n 1 --pretty="%h") + - echo ---- ---- TAG NAME + - tag_name="_gitlab_$commit_id" + - echo $tag_name + - echo ---- ---- DATE + - current_utc_date=`date --utc` + - echo $current_utc_date + - echo ---- ---- COMMIT BRANCH + - commit_branch=$commit_id + - echo $commit_branch + - echo ---- ---- BUILD IMAGE + - docker build -t $IMAGE_TAG -t=$CI_REGISTRY_IMAGE:latest --rm . -f ./build/Dockerfile --build-arg PIP_WHEEL_DIR=python-packages --build-arg build_date="$current_utc_date" --build-arg build_number=$commit_id --build-arg commit_id=$commit_id --build-arg build_origin="Gitlab" --build-arg commit_branch=$commit_branch + - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY + - echo ---- ---- PUSH IMAGE + - docker push $IMAGE_TAG + - docker push $CI_REGISTRY_IMAGE:latest + +osdu-gcp-containerize: + script: + - gcloud auth activate-service-account --key-file $OSDU_GCP_DEPLOY_FILE + - gcloud config set project $OSDU_GCP_PROJECT + - touch .gcloudignore + - docker build -t gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_APPLICATION_NAME-$OSDU_GCP_VENDOR:$CI_COMMIT_SHORT_SHA --rm . -f ./build/Dockerfile + - gcloud docker -- push gcr.io/$OSDU_GCP_PROJECT/$OSDU_GCP_APPLICATION_NAME/$OSDU_GCP_APPLICATION_NAME-$OSDU_GCP_VENDOR:$CI_COMMIT_SHORT_SHA + cache: { } + tags: [ 'osdu-medium' ] + +osdu-gcp-test-python: + allow_failure: true + script: + - echo "STUB"; exit 1 + +osdu-gcp-test: + extends: [] diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000000000000000000000000000000000000..615aafb035a1f363f614a893635f9d40d63ba911 --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,3 @@ +{ + "python.pythonPath": "/usr/bin/python3" +} \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..ffb98ce823eca42f9966512b0601c0bf2dda9d37 --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Open Subsurface Data Universe Software / Platform / Domain Data Mgmt Services / Wellbore + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/LICENSE.TXT b/LICENSE.TXT new file mode 100644 index 0000000000000000000000000000000000000000..4947287f7b5ccb5d1e8b7b2d3aa5d89f322c160d --- /dev/null +++ b/LICENSE.TXT @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. 
Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS \ No newline at end of file diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000000000000000000000000000000000..04ca2ccdfb3bab710a3d4567d6ed293b79cbeea3 --- /dev/null +++ b/NOTICE @@ -0,0 +1,267 @@ +# 3rd-Party Software License Notice +Generated by fossa-cli (https://github.com/fossas/fossa-cli). 
+This software includes the following software and licenses: + +======================================================================== +Apache-2.0 +======================================================================== +The following software have components provided under the terms of this license: + +- aiohttp (from https://github.com/aio-libs/aiohttp/) +- async-timeout (from https://github.com/aio-libs/async_timeout/) +- boto3 (from https://github.com/boto/boto3) +- botocore (from https://github.com/boto/botocore) +- coverage (from https://coverage.readthedocs.io) +- cryptography (from https://github.com/pyca/cryptography) +- google-api-core (from https://github.com/GoogleCloudPlatform/google-cloud-python) +- google-auth (from https://github.com/GoogleCloudPlatform/google-auth-library-python) +- google-cloud-core (from https://github.com/GoogleCloudPlatform/google-cloud-python) +- google-cloud-monitoring (from https://github.com/GoogleCloudPlatform/google-cloud-python) +- google-cloud-trace (from https://github.com/googleapis/googleapis) +- googleapis-common-protos (from https://github.com/googleapis/googleapis) +- grpcio (from http://www.grpc.io) +- importlib-metadata (from http://importlib-metadata.readthedocs.io/) +- jsonpath-ng (from https://github.com/h2non/jsonpath-ng) +- multidict (from https://github.com/aio-libs/multidict/) +- opencensus (from https://github.com/census-instrumentation/opencensus-python) +- opencensus-context (from https://github.com/census-instrumentation/opencensus-python/tree/master/context/opencensus-context) +- opencensus-ext-azure (from ) +- opencensus-ext-logging (from ) +- opencensus-ext-ocagent (from https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-ocagent) +- opencensus-ext-stackdriver (from https://github.com/census-instrumentation/opencensus-python/tree/master/contrib/opencensus-ext-stackdriver) +- opencensus-proto (from https://github.com/census-instrumentation/opencensus-proto/tree/master/gen-python) +- packaging (from https://github.com/pypa/packaging) +- pyarrow (from https://arrow.apache.org/) +- pytest-asyncio (from https://github.com/pytest-dev/pytest-asyncio) +- pytest-dependency (from ) +- python-dateutil (from https://dateutil.readthedocs.org) +- python-multipart (from http://github.com/andrew-d/python-multipart) +- requests (from http://python-requests.org) +- rfc3986 (from https://rfc3986.readthedocs.org) +- rsa (from https://stuvel.eu/rsa) +- s3transfer (from https://github.com/boto/s3transfer) +- sniffio (from https://github.com/python-trio/sniffio) +- structlog (from http://www.structlog.org/) +- yarl (from https://github.com/aio-libs/yarl/) + +======================================================================== +BSD-2-Clause +======================================================================== +The following software have components provided under the terms of this license: + +- mock (from https://github.com/testing-cabal/mock) +- packaging (from https://github.com/pypa/packaging) +- ply (from http://www.dabeaz.com/ply/) +- pyasn1 (from http://sourceforge.net/projects/pyasn1/) +- pyasn1-modules (from http://sourceforge.net/projects/pyasn1/) +- pycparser (from https://github.com/eliben/pycparser) + +======================================================================== +BSD-3-Clause +======================================================================== +The following software have components provided under the terms of this license: + +- click (from 
http://github.com/mitsuhiko/click) +- cryptography (from https://github.com/pyca/cryptography) +- decorator (from https://github.com/micheles/decorator) +- hiredis (from https://github.com/redis/hiredis-py) +- httpcore (from https://github.com/encode/httpcore) +- httpx (from https://github.com/encode/httpx) +- idna (from https://github.com/kjd/idna) +- isodate (from http://cheeseshop.python.org/pypi/isodate) +- mock (from https://github.com/testing-cabal/mock) +- numpy (from http://www.numpy.org) +- oauthlib (from https://github.com/idan/oauthlib) +- packaging (from https://github.com/pypa/packaging) +- pandas (from http://pandas.pydata.org) +- ply (from http://www.dabeaz.com/ply/) +- protobuf (from https://developers.google.com/protocol-buffers/) +- psutil (from https://github.com/giampaolo/psutil) +- pyarrow (from https://arrow.apache.org/) +- pyasn1 (from http://sourceforge.net/projects/pyasn1/) +- pyasn1-modules (from http://sourceforge.net/projects/pyasn1/) +- pycparser (from https://github.com/eliben/pycparser) +- pyrsistent (from http://github.com/tobgu/pyrsistent/) +- pytest-cov (from https://github.com/pytest-dev/pytest-cov) +- python-dateutil (from https://dateutil.readthedocs.org) +- python-rapidjson (from https://github.com/python-rapidjson/python-rapidjson) +- requests-oauthlib (from https://github.com/requests/requests-oauthlib) +- starlette (from https://github.com/encode/starlette) +- uvicorn (from https://github.com/tomchristie/uvicorn) + +======================================================================== +CC0-1.0 +======================================================================== +The following software have components provided under the terms of this license: + +- coverage (from https://coverage.readthedocs.io) + +======================================================================== +CNRI-Python +======================================================================== +The following software have components provided under the terms of this license: + +- isodate (from http://cheeseshop.python.org/pypi/isodate) +- ply (from http://www.dabeaz.com/ply/) + +======================================================================== +GPL-2.0-only +======================================================================== +The following software have components provided under the terms of this license: + +- coverage (from https://coverage.readthedocs.io) + +======================================================================== +GPL-3.0-only +======================================================================== +The following software have components provided under the terms of this license: + +- coverage (from https://coverage.readthedocs.io) +- numpy (from http://www.numpy.org) +- pyparsing (from http://pyparsing.wikispaces.com/) +- rfc3986 (from https://rfc3986.readthedocs.org) + +======================================================================== +GPL-3.0-with-GCC-exception +======================================================================== +The following software have components provided under the terms of this license: + +- numpy (from http://www.numpy.org) + +======================================================================== +ISC +======================================================================== +The following software have components provided under the terms of this license: + +- requests-oauthlib (from https://github.com/requests/requests-oauthlib) + +======================================================================== +JSON 
+======================================================================== +The following software have components provided under the terms of this license: + +- python-rapidjson (from https://github.com/python-rapidjson/python-rapidjson) + +======================================================================== +LGPL-2.1-only +======================================================================== +The following software have components provided under the terms of this license: + +- chardet (from https://github.com/chardet/chardet) + +======================================================================== +LGPL-2.1-or-later +======================================================================== +The following software have components provided under the terms of this license: + +- chardet (from https://github.com/chardet/chardet) + +======================================================================== +LGPL-3.0-only +======================================================================== +The following software have components provided under the terms of this license: + +- chardet (from https://github.com/chardet/chardet) +- pycparser (from https://github.com/eliben/pycparser) + +======================================================================== +MIT +======================================================================== +The following software have components provided under the terms of this license: + +- PyJWT (from http://github.com/jpadilla/pyjwt) +- aiohttp (from https://github.com/aio-libs/aiohttp/) +- aioredis (from https://github.com/aio-libs/aioredis) +- attrs (from https://attrs.readthedocs.io/) +- azure-common (from https://github.com/Azure/azure-sdk-for-python) +- azure-core (from https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/core/azure-core) +- azure-identity (from https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/identity/azure-identity) +- azure-keyvault (from https://github.com/Azure/azure-sdk-for-python) +- azure-keyvault-certificates (from https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/keyvault/azure-keyvault-certificates) +- azure-keyvault-keys (from https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/keyvault/azure-keyvault-keys) +- azure-keyvault-secrets (from https://github.com/Azure/azure-sdk-for-python/tree/master/sdk/keyvault/azure-keyvault-secrets) +- azure-storage-blob (from https://github.com/Azure/azure-storage-python) +- botocore (from https://github.com/boto/botocore) +- cachetools (from https://github.com/tkem/cachetools) +- cffi (from http://cffi.readthedocs.org) +- coverage (from https://coverage.readthedocs.io) +- fastapi (from https://github.com/tiangolo/fastapi) +- h11 (from https://github.com/python-hyper/h11) +- iniconfig (from http://github.com/RonnyPfannschmidt/iniconfig) +- jmespath (from https://github.com/jmespath/jmespath.py) +- jsonschema (from http://github.com/Julian/jsonschema) +- mockito (from https://github.com/kaste/mockito-python) +- msal (from https://github.com/AzureAD/microsoft-authentication-library-for-python) +- msal-extensions (from https://pypi.org/project/msal-extensions/0.1.3/) +- msrest (from https://github.com/Azure/msrest-for-python) +- munch (from http://github.com/Infinidat/munch) +- pluggy (from https://github.com/pytest-dev/pluggy) +- py (from http://pylib.readthedocs.org/) +- pyarrow (from https://arrow.apache.org/) +- pydantic (from https://github.com/samuelcolvin/pydantic) +- pyparsing (from http://pyparsing.wikispaces.com/) +- pyrsistent (from 
http://github.com/tobgu/pyrsistent/) +- pytest (from http://pytest.org) +- pytest-cov (from https://github.com/pytest-dev/pytest-cov) +- pytest-httpx (from ) +- pytest-mock (from https://github.com/pytest-dev/pytest-mock/) +- python-rapidjson (from https://github.com/python-rapidjson/python-rapidjson) +- pytz (from http://pythonhosted.org/pytz) +- requests-oauthlib (from https://github.com/requests/requests-oauthlib) +- six (from http://pypi.python.org/pypi/six/) +- sniffio (from https://github.com/python-trio/sniffio) +- structlog (from http://www.structlog.org/) +- toml (from https://github.com/uiri/toml) +- urllib3 (from https://urllib3.readthedocs.io/) +- xmltodict (from https://github.com/martinblech/xmltodict) +- zipp (from https://github.com/jaraco/zipp) + +======================================================================== +MPL-2.0 +======================================================================== +The following software have components provided under the terms of this license: + +- certifi (from http://certifi.io/) + +======================================================================== +Python-2.0 +======================================================================== +The following software have components provided under the terms of this license: + +- async-timeout (from https://github.com/aio-libs/async_timeout/) +- coverage (from https://coverage.readthedocs.io) +- cryptography (from https://github.com/pyca/cryptography) +- google-auth (from https://github.com/GoogleCloudPlatform/google-auth-library-python) +- portalocker (from https://github.com/WoLpH/portalocker) +- python-dateutil (from https://dateutil.readthedocs.org) +- pytz (from http://pythonhosted.org/pytz) +- rsa (from https://stuvel.eu/rsa) +- sniffio (from https://github.com/python-trio/sniffio) +- typing-extensions (from https://github.com/python/typing) +- urllib3 (from https://urllib3.readthedocs.io/) + +======================================================================== +WTFPL +======================================================================== +The following software have components provided under the terms of this license: + +- jsonpath-ng (from https://github.com/h2non/jsonpath-ng) + +======================================================================== +ZPL-2.1 +======================================================================== +The following software have components provided under the terms of this license: + +- pytz (from http://pythonhosted.org/pytz) + +======================================================================== +public-domain +======================================================================== +The following software have components provided under the terms of this license: + +- botocore (from https://github.com/boto/botocore) +- coverage (from https://coverage.readthedocs.io) +- py (from http://pylib.readthedocs.org/) +- pytz (from http://pythonhosted.org/pytz) + + diff --git a/README.md b/README.md index 0ca446aab9d09eac8625b53e3df8da661976c458..383393148e95c02a4096448b8c41819607a51307 100644 --- a/README.md +++ b/README.md @@ -1,20 +1,347 @@ -# Introduction -TODO: Give a short introduction of your project. Let this section explain the objectives or the motivation behind this project. - -# Getting Started -TODO: Guide users through getting your code up and running on their own system. In this section you can talk about: -1. Installation process -2. Software dependencies -3. Latest releases -4. 
API references - -# Build and Test -TODO: Describe and show how to build your code and run the tests. - -# Contribute -TODO: Explain how other users and developers can contribute to make your code better. - -If you want to learn more about creating good readme files then refer the following [guidelines](https://docs.microsoft.com/en-us/azure/devops/repos/git/create-a-readme?view=azure-devops). You can also seek inspiration from the below readme files: -- [ASP.NET Core](https://github.com/aspnet/Home) -- [Visual Studio Code](https://github.com/Microsoft/vscode) -- [Chakra Core](https://github.com/Microsoft/ChakraCore) \ No newline at end of file +# Introduction + +Wellbore Data Management Services (WDMS) Open Subsurface Data Universe (OSDU) is one of the several backend services that comprise Schlumberger's Exploration and Production (E&P) software ecosystem. It is a single, containerized service written in Python that provides an API for wellbore related data. + +[[_TOC_]] + +## Install Software and Packages + +1. Clone the os-wellbore-ddms [repository](https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/wellbore/wellbore-domain-services.git) +2. Download [Python](https://www.python.org/downloads/) >=3.7 +3. Ensure pip, a pre-installed package manager and installer for Python, is installed and is upgraded to the latest version. + + ```bash + # Windows + python -m pip install --upgrade pip + python -m pip --version + + # macOS and Linux + python3 -m pip install --upgrade pip + python3 -m pip --version + ``` + +4. Using pip, download [FastAPI](https://fastapi.tiangolo.com/), the main framework to build the service APIs. To install fastapi and uvicorn (to work as the server), run the following command: + + ```bash + pip install fastapi[all] + ``` + +5. [venv](https://docs.python.org/3/library/venv.html) allows you to manage separate package installations for different projects. They essentially allow you to create a "virtual" isolated Python installation and packages into that virtual environment. venv is already included in the Python standard library and requires no additional installation. + +### Fast API Dependencies + +- [pydantic](https://pydantic-docs.helpmanual.io/): provides the ability to do data validation using python type annotations. It enforces type hints at runtime provide a more robust data validation option. + - [dataclasses](https://pydantic-docs.helpmanual.io/usage/dataclasses/): module in python which provides a decorator and functions for automatically adding generated special methods to user-defined classes. +- [starlette](https://fastapi.tiangolo.com/features/#starlette-features): lightweight ASGI framework. FastAPI is a sub-class of Starlette and includes features such as websocket support, startup and shutdown events, session and cookie support. 
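+
+As a generic illustration of the pydantic behaviour described above — type annotations validated (and coerced where possible) at runtime — here is a minimal sketch; the model and field names are made up for this example and are not taken from the service's own models:
+
+```python
+from pydantic import BaseModel, ValidationError
+
+
+class LogChannel(BaseModel):
+    # hypothetical fields, for illustration only
+    mnemonic: str
+    unit_key: str
+    sample_rate: float = 1.0  # optional field with a default
+
+
+# values are validated against the annotations when the model is created
+channel = LogChannel(mnemonic="GR", unit_key="gAPI", sample_rate="0.5")
+assert channel.sample_rate == 0.5  # "0.5" was coerced to a float
+
+try:
+    LogChannel(mnemonic="GR")  # missing required field -> ValidationError
+except ValidationError as err:
+    print(err)
+```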
+ +### Additional Dependencies + +- [uvicorn](https://www.uvicorn.org/) used as ASGI server to run WDMS app +- [cachetools](https://pypi.org/project/cachetools/) +- [pyjwt](https://pypi.org/project/PyJWT/) and [cryptography](https://pypi.org/project/cryptography/) for auth purposes +- [pandas](https://pandas.pydata.org/) and [numpy](https://numpy.org/) for data manipulation +- [pyarrow](https://pypi.org/project/pyarrow/) for load and save data into parquet format +- [opencensus](https://opencensus.io/guides/grpc/python/) for tracing and logging on cloud provider + +### Library Dependencies + +- Common parts and interfaces + - osdu-core-lib-python + +- Implementation of blob storage on GCP + - osdu-core-lib-python-gcp + +- Implementation of blob storage and partition service on Azure + - osdu-core-lib-python-azure + +- Client libraries for OSDU data ecosystem services + - osdu-data-ecosystem-entitlements + - osdu-data-ecosystem-search + - osdu-data-ecosystem-storage + +## Project Startup + +### Run the service locally + +1. Create virtual environment in the wellbore project directory. This will create a folder inside of the wellbore project directory. For example: ~/os-wellbore-ddms/nameofvirtualenv + + ```bash + # Windows + python -m venv env + + # macOS/Linux + python3 -m venv env + ``` + +2. Activate the virtual environment + + ```bash + # Windows + source env/Scripts/activate + + # macOS/Linux + source env/bin/activate + ``` + +3. Create pip.ini (Windows) or pip.conf (MacOS and Linux) file inside the `env` directory. This allows us to set a global index url which can download packages/libraries needed from the AzDO artifacts. There are several ways to add this extra index url: + + - It is also possible to use [--extra-index-url](https://pip.pypa.io/en/stable/reference/pip_install/#install-extra-index-url) parameter to specify it on the pip install cmd inline + +4. Install dependencies + + ```bash + pip install -r requirements.txt + ``` + +5. Run the service + + ```bash + # Run the service which will default to http://127.0.0.1:8080 + python main.py + + # Run on specific host, port and enforce dev mode + python main.py --host MY_HOST --port MY_PORT --dev_mode 1 + ``` + + If host is `127.0.0.1` or `localhost`, the dev_mode is automatically set to True. + The only significant change if dev_mode is on, is that configuration errors at startup are logged but don’t prevent the service to run, and allow to override some implementations. + +The hosts for the entitlements, search and storage services have to be provided as environment variables, or on the command line. + +```bash +python main.py -e SERVICE_HOST_ENTITLEMENTS https://api.example.com/entitlements -e SERVICE_HOST_STORAGE https://api.example.com/storage -e SERVICE_HOST_SEARCH https://api.example.com/search +``` + +### Connect and Run Endpoints + +1. Generate bearer token as all APIs but `/about` require authentication. + + - Navigate to `http://127.0.0.1:8080/api/os-wellbore-ddms/docs`. Click `Authorize` and enter your token. That will allow for authenticated requests. + + +2. Choose storage option + + Even if the service runs locally it still relies on osdu data ecosystem storage service `os-storage-dot-opendes.appspot.com/api/storage` to store documents and google blob store to store binary data (`bulk data`). 
It is possible to override this and use your local file system instead by setting the following environment variables: + + - `USE_INTERNAL_STORAGE_SERVICE_WITH_PATH` to store on a local folder instead of osdu ecosystem storage service. + - `USE_LOCALFS_BLOB_STORAGE_WITH_PATH` to store on a local folder instead of google blob storage. + + ```bash + # Create temp storage folders + mkdir tmpstorage + mkdir tmpblob + + # Set your repo path + path="C:/source" + + python main.py -e USE_INTERNAL_STORAGE_SERVICE_WITH_PATH $path/os-wellbore-ddms/tmpstorage -e USE_LOCALFS_BLOB_STORAGE_WITH_PATH $path/os-wellbore-ddms/tmpblob + ``` + +3. Choose Cloud Provider + + - The code can be run with specifying environment variables and by setting the cloud provider. The accepted values are `gcp`, `az` or `local`. When a cloud provider is passed as an environment variables, there are certain additional environment variables that become mandatory. + +### Setting the Cloud Provider Environment Variables + +- The following environment variables are required when the cloud provider is set to GCP: + - OS_WELLBORE_DDMS_DATA_PROJECT_ID: GCP Data Tenant ID + - OS_WELLBORE_DDMS_DATA_PROJECT_CREDENTIALS: path to the key file of the SA to access the data tenant + - SERVICE_HOST_ENTITLEMENTS: The Entitlements Service host + - SERVICE_HOST_SEARCH: The Search Service host + - SERVICE_HOST_STORAGE: The Storage Service host + + ```bash + python main.py -e CLOUD_PROVIDER gcp \ + -e OS_WELLBORE_DDMS_DATA_PROJECT_ID projectid \ + -e OS_WELLBORE_DDMS_DATA_PROJECT_CREDENTIALS pathtokeyfile \ + -e SERVICE_HOST_ENTITLEMENTS entitlement_host \ + -e SERVICE_HOST_SEARCH search_host \ + -e SERVICE_HOST_STORAGE storage_host + ``` + +- The following environment variables are required when the cloud provider is set to Azure: + - AZ_AI_INSTRUMENTATION_KEY: Azure Application Insights instrumentation key + - SERVICE_HOST_ENTITLEMENTS: The Entitlements Service host + - SERVICE_HOST_SEARCH: The Search Service host + - SERVICE_HOST_STORAGE: The Storage Service host + - SERVICE_HOST_PARTITION: The Partition Service internal host + - KEYVAULT_URL: The Key Vault url (needed by the Partition Service) + - USE_PARTITION_SERVICE: `enabled` when Partition Service is available in the environment. Needs to be `disabled` for `dev` or to run locally. + + ```bash + python main.py -e CLOUD_PROVIDER az \ + -e AZ_AI_INSTRUMENTATION_KEY instrumentationkey \ + -e SERVICE_HOST_ENTITLEMENTS entitlement_host \ + -e SERVICE_HOST_SEARCH search_host \ + -e SERVICE_HOST_STORAGE storage_host \ + -e SERVICE_HOST_PARTITION partition_host \ + -e KEYVAULT_URL keyvault_url \ + -e USE_PARTITION_SERVICE disabled + ``` + +Note: If you're running locally, you may need to provide environmental variables in your IDE. Here is a sample for providing a `.env` file. + +As default, all Core Services endpoint values are set to `None` in `app/conf.py`, you can update `.env` file for core services endpoints based on your cloud provider. + +### Create a log record + +To create a `log` record, below is a payload sample for the PUT `/ddms/v2/logs` API. The response will contain an id you can use on the `/ddms/v2/logs/{logid}/data` to create some bulk data. 
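+
+For illustration only, one of the sample payloads below could be submitted from Python with the `requests` library (already among the service dependencies). The host, token, partition and file name are placeholder values, and the PUT verb simply follows the endpoint named above:
+
+```python
+import json
+
+import requests
+
+base_url = "http://127.0.0.1:8080/api/os-wellbore-ddms"  # placeholder: a local instance
+headers = {
+    "Authorization": "Bearer <your token>",        # placeholder bearer token
+    "data-partition-id": "<your data partition>",  # may be required depending on configuration
+}
+
+with open("log_record.json") as f:  # file containing one of the sample payloads below
+    payload = json.load(f)
+
+response = requests.put(f"{base_url}/ddms/v2/logs", headers=headers, json=payload)
+response.raise_for_status()
+print(response.json())  # the returned id can then be used on /ddms/v2/logs/{logid}/data
+```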
+ +- GCP + + ```json + [{ + "data": { + "log": { + "family": "Gamma Ray", + "familyType": "Gamma Ray", + "format": "float64", + "mnemonic": "GR", + "name": "GAMM", + "unitKey": "gAPI" + } + }, + "kind": "opendes:osdu:log:1.0.5", + "namespace": "opendes:osdu", + "legal": { + "legaltags": [ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries": [ + "US" + ], + "status": "compliant" + }, + "acl": { + "viewers": [ + "data.default.viewers@opendes.p4d.cloud.slb-ds.com" + ], + "owners": [ + "data.default.owners@opendes.p4d.cloud.slb-ds.com" + ] + }, + "type": "log" + } + ] + ``` + +- MVP + + ```json + [ + { + "acl": { + "owners": [ + "data.default.owners@opendes.contoso.com" + ], + "viewers": [ + "data.default.viewers@opendes.contoso.com" + ] + }, + "data": { + "name": "wdms_e2e_log" + }, + "kind": "opendes:wks:log:1.0.5", + "legal": { + "legaltags": [ + "opendes-storage-1603197111615" + ], + "otherRelevantDataCountries": [ + "US", + "FR" + ] + } + } + ] + ``` + +### Run with Uvicorn + +```bash +uvicorn app.wdms_app:wdms_app --port LOCAL_PORT +``` + +Then access app on `http://127.0.0.1:<LOCAL_PORT>/api/os-wellbore-ddms/docs` + +### Run with Docker + +#### Build Image + +A Personal Access Token (PAT) is required to pull all the python packages. + +```bash +# Set PIP_EXTRA_URL +PIP_EXTRA_URL=https://community.opengroup.org/groups/osdu/platform/domain-data-mgmt-services/wellbore/-/packages + +# Set IMAGE_TAG +IMAGE_TAG="os-wellbore-ddms:dev" + +# Build Image +docker build -t=$IMAGE_TAG --rm . -f ./build/Dockerfile --build-arg PIP_EXTRA_URL=$PIP_EXTRA_URL --build-arg PIP_WHEEL_DIR=python-packages +``` + +#### Run Image + +1. Run the image + + Replace the LOCAL_PORT value with a local port + + ```bash + LOCAL_PORT=<local_port> + + docker run -d -p $LOCAL_PORT:8080 -e OS_WELLBORE_DDMS_DEV_MODE=1 -e USE_LOCALFS_BLOB_STORAGE_WITH_PATH=1 $IMAGE_TAG + ``` + +2. Access app on `http://127.0.0.1:<LOCAL_PORT>/api/os-wellbore-ddms/docs` + +3. The environment variable `OS_WELLBORE_DDMS_DEV_MODE=1` enables dev mode + +4. Logs can be checked by running + + ```bash + docker logs CONTAINER_ID + ``` + +### Run Unit Tests Locally + +```bash +# Install test dependencies +pip install -r requirements_dev.txt + +python -m pytest --junit-xml=unit_tests_report.xml --cov=app --cov-report=html --cov-report=xml ./tests/unit +``` + +Coverage reports can be viewed after the command is run. The HMTL reports are saved in the htmlcov directory. + +### Run Integration Tests locally + +This example runs basic tests using the local filesystem for blob storage and storage service. There's no search or entilements service, everything runs locally. + +First, create the temp storage folders and run the service. + +```bash +mkdir -p tmpstorage +mkdir -p tmpblob +python main.py -e USE_INTERNAL_STORAGE_SERVICE_WITH_PATH $(pwd)/tmpstorage -e USE_LOCALFS_BLOB_STORAGE_WITH_PATH $(pwd)/tmpblob -e CLOUD_PROVIDER local +``` + +In another terminal, generate a minimum configuration file and run the integration tests. + +```bash +cd tests/integration +python gen_postman_env.py --token $(pyjwt --key=secret encode email=nobody@example.com) --base_url "http://127.0.0.1:8080/api/os-wellbore-ddms" --cloud_provider "local" --data_partition "dummy" +pytest ./functional --environment="./generated/postman_environment.json" --filter-tag=basic +``` + +For more information see the [integration tests README](tests/integration/README.md) + +### Port Forward from Kubernetes + + 1. List the pods: `kubectl get pods` + 2. 
Port forward: `kubectl port-forward pods/POD_NAME LOCAL_PORT:8080` + 3. Access it on `http://127.0.0.1:<LOCAL_PORT>/api/os-wellbore-ddms/docs` + +### Tracing + +OpenCensus libraries are used to record incoming requests metrics (execution time, result code, etc...). +At the moment, 100% of the requests are saved. diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..986499b1146e9e0d2a0d3688a43fca4b4199f19a --- /dev/null +++ b/app/__init__.py @@ -0,0 +1,19 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# this file will be updated in the build pipeline + +__version__ = '0.2' +__build_number__ = 'local' +__app_name__ = 'Wellbore DDMS OSDU' diff --git a/app/auth/__init__.py b/app/auth/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/auth/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/auth/auth.py b/app/auth/auth.py new file mode 100644 index 0000000000000000000000000000000000000000..3abd6d7d857eaac72f648c8bcbd719818a8cf01b --- /dev/null +++ b/app/auth/auth.py @@ -0,0 +1,69 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
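+
+# Bearer-token authentication for the FastAPI app: exposes an HTTPBearer security
+# dependency that decodes the incoming JWT, resolves the calling user (cached in a
+# short-lived TTLCache), and attaches it to the request scope and the current Context.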
+ +from cachetools import TTLCache +from fastapi import HTTPException, Depends +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer +from jwt import PyJWTError +from starlette.authentication import AuthCredentials +from starlette.requests import Request +from starlette.status import HTTP_401_UNAUTHORIZED +from app.model.user import User +from app.utils import Context, async_with_cache +from app.helper import logger +import jwt + +# manually using basic global cache for now as decorator doesn't work with coroutine +_user_info_cache = TTLCache(maxsize=512, ttl=60, getsizeof=lambda x: 1) + + +# Make the name very explicit for now +class OpenDESBearerToken(HTTPBearer): + pass + + +security = OpenDESBearerToken() + + +async def require_opendes_authorized_user(request: Request, + credentials: HTTPAuthorizationCredentials = Depends(security)): + token = credentials.credentials + ctx = Context.current() + + user = await _get_user_from_token(ctx, token) + request.scope['auth'] = AuthCredentials(['authenticated']) + request.scope['user'] = user + + Context.set_current_with_value(auth=token, user=user) + + +async def _get_user_from_token(ctx: Context, token: str) -> User: + global _user_info_cache + cache_key: str = token + return await async_with_cache(_user_info_cache, cache_key, get_user_from_token_not_cached, ctx, token) + + +async def get_user_from_token_not_cached(ctx: Context, token: str) -> User: + # TODO REAL entitlement call is needed here, for now basic decode without verify + try: + token_payload = jwt.decode(token, verify=False) + email = token_payload['email'] + except (KeyError, PyJWTError): + raise HTTPException( + status_code=HTTP_401_UNAUTHORIZED, + detail='invalid token', + headers={'WWW-Authenticate': 'Bearer'}, + ) + groups = [] + return User(email=email, authenticated=True, groups=groups) diff --git a/app/bulk_persistence/__init__.py b/app/bulk_persistence/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6dd982532c0acc927f218af0fb988a3d173e761d --- /dev/null +++ b/app/bulk_persistence/__init__.py @@ -0,0 +1,21 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from .bulk_id import BulkId +from .dataframe_persistence import create_and_store_dataframe, get_dataframe +from .dataframe_serializer import DataframeSerializer +from .json_orient import JSONOrient +from .mime_types import MimeTypes +from .tenant_provider import resolve_tenant +from .exceptions import UnknownChannelsException, InvalidBulkException, NoBulkException, NoDataException, RecordNotFoundException diff --git a/app/bulk_persistence/blob_bulk.py b/app/bulk_persistence/blob_bulk.py new file mode 100644 index 0000000000000000000000000000000000000000..ed4b078f476497003d5180c3eaf6be1317666005 --- /dev/null +++ b/app/bulk_persistence/blob_bulk.py @@ -0,0 +1,30 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass +from typing import Any + + +@dataclass +class BlobBulk: + """ + represents a bulk bloblified, which means serialized in some way. data is expected to be an io.IOBase + """ + + id: str + """ identifier """ + data: Any = None + """ data as file-like object """ + content_type: str = None + metadata: dict = None diff --git a/app/bulk_persistence/blob_storage.py b/app/bulk_persistence/blob_storage.py new file mode 100644 index 0000000000000000000000000000000000000000..79990f25b723ccbb619369c91afbcb1157ea7b4f --- /dev/null +++ b/app/bulk_persistence/blob_storage.py @@ -0,0 +1,235 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import asyncio +import uuid +from asyncio import iscoroutinefunction + +from contextlib import asynccontextmanager + +from io import BytesIO +from os import path, remove +from typing import ( + Any, + Callable, + Coroutine, + Dict, + NamedTuple, + Optional, + Tuple, + Union, +) + +import pandas as pd +import pyarrow as pa +import pyarrow.feather as feather +import pyarrow.parquet as pq + +from app.utils import get_pool_executor, get_wdms_temp_dir + +from .blob_bulk import BlobBulk +from .mime_types import MimeType, MimeTypes + +# Here are functions to (de)serializing) bulk data only, no knowledge at all regarding the domain models, only raw data +# here +# TODO NAMING IS CURRENTLY BAD +# TODO data munging (mainly deals with missing values - df.fillna(a value representing Nan)) +# TODO will do some optimization after, may be use hd5 to speed up the write dans secondly run a background task to +# to write a parquet format, here are some potential strategy: +# - using faster format, e.g. 
hd5 +# - threshold about the busyness of the service (if not busy and not huge data -> direct write) +# - better proc fork and arg serialization + + +def export_to_parquet( + path_like: str, dataframe: pd.DataFrame +) -> Tuple[str, Dict[str, str]]: + # parquet v2 has less restrictions concerning format (for example number as column name) + pq.write_table( + pa.Table.from_pandas(dataframe, preserve_index=True), + path_like, + version="2.0", + compression="snappy", + ) + return path_like, {"content_type": MimeTypes.PARQUET.type} + + +def load_from_parquet(data) -> pd.DataFrame: + """ data = bytes, str, pyarrow.NativeFile, or file-like object """ + if isinstance(data, bytes): + data = pa.BufferReader(data) + return pq.read_table(data).to_pandas() + + +def export_to_feather( + filename: str, dataframe: pd.DataFrame +) -> Tuple[str, Dict[str, str]]: + feather.write_feather( + pa.Table.from_pandas(dataframe, preserve_index=True), + filename, + compression="lz4", + ) + return filename, {"content_type": MimeTypes.FEATHER.type} + + +def load_from_feather(data) -> pd.DataFrame: + """ data = bytes, str, pyarrow.NativeFile, or file-like object """ + if isinstance(data, bytes): + data = feather.BufferReader(data) + return feather.read_table(data).to_pandas() + + +class BlobFileExporter(NamedTuple): + mime_type: MimeType + writer_fn: Union[ + Callable[[str, pd.DataFrame], Any], Coroutine[str, pd.DataFrame, Any] + ] + + def match(self, str_value: str) -> bool: + return self.mime_type.match(str_value) + + +class BlobFileExporters: + PARQUET = BlobFileExporter(MimeTypes.PARQUET, export_to_parquet) + FEATHER = BlobFileExporter(MimeTypes.FEATHER, export_to_feather) + + @classmethod + def from_string(cls, value: str) -> BlobFileExporter: + if BlobFileExporters.PARQUET.match(value): + return BlobFileExporters.PARQUET + if BlobFileExporters.FEATHER.match(value): + return BlobFileExporters.FEATHER + raise KeyError("unknown file type " + value) + + +class BlobFileImporter(NamedTuple): + mime_type: MimeType + reader_fn: Union[ + Callable[[str, pd.DataFrame], Any], Coroutine[str, pd.DataFrame, Any] + ] + + def match(self, str_value: str) -> bool: + return self.mime_type.match(str_value) + + +class BlobFileImporters: + PARQUET = BlobFileImporter(MimeTypes.PARQUET, load_from_parquet) + FEATHER = BlobFileImporter(MimeTypes.FEATHER, load_from_feather) + + @classmethod + def from_string(cls, value: str) -> BlobFileImporter: + if cls.PARQUET.match(value): + return cls.PARQUET + if cls.FEATHER.match(value): + return cls.FEATHER + raise KeyError('unknown file type ' + value) + + +def _expand_args(args: Tuple[Callable[[str, pd.DataFrame], Tuple[str, str]], str, pd.DataFrame]): + writer_fn, filename, df = args + return writer_fn(filename, df) + + +async def _run_export_to_file_in_executor(filename: str, + dataframe: pd.DataFrame, + executor, + exporter_fn: Callable[[str, pd.DataFrame], Tuple[str, str]]): + return await asyncio.get_event_loop().run_in_executor(executor, + _expand_args, + (exporter_fn, filename, dataframe)) + + +def get_default_exporter_executor(): + return get_pool_executor() + + +@asynccontextmanager +async def create_and_write_blob( + table: pd.DataFrame, *, + file_exporter: BlobFileExporter = BlobFileExporters.PARQUET, + out_dir=None, + blob_id: Optional[str] = None, + executor=get_default_exporter_executor(), + custom_export_to_file_fn=None): + assert file_exporter or custom_export_to_file_fn + """ + This function take inputs data, creates a pandas dataframe and dumps it into a file in a given. 
Supported output + format are listed in BlobFileTypes which point to a dedicated writer/exporter function. It possible to provide a + custom writer/exported function, it mainly for testing purposes. It also possible to control if the write/export + operation must be run in an executor or not. This option is for testing as well but almost to update in future what + is the best way to handle it because it appears to be a blocking operation which is potentially problematic in a + heavily async context. + :param index_data: indexes values + :param values: actual values + :param row_wise: True if row wise, False if column wise + :param columns_array: columns (head) values + :param out_type: data format to write + :param out_dir: path_like, if none will use temporary folder. + :param blob_id: if none, will be generated. + :param executor: executor to use, if set to None, no executor will be used. If executor is not None, then the writer + must NOT be an async/coroutine function + :param custom_export_to_file_fn: custom writer, either a coroutine or a sync fn, in that case out_type will be + ignored out_filename, if provided will be passed as it to the write_coroutine as bulk id, it must provide a + tuple(file: Union[str, bytes], metadata: dict[str, str]) + if file is str, it means file path + :return: BlobBulk + + Expected to be used as within a context as such: + + > async with create_and_write_blob(...) as blob: + > # blob.data as a IO.base, mainly same as file + > + + """ + assert isinstance(table, pd.DataFrame), f"Unsupported type for table: {type(table)}, must be dataframe" + df = table + + # Build the output filename which will be used as bulk id + blob_id = blob_id or str(uuid.uuid4()) + out_filename = blob_id + file_exporter.mime_type.extension + out_path = path.join(out_dir or get_wdms_temp_dir(), out_filename) + + # Dump/Export the dataframe into a file format + export_to_file_function = custom_export_to_file_fn or file_exporter.writer_fn + + if executor is None: + if iscoroutinefunction(export_to_file_function): + file_meta_pair = await export_to_file_function(out_path, df) + else: + assert callable(export_to_file_function) + file_meta_pair = export_to_file_function(out_path, df) + else: + assert not iscoroutinefunction(export_to_file_function), 'cannot use a coroutine with executor' + file_meta_pair = await _run_export_to_file_in_executor(out_path, df, executor, export_to_file_function) + + metadata = file_meta_pair[1] or {} + source = file_meta_pair[0] + content_type = next((v for k, v in metadata.items() if k.replace('-', '').lower() == 'contenttype'), None) + + if isinstance(source, str): + with open(source, 'rb') as file: + yield BlobBulk(id=blob_id, data=file, content_type=content_type, metadata=metadata) + # clean up + remove(source) + elif isinstance(source, bytes): + yield BlobBulk(id=blob_id, data=BytesIO(source), content_type=content_type, metadata=metadata) + else: + raise RuntimeError(f'unexpected type {source} returned by bulk exporter function') + + +async def read_blob(blob: BlobBulk): + importer = BlobFileImporters.from_string(blob.content_type) + # TODO: run in executor? 
+ dataframe = importer.reader_fn(blob.data) + return dataframe diff --git a/app/bulk_persistence/bulk_id.py b/app/bulk_persistence/bulk_id.py new file mode 100644 index 0000000000000000000000000000000000000000..c4f7740dbe7abd091486eff70ec28fd8a6febf90 --- /dev/null +++ b/app/bulk_persistence/bulk_id.py @@ -0,0 +1,30 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uuid +from typing import Optional + + +class BulkId: + @staticmethod + def new_bulk_id() -> str: + return str(uuid.uuid4()) + + @classmethod + def bulk_urn_encode(cls, bulk_id: str) -> str: + return uuid.UUID(bulk_id).urn + + @classmethod + def bulk_urn_decode(cls, urn: str) -> Optional[str]: + return str(uuid.UUID(urn)) diff --git a/app/bulk_persistence/dataframe_persistence.py b/app/bulk_persistence/dataframe_persistence.py new file mode 100644 index 0000000000000000000000000000000000000000..f4e6ef2795195387a243941401843bd8a02f13b8 --- /dev/null +++ b/app/bulk_persistence/dataframe_persistence.py @@ -0,0 +1,66 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
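A quick round trip through `BulkId` as defined above (the id value shown is illustrative):

```python
from app.bulk_persistence import BulkId

bulk_id = BulkId.new_bulk_id()          # e.g. '1b4e28ba-2fa1-11d2-883f-0016d3cca427'
urn = BulkId.bulk_urn_encode(bulk_id)   # 'urn:uuid:1b4e28ba-...'
assert BulkId.bulk_urn_decode(urn) == bulk_id
```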
+ +import io + +import pandas as pd +from osdu.core.api.storage.blob_storage_base import BlobStorageBase + +from app.utils import Context + +from .blob_storage import ( + BlobBulk, + BlobFileExporters, + create_and_write_blob, + read_blob, +) +from .bulk_id import BulkId +from .mime_types import MimeTypes +from .tenant_provider import resolve_tenant + + +async def create_and_store_dataframe(ctx: Context, df: pd.DataFrame) -> str: + """Store bulk on a blob storage""" + new_bulk_id = BulkId.new_bulk_id() + tenant = await resolve_tenant(ctx.partition_id) + async with create_and_write_blob( + df, file_exporter=BlobFileExporters.PARQUET, blob_id=new_bulk_id + ) as bulkblob: + storage: BlobStorageBase = await ctx.app_injector.get(BlobStorageBase) + await storage.upload( + tenant, + bulkblob.id, + bulkblob.data, + content_type=bulkblob.content_type, + metadata=bulkblob.metadata, + ) + return bulkblob.id + + +async def get_dataframe(ctx: Context, bulk_id: str) -> pd.DataFrame: + """ fetch bulk from a blob storage, provide column major """ + tenant = await resolve_tenant(ctx.partition_id) + storage: BlobStorageBase = await ctx.app_injector.get(BlobStorageBase) + + bytes_data = await storage.download(tenant, bulk_id) + # for now use fix parquet format saving one call + # meta_data = await storage.download_metadata(tenant.project_id, tenant.bucket_name, bulk_id) + # content_type = meta_data.metadata["content_type"] + blob = BlobBulk( + id=bulk_id, + data=io.BytesIO(bytes_data), + content_type=MimeTypes.PARQUET.type, + ) + data_frame = await read_blob(blob) + return data_frame diff --git a/app/bulk_persistence/dataframe_serializer.py b/app/bulk_persistence/dataframe_serializer.py new file mode 100644 index 0000000000000000000000000000000000000000..6b0dda0108d9614b02eb82f91db80ee8982c50f5 --- /dev/null +++ b/app/bulk_persistence/dataframe_serializer.py @@ -0,0 +1,108 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +from io import BytesIO +from typing import Union, AnyStr, IO, Optional, List + +from pathlib import Path +import numpy as np +import pandas as pd +from pydantic import BaseModel +from pandas import DataFrame as DataframeClass + +from .json_orient import JSONOrient +from .mime_types import MimeTypes + + +class DataframeSerializer: + """ + the goal is to encapsulate to (de)serialized dataframe from/to various format + then provide unified the way to handle various topics float/double precision, compression etc... 
+ """ + + # todo may be unified with the work from storage.blob_storage + + SupportedFormat = [MimeTypes.JSON] # , MimeTypes.MSGPACK] + """ these are supported format through wellbore ddms APIs """ + + @classmethod + def get_schema(cls, orient: Union[str, JSONOrient]) -> dict: + # defined here as only used to provided schema + class SplitFormat(BaseModel): + data: Union[List[Union[str, int, float]], List[List[Union[str, int, float]]]] + columns: List[Union[str, int, float]] = None + index: List[Union[str, int, float]] = None + + class IndexFormat(BaseModel): + TODO: str + + class ColumnFormat(BaseModel): + TODO: str + + class ValuesFormat(BaseModel): + __root__: List[List[Union[str, int, float]]] + + class RecordsFormat(BaseModel): + TODO: str + + schema_dict = { + JSONOrient.split: SplitFormat.schema(), + JSONOrient.index: IndexFormat.schema(), + JSONOrient.columns: ColumnFormat.schema(), + JSONOrient.values: ValuesFormat.schema(), + JSONOrient.records: RecordsFormat.schema() + } + + return schema_dict[JSONOrient.get(orient)] + + @classmethod + def to_json(cls, + df: DataframeClass, + orient: Union[str, JSONOrient] = JSONOrient.split, + path_or_buf: Optional[Union[str, Path, IO[AnyStr]]] = None) -> Optional[str]: + """ + :param df: dataframe to dump + :param orient: format for Json, default is split + :param path_or_buf: File path or object. If not specified, the result is returned as a string. + :return: None or json string of path_or_buf is None + """ + orient = JSONOrient.get(orient) + + return df.fillna("NaN").to_json(path_or_buf=path_or_buf, orient=orient.value) + + @classmethod + def read_parquet(cls, data) -> 'DataframeSerializer.DataframeClass': + """ + :param data: bytes, path object or file-like object + :return: dataframe + """ + if isinstance(data, bytes): + data = BytesIO(data) + + # will raise if contains multiple dataframe + return pd.read_parquet(data) + + @classmethod + def read_json(cls, data, orient: Union[str, JSONOrient]) -> 'DataframeSerializer.DataframeClass': + """ + :param data: bytes str content (valid JSON str), path object or file-like object + :param orient: + :return: dataframe + """ + orient = JSONOrient.get(orient) + + if isinstance(data, bytes): + data = BytesIO(data) + return pd.read_json(path_or_buf=data, orient=orient.value).replace("NaN", np.NaN) diff --git a/app/bulk_persistence/exceptions.py b/app/bulk_persistence/exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..448b383e3a242d37d8cc758de121a4911dcf87f3 --- /dev/null +++ b/app/bulk_persistence/exceptions.py @@ -0,0 +1,23 @@ +class RecordNotFoundException(Exception): + """ Raised when specified Record does not exist """ + pass + + +class NoDataException(Exception): + """ Raised when asking data for a Record that doesn't have any data """ + pass + + +class NoBulkException(Exception): + """ Raised when asking data for a Record that doesn't have bulkURI """ + pass + + +class InvalidBulkException(Exception): + """ Raised when asking data for a Record that have an invalid bulkURI """ + pass + + +class UnknownChannelsException(Exception): + """ Raised when unknown channel """ + pass diff --git a/app/bulk_persistence/json_orient.py b/app/bulk_persistence/json_orient.py new file mode 100644 index 0000000000000000000000000000000000000000..acaa4cecdfcb77928c32d299e6c573a0d8c90fdc --- /dev/null +++ b/app/bulk_persistence/json_orient.py @@ -0,0 +1,29 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file 
except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from enum import Enum +from typing import Union + + +class JSONOrient(Enum): + # not allow 'table' because very verbose then comes with significant overhead + split = "split" + index = "index" + columns = "columns" + records = "records" + values = "values" + + @classmethod + def get(cls, orient: Union[str, "JSONOrient"]) -> "JSONOrient": + return JSONOrient[orient] if isinstance(orient, str) else orient diff --git a/app/bulk_persistence/mime_types.py b/app/bulk_persistence/mime_types.py new file mode 100644 index 0000000000000000000000000000000000000000..0705d99bad15d0aa110192d33d31d763a65e7b47 --- /dev/null +++ b/app/bulk_persistence/mime_types.py @@ -0,0 +1,84 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Generator, List, NamedTuple + + +class MimeType(NamedTuple): + """ expected always lower case """ + + type: str + extension: str + alternative_types: List[str] = [] + + def match(self, str_value: str) -> bool: + normalized_value = str_value.lower() + return any( + ( + normalized_value == a_type + for a_type in [self.type] + self.alternative_types + ) + ) or normalized_value.replace(".", "") == self.extension.replace( + ".", "" + ) + + +class MimeTypes: + """ + define mime types used in the application + Note: May be use https://docs.python.org/3/library/mimetypes.html + mimetypes.add_type('application/x-parquet', '.parquet') + """ + + PARQUET = MimeType( + type="application/x-parquet", + extension=".parquet", + alternative_types=["application/parquet"], + ) # because https://tools.ietf.org/html/rfc6838#section-3.4 + + FEATHER = MimeType( + type="application/x-feather", + extension=".feather", + alternative_types=["application/feather"], + ) + + JSON = MimeType(type="application/json", extension=".json") + + MSGPACK = MimeType( + type="application/x-msgpack", + extension=".msgpack", + alternative_types=[ + "application/msgpack", + "application/messagepack", + "application/x-messagepack", + "application/vnd.messagepack", + "application/vnd.msgpack", + ], + ) + + @classmethod + def types(cls) -> Generator[MimeType, None, None]: + """ enumerate all type """ + for _, t in cls.__dict__.items(): + if isinstance(t, MimeType): + yield t + + @classmethod + def from_str(cls, value: str) -> MimeType: + for t in cls.types(): + if t.match(value): + return t + raise ValueError(f"{value} does not match any supported mime types") + + # todo add guess_type(path_like) method ? 
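Putting `DataframeSerializer`, `JSONOrient` and `MimeTypes` together, a small round-trip sketch (the column names are illustrative):

```python
import pandas as pd
from app.bulk_persistence import DataframeSerializer, MimeTypes

df = pd.DataFrame({"MD": [1000.0, 1000.5], "GR": [75.2, None]})

payload = DataframeSerializer.to_json(df, orient="split")               # NaN is encoded as the string "NaN"
restored = DataframeSerializer.read_json(payload.encode(), orient="split")

assert MimeTypes.from_str("application/parquet") is MimeTypes.PARQUET   # matched via alternative type name
assert MimeTypes.from_str(".feather") is MimeTypes.FEATHER              # matched via extension
```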
diff --git a/app/bulk_persistence/tenant_provider.py b/app/bulk_persistence/tenant_provider.py new file mode 100644 index 0000000000000000000000000000000000000000..81935d5fef3e69be254ead6c2fc1fb38977a075a --- /dev/null +++ b/app/bulk_persistence/tenant_provider.py @@ -0,0 +1,46 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.conf import Config +from osdu.core.api.storage.tenant import Tenant + +async def resolve_tenant(data_partition_id: str) -> Tenant: + # TODO this is a temporary hardcoded, to be reviewed as we are onboarding different cloud provider + if Config.cloud_provider.value == 'gcp': + return Tenant( + data_partition_id=data_partition_id, + project_id=Config.default_data_tenant_project_id.value, + credentials=Config.default_data_tenant_credentials.value, + bucket_name='logstore-osdu' + ) + + if Config.cloud_provider.value == 'az': + return Tenant( + data_partition_id=data_partition_id, + project_id='', + bucket_name='wdms-osdu' + ) + + if Config.cloud_provider.value == 'ibm': + return Tenant( + data_partition_id=data_partition_id, + project_id=Config.default_data_tenant_project_id.value, + bucket_name='logstore-osdu-ibm' + ) + + return Tenant( + data_partition_id=data_partition_id, + project_id='undefined', + bucket_name='logstore-osdu' + ) diff --git a/app/clients/__init__.py b/app/clients/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..130dcfefe044db39ca56852753c2b5fe4c93ea81 --- /dev/null +++ b/app/clients/__init__.py @@ -0,0 +1,80 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
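`resolve_tenant` is what `create_and_store_dataframe`/`get_dataframe` (shown earlier) rely on to pick the target bucket per cloud provider; a usage sketch assuming a fully initialized request `Context` (partition id set and a `BlobStorageBase` available through the injector):

```python
import pandas as pd
from app.bulk_persistence import create_and_store_dataframe, get_dataframe
from app.utils import Context

async def store_then_load(ctx: Context) -> pd.DataFrame:
    df = pd.DataFrame({"MD": [1000.0, 1000.5], "GR": [75.2, 80.1]})
    bulk_id = await create_and_store_dataframe(ctx, df)   # uploads a parquet blob, returns its id
    return await get_dataframe(ctx, bulk_id)              # downloads the blob and rebuilds the dataframe
```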
+ +import odes_entitlements +import odes_search +import odes_storage +from odes_entitlements.api_client import AsyncEntitlementsAuthAdministrationApi +from odes_search.api_client import AsyncSearchApi +from odes_storage.api_client import AsyncRecordsApi +from app.conf import Config +from dataclasses import dataclass +from typing import Optional + + +__all__ = ['EntitlementsAuthServiceClient', + 'SearchServiceClient', + 'StorageRecordServiceClient', + 'make_entitlements_auth_client', + 'make_search_client', + 'make_storage_record_client'] + +from app.clients.clients_middleware import client_middleware + +EntitlementsAuthServiceClient = AsyncEntitlementsAuthAdministrationApi +SearchServiceClient = AsyncSearchApi +StorageRecordServiceClient = AsyncRecordsApi + + +@dataclass +class Limits: + max_connections: Optional[int] = None + max_keepalive_connections: Optional[int] = None + + +def make_entitlements_auth_client(host) -> EntitlementsAuthServiceClient: + entitlements_client = odes_entitlements.ApiClient( + host=host, + timeout=Config.de_client_config_timeout.value, + limits=Limits( + max_connections=Config.de_client_config_max_connection.value or None, + max_keepalive_connections=Config.de_client_config_max_keepalive.value or None) + ) + + entitlements_client.add_middleware(middleware=client_middleware) + return odes_entitlements.AsyncApis(entitlements_client).entitlements_auth_administration_api + + +def make_search_client(host) -> SearchServiceClient: + search_client = odes_search.ApiClient( + host=host, + timeout=Config.de_client_config_timeout.value, + limits=Limits( + max_connections=Config.de_client_config_max_connection.value or None, + max_keepalive_connections=Config.de_client_config_max_keepalive.value or None) + ) + search_client.add_middleware(middleware=client_middleware) + return odes_search.AsyncApis(search_client).search_api + + +def make_storage_record_client(host) -> StorageRecordServiceClient: + storage_client = odes_storage.ApiClient( + host=host, + timeout=Config.de_client_config_timeout.value, + limits=Limits( + max_connections=Config.de_client_config_max_connection.value or None, + max_keepalive_connections=Config.de_client_config_max_keepalive.value or None) + ) + storage_client.add_middleware(middleware=client_middleware) + return odes_storage.AsyncApis(storage_client).records_api diff --git a/app/clients/clients_middleware.py b/app/clients/clients_middleware.py new file mode 100644 index 0000000000000000000000000000000000000000..379f202478feb60b8483bc1317cbabb10f250dba --- /dev/null +++ b/app/clients/clients_middleware.py @@ -0,0 +1,65 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
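The three factories above are symmetric; building clients from the configured hosts might look like this (the hosts come from the mandatory SERVICE_HOST_* variables):

```python
from app.clients import make_search_client, make_storage_record_client
from app.conf import Config

search_client = make_search_client(Config.service_host_search.value)
storage_client = make_storage_record_client(Config.service_host_storage.value)
```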
+ +from opencensus.trace.span import SpanKind + +from app import conf +from app.utils import Context +from app.helper import utils, traces + + +def _before_tracing_attributes(ctx, request): + """ + Add request attributes + correlation id to the the current tracer's span + """ + ctx.tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_HOST, + attribute_value=request.url.host) + ctx.tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_METHOD, + attribute_value=request.method) + ctx.tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_PATH, + attribute_value=str(request.url.path)) + ctx.tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_URL, + attribute_value=str(request.url)) + ctx.tracer.add_attribute_to_current_span( + attribute_key=conf.CORRELATION_ID_HEADER_NAME, + attribute_value=ctx.correlation_id) + + +async def client_middleware(request, call_next): + ctx = Context.current() + + with ctx.tracer.span(name=f'[client_middleware]{request.url}') as span: + span.span_kind = SpanKind.CLIENT + _before_tracing_attributes(ctx, request) + + # propagate current tracing context to outgoing request's headers + tracing_headers = traces.get_trace_propagator().to_headers(span.context_tracer.span_context) + + request.headers.update(tracing_headers) + ctx.logger.debug(f"client_middleware - url: {request.url} - tracing_headers: {tracing_headers}") + + request.headers[conf.AUTHORIZATION_HEADER_NAME] = f'Bearer {ctx.auth}' + if ctx.correlation_id: + request.headers[conf.CORRELATION_ID_HEADER_NAME] = ctx.correlation_id + if ctx.app_key: + request.headers[conf.APP_KEY_HEADER_NAME] = ctx.app_key + + result = await call_next(request) + span.add_attribute(utils.HTTP_STATUS_CODE, result.status_code) + + return result diff --git a/app/clients/entitlements_service_client.py b/app/clients/entitlements_service_client.py new file mode 100644 index 0000000000000000000000000000000000000000..da3d64c6718dae55963bf65986f6ccfc190bb62d --- /dev/null +++ b/app/clients/entitlements_service_client.py @@ -0,0 +1,20 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.clients import EntitlementsAuthServiceClient +from app.utils import Context + + +async def get_entitlements_auth_service(ctx: Context) -> EntitlementsAuthServiceClient: + return await ctx.app_injector.get(EntitlementsAuthServiceClient) diff --git a/app/clients/search_service_client.py b/app/clients/search_service_client.py new file mode 100644 index 0000000000000000000000000000000000000000..8554bb526a473d5d0a3fcf71d7204fdf6299c8bc --- /dev/null +++ b/app/clients/search_service_client.py @@ -0,0 +1,20 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
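These thin accessors resolve clients through the per-request `Context` injector rather than constructing them; a sketch of how a handler would obtain one:

```python
from app.clients.entitlements_service_client import get_entitlements_auth_service
from app.utils import Context

async def handler():
    ctx = Context.current()
    entitlements = await get_entitlements_auth_service(ctx)
    # outgoing calls made with this client go through client_middleware,
    # which forwards the bearer token, correlation id and tracing headers
    return entitlements
```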
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.clients import SearchServiceClient +from app.utils import Context + + +async def get_search_service(ctx: Context) -> SearchServiceClient: + return await ctx.app_injector.get(SearchServiceClient) diff --git a/app/clients/storage_service_blob_storage.py b/app/clients/storage_service_blob_storage.py new file mode 100644 index 0000000000000000000000000000000000000000..b53049346a134f4646af82a5630945aea3e85c4e --- /dev/null +++ b/app/clients/storage_service_blob_storage.py @@ -0,0 +1,143 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from asyncio import iscoroutinefunction, gather +import uuid +from fastapi import FastAPI, HTTPException +from osdu.core.api.storage.tenant import Tenant +from starlette import status +from odes_storage.models import * +from osdu.core.api.storage.blob_storage_base import BlobStorageBase + +from app.model import model_utils + + +async def no_check_appkey_token(appkey, token): + # empty method + pass + + +class StorageRecordServiceBlobStorage: + """ + implementation of storage service using blob storage. Security check (appkey & token) responsibility is delegated. + This is not meant to be used in production but for various testing and debugging purposes. 
Use injectors to override + the osdu impl to use this one instead + """ + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + def __init__(self, + blob_storage: BlobStorageBase, + project: str, + container: str, + auth_check_coro=no_check_appkey_token): + """ + :param blob_storage: + :param project: project than will pass to the blob storage calls + :param container: project than will pass to the blob storage calls + :param auth_check_coro: + """ + assert blob_storage + assert iscoroutinefunction(auth_check_coro) + self._storage: BlobStorageBase = blob_storage + self._project: str = project + self._container: str = container + self._auth_check = auth_check_coro + + def _build_record_path(self, id: str, data_partition: str): + return f'{data_partition or "global"}_r_{id.replace(":", "_")}' + + async def _check_auth(self, appkey=None, token=None): + await self._auth_check(appkey, token) + + async def create_or_update_records(self, + record: List[Record] = None, + data_partition_id: str = None, + appkey: str = None, + token: str = None) -> CreateUpdateRecordsResponse: + """ record is a bit anonymous, but we do expect 'id' field """ + record_list = record + await self._check_auth(appkey, token) + # insert id if new record + for rec in record_list: + if rec.id is None: + rec.id = str(uuid.uuid4()) + + await gather(*[ + self._storage.upload( + Tenant(project_id=self._project, bucket_name=self._container, data_partition_id=data_partition_id), + self._build_record_path(record.id, data_partition_id), + model_utils.record_to_json(record), + content_type='application/json') + for record in record_list + ], return_exceptions=False) # return_exceptions False means will throw if a single error occurs + + # manual for now + return CreateUpdateRecordsResponse(recordCount=len(record_list), + recordIds=[record.id for record in record_list], + skipped_record_ids=[]) + + async def get_record(self, + id: str, + data_partition_id: str = None, + appkey: str = None, + token: str = None) -> Record: + await self._check_auth(appkey, token) + object_name = self._build_record_path(id, data_partition_id) + try: + bin_data = await self._storage.download( + Tenant(project_id=self._project, bucket_name=self._container, data_partition_id=data_partition_id), + object_name) + return Record.parse_raw(bin_data) + except FileNotFoundError: + raise HTTPException(status_code=404, detail="Item not found") + + async def get_all_record_versions(self, + id: str, + data_partition_id: str = None, + appkey: str = None, + token: str = None) -> RecordVersions: + # only one version /latest is supported + return RecordVersions(recordId=id, versions=[0]) + + async def get_record_version(self, + id: str, + version: int, + data_partition_id: str = None, + attribute: List[str] = None, + appkey: str = None, + token: str = None) -> Record: + # always return the latest + return await self.get_record(id, data_partition_id, appkey, token) + + async def delete_record(self, + id: str, + data_partition_id: str = None, + appkey: str = None, + token: str = None) -> None: + await self._check_auth(appkey, token) + object_name = self._build_record_path(id, data_partition_id) + try: + await self._storage.delete( + Tenant(project_id=self._project, bucket_name=self._container, data_partition_id=data_partition_id), + object_name) + except FileNotFoundError: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Item not found") + + async def get_schema(self, kind, data_partition_id=None, 
appkey=None, token=None, *args, **kwargs): + raise NotImplementedError('StorageServiceBlobStorage.get_schema') diff --git a/app/clients/storage_service_client.py b/app/clients/storage_service_client.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb4282860aff1598dd81811f90d9ca13220b628 --- /dev/null +++ b/app/clients/storage_service_client.py @@ -0,0 +1,20 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.clients import StorageRecordServiceClient +from app.utils import Context + + +async def get_storage_record_service(ctx: Context) -> StorageRecordServiceClient: + return await ctx.app_injector.get(StorageRecordServiceClient) diff --git a/app/conf.py b/app/conf.py new file mode 100644 index 0000000000000000000000000000000000000000..376819a692e6fbcfc4e50f5d65f20511ffc2069b --- /dev/null +++ b/app/conf.py @@ -0,0 +1,334 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
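Since this implementation is explicitly meant for testing/debugging, wiring it up might look like the sketch below; any `BlobStorageBase` implementation (for example an in-memory fake used by tests) can be passed in, and the project/container values are illustrative:

```python
from osdu.core.api.storage.blob_storage_base import BlobStorageBase
from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage

def make_test_storage_service(blob_storage: BlobStorageBase) -> StorageRecordServiceBlobStorage:
    # project/container are simply forwarded to the underlying blob storage calls
    return StorageRecordServiceBlobStorage(
        blob_storage=blob_storage,
        project="local-project",
        container="wdms-records",
    )
```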
+ +from dataclasses import dataclass +from typing import Optional, Callable, Any, Dict, List +import logging +import os + +__all__ = ['Config', + 'ConfigurationContainer', + 'check_environment', + 'cloud_provider_additional_environment', + 'validator_path_must_exist'] + +logger = logging.getLogger('configuration') + + +@dataclass +class EnvVar: + key: str + description: str = '' + secret: bool = False + default: Optional[str] = None + value: Optional[Any] = None + allowed_values: Optional[List[Any]] = None # if value not in the given list, it's reassigned to None + is_mandatory: bool = False + factory: Optional[Callable[[str], Any]] = None # transform input value into the target + validator: Optional[Callable[[Any], int]] = None # value is always check if None + + def load(self, environment_dict): + value = environment_dict.get(self.key, self.default) + if self.factory is not None and value is not None: + value = self.factory(value) + if self.allowed_values is None or value in self.allowed_values: + self.value = value + + def __call__(self): + return self.value + + def __str__(self): + return f'{self.key} = {self.printable_value}' + + def __bool__(self): + if self.value is None: + return False + return True if self.validator is None else self.validator(self.value) + + @property + def printable_value(self) -> str: + if self.value is None: + return 'UNDEFINED' + if self.secret: + return '*****' + return str(self.value) + + +def validator_path_must_exist(path: str): + return os.path.exists(path) + + +@dataclass(repr=False, eq=False) +class ConfigurationContainer: + """ + Gather any static environment variables and other variable. It's possible to add other at runtime or override them. + Add method also add it as attribute of the current instance. + Environment variable are declared as type EnvVar, then to access the value must do: + config.env_var_attribute.value + Environment variable can be get also by the key of this environment variable. For instance if declared like that: + + path_env_var: EnvVar = EnvVar(key='PATH') + + then the value can be access: + + path_value = config.path_env_var.value + path_value = config['path_env_var'] + path_value = config['PATH'] + + use env_var.printable_value instead of env_var.value when the goal is to log/display it. + """ + + service_name: EnvVar = EnvVar( + key='SERVICE_NAME', + description='Display name of the service when exporting entries for logging and tracing', + default='os-wellbore-ddms---local' + ) + + cloud_provider: EnvVar = EnvVar( + key='CLOUD_PROVIDER', + description='Short name of the current cloud provider environment, must be "gcp" or "az" or "ibm', + default=None, + is_mandatory=True, + allowed_values=['gcp', 'az', 'local', 'ibm'], + factory=lambda x: x.lower() + ) + + service_host_entitlements: EnvVar = EnvVar( + key='SERVICE_HOST_ENTITLEMENTS', + description='Back-end for entitlements service', + is_mandatory=True) + + service_host_search: EnvVar = EnvVar( + key='SERVICE_HOST_SEARCH', + description='Back-end for search service', + is_mandatory=True) + + service_host_storage: EnvVar = EnvVar( + key='SERVICE_HOST_STORAGE', + description='Back-end for storage service', + is_mandatory=True) + + de_client_config_timeout: EnvVar = EnvVar( + key='DE_CLIENT_CFG_TIMEOUT', + description='set connect, read, write, and pool timeouts (in seconds) for all DE client.', + default='45', # gateway timeout is 30s, greater value ensure the async client won't be the bottleneck. 
+ factory=lambda x: int(x)) + + de_client_config_max_connection: EnvVar = EnvVar( + key='DE_CLIENT_CFG_MAX_CONNECTION', + description='maximum number of allowable connections, 0 to always allow.', + default='200', + factory=lambda x: int(x)) + + de_client_config_max_keepalive: EnvVar = EnvVar( + key='DE_CLIENT_CFG_MAX_KEEPALIVE', + description='number of allowable keep-alive connections, 0 to always allow.', + default='200', + factory=lambda x: int(x)) + + build_details: EnvVar = EnvVar( + key='OS_WELLBORE_DDMS_BUILD_DETAILS', + description='contains optional extra information of the build, format is the multiple "key=value" separated' + 'by ;', + default='') + + dev_mode: EnvVar = EnvVar( + key='OS_WELLBORE_DDMS_DEV_MODE', + description='dev mode', + default='false', + factory=lambda x: x.lower() == 'true' or x == '1') + + openapi_prefix: EnvVar = EnvVar( + key='OPENAPI_PREFIX', + description='specify the base path for the openapi doc, in case deployed beind a proxy', + default='/api/os-wellbore-ddms') + + custom_catalog_timeout: EnvVar = EnvVar( + key='CUSTOM_CATALOG_TIMEOUT', + description='Timeout to invalidate custom catalog in seconds', + default='300', + factory=lambda x: int(x)) + + _environment_dict: Dict = os.environ + + _contextual_loader: Callable = None + + def add(self, name: str, value: Any, *, override: bool = False): + """ add a custom """ + if not override and name in self.__dict__: + raise KeyError(name + ' already exists') + self.__setattr__(name, value) + + def add_from_env(self, + env_var_key: str, + attribute_name: Optional[str] = None, + is_mandatory: bool = False, + description: str = '', + secret: bool = False, + default: Optional[str] = None, + allowed_values: Optional[List[Any]] = None, + factory: Optional[Callable[[str], Any]] = None, + validator: Optional[Callable[[Any], int]] = None, + *, override: bool = False) -> Optional: + env_var = EnvVar(key=env_var_key, + description=description, + secret=secret, + default=default, + factory=factory, + allowed_values=allowed_values, + is_mandatory=is_mandatory, + validator=validator) + env_var.load(self._environment_dict) + self.add(attribute_name or env_var_key, env_var, override=override) + return env_var.value + + @classmethod + def with_load_all(cls, environment_dict=os.environ, contextual_loader=None): + inst = cls(_environment_dict=environment_dict, + _contextual_loader=contextual_loader) + inst.reload() + return inst + + def reload(self, environment_dict=None): + if environment_dict is not None: + self._environment_dict = environment_dict + + # loop for EnvVar and load them all + for var in self.env_vars(): + var.load(self._environment_dict) + + if self._contextual_loader is not None: + self._contextual_loader(self) + + def __getitem__(self, name): + """ look for any declared attribute and env var key """ + attribute = self.get_env_or_attribute(name) + if attribute is None: # fallback into environment dict + return self._environment_dict[name] + + return attribute.value if isinstance(attribute, EnvVar) else attribute + + def get(self, name, default=None): + try: + return self[name] + except KeyError: + return default + + def get_env_or_attribute(self, name) -> Optional[EnvVar]: + if name in self.__dict__: + return self.__getattribute__(name) + return next((v for v in self.env_vars() if v.key == name), None) + + def __contains__(self, name) -> bool: + if name in self.__dict__: + return True + return any([v.key == name for v in self.env_vars()]) + + def __repr__(self): + return ', '.join([f'{k}={v}' for k, v in 
self.as_printable_dict().items()]) + + def as_printable_dict(self) -> Dict[str, str]: + return { + name: + att.printable_value if isinstance(att, EnvVar) + else att for name, att in self.__dict__.items() + if not name.startswith('_')} + + def env_vars(self): + """ generator of all env vars only """ + for name, attribute in self.__dict__.items(): + if isinstance(attribute, EnvVar): + yield attribute + + +def cloud_provider_additional_environment(config: ConfigurationContainer): + provider = config.cloud_provider.value + if provider == 'az': + config.add_from_env(attribute_name='az_ai_instrumentation_key', + env_var_key='AZ_AI_INSTRUMENTATION_KEY', + description='azure app insights instrumentation key', + secret=True, + is_mandatory=True, + override=True) + + config.add_from_env(attribute_name='az_logger_level', + env_var_key='AZ_LOGGER_LEVEL', + description='azure logger level', + default='INFO', + secret=False, + is_mandatory=False, + override=True) + + if provider == 'gcp': + config.add_from_env(attribute_name='default_data_tenant_project_id', + env_var_key='OS_WELLBORE_DDMS_DATA_PROJECT_ID', + description='GCP data tenant ID', + default='logstore-dev', + is_mandatory=True, + override=True) + + config.add_from_env(attribute_name='default_data_tenant_credentials', + env_var_key='OS_WELLBORE_DDMS_DATA_PROJECT_CREDENTIALS', + description='path to the key file of the SA to access the data tenant', + is_mandatory=True, + override=True, + validator=validator_path_must_exist) + + if provider == 'ibm': + config.add_from_env(attribute_name='default_data_tenant_project_id', + env_var_key='OS_WELLBORE_DDMS_DATA_PROJECT_ID', + description='IBM data tenant ID', + default='logstore-ibm', + is_mandatory=True, + override=True) + +# Global config instance +Config = ConfigurationContainer.with_load_all(contextual_loader=cloud_provider_additional_environment) + + +def check_environment(configuration): + """ + The goal is to fail fast and provide meaningfully report in case of error to ease any fix/debug + We may generalize and isolate this in each module (some implementation may need specific setup, + e.g. some Azure impl may require an dedicated env var to some valid file). + For now keep every rules here and review it later. + + By default, in dev_mode log only. In not dev mode + """ + logger.info('Environment configuration:') + for k, v in configuration.as_printable_dict().items(): + logger.info(f' - {k} = {v}') + + mandatory_variables = [v for v in configuration.env_vars() + if v.is_mandatory and not v] + errors = [f'env var {v.key} ({v.description}) is undefined or invalid, current value={os.environ.get(v.key)}' + for v in mandatory_variables] + + logger_level = logger.warning if configuration.dev_mode.value else logger.error + for err in errors: + logger_level(err) + + # handle errors, in no dev mode exit immediately + if any(errors): + if configuration.dev_mode.value: + logger.error('!!! 
The current environment is not correctly setup to run the service, see logs !!!') + else: # just abort + raise RuntimeError('Incorrect environment: ' + ', '.join(errors)) + + +AUTHORIZATION_HEADER_NAME = 'Authorization' +APP_KEY_HEADER_NAME = 'appKey' +CORRELATION_ID_HEADER_NAME = 'correlation-id' +REQUEST_ID_HEADER_NAME = 'Request-ID' + diff --git a/app/errors/__init__.py b/app/errors/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/errors/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/errors/client_error.py b/app/errors/client_error.py new file mode 100644 index 0000000000000000000000000000000000000000..88420782f9c56726aeb5f285a11ceaef92d168ec --- /dev/null +++ b/app/errors/client_error.py @@ -0,0 +1,119 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
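The container can be read both by attribute and by env-var key, as its docstring describes; a small sketch assuming the mandatory SERVICE_HOST_* variables are already exported in the environment:

```python
import os
from app.conf import (ConfigurationContainer, check_environment,
                      cloud_provider_additional_environment)

os.environ.setdefault("CLOUD_PROVIDER", "local")
cfg = ConfigurationContainer.with_load_all(
    contextual_loader=cloud_provider_additional_environment)

print(cfg.cloud_provider.value)   # attribute access -> EnvVar value
print(cfg["CLOUD_PROVIDER"])      # key access -> same value
check_environment(cfg)            # logs the config and fails fast on missing mandatory variables
```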
+ +from odes_entitlements.exceptions import ( + ApiException as OSDUEntitlementsException, + UnexpectedResponse as OSDUEntitlementsUnexpectedResponse, + ResponseValidationError as OSDUEntitlementsResponseValidationError, + ResponseHandlingException as OSDUEntitlementsResponseHandlingException +) +from odes_search.exceptions import ( + ApiException as OSDUSearchException, + UnexpectedResponse as OSDUSearchUnexpectedResponse, + ResponseValidationError as OSDUSearchResponseValidationError, + ResponseHandlingException as OSDUSearchResponseHandlingException +) +from odes_storage.exceptions import ( + ApiException as OSDUStorageException, + UnexpectedResponse as OSDUStorageUnexpectedResponse, + ResponseValidationError as OSDUStorageResponseValidationError, + ResponseHandlingException as OSDUStorageResponseHandlingException +) + +from osdu_az.exceptions.data_access_error import (DataAccessError as OSDUPartitionException) + +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR +from app.utils import get_ctx + +OSDU_DATA_ECOSYSTEM_SEARCH = "osdu-data-ecosystem-search" +OSDU_DATA_ECOSYSTEM_STORAGE = "osdu-data-ecosystem-storage" +OSDU_DATA_ECOSYSTEM_ENTITLEMENTS = "osdu-data-ecosystem-entitlements" +OSDU_DATA_ECOSYSTEM_PARTITION = "osdu-data-ecosystem-partition" + +CONTENT_ENCODING = "utf-16" + + +async def http_search_error_handler(request: Request, exc: OSDUSearchException) -> JSONResponse: + """ + Catches and handles Exceptions raised by os-python-client + """ + get_ctx().logger.exception(f"http_search_error_handler - url: '{request.url}'") + if isinstance(exc, OSDUSearchUnexpectedResponse): + status = exc.status_code + errors = [exc.reason_phrase] + elif isinstance(exc, OSDUSearchResponseValidationError): + status = exc.status_code + errors = exc.args + elif isinstance(exc, OSDUSearchResponseHandlingException): + status = HTTP_500_INTERNAL_SERVER_ERROR + errors = exc.source.args + else: + status = HTTP_500_INTERNAL_SERVER_ERROR + errors = exc.args + + return JSONResponse({"origin": OSDU_DATA_ECOSYSTEM_SEARCH, "errors": errors}, status_code=status) + + +async def http_storage_error_handler(request: Request, exc: OSDUStorageException) -> JSONResponse: + """ + Catches and handles Exceptions raised by os-python-client + """ + get_ctx().logger.exception(f"http_storage_error_handler - url: '{request.url}'") + if isinstance(exc, OSDUStorageUnexpectedResponse): + status = exc.status_code + errors = [exc.reason_phrase] + elif isinstance(exc, OSDUStorageResponseValidationError): + status = exc.status_code + errors = [exc.content] + elif isinstance(exc, OSDUStorageResponseHandlingException): + status = HTTP_500_INTERNAL_SERVER_ERROR + errors = exc.source.args + else: + status = HTTP_500_INTERNAL_SERVER_ERROR + errors = exc.args + + return JSONResponse({"origin": OSDU_DATA_ECOSYSTEM_STORAGE, "errors": errors}, status_code=status) + + +async def http_entitlements_error_handler(request: Request, exc: OSDUEntitlementsException) -> JSONResponse: + """ + Catches and handles Exceptions raised by os-python-client + """ + get_ctx().logger.exception(f"http_entitlements_error_handler - url: '{request.url}'") + if isinstance(exc, OSDUEntitlementsUnexpectedResponse): + status = exc.status_code + errors = [exc.reason_phrase] + elif isinstance(exc, OSDUEntitlementsResponseValidationError): + status = exc.status_code + errors = [exc.content] + elif isinstance(exc, OSDUEntitlementsResponseHandlingException): + status = 
HTTP_500_INTERNAL_SERVER_ERROR + errors = exc.source.args + else: + status = HTTP_500_INTERNAL_SERVER_ERROR + errors = exc.args + + return JSONResponse({"origin": OSDU_DATA_ECOSYSTEM_ENTITLEMENTS, "errors": errors}, status_code=status) + + +async def http_partition_error_handler(request: Request, exc: OSDUPartitionException) -> JSONResponse: + """ + Catches and handles Exceptions raised by os-python-client + """ + get_ctx().logger.exception(f"http_partition_error_handler - url: '{request.url}'") + + return JSONResponse({"origin": OSDU_DATA_ECOSYSTEM_PARTITION, "errors": [exc.message]}, + status_code=exc.status_code) diff --git a/app/errors/exception_handlers.py b/app/errors/exception_handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..556bd3d7020be85732477c2cf1135664d2ed58c7 --- /dev/null +++ b/app/errors/exception_handlers.py @@ -0,0 +1,40 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from pydantic import ValidationError +from odes_entitlements.exceptions import ApiException as OSDUEntitlementsException +from odes_search.exceptions import ApiException as OSDUSearchException +from odes_storage.exceptions import ApiException as OSDUStorageException +from osdu_az.exceptions.data_access_error import DataAccessError as OSDUPartitionException + +from .unhandled_error import unhandled_error_handler +from .validation_error import http422_error_handler +from .client_error import ( + http_search_error_handler, + http_storage_error_handler, + http_entitlements_error_handler, + http_partition_error_handler +) + + +__all__ = ['add_exception_handlers'] + + +def add_exception_handlers(app): + app.add_exception_handler(ValidationError, http422_error_handler) + app.add_exception_handler(OSDUSearchException, http_search_error_handler) + app.add_exception_handler(OSDUStorageException, http_storage_error_handler) + app.add_exception_handler(OSDUEntitlementsException, http_entitlements_error_handler) + app.add_exception_handler(OSDUPartitionException, http_partition_error_handler) + app.add_exception_handler(Exception, unhandled_error_handler) diff --git a/app/errors/unhandled_error.py b/app/errors/unhandled_error.py new file mode 100644 index 0000000000000000000000000000000000000000..b66de6a11a7e9c0fc06f97606267727cef931dd7 --- /dev/null +++ b/app/errors/unhandled_error.py @@ -0,0 +1,27 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
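Registering all the handlers on the application is then a single call; a sketch (the FastAPI app construction here is illustrative):

```python
from fastapi import FastAPI
from app.errors.exception_handlers import add_exception_handlers

app = FastAPI()
add_exception_handlers(app)  # 422 validation, search/storage/entitlements/partition client errors, catch-all 500
```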
+ +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR +from app.helper.logger import get_logger + + +async def unhandled_error_handler(request: Request, exc: Exception) -> JSONResponse: + """ + To handle wild exception not caught by other exception handlers + """ + get_logger().exception(f"unhandled_error_handler - {request.url}") + + return JSONResponse({"error": [str(exc)]}, status_code=HTTP_500_INTERNAL_SERVER_ERROR) diff --git a/app/errors/validation_error.py b/app/errors/validation_error.py new file mode 100644 index 0000000000000000000000000000000000000000..93a0476d33dbde933e2b654688a0c82f31f50767 --- /dev/null +++ b/app/errors/validation_error.py @@ -0,0 +1,45 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Union + +from fastapi.exceptions import RequestValidationError +from fastapi.openapi.constants import REF_PREFIX +from fastapi.openapi.utils import validation_error_response_definition +from pydantic import ValidationError +from starlette.requests import Request +from starlette.responses import JSONResponse +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + +from app.helper.logger import get_logger + + +async def http422_error_handler( + request: Request, exc: Union[RequestValidationError, ValidationError], +) -> JSONResponse: + """ + Catches and handles pydantic validation errors + """ + + get_logger().exception(f"http422_error_handler - {request.url}") + return JSONResponse({"errors": exc.errors()}, status_code=HTTP_422_UNPROCESSABLE_ENTITY) + + +validation_error_response_definition["properties"] = { + "errors": { + "title": "Errors", + "type": "array", + "items": {"$ref": "{0}ValidationError".format(REF_PREFIX)}, + }, +} diff --git a/app/helper/__init__.py b/app/helper/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/helper/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/helper/logger.py b/app/helper/logger.py new file mode 100644 index 0000000000000000000000000000000000000000..9392ff125d39143f9f68de725f2560eac2f820df --- /dev/null +++ b/app/helper/logger.py @@ -0,0 +1,168 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging +import traceback +import sys +import rapidjson + +import structlog +from structlog.contextvars import merge_contextvars +from opencensus.ext.azure.log_exporter import AzureLogHandler +from opencensus.trace import config_integration + +from app.conf import Config +from app.utils import get_or_create_ctx +from app.helper.utils import rename_cloud_role_func + +_LOGGER = None + + +def get_logger(): + return _LOGGER + + +class StackDriverRenderer(object): + def __init__(self, service_name=None): + self.service_name = service_name + + def __call__(self, _, method, event_dict): + if self.service_name: + event_dict['serviceContext'] = {'service': self.service_name} + + # rename event to msg + if 'event' in event_dict: + event_dict['message'] = event_dict['event'] + del event_dict['event'] + + # Required by stackdriver to display level of error accordingly + event_dict.setdefault("severity", method) + + if method == 'error' or method == 'critical': + # Enable display of this error in 'Error reporting' in GCP + event_dict['@type'] = 'type.googleapis.com/google.devtools.clouderrorreporting.v1beta1.ReportedErrorEvent' + if sys.exc_info()[0]: + # check if an exception exist https://docs.python.org/2/library/sys.html#sys.exc_info + event_dict['stack_trace'] = traceback.format_exc() + + return event_dict + + +class AzureContextLoggerAdapter(logging.LoggerAdapter): + """ + This adapter adds contextual information into messages to be logged in Azure monitoring. + It aims to add as custom properties contextual fields, following this instructions: + https://docs.microsoft.com/en-us/azure/azure-monitor/app/opencensus-python + """ + + @staticmethod + def _set_extra_attrs(properties): + ctx = get_or_create_ctx() + + properties.setdefault('correlation-id', ctx.correlation_id) + properties.setdefault('request-id', ctx.request_id) + properties.setdefault('data-partition-id', ctx.partition_id) + properties.setdefault('app-key', ctx.app_key) + properties.setdefault('api-key', ctx.api_key) + + def process(self, msg, kwargs): + """ + Retrieve context created in basic middleware from request info to append them + in log message as custom attributes + """ + custom_properties = dict() + self._set_extra_attrs(custom_properties) + kwargs['extra'] = dict(custom_dimensions=custom_properties) + + return msg, kwargs + + +def init_logger(service_name): + global _LOGGER + + if Config.cloud_provider.value == 'az': + _LOGGER = create_azure_logger(service_name) + elif Config.cloud_provider.value == 'gcp': + _LOGGER = create_gcp_logger(service_name) + else: + logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.DEBUG) + _LOGGER = logging.getLogger(__name__) + + return _LOGGER + + +def create_azure_logger(service_name): + """ + Create logger with two handlers: + - AzureLogHandler: to see Dependencies, Requests, Traces and Exception into Azure monitoring + - [default] StreamHandler (c.f. 
logging.basicConfig() ) to see all logs into the std.out captured in container logs + + returns logger configured wrapped into ContextLoggerAdapter + """ + config_integration.trace_integrations(['logging']) + logger = logging.getLogger(__name__) + logger.setLevel(logging.DEBUG) + + ch = logging.StreamHandler(sys.stdout) + logger.addHandler(ch) + + key = Config.get('az_ai_instrumentation_key') + logger_level = Config.get('az_logger_level') + handler = AzureLogHandler(connection_string=f'InstrumentationKey={key}') + handler.setLevel(logging.getLevelName(logger_level)) + handler.add_telemetry_processor(rename_cloud_role_func(service_name)) + logger.addHandler(handler) + + return AzureContextLoggerAdapter(logger, extra=dict()) + + +def create_gcp_logger(service_name): + """ + Initialize structlog with following configuration: + - Make logs compatible with Stackdriver + - if dev_mode, display stacktrace out of json item + Returns structlog + """ + + structlog.configure( + processors=[ + StackDriverRenderer(service_name=service_name), + merge_contextvars, + structlog.stdlib.filter_by_level, + structlog.stdlib.add_logger_name, + structlog.stdlib.PositionalArgumentsFormatter(), + structlog.processors.TimeStamper(fmt="iso"), + structlog.processors.StackInfoRenderer(), + structlog.processors.format_exc_info, + structlog.processors.UnicodeDecoder(), + structlog.processors.JSONRenderer(serializer=rapidjson.dumps) + ], + context_class=dict, + logger_factory=structlog.stdlib.LoggerFactory(), + wrapper_class=structlog.stdlib.BoundLogger, + cache_logger_on_first_use=True, + ) + + my_logger = structlog.getLogger(__name__) + + ch = logging.StreamHandler(sys.stdout) + ch.setLevel(logging.DEBUG) + ch.setFormatter(logging.Formatter('%(message)s')) + my_logger.addHandler(ch) + + std_ddms_app = logging.getLogger(__name__) + # avoid double logging by the root logger + std_ddms_app.propagate = False + + return my_logger diff --git a/app/helper/traces.py b/app/helper/traces.py new file mode 100644 index 0000000000000000000000000000000000000000..a64fb5aab543e093532335cfda28a5e2a1bedd03 --- /dev/null +++ b/app/helper/traces.py @@ -0,0 +1,103 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
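Illustrative usage sketch (not part of the patch): how the logger module above would typically be initialised at service startup. The service name is a placeholder, and Config is assumed to already carry the cloud provider and the Azure/GCP settings it reads.

from app.helper.logger import init_logger, get_logger

def configure_logging():
    # Picks AzureLogHandler, structlog/Stackdriver or plain logging.basicConfig
    # depending on Config.cloud_provider (see init_logger above).
    init_logger(service_name="wellbore-ddms")   # placeholder service name
    get_logger().info("logging configured")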
+ +from app.conf import Config +from app.helper.utils import rename_cloud_role_func, COMPONENT + +from opencensus.common.transports.async_ import AsyncTransport +from opencensus.trace import base_exporter +from opencensus.ext.stackdriver.trace_exporter import StackdriverExporter +from opencensus.ext.azure.trace_exporter import AzureExporter +from opencensus.trace.propagation.trace_context_http_header_format import TraceContextPropagator + +""" +How to add specific span in a method + +>> from app.utils import Context, get_ctx +>> +>> @router.get("/about", response_model=MyResponseClass) +>> async def my_endpoint_method(ctx: Context = Depends(get_ctx)) -> MyResponseClass: +>> +>> with ctx.tracer.span(name='test-sub-about.construct'): +>> result = someComputation() +>> with ctx.tracer.span(name='test-sub-about.construct'): +>> return MyResponseClass(result) + +""" + + +def get_trace_propagator() -> TraceContextPropagator: + """ + Returns the implementation of standard tracing propagation as defined + by W3C: https://www.w3.org/TR/trace-context/ + """ + return TraceContextPropagator() + + +def _create_azure_exporter(key: str): + return AzureExporter(connection_string=f'InstrumentationKey={key}') + + +def _create_gcp_exporter(): + return StackdriverExporter(transport=AsyncTransport) + + +def create_exporter(service_name): + """ + Create exporters to sent tracing to different tracing platforms e.g. Stackdriver (Google) or Azure + c.f. documentation https://opencensus.io/exporters/supported-exporters/python/ + """ + combined_exporter = CombinedExporter(service_name=service_name) + + if Config.cloud_provider.value == 'gcp': + print("Registering OpenCensus trace Stackdriver") + + stackdriver_exporter = _create_gcp_exporter() + combined_exporter.add_exporter(stackdriver_exporter) + elif Config.cloud_provider.value == 'az': + print("Registering OpenCensus trace AzureExporter") + + key = Config.get('az_ai_instrumentation_key') + try: + az_exporter = _create_azure_exporter(key) + az_exporter.add_telemetry_processor(rename_cloud_role_func(service_name)) + combined_exporter.add_exporter(az_exporter) + except ValueError as e: + print('Unable to create AzureExporter:', str(e)) + else: + print("No trace will be exported") + + return combined_exporter + + +class CombinedExporter(base_exporter.Exporter): + """ + The Opencensus lib allow to have only 1 exporter, so this class is used to combine multiple exporters + """ + def __init__(self, exporters=None, service_name="undefined"): + if exporters is None: + exporters = [] + self.exporters = exporters + self.service_name = service_name + + def add_exporter(self, exporter): + self.exporters.append(exporter) + + def export(self, span_datas): + # Add shared attributes to all spans + for span_data in span_datas: + span_data.attributes[COMPONENT] = self.service_name + + for e in self.exporters: + e.export(span_datas) diff --git a/app/helper/utils.py b/app/helper/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1885173d31b52ad7e99aad62b672405327b3566d --- /dev/null +++ b/app/helper/utils.py @@ -0,0 +1,79 @@ +from structlog.contextvars import bind_contextvars +from opencensus.trace.attributes_helper import COMMON_ATTRIBUTES + +from starlette.requests import Request +import http + + +def rename_cloud_role_func(service_name): + """ + Return a processor function to change 'Cloud Role Name' in AppInsight with given service_name variable. + It's used by AzureLogHandler and AzureExporter. 
+ https://docs.microsoft.com/en-us/azure/azure-monitor/app/api-filtering-sampling#opencensus-python-telemetry-processors + """ + def callback_func(envelope): + envelope.tags['ai.cloud.role'] = service_name + return True + + return callback_func + + +def add_fields(**kwargs): + """ + Add key-value pairs to our homemade logger + e.g. + >>> bind_contextvars(a=1, b=2) + >>> # Then use loggers as per normal + >>> log.msg("hello") + a=1 b=2 event='hello' + Full documentation: https://www.structlog.org/en/stable/contextvars.html + """ + bind_contextvars(**kwargs) + + +def _get_status_phrase(status_code): + try: + return http.HTTPStatus(status_code).phrase + except ValueError: + return str() + + +STATUS_PHRASES = { + status_code: _get_status_phrase(status_code) for status_code in range(100, 600) +} + + +def process_message(request: Request, status_code: int): + """ + Returns pretty print string to be logger, from Starlette request and status code. + E.g. Request from: 127.0.0.1:55353 - "GET /api/os-wellbore-ddms/ddms/v2/about" 200 OK + """ + reason = STATUS_PHRASES[status_code] + return f'Request from: {_get_client_str(request.client)} - "{request.method}' \ + f' {request.url.path}" {status_code} {reason}' + + +def _get_client_str(client) -> str: + """ + Returns a string container host:port from given starlette client + """ + host, port = client.host, client.port + if not host: + return "" + return f'{host}:{port}' + + +""" +Attributes helper have been used similarly to some examples: +Ex of other middleware : https://github.com/census-instrumentation/opencensus-python/blob/master/contrib/opencensus-ext-django/opencensus/ext/django/middleware.py +https://github.com/census-instrumentation/opencensus-python/blob/master/opencensus/trace/attributes_helper.py +""" +HTTP_HOST = COMMON_ATTRIBUTES['HTTP_HOST'] +HTTP_METHOD = COMMON_ATTRIBUTES['HTTP_METHOD'] +HTTP_PATH = COMMON_ATTRIBUTES['HTTP_PATH'] +HTTP_ROUTE = COMMON_ATTRIBUTES['HTTP_ROUTE'] +HTTP_URL = COMMON_ATTRIBUTES['HTTP_URL'] +HTTP_STATUS_CODE = COMMON_ATTRIBUTES['HTTP_STATUS_CODE'] +HTTP_REQUEST_SIZE = COMMON_ATTRIBUTES['HTTP_REQUEST_SIZE'] +HTTP_RESPONSE_SIZE = COMMON_ATTRIBUTES['HTTP_RESPONSE_SIZE'] +COMPONENT = COMMON_ATTRIBUTES['COMPONENT'] diff --git a/app/injector/__init__.py b/app/injector/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/injector/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/injector/app_injector.py b/app/injector/app_injector.py new file mode 100644 index 0000000000000000000000000000000000000000..93beb089d6e4d62628d87f3f7448ddfd64acd913 --- /dev/null +++ b/app/injector/app_injector.py @@ -0,0 +1,97 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from abc import ABC, abstractmethod +from typing import Type, Any, Callable +import asyncio + + +class WithLifeTime: + """ + This class gathers lifetime definitions. The goal is to encapsulate lifetime management. Each definition implements + `make` method that takes as input a callable that constructs the object and returns a callable + that provides an object with its lifetime managed somehow. + """ + class Base(ABC): + """ Base lifetime class """ + @abstractmethod + def make(self, builder_fn) -> Callable: + """ + :param builder_fn: input factory function/callable that builds an object. + :return: returns a callable that returns an object + """ + raise NotImplemented() + + class Transient(Base): + """ Transient lifetime: a new object is created each time """ + def make(self, builder_fn) -> Callable: + return builder_fn + + class Singleton(Base): + """ + Singleton lifetime: only one instance is constructed, build at the first call then always provides this instance + """ + def __init__(self): + self._builder_fn = None + self._instance = None + + def make(self, builder_fn) -> Callable: + self._builder_fn = builder_fn + return self # return self, meaning the + + async def __call__(self, *args, **kwargs) -> Any: + if self._instance is None: + self._instance = await self._builder_fn(*args, **kwargs) + return self._instance + + +class AppInjector(ABC): + """ + A basic class to handle dependency injection. Module is responsible of managing the lifetime (e.g. new instance or + single instance or some ttl ...) + """ + + def __init__(self): + self._factory_dict = {} + + def register(self, interface: Type, factory_coroutine, lifetime: WithLifeTime.Base = WithLifeTime.Transient()): + """ + :param interface: with interface to register + :param factory_coroutine: async builder callable + :param lifetime: specific lifetime. By default it use transient lifetime which mean the build function is called + everytime. 
Use WithLifeTime.Singleton to use a single instance instead + :return: + """ + assert asyncio.iscoroutinefunction(factory_coroutine), 'only coroutine is expected' + self._factory_dict[self._key_from_type(interface)] = lifetime.make(factory_coroutine) + + async def get(self, interface: Type, *args, **kwargs) -> Any: + """ + :param interface: interface require + :param kwargs: parameters are passed as it to the factory func + :return: + """ + factory_coroutine = self._factory_dict[self._key_from_type(interface)] + return await factory_coroutine(*args, **kwargs) + + @staticmethod + def _key_from_type(t: Type) -> str: + return str(t) + + +class AppInjectorModule(ABC): + + @abstractmethod + def configure(self, injector: AppInjector): + raise NotImplementedError('AppInjectorModule.configure is abstract') diff --git a/app/injector/az_injector.py b/app/injector/az_injector.py new file mode 100644 index 0000000000000000000000000000000000000000..2628e62fed3b96b877a27943f9545ad2d869cd32 --- /dev/null +++ b/app/injector/az_injector.py @@ -0,0 +1,26 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from osdu.core.api.storage.blob_storage_base import BlobStorageBase +from osdu_az.storage.blob_storage_az import AzureAioBlobStorage +from .app_injector import AppInjector, AppInjectorModule + + +class AzureInjector(AppInjectorModule): + def configure(self, app_injector: AppInjector): + app_injector.register(BlobStorageBase, AzureInjector.build_az_blob_storage) + + @staticmethod + async def build_az_blob_storage() -> BlobStorageBase: + return AzureAioBlobStorage() diff --git a/app/injector/gcp_injector.py b/app/injector/gcp_injector.py new file mode 100644 index 0000000000000000000000000000000000000000..da7d3eae2e19940abd2ca0aa2d5b37d77452198b --- /dev/null +++ b/app/injector/gcp_injector.py @@ -0,0 +1,35 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
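Illustrative sketch (not part of the patch) of the AppInjector/WithLifeTime contract defined above: factories must be coroutine functions, and the Singleton lifetime caches the first built instance. Greeter is a made-up example type used only here.

import asyncio
from app.injector.app_injector import AppInjector, WithLifeTime

class Greeter:                                   # hypothetical example interface
    def hello(self) -> str:
        return "hello"

async def build_greeter() -> Greeter:            # factories must be coroutines (see assert in register)
    return Greeter()

async def main():
    injector = AppInjector()
    injector.register(Greeter, build_greeter, lifetime=WithLifeTime.Singleton())
    first = await injector.get(Greeter)
    second = await injector.get(Greeter)
    assert first is second                       # Singleton lifetime: same instance on every get()

asyncio.run(main())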
+ +from osdu.core.api.storage.blob_storage_base import BlobStorageBase +from app.utils import get_http_client_session +from osdu_gcp.storage.blob_storage_gcp import GCloudAioStorage +from .app_injector import AppInjector, AppInjectorModule +from app.utils import Context +from app.bulk_persistence import resolve_tenant + + +class GCPInjector(AppInjectorModule): + def configure(self, app_injector: AppInjector): + app_injector.register(BlobStorageBase, GCPInjector.build_gcp_blob_storage) + + @staticmethod + async def build_gcp_blob_storage(*args, **kwargs) -> BlobStorageBase: + ctx: Context = Context.current() + # TODO to be reviewed + tenant = await resolve_tenant(ctx.partition_id) + return GCloudAioStorage( + session=get_http_client_session(), + service_account_file=tenant.credentials + ) diff --git a/app/injector/ibm_injector.py b/app/injector/ibm_injector.py new file mode 100644 index 0000000000000000000000000000000000000000..c1ec34f22b5b2e926035b013afa8e41fd6f95198 --- /dev/null +++ b/app/injector/ibm_injector.py @@ -0,0 +1,21 @@ +from osdu.core.api.storage.blob_storage_base import BlobStorageBase +from app.utils import get_http_client_session +from osdu_ibm.storage.blob_storage_ibm import IBMObjectStorage +from .app_injector import AppInjector, AppInjectorModule +from app.utils import Context +from app.bulk_persistence import resolve_tenant + + +class IBMInjector(AppInjectorModule): + def configure(self, app_injector: AppInjector): + app_injector.register(BlobStorageBase, IBMInjector.build_ibm_blob_storage) + + @staticmethod + async def build_ibm_blob_storage(*args, **kwargs) -> BlobStorageBase: + ctx: Context = Context.current() + # TODO to be reviewed + tenant = await resolve_tenant(ctx.partition_id) + return IBMObjectStorage( + session=get_http_client_session(), + service_account_file=tenant.credentials + ) diff --git a/app/injector/main_injector.py b/app/injector/main_injector.py new file mode 100644 index 0000000000000000000000000000000000000000..beaad55ca3d6ae46dec51fa1bfb56a763bae9194 --- /dev/null +++ b/app/injector/main_injector.py @@ -0,0 +1,114 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from osdu.core.api.storage.blob_storage_base import BlobStorageBase +from app.conf import * +from .app_injector import AppInjector, AppInjectorModule, WithLifeTime + +from app.injector.az_injector import AzureInjector +from app.injector.gcp_injector import GCPInjector +from app.clients import StorageRecordServiceClient +from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage +from app.clients.search_service_client import SearchServiceClient +from app.clients import make_search_client, make_storage_record_client +from osdu.core.api.storage.blob_storage_local_fs import LocalFSBlobStorage +from app.helper.logger import get_logger +from app.injector.ibm_injector import IBMInjector + + +class MainInjector(AppInjectorModule): + """ + Gather sub injectors, dependency common to any type of deployment and then overriders. 
+ """ + + def configure(self, app_injector: AppInjector): + logger = get_logger() + + app_injector.register( + StorageRecordServiceClient, + self.build_storage_service_client, + WithLifeTime.Singleton() + ) + + app_injector.register( + SearchServiceClient, + self.build_search_service_client, + WithLifeTime.Singleton() + ) + + # TODO use constants + # switch gcp/azure + if Config.cloud_provider.value == 'az': + logger.info('using az injector') + AzureInjector().configure(app_injector) + + if Config.cloud_provider.value == 'gcp': + logger.info('using gcp injector') + GCPInjector().configure(app_injector) + + if Config.cloud_provider.value == 'ibm': + logger.info('using ibm injector') + IBMInjector().configure(app_injector) + + # run overriders + self.overriders(app_injector) + + def overriders(self, app_injector: AppInjector): + """ defined here any overrider """ + logger = get_logger() + + if Config.dev_mode.value: + storage_path: str = Config.get('USE_INTERNAL_STORAGE_SERVICE_WITH_PATH') + if storage_path: + logger.warning(f'overriding storage service using localfs blob storage {storage_path}') + app_injector.register(StorageRecordServiceClient, + self.make_storage_service_on_localfs_blob_storage_builder(storage_path)) + + blob_storage_localfs: str = Config.get('USE_LOCALFS_BLOB_STORAGE_WITH_PATH') + if blob_storage_localfs: + async def _blob_storage_builder(): + return LocalFSBlobStorage(directory=blob_storage_localfs) + + logger.warning(f'overriding blob storage to use local fs on path ' + blob_storage_localfs) + app_injector.register(BlobStorageBase, _blob_storage_builder) + + @staticmethod + def make_storage_service_on_localfs_blob_storage_builder(path: str): + """ + create a builder to instantiate a storage service based on local + :param path: local path to the folder where to store blobs/files + :return async builder + """ + import os + assert os.path.exists(path), path + ' not found' + return MainInjector.make_storage_service_on_blob_storage_builder( + LocalFSBlobStorage(directory=path), project='p', container='c' + ) + + @staticmethod + def make_storage_service_on_blob_storage_builder(blob_storage, project: str, container: str): + """ instantiate a storage service based on the given blob storage """ + + async def _build_it(*args, **kwargs): + return StorageRecordServiceBlobStorage(blob_storage=blob_storage, project=project, container=container) + + return _build_it + + @staticmethod + async def build_storage_service_client(host=None, *args, **kwargs) -> StorageRecordServiceClient: + return make_storage_record_client(host or Config.service_host_storage.value) + + @staticmethod + async def build_search_service_client(host=None, *args, **kwargs) -> SearchServiceClient: + return make_search_client(host or Config.service_host_search.value) diff --git a/app/middleware/__init__.py b/app/middleware/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..603bcb132e1fba41a5bb6576d422cf4f9242bed4 --- /dev/null +++ b/app/middleware/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from app.middleware.basic_context_middleware import * +from app.middleware.traces_middleware import * + diff --git a/app/middleware/basic_context_middleware.py b/app/middleware/basic_context_middleware.py new file mode 100644 index 0000000000000000000000000000000000000000..2accb64d9e7b418f53c609dd64489a23a2ccd48b --- /dev/null +++ b/app/middleware/basic_context_middleware.py @@ -0,0 +1,87 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uuid + +from fastapi import Depends, Header +from fastapi.security.api_key import APIKeyHeader +from starlette.middleware.base import BaseHTTPMiddleware +from structlog.contextvars import clear_contextvars as clear_logger_contextvars + +import app.helper.utils as logger_utils +from app import conf +from app.injector.app_injector import AppInjector +from app.model.user import User +from app.utils import Context, get_or_create_ctx +from app.helper.logger import get_logger + + +class CreateBasicContextMiddleware(BaseHTTPMiddleware): + def __init__(self, injector: AppInjector, **kwargs): + super().__init__(**kwargs) + self._app_injector = injector + + @staticmethod + def _add_csp_header(request, response): + """ + Returns the response with the additional CSP headers added to allow for swagger js and css files from the given domains. 
+ """ + if "/docs" in request.url.path: + response.headers["Content-Security-Policy"] = "default-src 'self'; script-src 'self' *.jsdelivr.net 'unsafe-inline'; style-src 'self' *.jsdelivr.net; img-src 'self' *.tiangolo.com data:;" + + async def dispatch(self, request, call_next): + api_key = request.headers.get('x-api-key', None) + app_key = request.headers.get(conf.APP_KEY_HEADER_NAME, None) + partition_id = request.headers.get('data-partition-id', None) + correlation_id = request.headers.get(conf.CORRELATION_ID_HEADER_NAME, str(uuid.uuid4())) + request_id = request.headers.get(conf.REQUEST_ID_HEADER_NAME, str(uuid.uuid4())) + anonymous_user = User(email='anonymous', authenticated=False) + + clear_logger_contextvars() + logger_utils.add_fields(correlation_id=correlation_id, + request_id=request_id, + partition_id=partition_id, + app_key=app_key, + api_key=api_key) + + ctx = get_or_create_ctx() + ctx.set_current_with_value(logger=get_logger(), + correlation_id=correlation_id, + request_id=request_id, + partition_id=partition_id, + app_key=app_key, + api_key=api_key, + user=anonymous_user, + app_injector=self._app_injector) + + request.scope['user'] = anonymous_user + + response = await call_next(request) + self._add_csp_header(request, response) + return response + + +async def require_data_partition_id( + data_partition_id: str = Header(default=None, + title='data partition id', + description='identifier of the data partition to query', + min_length=1)): + Context.set_current_with_value(partition_id=data_partition_id) + + +appkey_header = APIKeyHeader(name=conf.APP_KEY_HEADER_NAME) + + +async def require_appkey(appkey: APIKeyHeader = Depends(appkey_header)): + Context.set_current_with_value(app_key=appkey) diff --git a/app/middleware/traces_middleware.py b/app/middleware/traces_middleware.py new file mode 100644 index 0000000000000000000000000000000000000000..37eb314cbc45f4e0dff6584da8e311ccfbe14332 --- /dev/null +++ b/app/middleware/traces_middleware.py @@ -0,0 +1,130 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Any + +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request +from starlette.responses import Response +from starlette.status import HTTP_500_INTERNAL_SERVER_ERROR + +from opencensus.trace import tracer as open_tracer +from opencensus.trace.samplers import AlwaysOnSampler +from opencensus.trace.span import SpanKind + +from app.helper import traces, utils +from app.utils import get_or_create_ctx +from app import conf +from inspect import isfunction as is_function + + +class TracingMiddleware(BaseHTTPMiddleware): + def __init__(self, **kwargs): + super().__init__(**kwargs) + self._trace_propagator = traces.get_trace_propagator() + + @staticmethod + def _retrieve_raw_path(request): + """ + Returns the raw path of given request, else default request's url path + E.g.: + /ddms/v2/wellbores/{wellboreid} instead of /ddms/v2/wellbores/opendes:doc:blablabla14587 + + It retrieves the raw path by finding the APIRoute object by name. By default the name of the route is the name + of python method where there is the implementation. + + + >>> @router.get('/wellbores/{wellboreid}') + >>> async def get_wellbore(wellboreid: str, ctx: Context): + >>> # instructions here + In this example 'get_wellbore' is called_endpoint_func variable, this function's name is needed to retrieve + the APIRoute that contains the raw path. + """ + called_endpoint_func = request.scope['endpoint'] + + if called_endpoint_func and is_function(called_endpoint_func): + function_name = called_endpoint_func.__name__ + called_routes = [route for route in request.app.routes + if route.name == function_name] + if called_routes: + return called_routes[0].path + + return request.url.path + + @staticmethod + def _before_request(request: Request, tracer: open_tracer.Tracer): + tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_HOST, + attribute_value=request.url.hostname) + tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_METHOD, + attribute_value=request.method) + + tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_ROUTE, + attribute_value=request.url.path) + tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_PATH, + attribute_value=str(request.url.path)) + tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_URL, + attribute_value=str(request.url)) + + ctx_correlation_id = get_or_create_ctx().correlation_id + correlation_id = ctx_correlation_id if ctx_correlation_id is not None \ + else request.headers.get(conf.CORRELATION_ID_HEADER_NAME) + tracer.add_attribute_to_current_span( + attribute_key=conf.CORRELATION_ID_HEADER_NAME, + attribute_value=correlation_id) + + @staticmethod + def _after_successful_request(response: Response, tracer): + tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_STATUS_CODE, + attribute_value=response.status_code) + + @staticmethod + def _after_request(request, tracer): + tracer.add_attribute_to_current_span( + attribute_key=utils.HTTP_ROUTE, + attribute_value=TracingMiddleware._retrieve_raw_path(request)) + + async def dispatch(self, request: Request, call_next: Any) -> Response: + + # Create tracing context, from headers if exists, else create a new one + span_context = self._trace_propagator.from_headers(request.headers) + + tracer = open_tracer.Tracer(span_context=span_context, + sampler=AlwaysOnSampler(), + propagator=self._trace_propagator, + exporter=request.app.trace_exporter) + + ctx = get_or_create_ctx() + with tracer.span(request.url.path) as parent_span: 
+ parent_span.span_kind = SpanKind.SERVER + ctx.set_current_with_value(tracer=tracer) + + self._before_request(request, tracer) + ctx.logger.debug(f'Request start: {request.method} {request.url}') + + response = None + try: + response = await call_next(request) + self._after_successful_request(response, tracer) + return response + + finally: + status = response.status_code if response else HTTP_500_INTERNAL_SERVER_ERROR + ctx.logger.info(utils.process_message(request, status)) + self._after_request(request, tracer) diff --git a/app/model/__init__.py b/app/model/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/model/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/model/ddms_model_response.py b/app/model/ddms_model_response.py new file mode 100644 index 0000000000000000000000000000000000000000..2ae6a9fc47e0ab9d96c4ae8b8263c408e5721a8f --- /dev/null +++ b/app/model/ddms_model_response.py @@ -0,0 +1,29 @@ +from __future__ import annotations +from typing import List, Optional +from pydantic import Field + +from app.model.model_curated import DDMSBaseModel + + +class AboutResponseUser(DDMSBaseModel): + tenant: Optional[str] = None + email: Optional[str] = None + + +class V1DmsInfo(DDMSBaseModel): + kinds: Optional[List[str]] = None + + +class V1AboutResponse(DDMSBaseModel): + user: Optional[AboutResponseUser] = None + dmsInfo: Optional[V1DmsInfo] = None + + +class FastSearchResponse(DDMSBaseModel): + results: Optional[List[str]] = None + +#unused after revert on bug 602935 +class WriteDataResponse(DDMSBaseModel): + rowCount: Optional[int] = Field(..., description="Row count") + columnCount: Optional[int] = Field(..., description="Column count") + diff --git a/app/model/entity_utils.py b/app/model/entity_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4a8c140277796e0e5badc5e9e7abf56b5944b9d5 --- /dev/null +++ b/app/model/entity_utils.py @@ -0,0 +1,82 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
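Illustrative sketch (not part of the patch): TracingMiddleware above derives its span context from the W3C traceparent header through TraceContextPropagator, so spans emitted by the service join the caller's trace. The header value below is the sample from the W3C trace-context specification.

from opencensus.trace.propagation.trace_context_http_header_format import TraceContextPropagator

propagator = TraceContextPropagator()
headers = {"traceparent": "00-4bf92f3577b34da6a3ce929d0e0e4736-00f067aa0ba902b7-01"}  # sample value
span_context = propagator.from_headers(headers)
# The incoming trace id is preserved in the new SpanContext.
print(span_context.trace_id)   # 4bf92f3577b34da6a3ce929d0e0e4736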
+ +from enum import Enum + +from app.model import schema_version + + +class Entity(Enum): + LOG = 'log' + LOGSET = 'logSet' + MARKER = 'marker' + TRAJECTORY = 'trajectory' + WELL = 'well' + WELLBORE = 'wellbore' + DIP = 'dip' + DIPSET = 'dipSet' + + +class KindMetaData: + def __init__(self, data_partition_id: str, source: str, entity_type: str, version: str): + self.data_partition_id = data_partition_id + self.source = source + self.entity_type = entity_type + self.version = version + + def __eq__(self, other): + if not isinstance(other, KindMetaData): + return False + + return self.data_partition_id == other.data_partition_id and \ + self.source == other.source and \ + self.entity_type == other.entity_type and \ + self.version == other.version + + +current_version = \ + { + Entity.LOG: schema_version.log_version, + Entity.LOGSET: schema_version.log_version, + Entity.MARKER: schema_version.marker_version, + Entity.TRAJECTORY: schema_version.trajectory_version, + Entity.WELL: schema_version.well_version, + Entity.WELLBORE: schema_version.wellbore_version, + Entity.DIP: schema_version.dip_version, + Entity.DIPSET: schema_version.dipset_version + } + + +def get_version(entity: Entity): + return current_version.get(entity) + + +def format_kind(data_partition: str, source: str, entity: str, version: str): + return f'{data_partition}:{source}:{entity}:{version}' + + +def get_kind(data_partition: str, source: str, entity: Entity): + version = get_version(entity) + return format_kind(data_partition, source, entity.value, version) + + +def get_kind_meta(kind: str) -> KindMetaData: + # Split kind literal into {data-partition-id}:{source}:{entity-type}:{version} + meta = kind.split(':', maxsplit=4) + if len(meta) == 4: + return KindMetaData(data_partition_id=meta[0], + source=meta[1], + entity_type=meta[2], + version=meta[3]) + raise ValueError(f'Invalid kind format in {kind}.') diff --git a/app/model/log_bulk.py b/app/model/log_bulk.py new file mode 100644 index 0000000000000000000000000000000000000000..31179d104d72322451476de490b2e432d572fec6 --- /dev/null +++ b/app/model/log_bulk.py @@ -0,0 +1,95 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional + +from jsonpath_ng import parse as parse_jsonpath +from jsonpath_ng.jsonpath import Parent as JsonParent +from odes_storage.models import Record + +from app.bulk_persistence import BulkId + + +class LogBulkHelper: + # TODO find a better name, LogRecordHelper. 
but I don't like 'helper', its just a synonymous of bag of several thing + # breaking single responsibility principle + """ gather common bulk manipulation of a log record""" + + @classmethod + def _get_record_data_dict(cls, record: Record) -> dict: + if record.data is None: + record.data = {} + return record.data + + @classmethod + def _set_bulk_id_in_wks(cls, record: Record, bulk_id) -> None: + """ for now it used externalIds, to _get_bulk_id_from_wksbe updated once schema is fixed with log.data.bulkId """ + bulk_urn = BulkId.bulk_urn_encode(bulk_id) + cls._get_record_data_dict(record).setdefault('log', {})['bulkURI'] = bulk_urn + + @classmethod + def _get_bulk_id_from_wks(cls, record: Record) -> Optional[str]: + bulk_uri = ( + cls._get_record_data_dict(record) + .get("log", {}) + .get("bulkURI", None) + ) + return BulkId.bulk_urn_decode(bulk_uri) if bulk_uri else None + + @classmethod + def update_bulk_id( + cls, record: Record, bulk_id, custom_bulk_id_path: Optional[str] = None + ): + """ + Update bulk id within a log record. Note that the custom path cannot be applied when using a strict structured model + It creates the field if not exist + :param record: record to update. + :param bulk_id: bulk reference (id, uri ...) to set + :param custom_bulk_id_path: !! incompatible with log model + """ + if custom_bulk_id_path is None: # what about empty string ? + cls._set_bulk_id_in_wks(record, bulk_id) + else: + record_dict = {"data": record.data} + + # experimentation, no error management + field_name = custom_bulk_id_path.split(".")[-1] + json_exp = parse_jsonpath(custom_bulk_id_path).child(JsonParent()) + + json_exp.find(record_dict)[0].value[ + field_name + ] = BulkId.bulk_urn_encode(bulk_id) + # if only support existing field, it can be done with a simple update call + # parse_jsonpath(custom_bulk_id_path).update(record, bulk_ref) + record.data = record_dict["data"] + + @classmethod + def get_bulk_id( + cls, record: Record, custom_bulk_id_path: Optional[str] = None + ) -> Optional[str]: + """ + :param record: + :param custom_bulk_id_path: !! incompatible with log model + :return: bulk id if any else None + """ + if custom_bulk_id_path is None: # what about empty string ? + return cls._get_bulk_id_from_wks(record) + + record_dict = {"data": record.data} + matches = parse_jsonpath(custom_bulk_id_path).find(record_dict) + return ( + BulkId.bulk_urn_decode(matches[0].value) + if len(matches) > 0 + else None + ) diff --git a/app/model/model_curated.py b/app/model/model_curated.py new file mode 100644 index 0000000000000000000000000000000000000000..a8d53ac4baa687417b22eb0f4131891839553507 --- /dev/null +++ b/app/model/model_curated.py @@ -0,0 +1,2281 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
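Illustrative sketch (not part of the patch) of the kind helpers from app/model/entity_utils.py above; the partition, source and version values are placeholders.

from app.model.entity_utils import format_kind, get_kind_meta

kind = format_kind("opendes", "wks", "wellbore", "1.0.6")   # placeholder values
meta = get_kind_meta(kind)                                   # parses back into KindMetaData
assert meta.data_partition_id == "opendes" and meta.entity_type == "wellbore"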
+ +# generated by datamodel-codegen: +# filename: openapi_curated.yaml +# timestamp: 2020-03-25T08:19:10+00:00 + +# - Warning : +# This file is no longer generated +# If you want to generate new version please make sure to not break the changes made to the file + +from __future__ import annotations + +from datetime import date, datetime +from enum import Enum +from typing import Any, Dict, List, Optional, Union + +from pydantic import BaseModel, Extra, Field, confloat + + +# forbid unknown field in the root of the models +class DDMSBaseModel(BaseModel): + """ + The base model forbids fields which are not declared initially in the pydantic model + """ + + class Config: + extra = Extra.forbid + + +# only allow unknown field in the data +class DDMSBaseModelWithExtra(BaseModel): + """ + Used for data model allows extra fields which are not declared initially in the pydantic model + """ + + class Config: + extra = Extra.allow + + +class Point(DDMSBaseModel): + latitude: Optional[confloat(ge=-90.0, le=90.0)] = Field( + None, description='Latitude of point.' + ) + longitude: Optional[confloat(ge=-180.0, le=180.0)] = Field( + None, description='Longitude of point.' + ) + + +class Legal(DDMSBaseModel): + legaltags: Optional[List[str]] = Field( + None, + description='The list of legal tags, see compliance API.', + title='Legal Tags', + ) + otherRelevantDataCountries: Optional[List[str]] = Field( + None, + description='The list of other relevant data countries using the ISO 2-letter codes, see compliance API.', + title='Other Relevant Data Countries', + ) + status: Optional[str] = Field( + None, description='The legal status.', title='Legal Status' + ) + + +class LinkList(DDMSBaseModelWithExtra): + pass + + +class Kind(str, Enum): + CRS = 'CRS' + Unit = 'Unit' + Measurement = 'Measurement' + AzimuthReference = 'AzimuthReference' + DateTime = 'DateTime' + + +class MetaItem(DDMSBaseModel): + kind: Kind = Field( + ..., + description='The kind of reference, unit, measurement, CRS or azimuth reference.', + title='Reference Kind', + ) + name: Optional[str] = Field( + None, + description='The name of the CRS or the symbol/name of the unit', + title='Name or Symbol', + ) + persistableReference: str = Field( + ..., + description='The persistable reference string uniquely identifying the CRS or Unit', + title='Persistable Reference', + ) + propertyNames: Optional[List[str]] = Field( + None, + description='The list of property names, to which this meta data item provides Unit/CRS context to. Data structures, which come in a single frame of reference, can register the property name, others require a full path like "data.structureA.propertyB" to define a unique context.', + title='Attribute Names', + ) + propertyValues: Optional[List[str]] = Field( + None, + description='The list of property values, to which this meta data item provides Unit/CRS context to. Typically a unit symbol is a value to a data structure; this symbol is then registered in this propertyValues array and the persistableReference provides the absolute reference.', + title='Attribute Names', + ) + uncertainty: Optional[float] = Field( + None, + description='The uncertainty of the values measured given the unit or CRS unit.', + title='Uncertainty', + ) + + +class TagDictionary(DDMSBaseModelWithExtra): + pass + + +class ToOneRelationship(DDMSBaseModel): + confidence: Optional[float] = Field( + None, + description='The confidence of the relationship. 
If the property is absent a well-known relation is implied.', + title='Relationship Confidence', + ) + id: Optional[str] = Field( + None, + description='The id of the related object in the Data Ecosystem. If set, the id has priority over the natural key in the name property.', + title='Related Object Id', + ) + name: Optional[str] = Field( + None, + description='The name or natural key of the related object. This property is required if the target object id could not (yet) be identified.', + title='Related Object Name', + ) + version: Optional[float] = Field( + None, + description='The version number of the related entity. If no version number is specified, the last version is implied.', + title='Entity Version Number', + ) + + +class ValueWithUnit(DDMSBaseModel): + unitKey: str = Field( + ..., + description="Unit for value of the corresponding attribute for the domain object in question. The key can be looked up in the 'frameOfReference.units' for further details.", + title='Unit Key', + ) + value: float = Field( + ..., + description='Value of the corresponding attribute for the domain object in question.', + title='Value', + ) + + +class Type(Enum): + GeometryCollection = 'GeometryCollection' + + +class Type_1(Enum): + Feature = 'Feature' + + +class Type_2(Enum): + FeatureCollection = 'FeatureCollection' + + +class Type_3(Enum): + LineString = 'LineString' + + +class Type_4(Enum): + MultiLineString = 'MultiLineString' + + +class GeoJsonMultiLineString(DDMSBaseModel): + bbox: Optional[List[float]] = None + coordinates: List[List[List[float]]] + type: Type_4 + + +class Type_5(Enum): + MultiPoint = 'MultiPoint' + + +class Type_6(Enum): + MultiPolygon = 'MultiPolygon' + + +class Type_7(Enum): + Point = 'Point' + + +class GeoJsonPoint(DDMSBaseModel): + bbox: Optional[List[float]] = None + coordinates: List[float] + type: Type_7 + + +class Point3dNonGeoJson(DDMSBaseModel): + coordinates: List[float] = Field( + ..., + description="3-dimensional point; the first coordinate is typically pointing east (easting or longitude), the second coordinate typically points north (northing or latitude). The third coordinate is an elevation (upwards positive, downwards negative). The point's CRS is given by the container.", + title='3D Point', + ) + crsKey: str = Field( + ..., + description="The 'crsKey', which can be looked up in the 'frameOfReference.crs' for further details.", + title='CRS Key', + ) + unitKey: str = Field( + ..., + description="The 'unitKey' for the 3rd coordinate, which can be looked up in the 'frameOfReference.unit' for further details.", + title='Unit Key', + ) + + +class Type_8(Enum): + Polygon = 'Polygon' + + +class Polygon(DDMSBaseModel): + bbox: Optional[List[float]] = None + coordinates: List[List[List[float]]] + type: Type_8 + + +class valueArrayWithUnit(DDMSBaseModel): + unitKey: str = Field( + ..., + description="Unit for array value of the corresponding attribute for the domain object in question. The key can be looked up in the 'frameOfReference.units' for further details.", + title='Unit Key', + ) + values: List[float] = Field( + ..., + description='Value of the corresponding attribute for the domain object in question.', + title='Value', + ) + + +class core_dl_geopoint(DDMSBaseModel): + latitude: confloat(ge=-90.0, le=90.0) = Field( + ..., + description='The latitude value in degrees of arc (dega). Value range [-90, 90].', + title='Latitude', + ) + longitude: confloat(ge=-180.0, le=180.0) = Field( + ..., + description='The longitude value in degrees of arc (dega). 
Value range [-180, 180]', + title='Longitude', + ) + + +class geographicPosition(DDMSBaseModel): + crsKey: str = Field( + ..., + description="The 'crsKey', which can be looked up in the 'frameOfReference.crs' for further details.", + title='CRS Key', + ) + elevationFromMsl: ValueWithUnit = Field( + ..., + description="Elevation from Mean Seal Level, downwards negative. The unit definition is found via 'elevationFromMsl.unitKey' in 'frameOfReference.units' dictionary.", + title='Elevation from MSL', + ) + latitude: float = Field( + ..., + description='Native or original latitude (unit defined by CRS)', + title='Native Latitude', + ) + longitude: float = Field( + ..., + description='Native or original longitude (unit defined by CRS)', + title='Native Longitude', + ) + + +class PlssLocation(DDMSBaseModel): + aliquotPart: Optional[str] = Field( + None, + description='A terse, hierarchical reference to a piece of land, in which successive subdivisions of some larger area.', + title='Aliquot Part', + ) + range: str = Field( + ..., + description='Range, also known as Rng, R; a measure of the distance east or west from a referenced principal meridian, in units of six miles.', + title='Range', + ) + section: int = Field( + ..., description='Section number (between 1 and 36)', title='Section Number' + ) + township: str = Field( + ..., + description='Township, also known as T or Twp; (1) Synonym for survey township, i.e., a square parcel of land of 36 square miles, or (2) A measure of the distance north or south from a referenced baseline, in units of six miles', + title='Township', + ) + + +class projectedPosition(DDMSBaseModel): + crsKey: str = Field( + ..., + description="The 'crsKey', which can be looked up in the 'frameOfReference.crs' for further details.", + title='CRS Key', + ) + elevationFromMsl: ValueWithUnit = Field( + ..., + description="Elevation from Mean Seal Level, downwards negative. 
The unit definition is found via 'elevationFromMsl.unitKey' in 'frameOfReference.units' dictionary.", + title='Elevation from MSL', + ) + x: float = Field( + ..., + description='X-coordinate value in native or original projected CRS', + title='X Coordinate', + ) + y: float = Field( + ..., + description='Y-coordinate value in native or original projected CRS', + title='Y Coordinate', + ) + + +class wellborerelationships(DDMSBaseModelWithExtra): + definitiveTimeDepthRelation: Optional[ToOneRelationship] = Field( + None, + description='The definitive tome-depth relation providing the MD to seismic travel-time transformation.', + title='Definitive Time-Depth Relation', + ) + definitiveTrajectory: Optional[ToOneRelationship] = Field( + None, + description='The definitive trajectory providing the MD to 3D space transformation.', + title='Definitive Trajectory', + ) + tieInWellbore: Optional[ToOneRelationship] = Field( + None, + description='The tie-in wellbore if this wellbore is a side-track.', + title='Tie-in Wellbore', + ) + well: Optional[ToOneRelationship] = Field( + None, description='The well to which this wellbore belongs.', title='Well' + ) + + +class Shape(Enum): + build_and_hold = 'build and hold' + deviated = 'deviated' + double_kickoff = 'double kickoff' + horizontal = 'horizontal' + S_shaped = 'S-shaped' + vertical = 'vertical' + unknown = 'unknown' + + +class WellborePurpose(Enum): + appraisal = 'appraisal' + appraisal____confirmation_appraisal = 'appraisal -- confirmation appraisal' + appraisal____exploratory_appraisal = 'appraisal -- exploratory appraisal' + exploration = 'exploration' + exploration____deeper_pool_wildcat = 'exploration -- deeper-pool wildcat' + exploration____new_field_wildcat = 'exploration -- new-field wildcat' + exploration____new_pool_wildcat = 'exploration -- new-pool wildcat' + exploration____outpost_wildcat = 'exploration -- outpost wildcat' + exploration____shallower_pool_wildcat = 'exploration -- shallower-pool wildcat' + development = 'development' + development____infill_development = 'development -- infill development' + development____injector = 'development -- injector' + development____producer = 'development -- producer' + fluid_storage = 'fluid storage' + fluid_storage____gas_storage = 'fluid storage -- gas storage' + general_srvc = 'general srvc' + general_srvc____borehole_re_acquisition = 'general srvc -- borehole re-acquisition' + general_srvc____observation = 'general srvc -- observation' + general_srvc____relief = 'general srvc -- relief' + general_srvc____research = 'general srvc -- research' + general_srvc____research____drill_test = 'general srvc -- research -- drill test' + general_srvc____research____strat_test = 'general srvc -- research -- strat test' + general_srvc____waste_disposal = 'general srvc -- waste disposal' + mineral = 'mineral' + unknown = 'unknown' + + +class WellboreStatus(Enum): + abandoned = 'abandoned' + active = 'active' + active____injecting = 'active -- injecting' + active____producing = 'active -- producing' + completed = 'completed' + drilling = 'drilling' + partially_plugged = 'partially plugged' + permitted = 'permitted' + plugged_and_abandoned = 'plugged and abandoned' + proposed = 'proposed' + sold = 'sold' + suspended = 'suspended' + temporarily_abandoned = 'temporarily abandoned' + testing = 'testing' + tight = 'tight' + working_over = 'working over' + unknown = 'unknown' + + +class WellboreType(Enum): + bypass = 'bypass' + initial = 'initial' + redrill = 'redrill' + reentry = 'reentry' + respud = 
'respud' + sidetrack = 'sidetrack' + unknown = 'unknown' + + +class DataType(Enum): + string = 'string' + number = 'number' + integer = 'integer' + boolean = 'boolean' + + +class Format(Enum): + date = 'date' + date_time = 'date-time' + time = 'time' + byte = 'byte' + binary = 'binary' + boolean = 'boolean' + email = 'email' + uuid = 'uuid' + uri = 'uri' + int8 = 'int8' + int16 = 'int16' + int32 = 'int32' + int64 = 'int64' + float32 = 'float32' + float64 = 'float64' + float128 = 'float128' + + +class logsetrelationships(DDMSBaseModelWithExtra): + well: Optional[ToOneRelationship] = Field( + None, + description='The well to which this logSet belongs. Only required if the wellbore is unknown.', + title='Well', + ) + wellbore: ToOneRelationship = Field( + ..., description='The wellbore to which this logSet belongs.', title='Wellbore' + ) + wellboreSection: Optional[ToOneRelationship] = Field( + None, + description='The wellboreSection to which this logSet belongs.', + title='Wellbore Section', + ) + + +class dipsetrelationships(DDMSBaseModelWithExtra): + well: Optional[ToOneRelationship] = Field( + None, + description='The well to which this dipSet belongs. Only required if the wellbore is unknown.', + title='Well', + ) + wellbore: ToOneRelationship = Field( + ..., description='The wellbore to which this dipSet belongs.', title='Wellbore' + ) + wellboreSection: Optional[ToOneRelationship] = Field( + None, + description='The wellbore section to which this dipSet belongs.', + title='Wellbore Section', + ) + referenceLog: Optional[ToOneRelationship] = Field( + None, + description='The reference log of the dipset.', + title='Reference log', + ) + trueDipAzimuthLog: Optional[ToOneRelationship] = Field( + None, + description='The true dip azimuth log of the dipset.', + title='True dip azimuth log', + ) + trueDipInclinationLog: Optional[ToOneRelationship] = Field( + None, + description='The true dip inclination log of the dipset.', + title='True dip inclination log', + ) + xCoordinateLog: Optional[ToOneRelationship] = Field( + None, + description='The X-coordinate log of the dipset', + title='X-coordinate log', + ) + yCoordinateLog: Optional[ToOneRelationship] = Field( + None, + description='The Y-coordinate log of the dipset', + title='Y-coordinate log', + ) + zCoordinateLog: Optional[ToOneRelationship] = Field( + None, + description='The Z-coordinate log of the dipset', + title='Z-coordinate log', + ) + qualityLog: Optional[ToOneRelationship] = Field( + None, + description='The quality log of the dipset', + title='Quality log', + ) + classificationLog: Optional[ToOneRelationship] = Field( + None, + description='The classification log of the dipset', + title='Classification log', + ) + + +class toManyRelationship(DDMSBaseModel): + confidences: Optional[List[float]] = Field( + None, + description='The confidences of the relationships. Keep all the arrays ordered and aligned.', + title='Relationship Confidences', + ) + ids: Optional[List[str]] = Field( + None, + description='The ids of the related objects. It is populated for an explicit relationship where the target entity is present as a record in the data ecosystem. Keep all the arrays ordered and aligned.', + title='Related Object Id', + ) + names: Optional[List[str]] = Field( + None, + description='The names or natural keys of the related objects. Keep all the arrays ordered and aligned.', + title='Related Object Names', + ) + versions: Optional[List[float]] = Field( + None, + description='The specific version numbers of the related instances.
This is only specified if a specific version is required. If not populated the last version is implied. Keep all the arrays ordered and aligned.', + title='To Many Relationship', + ) + + +class DataType_1(Enum): + string = 'string' + number = 'number' + integer = 'integer' + boolean = 'boolean' + + +class Format_1(Enum): + date = 'date' + date_time = 'date-time' + time = 'time' + byte = 'byte' + binary = 'binary' + boolean = 'boolean' + email = 'email' + uuid = 'uuid' + uri = 'uri' + int8 = 'int8' + int16 = 'int16' + int32 = 'int32' + int64 = 'int64' + float32 = 'float32' + float64 = 'float64' + float128 = 'float128' + + +class trajectorychannel(DDMSBaseModel): + absentValue: Optional[str] = Field( + None, + description='Optional field carrying the absent value as string for this channel.', + title='Absent Value', + ) + azimuthKey: Optional[str] = Field( + None, + description="The azimuth reference of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.azimuth' dictionary.", + title='Azimuth Reference Key', + ) + crsKey: Optional[str] = Field( + None, + description="The CRS key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.crs' dictionary.", + title='CRS Key', + ) + dataType: Optional[DataType_1] = Field( + 'number', + description="The log value type (per log sample). The 'format' property may contain further hints about data type presentation.", + title='Data Type', + ) + dimension: Optional[int] = Field( + None, description='The dimension of this log or channel', title='Dimension' + ) + family: Optional[str] = Field( + None, + description='The log family code of this log or channel (optional)', + title='Log Family', + ) + familyType: Optional[str] = Field( + None, + description="The log family type code of this log or channel. Example: 'Neutron Porosity' for 'Thermal Neutron Porosity Sandstone'. (optional)", + title='Log Family Type', + ) + format: Optional[Format_1] = Field( + 'float32', + description="Optional format hint how to treat the log values as strings or number of bits per 'dataType'.", + title='Format Hint', + ) + logstoreId: Optional[float] = Field( + None, + description='The id of this log or channel in the Logstore. This property is not present in the index channel.', + title='Logstore ID', + ) + bulkURI: Optional[str] = Field( + None, + description='bulkURI either URL or URN.', + title='bulk URI', + ) + longName: Optional[str] = Field( + None, description='The long name of this log or channel', title='Log Long Name' + ) + mnemonic: Optional[str] = Field( + None, description='The mnemonic of this log or channel', title='Mnemonic' + ) + name: Optional[str] = Field( + None, description='The name of this log or channel.', title='Log Name' + ) + properties: Optional[List[str]] = Field( + None, description='The properties of this log or channel.', title='Properties' + ) + source: Optional[str] = Field( + None, + description='The source of this log or channel as a data reference; Typically this refers to the raw trajectory, from which this log WKE is generated.', + title='Source', + ) + unitKey: Optional[str] = Field( + None, + description="The unit key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.units' dictionary. 
Empty units (NoUnit) are not recorded.", + title='Unit Key', + ) + + +class trajectoryrelationships(DDMSBaseModelWithExtra): + wellbore: ToOneRelationship = Field( + ..., + description='The wellbore to which this trajectory belongs.', + title='Wellbore', + ) + + +class wgs84Position(DDMSBaseModel): + elevationFromMsl: ValueWithUnit = Field( + ..., + description="Elevation from Mean Seal Level, downwards negative. The unit definition is found via 'elevationFromMsl.unitKey' in 'frameOfReference.units' dictionary.", + title='Elevation from MSL', + ) + latitude: float = Field( + ..., + description='WGS 84 latitude value in degrees (dega)', + title='WGS 84 Latitude', + ) + longitude: float = Field( + ..., + description='WGS 84 longitude value in degrees (dega)', + title='WGS 84 Longitude', + ) + + +class markerrelationships(DDMSBaseModelWithExtra): + horizon: Optional[ToOneRelationship] = Field( + None, + description='The related stratigraphic horizon', + title='Stratigraphic Horizon', + ) + stratigraphicTable: Optional[ToOneRelationship] = Field( + None, + description='The related stratigraphic table, which provides the context for the stratigraphic horizon', + title='Stratigraphic Table', + ) + study: Optional[ToOneRelationship] = Field( + None, + description='The study, in which this marker was conceived.', + title='Study', + ) + trajectory: Optional[ToOneRelationship] = Field( + None, + description='The trajectory used to create the marker position', + title='Trajectory', + ) + wellbore: Optional[ToOneRelationship] = Field( + None, + description='The wellbore entity, to which this marker belongs.', + title='Wellbore', + ) + + +class valueAzimuth(DDMSBaseModel): + azimuthKey: str = Field( + ..., + description="Azimuth reference for the value of the corresponding attribute for the domain object in question. It can be looked up in 'frameOfReference.azimuths'.", + title='Azimuth Key', + ) + crsKey: Optional[str] = Field( + None, + description="Mandatory for GridNorth referenced azimuths. The 'crsKey' can be looked up in the 'frameOfReference.crs' for further details.", + title='CRS Key', + ) + date: Optional[date] = Field( + None, + description='Mandatory for MagneticNorth referenced azimuths: the observation date of the azimuth such that a correction can be obtained via a geomagnetic model using its secular variation.', + title='Observation Date', + ) + unitKey: str = Field( + ..., + description="'unitKey' for value of the corresponding attribute for the domain object in question. It can be looked up in 'frameOfReference.units'.", + title='Unit Key', + ) + value: float = Field( + ..., description='Value of the azimuth.', title='Azimuth Value' + ) + + +class v1LogProperties(DDMSBaseModel): + rowKeysUnit: Optional[str] = Field( + None, title="The unit of the row keys, e.g. 'm' or 'ft'" + ) + rowKeysQuantity: Optional[str] = Field( + None, title="The quantity of the row keys, e.g. 'length' or 'time'" + ) + rowKeysMnemonic: Optional[str] = Field( + None, title='The mnemonic of the row keys, e.g. MD, TVD, TWT' + ) + colKeysUnit: Optional[str] = Field( + None, title="The unit of the column keys, e.g. 'm' or 'ft'" + ) + colKeysQuantity: Optional[str] = Field( + None, title="The quantity of the column keys, e.g. 'length' or 'time'" + ) + colKeysMnemonic: Optional[str] = Field( + None, title='The mnemonic of the column keys, e.g. MD, TVD, TWT' + ) + valuesUnit: Optional[str] = Field( + None, title='The unit of the values, e.g. 
gAPI or Hz' + ) + valuesQuantity: Optional[str] = Field( + None, title="The quantity of the values, e.g. 'gammaRay' or 'temperature'" + ) + valuesMnemonic: Optional[str] = Field( + None, title='The mnemonic of the values, e.g. GR, TEMP, POR' + ) + custom: Optional[Dict[str, Any]] = Field( + None, + description='A map of custom properties. Case sensitive on both key and value.', + ) + + +class RangeBoundType(Enum): + UNBOUNDED = 'UNBOUNDED' + OPEN = 'OPEN' + CLOSED = 'CLOSED' + + +class DataType_2(Enum): + string = 'string' + number = 'number' + integer = 'integer' + boolean = 'boolean' + date_time = 'date-time' + + +class Format_2(Enum): + date = 'date' + date_time = 'date-time' + time = 'time' + byte = 'byte' + binary = 'binary' + email = 'email' + uuid = 'uuid' + uri = 'uri' + int8 = 'int8' + int16 = 'int16' + int32 = 'int32' + int64 = 'int64' + float32 = 'float32' + float64 = 'float64' + float128 = 'float128' + + +class historyRecord(DDMSBaseModel): + date: Optional[datetime] = Field( + None, + description='The UTC date time of the log creation/processing', + title='Date and Time', + ) + description: Optional[str] = Field( + None, + description='The description of the context, which produced the log.', + title=' Description', + ) + user: Optional[str] = Field( + None, description='The user running the log processing.', title='User' + ) + + +class ReferenceType(Enum): + Date = 'Date' + Date_Time = 'Date Time' + Measured_Depth = 'Measured Depth' + Core_depth = 'Core depth' + True_Vertical_Depth = 'True Vertical Depth' + True_Vertical_Depth_Sub_Sea = 'True Vertical Depth Sub Sea' + One_Way_Time = 'One-Way Time' + Two_Way_Time = 'Two-Way Time' + + +class logRelationships(DDMSBaseModelWithExtra): + logSet: Optional[ToOneRelationship] = Field( + None, + description='The logSet to which this log belongs. If the log is not part of a log set this relationship stays empty.', + title='LogSet', + ) + timeDepthRelation: Optional[ToOneRelationship] = Field( + None, + description='The timeDepthRelation to which this log belongs. If the log is not part of a timeDepthRelation this relationship stays empty.', + title='TimeDepthRelation LogSet', + ) + well: Optional[ToOneRelationship] = Field( + None, + description='The well to which this log belongs. Only required if the wellbore is unknown.', + title='Well', + ) + wellbore: Optional[ToOneRelationship] = Field( + None, + description='The wellbore to which this log belongs. 
This relationship is the most important; only the wellbore can provide the unique context for the measured depth index.', + title='Wellbore', + ) + + +class basinContext(DDMSBaseModel): + basinCode: Optional[str] = Field( + None, + description='The code of the basin in which the well is located.', + title='Basin Code', + ) + basinName: Optional[str] = Field( + None, + description='The name of the basin in which the well is located.', + title='Basin Name', + ) + subBasinCode: Optional[str] = Field( + None, + description='The code of the sub-basin in which the well is located.', + title='Sub-Basin Code', + ) + subBasinName: Optional[str] = Field( + None, + description='The name of the sub-basin in which the well is located.', + title='Sub-Basin Name', + ) + + +class wellrelationships(DDMSBaseModelWithExtra): + asset: Optional[ToOneRelationship] = Field( + None, description='The asset this well belongs to.', title='Asset' + ) + + +class DirectionWell(Enum): + huff_n_puff = 'huff-n-puff' + injector = 'injector' + producer = 'producer' + uncertain = 'uncertain' + unknown = 'unknown' + + +class FluidWell(Enum): + air = 'air' + condensate = 'condensate' + dry = 'dry' + gas = 'gas' + gas_water = 'gas-water' + non_HC_gas = 'non HC gas' + non_HC_gas____CO2 = 'non HC gas -- CO2' + oil = 'oil' + oil_gas = 'oil-gas' + oil_water = 'oil-water' + steam = 'steam' + water = 'water' + water____brine = 'water -- brine' + water____fresh_water = 'water -- fresh water' + unknown = 'unknown' + + +class WellLocationType(Enum): + Onshore = 'Onshore' + Offshore = 'Offshore' + unknown = 'unknown' + + +class WellPurpose(Enum): + appraisal = 'appraisal' + appraisal____confirmation_appraisal = 'appraisal -- confirmation appraisal' + appraisal____exploratory_appraisal = 'appraisal -- exploratory appraisal' + exploration = 'exploration' + exploration____deeper_pool_wildcat = 'exploration -- deeper-pool wildcat' + exploration____new_field_wildcat = 'exploration -- new-field wildcat' + exploration____new_pool_wildcat = 'exploration -- new-pool wildcat' + exploration____outpost_wildcat = 'exploration -- outpost wildcat' + exploration____shallower_pool_wildcat = 'exploration -- shallower-pool wildcat' + development = 'development' + development____infill_development = 'development -- infill development' + development____injector = 'development -- injector' + development____producer = 'development -- producer' + fluid_storage = 'fluid storage' + fluid_storage____gas_storage = 'fluid storage -- gas storage' + general_srvc = 'general srvc' + general_srvc____borehole_re_acquisition = 'general srvc -- borehole re-acquisition' + general_srvc____observation = 'general srvc -- observation' + general_srvc____relief = 'general srvc -- relief' + general_srvc____research = 'general srvc -- research' + general_srvc____research____drill_test = 'general srvc -- research -- drill test' + general_srvc____research____strat_test = 'general srvc -- research -- strat test' + general_srvc____waste_disposal = 'general srvc -- waste disposal' + mineral = 'mineral' + unknown = 'unknown' + + +class WellStatus(Enum): + abandoned = 'abandoned' + active = 'active' + active____injecting = 'active -- injecting' + active____producing = 'active -- producing' + completed = 'completed' + drilling = 'drilling' + partially_plugged = 'partially plugged' + permitted = 'permitted' + plugged_and_abandoned = 'plugged and abandoned' + proposed = 'proposed' + sold = 'sold' + suspended = 'suspended' + temporarily_abandoned = 'temporarily abandoned' + testing = 'testing' 
+ tight = 'tight' + working_over = 'working over' + unknown = 'unknown' + + +class WellType(Enum): + bypass = 'bypass' + initial = 'initial' + redrill = 'redrill' + reentry = 'reentry' + respud = 'respud' + sidetrack = 'sidetrack' + unknown = 'unknown' + + +class ByBoundingBox(DDMSBaseModel): + topLeft: Point = Field(..., description='Top left corner of the bounding box.') + bottomRight: Point = Field( + ..., description='Bottom right corner of the bounding box.' + ) + + +class ByDistance(DDMSBaseModel): + distance: Optional[confloat(ge=0.0, le=9.223372036854776e18)] = Field( + None, + description='The radius of the circle centered on the specified location. Points which fall into this circle are considered to be matches.', + ) + point: Point = Field(..., description='Center point of the query.') + + +class ByGeoPolygon(DDMSBaseModel): + points: Optional[List[Point]] = Field( + None, description='Polygon defined by a set of points.' + ) + + +class SimpleElevationReference(DDMSBaseModel): + elevationFromMsl: ValueWithUnit = Field( + ..., + description="The elevation above mean sea level (MSL), at which the vertical origin is 0.0. The 'unitKey' is further defined in 'frameOfReference.units'.", + title='Elevation from MSL', + ) + name: Optional[str] = Field( + None, + description='The name of the Elevation Reference.', + title='Elevation Reference Name', + ) + + +class GeoJsonLineString(DDMSBaseModel): + bbox: Optional[List[float]] = None + coordinates: List[List[float]] + type: Type_3 + + +class GeoJsonMultiPoint(DDMSBaseModel): + bbox: Optional[List[float]] = None + coordinates: List[List[float]] + type: Type_5 + + +class GeoJsonMultiPolygon(DDMSBaseModel): + bbox: Optional[List[float]] = Field( + None, description='Bounding box in longitude, latitude WGS 84.' + ) + coordinates: List[List[List[List[float]]]] = Field( + ..., + description='Array of polygons (minimum 2D), containing an array of point coordinates (longitude, latitude, (optionally elevation and other properties).', + ) + type: Type_6 + + +class namedProperty(DDMSBaseModel): + associations: Optional[List[str]] = Field( + None, + description='The optional associations contains one or more mnemonics found elsewhere in the logSet.', + title='Associations', + ) + description: Optional[str] = Field( + None, + description='The description and role of this property.', + title='Property Description', + ) + format: Optional[str] = Field( + None, + description="An optional format declaration for the property values. The 'A' prefix indicates an array; string values are represented by 'S'; floating point values are represented by 'F', optionally followed by a field specification, e.g. 'F10.4'; exponential number representations are represented by 'E'; integer values are represented by 'I'. For further information see the LAS specification http://www.cwls.org/las/.", + title='Format (LAS)', + ) + name: Optional[str] = Field( + None, description='The name of this property.', title='Property Name' + ) + unitKey: Optional[str] = Field( + None, + description="The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition.", + title='Property Unit Symbol', + ) + value: Optional[Union[float, str]] = Field( + None, + description='The value for this property as a string or a number.', + title='Property Value', + ) + values: Optional[List[float]] = Field( + None, + description='The values, e.g. 
interval boundaries, for this property.', + title='Property Values (Interval)', + ) + + +class logchannel(DDMSBaseModel): + columnNames: Optional[List[str]] = Field( + None, + description="A list of names for multi-dimensional logs (dimension>1). The length of this array is expected to be equal to 'dimension'. For one-dimensional this property stays empty as the columnName is by definition the log name.", + title='Column Names', + ) + dataType: Optional[DataType_2] = Field( + 'number', + description="The log value type (per log sample). The 'format' property may contain further hints about data type presentation.", + title='Data Type', + ) + dimension: Optional[int] = Field( + None, description='The dimension of this log or channel', title='Dimension' + ) + family: Optional[str] = Field( + None, + description='The log family code of this log or channel (optional)', + title='Log Family', + ) + familyType: Optional[str] = Field( + None, + description="The log family type code of this log or channel. Example: 'Neutron Porosity' for 'Thermal Neutron Porosity Sandstone'. (optional)", + title='Log Family Type', + ) + format: Optional[Format_2] = Field( + 'float32', + description="Optional format hint how to treat the log values as strings or number of bits per 'dataType'.", + title='Format Hint', + ) + logstoreId: Optional[float] = Field( + None, + description='The unique id of this log or channel in the Logstore. This property is not present in the index channel.', + title='Logstore ID', + ) + bulkURI: Optional[str] = Field( + None, + description='bulkURI either URL or URN.', + title='bulk URI', + ) + longName: Optional[str] = Field( + None, description='The long name of this log or channel', title='Log Long Name' + ) + mnemonic: Optional[str] = Field( + None, description='The mnemonic of this log or channel', title='Mnemonic' + ) + name: Optional[str] = Field( + None, description='The name of this log or channel.', title='Log Name' + ) + properties: Optional[List[namedProperty]] = Field( + None, + description='The named properties of this log or channel.', + title='Named Properties', + ) + source: Optional[str] = Field( + None, + description='The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated.', + title='Source', + ) + unitKey: Optional[str] = Field( + None, + description="The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition.", + title='Unit', + ) + + +class logData(DDMSBaseModelWithExtra): + azimuthReference: Optional[str] = Field( + None, + description='Only supplied with azimuth logs: the azimuth reference code defining the type of North, default TN for true north.', + title='Azimuth Reference Code', + ) + dateCreated: Optional[datetime] = Field( + None, + description='The UTC date time of the entity creation', + title='Creation Date and Time', + ) + dateModified: Optional[datetime] = Field( + None, + description='The UTC date time of the last entity modification', + title='Last Modification Date and Time', + ) + elevationReference: Optional[SimpleElevationReference] = Field( + None, + description="The wellbore's elevation reference from mean sea level (MSL), positive above MSL. This is where the index, e.g. MD == 0 and TVD == 0.", + title='Elevation Reference', + ) + externalIds: Optional[List[str]] = Field( + None, + description='An array of identities (e.g. 
some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.', + title='Array of External IDs', + ) + history: Optional[List[historyRecord]] = Field( + None, + description="An array of historyRecords describing the context for the log's creation or processing.", + title='History Records', + ) + log: Optional[logchannel] = Field( + None, + description='The log containing the log meta data and log-store reference.', + title='Log Channel', + ) + name: Optional[str] = Field( + None, description='The name of this log set', title='Log Set Name' + ) + operation: Optional[str] = Field( + None, description='The operation which created this Log', title='Operation' + ) + reference: Optional[logchannel] = Field( + None, + description='The reference index - only populated for logs, which are member of a logSet and share the reference index.', + title='Reference Index', + ) + referenceType: Optional[ReferenceType] = Field( + None, description='The reference index type of the log set.', title='Index Type' + ) + relationships: Optional[logRelationships] = Field( + None, description='The related entities.', title='Relationships' + ) + start: Optional[ValueWithUnit] = Field( + None, description='The start index value of the log set.', title='Start' + ) + step: Optional[ValueWithUnit] = Field( + None, + description='The index increment value of the log set. Only populated if the log is regularly sampled.', + title='Step', + ) + stop: Optional[ValueWithUnit] = Field( + None, description='The stop index value of the log set.', title='Stop' + ) + + +class log(DDMSBaseModel): + acl: Optional[TagDictionary] = Field( + None, + description='The access control tags associated with this entity.', + title='Access Control List', + ) + ancestry: Optional[LinkList] = Field( + None, + description='The links to data, which constitute the inputs.', + title='Ancestry', + ) + data: Optional[logData] = Field( + None, description='Log data associated with a wellbore', title='Log Data' + ) + id: Optional[str] = Field( + None, description='The unique identifier of the log', title='Log Set ID' + ) + kind: Optional[str] = Field( + 'osdu:wks:log:0.0.1', description='Kind specification', title='Log Kind' + ) + legal: Optional[Legal] = Field( + None, description="The log's legal tags", title='Legal Tags' + ) + meta: Optional[List[MetaItem]] = Field( + None, + description="The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)", + title='Frame of Reference Meta Data', + ) + status: Optional[str] = Field( + 'compliant', description='The status of this log', title='Entity Status' + ) + type: Optional[str] = Field( + None, + description='The reference entity type as declared in common:metadata:entity:*.', + title='Entity Type', + ) + version: Optional[float] = Field( + None, + description='The version number of this log; set by the framework.', + title='Entity Version Number', + ) + + +class SpatialFilter(DDMSBaseModel): + field: Optional[str] = Field( + None, + description='geo-point field in the index on which filtering will be performed. 
Use GET schema API to find which fields supports spatial search.', + ) + byBoundingBox: Optional[ByBoundingBox] = Field( + None, + description='A query allowing to filter hits based on a point location within a bounding box.', + ) + byDistance: Optional[ByDistance] = Field( + None, + description='Filters documents that include only hits that exist within a specific distance from a geo point.', + ) + byGeoPolygon: Optional[ByGeoPolygon] = Field( + None, + description='A query allowing to filter hits that only fall within a polygon of points.', + ) + + +class geometryItem(DDMSBaseModel): + bbox: Optional[List[float]] = None + geometries: List[Union[ + GeoJsonPoint, + GeoJsonMultiPoint, + GeoJsonLineString, + GeoJsonMultiLineString, + Polygon, + GeoJsonMultiPolygon, + ]] + type: Type + + +class GeoJsonFeature(DDMSBaseModel): + bbox: Optional[List[float]] = None + geometry: Union[ + GeoJsonPoint, + GeoJsonMultiPoint, + GeoJsonLineString, + GeoJsonMultiLineString, + Polygon, + GeoJsonMultiPolygon, + geometryItem, + ] + properties: Dict[str, Any] + type: Type_1 + + +class GeoJsonFeatureCollection(DDMSBaseModel): + bbox: Optional[List[float]] = None + features: List[GeoJsonFeature] + type: Type_2 + + +class wellboreData(DDMSBaseModelWithExtra): + airGap: Optional[ValueWithUnit] = Field( + None, + description='The gap between water surface and offshore drilling platform.', + title='Air Gap', + ) + block: Optional[str] = Field( + None, + description='The block name, in which the wellbore is located.', + title='Block', + ) + country: Optional[str] = Field( + None, + description="The country, in which the wellbore is located. The country name follows the convention in ISO 3166-1 'English short country name', see https://en.wikipedia.org/wiki/ISO_3166-1", + title='Country', + ) + county: Optional[str] = Field( + None, + description='The county name, in which the wellbore is located.', + title='County', + ) + dateCreated: Optional[datetime] = Field( + None, + description='The UTC date time of the entity creation', + title='Creation Date and Time', + ) + dateModified: Optional[datetime] = Field( + None, + description='The UTC date time of the last entity modification', + title='Last Modification Date and Time', + ) + drillingDaysTarget: Optional[ValueWithUnit] = Field( + None, + description='Target days for drilling wellbore.', + title='Target Drilling Days', + ) + elevationReference: Optional[SimpleElevationReference] = Field( + None, + description="The wellbore's elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0", + title='Elevation Reference', + ) + externalIds: Optional[List[str]] = Field( + None, + description='An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.', + title='Array of External IDs', + ) + field: Optional[str] = Field( + None, + description='The field name, to which the wellbore belongs.', + title='Field', + ) + formationAtTd: Optional[str] = Field( + None, + description="The name of the formation at the wellbore's total depth.", + title='Formation at TD', + ) + formationProjected: Optional[str] = Field( + None, + description="The name of the formation at the wellbore's projected depth. 
This property is questionable as there is no precise documentation available.", + title='Formation Projected', + ) + hasAchievedTotalDepth: Optional[bool] = Field( + True, + description='True ("true" or "1") indicates that the wellbore has achieved total depth. That is, drilling has completed. False ("false" or "0") indicates otherwise. Not given indicates that it is not known whether total depth has been reached.', + title='Has Total Depth Been Achieved Flag', + ) + isActive: Optional[bool] = Field( + None, + description='True (="1" or "true") indicates that the wellbore is active. False (="0" or "false") indicates otherwise. It is the responsibility of the server to set this value based on its available internal data (e.g., what objects are changing).', + title='Is Active Flag', + ) + kickOffMd: Optional[ValueWithUnit] = Field( + None, + description='The kick-off point in measured depth (MD); for the main well the kickOffMd is set to 0.', + title='Kick-off MD', + ) + kickOffTvd: Optional[ValueWithUnit] = Field( + None, + description='Kickoff true vertical depth of the wellbore; for the main wellbore the kickOffTvd is set to 0.', + title='Kick-off TVD', + ) + locationWGS84: Optional[GeoJsonFeatureCollection] = Field( + None, + description='A 2D GeoJSON FeatureCollection defining wellbore location or trajectory in WGS 84 CRS.', + title='Wellbore Shape WGS 84', + ) + name: Optional[str] = Field( + None, description='The wellbore name', title='Wellbore Name' + ) + operator: Optional[str] = Field( + None, description='The operator of the wellbore.', title='Operator' + ) + permitDate: Optional[date] = Field( + None, description="The wellbore's permit date.", title='Permit Date' + ) + permitNumber: Optional[str] = Field( + None, + description="The wellbore's permit number or permit ID.", + title='Permit Number', + ) + plssLocation: Optional[PlssLocation] = Field( + None, + description='A location described by the Public Land Survey System (United States)', + title='US PLSS Location', + ) + propertyDictionary: Optional[dict] = Field( + None, + description='A dictionary structure, i.e. key/string value pairs, to carry additional wellbore properties.', + title='Property Dictionary', + ) + relationships: Optional[wellborerelationships] = Field( + None, description='The related entities.', title='Relationships' + ) + shape: Optional[Shape] = Field( + None, description='POSC wellbore trajectory shape.', title='Wellbore Shape' + ) + spudDate: Optional[date] = Field( + None, + description='The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began. The format follows ISO 8601 YYYY-MM-DD extended format', + title='Spud Date', + ) + state: Optional[str] = Field( + None, + description='The state name, in which the wellbore is located.', + title='State', + ) + totalDepthMd: Optional[ValueWithUnit] = Field( + None, + description='The measured depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site.', + title='Total MD', + ) + totalDepthMdDriller: Optional[ValueWithUnit] = Field( + None, + description="The total depth along the wellbore as reported by the drilling contractor from 'elevationReference'.
The unit definition is found via the property's 'unitKey' in 'frameOfReference.units' dictionary.", + title='Total MD Drilled', + ) + totalDepthMdPlanned: Optional[ValueWithUnit] = Field( + None, + description='Planned measured depth for the wellbore total depth.', + title='Total MD Planned', + ) + totalDepthMdSubSeaPlanned: Optional[ValueWithUnit] = Field( + None, + description='Planned measured depth for the wellbore total depth - with respect to seabed.', + title='Total MD Sub Sea Planned', + ) + totalDepthProjectedMd: Optional[ValueWithUnit] = Field( + None, + description='The projected total measured depth of the borehole. This property is questionable as there is no precise documentation available.', + title='Total MD Projected', + ) + totalDepthTvd: Optional[ValueWithUnit] = Field( + None, + description='The true vertical depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site.', + title='Total TVD', + ) + totalDepthTvdDriller: Optional[ValueWithUnit] = Field( + None, + description="The total depth true vertical as reported by the drilling contractor from 'elevationReference', downwards increasing. The unit definition is found via the property's 'unitKey' in 'frameOfReference.units' dictionary.", + title='Total TVD Drilled', + ) + totalDepthTvdPlanned: Optional[ValueWithUnit] = Field( + None, + description='Planned true vertical depth for the wellbore total depth.', + title='Total TVD Planned', + ) + totalDepthTvdSubSeaPlanned: Optional[ValueWithUnit] = Field( + None, + description='Planned true vertical depth for the wellbore total depth - with respect to seabed.', + title='Total TVD Sub Sea Planned', + ) + uwi: Optional[str] = Field( + None, + description='The unique wellbore identifier, aka. API number, US well number or UBHI.
Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits).', + title='Unique Wellbore Identifier', + ) + wellHeadElevation: Optional[ValueWithUnit] = Field( + None, + description="The wellbore's vertical position is an elevation from mean sea level (MSL), positive above MSL.", + title='Well Head Elevation', + ) + wellHeadGeographic: Optional[geographicPosition] = Field( + None, + description="The wellbore's well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.", + title='Well Head Position, Geographic', + ) + wellHeadProjected: Optional[projectedPosition] = Field( + None, + description="The wellbore's well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.", + title='Well Head Position, Projected', + ) + wellHeadWgs84: Optional[core_dl_geopoint] = Field( + None, + description="The wellbore's position in WGS 84 latitude and longitude.", + title='WGS 84 Position', + ) + wellboreNumberGovernment: Optional[str] = Field( + None, + description='Government assigned wellbore number.', + title='Government Number', + ) + wellboreNumberOperator: Optional[str] = Field( + None, description='Operator wellbore number.', title='Operator Number' + ) + wellborePurpose: Optional[WellborePurpose] = Field( + None, description='POSC wellbore purpose', title='Wellbore Purpose' + ) + wellboreStatus: Optional[WellboreStatus] = Field( + None, description='POSC wellbore status.', title='Wellbore Status' + ) + wellboreType: Optional[WellboreType] = Field( + None, description='Type of wellbore.', title='Wellbore Type' + ) + + +class wellbore(DDMSBaseModel): + acl: Optional[TagDictionary] = Field( + None, + description='The access control tags associated with this entity.', + title='Access Control List', + ) + ancestry: Optional[LinkList] = Field( + None, + description='The links to data, which constitute the inputs.', + title='Ancestry', + ) + data: Optional[wellboreData] = Field( + None, description='Wellbore data container', title='Wellbore Data' + ) + id: Optional[str] = Field( + None, description='The unique identifier of the wellbore', title='Wellbore ID' + ) + kind: Optional[str] = Field( + 'osdu:wks:wellbore:0.0.1', + description='Well-known wellbore kind specification', + title='Wellbore Kind', + ) + legal: Optional[Legal] = Field( + None, + description="The geological interpretation's legal tags", + title='Legal Tags', + ) + meta: Optional[List[MetaItem]] = Field( + None, + description="The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)", + title='Frame of Reference Meta Data', + ) + type: Optional[str] = Field( + None, + description='The reference entity type as declared in common:metadata:entity:*.', + title='Entity Type', + ) + version: Optional[float] = Field( + None, + description='The version number of this wellbore; set by the framework.', + title='Entity Version Number', + ) + + +class channel(DDMSBaseModel): + absentValue: Optional[str] = Field( + None, + description='Optional field carrying the absent value as string for this channel.', + title='Absent Value', + ) + dataType: Optional[DataType] = Field( + 'number', + description="The log value type (per log sample). 
The 'format' property may contain further hints about data type presentation.", + title='Data Type', + ) + dimension: Optional[int] = Field( + None, description='The dimension of this log or channel', title='Dimension' + ) + family: Optional[str] = Field( + None, + description='The log family code of this log or channel (optional)', + title='Log Family', + ) + familyType: Optional[str] = Field( + None, + description="The log family type code of this log or channel. Example: 'Neutron Porosity' for 'Thermal Neutron Porosity Sandstone'. (optional)", + title='Log Family Type', + ) + format: Optional[Format] = Field( + 'float32', + description="Optional format hint how to treat the log values as strings or number of bits per 'dataType'.", + title='Format Hint', + ) + logstoreId: Optional[float] = Field( + None, + description='The id of this log or channel in the Logstore. This property is not present in the index channel.', + title='Logstore ID', + ) + bulkURI: Optional[str] = Field( + None, + description='bulkURI either URL or URN.', + title='bulk URI', + ) + longName: Optional[str] = Field( + None, description='The long name of this log or channel', title='Log Long Name' + ) + mnemonic: Optional[str] = Field( + None, description='The mnemonic of this log or channel', title='Mnemonic' + ) + name: Optional[str] = Field( + None, description='The name of this log or channel.', title='Log Name' + ) + properties: Optional[List[namedProperty]] = Field( + None, + description='The named properties of this log or channel.', + title='Named Properties', + ) + source: Optional[str] = Field( + None, + description='The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated.', + title='Source', + ) + unitKey: Optional[str] = Field( + None, + description="The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition.", + title='Unit', + ) + + +class logSetData(DDMSBaseModelWithExtra): + azimuthReference: Optional[str] = Field( + None, + description='Azimuth reference code defining the type of North. Only used for logSets with azimuth data', + title='Azimuth Reference Code', + ) + channelMnemonics: Optional[List[str]] = Field( + None, + description='A list of channel Mnemonics in this log set.', + title='Channel Mnemonics', + ) + channelNames: Optional[List[str]] = Field( + None, + description='A list of channel long names in this log set.', + title='Channel Names', + ) + classification: Optional[str] = Field( + 'Externally Processed LogSet', + description='The well-known log set classification code.', + title='Log Set Classification', + ) + dateCreated: Optional[datetime] = Field( + None, + description='The UTC date time of the entity creation', + title='Creation Date and Time', + ) + dateModified: Optional[datetime] = Field( + None, + description='The UTC date time of the last entity modification', + title='Last Modification Date and Time', + ) + elevationReference: Optional[SimpleElevationReference] = None + externalIds: Optional[List[str]] = Field( + None, + description='An array of identities (e.g. 
some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.', + title='Array of External IDs', + ) + name: Optional[str] = Field( + None, description='The name of this log set', title='Log Set Name' + ) + operation: Optional[str] = Field( + None, description='The operation which created this entity', title='Operation' + ) + reference: Optional[channel] = None + referenceType: Optional[str] = Field( + None, + description='The reference index type of the log set.', + title='Reference Type', + ) + relationships: Optional[logsetrelationships] = None + start: Optional[ValueWithUnit] = None + step: Optional[ValueWithUnit] = None + stop: Optional[ValueWithUnit] = None + + +class dipSetData(DDMSBaseModelWithExtra): + azimuthReference: Optional[str] = Field( + None, + description='Azimuth reference code defining the type of North. Only used for dipSets with azimuth data', + title='Azimuth Reference Code', + ) + classification: Optional[str] = Field( + 'Externally Processed LogSet', + description='The well-known log set classification code.', + title='Log Set Classification', + ) + dateCreated: Optional[datetime] = Field( + None, + description='The UTC date time of the entity creation', + title='Creation Date and Time', + ) + dateModified: Optional[datetime] = Field( + None, + description='The UTC date time of the last entity modification', + title='Last Modification Date and Time', + ) + elevationReference: Optional[SimpleElevationReference] = None + externalIds: Optional[List[str]] = Field( + None, + description='An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.', + title='Array of External IDs', + ) + name: Optional[str] = Field( + None, description='The name of this dip set', title='Dip Set Name' + ) + operation: Optional[str] = Field( + None, description='The operation which created this entity', title='Operation' + ) + reference: Optional[channel] = None + referenceType: Optional[str] = Field( + None, + description='The reference index type of the dip set.', + title='Reference Type', + ) + relationships: Optional[dipsetrelationships] = None + start: Optional[ValueWithUnit] = None + step: Optional[ValueWithUnit] = None + stop: Optional[ValueWithUnit] = None + bulkURI: Optional[str] = Field( + None, + description='bulkURI either URL or URN.', + title='bulk URI', + ) + + +class logset(DDMSBaseModel): + acl: Optional[TagDictionary] = Field( + None, + description='The access control tags associated with this entity.', + title='Access Control List', + ) + ancestry: Optional[LinkList] = Field( + None, + description='The links to data, which constitute the inputs.', + title='Ancestry', + ) + data: Optional[logSetData] = Field( + None, + description='Log channel set associated with a wellbore', + title='Log Set Data', + ) + id: Optional[str] = Field( + None, description='The unique identifier of the log set', title='Log Set ID' + ) + kind: Optional[str] = Field( + 'osdu:wks:logSet:0.0.1', description='Kind specification', title='Log Set Kind' + ) + legal: Optional[Legal] = Field( + None, description="The log-set's legal tags", title='Legal Tags' + ) + meta: Optional[List[MetaItem]] = Field( + None, + description="The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)", + title='Frame of Reference Meta Data', + ) + type: Optional[str] = Field( + None, + description='The reference entity type as 
declared in common:metadata:entity:*.', + title='Entity Type', + ) + version: Optional[float] = Field( + None, + description='The version number of this log set; set by the framework.', + title='Entity Version Number', + ) + + +class dipset(DDMSBaseModel): + acl: Optional[TagDictionary] = Field( + None, + description='The access control tags associated with this entity.', + title='Access Control List', + ) + ancestry: Optional[LinkList] = Field( + None, + description='The links to data, which constitute the inputs.', + title='Ancestry', + ) + data: Optional[dipSetData] = Field( + None, + description='dipset data', + title='Dip Set Data', + ) + id: Optional[str] = Field( + None, description='The unique identifier of the dip set', title='Dip Set ID' + ) + kind: Optional[str] = Field( + 'osdu:wks:dipSet:0.0.1', description='Kind specification', title='Dip Set Kind' + ) + legal: Optional[Legal] = Field( + None, description="The dip-set's legal tags", title='Legal Tags' + ) + meta: Optional[List[MetaItem]] = Field( + None, + description="The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)", + title='Frame of Reference Meta Data', + ) + type: Optional[str] = Field( + None, + description='The reference entity type as declared in common:metadata:entity:*.', + title='Entity Type', + ) + version: Optional[float] = Field( + None, + description='The version number of this dip set; set by the framework.', + title='Entity Version Number', + ) + + +class trajectoryData(DDMSBaseModelWithExtra): + azimuthReference: Optional[str] = Field( + None, + description='Azimuth reference code defining the type of North, default TN for true north.', + title='Azimuth Reference Code', + ) + channelMnemonics: Optional[List[str]] = Field( + None, + description='A list of channel Mnemonics in this trajectory.', + title='Channel Mnemonics', + ) + channelNames: Optional[List[str]] = Field( + None, + description='A list of channel long names in this trajectory.', + title='Channel Names', + ) + channels: Optional[List[trajectorychannel]] = Field( + None, description='The channels associated to the index.', title='Channels' + ) + classification: Optional[str] = Field( + 'Raw Deviation Survey', + description='The well-known trajectory classification code.', + title='Trajectory Classification', + ) + dateCreated: Optional[datetime] = Field( + None, + description='The UTC date time of the entity creation', + title='Creation Date and Time', + ) + dateModified: Optional[datetime] = Field( + None, + description='The UTC date time of the last entity modification', + title='Last Modification Date and Time', + ) + elevationReference: Optional[SimpleElevationReference] = Field( + None, + description="The wellbore's elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0", + title='Elevation Reference', + ) + externalIds: Optional[List[str]] = Field( + None, + description='An array of identities (e.g. 
some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.', + title='Array of External IDs', + ) + index: Optional[trajectorychannel] = Field( + None, description='The index channel or log.', title='Index Channel' + ) + indexType: Optional[str] = Field( + None, description='The index type of the trajectory.', title='Index Type' + ) + locationWGS84: Optional[GeoJsonFeatureCollection] = Field( + None, + description="The wellbore's trajectory preview shape as GeoJSON LineString.", + title='Trajectory preview', + ) + name: Optional[str] = Field( + None, description='The name of this trajectory', title='Trajectory Name' + ) + referencePosition: Optional[Point3dNonGeoJson] = Field( + None, + description='The 3D reference position for the first sample (surface location for main wellbores, tie-in point for side-tracks.', + title='Reference Position First Sample', + ) + relationships: Optional[trajectoryrelationships] = Field( + None, description='The related entities.', title='Relationships' + ) + start: Optional[float] = Field( + None, description='The start index value of the trajectory.', title='Start' + ) + step: Optional[float] = Field( + None, description='The index increment value of the trajectory.', title='Step' + ) + stop: Optional[float] = Field( + None, description='The stop index value of the trajectory.', title='Stop' + ) + wellHeadWgs84: Optional[wgs84Position] = Field( + None, + description="The wellbore's position in WGS 84 latitude and longitude; vertical position is an elevation from mean sea level (MSL), positive above MSL.", + title='WGS 84 Position', + ) + + +class trajectory(DDMSBaseModel): + acl: Optional[TagDictionary] = Field( + None, + description='The access control tags associated with this entity.', + title='Access Control List', + ) + ancestry: Optional[LinkList] = Field( + None, + description='The links to data, which constitute the inputs.', + title='Ancestry', + ) + data: Optional[trajectoryData] = Field( + None, + description='A log set representing a trajectory associated with a wellbore', + title='Trajectory Data', + ) + id: Optional[str] = Field( + None, + description='The unique identifier of the trajectory', + title='Trajectory ID', + ) + kind: Optional[str] = Field( + 'osdu:wks:trajectory:0.0.1', + description='Kind specification', + title='Trajectory Kind', + ) + legal: Optional[Legal] = Field( + None, description="The trajectory's legal tags", title='Legal Tags' + ) + meta: Optional[List[MetaItem]] = Field( + None, + description="The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)", + title='Frame of Reference Meta Data', + ) + type: Optional[str] = Field( + None, + description='The reference entity type as declared in common:metadata:entity:*.', + title='Entity Type', + ) + version: Optional[float] = Field( + None, + description='The version number of this trajectory; set by the framework.', + title='Entity Version Number', + ) + + +class markerData(DDMSBaseModelWithExtra): + age: Optional[ValueWithUnit] = Field( + None, + description="The absolute age at the feature boundary. 
The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary.", + title='Age', + ) + boundaryRelation: Optional[str] = Field( + None, + description='The marker boundary relationship classification', + title='Interface Boundary Relation', + ) + classification: Optional[str] = Field( + None, + description='The classification of the marker. Could be client-defined via a catalog, e.g. common:wke:markerClassification:1.0.0 and common:wke:markerClassificationMember:1.0.0', + title='Marker Classification', + ) + dateCreated: Optional[datetime] = Field( + None, + description='The UTC date time of the entity creation', + title='Creation Date and Time', + ) + dateModified: Optional[datetime] = Field( + None, + description='The UTC date time of the last entity modification', + title='Last Modification Date and Time', + ) + depth: Optional[ValueWithUnit] = Field( + None, + description="The original marker depth - measured from data.elevationReference in data.depthReferenceType. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary.", + title='Marker Depth', + ) + depthReferenceType: Optional[str] = Field( + 'MD', + description='Depth reference code defining the type of depth for the marker. Default MD (measured depth). Depth is downwards increasing.', + title='Depth Reference Code', + ) + elevationReference: Optional[SimpleElevationReference] = Field( + None, + description='The elevation from mean sea level (MSL), where depth, topDepth, baseDepth are zero. Values above MSL are positive.', + title='Elevation Reference Level', + ) + externalIds: Optional[List[str]] = Field( + None, + description='An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.', + title='Array of External IDs', + ) + interpreter: Optional[str] = Field( + None, + description='The name of the interpreter who picked this marker.', + title='Interpreter Name', + ) + locationWGS84: Optional[GeoJsonFeatureCollection] = Field( + None, + description="The marker's shape as GeoJSON Point.", + title='GeoJSON Marker Location', + ) + markerFeatureType: Optional[str] = Field( + None, + description="The marker's type of feature like 'seismic', 'structural', 'stratigraphic'", + title='Marker Feature Type', + ) + markerGeoDomain: Optional[str] = Field( + None, + description="The marker's GeoScience domain like 'geologic', 'reservoir', 'petrophysical'", + title='Marker GeoScience Domain', + ) + markerSubFeatureAttribute: Optional[str] = Field( + None, + description="Further specification of the marker's sub-feature, e.g. in sequence stratigraphy.", + title='Marker Sub-feature Attribute', + ) + markerSubFeatureType: Optional[str] = Field( + None, + description="The marker's sub-type of the feature like 'horizon', 'fault', 'fracture'", + title='Marker Sub-feature Type', + ) + md: ValueWithUnit = Field( + ..., + description="The marker measured depth (MD) measured from data.elevationReference. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary.", + title='Marker Measured Depth', + ) + name: str = Field(..., description='The name of the marker', title='Marker Name') + planeOrientationAzimuth: Optional[ValueWithUnit] = Field( + None, + description="Azimuth angle. The azimuth reference is given by data.azimuthReference. 
The 'planeOrientationAzimuth.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition.", + title='Azimuth Angle', + ) + planeOrientationDip: Optional[ValueWithUnit] = Field( + None, + description="Dip angle. The 'planeOrientationDip.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition.", + title='Dip Angle', + ) + relationships: Optional[markerrelationships] = Field( + None, description='The entities related to this marker.', title='Relationships' + ) + stratigraphicHierarchyLevel: Optional[int] = Field( + None, + description='Optional hierarchical level in the chrono-stratigraphic/litho-stratigraphic catalog table, identified by the data.relationships.chartId', + title='Column Level', + ) + tvd: Optional[ValueWithUnit] = Field( + None, + description="The marker true vertical depth (TVD) measured from data.elevationReference. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary.", + title='Marker Measured Depth', + ) + wgs84ElevationFromMsl: Optional[ValueWithUnit] = Field( + None, + description="Elevation from Mean Sea Level, downwards negative. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary.", + title='Elevation from MSL', + ) + wgs84LatitudeLongitude: Optional[core_dl_geopoint] = Field( + None, + description="The marker's position in WGS 84 latitude and longitude.", + title='WGS 84 Latitude Longitude', + ) + + +class marker(DDMSBaseModel): + acl: TagDictionary = Field( + ..., + description='The access control tags associated with this entity.', + title='Access Control List', + ) + ancestry: Optional[LinkList] = Field( + None, + description='The links to data, which constitute the inputs.', + title='Ancestry', + ) + data: Optional[markerData] = Field( + None, + description='Geological marker using a single point-observation, typically along a wellbore.', + title='Marker Data', + ) + id: Optional[str] = Field( + None, description='The unique identifier of the marker', title='Marker ID' + ) + kind: str = Field(..., description='Marker kind specification', title='Marker Kind') + legal: Legal = Field(..., description="The marker's legal tags", title='Legal Tags') + meta: Optional[List[MetaItem]] = Field( + None, + description="The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)", + title='Frame of Reference Meta Data', + ) + type: Optional[str] = Field( + None, + description='The reference entity type as declared in common:metadata:entity:*.', + title='Entity Type', + ) + version: Optional[float] = Field( + None, + description='The version number of this marker; set by the framework.', + title='Entity Version Number', + ) + + +class wellData(DDMSBaseModelWithExtra): + basinContext: Optional[basinContext] = Field( + None, + description='The basin context details for the well.', + title='Basin Context', + ) + block: Optional[str] = Field( + None, description='The block name, in which the well is located.', title='Block' + ) + country: Optional[str] = Field( + None, + description="The country, in which the well is located. 
The country name follows the convention in ISO 3166-1 'English short country name', see https://en.wikipedia.org/wiki/ISO_3166-1", + title='Country', + ) + county: Optional[str] = Field( + None, + description='The county name, in which the well is located.', + title='County', + ) + dateCreated: Optional[datetime] = Field( + None, + description='The UTC date time of the entity creation', + title='Creation Date and Time', + ) + dateLicenseIssued: Optional[datetime] = Field( + None, + description='The UTC date time when the well license was issued.', + title='License Issue Date', + ) + dateModified: Optional[datetime] = Field( + None, + description='The UTC date time of the last entity modification', + title='Last Modification Date and Time', + ) + datePluggedAbandoned: Optional[datetime] = Field( + None, + description='The UTC date and time at which the well was plugged and abandoned.', + title='Plugged Abandoned Date', + ) + dateSpudded: Optional[datetime] = Field( + None, + description='The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began. The format follows ISO 8601 YYYY-MM-DD extended format', + title='Spud Date', + ) + directionWell: Optional[DirectionWell] = Field( + None, + description='POSC well direction. The direction of the flow of the fluids in a well facility (generally, injected or produced, or some combination).', + title='Well Direction', + ) + district: Optional[str] = Field( + None, + description='The district name, to which the well belongs.', + title='District', + ) + elevationReference: Optional[SimpleElevationReference] = Field( + None, + description="The well's elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0", + title='Elevation Reference', + ) + externalIds: Optional[List[str]] = Field( + None, + description='An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.', + title='Array of External IDs', + ) + field: Optional[str] = Field( + None, description='The field name, to which the well belongs.', title='Field' + ) + fluidWell: Optional[FluidWell] = Field( + None, + description='POSC well fluid. The type of fluid being produced from or injected \\ninto a well facility.', + title='Well Fluid', + ) + groundElevation: Optional[ValueWithUnit] = Field( + None, + description="The well's ground elevation, Values above MSL are positive..", + title='Ground Elevation', + ) + locationWGS84: Optional[GeoJsonFeatureCollection] = Field( + None, + description='A 2D GeoJSON FeatureCollection defining well location or trajectory in WGS 84 CRS.', + title='Well Shape WGS 84', + ) + name: Optional[str] = Field(None, description='The well name', title='Well Name') + operator: Optional[str] = Field( + None, + description='The operator company name of the well.', + title='Well Operator', + ) + operatorDivision: Optional[str] = Field( + None, + description='The operator division of the well.', + title='Operator Division', + ) + operatorInterest: Optional[float] = Field( + None, + description='Interest for operator. Commonly in percent.', + title='Well Operator Interest', + ) + operatorOriginal: Optional[str] = Field( + None, + description='Original operator of the well. 
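The locationWGS84 property above expects a GeoJSON FeatureCollection in WGS 84. A minimal, purely illustrative example, expressed as a Python literal with made-up coordinates, might look like this:

```python
# Illustrative GeoJSON FeatureCollection (WGS 84, longitude first) of the kind
# locationWGS84 could carry; all values are made up.
well_location = {
    "type": "FeatureCollection",
    "features": [
        {
            "type": "Feature",
            "geometry": {"type": "Point", "coordinates": [5.7331, 58.9701]},
            "properties": {"name": "Example well head"},
        }
    ],
}
```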
This may be different than the current operator.', + title='Original Well Operator', + ) + plssLocation: Optional[PlssLocation] = Field( + None, + description='A location described by the Public Land Survey System (United States)', + title='US PLSS Location', + ) + propertyDictionary: Optional[dict] = Field( + None, + description='A dictionary structure, i.e. key/string value pairs, to carry additional well properties.', + title='Property Dictionary', + ) + region: Optional[str] = Field( + None, + description='Geo-political region in which the well is located.', + title='Region', + ) + relationships: Optional[wellrelationships] = Field( + None, description='The related entities.', title='Relationships' + ) + state: Optional[str] = Field( + None, description='The state name, in which the well is located.', title='State' + ) + uwi: Optional[str] = Field( + None, + description='The unique well identifier, aka. API number, US well number or UBHI. Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits).', + title='Unique Well Identifier', + ) + waterDepth: Optional[ValueWithUnit] = Field( + None, description='Depth of water (not land rigs).', title='Water Depth' + ) + wellHeadElevation: Optional[ValueWithUnit] = Field( + None, + description="The well's vertical position is an elevation from mean sea level (MSL), positive above MSL.", + title='Well Head Elevation', + ) + wellHeadGeographic: Optional[geographicPosition] = Field( + None, + description="The well's well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.", + title='Well Head Position, Geographic', + ) + wellHeadProjected: Optional[projectedPosition] = Field( + None, + description="The well's well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.", + title='Well Head Position, Projected', + ) + wellHeadWgs84: Optional[core_dl_geopoint] = Field( + None, + description="The well's position in WGS 84 latitude and longitude.", + title='WGS 84 Position', + ) + wellLocationType: Optional[WellLocationType] = Field(None, description='', title='') + wellNumberGovernment: Optional[str] = Field( + None, description='Government assigned well number.', title='Government Number' + ) + wellNumberLicense: Optional[str] = Field( + None, description='License number of the well.', title='Well License Number' + ) + wellNumberOperator: Optional[str] = Field( + None, description='Operator well number.', title='Operator Number' + ) + wellPurpose: Optional[WellPurpose] = Field( + None, description='POSC well purpose', title='Well Purpose' + ) + wellStatus: Optional[WellStatus] = Field( + None, description='POSC well status.', title='Well Status' + ) + wellType: Optional[WellType] = Field( + None, description='Type of well.', title='Well Type' + ) + + +class well(DDMSBaseModel): + acl: Optional[TagDictionary] = Field( + None, + description='The access control tags associated with this entity.', + title='Access Control List', + ) + ancestry: Optional[LinkList] = Field( + None, + description='The links to data, which constitute the inputs.', + title='Ancestry', + ) + data: Optional[wellData] = Field( + None, description='Well data container', title='Well Data' + ) + id: Optional[str] = Field( + None, description='The unique identifier of the well', title='Well ID' + ) + kind: Optional[str] = Field( + 'osdu:wks:well:0.0.1', + 
description='Well-known well kind specification', + title='Well Kind', + ) + legal: Optional[Legal] = Field( + None, + description="The geological interpretation's legal tags", + title='Legal Tags', + ) + meta: Optional[List[MetaItem]] = Field( + None, + description="The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)", + title='Frame of Reference Meta Data', + ) + type: Optional[str] = Field( + None, + description='The reference entity type as declared in common:metadata:entity:*.', + title='Entity Type', + ) + version: Optional[float] = Field( + None, + description='The version number of this well; set by the framework.', + title='Entity Version Number', + ) diff --git a/app/model/model_utils.py b/app/model/model_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..1e605ad0073b8d787605524830e7c9b8963fd7fe --- /dev/null +++ b/app/model/model_utils.py @@ -0,0 +1,48 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Type + +from odes_storage.models import Record +from pydantic import BaseModel + +# translate any model to a record + +def to_record(obj: BaseModel) -> Record: + """ + create a Record instance from another model which should shared Record schema. + :param obj: input model instance, expected to be 'compatible' with Record model + :return: record object + """ + return Record(**obj.dict(exclude_unset=True, by_alias=True)) + + +def from_record(cls: Type[BaseModel], record: Record): + """ + create a Record instance from another model which should shared Record schema. + :param cls: model class use to instantiate the object + :param record: input record object + :return: object instantiate (of class 'cls') + """ + return cls(**record.dict(exclude_unset=True, by_alias=True)) + + +def record_to_dict(record: BaseModel) -> dict: + """ Generate a dictionary representation of the model, use exclude_unset=True and by_alias=True""" + return record.dict(exclude_unset=True, by_alias=True) + + +def record_to_json(record: BaseModel) -> str: + """ Generate a JSON representation of the model, use exclude_unset=True and by_alias=True""" + return record.json(exclude_unset=True, by_alias=True) diff --git a/app/model/schema_version.py b/app/model/schema_version.py new file mode 100644 index 0000000000000000000000000000000000000000..3e26304f197b7288b09a3eff6528272508c4cf3a --- /dev/null +++ b/app/model/schema_version.py @@ -0,0 +1,22 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +log_version = '1.0.5' +logset_version = '1.0.5' +marker_version = '1.0.4' +trajectory_version = '1.0.5' +well_version = '1.0.2' +wellbore_version = '1.0.6' +dip_version = '1.0.0' +dipset_version = '1.0.0' diff --git a/app/model/user.py b/app/model/user.py new file mode 100644 index 0000000000000000000000000000000000000000..16b24feba72e29bd8a51e9587e35c91162f2964e --- /dev/null +++ b/app/model/user.py @@ -0,0 +1,23 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from dataclasses import dataclass, field +from typing import List + + +@dataclass(frozen=True) +class User: + authenticated: bool = False + email: str = 'anonymous' + groups: List[str] = field(default_factory=list) diff --git a/app/routers/__init__.py b/app/routers/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/routers/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/routers/about.py b/app/routers/about.py new file mode 100644 index 0000000000000000000000000000000000000000..c0fb3637a5485350bff1d497a50b97d29af5aa4f --- /dev/null +++ b/app/routers/about.py @@ -0,0 +1,64 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from fastapi import APIRouter, Depends +from pydantic import BaseModel +from app import __version__, __app_name__, __build_number__ +from app.conf import Config +from typing import Dict +from app.auth.auth import require_opendes_authorized_user + +router = APIRouter() + + +class AboutResponse(BaseModel): + service: str = None + version: str = None + buildNumber: str = None + cloudEnvironment: str = None + + +@router.get("/about", response_model=AboutResponse, include_in_schema=True) +async def get_about() -> AboutResponse: + return AboutResponse.construct( + service=__app_name__, + version=__version__, + buildNumber=__build_number__, + cloudEnvironment=Config.cloud_provider.value + ) + + +class VersionDetailsResponse(BaseModel): + service: str = None + version: str = None + buildNumber: str = None + details: Dict[str, str] = None + + +@router.get("/version", response_model=VersionDetailsResponse, include_in_schema=True) +async def get_version(user=Depends(require_opendes_authorized_user, use_cache=False)): + # very basic parsing for now + key_val_list = [key_val.split('=', 1) for key_val in Config.build_details.value.split(';') if '=' in key_val] + details = { + key_val[0].strip(): key_val[1].replace('\\"', '"').strip(' "') + for key_val in key_val_list + } + # codecs.decode(s, 'unicode_escape') + return VersionDetailsResponse.construct( + service=__app_name__, + version=__version__, + buildNumber=__build_number__, + details=details + ) + diff --git a/app/routers/ddms_v2/__init__.py b/app/routers/ddms_v2/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/routers/ddms_v2/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/routers/ddms_v2/common_parameters.py b/app/routers/ddms_v2/common_parameters.py new file mode 100644 index 0000000000000000000000000000000000000000..bd69491887cf8b747664cd707d9996c9a5639b39 --- /dev/null +++ b/app/routers/ddms_v2/common_parameters.py @@ -0,0 +1,10 @@ +from fastapi import Query +from app.bulk_persistence import JSONOrient + +def json_orient_parameter(orient: str = Query( + JSONOrient.split.value, + description='define format when using JSON data is used. Value can be ' + ', '.join([o.value for o in JSONOrient]), + regex="|".join([o.value for o in JSONOrient]) + ) +) -> str: + return orient diff --git a/app/routers/ddms_v2/ddms_v2.py b/app/routers/ddms_v2/ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..e400cff521e95485bab5d5b8597a94e3d7544cea --- /dev/null +++ b/app/routers/ddms_v2/ddms_v2.py @@ -0,0 +1,29 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from fastapi import APIRouter, Depends, HTTPException + +from app.model.ddms_model_response import V1AboutResponse, AboutResponseUser +from app.model.model_curated import * +from app.utils import Context, get_ctx + + +router = APIRouter() + + +@router.get('/status', response_model=V1AboutResponse, + summary="Get the status of the service") +async def about(ctx: Context = Depends(get_ctx)) -> V1AboutResponse: + return V1AboutResponse(user=AboutResponseUser(tenant=ctx.partition_id or 'unknown', email=ctx.user.email)) + diff --git a/app/routers/ddms_v2/log_ddms_v2.py b/app/routers/ddms_v2/log_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..ad82be6aacab93b5d6a434aef51962701b0e5a5c --- /dev/null +++ b/app/routers/ddms_v2/log_ddms_v2.py @@ -0,0 +1,548 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import asyncio +import json +from typing import List, Optional + +import numpy as np +import pandas as pd +import starlette +import starlette.status as status +from fastapi import ( + APIRouter, + Depends, + File, + HTTPException, + Query, + Request, + UploadFile, +) +from odes_storage.models import ( + CreateUpdateRecordsResponse, + Record, + RecordVersions, +) +from pydantic import BaseModel, Field +from starlette.responses import Response + +from app.bulk_persistence import DataframeSerializer, JSONOrient, MimeTypes, get_dataframe +from app.clients.storage_service_client import get_storage_record_service +from app.model.log_bulk import LogBulkHelper +from app.model.model_curated import log +from app.model.model_utils import from_record, to_record +from app.routers.ddms_v2.persistence import Persistence +from app.routers.ddms_v2.common_parameters import json_orient_parameter +from app.utils import Context, OpenApiHandler, OpenApiResponse, get_ctx + + +router = APIRouter() + + +async def get_persistence() -> Persistence: + return Persistence() + + +async def fetch_record(ctx: Context, record_id: str, version=None) -> Record: + """ + :param ctx: context + :param record_id: record identifier + :param version: log version + :return: record + """ + + storage_client = await get_storage_record_service(ctx) + if version: + return await storage_client.get_record_version( + id=record_id, + version=version, + data_partition_id=ctx.partition_id, + ) + else: + return await storage_client.get_record( + id=record_id, + data_partition_id=ctx.partition_id, + ) + + +async def update_records(ctx: Context, records: List[BaseModel]) -> CreateUpdateRecordsResponse: + """ + :param ctx: context + :param records: list of record in dict or pydantic format + :return: id of the record + """ + storage_client = await get_storage_record_service(ctx) + # record_dict_list = [r.dict(exclude_unset=True) if isinstance(r, BaseModel) else r for r in records] + # just assume it works + return await storage_client.create_or_update_records( + record=records, data_partition_id=ctx.partition_id + ) + + +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------- API get Log META ----------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +@router.get('/logs/{logid}', + response_model=log, + summary="Get the Log using wks:log:1.0.5 schema", + description=""" + Get the log object using its data ecosystem **id**. <p>If the log + kind is *wks:log:1.0.5* returns the record directly</p> <p>If the + wellbore kind is different *wks:log:1.0.5* it will get the raw + record and convert the results to match the *wks:log:1.0.5*. 
If + conversion is not possible returns an error **500**</p>""", + operation_id="get_log", + responses={status.HTTP_404_NOT_FOUND: {"description": "log not found"}}, + response_model_exclude_unset=True) +async def get_log( + logid: str, + ctx: Context = Depends(get_ctx) +) -> log: + record = await fetch_record(ctx, logid) + return from_record(log, record) + + +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------- API create or update Log META ---------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +@router.post('/logs', response_model=CreateUpdateRecordsResponse, + summary="Create or update the logs using wks:log:1.0.5 schema", + operation_id="post_log", + responses={ + status.HTTP_400_BAD_REQUEST: {"description": "Missing mandatory parameter or unknown parameter"}}) +async def post_log( + logs: List[log], + ctx: Context = Depends(get_ctx) +) -> CreateUpdateRecordsResponse: + if len(logs) == 0: + return CreateUpdateRecordsResponse(recordCount=0, recordIds=[]) + + return await update_records(ctx, records=[to_record(lg) for lg in logs]) + + +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# -------------------------------------------------- API delete Log META ---------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +@router.delete('/logs/{logid}', + summary="Delete the log. 
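A minimal client-side sketch of the two endpoints above, plus the bulk-data endpoint defined further down in this module. The host, route prefix, headers and record payload are assumptions for illustration, not values taken from this repository:

```python
# Hypothetical client calls against the log endpoints; BASE_URL, the route
# prefix, headers and payloads are illustrative only.
import httpx

BASE_URL = "https://example.com/api/os-wellbore-ddms/ddms/v2"  # assumed prefix
HEADERS = {"Authorization": "Bearer <token>", "data-partition-id": "opendes"}

# Create or update a log record (acl/legal omitted here for brevity; real
# records must carry them).
new_logs = [{"kind": "opendes:wks:log:1.0.5", "data": {"name": "GR"}}]
created = httpx.post(f"{BASE_URL}/logs", json=new_logs, headers=HEADERS).json()
log_id = created["recordIds"][0]

# Read the record back.
log_meta = httpx.get(f"{BASE_URL}/logs/{log_id}", headers=HEADERS).json()

# Write bulk data using the pandas 'split' orient (endpoint defined below).
bulk = {"columns": ["Ref", "GR"], "index": [0, 1], "data": [[0.0, 80.5], [0.5, 82.1]]}
httpx.post(f"{BASE_URL}/logs/{log_id}/data",
           params={"orient": "split"}, json=bulk, headers=HEADERS)
```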
The API performs a logical deletion of the given record", + operation_id="del_log", + status_code=status.HTTP_204_NO_CONTENT, + response_class=Response, + responses={status.HTTP_404_NOT_FOUND: {"description": "log not found"}, + status.HTTP_204_NO_CONTENT: {"description": "Record deleted successfully"} + }) +async def del_log( + logid: str, + ctx: Context = Depends(get_ctx)): + storage_client = await get_storage_record_service(ctx) + await storage_client.delete_record(id=logid, data_partition_id=ctx.partition_id) + + +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------- API get Log all versions --------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +@router.get( + "/logs/{logid}/versions", + response_model=RecordVersions, + summary="Get all versions of the log", + operation_id="get_log_versions", + responses={status.HTTP_404_NOT_FOUND: {"description": "log not found"}} +) +async def get_log_versions( + logid: str, ctx: Context = Depends(get_ctx) +) -> RecordVersions: + storage_client = await get_storage_record_service(ctx) + return await storage_client.get_all_record_versions( + id=logid, data_partition_id=ctx.partition_id + ) + + +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------- API get Log @ specific version --------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- + + +@router.get( + "/logs/{logid}/versions/{version}", + response_model=log, + summary="Get the given version of log using wks:log:1.0.5 schema", + operation_id="get_log_version", + responses={status.HTTP_404_NOT_FOUND: {"description": "log not found"}}, + response_model_exclude_unset=True +) +async def get_log_version( + logid: str, version: int, ctx: Context = Depends(get_ctx) +) -> log: + return from_record(log, await fetch_record(ctx, logid, version)) + + +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------------- API write Log BULK --------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +def bulk_id_path_parameter(bulk_path: str = Query( + None, + alias='bulk-path', + description='The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). 
' + 'Required for non wks:log.') +) -> str: + return bulk_path + + +async def _write_log_data( + ctx: Context, + persistence: Persistence, + logid: str, + bulk_path: Optional[str], + dataframe: pd.DataFrame) -> CreateUpdateRecordsResponse: + # TODO: handle strings - if column type is object or string, could be useful to + # convert to categories df['text'].astype('category') to speed up storage + # http://matthewrocklin.com/blog/work/2015/03/16/Fast-Serialization + + # we can concurrently fetch the log record and construct/upload the bulk + bulk_id, log_record = await asyncio.gather( + persistence.write_bulk(ctx=ctx, dataframe=dataframe), + fetch_record(ctx, logid), + ) + # update the record + LogBulkHelper.update_bulk_id(log_record, bulk_id, bulk_path) + + # push new version on the storage + return await update_records(ctx, [log_record]) + + +_log_dataframe_example = pd.DataFrame( + [ + [0, 1001, 2001], + [0.5, 1002, 2002], + [1, 1003, 2003], + [1.5, 1004, 2004], + [2, 1005, 2005], + ], + columns=["Ref", "col_100X", "col_200X"], +) + +# manually setup doc as we wanted to tweaked the classic mechanism in order to best perf as we can +@OpenApiHandler.set( + operation_id="write_log_data", + request_body={ + 'description': + 'Write log bulk data.' + '\nIt uses [Pandas.Dataframe json format]' + '(https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html)' + + '.\n Here\'re examples for data with {} rows and {} columns with different _orient_: '.format( + _log_dataframe_example.shape[0], + _log_dataframe_example.shape[1]) + + ''.join([f'\n* {o.value}: <br/>`{DataframeSerializer.to_json(_log_dataframe_example, o)}`<br/> ' + for o in JSONOrient]), + # put examples here because of bug in swagger UI to properly render multiple examples + 'required': True, + 'content': { + MimeTypes.JSON.type: { + 'schema': { + # swagger UI bug, so single example here + 'example': json.loads( + DataframeSerializer.to_json(_log_dataframe_example, JSONOrient.split) + ), + 'oneOf': [DataframeSerializer.get_schema(o) for o in JSONOrient] + } + } + } + }) +@router.post('/logs/{logid}/data', + summary="Writes the specified data to the log (atomic).", + description='Overwrite if exists', + operation_id="write_log_data", + response_model=CreateUpdateRecordsResponse, + responses={status.HTTP_404_NOT_FOUND: {"description": "log not found"}, + status.HTTP_200_OK: {}}) +async def write_log_data( + request: Request, + logid: str, + orient: str = Depends(json_orient_parameter), + bulk_path: str = Depends(bulk_id_path_parameter), + persistence: Persistence = Depends(get_persistence), + ctx: Context = Depends(get_ctx), +) -> CreateUpdateRecordsResponse: + content = await request.body() # request.stream() + df = DataframeSerializer.read_json(content, orient) + return await _write_log_data(ctx, persistence, logid, bulk_path, df) + +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------------- API write Log BULK (UPLOAD FILE) ------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +# --------------------------------------------------------------------------------------------------------------------- +@router.post('/logs/{logid}/upload_data', + summary='Writes the data to the 
log. Support json file (then orient must be provided) and parquet', + description='Overwrite if exists', + operation_id="upload_log_data", + response_model=CreateUpdateRecordsResponse, + responses={ + status.HTTP_400_BAD_REQUEST: {"description": "invalid request"}, + status.HTTP_404_NOT_FOUND: {"description": "log not found"}, + status.HTTP_200_OK: {}}) +async def upload_log_data_file( + logid: str, + file: UploadFile = File(...), + orient: str = Depends(json_orient_parameter), + bulk_path: str = Depends(bulk_id_path_parameter), + persistence: Persistence = Depends(get_persistence), + ctx: Context = Depends(get_ctx), +) -> CreateUpdateRecordsResponse: + try: + mime_type = MimeTypes.from_str(file.content_type) + except ValueError: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="unknown content_type " + file.content_type, + ) + + if mime_type == MimeTypes.JSON: + # TODO for now the entire content is read at once, can chunk it instead I guess + content: bytes = await file.read() + df = DataframeSerializer.read_json(content, orient) + elif mime_type == MimeTypes.PARQUET: + try: + df = DataframeSerializer.read_parquet(file.file) + except Exception as e: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, + detail='invalid data: ' + e.message if hasattr(e, 'message') else 'unknown error') + else: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=file.content_type + ' is not supported') + + return await _write_log_data(ctx, persistence, logid, bulk_path, df) + + +async def _get_log_data( + ctx: Context, + persistence: Persistence, + logid: str, + version: int, + orient: str, + bulk_id_path: str = None, +): + + """ + Get log bulk data in format in the given orient value from log id logid + + private method in order to factorize GET /logs/{logid} and GET /logs/{logid}/version/{version} + get the log record with the specified log id into the storage, + fetch the bulkd id in the record using bulkk_id_path if any + read the bulk data and serialize it into a json. + + param persistence: peristence instance used to read the data + param logid: id of the log + param version: the version of the data log you want to have + param orient: get the log data in the given orient value + param bulk_id_path: support of custom bulk id path, if none use the standard + + return json response with the bulkd data in the orient format + """ + + # we may use an optimistic cache here + log_record = await fetch_record(ctx, logid, version) + + df = await persistence.read_bulk(ctx, log_record, bulk_id_path) + content = DataframeSerializer.to_json(df, orient=orient) + return Response(content=content, media_type=MimeTypes.JSON.type) # content is already jsonified no need to use JSONResponse + + +@OpenApiHandler.set( + operation_id="get_log_data", + responses=[ + OpenApiResponse( + status=status.HTTP_200_OK, + description= + 'Get log bulk data in format in the given _orient_ value.' 
+ '\nIt uses [Pandas.Dataframe json format]' + '(https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html)' + + '.\n Here\'re examples for data with {} rows and {} columns with different _orient_: '.format( + _log_dataframe_example.shape[0], + _log_dataframe_example.shape[1]) + + ''.join([f'\n* {o.value}: <br/>`{DataframeSerializer.to_json(_log_dataframe_example, o)}`<br/> ' + for o in JSONOrient]), + + name='GetLogDataResponse', + example=DataframeSerializer.to_json(_log_dataframe_example, JSONOrient.split), + schema={'oneOf': [DataframeSerializer.get_schema(o) for o in JSONOrient]}) + ]) +@router.get('/logs/{logid}/data', + summary="Returns all data within the specified filters. Strongly consistent.", + description='return full bulk data', + operation_id="get_log_data", + responses={status.HTTP_404_NOT_FOUND: {"description": "log not found"}}) +async def get_log_data( + logid: str, + orient: str = Depends(json_orient_parameter), + bulk_id_path: str = Depends(bulk_id_path_parameter), + persistence: Persistence = Depends(get_persistence), + ctx: Context = Depends(get_ctx), +): + return await _get_log_data( + ctx=ctx, + persistence=persistence, + logid=logid, + orient=orient, + bulk_id_path=bulk_id_path, + version=None, + ) + + +class StatsColumn(BaseModel): + count: int = Field(..., description="Count number of non-NA/null observations") + mean: float = Field(..., description="Mean of the values") + std: float = Field(..., description="Standard deviation of the observations") + min: float = Field(..., description="Minimum of the values in the object") + percentile25: float = Field(..., alias="25%") + percentile50: float = Field(..., alias="50%") + percentile75: float = Field(..., alias="75%") + max: float = Field(..., description="Maximum of the values in the object") + + +class GetStatisticResponse(BaseModel): + columns: List[StatsColumn] + + +@router.get('/logs/{logid}/statistics', + summary='Data statistics', + description="This API will return count, mean, std, min, max and percentiles of each column", + response_model=GetStatisticResponse, + ) +async def get_log_data_statistics(logid: str, + bulk_id_path: str = Depends(bulk_id_path_parameter), + ctx: Context = Depends(get_ctx)): + """ + /!\ This is a non optimized API due to the the fetch of the entire bulk each time it is called + In case of intensive usage, this API should retrieve statistics from metadata stored at write time. + """ + # we may use an optimistic cache here + log_record = await fetch_record(ctx, logid) # use dict to support the custom path + + bulk_id = LogBulkHelper.get_bulk_id(log_record, bulk_id_path) + if bulk_id is None: + content = '{}' # no bulk + else: + df = await get_dataframe(ctx, bulk_id) + content = df.describe(include="all").to_json() + + return Response(content=content, media_type=MimeTypes.JSON.type) + + +@OpenApiHandler.set( + operation_id="get_log_data_by_version", + responses=[ + OpenApiResponse( + status=status.HTTP_200_OK, + description= + 'Get log bulk data in format in the given _orient_ value.' 
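Since the statistics endpoint above simply serializes pandas describe() output, its response shape can be reproduced locally. A small sketch (the service reads the bulk from storage rather than building the frame in memory):

```python
# Local reproduction of what GET /logs/{logid}/statistics returns; the sample
# frame mirrors _log_dataframe_example defined earlier in this module.
import pandas as pd

df = pd.DataFrame(
    [[0, 1001, 2001], [0.5, 1002, 2002], [1, 1003, 2003],
     [1.5, 1004, 2004], [2, 1005, 2005]],
    columns=["Ref", "col_100X", "col_200X"],
)
stats_json = df.describe(include="all").to_json()
# The JSON is keyed by column name; each column maps describe()'s row labels
# ("count", "mean", "std", "min", "25%", "50%", "75%", "max") to values,
# which is what the aliased fields of StatsColumn mirror.
```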
+ '\nIt uses [Pandas.Dataframe json format]' + '(https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html)' + + '.\n Here\'re examples for data with {} rows and {} columns with different _orient_: '.format( + _log_dataframe_example.shape[0], + _log_dataframe_example.shape[1]) + + ''.join([f'\n* {o.value}: <br/>`{DataframeSerializer.to_json(_log_dataframe_example, o)}`<br/> ' + for o in JSONOrient]), + + name='GetLogDataResponse', + example=DataframeSerializer.to_json(_log_dataframe_example, JSONOrient.split), + schema={'oneOf': [DataframeSerializer.get_schema(o) for o in JSONOrient]}) + ]) +@router.get('/logs/{logid}/versions/{version}/data', + summary="Returns all data within the specified filters. Strongly consistent.", + description='return full bulk data', + operation_id="get_log_data_by_version", + responses={status.HTTP_404_NOT_FOUND: {"description": "log not found"}}) +async def get_log_data_by_version( + logid: str, + version: int, + orient: str = Depends(json_orient_parameter), + bulk_id_path: str = Depends(bulk_id_path_parameter), + persistence: Persistence = Depends(get_persistence), + ctx: Context = Depends(get_ctx), +): + + return await _get_log_data( + ctx=ctx, + persistence=persistence, + logid=logid, + orient=orient, + bulk_id_path=bulk_id_path, + version=version, + ) + + +# ---------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------------------- +# -------------------------------------------------- NOT IMPLEMENTED --------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------------------- +# ---------------------------------------------------------------------------------------------------------------------- + +@router.get('/logs/{logid}/decimated', + summary="Returns a decimated version of all data within the specified filters. Eventually consistent.", + description="""TODO + Note: row order is not preserved.""", + operation_id="get_log_decimated", + responses={ + status.HTTP_404_NOT_FOUND: {"description": "log not found"}, + status.HTTP_422_UNPROCESSABLE_ENTITY: {"description": "log is not compatible with decimation"} + }) +async def get_log_decimated( + logid: str, + quantiles: int = Query(None, description="Number of division desired"), + start: float = Query(None, description="The start value for the log decimation"), + stop: float = Query(None, description="The stop value for the log decimation"), + orient: str = Depends(json_orient_parameter), + bulk_id_path: str = Depends(bulk_id_path_parameter), + persistence: Persistence = Depends(get_persistence), + ctx: Context = Depends(get_ctx)): + log_record = await fetch_record(ctx, logid) + + df = await persistence.read_bulk(ctx, log_record, bulk_id_path) + + # TODO : remove this after review what should be done with index column + if len(df.columns) == 1: + raise HTTPException(status_code=starlette.status.HTTP_400_BAD_REQUEST, + detail="data frame doesn't contain index") + + if df.dtypes[1] not in [np.float64, np.float32]: + raise HTTPException(status_code=422, detail="log is not compatible with decimation") + + # TODO: Make this async using dask distributed? 
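+    # Decimation sketch: the first column is treated as the reference channel.
+    # When start and stop are provided the frame is first windowed to that
+    # reference range; pd.qcut then buckets the reference values into
+    # `quantiles` bins of roughly equal counts, each bin is reduced to its
+    # mean, and only the first two columns are kept, so the response holds at
+    # most `quantiles` rows, with NaN means serialized as the string "NaN".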
+ if start is not None and stop is not None: + # get values between start and stop + window = df.set_index(0)[start:stop].reset_index() + else: + window = df + + # create groups + res = pd.qcut(window[0], q=quantiles) + groups = window.groupby([res]) + # get mean for each group + means = groups.mean()[[0, 1]] + # serialize + content = means.fillna("NaN").to_json(orient=orient) + + return Response(content=content, media_type=MimeTypes.JSON.type) diff --git a/app/routers/ddms_v2/logset_ddms_v2.py b/app/routers/ddms_v2/logset_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..79d8a8c195f7b69bb5c232b3c017426fee025ab9 --- /dev/null +++ b/app/routers/ddms_v2/logset_ddms_v2.py @@ -0,0 +1,122 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from fastapi import APIRouter, Depends, Query +import starlette.status as status +from starlette.responses import Response + +from app.clients.storage_service_client import get_storage_record_service +from app.clients.search_service_client import get_search_service +from odes_storage.models import * +from app.model.model_curated import logset +from app.utils import Context +from app.utils import get_ctx +from app.model.model_utils import to_record, from_record +from app.model.entity_utils import Entity + +import app.routers.ddms_v2.storage_helper as storage_helper + +router = APIRouter() + + +@router.get('/logsets/{logsetid}', + response_model=logset, + summary="Get the LogSet using wks:logSet:1.0.5 schema", + description="""Get the LogSet object using its **id**""", + operation_id="get_logset", + responses={status.HTTP_404_NOT_FOUND: {"description": "LogSet not found"}}, + response_model_exclude_unset=True) +async def get_logset( + logsetid: str, + ctx: Context = Depends(get_ctx) +) -> logset: + storage_client = await get_storage_record_service(ctx) + logset_record = await storage_client.get_record(id=logsetid, data_partition_id=ctx.partition_id) + return from_record(logset, logset_record) + + +@router.delete('/logsets/{logsetid}', + summary="Delete the LogSet. 
The API performs a logical deletion of the given record", + operation_id="del_logset", + status_code=status.HTTP_204_NO_CONTENT, + response_class=Response, + responses={ + status.HTTP_404_NOT_FOUND: {"description": "LogSet not found"}, + status.HTTP_204_NO_CONTENT: {"description": "Record deleted successfully"} + }) +async def del_logset( + logsetid: str, + recursive: bool = Query(default=False, description="Whether or not to delete records children"), + ctx: Context = Depends(get_ctx)): + storage_client = await get_storage_record_service(ctx) + if recursive: + await storage_helper.StorageHelper.delete_recursively( + ctx, + entity_id=logsetid, + relationship='logset', + entity_list=[Entity.LOG], + data_partition_id=ctx.partition_id, + search_service=await get_search_service(ctx), + storage_service=storage_client + ) + else: + await storage_client.delete_record(id=logsetid, data_partition_id=ctx.partition_id) + + +@router.get('/logsets/{logsetid}/versions', + response_model=RecordVersions, + summary="Get all versions of the logset", + operation_id="get_logset_versions", + responses={status.HTTP_404_NOT_FOUND: {"description": "LogSet not found"}}) +async def get_logset_versions( + logsetid: str, + ctx: Context = Depends(get_ctx) +) -> RecordVersions: + storage_client = await get_storage_record_service(ctx) + return await storage_client.get_all_record_versions(id=logsetid, data_partition_id=ctx.partition_id) + + +@router.get('/logsets/{logsetid}/versions/{version}', + response_model=logset, + summary="Get the given version of LogSet using wks:logSet:1.0.5 schema", + description=""""Get the LogSet object using its **id**.""", + operation_id="get_logset_version", + responses={status.HTTP_404_NOT_FOUND: {"description": "LogSet not found"}}, + response_model_exclude_unset=True) +async def get_logset_version( + logsetid: str, + version: int, + ctx: Context = Depends(get_ctx) +) -> logset: + storage_client = await get_storage_record_service(ctx) + result_logset = await storage_client.get_record_version(id=logsetid, + version=version, + data_partition_id=ctx.partition_id) + return from_record(logset, result_logset) + + +@router.post('/logsets', + response_model=CreateUpdateRecordsResponse, + summary="Create or update the LogSets using wks:logSet:1.0.5 schema", + operation_id="post_logset", + responses={ + status.HTTP_400_BAD_REQUEST: {"description": "Missing mandatory parameter or unknown parameter"}}) +async def post_logset( + logsets: List[logset], + ctx: Context = Depends(get_ctx) +) -> CreateUpdateRecordsResponse: + storage_client = await get_storage_record_service(ctx) + return await storage_client.create_or_update_records( + record=[to_record(lgset) for lgset in logsets], + data_partition_id=ctx.partition_id) diff --git a/app/routers/ddms_v2/marker_ddms_v2.py b/app/routers/ddms_v2/marker_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..a6135af738870312ea0d66d2e7ac1866c44b7a0b --- /dev/null +++ b/app/routers/ddms_v2/marker_ddms_v2.py @@ -0,0 +1,103 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from fastapi import APIRouter, Depends, Query +import starlette.status as status +from starlette.responses import Response + +from app.clients.storage_service_client import get_storage_record_service +from odes_storage.models import * +from app.model.model_curated import * +from app.utils import Context, get_ctx +from app.model.model_utils import to_record, from_record + +router = APIRouter() + + +@router.get('/markers/{markerid}', + response_model=marker, + summary="Get the marker using wks:marker:1.0.4 schema", + description="""Get the Marker object using its **id**""", + operation_id="get_marker", + responses={status.HTTP_404_NOT_FOUND: {"description": "marker not found"}}, + response_model_exclude_unset=True) +async def get_marker( + markerid: str, + ctx: Context = Depends(get_ctx) +) -> marker: + storage_client = await get_storage_record_service(ctx) + marker_record = await storage_client.get_record(id=markerid, data_partition_id=ctx.partition_id) + return from_record(marker, marker_record) + + +@router.delete('/markers/{markerid}', + summary="Delete the marker. The API performs a logical deletion of the given record", + operation_id="del_marker", + status_code=status.HTTP_204_NO_CONTENT, + response_class=Response, + responses={status.HTTP_404_NOT_FOUND: {"description": "Marker not found"}, + status.HTTP_204_NO_CONTENT: {"description": "Record deleted successfully"} + } + ) +async def del_marker( + markerid: str, + ctx: Context = Depends(get_ctx)): + storage_client = await get_storage_record_service(ctx) + await storage_client.delete_record(id=markerid, data_partition_id=ctx.partition_id) + + +@router.get('/markers/{markerid}/versions', + response_model=RecordVersions, + summary="Get all versions of the marker", + operation_id="get_marker_versions", + responses={status.HTTP_404_NOT_FOUND: {"description": "marker not found"}}) +async def get_marker_versions( + markerid: str, + ctx: Context = Depends(get_ctx) +) -> RecordVersions: + storage_client = await get_storage_record_service(ctx) + return await storage_client.get_all_record_versions(id=markerid, data_partition_id=ctx.partition_id) + + +@router.get('/markers/{markerid}/versions/{version}', + response_model=marker, + summary="Get the given version of marker using wks:marker:1.0.4 schema", + operation_id="get_marker_version", + responses={status.HTTP_404_NOT_FOUND: {"description": "marker not found"}}, + response_model_exclude_unset=True) +async def get_marker_version( + markerid: str, + version: int, + ctx: Context = Depends(get_ctx) +) -> marker: + storage_client = await get_storage_record_service(ctx) + result_marker = await storage_client.get_record_version(id=markerid, + version=version, + data_partition_id=ctx.partition_id) + return from_record(marker, result_marker) + + +@router.post('/markers', response_model=CreateUpdateRecordsResponse, + summary="Create or update the markers using wks:marker:1.0.4 schema", + operation_id="post_marker", + responses={ + status.HTTP_400_BAD_REQUEST: {"description": "Missing mandatory parameter or unknown parameter"}}) +async def post_marker( + markers: List[marker], + ctx: Context = Depends(get_ctx) +) -> CreateUpdateRecordsResponse: + storage_client = await get_storage_record_service(ctx) + return await storage_client.create_or_update_records( + record=[to_record(mk) for mk in markers], + data_partition_id=ctx.partition_id) diff --git a/app/routers/ddms_v2/persistence.py 
b/app/routers/ddms_v2/persistence.py new file mode 100644 index 0000000000000000000000000000000000000000..801b26902bb15f852191a1ac6a37a4e2369c24f1 --- /dev/null +++ b/app/routers/ddms_v2/persistence.py @@ -0,0 +1,41 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.bulk_persistence import create_and_store_dataframe +import pandas as pd +from app.bulk_persistence import get_dataframe + +from odes_storage.models import Record +from app.utils import Context + +from app.model.log_bulk import LogBulkHelper + + +class Persistence: + @classmethod + async def read_bulk( + cls, + ctx: Context, + record: Record, + bulk_id_path: str, + ) -> pd.DataFrame: + bulk_id = LogBulkHelper.get_bulk_id(record, bulk_id_path) + if bulk_id is None: + return pd.DataFrame() + + return await get_dataframe(ctx, bulk_id) + + @classmethod + async def write_bulk(cls, ctx: Context, dataframe) -> str: + return await create_and_store_dataframe(ctx, dataframe) diff --git a/app/routers/ddms_v2/storage_helper.py b/app/routers/ddms_v2/storage_helper.py new file mode 100644 index 0000000000000000000000000000000000000000..cf5864d59be421045ba9dfe0b1a8c7a20ff18192 --- /dev/null +++ b/app/routers/ddms_v2/storage_helper.py @@ -0,0 +1,134 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List +import asyncio +from functools import reduce +from collections import namedtuple + +import starlette.status as status +from fastapi import HTTPException +from odes_search.models import QueryRequest, CursorQueryResponse + +from app.routers.search import search_wrapper +from app.clients import SearchServiceClient, StorageRecordServiceClient +from app.model.entity_utils import Entity, format_kind, get_kind_meta +from app.utils import Context + + +class StorageHelper: + @staticmethod + def _status_code_from_exception(exp) -> int: + if not isinstance(exp, Exception): + return status.HTTP_200_OK + + # in order to get status code from various exception without explicitly typing it + return int(getattr(exp, 'status_code', status.HTTP_500_INTERNAL_SERVER_ERROR)) + + @staticmethod + async def delete_recursively( + ctx: Context, + entity_id: str, + relationship: str, + entity_list: List[Entity], + data_partition_id: str, + search_service: SearchServiceClient, + storage_service: StorageRecordServiceClient): + """ + Delete the given entity and all related entity that declares a relationship to that entity. 
+ :param ctx: Context + :param entity_id: id of the entity source + :param relationship: name of the relationship that refers the source entity. For instance relationship='well' + then the method will search for record 'data.relationships.well.id: "entity_id"' + :param entity_list: filter for entity type to delete aside the source entity + :param data_partition_id: + :param search_service: search client + :param storage_service: storage client + :return: None + """ + + record = await storage_service.get_record(entity_id, data_partition_id) + source = get_kind_meta(record.kind).source # use same source than the given entity ?? e.g. wks ? + + request = QueryRequest(kind=format_kind(data_partition_id, source, '*', '*'), + query=f'data.relationships.{relationship}.id: \"{entity_id}\"', + returned_fields=["id", "kind"]) + + aggregated_result: CursorQueryResponse = await search_wrapper.SearchWrapper.query_cursorless( + search_service=search_service, + data_partition_id=data_partition_id, + query_request=request + ) + + # gather ids only if entity type matches the given list + entities_to_remove = [ + entity for entity in aggregated_result.results + if get_kind_meta(entity["kind"]).entity_type in map(lambda i: i.value, entity_list) + ] + + # first delete the source entity, if it fail, we must not delete the others + await storage_service.delete_record(id=entity_id, data_partition_id=data_partition_id) + ctx.logger.debug(f'record {entity_id} successfully deleted') + + # execute all deletion concurrently, do not stop at first fail + delete_results = await asyncio.gather(*[ + storage_service.delete_record(id=entity['id'], data_partition_id=data_partition_id) + for entity in entities_to_remove + ], return_exceptions=True) + + # make list of entity result for error management + EntityResult = namedtuple('EntityResult', 'entity result status_code') + results = [ + EntityResult(entity=e, + result=r, + status_code=StorageHelper._status_code_from_exception(r)) + for e, r in zip(entities_to_remove, delete_results) + ] + + # log successfully deleted entities for debugging purposes + for r in filter(lambda r: r.status_code == status.HTTP_200_OK, results): + ctx.logger.debug(f'{r.entity["id"]} of kind {r.entity["kind"]} ' + f'successfully deleted (from recursive delete of {entity_id})') + + # warn for already deleted entity + for r in filter(lambda r: r.status_code == status.HTTP_404_NOT_FOUND, results): + ctx.logger.warning(f'entity {r.entity["id"]} of kind {r.entity["kind"]} was already deleted') + + # errors treatment (i.e. 
not 200, not 404), gather them by status + in_errors = list(filter( + lambda r: r.status_code not in [status.HTTP_200_OK, status.HTTP_404_NOT_FOUND], + results + )) + + # log errors + for r in in_errors: + ctx.logger.error(f'error on deleted entity {r.entity["id"]} of kind {r.entity["kind"]},' + f'status code: {r.status_code}, detail: {str(r.result)}') + + if len(in_errors) == 1: # a single error, just forward + raise in_errors[0] + + if len(in_errors) > 1: + distinct_error_statuses = list({r.status_code for r in in_errors}) + if len(distinct_error_statuses) == 1: + # for homogenous status code, keep the same + final_status_code = distinct_error_statuses[0] + else: + # for heterogeneous status code, set to 500 + final_status_code = status.HTTP_500_INTERNAL_SERVER_ERROR + + raise HTTPException( + status_code=final_status_code, + # build detail from all distinct (not empty) error messages + detail='Errors: ' + ', '.join({str(r.result) for r in in_errors if str(r.result)}) + '.') diff --git a/app/routers/ddms_v2/well_ddms_v2.py b/app/routers/ddms_v2/well_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..dc5d917341a501f568ce56e2bf04a760fc02814f --- /dev/null +++ b/app/routers/ddms_v2/well_ddms_v2.py @@ -0,0 +1,139 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from fastapi import APIRouter, Depends, Query +import starlette.status as status +from starlette.responses import Response + +from app.clients.storage_service_client import get_storage_record_service +from app.clients.search_service_client import get_search_service +from odes_storage.models import * +from app.model.model_curated import * +from app.utils import Context +from app.utils import get_ctx +from app.model.model_utils import to_record, from_record +from app.model.entity_utils import Entity + +import app.routers.ddms_v2.storage_helper as storage_helper + +router = APIRouter() + + +@router.get('/wells/{wellid}', response_model=well, + summary="Get the Well using wks:well:1.0.2 schema", + description="""Get the Well object using its **id**. <p>If the well kind is + *wks:well:1.0.2* returns the record directly</p> <p>If the well + kind is different *wks:well:1.0.2* it will get the raw record and + convert the results to match the *wks:well:1.0.2*. If convertion is + not possible returns an error **500**""", + operation_id="get_well", + responses={status.HTTP_404_NOT_FOUND: {"description": "Well not found"}}, + response_model_exclude_unset=True) +async def get_well( + wellid: str, + ctx: Context = Depends(get_ctx) +) -> well: + storage_client = await get_storage_record_service(ctx) + well_record = await storage_client.get_record(id=wellid, data_partition_id=ctx.partition_id) + return from_record(well, well_record) + + +@router.delete('/wells/{wellid}', + summary="Delete the well. 
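A short client-side sketch of the recursive delete flow that StorageHelper.delete_recursively (above) implements for the well delete endpoint defined here. Host, prefix, headers and the record id are assumptions:

```python
# Hypothetical recursive delete of a well: the well record itself is removed
# first, then every record whose data.relationships.well.id references it and
# whose entity type is in the allowed child list (wellbore, logset, log,
# marker, trajectory, dipset). Host/prefix/headers/id are illustrative.
import httpx

BASE_URL = "https://example.com/api/os-wellbore-ddms/ddms/v2"  # assumed prefix
HEADERS = {"Authorization": "Bearer <token>", "data-partition-id": "opendes"}

resp = httpx.delete(
    f"{BASE_URL}/wells/opendes:doc:well-example",
    params={"recursive": "true"},
    headers=HEADERS,
)
assert resp.status_code == 204  # logical deletion, no content returned
```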
The API performs a logical deletion of the given record", + operation_id="del_well", + status_code=status.HTTP_204_NO_CONTENT, + response_class=Response, + responses={status.HTTP_404_NOT_FOUND: {"description": "Well not found"}, + status.HTTP_204_NO_CONTENT: {"description": "Record deleted successfully"} + } + ) +async def del_well( + wellid: str, + recursive: bool = Query(default=False, description="Whether or not to delete records children"), + ctx: Context = Depends(get_ctx)): + storage_client = await get_storage_record_service(ctx) + if recursive: + + sub_entity_types = [ + Entity.WELLBORE, + Entity.LOGSET, + Entity.LOG, + Entity.MARKER, + Entity.TRAJECTORY, + Entity.DIPSET + ] + + await storage_helper.StorageHelper.delete_recursively( + ctx, + entity_id=wellid, + relationship='well', + entity_list=sub_entity_types, + data_partition_id=ctx.partition_id, + search_service=await get_search_service(ctx), + storage_service=storage_client + ) + else: + await storage_client.delete_record(id=wellid, data_partition_id=ctx.partition_id) + + +@router.get('/wells/{wellid}/versions', + response_model=RecordVersions, + summary="Get all versions of the Well", + operation_id="get_well_versions", + responses={status.HTTP_404_NOT_FOUND: {"description": "Well not found"}}) +async def get_well_versions( + wellid: str, + ctx: Context = Depends(get_ctx) +) -> RecordVersions: + storage_client = await get_storage_record_service(ctx) + return await storage_client.get_all_record_versions(id=wellid, data_partition_id=ctx.partition_id) + + +@router.get('/wells/{wellid}/versions/{version}', + response_model=well, + summary="Get the given version of the Well using wks:well:1.0.2 schema", + description=""""Get the Well object using its **id**. <p>If the well kind is + *wks:well:1.0.2* returns the record directly</p> <p>If the well + kind is different *wks:well:1.0.2* it will get the raw record and + convert the results to match the *wks:well:1.0.2*. If convertion is + not possible returns an error **500**""", + operation_id="get_well_version", + responses={status.HTTP_404_NOT_FOUND: {"description": "Well not found"}}, + response_model_exclude_unset=True) +async def get_well_version( + wellid: str, + version: int, + ctx: Context = Depends(get_ctx) +) -> well: + storage_client = await get_storage_record_service(ctx) + well_record = await storage_client.get_record_version(id=wellid, + version=version, + data_partition_id=ctx.partition_id) + return from_record(well, well_record) + + +@router.post('/wells', + response_model=CreateUpdateRecordsResponse, + summary="Create or update the Wells using wks:well:1.0.2 schema", + operation_id="post_well", + responses={ + status.HTTP_400_BAD_REQUEST: {"description": "Missing mandatory parameter or unknown parameter"}}) +async def post_well( + wells: List[well], + ctx: Context = Depends(get_ctx) +) -> CreateUpdateRecordsResponse: + storage_client = await get_storage_record_service(ctx) + return await storage_client.create_or_update_records( + record=[to_record(w) for w in wells], + data_partition_id=ctx.partition_id) diff --git a/app/routers/ddms_v2/wellbore_ddms_v2.py b/app/routers/ddms_v2/wellbore_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..8b63fa062799b807868cda7053e1bd74878ddfcc --- /dev/null +++ b/app/routers/ddms_v2/wellbore_ddms_v2.py @@ -0,0 +1,129 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from fastapi import APIRouter, Depends, Query +import starlette.status as status +from starlette.responses import Response + +from app.clients.storage_service_client import get_storage_record_service +from app.clients.search_service_client import get_search_service +from odes_storage.models import * +from app.model.model_curated import * +from app.utils import Context +from app.utils import get_ctx +from app.model.model_utils import to_record, from_record +from app.model.entity_utils import Entity + +import app.routers.ddms_v2.storage_helper as storage_helper + +router = APIRouter() + + +@router.get('/wellbores/{wellboreid}', response_model=wellbore, + summary="Get the Wellbore using wks:wellbore:1.0.6 schema", + description="""Get the Wellbore object using its **id**. <p>If the wellbore kind is + *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore + kind is different *wks:wellbore:1.0.6* it will get the raw record and + convert the results to match the *wks:wellbore:1.0.6*. If convertion is + not possible returns an error **500**""", + operation_id="get_wellbore", + responses={status.HTTP_404_NOT_FOUND: {"description": "Wellbore not found"}}, + response_model_exclude_unset=True) +async def get_wellbore( + wellboreid: str, + ctx: Context = Depends(get_ctx) +) -> wellbore: + storage_client = await get_storage_record_service(ctx) + wellbore_record = await storage_client.get_record(id=wellboreid, data_partition_id=ctx.partition_id) + return from_record(wellbore, wellbore_record) + + +@router.delete('/wellbores/{wellboreid}', + summary="Delete the wellbore. 
The API performs a logical deletion of the given record", + operation_id="del_wellbore", + status_code=status.HTTP_204_NO_CONTENT, + response_class=Response, + responses={status.HTTP_404_NOT_FOUND: {"description": "Wellbore not found"}, + status.HTTP_204_NO_CONTENT: {"description": "Record deleted successfully"} + } + ) +async def del_wellbore( + wellboreid: str, + recursive: bool = Query(default=False, description="Whether or not to delete records children"), + ctx: Context = Depends(get_ctx)): + storage_client = await get_storage_record_service(ctx) + if recursive: + await storage_helper.StorageHelper.delete_recursively( + ctx, + entity_id=wellboreid, + relationship='wellbore', + entity_list=[Entity.LOGSET, Entity.LOG, Entity.MARKER], + data_partition_id=ctx.partition_id, + search_service=await get_search_service(ctx), + storage_service=storage_client + ) + else: + await storage_client.delete_record(id=wellboreid, data_partition_id=ctx.partition_id) + + +@router.get('/wellbores/{wellboreid}/versions', + response_model=RecordVersions, + summary="Get all versions of the Wellbore", + operation_id="get_wellbore_versions", + responses={status.HTTP_404_NOT_FOUND: {"description": "Wellbore not found"}}) +async def get_wellbore_versions( + wellboreid: str, + ctx: Context = Depends(get_ctx) +) -> RecordVersions: + storage_client = await get_storage_record_service(ctx) + return await storage_client.get_all_record_versions(id=wellboreid, data_partition_id=ctx.partition_id) + + +@router.get('/wellbores/{wellboreid}/versions/{version}', + response_model=wellbore, + summary="Get the given version of the Wellbore using wks:wellbore:1.0.6 schema", + description=""""Get the Wellbore object using its **id**. <p>If the wellbore kind is + *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore + kind is different *wks:wellbore:1.0.6* it will get the raw record and + convert the results to match the *wks:wellbore:1.0.6*. 
If convertion is + not possible returns an error **500**""", + operation_id="get_wellbore_version", + responses={status.HTTP_404_NOT_FOUND: {"description": "Wellbore not found"}}, + response_model_exclude_unset=True) +async def get_wellbore_version( + wellboreid: str, + version: int, + ctx: Context = Depends(get_ctx) +) -> wellbore: + storage_client = await get_storage_record_service(ctx) + wellbore_record = await storage_client.get_record_version(id=wellboreid, + version=version, + data_partition_id=ctx.partition_id) + return from_record(wellbore, wellbore_record) + + +@router.post('/wellbores', + response_model=CreateUpdateRecordsResponse, + summary="Create or update the Wellbores using wks:wellbore:1.0.6 schema", + operation_id="post_wellbore", + responses={ + status.HTTP_400_BAD_REQUEST: {"description": "Missing mandatory parameter or unknown parameter"}}) +async def post_wellbore( + wellbores: List[wellbore], + ctx: Context = Depends(get_ctx) +) -> CreateUpdateRecordsResponse: + storage_client = await get_storage_record_service(ctx) + return await storage_client.create_or_update_records( + record=[to_record(w) for w in wellbores], + data_partition_id=ctx.partition_id) diff --git a/app/routers/dipset/__init__.py b/app/routers/dipset/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/routers/dipset/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/routers/dipset/dip_ddms_v2.py b/app/routers/dipset/dip_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..79fec87302afd51b792dce532def19238f798a05 --- /dev/null +++ b/app/routers/dipset/dip_ddms_v2.py @@ -0,0 +1,178 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math +from typing import List, Optional + +import starlette.status as status +from fastapi import APIRouter, Depends, Path, Query + +import app.routers.dipset.persistence as persistence +from app.routers.dipset.dip_model import Dip +from app.utils import Context, get_ctx + +# TODO reference: setup reference type family, unit ... +# TODO setup dipset channels (family, family_type, unit, format of dip attributs) +# TODO logger : should log information when retrieving/creating dipset, log, ... 
+# TODO dispet should have a wellbore (data.relationships.wellbore) + +router = APIRouter() + + +@router.post( + "/dipsets/{dipsetid}/dips", + summary="Define the dips of the dipset", + response_model=List[Dip], + response_model_exclude_none=True, + description="""Replace previous dips by provided dips. Sort dips by reference and azimuth.""", + operation_id="post_dips", +) +async def post_dips( + dips: List[Dip], dipsetid: str = Path(..., description="The ID of the dipset"), ctx: Context = Depends(get_ctx) +) -> List[Dip]: + df = persistence.dips_to_df(dips) + await persistence.write_dipset_data(ctx, dataframe=df, ds=dipsetid) + return persistence.df_to_dips(df) + + +@router.post( + "/dipsets/{dipsetid}/dips/insert", + summary="insert dip in a dipset", + response_model=List[Dip], + response_model_exclude_none=True, + description="""Insert dips in dipset. + Existing dips are not replaced. + Several dip can have same reference. + Operation will sort by reference all dips in dipset (may modify dip indexes).""", + operation_id="insert_dips", +) +async def insert_dips( + dips: List[Dip], dipsetid: str, ctx: Context = Depends(get_ctx)) -> List[Dip]: + my_dipset, df = await persistence.read_dipset_data(ctx, ds=dipsetid) + df = df.append(persistence.dips_to_df(dips)) + await persistence.write_dipset_data(ctx, dataframe=df, ds=my_dipset) + return persistence.df_to_dips(df) + + +@router.get( + "/dipsets/{dipsetid}/dips", + summary="Get dips", + response_model=Optional[List[Dip]], + response_model_exclude_none=True, + description="""Return dips from dipset from the given index until the given number of dips specifed in query parameters. + If not specified returns all dips from dipset.""", + operation_id="get_dips", + responses={status.HTTP_404_NOT_FOUND: {"description": "DipSet not found"}}, +) +async def get_dips( + dipsetid: str, + index: Optional[int] = Query(None, ge=0), + limit: Optional[int] = Query(None, ge=0), + ctx: Context = Depends(get_ctx), +) -> List[Dip]: + + _, df = await persistence.read_dipset_data(ctx, ds=dipsetid) + + start = 0 + if index is not None: + start = index + + end = None + if limit is not None: + end = start + limit + df = df.iloc[slice(start, end)] + + return persistence.df_to_dips(df) + + +@router.get( + "/dipsets/{dipsetid}/dips/query", + summary="Query dip from dipset", + response_model=List[Dip], + response_model_exclude_none=True, + description="""Search dip within reference interval and specific classification""", + operation_id="query_dip", +) +async def query_dip( + dipsetid: str, + min_ref: Optional[float] = Query( + None, description="Min reference for the dips to search in the dipset", alias="minReference" + ), + max_ref: Optional[float] = Query( + None, title="Max reference for the dips to search in the dipset", alias="maxReference" + ), + classification: Optional[str] = Query(None, title="Classification for the dip to search in the dipset"), + ctx: Context = Depends(get_ctx), +) -> List[Dip]: + _, df = await persistence.read_dipset_data(ctx, ds=dipsetid) + + if classification is not None: + df = df[df["classification"] == classification] + if min_ref is not None and not math.isnan(min_ref): + df = df[df["reference"] >= min_ref] + if max_ref is not None and not math.isnan(max_ref): + df = df[df["reference"] <= max_ref] + + return persistence.df_to_dips(df) + + +@router.get( + "/dipsets/{dipsetid}/dips/{index}", + summary="Get a dip at index", + response_model=Dip, + response_model_exclude_none=True, + description=""""Return dip from dipset at the given 
index""", + operation_id="get_dip_by_index", + responses={status.HTTP_404_NOT_FOUND: {"description": "DipSet or index not found"}}, +) +async def get_dip_by_index(dipsetid: str, index: int, ctx: Context = Depends(get_ctx)) -> Dip: + _, df = await persistence.read_dipset_data(ctx, ds=dipsetid) + return persistence.series_to_dip(df.iloc[index]) + + +@router.patch( + "/dipsets/{dipsetid}/dips/{index}", + summary="Update dip", + response_model=List[Dip], + response_model_exclude_none=True, + description=""""Update dip at index + Operation will sort by reference all dips in dipset (may modify dip indexes).""", + operation_id="patch_dip", + responses={status.HTTP_404_NOT_FOUND: {"description": "DipSet not found"}}, +) +async def patch_dip(dip: Dip, dipsetid: str, index: int, ctx: Context = Depends(get_ctx)) -> List[Dip]: + # TODO input validation 0 <= index < size + my_dipset, df = await persistence.read_dipset_data(ctx, ds=dipsetid) + # Update the data + df.iloc[index] = persistence.dip_to_series(dip) + + await persistence.write_dipset_data(ctx, dataframe=df, ds=my_dipset) + return persistence.df_to_dips(df) + + +@router.delete( + "/dipsets/{dipsetid}/dips/{index}", + summary="Delete a dip", + response_model=List[Dip], + response_model_exclude_none=True, + description="Removes the dip at index", + operation_id="delete_dip_by_index", + responses={status.HTTP_404_NOT_FOUND: {"description": "DipSet or index not found"}}, +) +async def delete_dip_by_index(dipsetid: str, index: int, ctx: Context = Depends(get_ctx)) -> List[Dip]: + # TODO input validation 0 <= index < size + my_dipset, df = await persistence.read_dipset_data(ctx, ds=dipsetid) + df.drop(index=index, inplace=True) + await persistence.write_dipset_data(ctx, dataframe=df, ds=my_dipset) + return persistence.df_to_dips(df) diff --git a/app/routers/dipset/dip_model.py b/app/routers/dipset/dip_model.py new file mode 100644 index 0000000000000000000000000000000000000000..17a3683c6d109cbf087d5e54eacf01e8e7d8fa50 --- /dev/null +++ b/app/routers/dipset/dip_model.py @@ -0,0 +1,95 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional + +from pydantic import BaseModel, Field, validator + +from app.model.model_curated import ValueWithUnit + +meter_alias = ["m", "meter", "Meter", "METER", "meters", "Meters", "METERS"] +dega_alias = ["dega", "Dega", "DEGA"] +unitless_alias = ["unitless", "Unitless", "UNITLESS", "UnitLess"] + + +def value_must_be_in_meter(cls, v): + if v is not None and v.unitKey not in meter_alias: + raise ValueError("unit must be meter") + return v + + +def value_must_be_in_dega(cls, v): + if v is not None and v.unitKey not in dega_alias: + raise ValueError("unit must be dega") + return v + + +class Dip(BaseModel): + reference: ValueWithUnit = Field( + ..., title="Reference of the dip", description="Only Measured Depth in meter is supported for the moment", + ) + azimuth: ValueWithUnit = Field( + ..., title="Azimuth value of the dip", description="Only degrees unit is supported for the moment" + ) + inclination: ValueWithUnit = Field( + ..., title="Inclination value of the dip", description="Only degrees unit is supported for the moment", + ) + quality: Optional[ValueWithUnit] = Field( + None, + title="Quality of the dip", + description="Decimal number between 0 and 1", + exclusiveMinimum=0, + exclusiveMaximum=1, + ) + xCoordinate: Optional[ValueWithUnit] = Field( + None, title="The X coordinate of the dip", description="Only meter unit is supported for the moment" + ) + yCoordinate: Optional[ValueWithUnit] = Field( + None, title="The Y coordinate of the dip", description="Only meter unit is supported for the moment" + ) + zCoordinate: Optional[ValueWithUnit] = Field( + None, title="The Z coordinate of the dip", description="Only meter unit is supported for the moment" + ) + classification: Optional[str] = Field( + None, title="Classification of the dip", description="Any string is accepted." 
+ ) + + _reference_unit_validator = validator("reference", allow_reuse=True)(value_must_be_in_meter) + _x_coordinate_unit_validator = validator("xCoordinate", allow_reuse=True)(value_must_be_in_meter) + _y_coordinate_unit_validator = validator("yCoordinate", allow_reuse=True)(value_must_be_in_meter) + _z_coordinate_unit_validator = validator("zCoordinate", allow_reuse=True)(value_must_be_in_meter) + _azimuth_unit_validator = validator("azimuth", allow_reuse=True)(value_must_be_in_dega) + _inclination_unit_validator = validator("inclination", allow_reuse=True)(value_must_be_in_dega) + + @validator("quality") + def quality_validator(cls, v): + if v is not None and (v.value < 0 or v.value > 1): + raise ValueError("value must be greater or egal to 0 and less or egal to 1") + if v is not None and v.unitKey not in unitless_alias: + raise ValueError("unit must be unitless") + return v + + class Config: + schema_extra = { + "example": { + "reference": {"unitKey": "meter", "value": 1000.5}, + "azimuth": {"unitKey": "dega", "value": 42}, + "inclination": {"unitKey": "dega", "value": 9}, + "quality": {"unitKey": "unitless", "value": 0.5}, + "xCoordinate": {"unitKey": "meter", "value": 2}, + "yCoordinate": {"unitKey": "meter", "value": 45}, + "zCoordinate": {"unitKey": "meter", "value": 7}, + "classification": "fracture", + } + } diff --git a/app/routers/dipset/dipset_ddms_v2.py b/app/routers/dipset/dipset_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..8c842d3b40898222fc8192c878b7698b781bd5c9 --- /dev/null +++ b/app/routers/dipset/dipset_ddms_v2.py @@ -0,0 +1,115 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
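Editor's note: the Dip model above enforces units through pydantic v1 reusable validators, where one plain function is attached to several fields with allow_reuse=True. A self-contained sketch of the same pattern, using stand-in classes rather than the service's ValueWithUnit and Dip:

from typing import Optional
from pydantic import BaseModel, ValidationError, validator

METER_ALIAS = {"m", "meter", "Meter", "METER", "meters", "Meters", "METERS"}

class ValueWithUnit(BaseModel):
    unitKey: str
    value: float

def value_must_be_in_meter(cls, v):
    # shared check: any depth-like value must be expressed in meters
    if v is not None and v.unitKey not in METER_ALIAS:
        raise ValueError("unit must be meter")
    return v

class DipSketch(BaseModel):
    reference: ValueWithUnit
    xCoordinate: Optional[ValueWithUnit] = None

    # the same function guards both fields thanks to allow_reuse=True
    _reference_unit = validator("reference", allow_reuse=True)(value_must_be_in_meter)
    _x_coordinate_unit = validator("xCoordinate", allow_reuse=True)(value_must_be_in_meter)

try:
    DipSketch(reference={"unitKey": "ft", "value": 100.0})
except ValidationError as err:
    print(err)  # rejected: unit must be meter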
+ +import starlette.status as status +from fastapi import APIRouter, Depends, Query +from odes_storage.models import * +from starlette.responses import Response + +import app.routers.ddms_v2.storage_helper as storage_helper +from app.clients.search_service_client import get_search_service +from app.clients.storage_service_client import get_storage_record_service +from app.model.model_curated import dipset +from app.model.model_utils import from_record, to_record +from app.model.entity_utils import Entity +from app.utils import Context, get_ctx + +router = APIRouter() +@router.post( + "/dipsets", + response_model=CreateUpdateRecordsResponse, + summary="Create or update the DipSets using wks:dipSet:1.0.0 schema", + operation_id="post_dipset", + responses={status.HTTP_400_BAD_REQUEST: {"description": "Missing mandatory parameter or unknown parameter"}}, +) +async def post_dipset(dipsets: List[dipset], ctx: Context = Depends(get_ctx)) -> CreateUpdateRecordsResponse: + # TODO disallow creation of a dipset without wellbore + + storage_client = await get_storage_record_service(ctx) + record = await storage_client.create_or_update_records( + record=[to_record(dipset) for dipset in dipsets], data_partition_id=ctx.partition_id + ) + return record + + +@router.get( + "/dipsets/{dipsetid}/versions/{version}", + response_model=dipset, + summary="Get the given version of DipSet using wks:dipset:1.0.0 schema", + description=""""Get the DipSet object using its **id**.""", + operation_id="get_dipset_version", + responses={status.HTTP_404_NOT_FOUND: {"description": "DipSet not found"}}, + response_model_exclude_unset=True +) +async def get_dipset_version(dipsetid: str, version: int, ctx: Context = Depends(get_ctx)) -> dipset: + storage_client = await get_storage_record_service(ctx) + result = await storage_client.get_record_version(id=dipsetid, version=version, + data_partition_id=ctx.partition_id) + return from_record(dipset, result) + + +@router.get( + "/dipsets/{dipsetid}/versions", + response_model=RecordVersions, + summary="Get all versions of the dipset", + operation_id="get_dipset_versions", + responses={status.HTTP_404_NOT_FOUND: {"description": "DipSet not found"}}, +) +async def get_dipset_versions(dipsetid: str, ctx: Context = Depends(get_ctx)) -> RecordVersions: + storage_client = await get_storage_record_service(ctx) + return await storage_client.get_all_record_versions(id=dipsetid, data_partition_id=ctx.partition_id) + + +@router.get( + "/dipsets/{dipsetid}", + response_model=dipset, + summary="Get the DipSet using wks:dipSet:1.0.0 schema", + description="""Get the DipSet object using its **id**""", + operation_id="get_dipset", + responses={status.HTTP_404_NOT_FOUND: {"description": "DipSet not found"}}, + response_model_exclude_unset=True +) +async def get_dipset(dipsetid: str, ctx: Context = Depends(get_ctx)) -> dipset: + storage_client = await get_storage_record_service(ctx) + record = await storage_client.get_record(id=dipsetid, data_partition_id=ctx.partition_id) + return from_record(dipset, record) + + +@router.delete( + "/dipsets/{dipsetid}", + summary="Delete the DipSet. 
The API performs a logical deletion of the given record", + operation_id="del_dipset", + status_code=status.HTTP_204_NO_CONTENT, + response_class=Response, + responses={ + status.HTTP_404_NOT_FOUND: {"description": "DipSet not found"}, + status.HTTP_204_NO_CONTENT: {"description": "Record deleted successfully"}, + }, +) +async def del_dipset(dipsetid: str, + recursive: bool = Query(default=False, description="Whether or not to delete records children"), + ctx: Context = Depends(get_ctx)): + storage_client = await get_storage_record_service(ctx) + if recursive: + await storage_helper.StorageHelper.delete_recursively( + ctx, + entity_id=dipsetid, + relationship="dipset", + entity_list=[Entity.LOG], + data_partition_id=ctx.partition_id, + search_service=await get_search_service(ctx), + storage_service=storage_client, + ) + else: + await storage_client.delete_record(id=dipsetid, data_partition_id=ctx.partition_id) diff --git a/app/routers/dipset/persistence.py b/app/routers/dipset/persistence.py new file mode 100644 index 0000000000000000000000000000000000000000..6e1eb1ae7c3f53b9bcb4e0c262b15999a23503f9 --- /dev/null +++ b/app/routers/dipset/persistence.py @@ -0,0 +1,325 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import math +from typing import List, NamedTuple, Tuple, Union + +import pandas as pd +import starlette.status as status +from fastapi import HTTPException +from odes_storage import UnexpectedResponse + +from app.clients.storage_service_client import get_storage_record_service +from app.model import entity_utils +from app.model.entity_utils import Entity +from app.model.model_curated import ( + ToOneRelationship, + ValueWithUnit, + dipset, + dipsetrelationships, + log, + logchannel, + logData, + logRelationships, +) +from app.model.model_utils import from_record, to_record +from app.routers.dipset.dip_model import Dip +from app.bulk_persistence import get_dataframe, create_and_store_dataframe, BulkId + +async def create_missing_logs(ctx, my_dipset: dipset): + """ + Creates missing log in storage and update dipset record accordingly + return dispset record + """ + + # TODO error management log creation failed + + class LogMeta(NamedTuple): + name: str + unit: str + family_type: str + family: str + format: str + data_type: str + dipset_relationships: str + + log_meta = { + "reference": LogMeta( + name="reference", + unit="meter", + family_type="Reference", + family="Measured Depth", + format="float64", + data_type="number", + dipset_relationships="referenceLog", + ), + "azimuth": LogMeta( + name="trueDipAzimuth", + unit="dega", + family_type="Formation Geometry", + family="True Dip Azimuth", + format="float32", + data_type="number", + dipset_relationships="trueDipAzimuthLog", + ), + "inclination": LogMeta( + name="trueDipInclination", + unit="dega", + family_type="Formation Geometry", + family="True Dip Inclination", + format="float32", + data_type="number", + dipset_relationships="trueDipInclinationLog", + ), + "xCoordinate": LogMeta( + 
name="xCoordinate", + unit="meter", + family_type="Reference", + family="X Coordinate", + format="float32", + data_type="number", + dipset_relationships="xCoordinateLog", + ), + "yCoordinate": LogMeta( + name="yCoordinate", + unit="meter", + family_type="Reference", + family="Y Coordinate", + format="float32", + data_type="number", + dipset_relationships="yCoordinateLog", + ), + "zCoordinate": LogMeta( + name="zCoordinate", + unit="meter", + family_type="Reference", + family="Z Coordinate", + format="float32", + data_type="number", + dipset_relationships="zCoordinateLog", + ), + "quality": LogMeta( + name="quality", + unit="unitless", + family_type="Borehole Image", + family="Dip Quality", + format="float32", + data_type="number", + dipset_relationships="qualityLog", + ), + "classification": LogMeta( + name="classification", + unit="unitless", + family_type="Borehole Image", + family="Dip classification", + format="", + data_type="string", + dipset_relationships="classificationLog", + ), + } + + def create_log_record(meta: LogMeta) -> log: + log_channel = logchannel( + name=meta.name, + dimension=1, + unitKey=meta.unit, + dataType=meta.data_type, + family=meta.family, + familyType=meta.family_type, + ) + if meta.format: + log_channel.format = meta.format + + dip_kind_metadata = entity_utils.get_kind_meta(my_dipset.kind) + log_kind = entity_utils.get_kind( + dip_kind_metadata.data_partition_id, + dip_kind_metadata.source, + Entity.LOG) + + return log( + acl=my_dipset.acl, + legal=my_dipset.legal, + kind=log_kind, + data=logData(name=f"name", operation="wddms_dipset", log=log_channel, + relationships=logRelationships(logSet=ToOneRelationship(id=my_dipset.id))), + ) + + # Add data.relationships to the dipset record + if not my_dipset.data.relationships: + # according to the schema data.relationships is not mandatory + # where as data.relationships.wellbore is mandatory + # since we will add relationships to the log we must set a relationships.wellbore + # TODO force the client to set a wellbore. + my_dipset.data.relationships = dipsetrelationships(wellbore="") + + # Find missing logs to be created + records = [ + to_record(create_log_record(meta)) + for k, meta in log_meta.items() + if getattr(my_dipset.data.relationships, meta.dipset_relationships, None) is None + ] + + # Create logs + if len(records) > 0: + storage_client = await get_storage_record_service(ctx) + # Creating logs + # TODO check create log responses (should have the right update_record_responses number of logs) + create_logs_response = await storage_client.create_or_update_records( + data_partition_id=ctx.partition_id, record=records + ) + + # TODO doesn't work in case only some log need to be created!!!!! 
+ # Update dipset + for idx, (k, meta) in enumerate(log_meta.items()): + setattr( + my_dipset.data.relationships, + meta.dipset_relationships, + ToOneRelationship(id=create_logs_response.record_ids[idx]), + ) + + +def dip_to_series(dip: Dip) -> pd.Series: + # TODO performance and code duplication with dips_to_df + data = {} + for member, _ in dip.__fields__.items(): + data[member] = None + if getattr(dip, member, None) is not None: + if isinstance(getattr(dip, member), ValueWithUnit): + data[member] = getattr(dip, member).value + else: + data[member] = getattr(dip, member) + + s = pd.Series(data) + return s + + +def series_to_dip(row: pd.Series): + # TODO refactor, error prone + + return Dip( + reference=ValueWithUnit(unitKey="meter", value=row["reference"]) + if row.get("reference") and not math.isnan(row["reference"]) + else None, + azimuth=ValueWithUnit(unitKey="dega", value=row["azimuth"]) + if row.get("azimuth") and not math.isnan(row["azimuth"]) + else None, + inclination=ValueWithUnit(unitKey="dega", value=row["inclination"]) + if row.get("inclination") and not math.isnan(row["inclination"]) + else None, + quality=ValueWithUnit(unitKey="unitless", value=row["quality"]) + if row.get("quality") and not math.isnan(row["quality"]) + else None, + xCoordinate=ValueWithUnit(unitKey="meter", value=row["xCoordinate"]) + if row.get("xCoordinate") and not math.isnan(row["xCoordinate"]) + else None, + yCoordinate=ValueWithUnit(unitKey="meter", value=row["yCoordinate"]) + if row.get("yCoordinate") and not math.isnan(row["yCoordinate"]) + else None, + zCoordinate=ValueWithUnit(unitKey="meter", value=row["zCoordinate"]) + if row.get("zCoordinate") and not math.isnan(row["zCoordinate"]) + else None, + classification=row.get("classification"), + ) + + +def dips_to_df(dips: List[Dip]) -> pd.DataFrame: + # TODO performance and code duplication with dip_to_series + + data = {} + for member, _ in dips[0].__fields__.items(): + data[member] = [] + for dip in dips: + v = None + if getattr(dip, member, None) is not None: + if isinstance(getattr(dip, member), ValueWithUnit): + v = getattr(dip, member).value + else: + v = getattr(dip, member) + data[member].append(v) + + return pd.DataFrame(data) + + +def df_to_dips(dataframe: pd.DataFrame) -> List[Dip]: + return [series_to_dip(row) for index, row in dataframe.iterrows()] + + +#TODO refactor duplicate with trajectory +async def write_bulk(ctx, dataframe: pd.DataFrame) -> str: + bulk_id = await create_and_store_dataframe(ctx, dataframe) + return BulkId.bulk_urn_encode(bulk_id) + + +async def write_dipset_data(ctx, dataframe: pd.DataFrame, ds: Union[dipset, str]) -> dipset: + # TODO input validation & error management + + my_dipset = await fetch_dipset(ctx, ds) if not isinstance(ds, dipset) else ds + + # Sort data by reference and azimuth + dataframe.sort_values(by=["reference", "azimuth"], inplace=True, ignore_index=True) + + # Write data in storage and update dipset bulk URI + my_dipset.data.bulkURI = await write_bulk(ctx, dataframe) + + # Create or update logs + await create_missing_logs(ctx, my_dipset) + + # Update dipset record + storage_client = await get_storage_record_service(ctx) + await storage_client.create_or_update_records( + data_partition_id=ctx.partition_id, record=[to_record(my_dipset)] + ) + + return my_dipset + + +async def read_dipset_data(ctx, ds: Union[dipset, str]) -> Tuple[dipset, pd.DataFrame]: + """Gets the bulk data for the dipset + + Create or update the log associated to the dipset and return the updated dipset + + Args: + 
Union[dipset, str]: dipset record or dipset ID to get the bulkd data + + Returns: + Tuple[dipset, pandas.DataFrame]: updated dipset record and dataframe containing bulkd data for the specified record + + Raises: + HTTPException: 404 record is not found + """ + my_dipset = await fetch_dipset(ctx, ds) if not isinstance(ds, dipset) else ds + + if my_dipset.data is None or my_dipset.data.bulkURI is None: # what about empty string ? + return my_dipset, pd.DataFrame() + + # Fetch data + df = await get_dataframe(ctx, BulkId.bulk_urn_decode(my_dipset.data.bulkURI)) + + return my_dipset, df + + +async def fetch_dipset(ctx, dipsetid: str) -> dipset: + """Fetch the dipset record + check dip logs and create the missing one""" + # TODO error management fetch dipset + # TODO input validation dipset record should have data, data.relationships, data.relationships.wellbore + + storage_client = await get_storage_record_service(ctx) + try: # TODO creating a custom exception for instance RecordNotFoundException + storage_record = await storage_client.get_record(id=dipsetid, + data_partition_id=ctx.partition_id) + except UnexpectedResponse as unexpected_response: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(unexpected_response)) + + return from_record(dipset, storage_record) diff --git a/app/routers/logrecognition/__init__.py b/app/routers/logrecognition/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/routers/logrecognition/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/routers/logrecognition/family_processor_manager.py b/app/routers/logrecognition/family_processor_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..39610884436cadfcd266cc6c5c7effe4eb3e1cb1 --- /dev/null +++ b/app/routers/logrecognition/family_processor_manager.py @@ -0,0 +1,100 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
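Editor's note: write_dipset_data above always sorts the bulk frame by reference and then azimuth before storing it, which is why dip indexes are positional and may change after every insert or update. A minimal sketch of that sort step with plain pandas, using flat scalar columns instead of ValueWithUnit objects:

import pandas as pd

# two dips arriving out of order, flattened to plain columns
df = pd.DataFrame({
    "reference": [1050.0, 1000.5],
    "azimuth": [10.0, 42.0],
    "classification": ["fracture", None],
})

# same call as in write_dipset_data: order by reference, then azimuth, and renumber rows
df.sort_values(by=["reference", "azimuth"], inplace=True, ignore_index=True)

print(df["reference"].tolist())  # [1000.5, 1050.0] -> row 0 is now the shallowest dip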
+ +import importlib.resources as pkg_resources # for loading resources in a package folder +import json +from dataclasses import dataclass +from datetime import datetime + +import far.catalogs as catalogs +import starlette.status as status +from far import family_processor as family_processor +from far.family_processor.family_processor import FamilyProcessor as FamilyProcessor +from odes_storage.exceptions import UnexpectedResponse + +from app.clients.storage_service_client import get_storage_record_service +from app.utils import Context + + +@dataclass +class ProcessorItem: + processor: FamilyProcessor + creation_date: datetime + + +FIXED_RECORD_ID = ":doc:REMOVED_FOR_CICD_SCAN" +# This name should not match any existing partition id +DEFAULT_CATALOG_NAME = "default_WDMS_catalog" + + +class FamilyProcessorManager: + def __init__(self, catalog_lifetime: int): + """ + + :param catalog_lifetime: lifetime (in seconds) for a cached catalog. Could be replaced in the future by a + message mechanism (from DE if new catalog is uploaded or from one pod to the others via redis) + """ + + self._catalog_lifetime = catalog_lifetime + self._processors = {DEFAULT_CATALOG_NAME: ProcessorItem( + processor=family_processor.make_family_processor(), creation_date=datetime.now())} + + @staticmethod + async def _get_catalogs_from_de(ctx: Context, partition_id: str): + + storage_client = await get_storage_record_service(ctx) + record_id = f"{partition_id}{FIXED_RECORD_ID}" + try: + catalogs_record = await storage_client.get_record(record_id, ctx.partition_id) + except UnexpectedResponse as e: + if e.status_code != status.HTTP_404_NOT_FOUND: + raise + return None, None, None + + rules_catalog = catalogs_record.data.get("family_catalog", None) + unit_catalog_str = pkg_resources.read_text(catalogs, 'CompatibleUnits.json') + unit_catalog = json.loads(unit_catalog_str) + main_family_catalog = {"LogFiles": {}} + main_families = catalogs_record.data.get("main_family_catalog", None) + if main_families is None: + main_families = [] + main_family_catalog["LogFiles"]["loginfo"] = main_families + + return rules_catalog, unit_catalog, main_family_catalog + + @staticmethod + async def _create_processor(ctx: Context, client_id: str) -> FamilyProcessor: + rules_catalog, unit_catalog, main_family_catalog = await FamilyProcessorManager._get_catalogs_from_de( + ctx, client_id) + if rules_catalog is None: + return None + return family_processor.make_user_family_processor(rules_catalog, unit_catalog, main_family_catalog) + + def get_default_processor(self): + return self._processors.get(DEFAULT_CATALOG_NAME).processor + + async def get_processor(self, ctx: Context, client_id: str = DEFAULT_CATALOG_NAME): + processor_item = self._processors.get(client_id, None) + if processor_item is None: + processor = await FamilyProcessorManager._create_processor(ctx, client_id) + if processor is None: + # When catalog is not found for given partition Id, fallback to the default catalog + processor = self.get_default_processor() + processor_item = ProcessorItem(processor=processor, creation_date=datetime.now()) + self._processors[client_id] = processor_item + if (datetime.now() - processor_item.creation_date).seconds >= self._catalog_lifetime: + # regenerate the client + self._processors[client_id] = None + return await self.get_processor(ctx, client_id) + return processor_item.processor diff --git a/app/routers/logrecognition/log_recognition.py b/app/routers/logrecognition/log_recognition.py new file mode 100644 index 
0000000000000000000000000000000000000000..7e0ba9bf3b8a3efc20e913ba720896492b87029b --- /dev/null +++ b/app/routers/logrecognition/log_recognition.py @@ -0,0 +1,160 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import far.family_processor.model as farmodel +from fastapi import APIRouter, Depends, HTTPException +from starlette import status +from typing import Optional, List +from pydantic import BaseModel, Field +import odes_storage.models as model + +import app.routers.logrecognition.family_processor_manager as fp_manager +from app.clients.storage_service_client import get_storage_record_service +from app.conf import Config +from app.utils import Context +from app.utils import get_ctx + +router = APIRouter() + + +class CatalogItem(BaseModel): + unit: str + family: Optional[str] = "" + rule: str + + +class MainFanilyCatalogItem(BaseModel): + MainFamily: str + Family: str + Unit: str + + +class Catalog(BaseModel): + family_catalog: List[CatalogItem] + main_family_catalog: Optional[List[MainFanilyCatalogItem]] = None + + +class CatalogRecord(BaseModel): + acl: "model.StorageAcl" = Field(..., alias="acl") + legal: "model.Legal" = Field(..., alias="legal") + data: "Catalog" = Field(..., alias="data") + + class Config: + schema_extra = { + "example": { + "acl": { + "viewers": [ + "abc@example.com, cde@example.com" + ], + "owners": [ + "abc@example.com, cde@example.com" + ] + }, + "legal": { + "legaltags": [ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries": [ + "US" + ] + }, + "data": { + "family_catalog": [ + { + "unit": "ohm.m", + "family": "Medium Resistivity", + "rule": "MEDR" + } + ], + "main_family_catalog": [ + { + "MainFamily": "Resistivity", + "Family": "Medium Resistivity", + "Unit": "OHMM" + } + ] + } + } + } + + +family_processor_manager = fp_manager.FamilyProcessorManager(Config.custom_catalog_timeout.value) + + +class GuessRequest(BaseModel): + label: str # Channel name, as defined in LAS or DLIS + log_unit: Optional[str] = None # Channel unit, as defined in LAS or DLIS + description: Optional[str] = None # Channel description, as defined in LAS or DLIS + + class Config: + schema_extra = { + "example": { + "label": "GRD", + "log_unit": "GAPI", + "description": "LDTD Gamma Ray", + } + } + + +class GuessResponse(BaseModel): + family: Optional[str] = None # Guessed family + family_type: Optional[List[str]] = None # Family type corresponding to guessed family + log_unit: Optional[str] = None # Guessed log unit + base_unit: Optional[str] = None # Unit to convert log + + +@router.post('/family', response_model=GuessResponse, + summary="Recognize family and unit", + description="Find the most probable family and unit using family assignment rule based catalogs. 
" + "User defined catalog will have the priority.", + operation_id="family") +async def post_recognize_custom(body: GuessRequest, + ctx: Context = Depends(get_ctx)) -> GuessResponse: + processor = await family_processor_manager.get_processor(ctx, ctx.partition_id) + result = processor.guess(log_info=farmodel.GuessRequest(**body.dict())) + if result.error is not None: + # Try with the default catalog + default_processor = family_processor_manager.get_default_processor() + result = default_processor.guess(log_info=farmodel.GuessRequest(**body.dict())) + if result.error is not None: + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=result.error) + + # family_processor_manager can return a 'str' or a 'List[str]', ensure in any case a List[str] is returned + if isinstance(result.family_type, str): + result.family_type = [result.family_type] + + response: GuessResponse = GuessResponse(family=result.family, + family_type=result.family_type, + log_unit=result.log_unit, + base_unit=result.base_unit) + return response + + +@router.put('/upload-catalog', + response_model=model.CreateUpdateRecordsResponse, + summary="Upload user-defined catalog with family assignment rules", + description="""Upload user-defined catalog with family assignment rules for specific partition ID. + If there is an existing catalog, it will be replaced. It takes maximum of 5 mins to replace the existing catalog. + Hence, any call to retrieve the family should be made after 5 mins of uploading the catalog""", + operation_id="upload-catalog") +async def upload_catalog(body: CatalogRecord, + ctx: Context = Depends(get_ctx)) -> model.CreateUpdateRecordsResponse: + storage_client = await get_storage_record_service(ctx) + # force the id + record = model.Record(**body.dict(by_alias=True), + id=f"{ctx.partition_id}{fp_manager.FIXED_RECORD_ID}", + kind=f"{ctx.partition_id}:wdms:familycatalog:1.0.0" + ) + response = await storage_client.create_or_update_records(ctx.partition_id, record=[record]) + return response diff --git a/app/routers/probes.py b/app/routers/probes.py new file mode 100644 index 0000000000000000000000000000000000000000..ade0f5a14b33cb10ab13b9eceddfd38e99f28ff3 --- /dev/null +++ b/app/routers/probes.py @@ -0,0 +1,37 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from fastapi import APIRouter + +# Routes require for the liveliness ('/healthz') and readiness ('/healthz') probes for kubernetes +# The root route ('/') is needs for the liveliness of the Google loadbalancer +# which doesn't take into account the ones defined in the yaml deployment file + + +router = APIRouter() + + +@router.get("/healthz", include_in_schema=False) +async def health(): + return {'status': 'healthy'} + + +@router.get("/readiness", include_in_schema=False) +async def readiness(): + return {'status': 'healthy'} + + +@router.get("/", include_in_schema=False) +async def ingress_gce_health(): + return {'status': 'healthy'} diff --git a/app/routers/search/__init__.py b/app/routers/search/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/app/routers/search/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/app/routers/search/fast_search.py b/app/routers/search/fast_search.py new file mode 100644 index 0000000000000000000000000000000000000000..2030662ad13b5f3e964797d880f5cb12996dd6a5 --- /dev/null +++ b/app/routers/search/fast_search.py @@ -0,0 +1,158 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import List +from fastapi import APIRouter, Depends +from odes_search.models import Point +from app.utils import Context +import app.routers.search.search as search + +router = APIRouter() +wellbore_kind = '*:wks:wellbore:*' +log_kind = '*:wks:log:*' +logSet_kind = '*:wks:logSet:*' +marker_kind = '*:wks:marker:*' +crs_format = 'data.wellHeadWgs84' +query_type = 'fastquery' + + +def get_ctx() -> Context: + return Context.current() + + +@router.post('/fastquery/wellbores', summary="Query with cursor", + description="""Get all Wellbores IDs object. <p>The wellbore kind is + *:wks:wellbore:* returns all records IDs IDs directly based on existing schemas</p>""") +async def fastquery_wellbores(body: search.SearchQuery = None, ctx: Context = Depends(get_ctx)): + return await search.basic_query_request(query_type, wellbore_kind, ctx, body.query) + + +@router.post('/fastquery/wellbores/bydistance', summary=f'Query with cursor, CRS format: {crs_format}', + description="""Get all Wellbores IDs IDs objects in a specific area. 
<p>The specific area will be define by a circle + based on its center coordinates (lat, lon) and radius (meters) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs IDs directly based on existing schemas</p>""") +async def fastquery_wellbores_bydistance(latitude: float, longitude: float, distance: int, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + spatial_filter = search.query_spatial_filter_builder("bydistance", latitude1=latitude, longitude1=longitude, + distance=distance) + return await search.query_request_with_spatial_filter(query_type, spatial_filter, ctx, body.query) + + +@router.post('/fastquery/wellbores/byboundingbox', summary=f'Query with cursor, CRS format: {crs_format}', + description="""Get all Wellbores IDs objects in a specific area. <p>The specific area will be define by a square + based on its top left coordinates (lat, lon) and its bottom right coordinates (log, lat) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_wellbores_byboundingbox(latitude_top_left: float, longitude_top_left: float, + latitude_bottom_right: float, longitude_bottom_right: float, + body: search.SearchQuery = None, ctx: Context = Depends(get_ctx)): + spatial_filter = search.query_spatial_filter_builder("byboundingbox", latitude1=latitude_top_left, + longitude1=longitude_top_left, + latitude2=latitude_bottom_right, + longitude2=longitude_bottom_right) + return await search.query_request_with_spatial_filter(query_type, spatial_filter, ctx, body.query) + + +@router.post('/fastquery/wellbores/bygeopolygon', summary=f'Query with cursor, CRS format: {crs_format}', + description="""Get all Wellbores IDs objects in a specific area. <p>The specific area will be define by a + polygon based on each of its coordinates (lat, lon) with a minimum of three</p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_wellbores_bygeopolygon(points: List[Point], query: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + spatial_filter = search.query_spatial_filter_builder("bygeopolygon", points=points) + return await search.query_request_with_spatial_filter(query_type, spatial_filter, ctx, query.query) + + +@router.post('/fastquery/wellbore/{wellbore_id}/logsets', + summary='Query with cursor, search logSets IDs by wellbore ID', + description="""Get all LogSets IDs objects using its relationship Wellbore ID. <p>All LogSets linked to this + specific ID will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_logsets_bywellbore(wellbore_id: str, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = search.added_query(wellbore_id, "wellbore", body.query) + return await search.basic_query_request(query_type, logSet_kind, ctx, query) + + +@router.post('/fastquery/wellbores/{wellbore_attribute}/logsets', + summary='Query with cursor, search logSets IDs by wellbore attribute', + description="""Get all LogSets IDs objects using a specific attribute of Wellbores. 
<p>All LogSets linked to Wellbores + with this specific attribute will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_logsets_bywellboreattribute(wellbore_attribute: str, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + return await search.query_request_with_specific_attribute(query_type, wellbore_attribute, wellbore_kind, + logSet_kind, "wellbore", ctx, + body.query) + + +@router.post('/fastquery/logs', summary='Query with cursor, gets logs', + description="""Get all Logs object. <p>The Logs kind is + *:wks:log:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_logs(body: search.SearchQuery = None, ctx: Context = Depends(get_ctx)): + return await search.basic_query_request(query_type, log_kind, ctx, body.query) + + +@router.post('/fastquery/wellbore/{wellbore_id}/logs', summary='Query with cursor, search logs IDs by wellbore ID', + description="""Get all Logs IDs objects using its relationship Wellbore ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_logs_bywellbore(wellbore_id: str, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = search.added_query(wellbore_id, "wellbore", body.query) + return await search.basic_query_request(query_type, log_kind, ctx, query) + + +@router.post('/fastquery/wellbores/{wellbore_attribute}/logs', + summary='Query with cursor, search logs IDs by wellbore attribute', + description="""Get all Logs IDs objects using a specific attribute of Wellbores. <p>All Logs linked to Wellbores + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_logs_bywellboreattribute(wellbore_attribute: str, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + return await search.query_request_with_specific_attribute(query_type, wellbore_attribute, wellbore_kind, log_kind, + "wellbore", ctx, + body.query) + + +@router.post('/fastquery/logset/{logset_id}/logs', summary='Query with cursor, search logs IDs by logSet ID', + description="""Get all Logs IDs objects using its relationship Logset ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_logs_bylogset(logset_id: str, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = search.added_query(logset_id, "logSet", body.query) + return await search.basic_query_request(query_type, log_kind, ctx, query) + + +@router.post('/fastquery/logsets/{logset_attribute}/logs', + summary='Query with cursor, search logs IDs by logSet attribute', + description="""Get all Logs IDs objects using a specific attribute of LogSets. 
<p>All Logs linked to LogSets + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_logs_bylogsetattribute(logset_attribute: str, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + return await search.query_request_with_specific_attribute(query_type, logset_attribute, logSet_kind, log_kind, + "logSet", ctx, + body.query) + + +@router.post('/fastquery/wellbore/{wellbore_id}/markers', + summary='Query with cursor, search markers IDs by wellbore ID', + description="""Get all Markers IDs objects using its relationship Wellbore ID. <p>All Markers linked to this + specific ID will be returned</p> + <p>The Marker kind is *:wks:marker:* returns all records IDs directly based on existing schemas</p>""") +async def fastquery_markers_bywellbore(wellbore_id: str, body: search.SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = search.added_query(wellbore_id, "wellbore", body.query) + return await search.basic_query_request(query_type, marker_kind, ctx, query) diff --git a/app/routers/search/search.py b/app/routers/search/search.py new file mode 100644 index 0000000000000000000000000000000000000000..783010de6e7d1c5a56adc39345ce38f3a749ef31 --- /dev/null +++ b/app/routers/search/search.py @@ -0,0 +1,300 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
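Editor's note: the fastquery routes above narrow results by relationship, building a data.relationships.<type>.id clause and ANDing it with any user-supplied query before calling the Search service. A short sketch of that string construction (relationships_clause and combine are illustrative names; search.py defines its own create_relationships_id_str and added_query helpers):

def relationships_clause(data_type: str, record_id: str) -> str:
    # e.g. data.relationships.wellbore.id:"opendes:wellbore:123"
    return f'data.relationships.{data_type}.id:"{record_id}"'

def combine(base_clause: str, user_query: str = None) -> str:
    # the relationship filter stays mandatory; the optional user query is ANDed in
    return f'{base_clause} AND ({user_query})' if user_query else base_clause

print(combine(relationships_clause("wellbore", "opendes:wellbore:123"), 'data.name:"GR*"'))
# data.relationships.wellbore.id:"opendes:wellbore:123" AND (data.name:"GR*")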
+ +from typing import List +from fastapi import APIRouter, Depends +from odes_search.models import ( + QueryRequest, + QueryResponse, + CursorQueryResponse, + SpatialFilter, + Point, + ByDistance, + ByBoundingBox, + ByGeoPolygon, + CursorQueryRequest) +from app.clients.search_service_client import get_search_service +from app.utils import Context +import app.routers.search.search_wrapper as search_wrapper +from pydantic import BaseModel, Field + +router = APIRouter() + +wellbore_kind = '*:wks:wellbore:*' +log_kind = '*:wks:log:*' +logSet_kind = '*:wks:logSet:*' +marker_kind = '*:wks:marker:*' +crs_format = 'data.wellHeadWgs84' +query_type = 'query' +LIMIT = 1000 + + +class SearchQuery(BaseModel): + query: str = Field(None, alias="query") + + +def get_ctx() -> Context: + return Context.current() + + +def query_type_returned_fields(query_type: str): + returned_fields = 'id' if query_type == 'fastquery' else '*' + return returned_fields + + +async def query_request_with_spatial_filter(query_type: str, spatial_filter: SpatialFilter, ctx: Context, + query: str = None): + returned_fields = query_type_returned_fields(query_type) + query_request = QueryRequest(kind=wellbore_kind, + query=query, + returnedFields=[returned_fields], + spatialFilter=spatial_filter) + return await search_wrapper.SearchWrapper.query_cursorless( + search_service=await get_search_service(ctx), + data_partition_id=ctx.partition_id, + query_request=query_request) + + +def query_spatial_filter_builder(spacial_filter_type: str, latitude1: str = None, longitude1: float = None, + latitude2: str = None, longitude2: float = None, distance: int = None, + points: List[Point] = None): + if spacial_filter_type == "bydistance": + point = Point(latitude=latitude1, longitude=longitude1) + by_distance = ByDistance(distance=distance, point=point) + spatial_filter = SpatialFilter(field=crs_format, byDistance=by_distance) + if spacial_filter_type == "byboundingbox": + point_top_left = Point(latitude=latitude1, longitude=longitude1) + point_bottom_right = Point(latitude=latitude2, longitude=longitude2) + by_bounding_box = ByBoundingBox(topLeft=point_top_left, bottomRight=point_bottom_right) + spatial_filter = SpatialFilter(field=crs_format, byBoundingBox=by_bounding_box) + if spacial_filter_type == "bygeopolygon": + by_geo_polygon = ByGeoPolygon(points=points) + spatial_filter = SpatialFilter(field=crs_format, byGeoPolygon=by_geo_polygon) + return spatial_filter + + +def create_relationships_id_str(data_type, id): + return f'data.relationships.{data_type}.id:\"{id}\"' + + +async def query_request_with_specific_attribute(query_type: str, attribute: str, attribute_kind: str, kind: str, + data_type: str, + ctx: Context, query: str = None): + query_request = QueryRequest(kind=attribute_kind, + query=attribute, + returnedFields=['id']) + + client = await get_search_service(ctx) + query_result = await search_wrapper.SearchWrapper.query_cursorless( + search_service=client, + data_partition_id=ctx.partition_id, + query_request=query_request) + + response = CursorQueryResponse.parse_obj(query_result.dict()) + + if not response.results: + return query_result + + relationships_ids = [create_relationships_id_str(data_type, r["id"]) for r in response.results] + id_list = ' OR '.join(relationships_ids) # [a, b, c] => 'a OR b OR c' + + if query: + query = f'({id_list}) AND ({query})' + else: + query = f'{id_list}' + + returned_fields = query_type_returned_fields(query_type) + query_request = QueryRequest(kind=kind, + query=query, + 
returnedFields=[returned_fields]) + return await search_wrapper.SearchWrapper.query_cursorless( + search_service=client, + data_partition_id=ctx.partition_id, + query_request=query_request) + + +async def basic_query_request(query_type: str, kind: str, ctx: Context, query: str = None): + returned_fields = query_type_returned_fields(query_type) + query_request = QueryRequest(kind=kind, + query=query, + returnedFields=[returned_fields]) + client = await get_search_service(ctx) + return await search_wrapper.SearchWrapper.query_cursorless( + search_service=client, + data_partition_id=ctx.partition_id, + query_request=query_request) + + +async def basic_query_request_with_cursor(query_type: str, kind: str, ctx: Context, query: str = None): + returned_fields = query_type_returned_fields(query_type) + if not query: + query = None + query_request = CursorQueryRequest(kind=kind, + limit=LIMIT, + query=query, + returnedFields=[returned_fields]) + client = await get_search_service(ctx) + return await client.query_with_cursor( + data_partition_id=ctx.partition_id, + cursor_query_request=query_request) + + +def added_query(id: str, data_type: str, query: str = None): + relationships_id = create_relationships_id_str(data_type, id) + if query: + query = f'{relationships_id} AND ({query})' + else: + query = relationships_id + return query + + +@router.post('/query', summary='Query') +async def query(query_request: QueryRequest, + ctx: Context = Depends(get_ctx)) -> QueryResponse: + client = await get_search_service(ctx) + return await client.query(data_partition_id=ctx.partition_id, + query_request=query_request) + + +@router.post('/query_with_cursor', summary='Query with cursor') +async def query_with_cursor(query_request: QueryRequest, + ctx: Context = Depends(get_ctx)): + client = await get_search_service(ctx) + return await search_wrapper.SearchWrapper.query_cursorless( + search_service=client, + data_partition_id=ctx.partition_id, + query_request=query_request) + + +@router.post('/query/wellbores', summary='Query with cursor', + description="""Get all Wellbores object. <p>The wellbore kind is + *:wks:wellbore:* returns all records directly based on existing schemas</p>""") +async def query_wellbores(body: SearchQuery = None, ctx: Context = Depends(get_ctx)): + return await basic_query_request_with_cursor(query_type, wellbore_kind, ctx, body.query) + + +@router.post('/query/wellbores/bydistance', summary=f'Query with cursor, CRS format: {crs_format}', + description="""Get all Wellbores object in a specific area. <p>The specific area will be define by a circle + based on its center coordinates (lat, lon) and radius (meters) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p>""") +async def query_wellbores_bydistance(latitude: float, longitude: float, distance: int, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + spatial_filter = query_spatial_filter_builder("bydistance", latitude1=latitude, longitude1=longitude, + distance=distance) + return await query_request_with_spatial_filter(query_type, spatial_filter, ctx, body.query) + + +@router.post('/query/wellbores/byboundingbox', summary=f'Query with cursor, CRS format: {crs_format}', + description="""Get all Wellbores object in a specific area. 
<p>The specific area will be define by a square + based on its top left coordinates (lat, lon) and its bottom right coordinates (log, lat) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p>""") +async def query_wellbores_byboundingbox(latitude_top_left: float, longitude_top_left: float, + latitude_bottom_right: float, longitude_bottom_right: float, + body: SearchQuery = None, ctx: Context = Depends(get_ctx)): + spatial_filter = query_spatial_filter_builder("byboundingbox", latitude1=latitude_top_left, + longitude1=longitude_top_left, + latitude2=latitude_bottom_right, longitude2=longitude_bottom_right) + return await query_request_with_spatial_filter(query_type, spatial_filter, ctx, body.query) + + +@router.post('/query/wellbores/bygeopolygon', summary=f'Query with cursor, CRS format: {crs_format}', + description="""Get all Wellbores object in a specific area. <p>The specific area will be define by a + polygon based on each of its coordinates (lat, lon) with a minimum of three</p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p>""") +async def query_wellbores_bygeopolygon(points: List[Point], query: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + spatial_filter = query_spatial_filter_builder("bygeopolygon", points=points) + return await query_request_with_spatial_filter(query_type, spatial_filter, ctx, query.query) + + +@router.post('/query/wellbore/{wellboreId}/logsets', summary='Query with cursor, search logSets by wellbore ID', + description="""Get all LogSets object using its relationship Wellbore ID. <p>All LogSets linked to this + specific ID will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p>""") +async def query_logsets_bywellbore(wellboreId: str, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = added_query(wellboreId, "wellbore", body.query) + return await basic_query_request(query_type, logSet_kind, ctx, query) + + +@router.post('/query/wellbores/{wellboreAttribute}/logsets', + summary='Query with cursor, search logSets by wellbore attribute', + description="""Get all LogSets object using a specific attribute of Wellbores. <p>All LogSets linked to Wellbores + with this specific attribute will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p>""") +async def query_logsets_bywellboreattribute(wellboreAttribute: str, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + return await query_request_with_specific_attribute(query_type, wellboreAttribute, wellbore_kind, logSet_kind, + "wellbore", ctx, + body.query) + + +@router.post('/query/logs', summary='Query with cursor, gets logs', + description="""Get all Logs object. <p>The Logs kind is + *:wks:log:* returns all records directly based on existing schemas</p>""") +async def query_logs(body: SearchQuery = None, ctx: Context = Depends(get_ctx)): + return await basic_query_request_with_cursor(query_type, log_kind, ctx, body.query) + + +@router.post('/query/wellbore/{wellboreId}/logs', summary='Query with cursor, search logs by wellbore ID', + description="""Get all Logs object using its relationship Wellbore ID. 
<p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>""") +async def query_logs_bywellbore(wellboreId: str, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = added_query(wellboreId, "wellbore", body.query) + return await basic_query_request(query_type, log_kind, ctx, query) + + +@router.post('/query/wellbores/{wellboreAttribute}/logs', + summary='Query with cursor, search logs by wellbore attribute', + description="""Get all Logs object using a specific attribute of Wellbores. <p>All Logs linked to Wellbores + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>""") +async def query_logs_bywellboreattribute(wellboreAttribute: str, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + return await query_request_with_specific_attribute(query_type, wellboreAttribute, wellbore_kind, log_kind, + "wellbore", ctx, + body.query) + + +@router.post('/query/logset/{logsetId}/logs', summary='Query with cursor, search logs by logSet ID', + description="""Get all Logs object using its relationship Logset ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>""") +async def query_logs_bylogset(logsetId: str, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = added_query(logsetId, "logSet", body.query) + return await basic_query_request(query_type, log_kind, ctx, query) + + +@router.post('/query/logsets/{logsetAttribute}/logs', summary='Query with cursor, search logs by logSet attribute', + description="""Get all Logs object using a specific attribute of LogSets. <p>All Logs linked to LogSets + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>""") +async def query_logs_bylogsetattribute(logsetAttribute: str, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + return await query_request_with_specific_attribute(query_type, logsetAttribute, logSet_kind, log_kind, "logSet", + ctx, + body.query) + + +@router.post('/query/wellbore/{wellboreId}/markers', summary='Query with cursor, search markers by wellbore ID', + description="""Get all Markers object using its relationship Wellbore ID. <p>All Markers linked to this + specific ID will be returned</p> + <p>The Marker kind is *:wks:marker:* returns all records directly based on existing schemas</p>""") +async def query_markers_bywellbore(wellboreId: str, body: SearchQuery = None, + ctx: Context = Depends(get_ctx)): + query = added_query(wellboreId, "wellbore", body.query) + return await basic_query_request(query_type, marker_kind, ctx, query) diff --git a/app/routers/search/search_wrapper.py b/app/routers/search/search_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..2a2e98706bb8e7749c9ca8af05d38271552491a2 --- /dev/null +++ b/app/routers/search/search_wrapper.py @@ -0,0 +1,55 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
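For reference, a small sketch (values invented) of the second query that query_request_with_specific_attribute assembles once the first search has returned the matching wellbore IDs:

    wellbore_ids = ["opendes:wellbore:1", "opendes:wellbore:2"]
    terms = [f'data.relationships.wellbore.id:"{i}"' for i in wellbore_ids]
    id_list = " OR ".join(terms)              # 'a OR b'
    user_query = 'data.log.name:"GR"'
    # combined with the caller-supplied query, if any
    final_query = f"({id_list}) AND ({user_query})" if user_query else id_list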
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from odes_search.models import *
+from app.clients import SearchServiceClient
+
+
+class SearchWrapper:
+
+    @staticmethod
+    async def query_cursorless(search_service: SearchServiceClient,
+                               data_partition_id: str,
+                               query_request: QueryRequest) -> CursorQueryResponse:
+        """
+        Repeat the search query until the returned cursor is null and concatenate the results
+        :param search_service: search service client used to run the query
+        :param data_partition_id: data partition the query runs against
+        :param query_request: the query
+        :return: the aggregated result of all pages
+        """
+        # convert the query request into a cursor-based request
+        cursor = None
+
+        test_dict = query_request.dict(by_alias=True)
+        request_with_cursor = CursorQueryRequest.parse_obj(test_dict)
+        request_with_cursor.limit = 100
+        request_with_cursor.cursor = cursor
+
+        aggregated_result = CursorQueryResponse()
+        aggregated_result.results = []
+
+        while True:
+            request_with_cursor.cursor = cursor
+            query_result = await search_service.query_with_cursor(
+                data_partition_id=data_partition_id,
+                cursor_query_request=request_with_cursor)
+
+            # append the page results before checking the cursor so the last page is not dropped
+            aggregated_result.results.extend(query_result.results)
+            aggregated_result.total_count = query_result.total_count
+            cursor = query_result.cursor
+            if not cursor:
+                break
+
+        return aggregated_result
diff --git a/app/routers/trajectory/__init__.py b/app/routers/trajectory/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c
--- /dev/null
+++ b/app/routers/trajectory/__init__.py
@@ -0,0 +1,14 @@
+# Copyright 2021 Schlumberger
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/app/routers/trajectory/parameters.py b/app/routers/trajectory/parameters.py
new file mode 100644
index 0000000000000000000000000000000000000000..3e2df7a9834f2570931d986eaa37795689d9b4c4
--- /dev/null
+++ b/app/routers/trajectory/parameters.py
@@ -0,0 +1,13 @@
+from fastapi import Query
+from app.bulk_persistence import JSONOrient
+
+
+acceptable_values = [o.value for o in JSONOrient if o.value != "values"]
+
+def trajectory_json_orient_parameter(orient: str = Query(
+    JSONOrient.split.value,
+    description='define the format used for JSON data. 
Value can be ' + ', '.join(acceptable_values), + regex="|".join(acceptable_values) +) +) -> str: + return orient diff --git a/app/routers/trajectory/persistence.py b/app/routers/trajectory/persistence.py new file mode 100644 index 0000000000000000000000000000000000000000..c456b5090b10d27b998afacc0fae83579c0a2e54 --- /dev/null +++ b/app/routers/trajectory/persistence.py @@ -0,0 +1,65 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pandas as pd +from app.bulk_persistence import BulkId, NoBulkException, UnknownChannelsException, InvalidBulkException +from app.model.model_curated import trajectory as Trajectory + +from app.bulk_persistence import get_dataframe, create_and_store_dataframe + +from app.utils import Context + +TrajectoryId = str + + +class Persistence: + """Gets the bulk data for the trajectory + + Args: + record (Trajectory): trajectory record to get the bulkd data + channels (list[str]): Filters the channel to be returned, if none return all channels + + Returns: + pandas.Dataframe: containing bulkd data for the specified record + + Raises: + NoBulkException: record doesn't have any bulk. + InvalidBulkException: value of data.bulkURI in record is invalid. + """ + @classmethod + async def read_bulk( + cls, ctx: Context, record: Trajectory, channels=None + ) -> pd.DataFrame: + + if record.data is None or not hasattr(record.data,'bulkURI') or record.data.bulkURI is None: # todo what about empty string + raise NoBulkException + + try: + df = await get_dataframe(ctx, BulkId.bulk_urn_decode(record.data.bulkURI)) + except Exception as ex: + raise InvalidBulkException(ex) + + if not channels: + return df + + try: + return df[channels] + except KeyError as key_error: # unknown channels + raise UnknownChannelsException(key_error) + + + @classmethod + async def write_bulk(cls, ctx, dataframe: pd.DataFrame) -> str: + bulk_id = await create_and_store_dataframe(ctx, dataframe) + return BulkId.bulk_urn_encode(bulk_id) diff --git a/app/routers/trajectory/trajectory_ddms_v2.py b/app/routers/trajectory/trajectory_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..cc9918be59065b49376c3126ad8d3f5722e18c4a --- /dev/null +++ b/app/routers/trajectory/trajectory_ddms_v2.py @@ -0,0 +1,301 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
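A pandas-only sketch (column names invented) of the channel filtering performed by Persistence.read_bulk:

    import pandas as pd

    df = pd.DataFrame({"MD": [0.0, 0.5], "GR": [80.1, 82.4], "RHOB": [2.31, 2.29]})

    df[["MD", "GR"]]          # known channels: returns just those columns
    try:
        df[["MD", "TVD"]]     # unknown channel: pandas raises KeyError,
    except KeyError as err:   # surfaced by the service as UnknownChannelsException
        print(err)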
+ +from typing import List, Optional + +from fastapi import APIRouter, Depends, Query, Request, HTTPException +from starlette import status +from starlette.responses import Response +from pandas import DataFrame + +from odes_storage.models import CreateUpdateRecordsResponse, RecordVersions + +from app.clients.storage_service_client import get_storage_record_service +from app.model.model_curated import ( + trajectory as Trajectory, + trajectorychannel as TrajectoryChannel, +) +from app.model.model_utils import from_record, to_record +from app.routers.trajectory.parameters import trajectory_json_orient_parameter +from app.routers.trajectory.persistence import Persistence +from app.bulk_persistence import DataframeSerializer, JSONOrient, MimeTypes, NoBulkException, UnknownChannelsException, \ + InvalidBulkException + +from app.utils import Context, OpenApiHandler, OpenApiResponse, get_ctx + +router = APIRouter() + +TrajectoryId = str + + +async def get_persistence() -> Persistence: + return Persistence() + + +async def get_trajectory_record(ctx, trajectoryid: TrajectoryId) -> Trajectory: + storage_client = await get_storage_record_service(ctx) + return from_record( + Trajectory, + await storage_client.get_record( + id=trajectoryid, data_partition_id=ctx.partition_id + ), + ) + + +@router.get( + "/trajectories/{trajectoryid}", + response_model=Trajectory, + summary="Get the trajectory using wks:trajectory:1.0.5 schema", + description="""Get the Trajectory object using its **id**""", + operation_id="get_trajectory", + responses={ + status.HTTP_404_NOT_FOUND: {"description": "Trajectory not found"} + }, + response_model_exclude_unset=True +) +async def get_trajectory( + trajectoryid: TrajectoryId, ctx: Context = Depends(get_ctx) +) -> Trajectory: + # TODO add a check on the kind (*:wks:Trajectory:1.0.5) + return await get_trajectory_record(ctx, trajectoryid) + + +@router.delete( + "/trajectories/{trajectoryid}", + summary="Delete the Trajectory. 
The API performs a logical deletion of the given record", + operation_id="del_trajectory", + status_code=status.HTTP_204_NO_CONTENT, + response_class=Response, + responses={ + status.HTTP_404_NOT_FOUND: {"description": "Trajectory not found"}, + status.HTTP_204_NO_CONTENT: { + "description": "Record deleted successfully" + }, + }, +) +async def del_trajectory( + trajectoryid: TrajectoryId, + ctx: Context = Depends(get_ctx), +): + storage_client = await get_storage_record_service(ctx) + await storage_client.delete_record( + id=trajectoryid, data_partition_id=ctx.partition_id + ) + + +@router.get( + "/trajectories/{trajectoryid}/versions", + response_model=RecordVersions, + summary="Get all versions of the Trajectory", + operation_id="get_trajectory_versions", + responses={ + status.HTTP_404_NOT_FOUND: {"description": "Trajectory not found"} + }, +) +async def get_trajectory_versions( + trajectoryid: TrajectoryId, ctx: Context = Depends(get_ctx) +) -> RecordVersions: + storage_client = await get_storage_record_service(ctx) + return await storage_client.get_all_record_versions( + id=trajectoryid, data_partition_id=ctx.partition_id + ) + + +@router.get( + "/trajectories/{trajectoryid}/versions/{version}", + response_model=Trajectory, + summary="Get the given version of Trajectory using wks:Trajectory:1.0.5 schema", + operation_id="get_trajectory_version", + responses={ + status.HTTP_404_NOT_FOUND: {"description": "Trajectory not found"} + }, + response_model_exclude_unset=True +) +async def get_trajectory_version( + trajectoryid: TrajectoryId, version: int, ctx: Context = Depends(get_ctx) +) -> Trajectory: + storage_client = await get_storage_record_service(ctx) + trajectory_record = await storage_client.get_record_version( + id=trajectoryid, version=version, data_partition_id=ctx.partition_id + ) + return from_record(Trajectory, trajectory_record) + + +@router.post( + "/trajectories", + response_model=CreateUpdateRecordsResponse, + summary="Create or update the trajectories using wks:Trajectory:1.0.5 schema", + operation_id="post_trajectory", + responses={ + status.HTTP_400_BAD_REQUEST: { + "description": "Missing mandatory parameter or unknown parameter" + } + }, +) +async def post_trajectory( + trajectories: List[Trajectory], ctx: Context = Depends(get_ctx) +) -> CreateUpdateRecordsResponse: + + storage_client = await get_storage_record_service(ctx) + return await storage_client.create_or_update_records( + record=[to_record(tr) for tr in trajectories], + data_partition_id=ctx.partition_id, + ) + + +_trajectory_dataframe_example = DataFrame([ + [0, 1001, 2001], + [0.5, 1002, 2002], + [1, 1003, 2003], + [1.5, 1004, 2004], + [2, 1005, 2005]], + columns=['MD', 'X', 'Y'] +) + + + +# manually setup doc as we wanted to tweaked the classic mechanism in order to best perf as we can +@OpenApiHandler.set( + operation_id="post_traj_data", + request_body={ + 'description': + 'Write trajectory bulk data. Each column corresponds to a channel.' 
+ '\nIt uses [Pandas.Dataframe json format]' + '(https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html)' + + '.\n Here\'re examples for data with {} rows and {} channels ({}) with different _orient_: '.format( + _trajectory_dataframe_example.shape[0], + _trajectory_dataframe_example.shape[1], + ', '.join(_trajectory_dataframe_example.columns.tolist())) + + ''.join([f'\n* {o.value}: <br/>`{DataframeSerializer.to_json(_trajectory_dataframe_example, o)}`<br/> ' + for o in JSONOrient if o != JSONOrient.values]), + # put examples here because of bug in swagger UI to properly render multiple examples + "required": True, + "content": { + MimeTypes.JSON.type: { + "schema": { + # swagger UI bug, so single example here + "example": DataframeSerializer.to_json( + _trajectory_dataframe_example, + JSONOrient.split + ), + "oneOf": [ + DataframeSerializer.get_schema(o) for o in JSONOrient + ], + } + } + }, + }, +) +@router.post( + "/trajectories/{trajectoryid}/data", + summary="Writes the specified data to the trajectory (atomic).", + description="Overwrite if exists", + operation_id="post_traj_data", + response_model=CreateUpdateRecordsResponse, + responses={ + status.HTTP_404_NOT_FOUND: {"description": "trajectory not found"}, + status.HTTP_200_OK: {}, + }, +) +async def post_traj_data( + request: Request, + trajectoryid: TrajectoryId, + orient: str = Depends(trajectory_json_orient_parameter), + ctx: Context = Depends(get_ctx), + persistence: Persistence = Depends(get_persistence)) -> CreateUpdateRecordsResponse: + + content = await request.body() # request.stream() + df = DataframeSerializer.read_json(content, orient) + + record = await get_trajectory_record(ctx, trajectoryid) + + record.data.bulkURI = await persistence.write_bulk(ctx, df) + + # update record's channels + if not record.data.channels: + record.data.channels = [] + + channels = {c.name: c for c in record.data.channels} + + record.data.channels = [] + for name in df.columns: + channel = channels.get(name, TrajectoryChannel(name=name)) + channel.bulkURI = record.data.bulkURI + ":" + name + record.data.channels.append(channel) + + # Update record + storage_client = await get_storage_record_service(ctx) + await storage_client.create_or_update_records( + data_partition_id=ctx.partition_id, record=[record] + ) + + return record + + +@OpenApiHandler.set( + operation_id="get_traj_data", + responses=[ + OpenApiResponse( + status=status.HTTP_200_OK, + description= + 'Get trajectory data of the given channels.' + '\nIt uses [Pandas.Dataframe json format]' + '(https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html)' + + '.\n Here\'re examples for data with {} rows for channels {} with different _orient_: '.format( + _trajectory_dataframe_example.shape[0], + ', '.join(_trajectory_dataframe_example.columns.tolist())) + + ''.join([f'\n* {o.value}: <br/>`{DataframeSerializer.to_json(_trajectory_dataframe_example, o)}`<br/> ' + for o in JSONOrient]), + name="GetLogDataResponse", + example=DataframeSerializer.to_json(_trajectory_dataframe_example, JSONOrient.split), + schema={ + "oneOf": [DataframeSerializer.get_schema(o) for o in JSONOrient] + }, + ) + ], +) +@router.get( + "/trajectories/{trajectoryid}/data", + summary="Returns all data within the specified filters. 
Strongly consistent.", + description="return full bulk data", + operation_id="get_traj_data", + responses={ + status.HTTP_404_NOT_FOUND: {"description": "trajectory not found"}, + status.HTTP_400_BAD_REQUEST: {"description": "unknown channels"}, + status.HTTP_204_NO_CONTENT: {"description": "No bulkURI"}, + status.HTTP_500_INTERNAL_SERVER_ERROR: {"description": "Record has an invalid bulkURI"}, + }, +) +async def get_traj_data( + trajectoryid: TrajectoryId, + orient: str = Depends(trajectory_json_orient_parameter), + channels: Optional[List[str]] = Query( + None, description="List of channels to get. If not provided, return all channels." + ), + ctx: Context = Depends(get_ctx), + persistence: Persistence = Depends(get_persistence), +): + record = await get_trajectory_record(ctx, trajectoryid) + + try: + df = await persistence.read_bulk(ctx, record, channels) + except NoBulkException: + return Response(status_code=status.HTTP_204_NO_CONTENT) + except UnknownChannelsException as key_error: # unknown channels + raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(key_error)) from key_error + except InvalidBulkException as ex: + raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(ex)) + + content = DataframeSerializer.to_json(df, orient=orient) + return Response(content=content, media_type=MimeTypes.JSON.type) diff --git a/app/utils.py b/app/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..8f43b2d02ceacd7a81d4fdd40478cb8bbc8cb92f --- /dev/null +++ b/app/utils.py @@ -0,0 +1,410 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Optional, Callable, List, Tuple, Union, NamedTuple +import concurrent.futures +from functools import lru_cache +from aiohttp import ClientSession +import contextvars +from os import path, makedirs +import tempfile +import json + +from app.model.user import User +from app.injector.app_injector import AppInjector +from app.conf import Config + + +@lru_cache() +def get_http_client_session(key: str = 'GLOBAL'): + return ClientSession(json_serialize=json.dumps) + + +def get_pool_executor(): + return get_pool_executor._pool + + +get_pool_executor._pool = concurrent.futures.ThreadPoolExecutor() + + +def _setup_temp_dir() -> str: + tmpdir = tempfile.gettempdir() + if not tmpdir.endswith('wdmsosdu'): + tmpdir = path.join(tmpdir, 'wdmsosdu') + makedirs(tmpdir, exist_ok=True) + tempfile.tempdir = tmpdir + return tmpdir + + +WDMS_TEMP_DIR = _setup_temp_dir() + + +def get_wdms_temp_dir(): + return WDMS_TEMP_DIR + + +async def async_with_cache(cache, key: str, fn_coroutine, *args, **kwargs): + try: + return cache[key] + except KeyError: + pass # key not found + v = await fn_coroutine(*args, **kwargs) + try: + cache[key] = v + except ValueError: + pass # value too large + return v + + +class Context: + """ + Immutable object to provide contextual information a long request processing + """ + __slots__ = [ + '_tracer', + '_logger', + '_correlation_id', + '_request_id', + '_dev_mode', + '_auth', + '_partition_id', + '_app_key', + '_api_key', + '_user', + '_app_injector', + '_attr_dict' + ] + + def __init__(self, + tracer=None, + logger=None, + correlation_id: Optional[str] = None, + request_id: Optional[str] = None, + dev_mode: bool = Config.dev_mode.value, + auth=None, + partition_id: Optional[str] = None, + app_key: Optional[str] = None, + api_key: Optional[str] = None, + user: Optional[User] = None, + app_injector: Optional[AppInjector] = None, + **keys): + + self._tracer = tracer + self._logger = logger + self._correlation_id = correlation_id + self._request_id = request_id + self._dev_mode = dev_mode + self._auth = auth + self._partition_id = partition_id + self._app_key = app_key + self._api_key = api_key + self._user = user + self._app_injector = app_injector + + # pass + self._attr_dict = keys or {} + + __ctx_var = contextvars.ContextVar('__internal_context_var') + """ + contextvar is natively supported in asyncio, we can take advantage of this of easily get the current context (by + the way it may hide the potential dependency) + """ + + @classmethod + def current(cls) -> 'Context': + return cls.__ctx_var.get() + + def set_current(self): + Context.__ctx_var.set(self) + + @classmethod + def set_current_with_value(cls, tracer=None, logger=None, correlation_id=None, request_id=None, auth=None, + partition_id=None, app_key=None, api_key=None, user=None, app_injector=None, + dev_mode=Config.dev_mode.value, + **keys) -> 'Context': + """ + clone the current context with the given values, set the new ctx as current and returns it + :return: + """ + current = cls.current() + assert current is not None, 'no existing current context' + new_ctx = current.with_value(tracer=tracer, + logger=logger, + correlation_id=correlation_id, + request_id=request_id, + auth=auth, + partition_id=partition_id, + app_key=app_key, + api_key=api_key, + user=user, + app_injector=app_injector, + dev_mode=dev_mode, + **keys) + new_ctx.set_current() + return new_ctx + + def get(self, key, default=None): + if key in self._attr_dict: + return self._attr_dict[key] + if hasattr(self, '_' + key): + return 
getattr(self, '_' + key) + return default + + def __getitem__(self, key): + if key in self._attr_dict: + return self._attr_dict[key] + + if hasattr(self, '_' + key): + return getattr(self, '_' + key) + raise KeyError(key + ' is unknown') + + def __copy__(self): + return self.__class__( + tracer=self._tracer, + logger=self._logger, + correlation_id=self._correlation_id, + request_id=self._request_id, + dev_mode=self._dev_mode, + auth=self._auth, + partition_id=self._partition_id, + app_key=self._app_key, + api_key=self._api_key, + user=self._user, + app_injector=self._app_injector, + **self._attr_dict) + + def with_correlation_id(self, correlation_id): + clone = self.__copy__() + clone._correlation_id = correlation_id + return clone + + def with_request_id(self, request_id): + clone = self.__copy__() + clone._request_id = request_id + return clone + + def with_auth(self, auth): + clone = self.__copy__() + clone._auth = auth + return clone + + def with_partition_id(self, partition_id): + clone = self.__copy__() + clone._partition_id = partition_id + return clone + + def with_user(self, user): + clone = self.__copy__() + clone._user = user + return clone + + def with_app_key(self, app_key): + clone = self.__copy__() + clone._app_key = app_key + return clone + + def with_api_key(self, api_key): + clone = self.__copy__() + clone._api_key = api_key + return clone + + def with_injector(self, app_injector): + clone = self.__copy__() + clone._app_injector = app_injector + return clone + + def with_value(self, tracer=None, logger=None, correlation_id=None, request_id=None, auth=None, + partition_id=None, app_key=None, api_key=None, user=None, app_injector=None, + dev_mode=Config.dev_mode.value, **keys) -> 'Context': + """ Clone context, adding all keys in future logs """ + cloned = self.__class__( + tracer=tracer or self._tracer, + logger=logger or self._logger, + correlation_id=correlation_id or self._correlation_id, + request_id=request_id or self._request_id, + dev_mode=dev_mode or self._dev_mode, + auth=auth or self._auth, + partition_id=partition_id or self._partition_id, + app_key=app_key or self._app_key, + api_key=api_key or self._api_key, + user=user or self._user, + app_injector=app_injector or self._app_injector, + **self._attr_dict) + + if keys is not None: + cloned._attr_dict.update(keys) + return cloned + + @property + def tracer(self): + return self._tracer + + @property + def logger(self): + return self._logger + + @property + def correlation_id(self) -> Optional[str]: + return self._correlation_id + + @property + def request_id(self) -> Optional[str]: + return self._request_id + + @property + def dev_mode(self) -> bool: + return self._dev_mode + + @property + def auth(self): + return self._auth + + @property + def partition_id(self) -> Optional[str]: + return self._partition_id + + @property + def api_key(self) -> Optional[str]: + return self._api_key + + @property + def app_key(self) -> Optional[str]: + return self._app_key + + @property + def user(self) -> Optional[User]: + return self._user + + @property + def app_injector(self) -> Optional[AppInjector]: + return self._app_injector + + def __dict__(self): + return { + "tracer": self.tracer, + "logger": self.logger, + "correlation_id": self.correlation_id, + "request_id": self.request_id, + "dev_mode": self.dev_mode, + "partition_id": self.partition_id, + "app_key": self.app_key, + "api_key": self.api_key + } + + def __repr__(self): + return json.dumps(self.__dict__()) + + + +def get_ctx() -> Context: + return Context.current() + 
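The Context class above leans on contextvars for request-scoped state; a stripped-down sketch of the same pattern (simplified names, not the actual class):

    import contextvars

    _current = contextvars.ContextVar("request_context")

    class MiniContext:
        def __init__(self, partition_id=None):
            self.partition_id = partition_id

        def set_current(self):
            _current.set(self)

        @classmethod
        def current(cls):
            return _current.get()  # raises LookupError if nothing was set yet

    MiniContext(partition_id="opendes").set_current()
    print(MiniContext.current().partition_id)  # opendes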
+ +def get_or_create_ctx() -> Context: + """ + This method aims to be used in middleware, where the order of Context creation is not guaranteed + :return an empty Context with default values + """ + try: + return get_ctx() + except LookupError: + ctx = Context() + ctx.set_current() + return ctx + + +class OpenApiResponse(NamedTuple): + status: int + name: str + mimetype: str = 'application/json' + description: str = '' + schema: Optional[dict] = None + example: Optional[dict] = None + +#NOSONAR +class __OpenApiHandler: + def __init__(self): + self._handlers = {} + + def set(self, operation_id: str, *, + request_body: Optional[dict] = None, + schemas: Optional[dict] = None, + responses: Optional[Union[dict, List[OpenApiResponse]]] = None) -> Callable: + handlers = [] + if request_body is not None: + handlers.append(lambda openapi, oid: self._set_request_body(openapi, oid, request_body)) + + if responses is not None: + if isinstance(responses, dict): + handlers.append(lambda openapi, oid: self._append_responses(openapi, oid, responses)) + else: + responses_dict = { + str(r.status): { + 'description': r.description, + 'content': { + r.mimetype: + {'schema': {'$ref': '#/components/schemas/' + r.name}, 'example': r.example} + if r.example else {'schema': {'$ref': '#/components/schemas/' + r.name}} + } + } for r in responses + } + schemas = schemas or {} + schemas.update({r.name: r.schema for r in responses}) + handlers.append(lambda openapi, oid: self._append_responses(openapi, oid, responses_dict)) + + if schemas is not None: + handlers.append(lambda openapi, _: self._append_schemas(openapi, schemas)) + + def decorator(func: Callable) -> Callable: + self._handlers.setdefault(operation_id, []).extend(handlers) + return func + + return decorator + + def __getitem__(self, operation_id: str) -> Optional[List[Callable]]: + return self._handlers.get(operation_id, None) + + def __call__(self, openapi_schema: dict, operation_ids: List[str]): + for operation_id in operation_ids: + if operation_id is not None and operation_id in self._handlers: + for handler in self._handlers[operation_id]: + handler(openapi_schema, operation_id) + + @classmethod + def operation_from_id(cls, openapi_schema: dict, operation_id: str) -> Optional[dict]: + for path, path_node in openapi_schema['paths'].items(): + for method, method_node in path_node.items(): + if method_node.get('operationId', '') == operation_id: + return method_node + return None + + @classmethod + def _set_request_body(cls, openapi_schema: dict, operation_id: str, request_body: dict): + method_node = cls.operation_from_id(openapi_schema, operation_id) + if request_body is not None: + method_node['requestBody'] = request_body + + @classmethod + def _append_responses(cls, openapi_schema: dict, operation_id: str, responses: dict): + method_node = cls.operation_from_id(openapi_schema, operation_id) + method_node.setdefault('responses', {}).update(responses) + + @classmethod + def _append_schemas(cls, openapi_schema: dict, schemas: Union[dict, List[Tuple[str, dict]]]): + openapi_schema['components'].setdefault('schemas', {}).update( + schemas if isinstance(schemas, dict) else {name: schema for name, schema in schemas} + ) + + +OpenApiHandler = __OpenApiHandler() diff --git a/app/wdms_app.py b/app/wdms_app.py new file mode 100644 index 0000000000000000000000000000000000000000..92f6cd8197bfe7c2028e16e6f947904a02b28880 --- /dev/null +++ b/app/wdms_app.py @@ -0,0 +1,151 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from fastapi import FastAPI, Depends +from fastapi.openapi.utils import get_openapi + +from app import __version__, __build_number__, __app_name__ +from app.auth.auth import require_opendes_authorized_user +from app.conf import Config, check_environment +from app.errors.exception_handlers import add_exception_handlers + +from app.helper import traces, logger +from app.injector.app_injector import AppInjector +from app.injector.main_injector import MainInjector +from app.middleware import CreateBasicContextMiddleware, TracingMiddleware +from app.middleware.basic_context_middleware import require_data_partition_id +from app.routers import probes, about +from app.routers.ddms_v2 import ( + ddms_v2, + wellbore_ddms_v2, + logset_ddms_v2, + marker_ddms_v2, + log_ddms_v2, + well_ddms_v2 +) +from app.routers.trajectory import trajectory_ddms_v2 +from app.routers.dipset import dipset_ddms_v2, dip_ddms_v2 +from app.routers.logrecognition import log_recognition +from app.routers.search import search, fast_search +from app.clients import StorageRecordServiceClient, SearchServiceClient +from app.utils import get_http_client_session, OpenApiHandler, get_wdms_temp_dir + +base_app = FastAPI() + + +#The sub application which contains all the routers +wdms_app = FastAPI(title=__app_name__, + description='build ' + __build_number__, + version=__version__, + ) + +app_injector = AppInjector() + +base_app.mount(Config.openapi_prefix.value, wdms_app) + + +def custom_openapi(*args, **kwargs): + if wdms_app.openapi_schema: + return wdms_app.openapi_schema + openapi_schema = get_openapi( + title=wdms_app.title, + version=wdms_app.version, + description=wdms_app.description, + routes=wdms_app.routes, + servers=wdms_app.servers + ) + + routes_in_schemas = [route for route in wdms_app.routes if getattr(route, 'include_in_schema', True)] + OpenApiHandler(openapi_schema, [getattr(route, 'operation_id', None) for route in routes_in_schemas]) + + wdms_app.openapi_schema = openapi_schema + return wdms_app.openapi_schema + + +wdms_app.openapi = custom_openapi + + +def hide_router_modules(modules): + for mod in modules: + for rte in mod.router.routes: + rte.include_in_schema = False + + +@base_app.on_event("startup") +async def startup_event(): + service_name = Config.service_name.value + + logger.init_logger(service_name=service_name) + check_environment(Config) + print('using temporary directory:', get_wdms_temp_dir()) + MainInjector().configure(app_injector) + wdms_app.trace_exporter = traces.create_exporter(service_name=service_name) + + +@base_app.on_event('shutdown') +async def shutdown_event(): + # clients close + storage_client = await app_injector.get(StorageRecordServiceClient) + if storage_client is not None: + await storage_client.api_client.close() + + search_client = await app_injector.get(SearchServiceClient) + if search_client is not None: + await storage_client.api_client.close() + + await get_http_client_session().close() + +wellbore_api_group_prefix = '/ddms/v2' + +wdms_app.include_router(probes.router) 
+wdms_app.include_router(about.router, prefix=wellbore_api_group_prefix) + +ddms_v2_routes_groups = [ + (ddms_v2, "Wellbore DDMS"), + (well_ddms_v2, "Well"), + (wellbore_ddms_v2, "Wellbore"), + (logset_ddms_v2, "Logset"), + (trajectory_ddms_v2, "Trajectory"), + (marker_ddms_v2, "Marker"), + (log_ddms_v2, "Log"), + (dipset_ddms_v2, "Dipset"), + (dip_ddms_v2, "Dips"), +] +for ddms_v2_routes_group in ddms_v2_routes_groups: + wdms_app.include_router(ddms_v2_routes_group[0].router, + prefix=wellbore_api_group_prefix, + tags=[ddms_v2_routes_group[1]], + dependencies=[ + Depends(require_opendes_authorized_user, use_cache=False), + Depends(require_data_partition_id, use_cache=False) + ]) + +wdms_app.include_router(search.router, prefix='/ddms', tags=['search'], dependencies=[ + Depends(require_data_partition_id, use_cache=False), + Depends(require_opendes_authorized_user, use_cache=False) +]) +wdms_app.include_router(fast_search.router, prefix='/ddms', tags=['fast-search'], dependencies=[ + Depends(require_data_partition_id, use_cache=False), + Depends(require_opendes_authorized_user, use_cache=False)]) + +wdms_app.include_router(log_recognition.router, prefix='/log-recognition', tags=['log-recognition'], dependencies=[ + Depends(require_data_partition_id, use_cache=False), + Depends(require_opendes_authorized_user, use_cache=False)]) + +# order is last executed first +wdms_app.add_middleware(TracingMiddleware) +wdms_app.add_middleware(CreateBasicContextMiddleware, injector=app_injector) + +# adding exception handling +add_exception_handlers(wdms_app) diff --git a/build/Dockerfile b/build/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..184b698593c25f5c0bbfbae76ead31ec8848c96a --- /dev/null +++ b/build/Dockerfile @@ -0,0 +1,43 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
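A condensed, self-contained FastAPI sketch (illustrative names only) of the wiring pattern used above: a sub-application mounted under a prefix, and routers included with shared dependencies:

    from fastapi import APIRouter, Depends, FastAPI, Header

    async def require_partition(data_partition_id: str = Header(...)):
        return data_partition_id

    router = APIRouter()

    @router.get("/about")
    async def about():
        return {"service": "wellbore-ddms"}

    inner = FastAPI(title="wellbore-ddms")
    # every route in the router gets the header dependency applied
    inner.include_router(router, prefix="/ddms/v2", dependencies=[Depends(require_partition)])

    outer = FastAPI()
    outer.mount("/api/wdms", inner)  # inner routes are served under this prefix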
+ +FROM python:3.7-slim-buster + +RUN apt-get update && apt-get install -y git +COPY requirements.txt requirements_dev.txt ./ + +RUN pip install -r requirements.txt +RUN pip install -r requirements_dev.txt + +COPY ./app /app +ENV PYTHONPATH=./ + +# record some detail of the build, must be passed as --build-arg +ARG build_date +ARG build_number +ARG build_origin="Personal build" +ARG commit_id +ARG commit_branch +ENV OS_WELLBORE_DDMS_BUILD_DETAILS build_date=$build_date;build_number=$build_number;build_origin=$build_origin;commit_id=$commit_id;commit_branch=$commit_branch + + +EXPOSE 8097 +WORKDIR ./ + +# Make the container run as non-root user +#(https://medium.com/better-programming/running-a-container-with-a-non-root-user-e35830d1f42a) +RUN addgroup --system appuser && adduser --system appuser && adduser appuser appuser +USER appuser + +CMD ["uvicorn", "app.wdms_app:wdms_app", "--host", "0.0.0.0", "--port", "8097"] diff --git a/build/set_version.py b/build/set_version.py new file mode 100644 index 0000000000000000000000000000000000000000..463a2aab615f66e2b951e29b41094cf133b0dc08 --- /dev/null +++ b/build/set_version.py @@ -0,0 +1,69 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import re +import shutil +import sys + +FILE_TO_UPDATE = 'app/__init__.py' +VERSION_FIELD_NAME = '__version__' +BUILD_NUMBER_FIELD_NAME = '__build_number__' + +FIELDS_TO_CAPTURE = [VERSION_FIELD_NAME, BUILD_NUMBER_FIELD_NAME] + +# capture KEY = basic string +regexp_key_value_capture = re.compile( + "^[\\s]*(" + '|'.join(FIELDS_TO_CAPTURE) + ")[\\s]*=[\\s]*[('\\\")](.+)[('\\\")].*" +) +regexp_key_capture = re.compile( + "^[\\s]*(" + '|'.join(FIELDS_TO_CAPTURE) + ")[\\s]*=.*" +) + + +def set_version(file_name: str, *, build_number: str, patch_number: str): + initial_value = {k: None for k in FIELDS_TO_CAPTURE} + lines = ['# updated by script -------------------\n', '\n'] + + # capture + with open(file_name) as f: + for line in f: + lines.append(line) + m = regexp_key_value_capture.match(line) + if m is not None and len(m.groups()) == 2: + key, value = m.group(1, 2) + initial_value[key] = value + + # backup + shutil.copyfile(file_name, file_name + '.bck') + + new_values = { + VERSION_FIELD_NAME: '"' + initial_value[VERSION_FIELD_NAME] + f'.{patch_number or "0000"}' + '"', + BUILD_NUMBER_FIELD_NAME: f'"{build_number or "unknown"}"', + } + + with open(file_name, 'w') as f: + for line in lines: + m = regexp_key_capture.match(line) + if m is not None: + key = m.group(1) + if key in new_values: + f.write('# was: ' + line) + line = f'{key} = {new_values[key]}\n\n' + f.write(line) # other line kept untouched + + +if __name__ == "__main__": + kwargs = {arg.split('=')[0]: arg.split('=')[1] for arg in sys.argv[1:]} + assert 'build_number' in kwargs and 'patch_number' in kwargs, 'build_number and patch_number must be defined' + set_version(FILE_TO_UPDATE, build_number=kwargs['build_number'], patch_number=kwargs['patch_number']) diff --git a/devops/azure/chart/.helmignore 
b/devops/azure/chart/.helmignore new file mode 100644 index 0000000000000000000000000000000000000000..44d09546648f064793b09ebb6fc712ab27208416 --- /dev/null +++ b/devops/azure/chart/.helmignore @@ -0,0 +1,24 @@ +# Patterns to ignore when building packages. +# This supports shell glob matching, relative path matching, and +# negation (prefixed with !). Only one pattern per line. +.DS_Store +# Common VCS dirs +.git/ +.gitignore +.bzr/ +.bzrignore +.hg/ +.hgignore +.svn/ +# Common backup files +*.swp +*.bak +*.tmp +*.orig +*~ +# Various IDEs +.project +.idea/ +*.tmproj +.vscode/ +deploy-chart.sh diff --git a/devops/azure/chart/Chart.yaml b/devops/azure/chart/Chart.yaml new file mode 100644 index 0000000000000000000000000000000000000000..9ff7d77557614dc1ec940f385dbf790444c408a1 --- /dev/null +++ b/devops/azure/chart/Chart.yaml @@ -0,0 +1,32 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: v2 +name: wdms +description: OSDU Wellbore DDMS Service + +# A chart can be either an 'application' or a 'library' chart. +# +# Application charts are a collection of templates that can be packaged into versioned archives +# to be deployed. +# +# Library charts provide useful utilities or functions for the chart developer. They're included as +# a dependency of application charts to inject those utilities and functions into the rendering +# pipeline. Library charts do not define any templates and therefore cannot be deployed. +type: application + +# This is the chart version. This version number should be incremented each time you make changes +# to the chart and its templates, including the app version. +# Versions are expected to follow Semantic Versioning (https://semver.org/) +version: 0.1.0 diff --git a/devops/azure/chart/templates/_helpers.tpl b/devops/azure/chart/templates/_helpers.tpl new file mode 100644 index 0000000000000000000000000000000000000000..0d242dae75d10234a19464aad8f8c1c9cf50ceff --- /dev/null +++ b/devops/azure/chart/templates/_helpers.tpl @@ -0,0 +1,54 @@ + +{{/* +Common Annotations +*/}} +{{- define "os-wellbore-ddms.commonAnnotations" -}} +build-number: {{ .Values.annotations.buildNumber | quote }} +build-origin: {{ .Values.annotations.buildOrigin | quote }} +commit-branch: {{ .Values.annotations.commitBranch | quote }} +commit-id: {{ .Values.annotations.commitId | quote }} +{{- end}} + +{{/* +Common Labels +*/}} +{{- define "os-wellbore-ddms.commonLabels" -}} +app: os-wellbore-ddms{{ include "os-wellbore-ddms.name-suffix" . }} +env: {{ .Values.labels.env }} +{{ include "os-wellbore-ddms.deploymentTypeLabels" . }} +{{- end }} + +{{/* + Creates a dynamic set of labels based on if the deployment is a temp Deployment or not. +*/}} +{{- define "os-wellbore-ddms.deploymentTypeLabels" -}} +{{- if .Values.tempDeployment.enabled -}} +temporary-deployment: "{{ .Values.tempDeployment.name }}" +deployment-type: temporary +{{- else }} +deployment-type: standard +{{- end }} +{{- end }} + +{{/* + Renders the namespace. 
+*/}} +{{- define "os-wellbore-ddms.namespace" -}} +namespace: {{.Values.namespace}} +{{- end }} + +{{/* + Renders the pathPrefix and suffix if there is any +*/}} +{{- define "os-wellbore-ddms.prefix" -}} +{{ .Values.ingress.hosts.pathPrefix }}{{ include "os-wellbore-ddms.name-suffix" . }} +{{- end }} + +{{/* + Creates a string suffix if the deployment is marked as temporary. +*/}} +{{- define "os-wellbore-ddms.name-suffix" -}} +{{- if .Values.tempDeployment.enabled -}} +{{- printf "---%s" .Values.tempDeployment.name -}} +{{- end -}} +{{- end -}} \ No newline at end of file diff --git a/devops/azure/chart/templates/authorization.yaml b/devops/azure/chart/templates/authorization.yaml new file mode 100644 index 0000000000000000000000000000000000000000..077d203a4ba5aad946abb88c10b5cc5a3a9a98b5 --- /dev/null +++ b/devops/azure/chart/templates/authorization.yaml @@ -0,0 +1,40 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: security.istio.io/v1beta1 +kind: AuthorizationPolicy +metadata: + annotations: +{{ include "os-wellbore-ddms.commonAnnotations" . | indent 4}} + labels: +{{ include "os-wellbore-ddms.commonLabels" . | indent 4}} + name: {{ .Values.authorizationPolicy.name }}{{ include "os-wellbore-ddms.name-suffix" . }} +{{ include "os-wellbore-ddms.namespace" . | indent 2}} +spec: + action: DENY + rules: + - from: + - source: + notRequestPrincipals: + - '*' + to: + - operation: + notPaths: + - {{ include "os-wellbore-ddms.prefix" . }}/ + - {{ include "os-wellbore-ddms.prefix" . }}/ddms/v2/about + - {{ include "os-wellbore-ddms.prefix" . }}/docs + - {{ include "os-wellbore-ddms.prefix" . }}/openapi.json + selector: + matchLabels: +{{ include "os-wellbore-ddms.commonLabels" . | indent 6}} diff --git a/devops/azure/chart/templates/configmap.yaml b/devops/azure/chart/templates/configmap.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6c78dd47d1149adf8001dd214766504e6cf7aa04 --- /dev/null +++ b/devops/azure/chart/templates/configmap.yaml @@ -0,0 +1,33 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +{{$nameSuffix := include "os-wellbore-ddms.name-suffix" .}} +apiVersion: v1 +data: + CLOUD_PROVIDER: az + OPENAPI_PREFIX: {{ include "os-wellbore-ddms.prefix" . 
}} + SERVICE_HOST_ENTITLEMENTS: {{ .Values.configMap.data.entitlementsEndpoint }} + SERVICE_HOST_STORAGE: {{ .Values.configMap.data.storageEndpoint }} + SERVICE_HOST_SEARCH: {{ .Values.configMap.data.searchEndpoint }} + SERVICE_HOST_PARTITION: {{ .Values.configMap.data.partitionEndpoint }} + USE_PARTITION_SERVICE: {{ .Values.configMap.data.usePartitionService }} + AZ_LOGGER_LEVEL: {{ .Values.configMap.data.loggerLevel }} +kind: ConfigMap +metadata: + annotations: +{{ include "os-wellbore-ddms.commonAnnotations" . | indent 4}} + labels: +{{ include "os-wellbore-ddms.commonLabels" . | indent 4}} + name: {{ .Values.configMap.name }}{{ $nameSuffix }} +{{ include "os-wellbore-ddms.namespace" . | indent 2}} \ No newline at end of file diff --git a/devops/azure/chart/templates/deployment.yaml b/devops/azure/chart/templates/deployment.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5a89c6b9dfeae4283893906eb2a208bd17959f02 --- /dev/null +++ b/devops/azure/chart/templates/deployment.yaml @@ -0,0 +1,101 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +{{$nameSuffix := include "os-wellbore-ddms.name-suffix" .}} +apiVersion: apps/v1 +kind: Deployment +metadata: + annotations: +{{ include "os-wellbore-ddms.commonAnnotations" . | indent 4}} + labels: +{{ include "os-wellbore-ddms.commonLabels" . | indent 4}} + name: {{ .Values.deployment.name }}{{ $nameSuffix }} +{{ include "os-wellbore-ddms.namespace" . | indent 2}} +spec: + replicas: {{ .Values.replicaCount }} + selector: + matchLabels: +{{ include "os-wellbore-ddms.commonLabels" . | indent 6}} + template: + metadata: + annotations: +{{ include "os-wellbore-ddms.commonAnnotations" . | indent 8}} + labels: + aadpodidbinding: "{{ .Values.labels.aadpodidbinding }}" +{{ include "os-wellbore-ddms.commonLabels" . | indent 8}} + spec: + volumes: + # Note: + # This volume is required for pod-identity access mode (https://github.com/Azure/secrets-store-csi-driver-provider-azure/blob/master/docs/pod-identity-mode.md) + # Even if not directly used by the application, it is still required indirectly to allow the secretProviderClass to sync the secret, because the secret is only created as a side effect of workload mounting its content. + # + # Some more explanation: + # https://github.com/Azure/secrets-store-csi-driver-provider-azure/issues/132#issuecomment-646349209 + # "It's not possible to only sync as a kubernetes secret and not have the contents mounted as files in the pod. The sync as K8s secret is done by getting the contents from the fs." 
+ - name: azure-keyvault + csi: + driver: secrets-store.csi.k8s.io + readOnly: true + volumeAttributes: + secretProviderClass: "azure-keyvault" + containers: + - name: {{ .Values.deployment.name }} + image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}" + ports: + - containerPort: 8080 + # This preStop hook has been added as a temporary workaround to minimize downtime during deployments until this limitation is addressed at the AGIC level + lifecycle: + preStop: + exec: + command: ["sleep", "15"] + envFrom: + - configMapRef: + name: {{ .Values.configMap.name }}{{ $nameSuffix }} + env: + - name: AZ_AI_INSTRUMENTATION_KEY + valueFrom: + secretKeyRef: + name: {{ .Values.deployment.instrumentationKey.secretName }} + key: appinsights + - name: KEYVAULT_URL + valueFrom: + configMapKeyRef: + name: {{ .Values.deployment.osduSvcProperties }} + key: {{ .Values.deployment.keyvaultUrlPropertyName }} + - name: SERVICE_NAME + valueFrom: + fieldRef: + apiVersion: v1 + fieldPath: metadata.labels['app'] + volumeMounts: + # This mount is not used by the application but is required for the secretProviderClass + # See more comment on the Volume definion + - mountPath: /azure-keyvault + name: azure-keyvault + readOnly: true + livenessProbe: + httpGet: + path: {{ include "os-wellbore-ddms.prefix" . }}/healthz + port: 8080 + initialDelaySeconds: 15 + periodSeconds: 5 + timeoutSeconds: 1 + failureThreshold: 3 + readinessProbe: + httpGet: + path: {{ include "os-wellbore-ddms.prefix" . }}/healthz + port: 8080 + initialDelaySeconds: 5 + timeoutSeconds: 5 + terminationGracePeriodSeconds: 45 \ No newline at end of file diff --git a/devops/azure/chart/templates/ingress.yaml b/devops/azure/chart/templates/ingress.yaml new file mode 100644 index 0000000000000000000000000000000000000000..448a03e8b28c30ac1850ede1871f2b108b9d26e2 --- /dev/null +++ b/devops/azure/chart/templates/ingress.yaml @@ -0,0 +1,52 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +{{- if .Values.ingress.enabled -}} +{{$nameSuffix := include "os-wellbore-ddms.name-suffix" .}} +apiVersion: networking.k8s.io/v1beta1 +kind: Ingress +metadata: + annotations: +{{ include "os-wellbore-ddms.commonAnnotations" . | indent 4}} + appgw.ingress.kubernetes.io/ssl-redirect: "true" + appgw.ingress.kubernetes.io/connection-draining: "true" + appgw.ingress.kubernetes.io/connection-draining-timeout: "30" + kubernetes.io/ingress.class: azure/application-gateway +{{ if .Values.ingress.hosts.host }} + cert-manager.io/acme-challenge-type: http01 + cert-manager.io/cluster-issuer: letsencrypt +{{ end }} + + labels: +{{ include "os-wellbore-ddms.commonLabels" . | indent 4}} + name: {{ .Values.deployment.name }}{{ $nameSuffix }} +{{ include "os-wellbore-ddms.namespace" . | indent 2}} +spec: + rules: + - http: + paths: + - backend: + serviceName: {{ .Values.deployment.name }}{{ $nameSuffix }} + servicePort: 80 + path: {{ include "os-wellbore-ddms.prefix" . 
}}/* +{{ if .Values.ingress.hosts.host }} + host: {{ .Values.ingress.hosts.host }} +{{ end }} + tls: + - secretName: {{ .Values.ingress.tlsSecret }} +{{ if .Values.ingress.hosts.host }} + hosts: + - {{ .Values.ingress.hosts.host }} +{{ end }} +{{- end -}} \ No newline at end of file diff --git a/devops/azure/chart/templates/service.yaml b/devops/azure/chart/templates/service.yaml new file mode 100644 index 0000000000000000000000000000000000000000..8ddd57015bc1b1fc147c8e1ac071abf8e72d78b7 --- /dev/null +++ b/devops/azure/chart/templates/service.yaml @@ -0,0 +1,30 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +apiVersion: v1 +kind: Service +metadata: + annotations: +{{ include "os-wellbore-ddms.commonAnnotations" . | indent 4}} + labels: +{{ include "os-wellbore-ddms.commonLabels" . | indent 4}} + name: {{ .Values.deployment.name }}{{ include "os-wellbore-ddms.name-suffix" . }} +{{ include "os-wellbore-ddms.namespace" . | indent 2}} +spec: + ports: + - name: http + port: 80 + targetPort: 8080 + selector: +{{ include "os-wellbore-ddms.commonLabels" . | indent 4}} \ No newline at end of file diff --git a/devops/azure/chart/values.yaml b/devops/azure/chart/values.yaml new file mode 100644 index 0000000000000000000000000000000000000000..094aa78f98c258a3e7d7ec2af38467190fed9943 --- /dev/null +++ b/devops/azure/chart/values.yaml @@ -0,0 +1,66 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Default values for os-wellbore-ddms. +# This is a YAML-formatted file. +# Declare variables to be passed into your templates. + +namespace: osdu +deployment: + instrumentationKey: + secretName: central-logging + osduSvcProperties: osdu-svc-properties + keyvaultUrlPropertyName: ENV_KEYVAULT + name: os-wellbore-ddms + +replicaCount: 2 + +annotations: + buildNumber: #{Build.BuildNumber}# + buildOrigin: AzureDevops build/#{Build.DefinitionName}# + commitBranch: #{Build.SourceBranch}# + commitId: #{Build.SourceVersion}# + +image: + repository: #{CONTAINER_REGISTRY_NAME}#.azurecr.io/#{app}#-#{env}# + # Overrides the image tag whose default is the chart appVersion. 
+ tag: #{Build.SourceVersion}# + +tempDeployment: + enabled: false + name: prxxxx + +labels: + aadpodidbinding: osdu-identity + env: #{env}# + +configMap: + data: + entitlementsEndpoint: http://entitlements-azure.osdu/api/entitlements + storageEndpoint: http://storage.osdu/api/storage + searchEndpoint: http://search-service.osdu/api/search + partitionEndpoint: http://partition.osdu/api/partition + usePartitionService: 'enabled' + loggerLevel: 'INFO' + name: os-wellbore-ddms-envs + +ingress: + enabled: false + hosts: + host: # leave it empty for our dev cluster + pathPrefix: /api/os-wellbore-ddms + tlsSecret: osdu-certificate + +authorizationPolicy: + name: wellbore-jwt-authz \ No newline at end of file diff --git a/devops/azure/template/build-stage.yaml b/devops/azure/template/build-stage.yaml new file mode 100644 index 0000000000000000000000000000000000000000..c47cc4ec8503717ac437f8386d409991a9c77e2c --- /dev/null +++ b/devops/azure/template/build-stage.yaml @@ -0,0 +1,61 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +stages: + - stage: build + jobs: + - job: unit_test_version3_7 + steps: + - template: steps-unit-tests.yaml + parameters: + python_version: '3.7' + - job: unit_test_version3_8 + steps: + - template: steps-unit-tests.yaml + parameters: + python_version: '3.8' + include_coverage: false + - job: build + dependsOn: + - unit_test_version3_7 + - unit_test_version3_8 + steps: + - template: steps-build.yaml + parameters: + acr_names: + - ${{ variables.global_acr_name }} + - job: setupVariables + steps: + - task: Bash@3 + displayName: 'setup basePath' + name: 'setupBasePath' + env: + build_reason: $(Build.Reason) + pull_request_id: $(System.PullRequest.PullRequestId) + inputs: + targetType: 'inline' + script: | + set -e + + echo "build_reason: $build_reason" + echo "pull_request_id: $pull_request_id" + + basePath=/api/os-wellbore-ddms + if [ "${build_reason}" == "PullRequest" ]; then + echo "Pull Request. Changing BasePath" + basePath=$basePath---pr${pull_request_id} + fi + echo "setting: basePath=$basePath" + echo "##vso[task.setvariable variable=basePath;isOutput=true]${basePath}" \ No newline at end of file diff --git a/devops/azure/template/deploy-stages.yml b/devops/azure/template/deploy-stages.yml new file mode 100644 index 0000000000000000000000000000000000000000..0dade9e6268f587d1589dc98ab3ad201681123ab --- /dev/null +++ b/devops/azure/template/deploy-stages.yml @@ -0,0 +1,83 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +parameters: + - name: providers + type: object + default: [] + +stages: + +- ${{ each provider in parameters.providers }}: + - ${{ each environment in provider.environments }}: + - stage: 'Deploy_${{ provider.name }}_R3MVP_${{ environment }}' + variables: + - group: 'R3MVP - ${{ provider.name }} Target Env - ${{ environment }}' + - template: 'vars/env_vars.yml' + - name: env + value: ${{ environment }} + condition: "and(succeeded(), eq(variables['build.sourceBranch'], 'refs/heads/master'))" + jobs: + - deployment: Deploy + pool: + vmImage: "ubuntu-latest" + environment: ${{ environment }} + strategy: + runOnce: + deploy: + steps: + - template: steps-define-variables.yaml + - template: steps-push-image.yml + parameters: + azureSubscription: $(SERVICE_CONNECTION_NAME) + sourceACRName: $(us_acr_name) + destinationACRName: $(CONTAINER_REGISTRY_NAME) + environment: ${{ environment }} + imageTag: $(defineVariables.tag_name_output) + appName: $(app) + - template: devops/tasks/aks-deployment-steps.yml@pipelineTemplatesRepo + parameters: + environment: ${{ environment }} + serviceName: $(app) + providerName: ${{ provider.short_name }} + chartPath: $(chartPath) + valuesFile: $(valuesFile) + hldRegPath: $(hldRegPath) + - template: devops/tasks/flux-service-wait.yml@pipelineTemplatesRepo + parameters: + environment: ${{ environment }} + serviceName: $(fluxServiceName) + imageRepoName: $(app) + skipDeploy: false + + - stage: Test_${{ provider.name }}_R3MVP_${{ environment }} + dependsOn: + - Deploy_${{ provider.name }}_R3MVP_${{ environment }} + variables: + - group: 'R3MVP - ${{ provider.name }} Target Env - ${{ environment }}' + - template: 'vars/env_vars.yml' + - template: 'vars/global_vars.yml' + condition: "and(succeeded(), eq(variables['build.sourceBranch'], 'refs/heads/master'))" + jobs: + - job: e2e_test + steps: + - template: steps-e2e-tests.yaml + parameters: + baseUrl: "https://$(DNS_HOST)" + basePath: ${{ variables.base_path }} + aclDomain: ${{ variables.acl_domain }} + legalTag: ${{ variables.legal_tag }} + dataPartition: ${{ variables.data_partition }} + cloudProvider: ${{ provider.short_name }} + testServiceAccount: TO_BE_DEFINED diff --git a/devops/azure/template/steps-build.yaml b/devops/azure/template/steps-build.yaml new file mode 100644 index 0000000000000000000000000000000000000000..2609b84125c1ae6e67d47d33c5ee76888f542e0f --- /dev/null +++ b/devops/azure/template/steps-build.yaml @@ -0,0 +1,90 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +parameters: +- name: acr_names + type: object #List of string + +steps: + + +- template: steps-define-variables.yaml + +- task: Bash@3 + displayName: 'set version info' + inputs: + targetType: 'inline' + script: | + build_number=$(Build.BuildNumber) + patch_number=${build_number:0:8} + python ./build/set_version.py build_number=$build_number patch_number=$patch_number + +- task: Bash@3 + displayName: 'prepare artifacts' + inputs: + targetType: 'inline' + script: | + echo ----- GET IMAGE TAG ------------= + echo $(tag_name) + + echo ----- PUBLISH ARTIFACTS ------------ + echo $(tag_name)> $(Build.ArtifactStagingDirectory)/image_tag.txt + directories_to_copy=(deploy scripts tests) + for directory_to_copy in ${directories_to_copy[@]}; do + echo copying $directory_to_copy/ + mkdir $(Build.ArtifactStagingDirectory)/$directory_to_copy + cp -R $directory_to_copy/* $(Build.ArtifactStagingDirectory)/$directory_to_copy/ + done + +- task: Bash@3 + displayName: 'build image' + inputs: + targetType: 'inline' + script: | + current_utc_date=`date --utc` + echo $current_utc_date + + echo ----- BUILD IMAGE ------------ + docker build -t=wdms-osdu:$(tag_name) \ + -t=wdms-osdu:latest \ + --rm . -f ./build/Dockerfile \ + --build-arg PIP_EXTRA_URL=$PIP_EXTRA_INDEX_URL \ + --build-arg PIP_WHEEL_DIR=python-packages \ + --build-arg build_date="$current_utc_date" \ + --build-arg build_number="$(Build.BuildNumber)" \ + --build-arg commit_id=$(commit_id) \ + --build-arg build_origin="AzureDevops build/$(Build.DefinitionName)" \ + --build-arg commit_branch=$(Build.SourceBranch) + + +# Current implementation pushes the images to a couple of 'central' ACR. +# They are different from the ACR mapped to a given environment +# The steps to publish to the environment specific ACR happens in a deploy stage, pulling the image from one those central ACR first. +# +# This could eventually be changed to an other apprach: +# - Push the built image as a ADO artifact, which will be downlaoded during the deploy stage +# - Or rebuild the image during the deploy stage +- ${{ each acr_name in parameters.acr_names }}: + - task: AzureCLI@2 + displayName: 'push image to acr "${{ acr_name }}""' + inputs: + azureSubscription: TO_BE_DEFINED + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + set -e + az acr login --name ${{ acr_name }} + docker tag wdms-osdu:$(tag_name) ${{ acr_name }}.azurecr.io/os-wellbore-ddms:$(tag_name) + docker push ${{ acr_name }}.azurecr.io/os-wellbore-ddms:$(tag_name) + diff --git a/devops/azure/template/steps-define-variables.yaml b/devops/azure/template/steps-define-variables.yaml new file mode 100644 index 0000000000000000000000000000000000000000..67baede38561fcbfb224f3e88abc4bfe5725fcef --- /dev/null +++ b/devops/azure/template/steps-define-variables.yaml @@ -0,0 +1,28 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +steps: + +- task: Bash@3 + displayName: 'define variables' + name: 'defineVariables' + inputs: + targetType: 'inline' + script: | + source_full_version=$(Build.SourceVersion) + commit_id=${source_full_version:0:7} + echo "##vso[task.setvariable variable=commit_id;]${commit_id}" + tag_name=$commit_id + echo "##vso[task.setvariable variable=tag_name]${tag_name}" + echo "##vso[task.setvariable variable=tag_name_output;isOutput=true]${tag_name}" diff --git a/devops/azure/template/steps-e2e-tests.yaml b/devops/azure/template/steps-e2e-tests.yaml new file mode 100644 index 0000000000000000000000000000000000000000..403286f487960c3fc2bdf91cbea339c5f251fc9e --- /dev/null +++ b/devops/azure/template/steps-e2e-tests.yaml @@ -0,0 +1,190 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +parameters: + - name: testSA + type: string + default: "" #Needed only for Google Test SA + - name: deAudience + type: string + default: "" #Needed only for Google Test SA + - name: baseUrl + type: string + - name: osduBaseUrl + type: string + default: "" # if empty will use baseUrl instead + - name: basePath + type: string + - name: cloudProvider + type: string + - name: aclDomain + type: string + - name: legalTag + type: string + - name: dataPartition + type: string + - name: testServiceAccount + type: object + - name: skipCertValidation + # Used in case of invalid certificate (self-signed,...) + type: boolean + default: false + +steps: + +- bash: | + set -e + + echo "parameters.skipCertValidation: ${{ parameters.skipCertValidation }}" + echo "parameters.baseUrl: ${{ parameters.baseUrl }}" + echo "parameters.osduBaseUrl: ${{ parameters.osduBaseUrl }}" + echo "parameters.basePath: ${{ parameters.basePath }}" + echo "parameters.cloudProvider: ${{ parameters.cloudProvider }}" + echo "parameters.aclDomain: ${{ parameters.aclDomain }}" + echo "parameters.legalTag: ${{ parameters.legalTag }}" + echo "parameters.dataPartition: ${{ parameters.dataPartition }}" + echo "parameters.testServiceAccount.type: ${{ parameters.testServiceAccount.type }}" + displayName: 'Display Info' + +- task: Bash@3 + displayName: 'testReportBaseName' + inputs: + targetType: 'inline' + script: | + prefix=$(System.StageName)#$(System.JobAttempt) + echo "##vso[task.setvariable variable=testReportBaseName]${prefix}" + +- task: UsePythonVersion@0 + inputs: + versionSpec: '3.7' + addToPath: true + + + +############# +# TO_BE_DEFINED +#------------ +# In the current version, here should be a couple of tasks which goal is to generate a token to be used for integration tests. +# the 'testServiceAccount' parameter could be used to pass information on how to get the token (reference to secret, type of Identity Provider...) +# +# the token is to be added as a secure output variable: +# echo "##vso[task.setvariable variable=token;issecret=true]${token}" +# +# The following section for a GCP Service Account Token is given as example. 
+# A better approach could eventually be considered such as: +# - pass the token as parameter, and delegate to a higher task the responsibility to generate the token depending on the Service Account Type +# - use some kind of injection mechanism to include a template specific to the service account type +# +########### + +### GCP Service Account based token +- ${{ if eq(parameters.testServiceAccount.type, 'gcp') }}: + - task: DownloadSecureFile@1 + displayName: 'Download secret File : ${{parameters.testServiceAccount.secretFile}}' + name: secretFile + inputs: + secureFile: ${{ parameters.testServiceAccount.secretFile }} + + - bash: | + set -e + gcloud auth activate-service-account --key-file $(secretFile.secureFilePath) + displayName: 'SETUP - Authenticate for GCP' + + - task: Bash@3 + displayName: 'Generate Token for GCP' + inputs: + targetType: 'inline' + script: | + token=$(gcloud auth print-identity-token --audiences=${{ parameters.deAudience }}) + echo "##vso[task.setvariable variable=token;issecret=true]${token}" +### End of GCP Service Account based token + + +- bash: | + set -e + + insecure_flag="" + checkCert="True" + + # Depending on the value of 'skipCertValidation' we add the '--insecure' option to newman + # and update checkCert for pytest + skipCertValidation="${{ lower(parameters.skipCertValidation) }}" + if $skipCertValidation + then + echo "Adding --insecure flag to newman to disable cert validation." + insecure_flag="--insecure" + checkCert="" + fi + + echo "##vso[task.setvariable variable=insecure_flag]${insecure_flag}" + echo "##vso[task.setvariable variable=checkCert]${checkCert}" + + displayName: 'Create test variables' + +- bash: | + set -e + + osduBaseUrl=${{ parameters.osduBaseUrl }} + if [ -z "$osduBaseUrl" ] + then + osduBaseUrl=${{ parameters.baseUrl }} + fi + + python tests/integration/gen_postman_env.py --token $(token) --base_url ${osduBaseUrl} --cloud_provider ${{ parameters.cloudProvider }} --acl_domain ${{ parameters.aclDomain }} --legal_tag ${{ parameters.legalTag }} --data_partition ${{ parameters.dataPartition }} + + newman run tests/dependencies/core_dependencies_test.postman_collection.json -e generated/postman_environment.json -r junit,cli --reporter-junit-export dependencies_tests_report_$(testReportBaseName).xml --timeout-request 10000 $(insecure_flag) + + displayName: 'Check healthiness of Core services' + condition: ne('${{ parameters.cloudProvider }}', 'gcp') + continueOnError: true + +- bash: | + set -e + + appUrl=${{ parameters.baseUrl }}${{ parameters.basePath }} + echo "Testing App on ${appUrl}" + + python tests/integration/gen_postman_env.py --token $(token) --base_url ${appUrl} --cloud_provider ${{ parameters.cloudProvider }} --acl_domain ${{ parameters.aclDomain }} --legal_tag ${{ parameters.legalTag }} --data_partition ${{ parameters.dataPartition }} + + pip install virtualenv + virtualenv venv37 + source venv37/bin/activate + pip install -r ./requirements_dev.txt + + pytest tests/integration/functional/ --junit-xml=$(testReportBaseName)_int_tests_report.xml -o junit_suite_name="wdms_integration_$(testReportBaseName)" --environment=generated/postman_environment.json $(insecure_flag) --retry-on-error=502 + deactivate + displayName: 'RUN INTEGRATION-E2E' + +- bash: | + set -e + + pip install virtualenv + virtualenv venv37 + source venv37/bin/activate + pip install -r ./requirements_dev.txt + + appUrl=${{ parameters.baseUrl }}${{ parameters.basePath }} + echo "Testing App on ${appUrl}" + + pytest tests/integration/security/test_auth.py 
--base_url ${appUrl} --check_cert "$(checkCert)" --token="$(token)" --junit-xml=$(testReportBaseName)_auth_tests_report.xml -o junit_suite_name="wdms_auth_$(testReportBaseName)" + + deactivate + displayName: 'RUN INTEGRATION-AUTH' + + +- task: PublishTestResults@2 + inputs: + testResultsFormat: 'JUnit' + testResultsFiles: '**/*tests_report*.xml' + condition: always() diff --git a/devops/azure/template/steps-push-image.yml b/devops/azure/template/steps-push-image.yml new file mode 100644 index 0000000000000000000000000000000000000000..6e1a2c15df6c1f914468cd7a86c9f57cad0fc4d5 --- /dev/null +++ b/devops/azure/template/steps-push-image.yml @@ -0,0 +1,56 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +parameters: + azureSubscription: '' + sourceACRName: '' + destinationACRName: '' + environment: '' + imageTag: '' + appName: '' + +steps: + +# Current implementation pulls the Image from a 'central' ACR first. +# It is an ACR to which the image was pushed during the build stage +# +# This could eventually be changed to an other apprach: +# - Push the built image as a ADO artifact, which will be downlaoded during the deploy stage +# - Or rebuild the image during the deploy stage + - task: AzureCLI@2 + displayName: "Pull from WDMS Core ACR" + inputs: + azureSubscription: TO_BE_DEFINED + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + echo "az acr login -n ${{ parameters.sourceACRName }}" + az acr login -n ${{ parameters.sourceACRName }} + echo "docker pull ${{ parameters.sourceACRName}}.azurecr.io/os-wellbore-ddms:${{ parameters.imageTag }}" + docker pull ${{ parameters.sourceACRName}}.azurecr.io/os-wellbore-ddms:${{ parameters.imageTag }} + + - task: AzureCLI@2 + displayName: "Push to R3 MVP ${{ parameters.environment }} ACR" + inputs: + azureSubscription: ${{ parameters.azureSubscription }} + scriptType: bash + scriptLocation: inlineScript + inlineScript: | + set -e + echo "${{ parameters.azureSubscription }}" + echo "${{ parameters.destinationACRName }}" + az acr login --name ${{ parameters.destinationACRName }} + docker tag ${{ parameters.sourceACRName }}.azurecr.io/os-wellbore-ddms:${{ parameters.imageTag }} ${{ parameters.destinationACRName }}.azurecr.io/${{ parameters.appName }}-${{ parameters.environment }}:$(Build.SourceVersion) + docker push ${{ parameters.destinationACRName }}.azurecr.io/${{ parameters.appName }}-${{ parameters.environment }}:$(Build.SourceVersion) + az acr repository show-tags --name ${{ parameters.destinationACRName }} --repository ${{ parameters.appName }}-${{ parameters.environment }} --orderby time_desc --top 3 \ No newline at end of file diff --git a/devops/azure/template/steps-unit-tests.yaml b/devops/azure/template/steps-unit-tests.yaml new file mode 100644 index 0000000000000000000000000000000000000000..6d789d1575a42521eed6c9e8221de71f5dee601b --- /dev/null +++ b/devops/azure/template/steps-unit-tests.yaml @@ -0,0 +1,72 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +parameters: +- name: python_version + type: string + default: '' +- name: include_coverage + type: boolean + default: true + +steps: + +# Use Python version +# Use the specified version of Python from the tool cache, optionally adding it to the PATH +- task: UsePythonVersion@0 + inputs: + versionSpec: ${{ parameters.python_version }} + addToPath: true + +- task: Bash@3 + displayName: 'run unit tests' + inputs: + targetType: 'inline' + script: | + echo ----- setup: installing dependencies -------- + echo $(System.DefaultWorkingDirectory) + python --version + pip --version + pip install virtualenv + virtualenv venv + source venv/bin/activate + pip install -r ./requirements.txt + pip install -r ./requirements_dev.txt + + echo ----- run tests -------- + includeCoverage="${{ lower(parameters.include_coverage) }}" + if $includeCoverage + then + python -m pytest --junit-xml=unit_tests_report.xml -o junit_suite_name=wdms_ut_python_${{ parameters.python_version }} --cov=app --cov-report=html --cov-report=xml ./tests/unit + else + python -m pytest --junit-xml=unit_tests_report.xml -o junit_suite_name=wdms_ut_python_${{ parameters.python_version }} ./tests/unit + fi + + echo ----- teardown -------- + deactivate + +- task: PublishTestResults@2 + inputs: + testResultsFormat: 'JUnit' + testResultsFiles: '**/*_tests_report*.xml' + failTaskOnFailedTests: true + +- ${{ if eq(parameters.include_coverage, true) }}: + - task: PublishCodeCoverageResults@1 + inputs: + codeCoverageTool: 'Cobertura' + summaryFileLocation: '$(System.DefaultWorkingDirectory)/coverage.xml' + pathToSources: '$(System.DefaultWorkingDirectory)/app' + reportDirectory: '$(System.DefaultWorkingDirectory)/htmlcov' + additionalCodeCoverageFiles: '$(System.DefaultWorkingDirectory)/htmlcov/**' diff --git a/devops/azure/template/vars/env_vars.yaml b/devops/azure/template/vars/env_vars.yaml new file mode 100644 index 0000000000000000000000000000000000000000..5e3f63ad3769209e47360262828e15ccd86d22bc --- /dev/null +++ b/devops/azure/template/vars/env_vars.yaml @@ -0,0 +1,20 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +variables: + # This allow to translate variables from the Infra and Shared Service to the current Wellbore DMS ADO tasks + # Eventually we could adjust the tasks to use directly the shared Variables, and remove this file + legal_tag: $(LEGAL_TAG) + acl_domain: $(DOMAIN) + data_partition: $(MY_TENANT) diff --git a/devops/azure/template/vars/global_vars.yml b/devops/azure/template/vars/global_vars.yml new file mode 100644 index 0000000000000000000000000000000000000000..5044bc299b698ba8874c212919061d5a937bec0f --- /dev/null +++ b/devops/azure/template/vars/global_vars.yml @@ -0,0 +1,23 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +variables: + app: 'wdms' + global_acr_name: TO_BE_DEFINED + chartPath: 'devops/azure/chart' + valuesFile: 'devops/azure/chart/values.yaml' + hldRegPath: 'providers/azure/hld-registry' + fluxServiceName: 'os-wellbore-ddms' + base_path : '/api/os-wellbore-ddms' + SERVICE_CONNECTION_NAME: placeholder # Must be defined, but the value will be overriden by environment specific variable \ No newline at end of file diff --git a/devops/development-pipeline.yml b/devops/development-pipeline.yml new file mode 100644 index 0000000000000000000000000000000000000000..e0135405ae0acebe579751000f60fca2d5af62f1 --- /dev/null +++ b/devops/development-pipeline.yml @@ -0,0 +1,48 @@ +# Copyright © Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +trigger: + batch: true + branches: + include: + - master + paths: + exclude: + - /**/*.md + - .gitignore + - /docs + +resources: + repositories: + - repository: FluxRepo + type: git + name: k8-gitops-manifests + - repository: TemplateRepo + type: git + name: infra-azure-provisioning + +variables: + - group: 'Azure - OSDU' + - group: 'Azure - OSDU Secrets' + - template: template/vars/global_vars.yml + +stages: + - template: template/build-stage.yml + + - template: template/deploy-stages.yml + parameters: + providers: + - name: Azure + short_name: az + environments: ['dev'] diff --git a/devops/pipeline.yml b/devops/pipeline.yml new file mode 100644 index 0000000000000000000000000000000000000000..f4aa146d8058d7869b96261f0c11e15c81724acf --- /dev/null +++ b/devops/pipeline.yml @@ -0,0 +1,48 @@ +# Copyright © Microsoft Corporation +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+trigger:
+  batch: true
+  branches:
+    include:
+      - master
+  paths:
+    exclude:
+      - /**/*.md
+      - .gitignore
+      - /docs
+
+resources:
+  repositories:
+    - repository: FluxRepo
+      type: git
+      name: k8-gitops-manifests
+    - repository: TemplateRepo
+      type: git
+      name: infra-azure-provisioning
+
+variables:
+  - group: 'Azure - OSDU'
+  - group: 'Azure - OSDU Secrets'
+  - template: template/vars/global_vars.yml
+
+stages:
+  - template: template/build-stage.yml
+
+  - template: template/deploy-stages.yml
+    parameters:
+      providers:
+        - name: Azure
+          short_name: az
+          environments: ['demo']
diff --git a/docs/log_recognition/log-recognition.md b/docs/log_recognition/log-recognition.md
new file mode 100644
index 0000000000000000000000000000000000000000..f7e1f9daa8c90c65a2667dd86024784df43e57e1
--- /dev/null
+++ b/docs/log_recognition/log-recognition.md
@@ -0,0 +1,292 @@
+# Log Recognition Service
+
+[**Introduction**](#introduction)
+
+[**How to use this service**](#how-to-use-this-service)
+
+[**How to create a custom catalog**](#how-to-create-a-custom-catalog)
+
+# **Introduction**
+
+Wellbore logs are acquired by different logging companies, each with its own
+convention for naming logs. As a result, the same measurement can be referred
+to by different mnemonics, and logging tools measuring the same quantity can
+have different descriptions and units. If all this data is stored together
+without any classification, it leads to confusion and wasted time. Logs
+therefore need to be classified into groups / families based on the
+measurement. This makes logs easier to identify and reduces the time spent
+looking for them.
+
+*Example: The following curves coming from the field are all gamma ray,
+but have different names.*
+
+
+
+*The processing workflows only need gamma ray, so you need to know that all
+of them are gamma ray. Hence the classification of these curves into families
+is important.*
+
+In a cloud environment, we are looking for automated solutions. For automated
+workflows, log mnemonics must be identified accurately, but identifying logs
+manually is very tedious work: there can be thousands of logs in a well, and
+many such wells in a field. Log Recognition is a service that assigns the
+Family attribute to all logs automatically using family assignment rules.
+This ensures that the correct logs are picked up by the workflows for
+automatic processing.
+
+**About the service**
+
+The Log Recognition service has a large ***default catalog*** of assignment
+rules, which identify the log family from the log name / mnemonic, the log
+unit and the description. This catalog cannot be modified by users, but users
+can create their own ***custom catalog*** with a different set of assignment
+rules specific to their data partition. The custom catalog always takes
+priority over the default catalog. Hence, if a company wants its own set of
+rules that are not present in the default catalog, or needs to override some
+rules defined in the default catalog,
it can create its own customized catalog.
+
+# **How to use this service**
+
+The Log Recognition service provides APIs to assign the Family attribute to
+logs using the log name / mnemonic, the log description and the log unit.
+If the curve name is *GRD* and the unit is *GAPI*, the family will be
+identified as **Gamma Ray**. The table below illustrates more such examples,
+and examples of how to use the API follow the table.
+
+ Curve Name | Unit | Description | Family
+ ------------| ------ | ------------------------------------------ | ------------------
+ GRD | GAPI | LDTD Gamma Ray | **Gamma Ray**
+ HD01 | g/cc | SMOOTHED AZIMUTHAL DENSITY - BIN 01 | **Bulk Density**
+ DFHF0_FSI | | Filtered Water Holdup Electrical Probe 0 | **Water Holdup**
+
+**Here is an example of an API call**
+```bash
+/recognize API
+POST /api/log-recognition/recognize
+```
+```json
+{
+  "label": "GRD",
+  "log_unit": "gApi",
+  "description": ""
+}
+```
+Curl:
+```bash
+curl -X POST "/api/log-recognition/recognize" -H "accept: application/json" -H "Authorization: Bearer $TOKEN" -H "appkey: appKey" -H "Content-Type: application/json" -d "{ \"label\": \"GRD\", \"log_unit\": \"gApi\", \"description\": \"\"}"
+```
+
+Parameters
+
+ Parameter | Description
+ ------------- | ------------------------------
+ label | Curve name (mandatory)
+ log_unit | Curve unit (optional)
+ description | Curve description (optional)
+
+Response:
+
+ Key | Description
+ ------------- | -------------------------------------
+ family | Recognized family
+ family_type | Recognized main family
+ log_unit | Curve unit (same as input)
+ base_unit | Default unit defined by main family
+
+Example:
+
+Recognize the family for a curve named "GRD" with unit "gapi":
+```json
+{
+  "label": "GRD",
+  "log_unit": "gapi",
+  "description": ""
+}
+```
+The result will be:
+```json
+{
+  "family": "Gamma Ray",
+  "family_type": [ "Gamma Ray"],
+  "log_unit": "gapi",
+  "base_unit": "gAPI"
+}
+```
+
+# **How to create a custom catalog**
+
+The custom catalog consists of the following attributes:
+
+1. Unit of log
+
+2. Family of log
+
+3. Rule or Mnemonic
+
+4. Main Family
+
+5. Unit of the Family
+
+Example: adding a new rule or overriding an existing family assignment rule.
+The catalog needs to be provided in the following JSON format.
+```json
+{
+  "data": {
+    "family_catalog": [
+      {
+        "unit": "ohm.m",
+        "family": "Medium Resistivity",
+        "rule": "MEDR"
+      }
+    ],
+    "main_family_catalog": [
+      {
+        "MainFamily": "Resistivity",
+        "Family": "Medium Resistivity",
+        "Unit": "OHMM"
+      }
+    ]
+  }
+}
+```
+
+To add multiple family assignments, you can also create the rules in a
+CSV file and use it to build the custom catalog.
+
+ Log | Unit | Family | Main Family | Family unit
+ --------| ------- | --------------------------------------- | --------------------- | -------------
+ BL1M | Kg/m3 | Bulk Density | Density | g/cm3
+ RDPL | OHMM | Deep Resistivity | Resistivity | ohm.m
+ PFC2 | mm | Caliper | Borehole Properties | in
+ MICR | OHMM | Micro Spherically Focused Resistivity | Resistivity | ohm.m
+ ALCD1 | mS/ft | Conductivity - Deep Induction | Conductivity | mS/m
+ CNCQH | PU | Thermal Neutron Porosity | Porosity | v/v
+ CNCQH2 | PU | Thermal Neutron Porosity | Porosity | v/v
+
+Use the following Python script to convert the CSV file to the JSON format
+required by the service.
+
+```python
+import csv
+import json
+import sys
+
+if len(sys.argv) != 2:
+    print("usage: python converter.py filetoconvert.csv")
+    exit(-1)
+
+res = {"data": {"family_catalog": [], "main_family_catalog": []}}
+
+filetoopen = sys.argv[1]
+with open(filetoopen, newline="") as csvfile:
+    catalog_reader = csv.DictReader(csvfile, delimiter=",", quotechar="|")
+    for row in catalog_reader:
+        # One family assignment rule per CSV row
+        rule = {
+            "unit": row["Unit"],
+            "family": row["Family"],
+            "rule": row["Log"],
+        }
+        main_fam = {
+            "MainFamily": row["Main Family"],
+            "Family": row["Family"],
+            "Unit": row["Family unit"],
+        }
+        # Avoid duplicate entries in the generated catalog
+        if rule not in res["data"]["family_catalog"]:
+            res["data"]["family_catalog"].append(rule)
+        if main_fam not in res["data"]["main_family_catalog"]:
+            res["data"]["main_family_catalog"].append(main_fam)
+
+with open("out.json", mode="w") as json_file:
+    json.dump(res, json_file, indent=4)
+print("out.json generated")
+```
+
+The script will generate the input for the service as shown below.
+```json
+{
+  "data": {
+    "family_catalog": [
+      {
+        "unit": "Kg/m3",
+        "family": "Bulk Density",
+        "rule": "BL1M"
+      },
+      {
+        "unit": "OHMM",
+        "family": "Deep Resistivity",
+        "rule": "RDPL"
+      },
+      {
+        "unit": "mm",
+        "family": "Caliper",
+        "rule": "PFC2"
+      },
+      {
+        "unit": "OHMM",
+        "family": "Micro Spherically Focused Resistivity",
+        "rule": "MICR"
+      },
+      {
+        "unit": "mS/ft",
+        "family": "Conductivity - Deep Induction",
+        "rule": "ALCD1"
+      },
+      {
+        "unit": "PU",
+        "family": "Thermal Neutron Porosity",
+        "rule": "CNCQH"
+      },
+      {
+        "unit": "PU",
+        "family": "Thermal Neutron Porosity",
+        "rule": "CNCQH2"
+      }
+    ],
+    "main_family_catalog": [
+      {
+        "MainFamily": "Density",
+        "Family": "Bulk Density",
+        "Unit": "g/cm3"
+      },
+      {
+        "MainFamily": "Resistivity",
+        "Family": "Deep Resistivity",
+        "Unit": "ohm.m"
+      },
+      {
+        "MainFamily": "Borehole Properties",
+        "Family": "Caliper",
+        "Unit": "in"
+      },
+      {
+        "MainFamily": "Resistivity",
+        "Family": "Micro Spherically Focused Resistivity",
+        "Unit": "ohm.m"
+      },
+      {
+        "MainFamily": "Conductivity",
+        "Family": "Conductivity - Deep Induction",
+        "Unit": "mS/m"
+      },
+      {
+        "MainFamily": "Porosity",
+        "Family": "Thermal Neutron Porosity",
+        "Unit": "v/v"
+      }
+    ]
+  }
+}
+```
+
+***Note:** If there is an existing catalog, it will be replaced. Replacing the
+existing catalog takes up to 5 minutes, so any call to recognize a family
+should be made at least 5 minutes after uploading the catalog.*
diff --git a/docs/log_recognition/sites/default/files/solution/wellboreDMS/gamma-ray-logviewer.png b/docs/log_recognition/sites/default/files/solution/wellboreDMS/gamma-ray-logviewer.png
new file mode 100644
index 0000000000000000000000000000000000000000..d81921cee64ee227ec9aff65718ee39612f96b9a
Binary files /dev/null and b/docs/log_recognition/sites/default/files/solution/wellboreDMS/gamma-ray-logviewer.png differ
diff --git a/docs/log_statistics/log-statistics.md b/docs/log_statistics/log-statistics.md
new file mode 100644
index 0000000000000000000000000000000000000000..26d1cc1f09e97231fa397d203edf608877c82424
--- /dev/null
+++ b/docs/log_statistics/log-statistics.md
@@ -0,0 +1,123 @@
+# Log statistics
+
+**Goal:** Provide statistical information for bulk log data.
+
+**Use case 1:** Data consumers interested in specific statistics for a specific log.
+
+**Use case 2:** Data ingestors can use statistics as a method to ensure bulk data is transferred correctly to Wellbore DMS.
+
+When transferring bulk data to Wellbore DMS, it can happen that, for various
+reasons (human or system), the data is not transferred correctly, and this may
+not be discovered until the data is actually used. By launching the
+calculation on the application side and comparing the results to the ones from
+this service, you can verify the transfer quickly.
+
+The method calculates the following statistics:
+- Count: number of values (missing values, NaN, are excluded)
+
+- Min & Max: smallest and largest values in the channel
+
+- Arithmetic mean & standard deviation
+
+- Percentiles: 25%, 50%, 75%
+
+**Methodology:**
+
+To calculate the statistics, the service uses the `describe` method from the
+pandas library.
+
+Link: <https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.describe.html>
+
+Example:
+
+Pandas DataFrame:
+
+:: | **Ref** | **col_1** | **col_2** | **col_3**
+--- | --------- | ----------- | ----------- | -----------
+ 0 | 1.0 | 10 | NaN | 11
+ 1 | 1.5 | 58 | 20.0 | 21
+ 2 | 2 | 2 | 30.0 | 31
+
+After applying the `describe` method:
+
+:: | **Ref** | **col_1** | **col_2** | **col_3**
+--- | --------- | ----------- | ----------- | -----------
+ count | 3 | 3 | 2 | 3
+ mean | 1.5 | 23.333333 | 25 | 21
+ std | 0.5 | 30.287511 | 7.071068 | 10
+ min | 1 | 2 | 20 | 11
+ 25% | 1.25 | 6 | 22.5 | 16
+ 50% | 1.5 | 10 | 25 | 21
+ 75% | 1.75 | 34 | 27.5 | 26
+ max | 2 | 58 | 30 | 31
+
+Descriptive statistics include those that summarize the central
+tendency, dispersion and shape of a dataset's distribution,
+**excluding NaN values.**
+
+For numeric data, the result's index will include **count, mean, std,
+min, max** as well as lower, 50 and upper **percentiles**. By default
+the lower percentile is 25 and the upper percentile is 75. The 50
+percentile is the same as the median.
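+
+As an illustration, here is a minimal sketch (assuming only that pandas and
+numpy are installed locally; the service itself may post-process the result)
+that reproduces the example above:
+
+```python
+import numpy as np
+import pandas as pd
+
+# Same sample data as the table above; col_2 contains one missing value (NaN)
+df = pd.DataFrame({
+    "Ref": [1.0, 1.5, 2.0],
+    "col_1": [10, 58, 2],
+    "col_2": [np.nan, 20.0, 30.0],
+    "col_3": [11, 21, 31],
+})
+
+stats = df.describe()  # count, mean, std, min, 25%, 50%, 75%, max (NaN excluded)
+print(stats)
+
+# Per-column JSON, similar in shape to the statistics response shown below
+print(stats.to_json())
+```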
+ +<https://www.w3resource.com/pandas/dataframe/dataframe-to_json.php> + +Example with Curl: + +**Curl** + +``` bash +curl -X GET "http://${host}/osdu/wdms/wellbore/v2/log/${logid}/statistics" -H "accept: application/json" -H "data-partition-id: ${data-partition-id}" -H "Authorization: Bearer $TOKEN" +``` +```json +{"Ref":{"count":3.0,"mean":1.5,"std":0.5,"min":1.0,"25%":1.25,"50%":1.5,"75%":1.75,"max":2.0}, +"col_1":{"count":3.0,"mean":23.3333333333,"std":30.2875111776,"min":2.0,"25%":6.0,"50%":10.0,"75%":34.0,"max":58.0}, +"col_2":{"count":2.0,"mean":25.0,"std":7.0710678119,"min":20.0,"25%":22.5,"50%":25.0,"75%":27.5,"max":30.0}, +"col_3":{"count":3.0,"mean":21.0,"std":10.0,"min":11.0,"25%":16.0,"50%":21.0,"75%":26.0,"max":31.0}} +``` + +API response from the swagger : + +**Stat API response** +```json +{ + "Ref": { + "count": 3, + "mean": 1.5, + "std": 0.5, + "min": 1, + "25%": 1.25, + "50%": 1.5, + "75%": 1.75, + "max": 2 + }, + "col_1": { + "count": 3, + "mean": 23.3333333333, + "std": 30.2875111776, + "min": 2, + "25%": 6, + "50%": 10, + "75%": 34, + "max": 58 + }, + "col_2": { + "count": 2, + "mean": 25, + "std": 7.0710678119, + "min": 20, + "25%": 22.5, + "50%": 25, + "75%": 27.5, + "max": 30 + }, + "col_3": { + "count": 3, + "mean": 21, + "std": 10, + "min": 11, + "25%": 16, + "50%": 21, + "75%": 26, + "max": 31 + } +} +``` \ No newline at end of file diff --git a/docs/portal/about.md b/docs/portal/about.md new file mode 100644 index 0000000000000000000000000000000000000000..dfc0e8da0781dcad8358b45335836bfe4da01020 --- /dev/null +++ b/docs/portal/about.md @@ -0,0 +1,14 @@ +## Overview +The Wellbore DMS provides developers the capacity to access and manipulate various data types acquired and interpreted in Wellbores. + +## Features + +### Data search +First thing we all want is to be able to find the right data in the right context, quickly. To help with this challenge, we deliver various methodologies to query the data stored in the Wellbore DMS. By providing the right context, we can provide you with the relevant data that matches your criteria’s. + +### Data access +Once found, it’s all about accessing the data for your service. Wellbores, Trajectories, Logs and all other data types required to your service. +And Evidently, once your service has run, placing new and improved data back in storage for it to be accessible in the next processing chain. + +### Data services +Providing a set of tools to help software developers to work consistently with data: Ingestion engines, connectors and various data manipulation tools to ease and reduce the burden from application developers. \ No newline at end of file diff --git a/docs/portal/tutorials.md b/docs/portal/tutorials.md new file mode 100644 index 0000000000000000000000000000000000000000..339f9fb19dc05dab22a956af127967e2ea85c187 --- /dev/null +++ b/docs/portal/tutorials.md @@ -0,0 +1,11 @@ +## Tutorials +We provide a wide variety of tutorials and resources for developers to easily understand and adopt our API's. The included examples are based on typical scenarios of use and workflows while code snippets provide a more in-depth overview of the implementation. 
+ +### Wellbore DMV v1 +Data access API’s to be used with the Data Ecosystem environment + +### Wellbore DMS v2 Data Access +Data access API’s to be used with OSDU Core environment + +### Wellbore DMS v2 Data Services +Services to manipulate data using the OSDU environment \ No newline at end of file diff --git a/main.py b/main.py new file mode 100644 index 0000000000000000000000000000000000000000..39e2ba55e1f07c6e780f9a532bfef525636f4f27 --- /dev/null +++ b/main.py @@ -0,0 +1,59 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import uvicorn +import click +from app.wdms_app import base_app +from app.conf import Config +import os + + +@click.command() +@click.option('-p', '--port', default=8080, help='port') +@click.option('-h', '--host', default='127.0.0.1', + help='host, set to "0.0.0.0" to make the service available on network') +@click.option('--dev_mode', default=-1, + help='(0|1) set dev mode, if not set will be true if localhost', type=int) +@click.option('-e', '--env', multiple=True, type=(str, str), + help='set/override the env var within the service process') +def run_wdms_app(port: int, host: str, dev_mode, env): + dev_mode = dev_mode if dev_mode >= 0 else int(host == '127.0.0.1' or host.lower() == 'localhost') + + if len(env) > 0: + # work on a copy of environ + environment_dict = os.environ.copy() + for env_key, env_value in env: + print(f'set {env_key} to {env_value}') + environment_dict[env_key] = env_value + + # reload config with the updated environment -> will update declared var + Config.reload(environment_dict) + + # push not explicitly declared var from the command args + for env_key, env_value in env: + if env_key not in Config: + Config.add_from_env(env_key) + Config.get_env_or_attribute(env_key).value = env_value + + # update + Config.dev_mode.value = bool(dev_mode) + Config.add('port', port) + Config.add('host', host) + + uvicorn.run(base_app, port=port, host=host) + + +if __name__ == "__main__": + run_wdms_app() + diff --git a/media/.gitkeep b/media/.gitkeep new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/media/Wellbore DDMS kickoff with R3 team.mp4 b/media/Wellbore DDMS kickoff with R3 team.mp4 new file mode 100644 index 0000000000000000000000000000000000000000..5adde8900aa9a0b811cbfb9d15cb564ef86019f3 Binary files /dev/null and b/media/Wellbore DDMS kickoff with R3 team.mp4 differ diff --git a/provider/os-wellbore-ddms-azure/README.md b/provider/os-wellbore-ddms-azure/README.md new file mode 100644 index 0000000000000000000000000000000000000000..23603d1804cdc5c5bb27f65433c82b0fb75ad23c --- /dev/null +++ b/provider/os-wellbore-ddms-azure/README.md @@ -0,0 +1,110 @@ +# os-wellbore-ddms-azure + +Wellbore Data Management Services (WDMS) Open Subsurface Data Universe (OSDU) is one of the several backend services that comprise Schlumberger's Exploration and Production (E&P) software ecosystem. 
It is a single, containerized service written in Python that provides an API for wellbore related data. + +## Requirements + +In order to run this service locally, you will need the following: + +- [Python](https://www.python.org/downloads/) >=3.7 +- [FastAPI](https://fastapi.tiangolo.com/) +- [OSDU on Azure infrastructure](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning) deployed + + +## Service Dependencies +- [Storage Service](https://community.opengroup.org/osdu/platform/system/storage) +- [Search Service](https://community.opengroup.org/osdu/platform/system/search-service) +- [Entitlements Service](https://community.opengroup.org/osdu/platform/security-and-compliance/entitlements) + +> Add service dependencies here + +## General Tips + +**Environment Variable Management** +The following tools make environment variable configuration simpler + - [direnv](https://direnv.net/) - for a shell/terminal environment + +## Environment Variables + +In order to run the service locally, you will need to have the following environment variables defined. We have created a helper script to generate .yaml files to set the environment variables to run and test the service using the InteliJ IDEA plugin and generate a .envrc file to set the environment variables to run and test the service using direnv [here](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning/-/blob/master/tools/variables/os-wellbore-ddms.sh). + +**Note** The following command can be useful to pull secrets from keyvault: +```bash +az keyvault secret show --vault-name $KEY_VAULT_NAME --name $KEY_VAULT_SECRET_NAME --query value -otsv +``` + +**Required to run service** + +| name | value | description | sensitive? | source | +| --- | --- | --- | --- | --- | +| `CLOUDPROVIDER` | `azure` | Cloud provider for this deployment | no | Constant | +| `SERVICE_HOST` | `$DNS_HOST` | Description | no | | +| `SERVICE_PORT` | `8080` | Description | no | | +| `SERVICE_HOST_ENTITLEMENTS` | `https://${ENV_HOST}/entitlements/v1` | Entitlements service host | no | +| `SERVICE_HOST_STORAGE` | `https://${ENV_HOST}/api/storage/v2` | Storage service host | no | +| `SERVICE_HOST_SEARCH` | `https://${ENV_HOST}/api/search/v2` | Search service host | no | +| `SERVICE_HOST_PARTITION` | `https://${ENV_HOST}/api/partition/v2` | Partition service host | no | +| `KEYVAULT_URL` | `****` | The Key Vault url (needed by the Partition Service) | yes | +| `AZ_AI_INSTRUMENTATION_KEY` | `****` | Azure Application Insights instrumentation key | yes | + +**Required to run integration tests** + +| name | value | description | sensitive? | source | +| --- | --- | --- | --- | --- | +| `FILTER_TAG` | `basic` | Run integration tests locally | no | Constant | + + +## Running Locally +| name | value | description | sensitive? | source | +| --- | --- | --- | --- | --- | +| `CLOUDPROVIDER` | `local` | Run locally | no | Constant | +| `SERVICE_HOST` | `127.0.0.1` | Description | no | | +| `SERVICE_PORT` | `8080` | Description | no | | +| `STORAGE_SERVICE_PATH` | `tmpstorage` | Local record storage folder | no | +| `BLOB_STORAGE_PATH` | `tmpblob` | Local blob storage folder | no | | + + +### Configure Python environment + +Refer to **Project Startup** section in the top-level [README](/README.md) + +### Build and run the application + +After configuring your environment as specified above, you can follow these steps to build and run the application. 
These steps should be invoked from the repository root.
+
+```bash
+python main.py -e USE_INTERNAL_STORAGE_SERVICE_WITH_PATH $STORAGE_SERVICE_PATH -e USE_LOCALFS_BLOB_STORAGE_WITH_PATH $BLOB_STORAGE_PATH -e CLOUD_PROVIDER $CLOUDPROVIDER
+```
+
+Use the command above to run the service locally. Other options are described in the top-level [README](/README.md).
+
+
+### Test the Application
+
+_After the service has started, it should be accessible via a web browser by visiting [http://127.0.0.1:8080/api/os-wellbore-ddms/docs](http://127.0.0.1:8080/api/os-wellbore-ddms/docs). If the request does not fail, you can then run the integration tests._
+
+```bash
+# set up the integration tests
+mkdir -p $STORAGE_SERVICE_PATH
+mkdir -p $BLOB_STORAGE_PATH
+python main.py -e USE_INTERNAL_STORAGE_SERVICE_WITH_PATH $STORAGE_SERVICE_PATH -e USE_LOCALFS_BLOB_STORAGE_WITH_PATH $BLOB_STORAGE_PATH -e CLOUD_PROVIDER $CLOUDPROVIDER
+# Note: this assumes that the environment variables for integration tests as outlined above are already exported in your environment.
+cd tests/integration
+python gen_postman_env.py --token $(pyjwt --key=secret encode email=nobody@example.com) --base_url "http://$SERVICE_HOST:$SERVICE_PORT/api/os-wellbore-ddms" --cloud_provider "local" --data_partition "dummy"
+pytest ./functional --environment="./generated/postman_environment.json" --filter-tag=$FILTER_TAG
+```
+
+## License
+Copyright © Schlumberger
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+[http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
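+
+## Appendix: Example direnv configuration
+
+For a quick local setup, a minimal `.envrc` for [direnv](https://direnv.net/) covering only the variables from the **Running Locally** and integration-test tables above might look like the sketch below. This is illustrative only; the helper script referenced in the **Environment Variables** section generates the authoritative file.
+
+```bash
+# .envrc - local development only (illustrative values taken from the tables above)
+export CLOUDPROVIDER=local
+export SERVICE_HOST=127.0.0.1
+export SERVICE_PORT=8080
+export STORAGE_SERVICE_PATH=tmpstorage   # local record storage folder
+export BLOB_STORAGE_PATH=tmpblob         # local blob storage folder
+export FILTER_TAG=basic                  # integration test filter tag
+```
+
+After saving the file at the repository root, run `direnv allow` so the variables are loaded whenever you enter the directory.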
diff --git a/requirements.txt b/requirements.txt
new file mode 100644
index 0000000000000000000000000000000000000000..600b1e4f7c1f8c155d3538252952c99ffcacba7b
--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,51 @@
+fastapi>=0.59.0
+cachetools==3.1.1
+aiohttp==3.6.2
+pyjwt==1.7.1
+cryptography>=2.7
+pyarrow>=0.15.0
+pandas==1.1.2
+uvicorn
+Click
+structlog
+python-rapidjson
+python-multipart
+jsonpath-ng  # maintained successor of 'jsonpath-rw', which is no longer actively developed
+
+opencensus
+opencensus-ext-stackdriver
+opencensus-ext-azure
+opencensus-ext-ocagent
+opencensus-ext-logging
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/356/packages/pypi/simple/
+log-recognition-lib>=0.0.7
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/228/packages/pypi/simple/
+osdu-data-ecosystem-storage~=1.0.0
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/227/packages/pypi/simple/
+osdu-data-ecosystem-entitlements>=0.3.2, <0.4
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/229/packages/pypi/simple/
+osdu-data-ecosystem-search>=0.3.2, <0.4
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/224/packages/pypi/simple/
+osdu-core-lib-python-gcp>=0.3.0, <0.4
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/226/packages/pypi/simple/
+osdu-core-lib-python-azure~=0.2.0
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/223/packages/pypi/simple/
+osdu-core-lib-python>=0.4.0, <0.5
+
+--extra-index-url \
+https://community.opengroup.org/api/v4/projects/431/packages/pypi/simple/
+osdu-core-lib-python-ibm>=0.0.1, <0.1
diff --git a/requirements_dev.txt b/requirements_dev.txt
new file mode 100644
index 0000000000000000000000000000000000000000..81eda7d9fd03923a7c972bb8046f269ab55f5b67
--- /dev/null
+++ b/requirements_dev.txt
@@ -0,0 +1,17 @@
+pytest
+pytest-asyncio
+pytest-cov
+pytest-mock
+pytest-httpx
+httpx
+
+# Note: Python 3.8+ already ships the Mock 4.0+ features in unittest.mock.
+mock>=4.0
+requests  # used by starlette.TestClient for testing
+pyjwt
+cryptography
+
+# the following are used in functional integration tests
+pytest-dependency
+munch
+jsonschema
diff --git a/schema/indexation/README.md b/schema/indexation/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..9ef0b95d5ecda554495f84d1a6abab615a0ac78a
--- /dev/null
+++ b/schema/indexation/README.md
@@ -0,0 +1,57 @@
+# Indexation schemas
+
+Indexation schemas are required to be able to query on some custom properties; without them, those fields are not indexed
+ and only the generic ones are. Schemas must be registered once per data partition. This will eventually
+ be part of data provisioning, but for the time being they are registered manually.
+
+## Schemas
+* [log.json](./log.json)
+* [logSet.json](./logSet.json)
+* [marker.json](./marker.json)
+* [trajectory.json](./trajectory.json)
+* [wellbore.json](./wellbore.json)
+* [dipSet.json](./dipSet.json)
+
+All schemas except dipSet come from the [data-management/wke-schema repository](https://slb-swt.visualstudio.com/data-management/_git/wke-schema?path=%2Fdomains%2Fwell%2Fjson_schema)
+and were copied here manually (no sync). The dipSet schema has been created for the needs of WDMS v2.
+
+We may update them to better fit WDMS v2 needs (for instance, bulk reference instead of DELFI logstore id, or bulk at
+ logset level ...).
Potentially we'll adopt OSDU schemas instead of the WKS (well-known schemas) defined by Schlumberger ([OSDU WellLog.json](https://gitlab.opengroup.org/osdu/json-schemas/-/blob/master/Generated/work-product-component/WellLog.json)).
+
+
+**WARNING**: The "kind" inside the JSON must be updated to match the target data partition:
+
+e.g.
+```
+{
+  "kind": "DATA_PARTITION_TAG:wks:log:1.0.5",
+  "schema": ...
+}
+```
+in case of data partition = `opendes`, it must be updated to
+```
+{
+  "kind": "opendes:wks:log:1.0.5",
+  "schema": ...
+}
+```
+
+
+## Commands
+
+### Token
+See [here](https://dev.azure.com/slb-des-ext-collaboration/open-data-ecosystem/_wiki/wikis/open-data-ecosystem.wiki/553/Authentication?anchor=get-an-sauth-token-from-an-sauth-service-account---sauth-v2) for token generation.
+
+
+### cURL
+
+Given `TOKEN`, `BASE_URL` and `DATA_PARTITION`:
+
+```
+curl \
+--location \
+--request GET "$BASE_URL/api/storage/v2/schemas/$DATA_PARTITION:wks:wellbore:1.0.6" \
+--header "accept: application/json" \
+--header "data-partition-id: $DATA_PARTITION" \
+--header "Authorization: Bearer $TOKEN"
+```
\ No newline at end of file
diff --git a/schema/indexation/dipSet.json b/schema/indexation/dipSet.json
new file mode 100644
index 0000000000000000000000000000000000000000..950029a51527e2ae5177cf93e3fec19aeff15929
--- /dev/null
+++ b/schema/indexation/dipSet.json
@@ -0,0 +1,266 @@
+{
+  "kind": "DATA_PARTITION_TAG:wks:dipSet:1.0.0",
+  "schema": [
+    {
+      "path": "azimuthReference",
+      "kind": "string"
+    },
+    {
+      "path": "classification",
+      "kind": "string"
+    },
+    {
+      "path": "dateCreated",
+      "kind": "datetime"
+    },
+    {
+      "path": "dateModified",
+      "kind": "datetime"
+    },
+    {
+      "path": "elevationReference.elevationFromMsl.unitKey",
+      "kind": "string"
+    },
+    {
+      "path": "elevationReference.elevationFromMsl.value",
+      "kind": "double"
+    },
+    {
+      "path": "elevationReference.name",
+      "kind": "string"
+    },
+    {
+      "path": "externalIds",
+      "kind": "[]link"
+    },
+    {
+      "path": "name",
+      "kind": "string"
+    },
+    {
+      "path": "operation",
+      "kind": "string"
+    },
+    {
+      "path": "reference.absentValue",
+      "kind": "string"
+    },
+    {
+      "path": "reference.dataType",
+      "kind": "string"
+    },
+    {
+      "path": "reference.dimension",
+      "kind": "int"
+    },
+    {
+      "path": "reference.family",
+      "kind": "string"
+    },
+    {
+      "path": "reference.familyType",
+      "kind": "string"
+    },
+    {
+      "path": "reference.format",
+      "kind": "string"
+    },
+    {
+      "path": "reference.logstoreId",
+      "kind": "long"
+    },
+    {
+      "path": "reference.longName",
+      "kind": "string"
+    },
+    {
+      "path": "reference.mnemonic",
+      "kind": "string"
+    },
+    {
+      "path": "reference.name",
+      "kind": "string"
+    },
+    {
+      "path": "reference.source",
+      "kind": "string"
+    },
+    {
+      "path": "reference.unitKey",
+      "kind": "string"
+    },
+    {
+      "path": "referenceType",
+      "kind": "string"
+    },
+    {
+      "path": "relationships.well.confidence",
+      "kind": "double"
+    },
+    {
+      "path": "relationships.well.id",
+      "kind": "link"
+    },
+    {
+      "path": "relationships.well.name",
+      "kind": "string"
+    },
+    {
+      "path": "relationships.well.version",
+      "kind": "long"
+    },
+    {
+      "path": "relationships.wellbore.confidence",
+      "kind": "double"
+    },
+    {
+      "path": "relationships.wellbore.id",
+      "kind": "link"
+    },
+    {
+      "path": "relationships.wellbore.name",
+      "kind": "string"
+    },
+    {
+      "path": "relationships.wellbore.version",
+      "kind": "long"
+    },
+    {
+      "path": "relationships.wellboreSection.confidence",
+      "kind": "double"
+    },
+    {
+      "path": "relationships.wellboreSection.id",
+      "kind": "link"
+    },
+    {
+      "path": "relationships.wellboreSection.name",
+ "kind": "string" + }, + { + "path": "relationships.wellboreSection.version", + "kind": "long" + }, + { + "path": "relationships.trueDipAzimuthLog.confidence", + "kind": "double" + }, + { + "path": "relationships.trueDipAzimuthLog.id", + "kind": "link" + }, + { + "path": "relationships.trueDipAzimuthLog.name", + "kind": "string" + }, + { + "path": "relationships.trueDipAzimuthLog.version", + "kind": "long" + }, + { + "path": "relationships.trueDipInclinationLog.confidence", + "kind": "double" + }, + { + "path": "relationships.trueDipInclinationLog.id", + "kind": "link" + }, + { + "path": "relationships.trueDipInclinationLog.name", + "kind": "string" + }, + { + "path": "relationships.trueDipInclinationLog.version", + "kind": "long" + }, + { + "path": "relationships.XCoordinateLog.confidence", + "kind": "double" + }, + { + "path": "relationships.XCoordinateLog.id", + "kind": "link" + }, + { + "path": "relationships.XCoordinateLog.name", + "kind": "string" + }, + { + "path": "relationships.XCoordinateLog.version", + "kind": "long" + }, + { + "path": "relationships.YCoordinateLog.confidence", + "kind": "double" + }, + { + "path": "relationships.YCoordinateLog.id", + "kind": "link" + }, + { + "path": "relationships.YCoordinateLog.name", + "kind": "string" + }, + { + "path": "relationships.YCoordinateLog.version", + "kind": "long" + }, + { + "path": "relationships.ZCoordinateLog.confidence", + "kind": "double" + }, + { + "path": "relationships.ZCoordinateLog.id", + "kind": "link" + }, + { + "path": "relationships.ZCoordinateLog.name", + "kind": "string" + }, + { + "path": "relationships.ZCoordinateLog.version", + "kind": "long" + }, + { + "path": "relationships.quality.confidence", + "kind": "double" + }, + { + "path": "relationships.quality.id", + "kind": "link" + }, + { + "path": "relationships.quality.name", + "kind": "string" + }, + { + "path": "relationships.quality.version", + "kind": "long" + }, + { + "path": "start.unitKey", + "kind": "string" + }, + { + "path": "start.value", + "kind": "double" + }, + { + "path": "step.unitKey", + "kind": "string" + }, + { + "path": "step.value", + "kind": "double" + }, + { + "path": "stop.unitKey", + "kind": "string" + }, + { + "path": "stop.value", + "kind": "double" + } + ], + "ext": null +} \ No newline at end of file diff --git a/schema/indexation/log.json b/schema/indexation/log.json new file mode 100644 index 0000000000000000000000000000000000000000..b696490fca03108cc4b15d50bf89fe9c748b8e6d --- /dev/null +++ b/schema/indexation/log.json @@ -0,0 +1,230 @@ +{ + "kind": "DATA_PARTITION_TAG:wks:log:1.0.5", + "schema": [ + { + "path": "azimuthReference", + "kind": "string" + }, + { + "path": "dateCreated", + "kind": "datetime" + }, + { + "path": "dateModified", + "kind": "datetime" + }, + { + "path": "elevationReference.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "elevationReference.elevationFromMsl.value", + "kind": "double" + }, + { + "path": "elevationReference.name", + "kind": "string" + }, + { + "path": "externalIds", + "kind": "[]link" + }, + { + "path": "log.columnNames", + "kind": "[]string" + }, + { + "path": "log.dataType", + "kind": "string" + }, + { + "path": "log.dimension", + "kind": "int" + }, + { + "path": "log.family", + "kind": "string" + }, + { + "path": "log.familyType", + "kind": "string" + }, + { + "path": "log.format", + "kind": "string" + }, + { + "path": "log.logstoreId", + "kind": "long" + }, + { + "path": "log.longName", + "kind": "string" + }, + { + "path": "log.mnemonic", + "kind": "string" + }, + { + 
"path": "log.name", + "kind": "string" + }, + { + "path": "log.source", + "kind": "string" + }, + { + "path": "log.unitKey", + "kind": "string" + }, + { + "path": "name", + "kind": "string" + }, + { + "path": "operation", + "kind": "string" + }, + { + "path": "reference.columnNames", + "kind": "[]string" + }, + { + "path": "reference.dataType", + "kind": "string" + }, + { + "path": "reference.dimension", + "kind": "int" + }, + { + "path": "reference.family", + "kind": "string" + }, + { + "path": "reference.familyType", + "kind": "string" + }, + { + "path": "reference.format", + "kind": "string" + }, + { + "path": "reference.logstoreId", + "kind": "long" + }, + { + "path": "reference.longName", + "kind": "string" + }, + { + "path": "reference.mnemonic", + "kind": "string" + }, + { + "path": "reference.name", + "kind": "string" + }, + { + "path": "reference.source", + "kind": "string" + }, + { + "path": "reference.unitKey", + "kind": "string" + }, + { + "path": "referenceType", + "kind": "string" + }, + { + "path": "relationships.logSet.confidence", + "kind": "double" + }, + { + "path": "relationships.logSet.id", + "kind": "link" + }, + { + "path": "relationships.logSet.name", + "kind": "string" + }, + { + "path": "relationships.logSet.version", + "kind": "long" + }, + { + "path": "relationships.timeDepthRelation.confidence", + "kind": "double" + }, + { + "path": "relationships.timeDepthRelation.id", + "kind": "link" + }, + { + "path": "relationships.timeDepthRelation.name", + "kind": "string" + }, + { + "path": "relationships.timeDepthRelation.version", + "kind": "long" + }, + { + "path": "relationships.well.confidence", + "kind": "double" + }, + { + "path": "relationships.well.id", + "kind": "link" + }, + { + "path": "relationships.well.name", + "kind": "string" + }, + { + "path": "relationships.well.version", + "kind": "long" + }, + { + "path": "relationships.wellbore.confidence", + "kind": "double" + }, + { + "path": "relationships.wellbore.id", + "kind": "link" + }, + { + "path": "relationships.wellbore.name", + "kind": "string" + }, + { + "path": "relationships.wellbore.version", + "kind": "long" + }, + { + "path": "start.unitKey", + "kind": "string" + }, + { + "path": "start.value", + "kind": "double" + }, + { + "path": "step.unitKey", + "kind": "string" + }, + { + "path": "step.value", + "kind": "double" + }, + { + "path": "stop.unitKey", + "kind": "string" + }, + { + "path": "stop.value", + "kind": "double" + } + ], + "ext": null +} \ No newline at end of file diff --git a/schema/indexation/logSet.json b/schema/indexation/logSet.json new file mode 100644 index 0000000000000000000000000000000000000000..42c01f971b36cc99e7bac503b5c02c572a8df333 --- /dev/null +++ b/schema/indexation/logSet.json @@ -0,0 +1,178 @@ +{ + "kind": "DATA_PARTITION_TAG:wks:logSet:1.0.5", + "schema": [ + { + "path": "azimuthReference", + "kind": "string" + }, + { + "path": "channelMnemonics", + "kind": "[]string" + }, + { + "path": "channelNames", + "kind": "[]string" + }, + { + "path": "classification", + "kind": "string" + }, + { + "path": "dateCreated", + "kind": "datetime" + }, + { + "path": "dateModified", + "kind": "datetime" + }, + { + "path": "elevationReference.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "elevationReference.elevationFromMsl.value", + "kind": "double" + }, + { + "path": "elevationReference.name", + "kind": "string" + }, + { + "path": "externalIds", + "kind": "[]link" + }, + { + "path": "name", + "kind": "string" + }, + { + "path": "operation", + "kind": "string" + }, 
+ { + "path": "reference.absentValue", + "kind": "string" + }, + { + "path": "reference.dataType", + "kind": "string" + }, + { + "path": "reference.dimension", + "kind": "int" + }, + { + "path": "reference.family", + "kind": "string" + }, + { + "path": "reference.familyType", + "kind": "string" + }, + { + "path": "reference.format", + "kind": "string" + }, + { + "path": "reference.logstoreId", + "kind": "long" + }, + { + "path": "reference.longName", + "kind": "string" + }, + { + "path": "reference.mnemonic", + "kind": "string" + }, + { + "path": "reference.name", + "kind": "string" + }, + { + "path": "reference.source", + "kind": "string" + }, + { + "path": "reference.unitKey", + "kind": "string" + }, + { + "path": "referenceType", + "kind": "string" + }, + { + "path": "relationships.well.confidence", + "kind": "double" + }, + { + "path": "relationships.well.id", + "kind": "link" + }, + { + "path": "relationships.well.name", + "kind": "string" + }, + { + "path": "relationships.well.version", + "kind": "long" + }, + { + "path": "relationships.wellbore.confidence", + "kind": "double" + }, + { + "path": "relationships.wellbore.id", + "kind": "link" + }, + { + "path": "relationships.wellbore.name", + "kind": "string" + }, + { + "path": "relationships.wellbore.version", + "kind": "long" + }, + { + "path": "relationships.wellboreSection.confidence", + "kind": "double" + }, + { + "path": "relationships.wellboreSection.id", + "kind": "link" + }, + { + "path": "relationships.wellboreSection.name", + "kind": "string" + }, + { + "path": "relationships.wellboreSection.version", + "kind": "long" + }, + { + "path": "start.unitKey", + "kind": "string" + }, + { + "path": "start.value", + "kind": "double" + }, + { + "path": "step.unitKey", + "kind": "string" + }, + { + "path": "step.value", + "kind": "double" + }, + { + "path": "stop.unitKey", + "kind": "string" + }, + { + "path": "stop.value", + "kind": "double" + } + ], + "ext": null +} \ No newline at end of file diff --git a/schema/indexation/marker.json b/schema/indexation/marker.json new file mode 100644 index 0000000000000000000000000000000000000000..54713e08462ada2c7357dff858f3bf3eb27ef02e --- /dev/null +++ b/schema/indexation/marker.json @@ -0,0 +1,214 @@ +{ + "kind": "DATA_PARTITION_TAG:wks:marker:1.0.4", + "schema": [ + { + "path": "age.unitKey", + "kind": "string" + }, + { + "path": "age.value", + "kind": "double" + }, + { + "path": "boundaryRelation", + "kind": "string" + }, + { + "path": "classification", + "kind": "string" + }, + { + "path": "dateCreated", + "kind": "datetime" + }, + { + "path": "dateModified", + "kind": "datetime" + }, + { + "path": "depth.unitKey", + "kind": "string" + }, + { + "path": "depth.value", + "kind": "double" + }, + { + "path": "depthReferenceType", + "kind": "string" + }, + { + "path": "elevationReference.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "elevationReference.elevationFromMsl.value", + "kind": "double" + }, + { + "path": "elevationReference.name", + "kind": "string" + }, + { + "path": "externalIds", + "kind": "[]link" + }, + { + "path": "interpreter", + "kind": "string" + }, + { + "path": "locationWGS84", + "kind": "core:dl:geoshape:1.0.0" + }, + { + "path": "markerFeatureType", + "kind": "string" + }, + { + "path": "markerGeoDomain", + "kind": "string" + }, + { + "path": "markerSubFeatureAttribute", + "kind": "string" + }, + { + "path": "markerSubFeatureType", + "kind": "string" + }, + { + "path": "md.unitKey", + "kind": "string" + }, + { + "path": "md.value", + "kind": "double" + 
}, + { + "path": "name", + "kind": "string" + }, + { + "path": "planeOrientationAzimuth.unitKey", + "kind": "string" + }, + { + "path": "planeOrientationAzimuth.value", + "kind": "double" + }, + { + "path": "planeOrientationDip.unitKey", + "kind": "string" + }, + { + "path": "planeOrientationDip.value", + "kind": "double" + }, + { + "path": "relationships.horizon.confidence", + "kind": "double" + }, + { + "path": "relationships.horizon.id", + "kind": "link" + }, + { + "path": "relationships.horizon.name", + "kind": "string" + }, + { + "path": "relationships.horizon.version", + "kind": "long" + }, + { + "path": "relationships.stratigraphicTable.confidence", + "kind": "double" + }, + { + "path": "relationships.stratigraphicTable.id", + "kind": "link" + }, + { + "path": "relationships.stratigraphicTable.name", + "kind": "string" + }, + { + "path": "relationships.stratigraphicTable.version", + "kind": "long" + }, + { + "path": "relationships.study.confidence", + "kind": "double" + }, + { + "path": "relationships.study.id", + "kind": "link" + }, + { + "path": "relationships.study.name", + "kind": "string" + }, + { + "path": "relationships.study.version", + "kind": "long" + }, + { + "path": "relationships.trajectory.confidence", + "kind": "double" + }, + { + "path": "relationships.trajectory.id", + "kind": "link" + }, + { + "path": "relationships.trajectory.name", + "kind": "string" + }, + { + "path": "relationships.trajectory.version", + "kind": "long" + }, + { + "path": "relationships.wellbore.confidence", + "kind": "double" + }, + { + "path": "relationships.wellbore.id", + "kind": "link" + }, + { + "path": "relationships.wellbore.name", + "kind": "string" + }, + { + "path": "relationships.wellbore.version", + "kind": "long" + }, + { + "path": "stratigraphicHierarchyLevel", + "kind": "int" + }, + { + "path": "tvd.unitKey", + "kind": "string" + }, + { + "path": "tvd.value", + "kind": "double" + }, + { + "path": "wgs84ElevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "wgs84ElevationFromMsl.value", + "kind": "double" + }, + { + "path": "wgs84LatitudeLongitude", + "kind": "core:dl:geopoint:1.0.0" + } + ], + "ext": null +} \ No newline at end of file diff --git a/schema/indexation/trajectory.json b/schema/indexation/trajectory.json new file mode 100644 index 0000000000000000000000000000000000000000..76df4f266b76cdb40d6ef21e151ee11f505102d6 --- /dev/null +++ b/schema/indexation/trajectory.json @@ -0,0 +1,174 @@ +{ + "kind": "DATA_PARTITION_TAG:wks:trajectory:1.0.5", + "schema": [ + { + "path": "azimuthReference", + "kind": "string" + }, + { + "path": "channelMnemonics", + "kind": "[]string" + }, + { + "path": "channelNames", + "kind": "[]string" + }, + { + "path": "classification", + "kind": "string" + }, + { + "path": "dateCreated", + "kind": "datetime" + }, + { + "path": "dateModified", + "kind": "datetime" + }, + { + "path": "elevationReference.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "elevationReference.elevationFromMsl.value", + "kind": "double" + }, + { + "path": "elevationReference.name", + "kind": "string" + }, + { + "path": "externalIds", + "kind": "[]link" + }, + { + "path": "index.absentValue", + "kind": "string" + }, + { + "path": "index.azimuthKey", + "kind": "string" + }, + { + "path": "index.crsKey", + "kind": "string" + }, + { + "path": "index.dataType", + "kind": "string" + }, + { + "path": "index.dimension", + "kind": "int" + }, + { + "path": "index.family", + "kind": "string" + }, + { + "path": "index.familyType", + "kind": "string" + }, + { 
+ "path": "index.format", + "kind": "string" + }, + { + "path": "index.logstoreId", + "kind": "long" + }, + { + "path": "index.longName", + "kind": "string" + }, + { + "path": "index.mnemonic", + "kind": "string" + }, + { + "path": "index.name", + "kind": "string" + }, + { + "path": "index.properties", + "kind": "[]string" + }, + { + "path": "index.source", + "kind": "string" + }, + { + "path": "index.unitKey", + "kind": "string" + }, + { + "path": "indexType", + "kind": "string" + }, + { + "path": "locationWGS84", + "kind": "core:dl:geoshape:1.0.0" + }, + { + "path": "name", + "kind": "string" + }, + { + "path": "referencePosition.coordinates", + "kind": "[]double" + }, + { + "path": "referencePosition.crsKey", + "kind": "string" + }, + { + "path": "referencePosition.unitKey", + "kind": "string" + }, + { + "path": "relationships.wellbore.confidence", + "kind": "double" + }, + { + "path": "relationships.wellbore.id", + "kind": "link" + }, + { + "path": "relationships.wellbore.name", + "kind": "string" + }, + { + "path": "relationships.wellbore.version", + "kind": "long" + }, + { + "path": "start", + "kind": "double" + }, + { + "path": "step", + "kind": "double" + }, + { + "path": "stop", + "kind": "double" + }, + { + "path": "wellHeadWgs84.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "wellHeadWgs84.elevationFromMsl.value", + "kind": "double" + }, + { + "path": "wellHeadWgs84.latitude", + "kind": "double" + }, + { + "path": "wellHeadWgs84.longitude", + "kind": "double" + } + ], + "ext": null +} \ No newline at end of file diff --git a/schema/indexation/wellbore.json b/schema/indexation/wellbore.json new file mode 100644 index 0000000000000000000000000000000000000000..aec6adfae57ce5f518d9b74c373f84c0e3b63940 --- /dev/null +++ b/schema/indexation/wellbore.json @@ -0,0 +1,354 @@ +{ + "kind": "DATA_PARTITION_TAG:wks:wellbore:1.0.6", + "schema": [ + { + "path": "airGap.unitKey", + "kind": "string" + }, + { + "path": "airGap.value", + "kind": "double" + }, + { + "path": "block", + "kind": "string" + }, + { + "path": "country", + "kind": "string" + }, + { + "path": "county", + "kind": "string" + }, + { + "path": "dateCreated", + "kind": "datetime" + }, + { + "path": "dateModified", + "kind": "datetime" + }, + { + "path": "drillingDaysTarget.unitKey", + "kind": "string" + }, + { + "path": "drillingDaysTarget.value", + "kind": "double" + }, + { + "path": "elevationReference.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "elevationReference.elevationFromMsl.value", + "kind": "double" + }, + { + "path": "elevationReference.name", + "kind": "string" + }, + { + "path": "externalIds", + "kind": "[]link" + }, + { + "path": "field", + "kind": "string" + }, + { + "path": "formationAtTd", + "kind": "string" + }, + { + "path": "formationProjected", + "kind": "string" + }, + { + "path": "hasAchievedTotalDepth", + "kind": "boolean" + }, + { + "path": "isActive", + "kind": "boolean" + }, + { + "path": "kickOffMd.unitKey", + "kind": "string" + }, + { + "path": "kickOffMd.value", + "kind": "double" + }, + { + "path": "kickOffTvd.unitKey", + "kind": "string" + }, + { + "path": "kickOffTvd.value", + "kind": "double" + }, + { + "path": "locationWGS84", + "kind": "core:dl:geoshape:1.0.0" + }, + { + "path": "name", + "kind": "string" + }, + { + "path": "operator", + "kind": "string" + }, + { + "path": "permitDate", + "kind": "datetime" + }, + { + "path": "permitNumber", + "kind": "string" + }, + { + "path": "plssLocation.aliquotPart", + "kind": "string" + }, + { + "path": 
"plssLocation.range", + "kind": "string" + }, + { + "path": "plssLocation.section", + "kind": "int" + }, + { + "path": "plssLocation.township", + "kind": "string" + }, + { + "path": "relationships.definitiveTimeDepthRelation.confidence", + "kind": "double" + }, + { + "path": "relationships.definitiveTimeDepthRelation.id", + "kind": "link" + }, + { + "path": "relationships.definitiveTimeDepthRelation.name", + "kind": "string" + }, + { + "path": "relationships.definitiveTimeDepthRelation.version", + "kind": "long" + }, + { + "path": "relationships.definitiveTrajectory.confidence", + "kind": "double" + }, + { + "path": "relationships.definitiveTrajectory.id", + "kind": "link" + }, + { + "path": "relationships.definitiveTrajectory.name", + "kind": "string" + }, + { + "path": "relationships.definitiveTrajectory.version", + "kind": "long" + }, + { + "path": "relationships.tieInWellbore.confidence", + "kind": "double" + }, + { + "path": "relationships.tieInWellbore.id", + "kind": "link" + }, + { + "path": "relationships.tieInWellbore.name", + "kind": "string" + }, + { + "path": "relationships.tieInWellbore.version", + "kind": "long" + }, + { + "path": "relationships.well.confidence", + "kind": "double" + }, + { + "path": "relationships.well.id", + "kind": "link" + }, + { + "path": "relationships.well.name", + "kind": "string" + }, + { + "path": "relationships.well.version", + "kind": "long" + }, + { + "path": "shape", + "kind": "string" + }, + { + "path": "spudDate", + "kind": "datetime" + }, + { + "path": "state", + "kind": "string" + }, + { + "path": "totalDepthMd.unitKey", + "kind": "string" + }, + { + "path": "totalDepthMd.value", + "kind": "double" + }, + { + "path": "totalDepthMdDriller.unitKey", + "kind": "string" + }, + { + "path": "totalDepthMdDriller.value", + "kind": "double" + }, + { + "path": "totalDepthMdPlanned.unitKey", + "kind": "string" + }, + { + "path": "totalDepthMdPlanned.value", + "kind": "double" + }, + { + "path": "totalDepthMdSubSeaPlanned.unitKey", + "kind": "string" + }, + { + "path": "totalDepthMdSubSeaPlanned.value", + "kind": "double" + }, + { + "path": "totalDepthProjectedMd.unitKey", + "kind": "string" + }, + { + "path": "totalDepthProjectedMd.value", + "kind": "double" + }, + { + "path": "totalDepthTvd.unitKey", + "kind": "string" + }, + { + "path": "totalDepthTvd.value", + "kind": "double" + }, + { + "path": "totalDepthTvdDriller.unitKey", + "kind": "string" + }, + { + "path": "totalDepthTvdDriller.value", + "kind": "double" + }, + { + "path": "totalDepthTvdPlanned.unitKey", + "kind": "string" + }, + { + "path": "totalDepthTvdPlanned.value", + "kind": "double" + }, + { + "path": "totalDepthTvdSubSeaPlanned.unitKey", + "kind": "string" + }, + { + "path": "totalDepthTvdSubSeaPlanned.value", + "kind": "double" + }, + { + "path": "uwi", + "kind": "string" + }, + { + "path": "wellHeadElevation.unitKey", + "kind": "string" + }, + { + "path": "wellHeadElevation.value", + "kind": "double" + }, + { + "path": "wellHeadGeographic.crsKey", + "kind": "string" + }, + { + "path": "wellHeadGeographic.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "wellHeadGeographic.elevationFromMsl.value", + "kind": "double" + }, + { + "path": "wellHeadGeographic.latitude", + "kind": "double" + }, + { + "path": "wellHeadGeographic.longitude", + "kind": "double" + }, + { + "path": "wellHeadProjected.crsKey", + "kind": "string" + }, + { + "path": "wellHeadProjected.elevationFromMsl.unitKey", + "kind": "string" + }, + { + "path": "wellHeadProjected.elevationFromMsl.value", + 
"kind": "double" + }, + { + "path": "wellHeadProjected.x", + "kind": "double" + }, + { + "path": "wellHeadProjected.y", + "kind": "double" + }, + { + "path": "wellHeadWgs84", + "kind": "core:dl:geopoint:1.0.0" + }, + { + "path": "wellboreNumberGovernment", + "kind": "string" + }, + { + "path": "wellboreNumberOperator", + "kind": "string" + }, + { + "path": "wellborePurpose", + "kind": "string" + }, + { + "path": "wellboreStatus", + "kind": "string" + }, + { + "path": "wellboreType", + "kind": "string" + } + ], + "ext": null +} \ No newline at end of file diff --git a/spec/README.md b/spec/README.md new file mode 100644 index 0000000000000000000000000000000000000000..8b4668526e61738a3b979fdd3ebb1e5e462a1a90 --- /dev/null +++ b/spec/README.md @@ -0,0 +1,32 @@ +# Wellbore DMS OpenAPI Specification + +The OpenAPI specification for Wellbore DMS is reverse generated from the source code - +as opposed to generating the implementation from the specification. + +## Where to find it +The Swagger page for WDMS is available along the running service at, +`https://{hostname}/docs`. +And the OpenAPI specification file will be at `https://{hostname}/openapi.json` + +The `spec` directory contains the OpenAPI specification files for Wellbore DMS. + +Under `spec/generated`, the OpenAPI in JSON format is saved as-is. + +## Publishing to Developer Portal +API products are grouped in families as described in the table below. + +API reference/Swagger | API Product | Path | Objects/services +--- | --- | --- | --- +Wellbore Objects Generic data types | OSDU Wellbore DMS - Data Access | baseURL/osdu/wdms/wellbore/v2 | Well, Wellbore, Logset, Log, Trajectory, Geology +Dips & Markers | OSDU Wellbore DMS - Data Access | baseURL/osdu/wdms/geology/v2 | Dip & DipSet, Maker +Search | OSDU Wellbore DMS - Data Access | baseURL/osdu/wdms/search/v2 | Search (aka Contextualization) +Log Recognition | OSDU Wellbore DMS - Data Services | baseURL/osdu/wdms/log-recognition/v2 | Rule Based Log Recognition + +### Steps +1. Convert generated JSON to YAML. Use swagger-codegen-cli locally, or Swagger Editor UI locally. +2. The converted YAML had syntax errors that were corrected in `spec/edited/openapi.yaml` +3. `spec/edited/openapi.yaml` was split according to the API groups defined above, +`geology.yaml`, `log-recognition.yaml`, `search.yaml`, and `wellbore.yaml`, placed under `spec/edited`. + + +_**Latest synced version:**_ Commit REMOVED_FOR_CICD_SCAN (Dec/10/2020) \ No newline at end of file diff --git a/spec/edited/geology.yaml b/spec/edited/geology.yaml new file mode 100644 index 0000000000000000000000000000000000000000..bf6f5ec5d0d3e1209b2c55c1bdfa0e1e4abcf04b --- /dev/null +++ b/spec/edited/geology.yaml @@ -0,0 +1,1913 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +openapi: 3.0.2 +info: + title: Wellbore DDMS Geology OSDU + version: '0.2' +servers: + - url: https://api.example.com/osdu/wdms/geology/v2 +security: + - bearer: [ ] + - appkey: [ ] +paths: + '/markers/{markerid}': + get: + tags: + - Marker + summary: 'Get the marker using wks:marker:1.0.4 schema' + description: Get the Marker object using its **id** + operationId: get_marker + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/marker' + '404': + description: marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + delete: + tags: + - Marker + summary: Delete the marker. The API performs a logical deletion of the given record + operationId: del_marker + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/markers/{markerid}/versions': + get: + tags: + - Marker + summary: Get all versions of the marker + operationId: get_marker_versions + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/markers/{markerid}/versions/{version}': + get: + tags: + - Marker + summary: 'Get the given version of marker using wks:marker:1.0.4 schema' + operationId: get_marker_version + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + 
description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/marker' + '404': + description: marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /markers: + post: + tags: + - Marker + summary: 'Create or update the markers using wks:marker:1.0.4 schema' + operationId: post_marker + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Markers + type: array + items: + $ref: '#/components/schemas/marker' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /dipsets: + post: + tags: + - Dipset + summary: 'Create or update the DipSets using wks:dipSet:1.0.0 schema' + operationId: post_dipset + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Dipsets + type: array + items: + $ref: '#/components/schemas/dipset' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/dipsets/{dipsetid}/versions/{version}': + get: + tags: + - Dipset + summary: 'Get the given version of DipSet using wks:dipset:1.0.0 schema' + description: '"Get the DipSet object using its **id**.' 
+ operationId: get_dipset_version + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/dipset' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/dipsets/{dipsetid}/versions': + get: + tags: + - Dipset + summary: Get all versions of the dipset + operationId: get_dipset_versions + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/dipsets/{dipsetid}': + get: + tags: + - Dipset + summary: 'Get the DipSet using wks:dipSet:1.0.0 schema' + description: Get the DipSet object using its **id** + operationId: get_dipset + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/dipset' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + delete: + tags: + - Dipset + summary: Delete the DipSet. 
The API performs a logical deletion of the given record + operationId: del_dipset + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/dipsets/{dipsetid}/dips': + get: + tags: + - Dips + summary: Get dips + description: |- + Return dips from dipset from the given index until the given number of dips specifed in query parameters. + If not specified returns all dips from dipset. + operationId: get_dips + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: false + schema: + title: Index + minimum: 0 + type: integer + name: index + in: query + - required: false + schema: + title: Limit + minimum: 0 + type: integer + name: limit + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Get Dips Ddms V2 Dipsets Dipsetid Dips Get + type: array + items: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + post: + tags: + - Dips + summary: Define the dips of the dipset + description: Replace previous dips by provided dips. Sort dips by reference and azimuth. + operationId: post_dips + parameters: + - description: The ID of the dipset + required: true + schema: + title: Dipsetid + type: string + description: The ID of the dipset + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Dips + type: array + items: + $ref: '#/components/schemas/Dip' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Post Dips Ddms V2 Dipsets Dipsetid Dips Post + type: array + items: + $ref: '#/components/schemas/Dip' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/dipsets/{dipsetid}/dips/insert': + post: + tags: + - Dips + summary: insert dip in a dipset + description: |- + Insert dips in dipset. + Existing dips are not replaced. + Several dip can have same reference. 
+ Operation will sort by reference all dips in dipset (may modify dip indexes). + operationId: insert_dips + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Dips + type: array + items: + $ref: '#/components/schemas/Dip' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Insert Dips Ddms V2 Dipsets Dipsetid Dips Insert Post + type: array + items: + $ref: '#/components/schemas/Dip' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/dipsets/{dipsetid}/dips/query': + get: + tags: + - Dips + summary: Query dip from dipset + description: Search dip within reference interval and specific classification + operationId: query_dip + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: Min reference for the dips to search in the dipset + required: false + schema: + title: Minreference + type: number + description: Min reference for the dips to search in the dipset + name: minReference + in: query + - required: false + schema: + title: Max reference for the dips to search in the dipset + type: number + name: maxReference + in: query + - required: false + schema: + title: Classification for the dip to search in the dipset + type: string + name: classification + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Query Dip Ddms V2 Dipsets Dipsetid Dips Query Get + type: array + items: + $ref: '#/components/schemas/Dip' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/dipsets/{dipsetid}/dips/{index}': + get: + tags: + - Dips + summary: Get a dip at index + description: '"Return dip from dipset at the given index' + operationId: get_dip_by_index + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Index + type: integer + name: index + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet or index not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + delete: + tags: + - Dips + summary: Delete a dip + description: Removes the dip at index + operationId: delete_dip_by_index + 
parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Index + type: integer + name: index + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Delete Dip By Index Ddms V2 Dipsets Dipsetid Dips Index Delete + type: array + items: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet or index not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + patch: + tags: + - Dips + summary: Update dip + description: |- + "Update dip at index + Operation will sort by reference all dips in dipset (may modify dip indexes). + operationId: patch_dip + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Index + type: integer + name: index + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/Dip' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Patch Dip Ddms V2 Dipsets Dipsetid Dips Index Patch + type: array + items: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] +components: + schemas: + CreateUpdateRecordsResponse: + title: CreateUpdateRecordsResponse + type: object + properties: + recordCount: + title: Recordcount + type: integer + recordIds: + title: Recordids + type: array + items: + type: string + skippedRecordIds: + title: Skippedrecordids + type: array + items: + type: string + DataType: + title: DataType + enum: + - string + - number + - integer + - boolean + description: An enumeration. 
+ Dip: + title: Dip + required: + - reference + - azimuth + - inclination + type: object + properties: + reference: + title: Reference of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only Measured Depth in meter is supported for the moment + azimuth: + title: Azimuth value of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only degrees unit is supported for the moment + inclination: + title: Inclination value of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only degrees unit is supported for the moment + quality: + title: Quality of the dip + exclusiveMaximum: true + maximum: 1 + exclusiveMinimum: false + minimum: 0 + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Decimal number between 0 and 1 + xCoordinate: + title: The X coordinate of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only meter unit is supported for the moment + yCoordinate: + title: The Y coordinate of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only meter unit is supported for the moment + zCoordinate: + title: The Z coordinate of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only meter unit is supported for the moment + classification: + title: Classification of the dip + type: string + description: Any string is accepted. + example: + reference: + unitKey: meter + value: 1000.5 + azimuth: + unitKey: dega + value: 42 + inclination: + unitKey: dega + value: 9 + quality: + unitKey: unitless + value: 0.5 + xCoordinate: + unitKey: meter + value: 2 + yCoordinate: + unitKey: meter + value: 45 + zCoordinate: + unitKey: meter + value: 7 + classification: fracture + Format: + title: Format + enum: + - date + - date-time + - time + - byte + - binary + - boolean + - email + - uuid + - uri + - int8 + - int16 + - int32 + - int64 + - float32 + - float64 + - float128 + description: An enumeration. 
+ GeoJsonFeature: + title: GeoJsonFeature + required: + - geometry + - properties + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + geometry: + title: Geometry + anyOf: + - $ref: '#/components/schemas/GeoJsonPoint' + - $ref: '#/components/schemas/GeoJsonMultiPoint' + - $ref: '#/components/schemas/GeoJsonLineString' + - $ref: '#/components/schemas/GeoJsonMultiLineString' + - $ref: '#/components/schemas/Polygon' + - $ref: '#/components/schemas/GeoJsonMultiPolygon' + - $ref: '#/components/schemas/geometryItem' + properties: + title: Properties + type: object + type: + $ref: '#/components/schemas/Type_1' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonFeatureCollection: + title: GeoJsonFeatureCollection + required: + - features + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + features: + title: Features + type: array + items: + $ref: '#/components/schemas/GeoJsonFeature' + type: + $ref: '#/components/schemas/Type_2' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonLineString: + title: GeoJsonLineString + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_3' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiLineString: + title: GeoJsonMultiLineString + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_4' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiPoint: + title: GeoJsonMultiPoint + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_5' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiPolygon: + title: GeoJsonMultiPolygon + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + description: 'Bounding box in longitude, latitude WGS 84.' + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: array + items: + type: number + description: 'Array of polygons (minimum 2D), containing an array of point coordinates (longitude, latitude, (optionally elevation and other properties).' 
+ type: + $ref: '#/components/schemas/Type_6' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonPoint: + title: GeoJsonPoint + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_7' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + HTTPValidationError: + title: HTTPValidationError + type: object + properties: + errors: + title: Errors + type: array + items: + $ref: '#/components/schemas/ValidationError' + Kind: + title: Kind + enum: + - CRS + - Unit + - Measurement + - AzimuthReference + - DateTime + type: string + description: An enumeration. + Legal: + title: Legal + type: object + properties: + legaltags: + title: Legal Tags + type: array + items: + type: string + description: 'The list of legal tags, see compliance API.' + otherRelevantDataCountries: + title: Other Relevant Data Countries + type: array + items: + type: string + description: 'The list of other relevant data countries using the ISO 2-letter codes, see compliance API.' + status: + title: Legal Status + type: string + description: The legal status. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + LinkList: + title: LinkList + type: object + properties: {} + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + MetaItem: + title: MetaItem + required: + - kind + - persistableReference + type: object + properties: + kind: + title: Reference Kind + allOf: + - $ref: '#/components/schemas/Kind' + description: 'The kind of reference, unit, measurement, CRS or azimuth reference.' + name: + title: Name or Symbol + type: string + description: The name of the CRS or the symbol/name of the unit + persistableReference: + title: Persistable Reference + type: string + description: The persistable reference string uniquely identifying the CRS or Unit + propertyNames: + title: Attribute Names + type: array + items: + type: string + description: 'The list of property names, to which this meta data item provides Unit/CRS context to. Data structures, which come in a single frame of reference, can register the property name, others require a full path like "data.structureA.propertyB" to define a unique context.' + propertyValues: + title: Attribute Names + type: array + items: + type: string + description: 'The list of property values, to which this meta data item provides Unit/CRS context to. Typically a unit symbol is a value to a data structure; this symbol is then registered in this propertyValues array and the persistableReference provides the absolute reference.' + uncertainty: + title: Uncertainty + type: number + description: The uncertainty of the values measured given the unit or CRS unit. 
+ additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Polygon: + title: Polygon + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_8' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + RecordVersions: + title: RecordVersions + type: object + properties: + recordId: + title: Recordid + type: string + versions: + title: Versions + type: array + items: + type: integer + SimpleElevationReference: + title: SimpleElevationReference + required: + - elevationFromMsl + type: object + properties: + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The elevation above mean sea level (MSL), at which the vertical origin is 0.0. The ''unitKey'' is further defined in ''frameOfReference.units''.' + name: + title: Elevation Reference Name + type: string + description: The name of the Elevation Reference. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + TagDictionary: + title: TagDictionary + type: object + properties: {} + description: Used for data model allows extra fields which are not declared initially in the pydantic model + ToOneRelationship: + title: ToOneRelationship + type: object + properties: + confidence: + title: Relationship Confidence + type: number + description: The confidence of the relationship. If the property is absent a well-known relation is implied. + id: + title: Related Object Id + type: string + description: 'The id of the related object in the Data Ecosystem. If set, the id has priority over the natural key in the name property.' + name: + title: Related Object Name + type: string + description: The name or natural key of the related object. This property is required if the target object id could not (yet) be identified. + version: + title: Entity Version Number + type: number + description: 'The version number of the related entity. If no version number is specified, the last version is implied.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Type: + title: Type + enum: + - GeometryCollection + description: An enumeration. + Type_1: + title: Type_1 + enum: + - Feature + description: An enumeration. + Type_2: + title: Type_2 + enum: + - FeatureCollection + description: An enumeration. + Type_3: + title: Type_3 + enum: + - LineString + description: An enumeration. + Type_4: + title: Type_4 + enum: + - MultiLineString + description: An enumeration. + Type_5: + title: Type_5 + enum: + - MultiPoint + description: An enumeration. + Type_6: + title: Type_6 + enum: + - MultiPolygon + description: An enumeration. + Type_7: + title: Type_7 + enum: + - Point + description: An enumeration. + Type_8: + title: Type_8 + enum: + - Polygon + description: An enumeration. 
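The GeoJSON schemas above (point, line string, polygon and their multi-part variants, plus the feature and feature collection wrappers) all forbid fields that are not declared in the model. A minimal sketch of a conforming GeoJsonFeatureCollection holding a single point follows; the coordinate values and property name are illustrative only, given in longitude, latitude order:

feature_collection = {
    "type": "FeatureCollection",             # Type_2
    "features": [
        {
            "type": "Feature",               # Type_1
            "geometry": {
                "type": "Point",             # Type_7
                "coordinates": [5.7, 58.9],  # longitude, latitude
            },
            "properties": {"name": "example location"},
        }
    ],
}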
+ ValidationError: + title: ValidationError + required: + - loc + - msg + - type + type: object + properties: + loc: + title: Location + type: array + items: + type: string + msg: + title: Message + type: string + type: + title: Error Type + type: string + ValueWithUnit: + title: ValueWithUnit + required: + - unitKey + - value + type: object + properties: + unitKey: + title: Unit Key + type: string + description: Unit for value of the corresponding attribute for the domain object in question. The key can be looked up in the 'frameOfReference.units' for further details. + value: + title: Value + type: number + description: Value of the corresponding attribute for the domain object in question. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + channel: + title: channel + type: object + properties: + absentValue: + title: Absent Value + type: string + description: Optional field carrying the absent value as string for this channel. + dataType: + title: Data Type + allOf: + - $ref: '#/components/schemas/DataType' + description: The log value type (per log sample). The 'format' property may contain further hints about data type presentation. + default: number + dimension: + title: Dimension + type: integer + description: The dimension of this log or channel + family: + title: Log Family + type: string + description: The log family code of this log or channel (optional) + familyType: + title: Log Family Type + type: string + description: 'The log family type code of this log or channel. Example: ''Neutron Porosity'' for ''Thermal Neutron Porosity Sandstone''. (optional)' + format: + title: Format Hint + allOf: + - $ref: '#/components/schemas/Format' + description: Optional format hint how to treat the log values as strings or number of bits per 'dataType'. + default: float32 + logstoreId: + title: Logstore ID + type: number + description: The id of this log or channel in the Logstore. This property is not present in the index channel. + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + longName: + title: Log Long Name + type: string + description: The long name of this log or channel + mnemonic: + title: Mnemonic + type: string + description: The mnemonic of this log or channel + name: + title: Log Name + type: string + description: The name of this log or channel. + properties: + title: Named Properties + type: array + items: + $ref: '#/components/schemas/namedProperty' + description: The named properties of this log or channel. + source: + title: Source + type: string + description: 'The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated.' + unitKey: + title: Unit + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + core_dl_geopoint: + title: core_dl_geopoint + required: + - latitude + - longitude + type: object + properties: + latitude: + title: Latitude + maximum: 90 + minimum: -90 + type: number + description: 'The latitude value in degrees of arc (dega). Value range [-90, 90].' + longitude: + title: Longitude + maximum: 180 + minimum: -180 + type: number + description: 'The longitude value in degrees of arc (dega). 
Value range [-180, 180]' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + dipSetData: + title: dipSetData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: Azimuth reference code defining the type of North. Only used for dipSets with azimuth data + classification: + title: Log Set Classification + type: string + description: The well-known log set classification code. + default: Externally Processed LogSet + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + $ref: '#/components/schemas/SimpleElevationReference' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + name: + title: Dip Set Name + type: string + description: The name of this dip set + operation: + title: Operation + type: string + description: The operation which created this entity + reference: + $ref: '#/components/schemas/channel' + referenceType: + title: Reference Type + type: string + description: The reference index type of the dip set. + relationships: + $ref: '#/components/schemas/dipsetrelationships' + start: + $ref: '#/components/schemas/ValueWithUnit' + step: + $ref: '#/components/schemas/ValueWithUnit' + stop: + $ref: '#/components/schemas/ValueWithUnit' + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + dipset: + title: dipset + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Dip Set Data + allOf: + - $ref: '#/components/schemas/dipSetData' + description: dipset data + id: + title: Dip Set ID + type: string + description: The unique identifier of the dip set + kind: + title: Dip Set Kind + type: string + description: Kind specification + default: 'osdu:wks:dipSet:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/Legal' + description: The dip-set's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this dip set; set by the framework. 
+ additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + dipsetrelationships: + title: dipsetrelationships + required: + - wellbore + type: object + properties: + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this dipSet belongs. Only required if the wellbore is unknown. + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this dipSet belongs. + wellboreSection: + title: Wellbore Section + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore section to which this dipSet belongs. + referenceLog: + title: True dip azimuth log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The true dip azimuth log of the dipset. + trueDipAzimuthLog: + title: True dip azimuth log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The true dip azimuth log of the dipset. + trueDipInclinationLog: + title: X-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The X-coordinate log of the dipset + xCoordinateLog: + title: X-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The X-coordinate log of the dipset + yCoordinateLog: + title: Y-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The Y-coordinate log of the dipset + zCoordinateLog: + title: Z-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The Z-coordinate log of the dipset + qualityLog: + title: Quality log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The quality log of the dipset + classificationLog: + title: Classification log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The classification log of the dipset + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + geometryItem: + title: geometryItem + required: + - geometries + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + geometries: + title: Geometries + type: array + items: + anyOf: + - $ref: '#/components/schemas/GeoJsonPoint' + - $ref: '#/components/schemas/GeoJsonMultiPoint' + - $ref: '#/components/schemas/GeoJsonLineString' + - $ref: '#/components/schemas/GeoJsonMultiLineString' + - $ref: '#/components/schemas/Polygon' + - $ref: '#/components/schemas/GeoJsonMultiPolygon' + type: + $ref: '#/components/schemas/Type' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + marker: + title: marker + required: + - acl + - kind + - legal + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Marker Data + allOf: + - $ref: '#/components/schemas/markerData' + description: 'Geological marker using a single point-observation, typically along a wellbore.' 
+ id: + title: Marker ID + type: string + description: The unique identifier of the marker + kind: + title: Marker Kind + type: string + description: Marker kind specification + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/Legal' + description: The marker's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this marker; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + markerData: + title: markerData + required: + - md + - name + type: object + properties: + age: + title: Age + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The absolute age at the feature boundary. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. + boundaryRelation: + title: Interface Boundary Relation + type: string + description: The marker boundary relationship classification + classification: + title: Marker Classification + type: string + description: 'The classification of the marker. Could be client-defined via a catalog, e.g. common:wke:markerClassification:1.0.0 and common:wke:markerClassificationMember:1.0.0' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + depth: + title: Marker Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The original marker depth - measured from data.elevationReference in data.depthReferenceType. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. + depthReferenceType: + title: Depth Reference Code + type: string + description: Depth reference code defining the type of depth for the marker. Default MD (measured depth). Depth is downwards increasing. + default: MD + elevationReference: + title: Elevation Reference Level + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The elevation from mean sea level (MSL), where depth, topDepth, baseDepth are zero. Values above MSL are positive.' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + interpreter: + title: Interpreter Name + type: string + description: The name of the interpreter who picked this marker. + locationWGS84: + title: GeoJSON Marker Location + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: The marker's shape as GeoJSON Point. 
+ markerFeatureType: + title: Marker Feature Type + type: string + description: 'The marker''s type of feature like ''seismic'', ''structural'', ''stratigraphic''' + markerGeoDomain: + title: Marker GeoScience Domain + type: string + description: 'The marker''s GeoScience domain like ''geologic'', ''reservoir'', ''petrophysical''' + markerSubFeatureAttribute: + title: Marker Sub-feature Attribute + type: string + description: 'Further specification of the marker''s sub-feature, e.g. in sequence stratigraphy.' + markerSubFeatureType: + title: Marker Sub-feature Type + type: string + description: 'The marker''s sub-type of the feature like ''horizon'', ''fault'', ''fracture''' + md: + title: Marker Measured Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The marker measured depth (MD) measured from data.elevationReference. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. + name: + title: Marker Name + type: string + description: The name of the marker + planeOrientationAzimuth: + title: Azimuth Angle + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Azimuth angle. The azimuth reference is given by data.azimuthReference. The 'planeOrientationAzimuth.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + planeOrientationDip: + title: Dip Angle + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Dip angle. The 'planeOrientationDip.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/markerrelationships' + description: The entities related to this marker. + stratigraphicHierarchyLevel: + title: Column Level + type: integer + description: 'Optional hierarchical level in the chrono-stratigraphic/litho-stratigraphic catalog table, identified by the data.relationships.chartId' + tvd: + title: Marker Measured Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The marker true vertical depth (TVD) measured from data.elevationReference. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. + wgs84ElevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Sea Level, downwards negative. The unit definition is found via the property''s unitKey'' in ''frameOfReference.units'' dictionary.' + wgs84LatitudeLongitude: + title: WGS 84 Latitude Longitude + allOf: + - $ref: '#/components/schemas/core_dl_geopoint' + description: The marker's position in WGS 84 latitude and longitude. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + markerrelationships: + title: markerrelationships + type: object + properties: + horizon: + title: Stratigraphic Horizon + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The related stratigraphic horizon + stratigraphicTable: + title: Stratigraphic Table + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: 'The related stratigraphic table, which provides the context for the stratigraphic horizon' + study: + title: Study + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: 'The study, in which this marker was conceived.' 
+ trajectory: + title: Trajectory + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The trajectory used to create the marker position + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: 'The wellbore entity, to which this marker belongs.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + namedProperty: + title: namedProperty + type: object + properties: + associations: + title: Associations + type: array + items: + type: string + description: The optional associations contains one or more mnemonics found elsewhere in the logSet. + description: + title: Property Description + type: string + description: The description and role of this property. + format: + title: Format (LAS) + type: string + description: 'An optional format declaration for the property values. The ''A'' prefix indicates an array; string values are represented by ''S''; floating point values are represented by ''F'', optionally followed by a field specification, e.g. ''F10.4''; exponential number representations are represented by ''E''; integer values are represented by ''I''. For further information see the LAS specification http://www.cwls.org/las/.' + name: + title: Property Name + type: string + description: The name of this property. + unitKey: + title: Property Unit Symbol + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + value: + title: Property Value + anyOf: + - type: number + - type: string + description: The value for this property as a string or a number. + values: + title: Property Values (Interval) + type: array + items: + type: number + description: 'The values, e.g. interval boundaries, for this property.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + securitySchemes: + bearer: + type: apiKey + name: Authorization + in: header + appkey: + type: apiKey + in: header + name: appkey \ No newline at end of file diff --git a/spec/edited/log-recognition.yaml b/spec/edited/log-recognition.yaml new file mode 100644 index 0000000000000000000000000000000000000000..afb71c8a82d0e44930b35645a88816e1b22cdd42 --- /dev/null +++ b/spec/edited/log-recognition.yaml @@ -0,0 +1,311 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +openapi: 3.0.2 +info: + title: Log Recognition OSDU + version: '0.2' +servers: + - url: https://api.example.com/osdu/wdms/log-recognition/v2 +security: +- bearer: [] +- appkey: [] +paths: + /family: + post: + tags: + - Log recognition + summary: Recognize family and unit + description: Find the most probable family and unit using family assignment rule based catalogs. User defined catalog will have the priority. 
+ operationId: family + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/GuessRequest' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/GuessResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /upload-catalog: + put: + tags: + - Log recognition + summary: Upload user-defined catalog with family assignment rules + description: |- + Upload user-defined catalog with family assignment rules for specific partition ID. + If there is an existing catalog, it will be replaced. It takes maximum of 5 mins to replace the existing catalog. + Hence, any call to retrieve the family should be made after 5 mins of uploading the catalog + operationId: upload-catalog + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CatalogRecord' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] +components: + schemas: + Catalog: + title: Catalog + required: + - family_catalog + type: object + properties: + family_catalog: + title: Family Catalog + type: array + items: + $ref: '#/components/schemas/CatalogItem' + main_family_catalog: + title: Main Family Catalog + type: array + items: + $ref: '#/components/schemas/MainFanilyCatalogItem' + CatalogItem: + title: CatalogItem + required: + - unit + - rule + type: object + properties: + unit: + title: Unit + type: string + family: + title: Family + type: string + default: '' + rule: + title: Rule + type: string + CatalogRecord: + title: CatalogRecord + required: + - acl + - legal + - data + type: object + properties: + acl: + $ref: '#/components/schemas/StorageAcl' + legal: + $ref: '#/components/schemas/Legal' + data: + $ref: '#/components/schemas/Catalog' + example: + acl: + viewers: + - 'abc@example.com, cde@example.com' + owners: + - 'abc@example.com, cde@example.com' + legal: + legaltags: + - opendes-public-usa-dataset-1 + otherRelevantDataCountries: + - US + data: + family_catalog: + - unit: ohm.m + family: Medium Resistivity + rule: MEDR + main_family_catalog: + - MainFamily: Resistivity + Family: Medium Resistivity + Unit: OHMM + CreateUpdateRecordsResponse: + title: CreateUpdateRecordsResponse + type: object + properties: + recordCount: + title: Recordcount + type: integer + recordIds: + title: Recordids + type: array + items: + type: string + skippedRecordIds: + title: Skippedrecordids + type: array + items: + type: string + GuessRequest: + title: GuessRequest + required: + - label + type: object + properties: + label: + title: Label + type: string + log_unit: + title: Log Unit + 
type: string + description: + title: Description + type: string + example: + label: GRD + log_unit: GAPI + description: LDTD Gamma Ray + GuessResponse: + title: GuessResponse + type: object + properties: + family: + title: Family + type: string + family_type: + title: Family Type + type: string + log_unit: + title: Log Unit + type: string + base_unit: + title: Base Unit + type: string + HTTPValidationError: + title: HTTPValidationError + type: object + properties: + errors: + title: Errors + type: array + items: + $ref: '#/components/schemas/ValidationError' + Legal: + title: Legal + type: object + properties: + legaltags: + title: Legaltags + type: array + items: + type: string + otherRelevantDataCountries: + title: Otherrelevantdatacountries + type: array + items: + type: string + MainFanilyCatalogItem: + title: MainFanilyCatalogItem + required: + - MainFamily + - Family + - Unit + type: object + properties: + MainFamily: + title: Mainfamily + type: string + Family: + title: Family + type: string + Unit: + title: Unit + type: string + StorageAcl: + title: StorageAcl + required: + - viewers + - owners + type: object + properties: + viewers: + title: Viewers + type: array + items: + type: string + owners: + title: Owners + type: array + items: + type: string + ValidationError: + title: ValidationError + required: + - loc + - msg + - type + type: object + properties: + loc: + title: Location + type: array + items: + type: string + msg: + title: Message + type: string + type: + title: Error Type + type: string + securitySchemes: + bearer: + type: apiKey + name: Authorization + in: header + appkey: + type: apiKey + in: header + name: appkey \ No newline at end of file diff --git a/spec/edited/openapi.yaml b/spec/edited/openapi.yaml new file mode 100644 index 0000000000000000000000000000000000000000..0b430061f4acf7b1d43731104c0db1f8ae38f4de --- /dev/null +++ b/spec/edited/openapi.yaml @@ -0,0 +1,6882 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
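As a usage sketch for the log-recognition spec above, the call below posts a GuessRequest to the /family endpoint and reads back the GuessResponse. The base URL is taken from the spec's servers entry; the bearer token, appkey and data-partition-id values are placeholders, not real credentials:

import requests

BASE_URL = "https://api.example.com/osdu/wdms/log-recognition/v2"

payload = {
    # GuessRequest: only 'label' is required; unit and description help the match.
    "label": "GRD",
    "log_unit": "GAPI",
    "description": "LDTD Gamma Ray",
}

resp = requests.post(
    f"{BASE_URL}/family",
    json=payload,
    headers={
        "Authorization": "Bearer <token>",
        "appkey": "<appkey>",
        "data-partition-id": "<partition-id>",
    },
)
resp.raise_for_status()
print(resp.json())   # GuessResponse: family, family_type, log_unit, base_unit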
+ +openapi: 3.0.2 +info: + title: Wellbore DDMS OSDU + description: build local + version: '0.2' +paths: + /ddms/v2/about: + get: + summary: Get About + operationId: get_about_ddms_v2_about_get + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AboutResponse' + /ddms/v2/version: + get: + summary: Get Version + operationId: get_version_ddms_v2_version_get + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VersionDetailsResponse' + security: + - OpenDESBearerToken: [] + /ddms/v2/status: + get: + tags: + - Wellbore DDMS + summary: Get the status of the service + operationId: about_ddms_v2_status_get + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/V1AboutResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/wells/{wellid}': + get: + tags: + - Well + summary: 'Get the Well using wks:well:1.0.2 schema' + description: |- + Get the Well object using its **id**. <p>If the well kind is + *wks:well:1.0.2* returns the record directly</p> <p>If the well + kind is different *wks:well:1.0.2* it will get the raw record and + convert the results to match the *wks:well:1.0.2*. If convertion is + not possible returns an error **500** + operationId: get_well + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/well' + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + delete: + tags: + - Well + summary: Delete the well. 
The API performs a logical deletion of the given record + operationId: del_well + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/wells/{wellid}/versions': + get: + tags: + - Well + summary: Get all versions of the Well + operationId: get_well_versions + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/wells/{wellid}/versions/{version}': + get: + tags: + - Well + summary: 'Get the given version of the Well using wks:well:1.0.2 schema' + description: |- + "Get the Well object using its **id**. <p>If the well kind is + *wks:well:1.0.2* returns the record directly</p> <p>If the well + kind is different *wks:well:1.0.2* it will get the raw record and + convert the results to match the *wks:well:1.0.2*. 
If convertion is + not possible returns an error **500** + operationId: get_well_version + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/well' + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/v2/wells: + post: + tags: + - Well + summary: 'Create or update the Wells using wks:well:1.0.2 schema' + operationId: post_well + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Wells + type: array + items: + $ref: '#/components/schemas/well' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/wellbores/{wellboreid}': + get: + tags: + - Wellbore + summary: 'Get the Wellbore using wks:wellbore:1.0.6 schema' + description: |- + Get the Wellbore object using its **id**. <p>If the wellbore kind is + *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore + kind is different *wks:wellbore:1.0.6* it will get the raw record and + convert the results to match the *wks:wellbore:1.0.6*. If convertion is + not possible returns an error **500** + operationId: get_wellbore + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/wellbore' + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + delete: + tags: + - Wellbore + summary: Delete the wellbore. 
The API performs a logical deletion of the given record + operationId: del_wellbore + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/wellbores/{wellboreid}/versions': + get: + tags: + - Wellbore + summary: Get all versions of the Wellbore + operationId: get_wellbore_versions + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/wellbores/{wellboreid}/versions/{version}': + get: + tags: + - Wellbore + summary: 'Get the given version of the Wellbore using wks:wellbore:1.0.6 schema' + description: |- + "Get the Wellbore object using its **id**. <p>If the wellbore kind is + *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore + kind is different *wks:wellbore:1.0.6* it will get the raw record and + convert the results to match the *wks:wellbore:1.0.6*. 
If convertion is + not possible returns an error **500** + operationId: get_wellbore_version + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/wellbore' + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/v2/wellbores: + post: + tags: + - Wellbore + summary: 'Create or update the Wellbores using wks:wellbore:1.0.6 schema' + operationId: post_wellbore + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Wellbores + type: array + items: + $ref: '#/components/schemas/wellbore' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logsets/{logsetid}': + get: + tags: + - Logset + summary: 'Get the LogSet using wks:logSet:1.0.5 schema' + description: Get the LogSet object using its **id** + operationId: get_logset + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/logset' + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + delete: + tags: + - Logset + summary: Delete the LogSet. 
The API performs a logical deletion of the given record + operationId: del_logset + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logsets/{logsetid}/versions': + get: + tags: + - Logset + summary: Get all versions of the logset + operationId: get_logset_versions + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logsets/{logsetid}/versions/{version}': + get: + tags: + - Logset + summary: 'Get the given version of LogSet using wks:logSet:1.0.5 schema' + description: '"Get the LogSet object using its **id**.' 
+ operationId: get_logset_version + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/logset' + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logsets/{logsetid}/harmonize': + post: + tags: + - Logset + summary: 'Create or update the LogSets using wks:logSet:1.0.5 schema' + operationId: harmonize_logset + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/logset' + '404': + description: logset not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/v2/logsets: + post: + tags: + - Logset + summary: 'Create or update the LogSets using wks:logSet:1.0.5 schema' + operationId: put_logset + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Logsets + type: array + items: + $ref: '#/components/schemas/logset' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/trajectories/{trajectoryid}': + get: + tags: + - Trajectory + summary: 'Get the trajectory using wks:trajectory:1.0.5 schema' + description: Get the Trajectory object using its **id** + operationId: get_trajectory + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/trajectory' + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - 
OpenDESBearerToken: [] + delete: + tags: + - Trajectory + summary: Delete the Trajectory. The API performs a logical deletion of the given record + operationId: del_trajectory + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/trajectories/{trajectoryid}/versions': + get: + tags: + - Trajectory + summary: Get all versions of the Trajectory + operationId: get_trajectory_versions + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/trajectories/{trajectoryid}/versions/{version}': + get: + tags: + - Trajectory + summary: 'Get the given version of Trajectory using wks:Trajectory:1.0.5 schema' + operationId: get_trajectory_version + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/trajectory' + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/v2/trajectories: + post: + tags: + - Trajectory + summary: 'Create or update the trajectories using wks:Trajectory:1.0.5 schema' + operationId: post_trajectory + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Trajectories + type: array + items: + $ref: '#/components/schemas/trajectory' + required: true + responses: + '200': + description: Successful 
Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/trajectories/{trajectoryid}/data': + get: + tags: + - Trajectory + summary: Returns all data within the specified filters. Strongly consistent. + description: return full bulk data + operationId: get_traj_data + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: 'List of channels to get. If not provided, return all channels.' + required: false + schema: + title: Channels + type: array + items: + type: string + description: 'List of channels to get. If not provided, return all channels.' + name: channels + in: query + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: |- + Get trajectory data of the given channels. + It uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html). + Here're examples for data with 5 rows for channels MD, X, Y with different _orient_: + * split: <br/>`{"columns":["MD","X","Y"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> + * index: <br/>`{"0":{"MD":0.0,"X":1001,"Y":2001},"1":{"MD":0.5,"X":1002,"Y":2002},"2":{"MD":1.0,"X":1003,"Y":2003},"3":{"MD":1.5,"X":1004,"Y":2004},"4":{"MD":2.0,"X":1005,"Y":2005}}`<br/> + * columns: <br/>`{"MD":{"0":0.0,"1":0.5,"2":1.0,"3":1.5,"4":2.0},"X":{"0":1001,"1":1002,"2":1003,"3":1004,"4":1005},"Y":{"0":2001,"1":2002,"2":2003,"3":2004,"4":2005}}`<br/> + * records: <br/>`[{"MD":0.0,"X":1001,"Y":2001},{"MD":0.5,"X":1002,"Y":2002},{"MD":1.0,"X":1003,"Y":2003},{"MD":1.5,"X":1004,"Y":2004},{"MD":2.0,"X":1005,"Y":2005}]`<br/> + * values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> + content: + application/json: + schema: + $ref: '#/components/schemas/GetLogDataResponse' + example: '{"columns":["MD","X","Y"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}' + '400': + description: unknown channels + '404': + description: trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + post: + tags: + - Trajectory + summary: Writes the specified data to the trajectory (atomic). + description: Overwrite if exists + operationId: post_traj_data + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: 'define format when using JSON data is used. 
Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '404': + description: trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/markers/{markerid}': + get: + tags: + - Marker + summary: 'Get the marker using wks:marker:1.0.4 schema' + description: Get the Marker object using its **id** + operationId: get_marker + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/marker' + '404': + description: marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + delete: + tags: + - Marker + summary: Delete the marker. 
The API performs a logical deletion of the given record + operationId: del_marker + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/markers/{markerid}/versions': + get: + tags: + - Marker + summary: Get all versions of the marker + operationId: get_marker_versions + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/markers/{markerid}/versions/{version}': + get: + tags: + - Marker + summary: 'Get the given version of marker using wks:marker:1.0.4 schema' + operationId: get_marker_version + parameters: + - required: true + schema: + title: Markerid + type: string + name: markerid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/marker' + '404': + description: marker not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/v2/markers: + post: + tags: + - Marker + summary: 'Create or update the markers using wks:marker:1.0.4 schema' + operationId: post_marker + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Markers + type: array + items: + $ref: '#/components/schemas/marker' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: 
Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logs/{logid}': + get: + tags: + - Log + summary: 'Get the Log using wks:log:1.0.5 schema' + description: |2- + + Get the log object using its data ecosystem **id**. <p>If the log + kind is *wks:log:1.0.5*, the record is returned directly.</p> <p>If the + log kind is different from *wks:log:1.0.5*, it will get the raw + record and convert the results to match *wks:log:1.0.5*. If the + conversion is not possible, an error **500** is returned.</p> + operationId: get_log + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/log' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + delete: + tags: + - Log + summary: Delete the log. The API performs a logical deletion of the given record + operationId: del_log + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/v2/logs: + post: + tags: + - Log + summary: 'Create or update the logs using wks:log:1.0.5 schema' + operationId: post_log + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Logs + type: array + items: + $ref: '#/components/schemas/log' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logs/{logid}/versions': + get: + tags: + - Log + summary: Get all versions of the log + operationId: get_log_versions + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref:
'#/components/schemas/RecordVersions' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logs/{logid}/versions/{version}': + get: + tags: + - Log + summary: 'Get the given version of log using wks:log:1.0.5 schema' + operationId: get_log_version + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/log' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logs/{logid}/data': + get: + tags: + - Log + summary: Returns all data within the specified filters. Strongly consistent. + description: return full bulk data + operationId: get_log_data + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: |- + Get log bulk data in format in the given _orient_ value. + It uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html). 
+ Here're examples for data with 5 rows and 3 columns with different _orient_: + * split: <br/>`{"columns":["Ref","col_100X","col_200X"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> + * index: <br/>`{"0":{"Ref":0.0,"col_100X":1001,"col_200X":2001},"1":{"Ref":0.5,"col_100X":1002,"col_200X":2002},"2":{"Ref":1.0,"col_100X":1003,"col_200X":2003},"3":{"Ref":1.5,"col_100X":1004,"col_200X":2004},"4":{"Ref":2.0,"col_100X":1005,"col_200X":2005}}`<br/> + * columns: <br/>`{"Ref":{"0":0.0,"1":0.5,"2":1.0,"3":1.5,"4":2.0},"col_100X":{"0":1001,"1":1002,"2":1003,"3":1004,"4":1005},"col_200X":{"0":2001,"1":2002,"2":2003,"3":2004,"4":2005}}`<br/> + * records: <br/>`[{"Ref":0.0,"col_100X":1001,"col_200X":2001},{"Ref":0.5,"col_100X":1002,"col_200X":2002},{"Ref":1.0,"col_100X":1003,"col_200X":2003},{"Ref":1.5,"col_100X":1004,"col_200X":2004},{"Ref":2.0,"col_100X":1005,"col_200X":2005}]`<br/> + * values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> + content: + application/json: + schema: + $ref: '#/components/schemas/GetLogDataResponse' + example: '{"columns":["Ref","col_100X","col_200X"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + post: + tags: + - Log + summary: Writes the specified data to the log (atomic). + description: Overwrite if exists + operationId: write_log_data + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + requestBody: + description: |- + Write log bulk data. + It uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html). 
+ Here're examples for data with 5 rows and 3 columns with different _orient_: + * split: <br/>`{"columns":["Ref","col_100X","col_200X"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> + * index: <br/>`{"0":{"Ref":0.0,"col_100X":1001,"col_200X":2001},"1":{"Ref":0.5,"col_100X":1002,"col_200X":2002},"2":{"Ref":1.0,"col_100X":1003,"col_200X":2003},"3":{"Ref":1.5,"col_100X":1004,"col_200X":2004},"4":{"Ref":2.0,"col_100X":1005,"col_200X":2005}}`<br/> + * columns: <br/>`{"Ref":{"0":0.0,"1":0.5,"2":1.0,"3":1.5,"4":2.0},"col_100X":{"0":1001,"1":1002,"2":1003,"3":1004,"4":1005},"col_200X":{"0":2001,"1":2002,"2":2003,"3":2004,"4":2005}}`<br/> + * records: <br/>`[{"Ref":0.0,"col_100X":1001,"col_200X":2001},{"Ref":0.5,"col_100X":1002,"col_200X":2002},{"Ref":1.0,"col_100X":1003,"col_200X":2003},{"Ref":1.5,"col_100X":1004,"col_200X":2004},{"Ref":2.0,"col_100X":1005,"col_200X":2005}]`<br/> + * values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> + required: true + content: + application/json: + schema: + example: + columns: + - Ref + - col_100X + - col_200X + index: + - 0 + - 1 + - 2 + - 3 + - 4 + data: + - - 0 + - 1001 + - 2001 + - - 0.5 + - 1002 + - 2002 + - - 1 + - 1003 + - 2003 + - - 1.5 + - 1004 + - 2004 + - - 2 + - 1005 + - 2005 + oneOf: + - title: SplitFormat + type: object + properties: + data: + title: Data + anyOf: + - type: array + items: + anyOf: + - type: string + - type: integer + - type: number + - type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + columns: + title: Columns + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + index: + title: Index + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + required: + - data + - title: IndexFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ColumnFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: RecordsFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ValuesFormat + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + '/ddms/v2/logs/{logid}/upload_data': + post: + tags: + - Log + summary: Writes the data to the log. Support json file (then orient must be provided) and parquet + description: Overwrite if exists + operationId: upload_log_data + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' 
+ name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + multipart/form-data: + schema: + $ref: '#/components/schemas/Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: invalid request + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logs/{logid}/statistics': + get: + tags: + - Log + summary: Data statistics + description: 'This API will return count, mean, std, min, max and percentiles of each column' + operationId: get_log_data_statistics_ddms_v2_logs__logid__statistics_get + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/GetStatisticResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/logs/{logid}/decimated': + get: + tags: + - Log + summary: Returns a decimated version of all data within the specified filters. Eventually consistent. + description: |- + TODO + Note: row order is not preserved. + operationId: get_log_decimated + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: Number of division desired + required: false + schema: + title: Quantiles + type: integer + description: Number of division desired + name: quantiles + in: query + - description: The start value for the log decimation + required: false + schema: + title: Start + type: number + description: The start value for the log decimation + name: start + in: query + - description: The stop value for the log decimation + required: false + schema: + title: Stop + type: number + description: The stop value for the log decimation + name: stop + in: query + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' 
+ required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '404': + description: log not found + '422': + description: log is not compatible with decimation + security: + - OpenDESBearerToken: [] + /ddms/v2/dipsets: + post: + tags: + - Dipset + summary: 'Create or update the DipSets using wks:dipSet:1.0.0 schema' + operationId: post_dipset + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Dipsets + type: array + items: + $ref: '#/components/schemas/dipset' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/dipsets/{dipsetid}/versions/{version}': + get: + tags: + - Dipset + summary: 'Get the given version of DipSet using wks:dipset:1.0.0 schema' + description: '"Get the DipSet object using its **id**.' 
+ operationId: get_dipset_version + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/dipset' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/dipsets/{dipsetid}/versions': + get: + tags: + - Dipset + summary: Get all versions of the dipset + operationId: get_dipset_versions + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/dipsets/{dipsetid}': + get: + tags: + - Dipset + summary: 'Get the DipSet using wks:dipSet:1.0.0 schema' + description: Get the DipSet object using its **id** + operationId: get_dipset + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/dipset' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + delete: + tags: + - Dipset + summary: Delete the DipSet. 
The API performs a logical deletion of the given record + operationId: del_dipset + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: Whether or not to delete the record's children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete the record's children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/dipsets/{dipsetid}/dips': + get: + tags: + - Dips + summary: Get dips + description: |- + Return dips from the dipset, starting at the given index, up to the given number of dips specified in query parameters. + If not specified, all dips from the dipset are returned. + operationId: get_dips + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: false + schema: + title: Index + minimum: 0 + type: integer + name: index + in: query + - required: false + schema: + title: Limit + minimum: 0 + type: integer + name: limit + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Get Dips Ddms V2 Dipsets Dipsetid Dips Get + type: array + items: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + post: + tags: + - Dips + summary: Define the dips of the dipset + description: Replace previous dips with the provided dips. Dips are sorted by reference and azimuth. + operationId: post_dips + parameters: + - description: The ID of the dipset + required: true + schema: + title: Dipsetid + type: string + description: The ID of the dipset + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Dips + type: array + items: + $ref: '#/components/schemas/Dip' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Post Dips Ddms V2 Dipsets Dipsetid Dips Post + type: array + items: + $ref: '#/components/schemas/Dip' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/dipsets/{dipsetid}/dips/insert': + post: + tags: + - Dips + summary: Insert dips in a dipset + description: |- + Insert dips in the dipset. + Existing dips are not replaced. + Several dips can have the same reference.
+ Operation will sort by reference all dips in dipset (may modify dip indexes). + operationId: insert_dips + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Dips + type: array + items: + $ref: '#/components/schemas/Dip' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Insert Dips Ddms V2 Dipsets Dipsetid Dips Insert Post + type: array + items: + $ref: '#/components/schemas/Dip' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/dipsets/{dipsetid}/dips/query': + get: + tags: + - Dips + summary: Query dip from dipset + description: Search dip within reference interval and specific classification + operationId: query_dip + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - description: Min reference for the dips to search in the dipset + required: false + schema: + title: Minreference + type: number + description: Min reference for the dips to search in the dipset + name: minReference + in: query + - required: false + schema: + title: Max reference for the dips to search in the dipset + type: number + name: maxReference + in: query + - required: false + schema: + title: Classification for the dip to search in the dipset + type: string + name: classification + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Query Dip Ddms V2 Dipsets Dipsetid Dips Query Get + type: array + items: + $ref: '#/components/schemas/Dip' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/v2/dipsets/{dipsetid}/dips/{index}': + get: + tags: + - Dips + summary: Get a dip at index + description: '"Return dip from dipset at the given index' + operationId: get_dip_by_index + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Index + type: integer + name: index + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet or index not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + delete: + tags: + - Dips + summary: Delete a dip + description: Removes the dip at index + operationId: delete_dip_by_index 
+ parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Index + type: integer + name: index + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Delete Dip By Index Ddms V2 Dipsets Dipsetid Dips Index Delete + type: array + items: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet or index not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + patch: + tags: + - Dips + summary: Update dip + description: |- + "Update dip at index + Operation will sort by reference all dips in dipset (may modify dip indexes). + operationId: patch_dip + parameters: + - required: true + schema: + title: Dipsetid + type: string + name: dipsetid + in: path + - required: true + schema: + title: Index + type: integer + name: index + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/Dip' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + title: Response Patch Dip Ddms V2 Dipsets Dipsetid Dips Index Patch + type: array + items: + $ref: '#/components/schemas/Dip' + '404': + description: DipSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/query: + post: + tags: + - search + summary: Query + operationId: query_ddms_query_post + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRequest' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/query_with_cursor: + post: + tags: + - search + summary: Query with cursor + operationId: query_ddms_query_with_cursor_post + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRequest' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - 
OpenDESBearerToken: [] + /ddms/query/wellbores: + post: + tags: + - search + summary: Query with cursor + description: |- + Get all Wellbores object. <p>The wellbore kind is + *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/query/wellbores/bydistance: + post: + tags: + - search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores object in a specific area. <p>The specific area will be define by a circle + based on its center coordinates (lat, lon) and radius (meters) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_bydistance_post + parameters: + - required: true + schema: + title: Latitude + type: number + name: latitude + in: query + - required: true + schema: + title: Longitude + type: number + name: longitude + in: query + - required: true + schema: + title: Distance + type: integer + name: distance + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/query/wellbores/byboundingbox: + post: + tags: + - search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores object in a specific area. 
<p>The specific area will be defined by a square + based on its top left coordinates (lat, lon) and its bottom right coordinates (lon, lat) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_byboundingbox_post + parameters: + - required: true + schema: + title: Latitude Top Left + type: number + name: latitude_top_left + in: query + - required: true + schema: + title: Longitude Top Left + type: number + name: longitude_top_left + in: query + - required: true + schema: + title: Latitude Bottom Right + type: number + name: latitude_bottom_right + in: query + - required: true + schema: + title: Longitude Bottom Right + type: number + name: longitude_bottom_right + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/query/wellbores/bygeopolygon: + post: + tags: + - search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores objects in a specific area. <p>The specific area will be defined by a + polygon based on each of its coordinates (lat, lon) with a minimum of three</p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_bygeopolygon_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Points + type: array + items: + $ref: '#/components/schemas/Point' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/query/wellbore/{wellbore_id}/logsets': + post: + tags: + - search + summary: 'Query with cursor, search logSets by wellbore ID' + description: |- + Get all LogSets objects using its relationship Wellbore ID.
<p>All LogSets linked to this + specific ID will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbore__wellbore_id__logsets_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/query/wellbores/{wellbore_attribute}/logsets': + post: + tags: + - search + summary: 'Query with cursor, search logSets by wellbore attribute' + description: |- + Get all LogSets object using a specific attribute of Wellbores. <p>All LogSets linked to Wellbores + with this specific attribute will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores__wellbore_attribute__logsets_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/query/logs: + post: + tags: + - search + summary: 'Query with cursor, gets logs' + description: |- + Get all Logs object. <p>The Logs kind is + *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_logs_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/query/wellbore/{wellbore_id}/logs': + post: + tags: + - search + summary: 'Query with cursor, search logs by wellbore ID' + description: |- + Get all Logs object using its relationship Wellbore ID. 
<p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbore__wellbore_id__logs_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/query/wellbores/{wellbore_attribute}/logs': + post: + tags: + - search + summary: 'Query with cursor, search logs by wellbore attribute' + description: |- + Get all Logs object using a specific attribute of Wellbores. <p>All Logs linked to Wellbores + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores__wellbore_attribute__logs_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/query/logset/{logset_id}/logs': + post: + tags: + - search + summary: 'Query with cursor, search logs by logSet ID' + description: |- + Get all Logs object using its relationship Logset ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_logset__logset_id__logs_post + parameters: + - required: true + schema: + title: Logset Id + type: string + name: logset_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/query/logsets/{logset_attribute}/logs': + post: + tags: + - search + summary: 'Query with cursor, search logs by logSet attribute' + description: |- + Get all Logs object using a specific attribute of LogSets. 
<p>All Logs linked to LogSets + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_logsets__logset_attribute__logs_post + parameters: + - required: true + schema: + title: Logset Attribute + type: string + name: logset_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/query/wellbore/{wellbore_id}/markers': + post: + tags: + - search + summary: 'Query with cursor, search markers by wellbore ID' + description: |- + Get all Markers objects using its relationship Wellbore ID. <p>All Markers linked to this + specific ID will be returned</p> + <p>The Marker kind is *:wks:marker:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbore__wellbore_id__markers_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/fastquery/wellbores: + post: + tags: + - fast-search + summary: Query with cursor + description: |- + Get all Wellbores IDs objects. <p>The wellbore kind is + *:wks:wellbore:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/fastquery/wellbores/bydistance: + post: + tags: + - fast-search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores IDs objects in a specific area.
<p>The specific area will be defined by a circle + based on its center coordinates (lat, lon) and radius (meters) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_bydistance_post + parameters: + - required: true + schema: + title: Latitude + type: number + name: latitude + in: query + - required: true + schema: + title: Longitude + type: number + name: longitude + in: query + - required: true + schema: + title: Distance + type: integer + name: distance + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/fastquery/wellbores/byboundingbox: + post: + tags: + - fast-search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores IDs objects in a specific area. <p>The specific area will be defined by a square + based on its top left coordinates (lat, lon) and its bottom right coordinates (lon, lat) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_byboundingbox_post + parameters: + - required: true + schema: + title: Latitude Top Left + type: number + name: latitude_top_left + in: query + - required: true + schema: + title: Longitude Top Left + type: number + name: longitude_top_left + in: query + - required: true + schema: + title: Latitude Bottom Right + type: number + name: latitude_bottom_right + in: query + - required: true + schema: + title: Longitude Bottom Right + type: number + name: longitude_bottom_right + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/fastquery/wellbores/bygeopolygon: + post: + tags: + - fast-search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores IDs objects in a specific area.
<p>The specific area will be define by a + polygon based on each of its coordinates (lat, lon) with a minimum of three</p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_bygeopolygon_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Points + type: array + items: + $ref: '#/components/schemas/Point' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/fastquery/wellbore/{wellbore_id}/logsets': + post: + tags: + - fast-search + summary: 'Query with cursor, search logSets IDs by wellbore ID' + description: |- + Get all LogSets IDs objects using its relationship Wellbore ID. <p>All LogSets linked to this + specific ID will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbore__wellbore_id__logsets_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/fastquery/wellbores/{wellbore_attribute}/logsets': + post: + tags: + - fast-search + summary: 'Query with cursor, search logSets IDs by wellbore attribute' + description: |- + Get all LogSets IDs objects using a specific attribute of Wellbores. 
<p>All LogSets linked to Wellbores + with this specific attribute will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores__wellbore_attribute__logsets_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /ddms/fastquery/logs: + post: + tags: + - fast-search + summary: 'Query with cursor, gets logs' + description: |- + Get all Logs object. <p>The Logs kind is + *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_logs_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/fastquery/wellbore/{wellbore_id}/logs': + post: + tags: + - fast-search + summary: 'Query with cursor, search logs IDs by wellbore ID' + description: |- + Get all Logs IDs objects using its relationship Wellbore ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbore__wellbore_id__logs_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/fastquery/wellbores/{wellbore_attribute}/logs': + post: + tags: + - fast-search + summary: 'Query with cursor, search logs IDs by wellbore attribute' + description: |- + Get all Logs IDs objects using a specific attribute of Wellbores. 
<p>All Logs linked to Wellbores + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores__wellbore_attribute__logs_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/fastquery/logset/{logset_id}/logs': + post: + tags: + - fast-search + summary: 'Query with cursor, search logs IDs by logSet ID' + description: |- + Get all Logs IDs objects using its relationship Logset ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_logset__logset_id__logs_post + parameters: + - required: true + schema: + title: Logset Id + type: string + name: logset_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/fastquery/logsets/{logset_attribute}/logs': + post: + tags: + - fast-search + summary: 'Query with cursor, search logs IDs by logSet attribute' + description: |- + Get all Logs IDs objects using a specific attribute of LogSets. 
<p>All Logs linked to LogSets + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_logsets__logset_attribute__logs_post + parameters: + - required: true + schema: + title: Logset Attribute + type: string + name: logset_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + '/ddms/fastquery/wellbore/{wellbore_id}/markers': + post: + tags: + - fast-search + summary: 'Query with cursor, search markers IDs by wellbore ID' + description: |- + Get all Markers IDs objects using its relationship Wellbore ID. <p>All Markers linked to this + specific ID will be returned</p> + <p>The Marker kind is *:wks:marker:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbore__wellbore_id__markers_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /log-recognition/family: + post: + tags: + - log-recognition + summary: Recognize family and unit + description: Find the most probable family and unit using family assignment rule based catalogs. User defined catalog will have the priority. + operationId: family + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/GuessRequest' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/GuessResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] + /log-recognition/upload-catalog: + put: + tags: + - log-recognition + summary: Upload user-defined catalog with family assignment rules + description: |- + Upload user-defined catalog with family assignment rules for specific partition ID. + If there is an existing catalog, it will be replaced. It takes maximum of 5 mins to replace the existing catalog. 
+ Hence, any call to retrieve the family should be made after 5 mins of uploading the catalog + operationId: upload-catalog + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/CatalogRecord' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - OpenDESBearerToken: [] +components: + schemas: + AboutResponse: + title: AboutResponse + type: object + properties: + service: + title: Service + type: string + version: + title: Version + type: string + buildNumber: + title: Buildnumber + type: string + cloudEnvironment: + title: Cloudenvironment + type: string + AboutResponseUser: + title: AboutResponseUser + type: object + properties: + tenant: + title: Tenant + type: string + email: + title: Email + type: string + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post: + title: Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post + required: + - file + type: object + properties: + file: + title: File + type: string + format: binary + ByBoundingBox: + title: ByBoundingBox + required: + - topLeft + - bottomRight + type: object + properties: + topLeft: + $ref: '#/components/schemas/Point' + bottomRight: + $ref: '#/components/schemas/Point' + ByDistance: + title: ByDistance + required: + - point + type: object + properties: + distance: + title: Distance + type: number + point: + $ref: '#/components/schemas/Point' + ByGeoPolygon: + title: ByGeoPolygon + type: object + properties: + points: + title: Points + type: array + items: + $ref: '#/components/schemas/Point' + Catalog: + title: Catalog + required: + - family_catalog + type: object + properties: + family_catalog: + title: Family Catalog + type: array + items: + $ref: '#/components/schemas/CatalogItem' + main_family_catalog: + title: Main Family Catalog + type: array + items: + $ref: '#/components/schemas/MainFanilyCatalogItem' + CatalogItem: + title: CatalogItem + required: + - unit + - rule + type: object + properties: + unit: + title: Unit + type: string + family: + title: Family + type: string + default: '' + rule: + title: Rule + type: string + CatalogRecord: + title: CatalogRecord + required: + - acl + - legal + - data + type: object + properties: + acl: + $ref: '#/components/schemas/StorageAcl' + legal: + $ref: '#/components/schemas/odes_storage__models__Legal' + data: + $ref: '#/components/schemas/Catalog' + example: + acl: + viewers: + - 'abc@example.com, cde@example.com' + owners: + - 'abc@example.com, cde@example.com' + legal: + legaltags: + - opendes-public-usa-dataset-1 + otherRelevantDataCountries: + - US + data: + family_catalog: + - unit: ohm.m + family: Medium Resistivity + rule: MEDR + main_family_catalog: + - MainFamily: Resistivity + Family: Medium Resistivity + Unit: OHMM + CreateUpdateRecordsResponse: + title: CreateUpdateRecordsResponse + type: object + properties: + recordCount: + title: Recordcount + type: integer + recordIds: + 
title: Recordids + type: array + items: + type: string + skippedRecordIds: + title: Skippedrecordids + type: array + items: + type: string + DataType: + title: DataType + enum: + - string + - number + - integer + - boolean + description: An enumeration. + DataType_1: + title: DataType_1 + enum: + - string + - number + - integer + - boolean + description: An enumeration. + DataType_2: + title: DataType_2 + enum: + - string + - number + - integer + - boolean + - date-time + description: An enumeration. + Dip: + title: Dip + required: + - reference + - azimuth + - inclination + type: object + properties: + reference: + title: Reference of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only Measured Depth in meter is supported for the moment + azimuth: + title: Azimuth value of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only degrees unit is supported for the moment + inclination: + title: Inclination value of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only degrees unit is supported for the moment + quality: + title: Quality of the dip + exclusiveMaximum: true + maximum: 1 + exclusiveMinimum: false + minimum: 0 + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Decimal number between 0 and 1 + xCoordinate: + title: The X coordinate of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only meter unit is supported for the moment + yCoordinate: + title: The Y coordinate of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only meter unit is supported for the moment + zCoordinate: + title: The Z coordinate of the dip + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Only meter unit is supported for the moment + classification: + title: Classification of the dip + type: string + description: Any string is accepted. + example: + reference: + unitKey: meter + value: 1000.5 + azimuth: + unitKey: dega + value: 42 + inclination: + unitKey: dega + value: 9 + quality: + unitKey: unitless + value: 0.5 + xCoordinate: + unitKey: meter + value: 2 + yCoordinate: + unitKey: meter + value: 45 + zCoordinate: + unitKey: meter + value: 7 + classification: fracture + DirectionWell: + title: DirectionWell + enum: + - huff-n-puff + - injector + - producer + - uncertain + - unknown + description: An enumeration. + FluidWell: + title: FluidWell + enum: + - air + - condensate + - dry + - gas + - gas-water + - non HC gas + - non HC gas -- CO2 + - oil + - oil-gas + - oil-water + - steam + - water + - water -- brine + - water -- fresh water + - unknown + description: An enumeration. + Format: + title: Format + enum: + - date + - date-time + - time + - byte + - binary + - boolean + - email + - uuid + - uri + - int8 + - int16 + - int32 + - int64 + - float32 + - float64 + - float128 + description: An enumeration. + Format_1: + title: Format_1 + enum: + - date + - date-time + - time + - byte + - binary + - boolean + - email + - uuid + - uri + - int8 + - int16 + - int32 + - int64 + - float32 + - float64 + - float128 + description: An enumeration. + Format_2: + title: Format_2 + enum: + - date + - date-time + - time + - byte + - binary + - email + - uuid + - uri + - int8 + - int16 + - int32 + - int64 + - float32 + - float64 + - float128 + description: An enumeration. 
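The log-recognition operations and the GuessRequest/CatalogRecord/CreateUpdateRecordsResponse schemas above lend themselves to a short client illustration. The sketch below is hypothetical and not part of the service definition: the base URL, bearer token and partition id are placeholders and the `requests` library is an assumption; only the paths, the `data-partition-id` header and the body shapes are taken from this spec.

```python
# Hypothetical client sketch for the log-recognition endpoints defined above.
# BASE_URL, the bearer token and the partition id are placeholders; request and
# response shapes follow the GuessRequest, CatalogRecord and
# CreateUpdateRecordsResponse schemas in this spec.
import requests

BASE_URL = "https://<wellbore-ddms-host>"      # assumed deployment URL
HEADERS = {
    "Authorization": "Bearer <access-token>",  # OpenDESBearerToken
    "data-partition-id": "<partition-id>",     # optional header declared on both operations
}

# POST /log-recognition/family: recognize the most probable family and unit.
guess = requests.post(
    f"{BASE_URL}/log-recognition/family",
    headers=HEADERS,
    json={"label": "GRD", "log_unit": "GAPI", "description": "LDTD Gamma Ray"},  # GuessRequest example
)
guess.raise_for_status()
print(guess.json())  # GuessResponse: family, family_type, log_unit, base_unit

# PUT /log-recognition/upload-catalog: replace the user-defined catalog for the partition.
catalog_record = {
    "acl": {"viewers": ["abc@example.com"], "owners": ["abc@example.com"]},
    "legal": {
        "legaltags": ["opendes-public-usa-dataset-1"],
        "otherRelevantDataCountries": ["US"],
    },
    "data": {
        "family_catalog": [
            {"unit": "ohm.m", "family": "Medium Resistivity", "rule": "MEDR"}
        ]
    },
}
upload = requests.put(
    f"{BASE_URL}/log-recognition/upload-catalog", headers=HEADERS, json=catalog_record
)
upload.raise_for_status()
print(upload.json())  # CreateUpdateRecordsResponse: recordCount, recordIds, skippedRecordIds
```

As noted in the upload-catalog description, a newly uploaded catalog can take up to 5 minutes to become effective, so family-recognition calls made immediately after the upload may still be answered from the previous catalog.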
+ GeoJsonFeature: + title: GeoJsonFeature + required: + - geometry + - properties + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + geometry: + title: Geometry + anyOf: + - $ref: '#/components/schemas/GeoJsonPoint' + - $ref: '#/components/schemas/GeoJsonMultiPoint' + - $ref: '#/components/schemas/GeoJsonLineString' + - $ref: '#/components/schemas/GeoJsonMultiLineString' + - $ref: '#/components/schemas/Polygon' + - $ref: '#/components/schemas/GeoJsonMultiPolygon' + - $ref: '#/components/schemas/geometryItem' + properties: + title: Properties + type: object + type: + $ref: '#/components/schemas/Type_1' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonFeatureCollection: + title: GeoJsonFeatureCollection + required: + - features + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + features: + title: Features + type: array + items: + $ref: '#/components/schemas/GeoJsonFeature' + type: + $ref: '#/components/schemas/Type_2' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonLineString: + title: GeoJsonLineString + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_3' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiLineString: + title: GeoJsonMultiLineString + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_4' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiPoint: + title: GeoJsonMultiPoint + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_5' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiPolygon: + title: GeoJsonMultiPolygon + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + description: 'Bounding box in longitude, latitude WGS 84.' + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: array + items: + type: number + description: 'Array of polygons (minimum 2D), containing an array of point coordinates (longitude, latitude, (optionally elevation and other properties).' 
+ type: + $ref: '#/components/schemas/Type_6' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonPoint: + title: GeoJsonPoint + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_7' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GetStatisticResponse: + title: GetStatisticResponse + required: + - columns + type: object + properties: + columns: + title: Columns + type: array + items: + $ref: '#/components/schemas/StatsColumn' + GuessRequest: + title: GuessRequest + required: + - label + type: object + properties: + label: + title: Label + type: string + log_unit: + title: Log Unit + type: string + description: + title: Description + type: string + example: + label: GRD + log_unit: GAPI + description: LDTD Gamma Ray + GuessResponse: + title: GuessResponse + type: object + properties: + family: + title: Family + type: string + family_type: + title: Family Type + type: string + log_unit: + title: Log Unit + type: string + base_unit: + title: Base Unit + type: string + HTTPValidationError: + title: HTTPValidationError + type: object + properties: + errors: + title: Errors + type: array + items: + $ref: '#/components/schemas/ValidationError' + Kind: + title: Kind + enum: + - CRS + - Unit + - Measurement + - AzimuthReference + - DateTime + type: string + description: An enumeration. + LinkList: + title: LinkList + type: object + properties: {} + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + MainFanilyCatalogItem: + title: MainFanilyCatalogItem + required: + - MainFamily + - Family + - Unit + type: object + properties: + MainFamily: + title: Mainfamily + type: string + Family: + title: Family + type: string + Unit: + title: Unit + type: string + MetaItem: + title: MetaItem + required: + - kind + - persistableReference + type: object + properties: + kind: + title: Reference Kind + allOf: + - $ref: '#/components/schemas/Kind' + description: 'The kind of reference, unit, measurement, CRS or azimuth reference.' + name: + title: Name or Symbol + type: string + description: The name of the CRS or the symbol/name of the unit + persistableReference: + title: Persistable Reference + type: string + description: The persistable reference string uniquely identifying the CRS or Unit + propertyNames: + title: Attribute Names + type: array + items: + type: string + description: 'The list of property names, to which this meta data item provides Unit/CRS context to. Data structures, which come in a single frame of reference, can register the property name, others require a full path like "data.structureA.propertyB" to define a unique context.' + propertyValues: + title: Attribute Names + type: array + items: + type: string + description: 'The list of property values, to which this meta data item provides Unit/CRS context to. Typically a unit symbol is a value to a data structure; this symbol is then registered in this propertyValues array and the persistableReference provides the absolute reference.' + uncertainty: + title: Uncertainty + type: number + description: The uncertainty of the values measured given the unit or CRS unit. 
+ additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + PlssLocation: + title: PlssLocation + required: + - range + - section + - township + type: object + properties: + aliquotPart: + title: Aliquot Part + type: string + description: 'A terse, hierarchical reference to a piece of land, in which successive subdivisions of some larger area.' + range: + title: Range + type: string + description: 'Range, also known as Rng, R; a measure of the distance east or west from a referenced principal meridian, in units of six miles.' + section: + title: Section Number + type: integer + description: Section number (between 1 and 36) + township: + title: Township + type: string + description: 'Township, also known as T or Twp; (1) Synonym for survey township, i.e., a square parcel of land of 36 square miles, or (2) A measure of the distance north or south from a referenced baseline, in units of six miles' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Point: + title: Point + type: object + properties: + latitude: + title: Latitude + type: number + longitude: + title: Longitude + type: number + Point3dNonGeoJson: + title: Point3dNonGeoJson + required: + - coordinates + - crsKey + - unitKey + type: object + properties: + coordinates: + title: 3D Point + type: array + items: + type: number + description: '3-dimensional point; the first coordinate is typically pointing east (easting or longitude), the second coordinate typically points north (northing or latitude). The third coordinate is an elevation (upwards positive, downwards negative). The point''s CRS is given by the container.' + crsKey: + title: CRS Key + type: string + description: 'The ''crsKey'', which can be looked up in the ''frameOfReference.crs'' for further details.' + unitKey: + title: Unit Key + type: string + description: 'The ''unitKey'' for the 3rd coordinate, which can be looked up in the ''frameOfReference.unit'' for further details.' 
+ additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Polygon: + title: Polygon + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_8' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + QueryRequest: + title: QueryRequest + required: + - kind + type: object + properties: + kind: + title: Kind + type: string + limit: + title: Limit + type: integer + query: + title: Query + type: string + returnedFields: + title: Returnedfields + type: array + items: + type: string + sort: + $ref: '#/components/schemas/SortQuery' + queryAsOwner: + title: Queryasowner + type: boolean + spatialFilter: + $ref: '#/components/schemas/SpatialFilter' + offset: + title: Offset + type: integer + RecordVersions: + title: RecordVersions + type: object + properties: + recordId: + title: Recordid + type: string + versions: + title: Versions + type: array + items: + type: integer + ReferenceType: + title: ReferenceType + enum: + - Date + - Date Time + - Measured Depth + - Core depth + - True Vertical Depth + - True Vertical Depth Sub Sea + - One-Way Time + - Two-Way Time + description: An enumeration. + Shape: + title: Shape + enum: + - build and hold + - deviated + - double kickoff + - horizontal + - S-shaped + - vertical + - unknown + description: An enumeration. + SimpleElevationReference: + title: SimpleElevationReference + required: + - elevationFromMsl + type: object + properties: + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The elevation above mean sea level (MSL), at which the vertical origin is 0.0. The ''unitKey'' is further defined in ''frameOfReference.units''.' + name: + title: Elevation Reference Name + type: string + description: The name of the Elevation Reference. 
+ additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + SortQuery: + title: SortQuery + type: object + properties: + field: + title: Field + type: array + items: + type: string + order: + title: Order + type: string + enum: ['ASC', 'DESC'] + SpatialFilter: + title: SpatialFilter + type: object + properties: + field: + title: Field + type: string + byBoundingBox: + $ref: '#/components/schemas/ByBoundingBox' + byDistance: + $ref: '#/components/schemas/ByDistance' + byGeoPolygon: + $ref: '#/components/schemas/ByGeoPolygon' + StatsColumn: + title: StatsColumn + required: + - count + - mean + - std + - min + - 25% + - 50% + - 75% + - max + type: object + properties: + count: + title: Count + type: integer + description: Count number of non-NA/null observations + mean: + title: Mean + type: number + description: Mean of the values + std: + title: Std + type: number + description: Standard deviation of the observations + min: + title: Min + type: number + description: Minimum of the values in the object + 25%: + title: 25% + type: number + 50%: + title: 50% + type: number + 75%: + title: 75% + type: number + max: + title: Max + type: number + description: Maximum of the values in the object + StorageAcl: + title: StorageAcl + required: + - viewers + - owners + type: object + properties: + viewers: + title: Viewers + type: array + items: + type: string + owners: + title: Owners + type: array + items: + type: string + TagDictionary: + title: TagDictionary + type: object + properties: {} + description: Used for data model allows extra fields which are not declared initially in the pydantic model + ToOneRelationship: + title: ToOneRelationship + type: object + properties: + confidence: + title: Relationship Confidence + type: number + description: The confidence of the relationship. If the property is absent a well-known relation is implied. + id: + title: Related Object Id + type: string + description: 'The id of the related object in the Data Ecosystem. If set, the id has priority over the natural key in the name property.' + name: + title: Related Object Name + type: string + description: The name or natural key of the related object. This property is required if the target object id could not (yet) be identified. + version: + title: Entity Version Number + type: number + description: 'The version number of the related entity. If no version number is specified, the last version is implied.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Type: + title: Type + enum: + - GeometryCollection + description: An enumeration. + Type_1: + title: Type_1 + enum: + - Feature + description: An enumeration. + Type_2: + title: Type_2 + enum: + - FeatureCollection + description: An enumeration. + Type_3: + title: Type_3 + enum: + - LineString + description: An enumeration. + Type_4: + title: Type_4 + enum: + - MultiLineString + description: An enumeration. + Type_5: + title: Type_5 + enum: + - MultiPoint + description: An enumeration. + Type_6: + title: Type_6 + enum: + - MultiPolygon + description: An enumeration. + Type_7: + title: Type_7 + enum: + - Point + description: An enumeration. + Type_8: + title: Type_8 + enum: + - Polygon + description: An enumeration. 
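To make the relationship between QueryRequest, SpatialFilter, ByBoundingBox, Point and SortQuery more concrete, here is a minimal, purely illustrative request body assembled from those schemas. The coordinate values and limit are arbitrary; only the *:wks:wellbore:* kind pattern and the data.wellHeadWgs84 field are quoted from the fast-search descriptions in this spec.

```python
import json

# Illustrative QueryRequest body built from the QueryRequest, SpatialFilter,
# ByBoundingBox, Point and SortQuery schemas above. Values are examples only.
query_request = {
    "kind": "*:wks:wellbore:*",                 # kind pattern quoted in the fast-search descriptions
    "limit": 100,
    "returnedFields": ["id"],
    "sort": {"field": ["id"], "order": "ASC"},  # SortQuery: order is 'ASC' or 'DESC'
    "spatialFilter": {                          # SpatialFilter using a ByBoundingBox
        "field": "data.wellHeadWgs84",          # CRS field named in the fast-search summaries
        "byBoundingBox": {
            "topLeft": {"latitude": 61.0, "longitude": 2.0},      # Point
            "bottomRight": {"latitude": 59.0, "longitude": 4.0},  # Point
        },
    },
}

print(json.dumps(query_request, indent=2))
```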
+ V1AboutResponse: + title: V1AboutResponse + type: object + properties: + user: + $ref: '#/components/schemas/AboutResponseUser' + dmsInfo: + $ref: '#/components/schemas/V1DmsInfo' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + V1DmsInfo: + title: V1DmsInfo + type: object + properties: + kinds: + title: Kinds + type: array + items: + type: string + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + ValidationError: + title: ValidationError + required: + - loc + - msg + - type + type: object + properties: + loc: + title: Location + type: array + items: + type: string + msg: + title: Message + type: string + type: + title: Error Type + type: string + ValueWithUnit: + title: ValueWithUnit + required: + - unitKey + - value + type: object + properties: + unitKey: + title: Unit Key + type: string + description: Unit for value of the corresponding attribute for the domain object in question. The key can be looked up in the 'frameOfReference.units' for further details. + value: + title: Value + type: number + description: Value of the corresponding attribute for the domain object in question. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + VersionDetailsResponse: + title: VersionDetailsResponse + type: object + properties: + service: + title: Service + type: string + version: + title: Version + type: string + buildNumber: + title: Buildnumber + type: string + details: + title: Details + type: object + additionalProperties: + type: string + WellLocationType: + title: WellLocationType + enum: + - Onshore + - Offshore + - unknown + description: An enumeration. + WellPurpose: + title: WellPurpose + enum: + - appraisal + - appraisal -- confirmation appraisal + - appraisal -- exploratory appraisal + - exploration + - exploration -- deeper-pool wildcat + - exploration -- new-field wildcat + - exploration -- new-pool wildcat + - exploration -- outpost wildcat + - exploration -- shallower-pool wildcat + - development + - development -- infill development + - development -- injector + - development -- producer + - fluid storage + - fluid storage -- gas storage + - general srvc + - general srvc -- borehole re-acquisition + - general srvc -- observation + - general srvc -- relief + - general srvc -- research + - general srvc -- research -- drill test + - general srvc -- research -- strat test + - general srvc -- waste disposal + - mineral + - unknown + description: An enumeration. + WellStatus: + title: WellStatus + enum: + - abandoned + - active + - active -- injecting + - active -- producing + - completed + - drilling + - partially plugged + - permitted + - plugged and abandoned + - proposed + - sold + - suspended + - temporarily abandoned + - testing + - tight + - working over + - unknown + description: An enumeration. + WellType: + title: WellType + enum: + - bypass + - initial + - redrill + - reentry + - respud + - sidetrack + - unknown + description: An enumeration. 
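Every operation in this spec declares a 422 response whose body follows HTTPValidationError, i.e. an `errors` array of ValidationError items (`loc`, `msg`, `type`). A small, hypothetical helper for surfacing those errors might look like the sketch below; the `response` object is assumed to come from an HTTP client such as `requests`.

```python
# Hypothetical helper for reporting a 422 body shaped like HTTPValidationError:
# {"errors": [{"loc": [...], "msg": "...", "type": "..."}]} per the schemas above.
def report_validation_errors(response) -> None:
    if response.status_code != 422:
        return
    for err in response.json().get("errors", []):
        # ValidationError.loc is an array of strings locating the offending field
        location = ".".join(str(part) for part in err.get("loc", []))
        print(f"{location}: {err.get('msg')} ({err.get('type')})")
```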
+ WellborePurpose: + title: WellborePurpose + enum: + - appraisal + - appraisal -- confirmation appraisal + - appraisal -- exploratory appraisal + - exploration + - exploration -- deeper-pool wildcat + - exploration -- new-field wildcat + - exploration -- new-pool wildcat + - exploration -- outpost wildcat + - exploration -- shallower-pool wildcat + - development + - development -- infill development + - development -- injector + - development -- producer + - fluid storage + - fluid storage -- gas storage + - general srvc + - general srvc -- borehole re-acquisition + - general srvc -- observation + - general srvc -- relief + - general srvc -- research + - general srvc -- research -- drill test + - general srvc -- research -- strat test + - general srvc -- waste disposal + - mineral + - unknown + description: An enumeration. + WellboreStatus: + title: WellboreStatus + enum: + - abandoned + - active + - active -- injecting + - active -- producing + - completed + - drilling + - partially plugged + - permitted + - plugged and abandoned + - proposed + - sold + - suspended + - temporarily abandoned + - testing + - tight + - working over + - unknown + description: An enumeration. + WellboreType: + title: WellboreType + enum: + - bypass + - initial + - redrill + - reentry + - respud + - sidetrack + - unknown + description: An enumeration. + app__model__model_curated__Legal: + title: Legal + type: object + properties: + legaltags: + title: Legal Tags + type: array + items: + type: string + description: 'The list of legal tags, see compliance API.' + otherRelevantDataCountries: + title: Other Relevant Data Countries + type: array + items: + type: string + description: 'The list of other relevant data countries using the ISO 2-letter codes, see compliance API.' + status: + title: Legal Status + type: string + description: The legal status. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + basinContext: + title: basinContext + type: object + properties: + basinCode: + title: Basin Code + type: string + description: The code of the basin in which the well is located. + basinName: + title: Basin Name + type: string + description: The name of the basin in which the well is located. + subBasinCode: + title: Sub-Basin Code + type: string + description: The code of the sub-basin in which the well is located. + subBasinName: + title: Sub-Basin Name + type: string + description: The name of the sub-basin in which the well is located. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + channel: + title: channel + type: object + properties: + absentValue: + title: Absent Value + type: string + description: Optional field carrying the absent value as string for this channel. + dataType: + title: Data Type + allOf: + - $ref: '#/components/schemas/DataType' + description: The log value type (per log sample). The 'format' property may contain further hints about data type presentation. + default: number + dimension: + title: Dimension + type: integer + description: The dimension of this log or channel + family: + title: Log Family + type: string + description: The log family code of this log or channel (optional) + familyType: + title: Log Family Type + type: string + description: 'The log family type code of this log or channel. Example: ''Neutron Porosity'' for ''Thermal Neutron Porosity Sandstone''. 
(optional)' + format: + title: Format Hint + allOf: + - $ref: '#/components/schemas/Format' + description: Optional format hint how to treat the log values as strings or number of bits per 'dataType'. + default: float32 + logstoreId: + title: Logstore ID + type: number + description: The id of this log or channel in the Logstore. This property is not present in the index channel. + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + longName: + title: Log Long Name + type: string + description: The long name of this log or channel + mnemonic: + title: Mnemonic + type: string + description: The mnemonic of this log or channel + name: + title: Log Name + type: string + description: The name of this log or channel. + properties: + title: Named Properties + type: array + items: + $ref: '#/components/schemas/namedProperty' + description: The named properties of this log or channel. + source: + title: Source + type: string + description: 'The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated.' + unitKey: + title: Unit + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + core_dl_geopoint: + title: core_dl_geopoint + required: + - latitude + - longitude + type: object + properties: + latitude: + title: Latitude + maximum: 90 + minimum: -90 + type: number + description: 'The latitude value in degrees of arc (dega). Value range [-90, 90].' + longitude: + title: Longitude + maximum: 180 + minimum: -180 + type: number + description: 'The longitude value in degrees of arc (dega). Value range [-180, 180]' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + dipSetData: + title: dipSetData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: Azimuth reference code defining the type of North. Only used for dipSets with azimuth data + classification: + title: Log Set Classification + type: string + description: The well-known log set classification code. + default: Externally Processed LogSet + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + $ref: '#/components/schemas/SimpleElevationReference' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + name: + title: Dip Set Name + type: string + description: The name of this dip set + operation: + title: Operation + type: string + description: The operation which created this entity + reference: + $ref: '#/components/schemas/channel' + referenceType: + title: Reference Type + type: string + description: The reference index type of the dip set. 
+ relationships: + $ref: '#/components/schemas/dipsetrelationships' + start: + $ref: '#/components/schemas/ValueWithUnit' + step: + $ref: '#/components/schemas/ValueWithUnit' + stop: + $ref: '#/components/schemas/ValueWithUnit' + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + dipset: + title: dipset + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Dip Set Data + allOf: + - $ref: '#/components/schemas/dipSetData' + description: dipset data + id: + title: Dip Set ID + type: string + description: The unique identifier of the dip set + kind: + title: Dip Set Kind + type: string + description: Kind specification + default: 'osdu:wks:dipSet:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/app__model__model_curated__Legal' + description: The dip-set's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this dip set; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + dipsetrelationships: + title: dipsetrelationships + required: + - wellbore + type: object + properties: + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this dipSet belongs. Only required if the wellbore is unknown. + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this dipSet belongs. + wellboreSection: + title: Wellbore Section + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore section to which this dipSet belongs. + referenceLog: + title: True dip azimuth log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The true dip azimuth log of the dipset. + trueDipAzimuthLog: + title: True dip azimuth log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The true dip azimuth log of the dipset. 
+ trueDipInclinationLog: + title: X-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The X-coordinate log of the dipset + xCoordinateLog: + title: X-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The X-coordinate log of the dipset + yCoordinateLog: + title: Y-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The Y-coordinate log of the dipset + zCoordinateLog: + title: Z-coordinate log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The Z-coordinate log of the dipset + qualityLog: + title: Quality log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The quality log of the dipset + classificationLog: + title: Classification log + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The classification log of the dipset + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + geographicPosition: + title: geographicPosition + required: + - crsKey + - elevationFromMsl + - latitude + - longitude + type: object + properties: + crsKey: + title: CRS Key + type: string + description: 'The ''crsKey'', which can be looked up in the ''frameOfReference.crs'' for further details.' + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Seal Level, downwards negative. The unit definition is found via ''elevationFromMsl.unitKey'' in ''frameOfReference.units'' dictionary.' + latitude: + title: Native Latitude + type: number + description: Native or original latitude (unit defined by CRS) + longitude: + title: Native Longitude + type: number + description: Native or original longitude (unit defined by CRS) + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + geometryItem: + title: geometryItem + required: + - geometries + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + geometries: + title: Geometries + type: array + items: + anyOf: + - $ref: '#/components/schemas/GeoJsonPoint' + - $ref: '#/components/schemas/GeoJsonMultiPoint' + - $ref: '#/components/schemas/GeoJsonLineString' + - $ref: '#/components/schemas/GeoJsonMultiLineString' + - $ref: '#/components/schemas/Polygon' + - $ref: '#/components/schemas/GeoJsonMultiPolygon' + type: + $ref: '#/components/schemas/Type' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + historyRecord: + title: historyRecord + type: object + properties: + date: + title: Date and Time + type: string + description: The UTC date time of the log creation/processing + format: date-time + description: + title: ' Description' + type: string + description: 'The description of the context, which produced the log.' + user: + title: User + type: string + description: The user running the log processing. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + log: + title: log + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. 
+ ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Log Data + allOf: + - $ref: '#/components/schemas/logData' + description: Log data associated with a wellbore + id: + title: Log Set ID + type: string + description: The unique identifier of the log + kind: + title: Log Kind + type: string + description: Kind specification + default: 'osdu:wks:log:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/app__model__model_curated__Legal' + description: The log's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + status: + title: Entity Status + type: string + description: The status of this log + default: compliant + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this log; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logData: + title: logData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: 'Only supplied with azimuth logs: the azimuth reference code defining the type of North, default TN for true north.' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The wellbore''s elevation reference from mean sea level (MSL), positive above MSL. This is where the index, e.g. MD == 0 and TVD == 0.' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + history: + title: History Records + type: array + items: + $ref: '#/components/schemas/historyRecord' + description: An array of historyRecords describing the context for the log's creation or processing. + log: + title: Log Channel + allOf: + - $ref: '#/components/schemas/logchannel' + description: The log containing the log meta data and log-store reference. + name: + title: Log Set Name + type: string + description: The name of this log set + operation: + title: Operation + type: string + description: The operation which created this Log + reference: + title: Reference Index + allOf: + - $ref: '#/components/schemas/logchannel' + description: 'The reference index - only populated for logs, which are member of a logSet and share the reference index.' + referenceType: + title: Index Type + allOf: + - $ref: '#/components/schemas/ReferenceType' + description: The reference index type of the log set. + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/logRelationships' + description: The related entities. 
+ start: + title: Start + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The start index value of the log set. + step: + title: Step + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The index increment value of the log set. Only populated if the log is regularly sampled. + stop: + title: Stop + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The stop index value of the log set. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + logRelationships: + title: logRelationships + type: object + properties: + logSet: + title: LogSet + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The logSet to which this log belongs. If the log is not part of a log set this relationship stays empty. + timeDepthRelation: + title: TimeDepthRelation LogSet + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The timeDepthRelation to which this log belongs. If the log is not part of a timeDepthRelation this relationship stays empty. + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this log belongs. Only required if the wellbore is unknown. + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this log belongs. This relationship is the most important; only the wellbore can provide the unique context for the measured depth index. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logSetData: + title: logSetData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: Azimuth reference code defining the type of North. Only used for logSets with azimuth data + channelMnemonics: + title: Channel Mnemonics + type: array + items: + type: string + description: A list of channel Mnemonics in this log set. + channelNames: + title: Channel Names + type: array + items: + type: string + description: A list of channel long names in this log set. + classification: + title: Log Set Classification + type: string + description: The well-known log set classification code. + default: Externally Processed LogSet + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + $ref: '#/components/schemas/SimpleElevationReference' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + name: + title: Log Set Name + type: string + description: The name of this log set + operation: + title: Operation + type: string + description: The operation which created this entity + reference: + $ref: '#/components/schemas/channel' + referenceType: + title: Reference Type + type: string + description: The reference index type of the log set. 
+ relationships: + $ref: '#/components/schemas/logsetrelationships' + start: + $ref: '#/components/schemas/ValueWithUnit' + step: + $ref: '#/components/schemas/ValueWithUnit' + stop: + $ref: '#/components/schemas/ValueWithUnit' + description: Used for data model allows extra fields which are not declared initially in the pydantic model + logchannel: + title: logchannel + type: object + properties: + columnNames: + title: Column Names + type: array + items: + type: string + description: A list of names for multi-dimensional logs (dimension>1). The length of this array is expected to be equal to 'dimension'. For one-dimensional this property stays empty as the columnName is by definition the log name. + dataType: + title: Data Type + allOf: + - $ref: '#/components/schemas/DataType_2' + description: The log value type (per log sample). The 'format' property may contain further hints about data type presentation. + default: number + dimension: + title: Dimension + type: integer + description: The dimension of this log or channel + family: + title: Log Family + type: string + description: The log family code of this log or channel (optional) + familyType: + title: Log Family Type + type: string + description: 'The log family type code of this log or channel. Example: ''Neutron Porosity'' for ''Thermal Neutron Porosity Sandstone''. (optional)' + format: + title: Format Hint + allOf: + - $ref: '#/components/schemas/Format_2' + description: Optional format hint how to treat the log values as strings or number of bits per 'dataType'. + default: float32 + logstoreId: + title: Logstore ID + type: number + description: The unique id of this log or channel in the Logstore. This property is not present in the index channel. + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + longName: + title: Log Long Name + type: string + description: The long name of this log or channel + mnemonic: + title: Mnemonic + type: string + description: The mnemonic of this log or channel + name: + title: Log Name + type: string + description: The name of this log or channel. + properties: + title: Named Properties + type: array + items: + $ref: '#/components/schemas/namedProperty' + description: The named properties of this log or channel. + source: + title: Source + type: string + description: 'The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated.' + unitKey: + title: Unit + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logset: + title: logset + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' 
+ data: + title: Log Set Data + allOf: + - $ref: '#/components/schemas/logSetData' + description: Log channel set associated with a wellbore + id: + title: Log Set ID + type: string + description: The unique identifier of the log set + kind: + title: Log Set Kind + type: string + description: Kind specification + default: 'osdu:wks:logSet:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/app__model__model_curated__Legal' + description: The log-set's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this log set; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logsetrelationships: + title: logsetrelationships + required: + - wellbore + type: object + properties: + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this logSet belongs. Only required if the wellbore is unknown. + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this logSet belongs. + wellboreSection: + title: Wellbore Section + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellboreSection to which this logSet belongs. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + marker: + title: marker + required: + - acl + - kind + - legal + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Marker Data + allOf: + - $ref: '#/components/schemas/markerData' + description: 'Geological marker using a single point-observation, typically along a wellbore.' + id: + title: Marker ID + type: string + description: The unique identifier of the marker + kind: + title: Marker Kind + type: string + description: Marker kind specification + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/app__model__model_curated__Legal' + description: The marker's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this marker; set by the framework. 
+ additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + markerData: + title: markerData + required: + - md + - name + type: object + properties: + age: + title: Age + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The absolute age at the feature boundary. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. + boundaryRelation: + title: Interface Boundary Relation + type: string + description: The marker boundary relationship classification + classification: + title: Marker Classification + type: string + description: 'The classification of the marker. Could be client-defined via a catalog, e.g. common:wke:markerClassification:1.0.0 and common:wke:markerClassificationMember:1.0.0' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + depth: + title: Marker Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The original marker depth - measured from data.elevationReference in data.depthReferenceType. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. + depthReferenceType: + title: Depth Reference Code + type: string + description: Depth reference code defining the type of depth for the marker. Default MD (measured depth). Depth is downwards increasing. + default: MD + elevationReference: + title: Elevation Reference Level + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The elevation from mean sea level (MSL), where depth, topDepth, baseDepth are zero. Values above MSL are positive.' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + interpreter: + title: Interpreter Name + type: string + description: The name of the interpreter who picked this marker. + locationWGS84: + title: GeoJSON Marker Location + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: The marker's shape as GeoJSON Point. + markerFeatureType: + title: Marker Feature Type + type: string + description: 'The marker''s type of feature like ''seismic'', ''structural'', ''stratigraphic''' + markerGeoDomain: + title: Marker GeoScience Domain + type: string + description: 'The marker''s GeoScience domain like ''geologic'', ''reservoir'', ''petrophysical''' + markerSubFeatureAttribute: + title: Marker Sub-feature Attribute + type: string + description: 'Further specification of the marker''s sub-feature, e.g. in sequence stratigraphy.' + markerSubFeatureType: + title: Marker Sub-feature Type + type: string + description: 'The marker''s sub-type of the feature like ''horizon'', ''fault'', ''fracture''' + md: + title: Marker Measured Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The marker measured depth (MD) measured from data.elevationReference. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. 
+ name: + title: Marker Name + type: string + description: The name of the marker + planeOrientationAzimuth: + title: Azimuth Angle + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Azimuth angle. The azimuth reference is given by data.azimuthReference. The 'planeOrientationAzimuth.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + planeOrientationDip: + title: Dip Angle + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Dip angle. The 'planeOrientationDip.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/markerrelationships' + description: The entities related to this marker. + stratigraphicHierarchyLevel: + title: Column Level + type: integer + description: 'Optional hierarchical level in the chrono-stratigraphic/litho-stratigraphic catalog table, identified by the data.relationships.chartId' + tvd: + title: Marker True Vertical Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The marker true vertical depth (TVD) measured from data.elevationReference. The unit definition is found via the property's 'unitKey' in 'frameOfReference.units' dictionary. + wgs84ElevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Sea Level, downwards negative. The unit definition is found via the property''s ''unitKey'' in ''frameOfReference.units'' dictionary.' + wgs84LatitudeLongitude: + title: WGS 84 Latitude Longitude + allOf: + - $ref: '#/components/schemas/core_dl_geopoint' + description: The marker's position in WGS 84 latitude and longitude. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + markerrelationships: + title: markerrelationships + type: object + properties: + horizon: + title: Stratigraphic Horizon + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The related stratigraphic horizon + stratigraphicTable: + title: Stratigraphic Table + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: 'The related stratigraphic table, which provides the context for the stratigraphic horizon' + study: + title: Study + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: 'The study, in which this marker was conceived.' + trajectory: + title: Trajectory + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The trajectory used to create the marker position + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: 'The wellbore entity, to which this marker belongs.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + namedProperty: + title: namedProperty + type: object + properties: + associations: + title: Associations + type: array + items: + type: string + description: The optional associations contain one or more mnemonics found elsewhere in the logSet. + description: + title: Property Description + type: string + description: The description and role of this property. + format: + title: Format (LAS) + type: string + description: 'An optional format declaration for the property values.
The ''A'' prefix indicates an array; string values are represented by ''S''; floating point values are represented by ''F'', optionally followed by a field specification, e.g. ''F10.4''; exponential number representations are represented by ''E''; integer values are represented by ''I''. For further information see the LAS specification http://www.cwls.org/las/.' + name: + title: Property Name + type: string + description: The name of this property. + unitKey: + title: Property Unit Symbol + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + value: + title: Property Value + anyOf: + - type: number + - type: string + description: The value for this property as a string or a number. + values: + title: Property Values (Interval) + type: array + items: + type: number + description: 'The values, e.g. interval boundaries, for this property.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + odes_storage__models__Legal: + title: Legal + type: object + properties: + legaltags: + title: Legaltags + type: array + items: + type: string + otherRelevantDataCountries: + title: Otherrelevantdatacountries + type: array + items: + type: string + projectedPosition: + title: projectedPosition + required: + - crsKey + - elevationFromMsl + - x + - 'y' + type: object + properties: + crsKey: + title: CRS Key + type: string + description: 'The ''crsKey'', which can be looked up in the ''frameOfReference.crs'' for further details.' + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Seal Level, downwards negative. The unit definition is found via ''elevationFromMsl.unitKey'' in ''frameOfReference.units'' dictionary.' + x: + title: X Coordinate + type: number + description: X-coordinate value in native or original projected CRS + 'y': + title: Y Coordinate + type: number + description: Y-coordinate value in native or original projected CRS + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + trajectory: + title: trajectory + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Trajectory Data + allOf: + - $ref: '#/components/schemas/trajectoryData' + description: A log set representing a trajectory associated with a wellbore + id: + title: Trajectory ID + type: string + description: The unique identifier of the trajectory + kind: + title: Trajectory Kind + type: string + description: Kind specification + default: 'osdu:wks:trajectory:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/app__model__model_curated__Legal' + description: The trajectory's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' 
+ version: + title: Entity Version Number + type: number + description: The version number of this trajectory; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + trajectoryData: + title: trajectoryData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: 'Azimuth reference code defining the type of North, default TN for true north.' + channelMnemonics: + title: Channel Mnemonics + type: array + items: + type: string + description: A list of channel Mnemonics in this trajectory. + channelNames: + title: Channel Names + type: array + items: + type: string + description: A list of channel long names in this trajectory. + channels: + title: Channels + type: array + items: + $ref: '#/components/schemas/trajectorychannel' + description: The channels associated to the index. + classification: + title: Trajectory Classification + type: string + description: The well-known trajectory classification code. + default: Raw Deviation Survey + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The wellbore''s elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + index: + title: Index Channel + allOf: + - $ref: '#/components/schemas/trajectorychannel' + description: The index channel or log. + indexType: + title: Index Type + type: string + description: The index type of the trajectory. + locationWGS84: + title: Trajectory preview + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: The wellbore's trajectory preview shape as GeoJSON LineString. + name: + title: Trajectory Name + type: string + description: The name of this trajectory + referencePosition: + title: Reference Position First Sample + allOf: + - $ref: '#/components/schemas/Point3dNonGeoJson' + description: 'The 3D reference position for the first sample (surface location for main wellbores, tie-in point for side-tracks.' + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/trajectoryrelationships' + description: The related entities. + start: + title: Start + type: number + description: The start index value of the trajectory. + step: + title: Step + type: number + description: The index increment value of the trajectory. + stop: + title: Stop + type: number + description: The stop index value of the trajectory. + wellHeadWgs84: + title: WGS 84 Position + allOf: + - $ref: '#/components/schemas/wgs84Position' + description: 'The wellbore''s position in WGS 84 latitude and longitude; vertical position is an elevation from mean sea level (MSL), positive above MSL.' 
+ description: Used for data model allows extra fields which are not declared initially in the pydantic model + trajectorychannel: + title: trajectorychannel + type: object + properties: + absentValue: + title: Absent Value + type: string + description: Optional field carrying the absent value as string for this channel. + azimuthKey: + title: Azimuth Reference Key + type: string + description: The azimuth reference of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.azimuth' dictionary. + crsKey: + title: CRS Key + type: string + description: The CRS key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.crs' dictionary. + dataType: + title: Data Type + allOf: + - $ref: '#/components/schemas/DataType_1' + description: The log value type (per log sample). The 'format' property may contain further hints about data type presentation. + default: number + dimension: + title: Dimension + type: integer + description: The dimension of this log or channel + family: + title: Log Family + type: string + description: The log family code of this log or channel (optional) + familyType: + title: Log Family Type + type: string + description: 'The log family type code of this log or channel. Example: ''Neutron Porosity'' for ''Thermal Neutron Porosity Sandstone''. (optional)' + format: + title: Format Hint + allOf: + - $ref: '#/components/schemas/Format_1' + description: Optional format hint how to treat the log values as strings or number of bits per 'dataType'. + default: float32 + logstoreId: + title: Logstore ID + type: number + description: The id of this log or channel in the Logstore. This property is not present in the index channel. + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + longName: + title: Log Long Name + type: string + description: The long name of this log or channel + mnemonic: + title: Mnemonic + type: string + description: The mnemonic of this log or channel + name: + title: Log Name + type: string + description: The name of this log or channel. + properties: + title: Properties + type: array + items: + type: string + description: The properties of this log or channel. + source: + title: Source + type: string + description: 'The source of this log or channel as a data reference; Typically this refers to the raw trajectory, from which this log WKE is generated.' + unitKey: + title: Unit Key + type: string + description: The unit key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.units' dictionary. Empty units (NoUnit) are not recorded. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + trajectoryrelationships: + title: trajectoryrelationships + required: + - wellbore + type: object + properties: + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this trajectory belongs. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + well: + title: well + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. 
+ ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Well Data + allOf: + - $ref: '#/components/schemas/wellData' + description: Well data container + id: + title: Well ID + type: string + description: The unique identifier of the well + kind: + title: Well Kind + type: string + description: Well-known well kind specification + default: 'osdu:wks:well:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/app__model__model_curated__Legal' + description: The geological interpretation's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this well; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wellData: + title: wellData + type: object + properties: + basinContext: + title: Basin Context + allOf: + - $ref: '#/components/schemas/basinContext' + description: The basin context details for the well. + block: + title: Block + type: string + description: 'The block name, in which the well is located.' + country: + title: Country + type: string + description: 'The country, in which the well is located. The country name follows the convention in ISO 3166-1 ''English short country name'', see https://en.wikipedia.org/wiki/ISO_3166-1' + county: + title: County + type: string + description: 'The county name, in which the well is located.' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateLicenseIssued: + title: License Issue Date + type: string + description: The UTC date time when the well license was issued. + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + datePluggedAbandoned: + title: Plugged Abandoned Date + type: string + description: The UTC date and time at which the well was plugged and abandoned. + format: date-time + dateSpudded: + title: Spud Date + type: string + description: 'The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began. The format follows ISO 8601 YYYY-MM-DD extended format' + format: date-time + directionWell: + title: Well Direction + allOf: + - $ref: '#/components/schemas/DirectionWell' + description: 'POSC well direction. The direction of the flow of the fluids in a well facility (generally, injected or produced, or some combination).' + district: + title: District + type: string + description: 'The district name, to which the well belongs.' + elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The well''s elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. 
some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + field: + title: Field + type: string + description: 'The field name, to which the well belongs.' + fluidWell: + title: Well Fluid + allOf: + - $ref: '#/components/schemas/FluidWell' + description: POSC well fluid. The type of fluid being produced from or injected \ninto a well facility. + groundElevation: + title: Ground Elevation + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The well''s ground elevation, Values above MSL are positive..' + locationWGS84: + title: Well Shape WGS 84 + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: A 2D GeoJSON FeatureCollection defining well location or trajectory in WGS 84 CRS. + name: + title: Well Name + type: string + description: The well name + operator: + title: Well Operator + type: string + description: The operator company name of the well. + operatorDivision: + title: Operator Division + type: string + description: The operator division of the well. + operatorInterest: + title: Well Operator Interest + type: number + description: Interest for operator. Commonly in percent. + operatorOriginal: + title: Original Well Operator + type: string + description: Original operator of the well. This may be different than the current operator. + plssLocation: + title: US PLSS Location + allOf: + - $ref: '#/components/schemas/PlssLocation' + description: A location described by the Public Land Survey System (United States) + propertyDictionary: + title: Property Dictionary + type: object + description: 'A dictionary structure, i.e. key/string value pairs, to carry additional well properties.' + region: + title: Region + type: string + description: Geo-political region in which the well is located. + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/wellrelationships' + description: The related entities. + state: + title: State + type: string + description: 'The state name, in which the well is located.' + uwi: + title: Unique Well Identifier + type: string + description: 'The unique well identifier, aka. API number, US well number or UBHI. Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits).' + waterDepth: + title: Water Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Depth of water (not land rigs). + wellHeadElevation: + title: Well Head Elevation + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The well''s vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadGeographic: + title: 'Well Head Position, Geographic' + allOf: + - $ref: '#/components/schemas/geographicPosition' + description: 'The well''s well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadProjected: + title: 'Well Head Position, Projected' + allOf: + - $ref: '#/components/schemas/projectedPosition' + description: 'The well''s well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadWgs84: + title: WGS 84 Position + allOf: + - $ref: '#/components/schemas/core_dl_geopoint' + description: The well's position in WGS 84 latitude and longitude. 
+ wellLocationType: + $ref: '#/components/schemas/WellLocationType' + wellNumberGovernment: + title: Government Number + type: string + description: Government assigned well number. + wellNumberLicense: + title: Well License Number + type: string + description: License number of the well. + wellNumberOperator: + title: Operator Number + type: string + description: Operator well number. + wellPurpose: + title: Well Purpose + allOf: + - $ref: '#/components/schemas/WellPurpose' + description: POSC well purpose + wellStatus: + title: Well Status + allOf: + - $ref: '#/components/schemas/WellStatus' + description: POSC well status. + wellType: + title: Well Type + allOf: + - $ref: '#/components/schemas/WellType' + description: Type of well. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + wellbore: + title: wellbore + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Wellbore Data + allOf: + - $ref: '#/components/schemas/wellboreData' + description: Wellbore data container + id: + title: Wellbore ID + type: string + description: The unique identifier of the wellbore + kind: + title: Wellbore Kind + type: string + description: Well-known wellbore kind specification + default: 'osdu:wks:wellbore:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/app__model__model_curated__Legal' + description: The geological interpretation's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this wellbore; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wellboreData: + title: wellboreData + type: object + properties: + airGap: + title: Air Gap + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The gap between water surface and offshore drilling platform. + block: + title: Block + type: string + description: 'The block name, in which the wellbore is located.' + country: + title: Country + type: string + description: 'The country, in which the wellbore is located. The country name follows the convention in ISO 3166-1 ''English short country name'', see https://en.wikipedia.org/wiki/ISO_3166-1' + county: + title: County + type: string + description: 'The county name, in which the wellbore is located.' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + drillingDaysTarget: + title: Target Drilling Days + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Target days for drilling wellbore. 
+ elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The wellbore''s elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind of URL to be resolved in an external data store), which links to external realizations of the same entity.' + field: + title: Field + type: string + description: 'The field name, to which the wellbore belongs.' + formationAtTd: + title: Formation at TD + type: string + description: The name of the formation at the wellbore's total depth. + formationProjected: + title: Formation Projected + type: string + description: The name of the formation at the wellbore's projected depth. This property is questionable as there is not precise documentation available. + hasAchievedTotalDepth: + title: Has Total Depth Been Achieved Flag + type: boolean + description: 'True ("true" or "1") indicates that the wellbore has achieved total depth. That is, drilling has completed. False ("false" or "0") indicates otherwise. Not given indicates that it is not known whether total depth has been reached.' + default: true + isActive: + title: Is Active Flag + type: boolean + description: 'True (="1" or "true") indicates that the wellbore is active. False (="0" or "false") indicates otherwise. It is the server''s responsibility to set this value based on its available internal data (e.g., what objects are changing).' + kickOffMd: + title: Kick-off MD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The kick-off point in measured depth (MD); for the main well the kickOffMd is set to 0. + kickOffTvd: + title: Kick-off TVD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Kickoff true vertical depth of the wellbore; for the main wellbore the kickOffTvd is set to 0. + locationWGS84: + title: Wellbore Shape WGS 84 + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: A 2D GeoJSON FeatureCollection defining wellbore location or trajectory in WGS 84 CRS. + name: + title: Wellbore Name + type: string + description: The wellbore name + operator: + title: Operator + type: string + description: The operator of the wellbore. + permitDate: + title: Permit Date + type: string + description: The wellbore's permit date. + format: date + permitNumber: + title: Permit Number + type: string + description: The wellbore's permit number or permit ID. + plssLocation: + title: US PLSS Location + allOf: + - $ref: '#/components/schemas/PlssLocation' + description: A location described by the Public Land Survey System (United States) + propertyDictionary: + title: Property Dictionary + type: object + description: 'A dictionary structure, i.e. key/string value pairs, to carry additional wellbore properties.' + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/wellborerelationships' + description: The related entities. + shape: + title: Wellbore Shape + allOf: + - $ref: '#/components/schemas/Shape' + description: POSC wellbore trajectory shape. + spudDate: + title: Spud Date + type: string + description: 'The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began.
+ The format follows ISO 8601 YYYY-MM-DD extended format' + format: date + state: + title: State + type: string + description: 'The state name, in which the wellbore is located.' + totalDepthMd: + title: Total MD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The measured depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site.' + totalDepthMdDriller: + title: Total MD Drilled + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The total depth along the wellbore as reported by the drilling contractor from 'elevationReference'. The unit definition is found via the property's 'unitKey' in 'frameOfReference.units' dictionary. + totalDepthMdPlanned: + title: Total MD Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned measured depth for the wellbore total depth. + totalDepthMdSubSeaPlanned: + title: Total MD Sub Sea Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned measured depth for the wellbore total depth - with respect to seabed. + totalDepthProjectedMd: + title: Total MD Projected + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The projected total measured depth of the borehole. This property is questionable as there is not precise documentation available. + totalDepthTvd: + title: Total TVD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The true vertical depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site.' + totalDepthTvdDriller: + title: Total TVD Drilled + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The total true vertical depth as reported by the drilling contractor from ''elevationReference'', downwards increasing. The unit definition is found via the property''s ''unitKey'' in ''frameOfReference.units'' dictionary.' + totalDepthTvdPlanned: + title: Total TVD Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned true vertical depth for the wellbore total depth. + totalDepthTvdSubSeaPlanned: + title: Total TVD Sub Sea Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned true vertical depth for the wellbore total depth - with respect to seabed. + uwi: + title: Unique Wellbore Identifier + type: string + description: 'The unique wellbore identifier, aka. API number, US well number or UBHI. Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits).' + wellHeadElevation: + title: Well Head Elevation + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The wellbore''s vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadGeographic: + title: 'Well Head Position, Geographic' + allOf: + - $ref: '#/components/schemas/geographicPosition' + description: 'The wellbore''s well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.'
+ wellHeadProjected: + title: 'Well Head Position, Projected' + allOf: + - $ref: '#/components/schemas/projectedPosition' + description: 'The wellbore''s well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadWgs84: + title: WGS 84 Position + allOf: + - $ref: '#/components/schemas/core_dl_geopoint' + description: The wellbore's position in WGS 84 latitude and longitude. + wellboreNumberGovernment: + title: Government Number + type: string + description: Government assigned wellbore number. + wellboreNumberOperator: + title: Operator Number + type: string + description: Operator wellbore number. + wellborePurpose: + title: Wellbore Purpose + allOf: + - $ref: '#/components/schemas/WellborePurpose' + description: POSC wellbore purpose + wellboreStatus: + title: Wellbore Status + allOf: + - $ref: '#/components/schemas/WellboreStatus' + description: POSC wellbore status. + wellboreType: + title: Wellbore Type + allOf: + - $ref: '#/components/schemas/WellboreType' + description: Type of wellbore. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + wellborerelationships: + title: wellborerelationships + type: object + properties: + definitiveTimeDepthRelation: + title: Definitive Time-Depth Relation + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The definitive time-depth relation providing the MD to seismic travel-time transformation. + definitiveTrajectory: + title: Definitive Trajectory + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The definitive trajectory providing the MD to 3D space transformation. + tieInWellbore: + title: Tie-in Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The tie-in wellbore if this wellbore is a side-track. + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this wellbore belongs. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wellrelationships: + title: wellrelationships + type: object + properties: + asset: + title: Asset + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The asset this well belongs to. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wgs84Position: + title: wgs84Position + required: + - elevationFromMsl + - latitude + - longitude + type: object + properties: + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Sea Level, downwards negative. The unit definition is found via ''elevationFromMsl.unitKey'' in ''frameOfReference.units'' dictionary.'
+ latitude: + title: WGS 84 Latitude + type: number + description: WGS 84 latitude value in degrees (dega) + longitude: + title: WGS 84 Longitude + type: number + description: WGS 84 longitude value in degrees (dega) + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GetLogDataResponse: + oneOf: + - title: SplitFormat + type: object + properties: + data: + title: Data + anyOf: + - type: array + items: + anyOf: + - type: string + - type: integer + - type: number + - type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + columns: + title: Columns + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + index: + title: Index + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + required: + - data + - title: IndexFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ColumnFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: RecordsFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ValuesFormat + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + securitySchemes: + OpenDESBearerToken: + type: http + scheme: bearer diff --git a/spec/edited/search.yaml b/spec/edited/search.yaml new file mode 100644 index 0000000000000000000000000000000000000000..20bf92fe3c313a04018870ec04530c4150fb032e --- /dev/null +++ b/spec/edited/search.yaml @@ -0,0 +1,1400 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
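+
+# Illustrative note (comment only, not part of the API contract): a minimal
+# sketch of how a client might call the /query operation defined in this spec,
+# assuming the Python 'requests' library. The base URL and the
+# data-partition-id header come from this document; the token, the 'opendes'
+# partition value and the 'kind'/'query' body fields are hypothetical
+# placeholders, and the actual body must conform to the QueryRequest schema
+# referenced below.
+#
+#   import requests
+#
+#   token = "<access-token>"  # placeholder bearer token
+#   response = requests.post(
+#       "https://api.example.com/osdu/wdms/search/v2/query",
+#       headers={
+#           "Authorization": f"Bearer {token}",   # OpenDESBearerToken security scheme
+#           "data-partition-id": "opendes",       # hypothetical partition identifier
+#       },
+#       json={"kind": "*:wks:wellbore:*", "query": "data.name:WELLBORE*"},
+#   )
+#   response.raise_for_status()
+#   print(response.json())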
+ +openapi: 3.0.2 +info: + title: Wellbore DDMS Search OSDU + version: '0.2' +servers: + - url: https://api.example.com/osdu/wdms/search/v2 +security: +- bearer: [] +- appkey: [] +paths: + /query: + post: + tags: + - Search + summary: Query + operationId: query_ddms_query_post + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRequest' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /query_with_cursor: + post: + tags: + - Search + summary: Query with cursor + operationId: query_ddms_query_with_cursor_post + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/QueryRequest' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /query/wellbores: + post: + tags: + - Search + summary: Query with cursor + description: |- + Get all Wellbores object. <p>The wellbore kind is + *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /query/wellbores/bydistance: + post: + tags: + - Search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores object in a specific area. 
<p>The specific area will be define by a circle + based on its center coordinates (lat, lon) and radius (meters) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_bydistance_post + parameters: + - required: true + schema: + title: Latitude + type: number + name: latitude + in: query + - required: true + schema: + title: Longitude + type: number + name: longitude + in: query + - required: true + schema: + title: Distance + type: integer + name: distance + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /query/wellbores/byboundingbox: + post: + tags: + - Search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores object in a specific area. <p>The specific area will be define by a square + based on its top left coordinates (lat, lon) and its bottom right coordinates (log, lat) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_byboundingbox_post + parameters: + - required: true + schema: + title: Latitude Top Left + type: number + name: latitude_top_left + in: query + - required: true + schema: + title: Longitude Top Left + type: number + name: longitude_top_left + in: query + - required: true + schema: + title: Latitude Bottom Right + type: number + name: latitude_bottom_right + in: query + - required: true + schema: + title: Longitude Bottom Right + type: number + name: longitude_bottom_right + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /query/wellbores/bygeopolygon: + post: + tags: + - Search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores object in a specific area. 
<p>The specific area will be define by a + polygon based on each of its coordinates (lat, lon) with a minimum of three</p> + <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores_bygeopolygon_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Points + type: array + items: + $ref: '#/components/schemas/Point' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/query/wellbore/{wellbore_id}/logsets': + post: + tags: + - Search + summary: 'Query with cursor, search logSets by wellbore ID' + description: |- + Get all LogSets object using its relationship Wellbore ID. <p>All LogSets linked to this + specific ID will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbore__wellbore_id__logsets_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/query/wellbores/{wellbore_attribute}/logsets': + post: + tags: + - Search + summary: 'Query with cursor, search logSets by wellbore attribute' + description: |- + Get all LogSets object using a specific attribute of Wellbores. <p>All LogSets linked to Wellbores + with this specific attribute will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores__wellbore_attribute__logsets_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /query/logs: + post: + tags: + - Search + summary: 'Query with cursor, gets logs' + description: |- + Get all Logs object. 
<p>The Logs kind is + *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_logs_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/query/wellbore/{wellbore_id}/logs': + post: + tags: + - Search + summary: 'Query with cursor, search logs by wellbore ID' + description: |- + Get all Logs object using its relationship Wellbore ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbore__wellbore_id__logs_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/query/wellbores/{wellbore_attribute}/logs': + post: + tags: + - Search + summary: 'Query with cursor, search logs by wellbore attribute' + description: |- + Get all Logs object using a specific attribute of Wellbores. <p>All Logs linked to Wellbores + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbores__wellbore_attribute__logs_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/query/logset/{logset_id}/logs': + post: + tags: + - Search + summary: 'Query with cursor, search logs by logSet ID' + description: |- + Get all Logs object using its relationship Logset ID. 
<p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_logset__logset_id__logs_post + parameters: + - required: true + schema: + title: Logset Id + type: string + name: logset_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/query/logsets/{logset_attribute}/logs': + post: + tags: + - Search + summary: 'Query with cursor, search logs by logSet attribute' + description: |- + Get all Logs object using a specific attribute of LogSets. <p>All Logs linked to LogSets + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_logsets__logset_attribute__logs_post + parameters: + - required: true + schema: + title: Logset Attribute + type: string + name: logset_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/query/wellbore/{wellbore_id}/markers': + post: + tags: + - Search + summary: 'Query with cursor, search markers by wellbore ID' + description: |- + Get all Markers object using its relationship Wellbore ID. <p>All Markers linked to this + specific ID will be returned</p> + <p>The Marker kind is *:wks:marker:* returns all records directly based on existing schemas</p> + operationId: query_ddms_query_wellbore__wellbore_id__markers_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /fastquery/wellbores: + post: + tags: + - Fast search + summary: Query with cursor + description: |- + Get all Wellbores IDs object. 
<p>The wellbore kind is + *:wks:wellbore:* returns all records IDs IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /fastquery/wellbores/bydistance: + post: + tags: + - Fast search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores IDs IDs objects in a specific area. <p>The specific area will be define by a circle + based on its center coordinates (lat, lon) and radius (meters) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_bydistance_post + parameters: + - required: true + schema: + title: Latitude + type: number + name: latitude + in: query + - required: true + schema: + title: Longitude + type: number + name: longitude + in: query + - required: true + schema: + title: Distance + type: integer + name: distance + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /fastquery/wellbores/byboundingbox: + post: + tags: + - Fast search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores IDs objects in a specific area. 
<p>The specific area will be defined by a square + based on its top left coordinates (lat, lon) and its bottom right coordinates (lon, lat) </p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_byboundingbox_post + parameters: + - required: true + schema: + title: Latitude Top Left + type: number + name: latitude_top_left + in: query + - required: true + schema: + title: Longitude Top Left + type: number + name: longitude_top_left + in: query + - required: true + schema: + title: Latitude Bottom Right + type: number + name: latitude_bottom_right + in: query + - required: true + schema: + title: Longitude Bottom Right + type: number + name: longitude_bottom_right + in: query + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /fastquery/wellbores/bygeopolygon: + post: + tags: + - Fast search + summary: 'Query with cursor, CRS format: data.wellHeadWgs84' + description: |- + Get all Wellbores IDs objects in a specific area. <p>The specific area will be defined by a + polygon based on each of its coordinates (lat, lon) with a minimum of three</p> + <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores_bygeopolygon_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Points + type: array + items: + $ref: '#/components/schemas/Point' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/fastquery/wellbore/{wellbore_id}/logsets': + post: + tags: + - Fast search + summary: 'Query with cursor, search logSets IDs by wellbore ID' + description: |- + Get all LogSets IDs objects using its relationship Wellbore ID. 
<p>All LogSets linked to this + specific ID will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbore__wellbore_id__logsets_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/fastquery/wellbores/{wellbore_attribute}/logsets': + post: + tags: + - Fast search + summary: 'Query with cursor, search logSets IDs by wellbore attribute' + description: |- + Get all LogSets IDs objects using a specific attribute of Wellbores. <p>All LogSets linked to Wellbores + with this specific attribute will be returned</p> + <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores__wellbore_attribute__logsets_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /fastquery/logs: + post: + tags: + - Fast search + summary: 'Query with cursor, gets logs' + description: |- + Get all Logs object. <p>The Logs kind is + *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_logs_post + parameters: + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/fastquery/wellbore/{wellbore_id}/logs': + post: + tags: + - Fast search + summary: 'Query with cursor, search logs IDs by wellbore ID' + description: |- + Get all Logs IDs objects using its relationship Wellbore ID. 
<p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbore__wellbore_id__logs_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/fastquery/wellbores/{wellbore_attribute}/logs': + post: + tags: + - Fast search + summary: 'Query with cursor, search logs IDs by wellbore attribute' + description: |- + Get all Logs IDs objects using a specific attribute of Wellbores. <p>All Logs linked to Wellbores + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbores__wellbore_attribute__logs_post + parameters: + - required: true + schema: + title: Wellbore Attribute + type: string + name: wellbore_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/fastquery/logset/{logset_id}/logs': + post: + tags: + - Fast search + summary: 'Query with cursor, search logs IDs by logSet ID' + description: |- + Get all Logs IDs objects using its relationship Logset ID. <p>All Logs linked to this + specific ID will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_logset__logset_id__logs_post + parameters: + - required: true + schema: + title: Logset Id + type: string + name: logset_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/fastquery/logsets/{logset_attribute}/logs': + post: + tags: + - Fast search + summary: 'Query with cursor, search logs IDs by logSet attribute' + description: |- + Get all Logs IDs objects using a specific attribute of LogSets. 
<p>All Logs linked to LogSets + with this specific attribute will be returned</p> + <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_logsets__logset_attribute__logs_post + parameters: + - required: true + schema: + title: Logset Attribute + type: string + name: logset_attribute + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/fastquery/wellbore/{wellbore_id}/markers': + post: + tags: + - Fast search + summary: 'Query with cursor, search markers IDs by wellbore ID' + description: |- + Get all Markers IDs objects using its relationship Wellbore ID. <p>All Markers linked to this + specific ID will be returned</p> + <p>The Marker kind is *:wks:marker:* returns all records IDs directly based on existing schemas</p> + operationId: query_ddms_fastquery_wellbore__wellbore_id__markers_post + parameters: + - required: true + schema: + title: Wellbore Id + type: string + name: wellbore_id + in: path + - required: false + schema: + title: Query + type: string + name: query + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] +components: + schemas: + ByBoundingBox: + title: ByBoundingBox + required: + - topLeft + - bottomRight + type: object + properties: + topLeft: + $ref: '#/components/schemas/Point' + bottomRight: + $ref: '#/components/schemas/Point' + ByDistance: + title: ByDistance + required: + - point + type: object + properties: + distance: + title: Distance + type: number + point: + $ref: '#/components/schemas/Point' + ByGeoPolygon: + title: ByGeoPolygon + type: object + properties: + points: + title: Points + type: array + items: + $ref: '#/components/schemas/Point' + HTTPValidationError: + title: HTTPValidationError + type: object + properties: + errors: + title: Errors + type: array + items: + $ref: '#/components/schemas/ValidationError' + Point: + title: Point + type: object + properties: + latitude: + title: Latitude + type: number + longitude: + title: Longitude + type: number + QueryRequest: + title: QueryRequest + required: + - kind + type: object + properties: + kind: + title: Kind + type: string + limit: + title: Limit + type: integer + query: + title: Query + type: string + returnedFields: + title: Returnedfields + type: array + items: + type: string + sort: + $ref: '#/components/schemas/SortQuery' + queryAsOwner: + title: Queryasowner + type: boolean + spatialFilter: + $ref: '#/components/schemas/SpatialFilter' + offset: + title: Offset + type: integer + SortQuery: + title: 
SortQuery + type: object + properties: + field: + title: Field + type: array + items: + type: string + order: + title: Order + type: string + enum: ['ASC', 'DESC'] + SpatialFilter: + title: SpatialFilter + type: object + properties: + field: + title: Field + type: string + byBoundingBox: + $ref: '#/components/schemas/ByBoundingBox' + byDistance: + $ref: '#/components/schemas/ByDistance' + byGeoPolygon: + $ref: '#/components/schemas/ByGeoPolygon' + ValidationError: + title: ValidationError + required: + - loc + - msg + - type + type: object + properties: + loc: + title: Location + type: array + items: + type: string + msg: + title: Message + type: string + type: + title: Error Type + type: string + securitySchemes: + bearer: + type: apiKey + name: Authorization + in: header + appkey: + type: apiKey + in: header + name: appkey \ No newline at end of file diff --git a/spec/edited/wellbore.yaml b/spec/edited/wellbore.yaml new file mode 100644 index 0000000000000000000000000000000000000000..e5944530f96cf4c17c1d63908f4e020ad007f26d --- /dev/null +++ b/spec/edited/wellbore.yaml @@ -0,0 +1,4143 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +openapi: 3.0.2 +info: + title: Wellbore DDMS OSDU + version: '0.2' +servers: + - url: https://api.example.com/osdu/wdms/wellbore/v2 +security: +- bearer: [] +- appkey: [] +paths: + /about: + get: + summary: Get About + operationId: get_about_ddms_v2_about_get + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/AboutResponse' + /version: + get: + summary: Get Version + operationId: get_version_ddms_v2_version_get + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/VersionDetailsResponse' + security: + - bearer: [] + - appkey: [] + /status: + get: + summary: Get the status of the service + operationId: about_ddms_v2_status_get + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/V1AboutResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/wells/{wellid}': + get: + tags: + - Well + summary: 'Get the Well using wks:well:1.0.2 schema' + description: |- + Get the Well object using its **id**. <p>If the well kind is + *wks:well:1.0.2* returns the record directly</p> <p>If the well + kind is different *wks:well:1.0.2* it will get the raw record and + convert the results to match the *wks:well:1.0.2*. 
If conversion is + not possible returns an error **500** + operationId: get_well + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/well' + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + delete: + tags: + - Well + summary: Delete the well. The API performs a logical deletion of the given record + operationId: del_well + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/wells/{wellid}/versions': + get: + tags: + - Well + summary: Get all versions of the Well + operationId: get_well_versions + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/wells/{wellid}/versions/{version}': + get: + tags: + - Well + summary: 'Get the given version of the Well using wks:well:1.0.2 schema' + description: |- + Get the Well object using its **id**. <p>If the well kind is + *wks:well:1.0.2* returns the record directly</p> <p>If the well + kind is different *wks:well:1.0.2* it will get the raw record and + convert the results to match the *wks:well:1.0.2*. 
If conversion is + not possible returns an error **500** + operationId: get_well_version + parameters: + - required: true + schema: + title: Wellid + type: string + name: wellid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/well' + '404': + description: Well not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /wells: + post: + tags: + - Well + summary: 'Create or update the Wells using wks:well:1.0.2 schema' + operationId: post_well + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Wells + type: array + items: + $ref: '#/components/schemas/well' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/wellbores/{wellboreid}': + get: + tags: + - Wellbore + summary: 'Get the Wellbore using wks:wellbore:1.0.6 schema' + description: |- + Get the Wellbore object using its **id**. <p>If the wellbore kind is + *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore + kind is different *wks:wellbore:1.0.6* it will get the raw record and + convert the results to match the *wks:wellbore:1.0.6*. If conversion is + not possible returns an error **500** + operationId: get_wellbore + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/wellbore' + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + delete: + tags: + - Wellbore + summary: Delete the wellbore. 
The API performs a logical deletion of the given record + operationId: del_wellbore + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/wellbores/{wellboreid}/versions': + get: + tags: + - Wellbore + summary: Get all versions of the Wellbore + operationId: get_wellbore_versions + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/wellbores/{wellboreid}/versions/{version}': + get: + tags: + - Wellbore + summary: 'Get the given version of the Wellbore using wks:wellbore:1.0.6 schema' + description: |- + "Get the Wellbore object using its **id**. <p>If the wellbore kind is + *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore + kind is different *wks:wellbore:1.0.6* it will get the raw record and + convert the results to match the *wks:wellbore:1.0.6*. 
If conversion is + not possible returns an error **500** + operationId: get_wellbore_version + parameters: + - required: true + schema: + title: Wellboreid + type: string + name: wellboreid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/wellbore' + '404': + description: Wellbore not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /wellbores: + post: + tags: + - Wellbore + summary: 'Create or update the Wellbores using wks:wellbore:1.0.6 schema' + operationId: post_wellbore + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Wellbores + type: array + items: + $ref: '#/components/schemas/wellbore' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logsets/{logsetid}': + get: + tags: + - Logset + summary: 'Get the LogSet using wks:logSet:1.0.5 schema' + description: Get the LogSet object using its **id** + operationId: get_logset + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/logset' + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + delete: + tags: + - Logset + summary: Delete the LogSet. 
The API performs a logical deletion of the given record + operationId: del_logset + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logsets/{logsetid}/versions': + get: + tags: + - Logset + summary: Get all versions of the logset + operationId: get_logset_versions + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logsets/{logsetid}/versions/{version}': + get: + tags: + - Logset + summary: 'Get the given version of LogSet using wks:logSet:1.0.5 schema' + description: '"Get the LogSet object using its **id**.' 
+ operationId: get_logset_version + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/logset' + '404': + description: LogSet not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logsets/{logsetid}/harmonize': + post: + tags: + - Logset + summary: 'Create or update the LogSets using wks:logSet:1.0.5 schema' + operationId: harmonize_logset + parameters: + - required: true + schema: + title: Logsetid + type: string + name: logsetid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/logset' + '404': + description: logset not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /logsets: + post: + tags: + - Logset + summary: 'Create or update the LogSets using wks:logSet:1.0.5 schema' + operationId: put_logset + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Logsets + type: array + items: + $ref: '#/components/schemas/logset' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/trajectories/{trajectoryid}': + get: + tags: + - Trajectory + summary: 'Get the trajectory using wks:trajectory:1.0.5 schema' + description: Get the Trajectory object using its **id** + operationId: get_trajectory + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/trajectory' + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: 
[] + delete: + tags: + - Trajectory + summary: Delete the Trajectory. The API performs a logical deletion of the given record + operationId: del_trajectory + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: Whether or not to delete records children + required: false + schema: + title: Recursive + type: boolean + description: Whether or not to delete records children + default: false + name: recursive + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/trajectories/{trajectoryid}/versions': + get: + tags: + - Trajectory + summary: Get all versions of the Trajectory + operationId: get_trajectory_versions + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/trajectories/{trajectoryid}/versions/{version}': + get: + tags: + - Trajectory + summary: 'Get the given version of Trajectory using wks:Trajectory:1.0.5 schema' + operationId: get_trajectory_version + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/trajectory' + '404': + description: Trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /trajectories: + post: + tags: + - Trajectory + summary: 'Create or update the trajectories using wks:Trajectory:1.0.5 schema' + operationId: post_trajectory + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Trajectories + type: array + items: + $ref: '#/components/schemas/trajectory' + required: true + responses: + '200': + description: Successful Response + content: + 
application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/trajectories/{trajectoryid}/data': + get: + tags: + - Trajectory + summary: Returns all data within the specified filters. Strongly consistent. + description: return full bulk data + operationId: get_traj_data + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: 'List of channels to get. If not provided, return all channels.' + required: false + schema: + title: Channels + type: array + items: + type: string + description: 'List of channels to get. If not provided, return all channels.' + name: channels + in: query + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: |- + Get trajectory data of the given channels. + It uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html). + Here're examples for data with 5 rows for channels MD, X, Y with different _orient_: + * split: <br/>`{"columns":["MD","X","Y"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> + * index: <br/>`{"0":{"MD":0.0,"X":1001,"Y":2001},"1":{"MD":0.5,"X":1002,"Y":2002},"2":{"MD":1.0,"X":1003,"Y":2003},"3":{"MD":1.5,"X":1004,"Y":2004},"4":{"MD":2.0,"X":1005,"Y":2005}}`<br/> + * columns: <br/>`{"MD":{"0":0.0,"1":0.5,"2":1.0,"3":1.5,"4":2.0},"X":{"0":1001,"1":1002,"2":1003,"3":1004,"4":1005},"Y":{"0":2001,"1":2002,"2":2003,"3":2004,"4":2005}}`<br/> + * records: <br/>`[{"MD":0.0,"X":1001,"Y":2001},{"MD":0.5,"X":1002,"Y":2002},{"MD":1.0,"X":1003,"Y":2003},{"MD":1.5,"X":1004,"Y":2004},{"MD":2.0,"X":1005,"Y":2005}]`<br/> + * values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> + content: + application/json: + schema: + $ref: '#/components/schemas/GetLogDataResponse' + example: '{"columns":["MD","X","Y"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}' + '400': + description: unknown channels + '404': + description: trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + post: + tags: + - Trajectory + summary: Writes the specified data to the trajectory (atomic). + description: Overwrite if exists + operationId: post_traj_data + parameters: + - required: true + schema: + title: Trajectoryid + type: string + name: trajectoryid + in: path + - description: 'define format when using JSON data is used. 
Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '404': + description: trajectory not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logs/{logid}': + get: + tags: + - Log + summary: 'Get the Log using wks:log:1.0.5 schema' + description: |2- + + Get the log object using its data ecosystem **id**. <p>If the log + kind is *wks:log:1.0.5* returns the record directly</p> <p>If the + wellbore kind is different *wks:log:1.0.5* it will get the raw + record and convert the results to match the *wks:log:1.0.5*. If + conversion is not possible returns an error **500**</p> + operationId: get_log + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/log' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + delete: + tags: + - Log + summary: Delete the log. 
The API performs a logical deletion of the given record + operationId: del_log + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '204': + description: Record deleted successfully + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + /logs: + post: + tags: + - Log + summary: 'Create or update the logs using wks:log:1.0.5 schema' + operationId: post_log + parameters: + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + application/json: + schema: + title: Logs + type: array + items: + $ref: '#/components/schemas/log' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: Missing mandatory parameter or unknown parameter + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logs/{logid}/versions': + get: + tags: + - Log + summary: Get all versions of the log + operationId: get_log_versions + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/RecordVersions' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logs/{logid}/versions/{version}': + get: + tags: + - Log + summary: 'Get the given version of log using wks:log:1.0.5 schema' + operationId: get_log_version + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - required: true + schema: + title: Version + type: integer + name: version + in: path + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/log' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logs/{logid}/data': + get: + tags: + - Log + summary: Returns all data within the specified filters. Strongly consistent. 
+ description: return full bulk data + operationId: get_log_data + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: |- + Get log bulk data in format in the given _orient_ value. + It uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html). + Here're examples for data with 5 rows and 3 columns with different _orient_: + * split: <br/>`{"columns":["Ref","col_100X","col_200X"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> + * index: <br/>`{"0":{"Ref":0.0,"col_100X":1001,"col_200X":2001},"1":{"Ref":0.5,"col_100X":1002,"col_200X":2002},"2":{"Ref":1.0,"col_100X":1003,"col_200X":2003},"3":{"Ref":1.5,"col_100X":1004,"col_200X":2004},"4":{"Ref":2.0,"col_100X":1005,"col_200X":2005}}`<br/> + * columns: <br/>`{"Ref":{"0":0.0,"1":0.5,"2":1.0,"3":1.5,"4":2.0},"col_100X":{"0":1001,"1":1002,"2":1003,"3":1004,"4":1005},"col_200X":{"0":2001,"1":2002,"2":2003,"3":2004,"4":2005}}`<br/> + * records: <br/>`[{"Ref":0.0,"col_100X":1001,"col_200X":2001},{"Ref":0.5,"col_100X":1002,"col_200X":2002},{"Ref":1.0,"col_100X":1003,"col_200X":2003},{"Ref":1.5,"col_100X":1004,"col_200X":2004},{"Ref":2.0,"col_100X":1005,"col_200X":2005}]`<br/> + * values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> + content: + application/json: + schema: + $ref: '#/components/schemas/GetLogDataResponse' + example: '{"columns":["Ref","col_100X","col_200X"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + post: + tags: + - Log + summary: Writes the specified data to the log (atomic). + description: Overwrite if exists + operationId: write_log_data + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' 
+ required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + requestBody: + description: |- + Write log bulk data. + It uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html). + Here're examples for data with 5 rows and 3 columns with different _orient_: + * split: <br/>`{"columns":["Ref","col_100X","col_200X"],"index":[0,1,2,3,4],"data":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> + * index: <br/>`{"0":{"Ref":0.0,"col_100X":1001,"col_200X":2001},"1":{"Ref":0.5,"col_100X":1002,"col_200X":2002},"2":{"Ref":1.0,"col_100X":1003,"col_200X":2003},"3":{"Ref":1.5,"col_100X":1004,"col_200X":2004},"4":{"Ref":2.0,"col_100X":1005,"col_200X":2005}}`<br/> + * columns: <br/>`{"Ref":{"0":0.0,"1":0.5,"2":1.0,"3":1.5,"4":2.0},"col_100X":{"0":1001,"1":1002,"2":1003,"3":1004,"4":1005},"col_200X":{"0":2001,"1":2002,"2":2003,"3":2004,"4":2005}}`<br/> + * records: <br/>`[{"Ref":0.0,"col_100X":1001,"col_200X":2001},{"Ref":0.5,"col_100X":1002,"col_200X":2002},{"Ref":1.0,"col_100X":1003,"col_200X":2003},{"Ref":1.5,"col_100X":1004,"col_200X":2004},{"Ref":2.0,"col_100X":1005,"col_200X":2005}]`<br/> + * values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> + required: true + content: + application/json: + schema: + example: + columns: + - Ref + - col_100X + - col_200X + index: + - 0 + - 1 + - 2 + - 3 + - 4 + data: + - - 0 + - 1001 + - 2001 + - - 0.5 + - 1002 + - 2002 + - - 1 + - 1003 + - 2003 + - - 1.5 + - 1004 + - 2004 + - - 2 + - 1005 + - 2005 + oneOf: + - title: SplitFormat + type: object + properties: + data: + title: Data + anyOf: + - type: array + items: + anyOf: + - type: string + - type: integer + - type: number + - type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + columns: + title: Columns + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + index: + title: Index + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + required: + - data + - title: IndexFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ColumnFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: RecordsFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ValuesFormat + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + '/logs/{logid}/upload_data': + post: + tags: + - Log + summary: Writes the data to the log. 
Support json file (then orient must be provided) and parquet + description: Overwrite if exists + operationId: upload_log_data + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + requestBody: + content: + multipart/form-data: + schema: + $ref: '#/components/schemas/Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post' + required: true + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/CreateUpdateRecordsResponse' + '400': + description: invalid request + '404': + description: log not found + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logs/{logid}/statistics': + get: + tags: + - Log + summary: Data statistics + description: 'This API will return count, mean, std, min, max and percentiles of each column' + operationId: get_log_data_statistics_ddms_v2_logs__logid__statistics_get + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: + $ref: '#/components/schemas/GetStatisticResponse' + '422': + description: Validation Error + content: + application/json: + schema: + $ref: '#/components/schemas/HTTPValidationError' + security: + - bearer: [] + - appkey: [] + '/logs/{logid}/decimated': + get: + tags: + - Log + summary: Returns a decimated version of all data within the specified filters. Eventually consistent. + description: |- + TODO + Note: row order is not preserved. 
+ operationId: get_log_decimated + parameters: + - required: true + schema: + title: Logid + type: string + name: logid + in: path + - description: Number of division desired + required: false + schema: + title: Quantiles + type: integer + description: Number of division desired + name: quantiles + in: query + - description: The start value for the log decimation + required: false + schema: + title: Start + type: number + description: The start value for the log decimation + name: start + in: query + - description: The stop value for the log decimation + required: false + schema: + title: Stop + type: number + description: The stop value for the log decimation + name: stop + in: query + - description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + required: false + schema: + title: Orient + type: string + description: 'define format when using JSON data is used. Value can be split, index, columns, records, values' + default: split + name: orient + in: query + - description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + required: false + schema: + title: Bulk-Path + type: string + description: 'The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.' + name: bulk-path + in: query + - description: identifier of the data partition to query + required: false + schema: + title: data partition id + minLength: 1 + type: string + description: identifier of the data partition to query + name: data-partition-id + in: header + responses: + '200': + description: Successful Response + content: + application/json: + schema: {} + '404': + description: log not found + '422': + description: log is not compatible with decimation + security: + - bearer: [] + - appkey: [] +components: + schemas: + AboutResponse: + title: AboutResponse + type: object + properties: + service: + title: Service + type: string + version: + title: Version + type: string + buildNumber: + title: Buildnumber + type: string + cloudEnvironment: + title: Cloudenvironment + type: string + AboutResponseUser: + title: AboutResponseUser + type: object + properties: + tenant: + title: Tenant + type: string + email: + title: Email + type: string + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post: + title: Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post + required: + - file + type: object + properties: + file: + title: File + type: string + format: binary + CreateUpdateRecordsResponse: + title: CreateUpdateRecordsResponse + type: object + properties: + recordCount: + title: Recordcount + type: integer + recordIds: + title: Recordids + type: array + items: + type: string + skippedRecordIds: + title: Skippedrecordids + type: array + items: + type: string + DataType: + title: DataType + enum: + - string + - number + - integer + - boolean + description: An enumeration. + DataType_1: + title: DataType_1 + enum: + - string + - number + - integer + - boolean + description: An enumeration. + DataType_2: + title: DataType_2 + enum: + - string + - number + - integer + - boolean + - date-time + description: An enumeration. + DirectionWell: + title: DirectionWell + enum: + - huff-n-puff + - injector + - producer + - uncertain + - unknown + description: An enumeration. 
+ FluidWell: + title: FluidWell + enum: + - air + - condensate + - dry + - gas + - gas-water + - non HC gas + - non HC gas -- CO2 + - oil + - oil-gas + - oil-water + - steam + - water + - water -- brine + - water -- fresh water + - unknown + description: An enumeration. + Format: + title: Format + enum: + - date + - date-time + - time + - byte + - binary + - boolean + - email + - uuid + - uri + - int8 + - int16 + - int32 + - int64 + - float32 + - float64 + - float128 + description: An enumeration. + Format_1: + title: Format_1 + enum: + - date + - date-time + - time + - byte + - binary + - boolean + - email + - uuid + - uri + - int8 + - int16 + - int32 + - int64 + - float32 + - float64 + - float128 + description: An enumeration. + Format_2: + title: Format_2 + enum: + - date + - date-time + - time + - byte + - binary + - email + - uuid + - uri + - int8 + - int16 + - int32 + - int64 + - float32 + - float64 + - float128 + description: An enumeration. + GeoJsonFeature: + title: GeoJsonFeature + required: + - geometry + - properties + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + geometry: + title: Geometry + anyOf: + - $ref: '#/components/schemas/GeoJsonPoint' + - $ref: '#/components/schemas/GeoJsonMultiPoint' + - $ref: '#/components/schemas/GeoJsonLineString' + - $ref: '#/components/schemas/GeoJsonMultiLineString' + - $ref: '#/components/schemas/Polygon' + - $ref: '#/components/schemas/GeoJsonMultiPolygon' + - $ref: '#/components/schemas/geometryItem' + properties: + title: Properties + type: object + type: + $ref: '#/components/schemas/Type_1' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonFeatureCollection: + title: GeoJsonFeatureCollection + required: + - features + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + features: + title: Features + type: array + items: + $ref: '#/components/schemas/GeoJsonFeature' + type: + $ref: '#/components/schemas/Type_2' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonLineString: + title: GeoJsonLineString + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_3' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiLineString: + title: GeoJsonMultiLineString + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_4' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonMultiPoint: + title: GeoJsonMultiPoint + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_5' + additionalProperties: false + description: The base model forbids fields which are not declared 
initially in the pydantic model + GeoJsonMultiPolygon: + title: GeoJsonMultiPolygon + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + description: 'Bounding box in longitude, latitude WGS 84.' + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: array + items: + type: number + description: 'Array of polygons (minimum 2D), containing an array of point coordinates (longitude, latitude, (optionally elevation and other properties).' + type: + $ref: '#/components/schemas/Type_6' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GeoJsonPoint: + title: GeoJsonPoint + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_7' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GetStatisticResponse: + title: GetStatisticResponse + required: + - columns + type: object + properties: + columns: + title: Columns + type: array + items: + $ref: '#/components/schemas/StatsColumn' + HTTPValidationError: + title: HTTPValidationError + type: object + properties: + errors: + title: Errors + type: array + items: + $ref: '#/components/schemas/ValidationError' + Kind: + title: Kind + enum: + - CRS + - Unit + - Measurement + - AzimuthReference + - DateTime + type: string + description: An enumeration. + Legal: + title: Legal + type: object + properties: + legaltags: + title: Legal Tags + type: array + items: + type: string + description: 'The list of legal tags, see compliance API.' + otherRelevantDataCountries: + title: Other Relevant Data Countries + type: array + items: + type: string + description: 'The list of other relevant data countries using the ISO 2-letter codes, see compliance API.' + status: + title: Legal Status + type: string + description: The legal status. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + LinkList: + title: LinkList + type: object + properties: {} + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + MetaItem: + title: MetaItem + required: + - kind + - persistableReference + type: object + properties: + kind: + title: Reference Kind + allOf: + - $ref: '#/components/schemas/Kind' + description: 'The kind of reference, unit, measurement, CRS or azimuth reference.' + name: + title: Name or Symbol + type: string + description: The name of the CRS or the symbol/name of the unit + persistableReference: + title: Persistable Reference + type: string + description: The persistable reference string uniquely identifying the CRS or Unit + propertyNames: + title: Attribute Names + type: array + items: + type: string + description: 'The list of property names, to which this meta data item provides Unit/CRS context to. Data structures, which come in a single frame of reference, can register the property name, others require a full path like "data.structureA.propertyB" to define a unique context.' 
+ propertyValues: + title: Attribute Names + type: array + items: + type: string + description: 'The list of property values, to which this meta data item provides Unit/CRS context to. Typically a unit symbol is a value to a data structure; this symbol is then registered in this propertyValues array and the persistableReference provides the absolute reference.' + uncertainty: + title: Uncertainty + type: number + description: The uncertainty of the values measured given the unit or CRS unit. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + PlssLocation: + title: PlssLocation + required: + - range + - section + - township + type: object + properties: + aliquotPart: + title: Aliquot Part + type: string + description: 'A terse, hierarchical reference to a piece of land, in which successive subdivisions of some larger area.' + range: + title: Range + type: string + description: 'Range, also known as Rng, R; a measure of the distance east or west from a referenced principal meridian, in units of six miles.' + section: + title: Section Number + type: integer + description: Section number (between 1 and 36) + township: + title: Township + type: string + description: 'Township, also known as T or Twp; (1) Synonym for survey township, i.e., a square parcel of land of 36 square miles, or (2) A measure of the distance north or south from a referenced baseline, in units of six miles' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Point3dNonGeoJson: + title: Point3dNonGeoJson + required: + - coordinates + - crsKey + - unitKey + type: object + properties: + coordinates: + title: 3D Point + type: array + items: + type: number + description: '3-dimensional point; the first coordinate is typically pointing east (easting or longitude), the second coordinate typically points north (northing or latitude). The third coordinate is an elevation (upwards positive, downwards negative). The point''s CRS is given by the container.' + crsKey: + title: CRS Key + type: string + description: 'The ''crsKey'', which can be looked up in the ''frameOfReference.crs'' for further details.' + unitKey: + title: Unit Key + type: string + description: 'The ''unitKey'' for the 3rd coordinate, which can be looked up in the ''frameOfReference.unit'' for further details.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Polygon: + title: Polygon + required: + - coordinates + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + coordinates: + title: Coordinates + type: array + items: + type: array + items: + type: array + items: + type: number + type: + $ref: '#/components/schemas/Type_8' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + RecordVersions: + title: RecordVersions + type: object + properties: + recordId: + title: Recordid + type: string + versions: + title: Versions + type: array + items: + type: integer + ReferenceType: + title: ReferenceType + enum: + - Date + - Date Time + - Measured Depth + - Core depth + - True Vertical Depth + - True Vertical Depth Sub Sea + - One-Way Time + - Two-Way Time + description: An enumeration. 
+ Shape: + title: Shape + enum: + - build and hold + - deviated + - double kickoff + - horizontal + - S-shaped + - vertical + - unknown + description: An enumeration. + SimpleElevationReference: + title: SimpleElevationReference + required: + - elevationFromMsl + type: object + properties: + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The elevation above mean sea level (MSL), at which the vertical origin is 0.0. The ''unitKey'' is further defined in ''frameOfReference.units''.' + name: + title: Elevation Reference Name + type: string + description: The name of the Elevation Reference. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + StatsColumn: + title: StatsColumn + required: + - count + - mean + - std + - min + - 25% + - 50% + - 75% + - max + type: object + properties: + count: + title: Count + type: integer + description: Count number of non-NA/null observations + mean: + title: Mean + type: number + description: Mean of the values + std: + title: Std + type: number + description: Standard deviation of the observations + min: + title: Min + type: number + description: Minimum of the values in the object + 25%: + title: 25% + type: number + 50%: + title: 50% + type: number + 75%: + title: 75% + type: number + max: + title: Max + type: number + description: Maximum of the values in the object + TagDictionary: + title: TagDictionary + type: object + properties: {} + description: Used for data model allows extra fields which are not declared initially in the pydantic model + ToOneRelationship: + title: ToOneRelationship + type: object + properties: + confidence: + title: Relationship Confidence + type: number + description: The confidence of the relationship. If the property is absent a well-known relation is implied. + id: + title: Related Object Id + type: string + description: 'The id of the related object in the Data Ecosystem. If set, the id has priority over the natural key in the name property.' + name: + title: Related Object Name + type: string + description: The name or natural key of the related object. This property is required if the target object id could not (yet) be identified. + version: + title: Entity Version Number + type: number + description: 'The version number of the related entity. If no version number is specified, the last version is implied.' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + Type: + title: Type + enum: + - GeometryCollection + description: An enumeration. + Type_1: + title: Type_1 + enum: + - Feature + description: An enumeration. + Type_2: + title: Type_2 + enum: + - FeatureCollection + description: An enumeration. + Type_3: + title: Type_3 + enum: + - LineString + description: An enumeration. + Type_4: + title: Type_4 + enum: + - MultiLineString + description: An enumeration. + Type_5: + title: Type_5 + enum: + - MultiPoint + description: An enumeration. + Type_6: + title: Type_6 + enum: + - MultiPolygon + description: An enumeration. + Type_7: + title: Type_7 + enum: + - Point + description: An enumeration. + Type_8: + title: Type_8 + enum: + - Polygon + description: An enumeration. 
+ V1AboutResponse: + title: V1AboutResponse + type: object + properties: + user: + $ref: '#/components/schemas/AboutResponseUser' + dmsInfo: + $ref: '#/components/schemas/V1DmsInfo' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + V1DmsInfo: + title: V1DmsInfo + type: object + properties: + kinds: + title: Kinds + type: array + items: + type: string + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + ValidationError: + title: ValidationError + required: + - loc + - msg + - type + type: object + properties: + loc: + title: Location + type: array + items: + type: string + msg: + title: Message + type: string + type: + title: Error Type + type: string + ValueWithUnit: + title: ValueWithUnit + required: + - unitKey + - value + type: object + properties: + unitKey: + title: Unit Key + type: string + description: Unit for value of the corresponding attribute for the domain object in question. The key can be looked up in the 'frameOfReference.units' for further details. + value: + title: Value + type: number + description: Value of the corresponding attribute for the domain object in question. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + VersionDetailsResponse: + title: VersionDetailsResponse + type: object + properties: + service: + title: Service + type: string + version: + title: Version + type: string + buildNumber: + title: Buildnumber + type: string + details: + title: Details + type: object + additionalProperties: + type: string + WellLocationType: + title: WellLocationType + enum: + - Onshore + - Offshore + - unknown + description: An enumeration. + WellPurpose: + title: WellPurpose + enum: + - appraisal + - appraisal -- confirmation appraisal + - appraisal -- exploratory appraisal + - exploration + - exploration -- deeper-pool wildcat + - exploration -- new-field wildcat + - exploration -- new-pool wildcat + - exploration -- outpost wildcat + - exploration -- shallower-pool wildcat + - development + - development -- infill development + - development -- injector + - development -- producer + - fluid storage + - fluid storage -- gas storage + - general srvc + - general srvc -- borehole re-acquisition + - general srvc -- observation + - general srvc -- relief + - general srvc -- research + - general srvc -- research -- drill test + - general srvc -- research -- strat test + - general srvc -- waste disposal + - mineral + - unknown + description: An enumeration. + WellStatus: + title: WellStatus + enum: + - abandoned + - active + - active -- injecting + - active -- producing + - completed + - drilling + - partially plugged + - permitted + - plugged and abandoned + - proposed + - sold + - suspended + - temporarily abandoned + - testing + - tight + - working over + - unknown + description: An enumeration. + WellType: + title: WellType + enum: + - bypass + - initial + - redrill + - reentry + - respud + - sidetrack + - unknown + description: An enumeration. 
+ WellborePurpose: + title: WellborePurpose + enum: + - appraisal + - appraisal -- confirmation appraisal + - appraisal -- exploratory appraisal + - exploration + - exploration -- deeper-pool wildcat + - exploration -- new-field wildcat + - exploration -- new-pool wildcat + - exploration -- outpost wildcat + - exploration -- shallower-pool wildcat + - development + - development -- infill development + - development -- injector + - development -- producer + - fluid storage + - fluid storage -- gas storage + - general srvc + - general srvc -- borehole re-acquisition + - general srvc -- observation + - general srvc -- relief + - general srvc -- research + - general srvc -- research -- drill test + - general srvc -- research -- strat test + - general srvc -- waste disposal + - mineral + - unknown + description: An enumeration. + WellboreStatus: + title: WellboreStatus + enum: + - abandoned + - active + - active -- injecting + - active -- producing + - completed + - drilling + - partially plugged + - permitted + - plugged and abandoned + - proposed + - sold + - suspended + - temporarily abandoned + - testing + - tight + - working over + - unknown + description: An enumeration. + WellboreType: + title: WellboreType + enum: + - bypass + - initial + - redrill + - reentry + - respud + - sidetrack + - unknown + description: An enumeration. + basinContext: + title: basinContext + type: object + properties: + basinCode: + title: Basin Code + type: string + description: The code of the basin in which the well is located. + basinName: + title: Basin Name + type: string + description: The name of the basin in which the well is located. + subBasinCode: + title: Sub-Basin Code + type: string + description: The code of the sub-basin in which the well is located. + subBasinName: + title: Sub-Basin Name + type: string + description: The name of the sub-basin in which the well is located. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + channel: + title: channel + type: object + properties: + absentValue: + title: Absent Value + type: string + description: Optional field carrying the absent value as string for this channel. + dataType: + title: Data Type + allOf: + - $ref: '#/components/schemas/DataType' + description: The log value type (per log sample). The 'format' property may contain further hints about data type presentation. + default: number + dimension: + title: Dimension + type: integer + description: The dimension of this log or channel + family: + title: Log Family + type: string + description: The log family code of this log or channel (optional) + familyType: + title: Log Family Type + type: string + description: 'The log family type code of this log or channel. Example: ''Neutron Porosity'' for ''Thermal Neutron Porosity Sandstone''. (optional)' + format: + title: Format Hint + allOf: + - $ref: '#/components/schemas/Format' + description: Optional format hint how to treat the log values as strings or number of bits per 'dataType'. + default: float32 + logstoreId: + title: Logstore ID + type: number + description: The id of this log or channel in the Logstore. This property is not present in the index channel. + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. 
+ longName: + title: Log Long Name + type: string + description: The long name of this log or channel + mnemonic: + title: Mnemonic + type: string + description: The mnemonic of this log or channel + name: + title: Log Name + type: string + description: The name of this log or channel. + properties: + title: Named Properties + type: array + items: + $ref: '#/components/schemas/namedProperty' + description: The named properties of this log or channel. + source: + title: Source + type: string + description: 'The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated.' + unitKey: + title: Unit + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + core_dl_geopoint: + title: core_dl_geopoint + required: + - latitude + - longitude + type: object + properties: + latitude: + title: Latitude + maximum: 90 + minimum: -90 + type: number + description: 'The latitude value in degrees of arc (dega). Value range [-90, 90].' + longitude: + title: Longitude + maximum: 180 + minimum: -180 + type: number + description: 'The longitude value in degrees of arc (dega). Value range [-180, 180]' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + geographicPosition: + title: geographicPosition + required: + - crsKey + - elevationFromMsl + - latitude + - longitude + type: object + properties: + crsKey: + title: CRS Key + type: string + description: 'The ''crsKey'', which can be looked up in the ''frameOfReference.crs'' for further details.' + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Seal Level, downwards negative. The unit definition is found via ''elevationFromMsl.unitKey'' in ''frameOfReference.units'' dictionary.' + latitude: + title: Native Latitude + type: number + description: Native or original latitude (unit defined by CRS) + longitude: + title: Native Longitude + type: number + description: Native or original longitude (unit defined by CRS) + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + geometryItem: + title: geometryItem + required: + - geometries + - type + type: object + properties: + bbox: + title: Bbox + type: array + items: + type: number + geometries: + title: Geometries + type: array + items: + anyOf: + - $ref: '#/components/schemas/GeoJsonPoint' + - $ref: '#/components/schemas/GeoJsonMultiPoint' + - $ref: '#/components/schemas/GeoJsonLineString' + - $ref: '#/components/schemas/GeoJsonMultiLineString' + - $ref: '#/components/schemas/Polygon' + - $ref: '#/components/schemas/GeoJsonMultiPolygon' + type: + $ref: '#/components/schemas/Type' + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + historyRecord: + title: historyRecord + type: object + properties: + date: + title: Date and Time + type: string + description: The UTC date time of the log creation/processing + format: date-time + description: + title: ' Description' + type: string + description: 'The description of the context, which produced the log.' 
+ user: + title: User + type: string + description: The user running the log processing. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + log: + title: log + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Log Data + allOf: + - $ref: '#/components/schemas/logData' + description: Log data associated with a wellbore + id: + title: Log Set ID + type: string + description: The unique identifier of the log + kind: + title: Log Kind + type: string + description: Kind specification + default: 'osdu:wks:log:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/Legal' + description: The log's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + status: + title: Entity Status + type: string + description: The status of this log + default: compliant + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this log; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logData: + title: logData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: 'Only supplied with azimuth logs: the azimuth reference code defining the type of North, default TN for true north.' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The wellbore''s elevation reference from mean sea level (MSL), positive above MSL. This is where the index, e.g. MD == 0 and TVD == 0.' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + history: + title: History Records + type: array + items: + $ref: '#/components/schemas/historyRecord' + description: An array of historyRecords describing the context for the log's creation or processing. + log: + title: Log Channel + allOf: + - $ref: '#/components/schemas/logchannel' + description: The log containing the log meta data and log-store reference. 
+ name: + title: Log Set Name + type: string + description: The name of this log set + operation: + title: Operation + type: string + description: The operation which created this Log + reference: + title: Reference Index + allOf: + - $ref: '#/components/schemas/logchannel' + description: 'The reference index - only populated for logs, which are member of a logSet and share the reference index.' + referenceType: + title: Index Type + allOf: + - $ref: '#/components/schemas/ReferenceType' + description: The reference index type of the log set. + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/logRelationships' + description: The related entities. + start: + title: Start + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The start index value of the log set. + step: + title: Step + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The index increment value of the log set. Only populated if the log is regularly sampled. + stop: + title: Stop + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The stop index value of the log set. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + logRelationships: + title: logRelationships + type: object + properties: + logSet: + title: LogSet + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The logSet to which this log belongs. If the log is not part of a log set this relationship stays empty. + timeDepthRelation: + title: TimeDepthRelation LogSet + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The timeDepthRelation to which this log belongs. If the log is not part of a timeDepthRelation this relationship stays empty. + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this log belongs. Only required if the wellbore is unknown. + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this log belongs. This relationship is the most important; only the wellbore can provide the unique context for the measured depth index. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logSetData: + title: logSetData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: Azimuth reference code defining the type of North. Only used for logSets with azimuth data + channelMnemonics: + title: Channel Mnemonics + type: array + items: + type: string + description: A list of channel Mnemonics in this log set. + channelNames: + title: Channel Names + type: array + items: + type: string + description: A list of channel long names in this log set. + classification: + title: Log Set Classification + type: string + description: The well-known log set classification code. + default: Externally Processed LogSet + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + $ref: '#/components/schemas/SimpleElevationReference' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. 
some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + name: + title: Log Set Name + type: string + description: The name of this log set + operation: + title: Operation + type: string + description: The operation which created this entity + reference: + $ref: '#/components/schemas/channel' + referenceType: + title: Reference Type + type: string + description: The reference index type of the log set. + relationships: + $ref: '#/components/schemas/logsetrelationships' + start: + $ref: '#/components/schemas/ValueWithUnit' + step: + $ref: '#/components/schemas/ValueWithUnit' + stop: + $ref: '#/components/schemas/ValueWithUnit' + description: Used for data model allows extra fields which are not declared initially in the pydantic model + logchannel: + title: logchannel + type: object + properties: + columnNames: + title: Column Names + type: array + items: + type: string + description: A list of names for multi-dimensional logs (dimension>1). The length of this array is expected to be equal to 'dimension'. For one-dimensional this property stays empty as the columnName is by definition the log name. + dataType: + title: Data Type + allOf: + - $ref: '#/components/schemas/DataType_2' + description: The log value type (per log sample). The 'format' property may contain further hints about data type presentation. + default: number + dimension: + title: Dimension + type: integer + description: The dimension of this log or channel + family: + title: Log Family + type: string + description: The log family code of this log or channel (optional) + familyType: + title: Log Family Type + type: string + description: 'The log family type code of this log or channel. Example: ''Neutron Porosity'' for ''Thermal Neutron Porosity Sandstone''. (optional)' + format: + title: Format Hint + allOf: + - $ref: '#/components/schemas/Format_2' + description: Optional format hint how to treat the log values as strings or number of bits per 'dataType'. + default: float32 + logstoreId: + title: Logstore ID + type: number + description: The unique id of this log or channel in the Logstore. This property is not present in the index channel. + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + longName: + title: Log Long Name + type: string + description: The long name of this log or channel + mnemonic: + title: Mnemonic + type: string + description: The mnemonic of this log or channel + name: + title: Log Name + type: string + description: The name of this log or channel. + properties: + title: Named Properties + type: array + items: + $ref: '#/components/schemas/namedProperty' + description: The named properties of this log or channel. + source: + title: Source + type: string + description: 'The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated.' + unitKey: + title: Unit + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logset: + title: logset + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. 
+ ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Log Set Data + allOf: + - $ref: '#/components/schemas/logSetData' + description: Log channel set associated with a wellbore + id: + title: Log Set ID + type: string + description: The unique identifier of the log set + kind: + title: Log Set Kind + type: string + description: Kind specification + default: 'osdu:wks:logSet:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/Legal' + description: The log-set's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this log set; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + logsetrelationships: + title: logsetrelationships + required: + - wellbore + type: object + properties: + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this logSet belongs. Only required if the wellbore is unknown. + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this logSet belongs. + wellboreSection: + title: Wellbore Section + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellboreSection to which this logSet belongs. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + namedProperty: + title: namedProperty + type: object + properties: + associations: + title: Associations + type: array + items: + type: string + description: The optional associations contains one or more mnemonics found elsewhere in the logSet. + description: + title: Property Description + type: string + description: The description and role of this property. + format: + title: Format (LAS) + type: string + description: 'An optional format declaration for the property values. The ''A'' prefix indicates an array; string values are represented by ''S''; floating point values are represented by ''F'', optionally followed by a field specification, e.g. ''F10.4''; exponential number representations are represented by ''E''; integer values are represented by ''I''. For further information see the LAS specification http://www.cwls.org/las/.' + name: + title: Property Name + type: string + description: The name of this property. + unitKey: + title: Property Unit Symbol + type: string + description: The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition. + value: + title: Property Value + anyOf: + - type: number + - type: string + description: The value for this property as a string or a number. + values: + title: Property Values (Interval) + type: array + items: + type: number + description: 'The values, e.g. interval boundaries, for this property.' 
+ additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + projectedPosition: + title: projectedPosition + required: + - crsKey + - elevationFromMsl + - x + - 'y' + type: object + properties: + crsKey: + title: CRS Key + type: string + description: 'The ''crsKey'', which can be looked up in the ''frameOfReference.crs'' for further details.' + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Seal Level, downwards negative. The unit definition is found via ''elevationFromMsl.unitKey'' in ''frameOfReference.units'' dictionary.' + x: + title: X Coordinate + type: number + description: X-coordinate value in native or original projected CRS + 'y': + title: Y Coordinate + type: number + description: Y-coordinate value in native or original projected CRS + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + trajectory: + title: trajectory + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Trajectory Data + allOf: + - $ref: '#/components/schemas/trajectoryData' + description: A log set representing a trajectory associated with a wellbore + id: + title: Trajectory ID + type: string + description: The unique identifier of the trajectory + kind: + title: Trajectory Kind + type: string + description: Kind specification + default: 'osdu:wks:trajectory:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/Legal' + description: The trajectory's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this trajectory; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + trajectoryData: + title: trajectoryData + type: object + properties: + azimuthReference: + title: Azimuth Reference Code + type: string + description: 'Azimuth reference code defining the type of North, default TN for true north.' + channelMnemonics: + title: Channel Mnemonics + type: array + items: + type: string + description: A list of channel Mnemonics in this trajectory. + channelNames: + title: Channel Names + type: array + items: + type: string + description: A list of channel long names in this trajectory. + channels: + title: Channels + type: array + items: + $ref: '#/components/schemas/trajectorychannel' + description: The channels associated to the index. + classification: + title: Trajectory Classification + type: string + description: The well-known trajectory classification code. 
+ default: Raw Deviation Survey + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The wellbore''s elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + index: + title: Index Channel + allOf: + - $ref: '#/components/schemas/trajectorychannel' + description: The index channel or log. + indexType: + title: Index Type + type: string + description: The index type of the trajectory. + locationWGS84: + title: Trajectory preview + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: The wellbore's trajectory preview shape as GeoJSON LineString. + name: + title: Trajectory Name + type: string + description: The name of this trajectory + referencePosition: + title: Reference Position First Sample + allOf: + - $ref: '#/components/schemas/Point3dNonGeoJson' + description: 'The 3D reference position for the first sample (surface location for main wellbores, tie-in point for side-tracks.' + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/trajectoryrelationships' + description: The related entities. + start: + title: Start + type: number + description: The start index value of the trajectory. + step: + title: Step + type: number + description: The index increment value of the trajectory. + stop: + title: Stop + type: number + description: The stop index value of the trajectory. + wellHeadWgs84: + title: WGS 84 Position + allOf: + - $ref: '#/components/schemas/wgs84Position' + description: 'The wellbore''s position in WGS 84 latitude and longitude; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + description: Used for data model allows extra fields which are not declared initially in the pydantic model + trajectorychannel: + title: trajectorychannel + type: object + properties: + absentValue: + title: Absent Value + type: string + description: Optional field carrying the absent value as string for this channel. + azimuthKey: + title: Azimuth Reference Key + type: string + description: The azimuth reference of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.azimuth' dictionary. + crsKey: + title: CRS Key + type: string + description: The CRS key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.crs' dictionary. + dataType: + title: Data Type + allOf: + - $ref: '#/components/schemas/DataType_1' + description: The log value type (per log sample). The 'format' property may contain further hints about data type presentation. 
+ default: number + dimension: + title: Dimension + type: integer + description: The dimension of this log or channel + family: + title: Log Family + type: string + description: The log family code of this log or channel (optional) + familyType: + title: Log Family Type + type: string + description: 'The log family type code of this log or channel. Example: ''Neutron Porosity'' for ''Thermal Neutron Porosity Sandstone''. (optional)' + format: + title: Format Hint + allOf: + - $ref: '#/components/schemas/Format_1' + description: Optional format hint how to treat the log values as strings or number of bits per 'dataType'. + default: float32 + logstoreId: + title: Logstore ID + type: number + description: The id of this log or channel in the Logstore. This property is not present in the index channel. + bulkURI: + title: bulk URI + type: string + description: bulkURI either URL or URN. + longName: + title: Log Long Name + type: string + description: The long name of this log or channel + mnemonic: + title: Mnemonic + type: string + description: The mnemonic of this log or channel + name: + title: Log Name + type: string + description: The name of this log or channel. + properties: + title: Properties + type: array + items: + type: string + description: The properties of this log or channel. + source: + title: Source + type: string + description: 'The source of this log or channel as a data reference; Typically this refers to the raw trajectory, from which this log WKE is generated.' + unitKey: + title: Unit Key + type: string + description: The unit key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.units' dictionary. Empty units (NoUnit) are not recorded. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + trajectoryrelationships: + title: trajectoryrelationships + required: + - wellbore + type: object + properties: + wellbore: + title: Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The wellbore to which this trajectory belongs. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + well: + title: well + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. + ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Well Data + allOf: + - $ref: '#/components/schemas/wellData' + description: Well data container + id: + title: Well ID + type: string + description: The unique identifier of the well + kind: + title: Well Kind + type: string + description: Well-known well kind specification + default: 'osdu:wks:well:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/Legal' + description: The geological interpretation's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' 
+ version: + title: Entity Version Number + type: number + description: The version number of this well; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wellData: + title: wellData + type: object + properties: + basinContext: + title: Basin Context + allOf: + - $ref: '#/components/schemas/basinContext' + description: The basin context details for the well. + block: + title: Block + type: string + description: 'The block name, in which the well is located.' + country: + title: Country + type: string + description: 'The country, in which the well is located. The country name follows the convention in ISO 3166-1 ''English short country name'', see https://en.wikipedia.org/wiki/ISO_3166-1' + county: + title: County + type: string + description: 'The county name, in which the well is located.' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateLicenseIssued: + title: License Issue Date + type: string + description: The UTC date time when the well license was issued. + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + datePluggedAbandoned: + title: Plugged Abandoned Date + type: string + description: The UTC date and time at which the well was plugged and abandoned. + format: date-time + dateSpudded: + title: Spud Date + type: string + description: 'The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began. The format follows ISO 8601 YYYY-MM-DD extended format' + format: date-time + directionWell: + title: Well Direction + allOf: + - $ref: '#/components/schemas/DirectionWell' + description: 'POSC well direction. The direction of the flow of the fluids in a well facility (generally, injected or produced, or some combination).' + district: + title: District + type: string + description: 'The district name, to which the well belongs.' + elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The well''s elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + field: + title: Field + type: string + description: 'The field name, to which the well belongs.' + fluidWell: + title: Well Fluid + allOf: + - $ref: '#/components/schemas/FluidWell' + description: POSC well fluid. The type of fluid being produced from or injected \ninto a well facility. + groundElevation: + title: Ground Elevation + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The well''s ground elevation, Values above MSL are positive..' + locationWGS84: + title: Well Shape WGS 84 + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: A 2D GeoJSON FeatureCollection defining well location or trajectory in WGS 84 CRS. + name: + title: Well Name + type: string + description: The well name + operator: + title: Well Operator + type: string + description: The operator company name of the well. 
+ operatorDivision: + title: Operator Division + type: string + description: The operator division of the well. + operatorInterest: + title: Well Operator Interest + type: number + description: Interest for operator. Commonly in percent. + operatorOriginal: + title: Original Well Operator + type: string + description: Original operator of the well. This may be different than the current operator. + plssLocation: + title: US PLSS Location + allOf: + - $ref: '#/components/schemas/PlssLocation' + description: A location described by the Public Land Survey System (United States) + propertyDictionary: + title: Property Dictionary + type: object + description: 'A dictionary structure, i.e. key/string value pairs, to carry additional well properties.' + region: + title: Region + type: string + description: Geo-political region in which the well is located. + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/wellrelationships' + description: The related entities. + state: + title: State + type: string + description: 'The state name, in which the well is located.' + uwi: + title: Unique Well Identifier + type: string + description: 'The unique well identifier, aka. API number, US well number or UBHI. Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits).' + waterDepth: + title: Water Depth + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Depth of water (not land rigs). + wellHeadElevation: + title: Well Head Elevation + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The well''s vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadGeographic: + title: 'Well Head Position, Geographic' + allOf: + - $ref: '#/components/schemas/geographicPosition' + description: 'The well''s well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadProjected: + title: 'Well Head Position, Projected' + allOf: + - $ref: '#/components/schemas/projectedPosition' + description: 'The well''s well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadWgs84: + title: WGS 84 Position + allOf: + - $ref: '#/components/schemas/core_dl_geopoint' + description: The well's position in WGS 84 latitude and longitude. + wellLocationType: + $ref: '#/components/schemas/WellLocationType' + wellNumberGovernment: + title: Government Number + type: string + description: Government assigned well number. + wellNumberLicense: + title: Well License Number + type: string + description: License number of the well. + wellNumberOperator: + title: Operator Number + type: string + description: Operator well number. + wellPurpose: + title: Well Purpose + allOf: + - $ref: '#/components/schemas/WellPurpose' + description: POSC well purpose + wellStatus: + title: Well Status + allOf: + - $ref: '#/components/schemas/WellStatus' + description: POSC well status. + wellType: + title: Well Type + allOf: + - $ref: '#/components/schemas/WellType' + description: Type of well. + description: Used for data model allows extra fields which are not declared initially in the pydantic model + wellbore: + title: wellbore + type: object + properties: + acl: + title: Access Control List + allOf: + - $ref: '#/components/schemas/TagDictionary' + description: The access control tags associated with this entity. 
+ ancestry: + title: Ancestry + allOf: + - $ref: '#/components/schemas/LinkList' + description: 'The links to data, which constitute the inputs.' + data: + title: Wellbore Data + allOf: + - $ref: '#/components/schemas/wellboreData' + description: Wellbore data container + id: + title: Wellbore ID + type: string + description: The unique identifier of the wellbore + kind: + title: Wellbore Kind + type: string + description: Well-known wellbore kind specification + default: 'osdu:wks:wellbore:0.0.1' + legal: + title: Legal Tags + allOf: + - $ref: '#/components/schemas/Legal' + description: The geological interpretation's legal tags + meta: + title: Frame of Reference Meta Data + type: array + items: + $ref: '#/components/schemas/MetaItem' + description: 'The meta data section linking the ''unitKey'', ''crsKey'' to self-contained definitions (persistableReference)' + type: + title: Entity Type + type: string + description: 'The reference entity type as declared in common:metadata:entity:*.' + version: + title: Entity Version Number + type: number + description: The version number of this wellbore; set by the framework. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wellboreData: + title: wellboreData + type: object + properties: + airGap: + title: Air Gap + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The gap between water surface and offshore drilling platform. + block: + title: Block + type: string + description: 'The block name, in which the wellbore is located.' + country: + title: Country + type: string + description: 'The country, in which the wellbore is located. The country name follows the convention in ISO 3166-1 ''English short country name'', see https://en.wikipedia.org/wiki/ISO_3166-1' + county: + title: County + type: string + description: 'The county name, in which the wellbore is located.' + dateCreated: + title: Creation Date and Time + type: string + description: The UTC date time of the entity creation + format: date-time + dateModified: + title: Last Modification Date and Time + type: string + description: The UTC date time of the last entity modification + format: date-time + drillingDaysTarget: + title: Target Drilling Days + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Target days for drilling wellbore. + elevationReference: + title: Elevation Reference + allOf: + - $ref: '#/components/schemas/SimpleElevationReference' + description: 'The wellbore''s elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0' + externalIds: + title: Array of External IDs + type: array + items: + type: string + description: 'An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity.' + field: + title: Field + type: string + description: 'The field name, to which the wellbore belongs.' + formationAtTd: + title: Formation at TD + type: string + description: The name of the formation at the wellbore's total depth. + formationProjected: + title: Formation Projected + type: string + description: The name of the formation at the wellbore's projected depth. This property is questionable as there is not precise documentation available. + hasAchievedTotalDepth: + title: Has Total Depth Been Achieved Flag + type: boolean + description: 'True ("true" of "1") indicates that the wellbore has acheieved total depth. 
That is, drilling has completed. False ("false" or "0") indicates otherwise. Not given indicates that it is not known whether total depth has been reached.' + default: true + isActive: + title: Is Active Flag + type: boolean + description: 'True (="1" or "true") indicates that the wellbore is active. False (="0" or "false") indicates otherwise. It is the server''s responsibility to set this value based on its available internal data (e.g., what objects are changing).' + kickOffMd: + title: Kick-off MD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The kick-off point in measured depth (MD); for the main well the kickOffMd is set to 0. + kickOffTvd: + title: Kick-off TVD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Kickoff true vertical depth of the wellbore; for the main wellbore the kickOffMd is set to 0. + locationWGS84: + title: Wellbore Shape WGS 84 + allOf: + - $ref: '#/components/schemas/GeoJsonFeatureCollection' + description: A 2D GeoJSON FeatureCollection defining wellbore location or trajectory in WGS 84 CRS. + name: + title: Wellbore Name + type: string + description: The wellbore name + operator: + title: Operator + type: string + description: The operator of the wellbore. + permitDate: + title: Permit Date + type: string + description: The wellbore's permit date. + format: date + permitNumber: + title: Permit Number + type: string + description: The wellbore's permit number or permit ID. + plssLocation: + title: US PLSS Location + allOf: + - $ref: '#/components/schemas/PlssLocation' + description: A location described by the Public Land Survey System (United States) + propertyDictionary: + title: Property Dictionary + type: object + description: 'A dictionary structure, i.e. key/string value pairs, to carry additional wellbore properties.' + relationships: + title: Relationships + allOf: + - $ref: '#/components/schemas/wellborerelationships' + description: The related entities. + shape: + title: Wellbore Shape + allOf: + - $ref: '#/components/schemas/Shape' + description: POSC wellbore trajectory shape. + spudDate: + title: Spud Date + type: string + description: 'The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began. The format follows ISO 8601 YYYY-MM-DD extended format' + format: date + state: + title: State + type: string + description: 'The state name, in which the wellbore is located.' + totalDepthMd: + title: Total MD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The measured depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site.' + totalDepthMdDriller: + title: Total MD Drilled + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The total depth along the wellbore as reported by the drilling contractor from 'elevationReference'. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary. + totalDepthMdPlanned: + title: Total MD Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned measured depth for the wellbore total depth. + totalDepthMdSubSeaPlanned: + title: Total MD Sub Sea Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned measured depth for the wellbore total depth - with respect to seabed. 
+ totalDepthProjectedMd: + title: Total MD Projected + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: The projected total measured depth of the borehole. This property is questionable as there is not precise documentation available. + totalDepthTvd: + title: Total TVD + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The true vertical depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site.' + totalDepthTvdDriller: + title: Total TVD Drilled + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The total depth true vertical as reported by the drilling contractor from ''elevationReference'', Downwards increasing. The unit definition is found via the property''s unitKey'' in ''frameOfReference.units'' dictionary.' + totalDepthTvdPlanned: + title: Total TVD Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned true vertical depth for the wellbore total depth. + totalDepthTvdSubSeaPlanned: + title: Total TVD Sub Sea Planned + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: Planned true vertical depth for the wellbore total depth - with respect to seabed. + uwi: + title: Unique Wellbore Identifier + type: string + description: 'The unique wellbore identifier, aka. API number, US well number or UBHI. Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits).' + wellHeadElevation: + title: Well Head Elevation + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'The wellbore''s vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadGeographic: + title: 'Well Head Position, Geographic' + allOf: + - $ref: '#/components/schemas/geographicPosition' + description: 'The wellbore''s well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadProjected: + title: 'Well Head Position, Projected' + allOf: + - $ref: '#/components/schemas/projectedPosition' + description: 'The wellbore''s well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL.' + wellHeadWgs84: + title: WGS 84 Position + allOf: + - $ref: '#/components/schemas/core_dl_geopoint' + description: The wellbore's position in WGS 84 latitude and longitude. + wellboreNumberGovernment: + title: Government Number + type: string + description: Government assigned wellbore number. + wellboreNumberOperator: + title: Operator Number + type: string + description: Operator wellbore number. + wellborePurpose: + title: Wellbore Purpose + allOf: + - $ref: '#/components/schemas/WellborePurpose' + description: POSC wellbore purpose + wellboreStatus: + title: Wellbore Status + allOf: + - $ref: '#/components/schemas/WellboreStatus' + description: POSC wellbore status. + wellboreType: + title: Wellbore Type + allOf: + - $ref: '#/components/schemas/WellboreType' + description: Type of wellbore. 
+ description: Used for data model allows extra fields which are not declared initially in the pydantic model + wellborerelationships: + title: wellborerelationships + type: object + properties: + definitiveTimeDepthRelation: + title: Definitive Time-Depth Relation + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The definitive time-depth relation providing the MD to seismic travel-time transformation. + definitiveTrajectory: + title: Definitive Trajectory + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The definitive trajectory providing the MD to 3D space transformation. + tieInWellbore: + title: Tie-in Wellbore + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The tie-in wellbore if this wellbore is a side-track. + well: + title: Well + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The well to which this wellbore belongs. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wellrelationships: + title: wellrelationships + type: object + properties: + asset: + title: Asset + allOf: + - $ref: '#/components/schemas/ToOneRelationship' + description: The asset this well belongs to. + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + wgs84Position: + title: wgs84Position + required: + - elevationFromMsl + - latitude + - longitude + type: object + properties: + elevationFromMsl: + title: Elevation from MSL + allOf: + - $ref: '#/components/schemas/ValueWithUnit' + description: 'Elevation from Mean Sea Level, downwards negative. The unit definition is found via ''elevationFromMsl.unitKey'' in ''frameOfReference.units'' dictionary.' 
+ latitude: + title: WGS 84 Latitude + type: number + description: WGS 84 latitude value in degrees (dega) + longitude: + title: WGS 84 Longitude + type: number + description: WGS 84 longitude value in degrees (dega) + additionalProperties: false + description: The base model forbids fields which are not declared initially in the pydantic model + GetLogDataResponse: + oneOf: + - title: SplitFormat + type: object + properties: + data: + title: Data + anyOf: + - type: array + items: + anyOf: + - type: string + - type: integer + - type: number + - type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + columns: + title: Columns + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + index: + title: Index + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + required: + - data + - title: IndexFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ColumnFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: RecordsFormat + type: object + properties: + TODO: + title: Todo + type: string + required: + - TODO + - title: ValuesFormat + type: array + items: + type: array + items: + anyOf: + - type: string + - type: integer + - type: number + securitySchemes: + bearer: + type: apiKey + name: Authorization + in: header + appkey: + type: apiKey + in: header + name: appkey \ No newline at end of file diff --git a/spec/generated/openapi.json b/spec/generated/openapi.json new file mode 100644 index 0000000000000000000000000000000000000000..afa35d500ec41b9346034dd4d5df7b56cff0d1d8 --- /dev/null +++ b/spec/generated/openapi.json @@ -0,0 +1,10166 @@ +{ + "openapi": "3.0.2", + "info": { + "title": "Wellbore DDMS OSDU", + "description": "build local", + "version": "0.2" + }, + "paths": { + "/ddms/v2/about": { + "get": { + "summary": "Get About", + "operationId": "get_about_ddms_v2_about_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AboutResponse" + } + } + } + } + } + } + }, + "/ddms/v2/version": { + "get": { + "summary": "Get Version", + "operationId": "get_version_ddms_v2_version_get", + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/VersionDetailsResponse" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/status": { + "get": { + "tags": [ + "Wellbore DDMS" + ], + "summary": "Get the status of the service", + "operationId": "about_ddms_v2_status_get", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/V1AboutResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wells/{wellid}": { + "get": { + "tags": [ + "Well" + 
], + "summary": "Get the Well using wks:well:1.0.2 schema", + "description": "Get the Well object using its **id**. <p>If the well kind is\n *wks:well:1.0.2* returns the record directly</p> <p>If the well\n kind is different *wks:well:1.0.2* it will get the raw record and\n convert the results to match the *wks:well:1.0.2*. If convertion is\n not possible returns an error **500**", + "operationId": "get_well", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellid", + "type": "string" + }, + "name": "wellid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/well" + } + } + } + }, + "404": { + "description": "Well not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Well" + ], + "summary": "Delete the well. The API performs a logical deletion of the given record", + "operationId": "del_well", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellid", + "type": "string" + }, + "name": "wellid", + "in": "path" + }, + { + "description": "Whether or not to delete records children", + "required": false, + "schema": { + "title": "Recursive", + "type": "boolean", + "description": "Whether or not to delete records children", + "default": false + }, + "name": "recursive", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "204": { + "description": "Record deleted successfully" + }, + "404": { + "description": "Well not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wells/{wellid}/versions": { + "get": { + "tags": [ + "Well" + ], + "summary": "Get all versions of the Well", + "operationId": "get_well_versions", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellid", + "type": "string" + }, + "name": "wellid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecordVersions" + } + } + } + }, + "404": { + "description": "Well not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, 
+ "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wells/{wellid}/versions/{version}": { + "get": { + "tags": [ + "Well" + ], + "summary": "Get the given version of the Well using wks:well:1.0.2 schema", + "description": "\"Get the Well object using its **id**. <p>If the well kind is\n *wks:well:1.0.2* returns the record directly</p> <p>If the well\n kind is different *wks:well:1.0.2* it will get the raw record and\n convert the results to match the *wks:well:1.0.2*. If convertion is\n not possible returns an error **500**", + "operationId": "get_well_version", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellid", + "type": "string" + }, + "name": "wellid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Version", + "type": "integer" + }, + "name": "version", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/well" + } + } + } + }, + "404": { + "description": "Well not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wells": { + "post": { + "tags": [ + "Well" + ], + "summary": "Create or update the Wells using wks:well:1.0.2 schema", + "operationId": "post_well", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Wells", + "type": "array", + "items": { + "$ref": "#/components/schemas/well" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "Missing mandatory parameter or unknown parameter" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wellbores/{wellboreid}": { + "get": { + "tags": [ + "Wellbore" + ], + "summary": "Get the Wellbore using wks:wellbore:1.0.6 schema", + "description": "Get the Wellbore object using its **id**. <p>If the wellbore kind is\n *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore\n kind is different *wks:wellbore:1.0.6* it will get the raw record and\n convert the results to match the *wks:wellbore:1.0.6*. 
If convertion is\n not possible returns an error **500**", + "operationId": "get_wellbore", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellboreid", + "type": "string" + }, + "name": "wellboreid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/wellbore" + } + } + } + }, + "404": { + "description": "Wellbore not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Wellbore" + ], + "summary": "Delete the wellbore. The API performs a logical deletion of the given record", + "operationId": "del_wellbore", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellboreid", + "type": "string" + }, + "name": "wellboreid", + "in": "path" + }, + { + "description": "Whether or not to delete records children", + "required": false, + "schema": { + "title": "Recursive", + "type": "boolean", + "description": "Whether or not to delete records children", + "default": false + }, + "name": "recursive", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "204": { + "description": "Record deleted successfully" + }, + "404": { + "description": "Wellbore not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wellbores/{wellboreid}/versions": { + "get": { + "tags": [ + "Wellbore" + ], + "summary": "Get all versions of the Wellbore", + "operationId": "get_wellbore_versions", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellboreid", + "type": "string" + }, + "name": "wellboreid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecordVersions" + } + } + } + }, + "404": { + "description": "Wellbore not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wellbores/{wellboreid}/versions/{version}": { + "get": { + "tags": [ + "Wellbore" + ], + "summary": "Get the given version of the Wellbore using wks:wellbore:1.0.6 schema", + 
"description": "\"Get the Wellbore object using its **id**. <p>If the wellbore kind is\n *wks:wellbore:1.0.6* returns the record directly</p> <p>If the wellbore\n kind is different *wks:wellbore:1.0.6* it will get the raw record and\n convert the results to match the *wks:wellbore:1.0.6*. If convertion is\n not possible returns an error **500**", + "operationId": "get_wellbore_version", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellboreid", + "type": "string" + }, + "name": "wellboreid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Version", + "type": "integer" + }, + "name": "version", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/wellbore" + } + } + } + }, + "404": { + "description": "Wellbore not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/wellbores": { + "post": { + "tags": [ + "Wellbore" + ], + "summary": "Create or update the Wellbores using wks:wellbore:1.0.6 schema", + "operationId": "post_wellbore", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Wellbores", + "type": "array", + "items": { + "$ref": "#/components/schemas/wellbore" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "Missing mandatory parameter or unknown parameter" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logsets/{logsetid}": { + "get": { + "tags": [ + "Logset" + ], + "summary": "Get the LogSet using wks:logSet:1.0.5 schema", + "description": "Get the LogSet object using its **id**", + "operationId": "get_logset", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logsetid", + "type": "string" + }, + "name": "logsetid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/logset" + } + } + } + }, + "404": { + "description": "LogSet not found" + }, + 
"422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Logset" + ], + "summary": "Delete the LogSet. The API performs a logical deletion of the given record", + "operationId": "del_logset", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logsetid", + "type": "string" + }, + "name": "logsetid", + "in": "path" + }, + { + "description": "Whether or not to delete records children", + "required": false, + "schema": { + "title": "Recursive", + "type": "boolean", + "description": "Whether or not to delete records children", + "default": false + }, + "name": "recursive", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "204": { + "description": "Record deleted successfully" + }, + "404": { + "description": "LogSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logsets/{logsetid}/versions": { + "get": { + "tags": [ + "Logset" + ], + "summary": "Get all versions of the logset", + "operationId": "get_logset_versions", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logsetid", + "type": "string" + }, + "name": "logsetid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecordVersions" + } + } + } + }, + "404": { + "description": "LogSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logsets/{logsetid}/versions/{version}": { + "get": { + "tags": [ + "Logset" + ], + "summary": "Get the given version of LogSet using wks:logSet:1.0.5 schema", + "description": "\"Get the LogSet object using its **id**.", + "operationId": "get_logset_version", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logsetid", + "type": "string" + }, + "name": "logsetid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Version", + "type": "integer" + }, + "name": "version", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": 
"#/components/schemas/logset" + } + } + } + }, + "404": { + "description": "LogSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logsets/{logsetid}/harmonize": { + "post": { + "tags": [ + "Logset" + ], + "summary": "Create or update the LogSets using wks:logSet:1.0.5 schema", + "operationId": "harmonize_logset", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logsetid", + "type": "string" + }, + "name": "logsetid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/logset" + } + } + } + }, + "404": { + "description": "logset not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logsets": { + "post": { + "tags": [ + "Logset" + ], + "summary": "Create or update the LogSets using wks:logSet:1.0.5 schema", + "operationId": "put_logset", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Logsets", + "type": "array", + "items": { + "$ref": "#/components/schemas/logset" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "Missing mandatory parameter or unknown parameter" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/trajectories/{trajectoryid}": { + "get": { + "tags": [ + "Trajectory" + ], + "summary": "Get the trajectory using wks:trajectory:1.0.5 schema", + "description": "Get the Trajectory object using its **id**", + "operationId": "get_trajectory", + "parameters": [ + { + "required": true, + "schema": { + "title": "Trajectoryid", + "type": "string" + }, + "name": "trajectoryid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/trajectory" + } + } + } + }, + "404": { + 
"description": "Trajectory not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Trajectory" + ], + "summary": "Delete the Trajectory. The API performs a logical deletion of the given record", + "operationId": "del_trajectory", + "parameters": [ + { + "required": true, + "schema": { + "title": "Trajectoryid", + "type": "string" + }, + "name": "trajectoryid", + "in": "path" + }, + { + "description": "Whether or not to delete records children", + "required": false, + "schema": { + "title": "Recursive", + "type": "boolean", + "description": "Whether or not to delete records children", + "default": false + }, + "name": "recursive", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "204": { + "description": "Record deleted successfully" + }, + "404": { + "description": "Trajectory not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/trajectories/{trajectoryid}/versions": { + "get": { + "tags": [ + "Trajectory" + ], + "summary": "Get all versions of the Trajectory", + "operationId": "get_trajectory_versions", + "parameters": [ + { + "required": true, + "schema": { + "title": "Trajectoryid", + "type": "string" + }, + "name": "trajectoryid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecordVersions" + } + } + } + }, + "404": { + "description": "Trajectory not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/trajectories/{trajectoryid}/versions/{version}": { + "get": { + "tags": [ + "Trajectory" + ], + "summary": "Get the given version of Trajectory using wks:Trajectory:1.0.5 schema", + "operationId": "get_trajectory_version", + "parameters": [ + { + "required": true, + "schema": { + "title": "Trajectoryid", + "type": "string" + }, + "name": "trajectoryid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Version", + "type": "integer" + }, + "name": "version", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + 
"content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/trajectory" + } + } + } + }, + "404": { + "description": "Trajectory not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/trajectories": { + "post": { + "tags": [ + "Trajectory" + ], + "summary": "Create or update the trajectories using wks:Trajectory:1.0.5 schema", + "operationId": "post_trajectory", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Trajectories", + "type": "array", + "items": { + "$ref": "#/components/schemas/trajectory" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "Missing mandatory parameter or unknown parameter" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/trajectories/{trajectoryid}/data": { + "get": { + "tags": [ + "Trajectory" + ], + "summary": "Returns all data within the specified filters. Strongly consistent.", + "description": "return full bulk data", + "operationId": "get_traj_data", + "parameters": [ + { + "required": true, + "schema": { + "title": "Trajectoryid", + "type": "string" + }, + "name": "trajectoryid", + "in": "path" + }, + { + "description": "List of channels to get. If not provided, return all channels.", + "required": false, + "schema": { + "title": "Channels", + "type": "array", + "items": { + "type": "string" + }, + "description": "List of channels to get. If not provided, return all channels." + }, + "name": "channels", + "in": "query" + }, + { + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "required": false, + "schema": { + "title": "Orient", + "type": "string", + "description": "define format when using JSON data is used. 
Value can be split, index, columns, records, values", + "default": "split" + }, + "name": "orient", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Get trajectory data of the given channels.\nIt uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html).\n Here're examples for data with 5 rows for channels MD, X, Y with different _orient_: \n* split: <br/>`{\"columns\":[\"MD\",\"X\",\"Y\"],\"index\":[0,1,2,3,4],\"data\":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> \n* index: <br/>`{\"0\":{\"MD\":0.0,\"X\":1001,\"Y\":2001},\"1\":{\"MD\":0.5,\"X\":1002,\"Y\":2002},\"2\":{\"MD\":1.0,\"X\":1003,\"Y\":2003},\"3\":{\"MD\":1.5,\"X\":1004,\"Y\":2004},\"4\":{\"MD\":2.0,\"X\":1005,\"Y\":2005}}`<br/> \n* columns: <br/>`{\"MD\":{\"0\":0.0,\"1\":0.5,\"2\":1.0,\"3\":1.5,\"4\":2.0},\"X\":{\"0\":1001,\"1\":1002,\"2\":1003,\"3\":1004,\"4\":1005},\"Y\":{\"0\":2001,\"1\":2002,\"2\":2003,\"3\":2004,\"4\":2005}}`<br/> \n* records: <br/>`[{\"MD\":0.0,\"X\":1001,\"Y\":2001},{\"MD\":0.5,\"X\":1002,\"Y\":2002},{\"MD\":1.0,\"X\":1003,\"Y\":2003},{\"MD\":1.5,\"X\":1004,\"Y\":2004},{\"MD\":2.0,\"X\":1005,\"Y\":2005}]`<br/> \n* values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> ", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetLogDataResponse" + }, + "example": "{\"columns\":[\"MD\",\"X\",\"Y\"],\"index\":[0,1,2,3,4],\"data\":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}" + } + } + }, + "404": { + "description": "trajectory not found" + }, + "400": { + "description": "unknown channels" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "post": { + "tags": [ + "Trajectory" + ], + "summary": "Writes the specified data to the trajectory (atomic).", + "description": "Overwrite if exists", + "operationId": "post_traj_data", + "parameters": [ + { + "required": true, + "schema": { + "title": "Trajectoryid", + "type": "string" + }, + "name": "trajectoryid", + "in": "path" + }, + { + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "required": false, + "schema": { + "title": "Orient", + "type": "string", + "description": "define format when using JSON data is used. 
Value can be split, index, columns, records, values", + "default": "split" + }, + "name": "orient", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "404": { + "description": "trajectory not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/markers/{markerid}": { + "get": { + "tags": [ + "Marker" + ], + "summary": "Get the marker using wks:marker:1.0.4 schema", + "description": "Get the Marker object using its **id**", + "operationId": "get_marker", + "parameters": [ + { + "required": true, + "schema": { + "title": "Markerid", + "type": "string" + }, + "name": "markerid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/marker" + } + } + } + }, + "404": { + "description": "marker not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Marker" + ], + "summary": "Delete the marker. 
The API performs a logical deletion of the given record", + "operationId": "del_marker", + "parameters": [ + { + "required": true, + "schema": { + "title": "Markerid", + "type": "string" + }, + "name": "markerid", + "in": "path" + }, + { + "description": "Whether or not to delete records children", + "required": false, + "schema": { + "title": "Recursive", + "type": "boolean", + "description": "Whether or not to delete records children", + "default": false + }, + "name": "recursive", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "204": { + "description": "Record deleted successfully" + }, + "404": { + "description": "Marker not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/markers/{markerid}/versions": { + "get": { + "tags": [ + "Marker" + ], + "summary": "Get all versions of the marker", + "operationId": "get_marker_versions", + "parameters": [ + { + "required": true, + "schema": { + "title": "Markerid", + "type": "string" + }, + "name": "markerid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecordVersions" + } + } + } + }, + "404": { + "description": "marker not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/markers/{markerid}/versions/{version}": { + "get": { + "tags": [ + "Marker" + ], + "summary": "Get the given version of marker using wks:marker:1.0.4 schema", + "operationId": "get_marker_version", + "parameters": [ + { + "required": true, + "schema": { + "title": "Markerid", + "type": "string" + }, + "name": "markerid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Version", + "type": "integer" + }, + "name": "version", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/marker" + } + } + } + }, + "404": { + "description": "marker not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/markers": { + "post": { + "tags": [ + 
"Marker" + ], + "summary": "Create or update the markers using wks:marker:1.0.4 schema", + "operationId": "post_marker", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Markers", + "type": "array", + "items": { + "$ref": "#/components/schemas/marker" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "Missing mandatory parameter or unknown parameter" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logs/{logid}": { + "get": { + "tags": [ + "Log" + ], + "summary": "Get the Log using wks:log:1.0.5 schema", + "description": "\n Get the log object using its data ecosystem **id**. <p>If the log\n kind is *wks:log:1.0.5* returns the record directly</p> <p>If the\n wellbore kind is different *wks:log:1.0.5* it will get the raw\n record and convert the results to match the *wks:log:1.0.5*. If\n conversion is not possible returns an error **500**</p>", + "operationId": "get_log", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/log" + } + } + } + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Log" + ], + "summary": "Delete the log. 
The API performs a logical deletion of the given record", + "operationId": "del_log", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "204": { + "description": "Record deleted successfully" + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logs": { + "post": { + "tags": [ + "Log" + ], + "summary": "Create or update the logs using wks:log:1.0.5 schema", + "operationId": "post_log", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Logs", + "type": "array", + "items": { + "$ref": "#/components/schemas/log" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "Missing mandatory parameter or unknown parameter" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logs/{logid}/versions": { + "get": { + "tags": [ + "Log" + ], + "summary": "Get all versions of the log", + "operationId": "get_log_versions", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecordVersions" + } + } + } + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logs/{logid}/versions/{version}": { + "get": { + "tags": [ + "Log" + ], + "summary": "Get the given version of log using wks:log:1.0.5 schema", + "operationId": "get_log_version", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Version", + "type": "integer" + 
}, + "name": "version", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/log" + } + } + } + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logs/{logid}/data": { + "get": { + "tags": [ + "Log" + ], + "summary": "Returns all data within the specified filters. Strongly consistent.", + "description": "return full bulk data", + "operationId": "get_log_data", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "required": false, + "schema": { + "title": "Orient", + "type": "string", + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "default": "split" + }, + "name": "orient", + "in": "query" + }, + { + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.", + "required": false, + "schema": { + "title": "Bulk-Path", + "type": "string", + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log." 
+ }, + "name": "bulk-path", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Get log bulk data in format in the given _orient_ value.\nIt uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html).\n Here're examples for data with 5 rows and 3 columns with different _orient_: \n* split: <br/>`{\"columns\":[\"Ref\",\"col_100X\",\"col_200X\"],\"index\":[0,1,2,3,4],\"data\":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> \n* index: <br/>`{\"0\":{\"Ref\":0.0,\"col_100X\":1001,\"col_200X\":2001},\"1\":{\"Ref\":0.5,\"col_100X\":1002,\"col_200X\":2002},\"2\":{\"Ref\":1.0,\"col_100X\":1003,\"col_200X\":2003},\"3\":{\"Ref\":1.5,\"col_100X\":1004,\"col_200X\":2004},\"4\":{\"Ref\":2.0,\"col_100X\":1005,\"col_200X\":2005}}`<br/> \n* columns: <br/>`{\"Ref\":{\"0\":0.0,\"1\":0.5,\"2\":1.0,\"3\":1.5,\"4\":2.0},\"col_100X\":{\"0\":1001,\"1\":1002,\"2\":1003,\"3\":1004,\"4\":1005},\"col_200X\":{\"0\":2001,\"1\":2002,\"2\":2003,\"3\":2004,\"4\":2005}}`<br/> \n* records: <br/>`[{\"Ref\":0.0,\"col_100X\":1001,\"col_200X\":2001},{\"Ref\":0.5,\"col_100X\":1002,\"col_200X\":2002},{\"Ref\":1.0,\"col_100X\":1003,\"col_200X\":2003},{\"Ref\":1.5,\"col_100X\":1004,\"col_200X\":2004},{\"Ref\":2.0,\"col_100X\":1005,\"col_200X\":2005}]`<br/> \n* values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> ", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetLogDataResponse" + }, + "example": "{\"columns\":[\"Ref\",\"col_100X\",\"col_200X\"],\"index\":[0,1,2,3,4],\"data\":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}" + } + } + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "post": { + "tags": [ + "Log" + ], + "summary": "Writes the specified data to the log (atomic).", + "description": "Overwrite if exists", + "operationId": "write_log_data", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "required": false, + "schema": { + "title": "Orient", + "type": "string", + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "default": "split" + }, + "name": "orient", + "in": "query" + }, + { + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.", + "required": false, + "schema": { + "title": "Bulk-Path", + "type": "string", + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log." 
+ }, + "name": "bulk-path", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ], + "requestBody": { + "description": "Write log bulk data.\nIt uses [Pandas.Dataframe json format](https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.to_json.html).\n Here're examples for data with 5 rows and 3 columns with different _orient_: \n* split: <br/>`{\"columns\":[\"Ref\",\"col_100X\",\"col_200X\"],\"index\":[0,1,2,3,4],\"data\":[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]}`<br/> \n* index: <br/>`{\"0\":{\"Ref\":0.0,\"col_100X\":1001,\"col_200X\":2001},\"1\":{\"Ref\":0.5,\"col_100X\":1002,\"col_200X\":2002},\"2\":{\"Ref\":1.0,\"col_100X\":1003,\"col_200X\":2003},\"3\":{\"Ref\":1.5,\"col_100X\":1004,\"col_200X\":2004},\"4\":{\"Ref\":2.0,\"col_100X\":1005,\"col_200X\":2005}}`<br/> \n* columns: <br/>`{\"Ref\":{\"0\":0.0,\"1\":0.5,\"2\":1.0,\"3\":1.5,\"4\":2.0},\"col_100X\":{\"0\":1001,\"1\":1002,\"2\":1003,\"3\":1004,\"4\":1005},\"col_200X\":{\"0\":2001,\"1\":2002,\"2\":2003,\"3\":2004,\"4\":2005}}`<br/> \n* records: <br/>`[{\"Ref\":0.0,\"col_100X\":1001,\"col_200X\":2001},{\"Ref\":0.5,\"col_100X\":1002,\"col_200X\":2002},{\"Ref\":1.0,\"col_100X\":1003,\"col_200X\":2003},{\"Ref\":1.5,\"col_100X\":1004,\"col_200X\":2004},{\"Ref\":2.0,\"col_100X\":1005,\"col_200X\":2005}]`<br/> \n* values: <br/>`[[0.0,1001,2001],[0.5,1002,2002],[1.0,1003,2003],[1.5,1004,2004],[2.0,1005,2005]]`<br/> ", + "required": true, + "content": { + "application/json": { + "schema": { + "example": { + "columns": [ + "Ref", + "col_100X", + "col_200X" + ], + "index": [ + 0, + 1, + 2, + 3, + 4 + ], + "data": [ + [ + 0.0, + 1001, + 2001 + ], + [ + 0.5, + 1002, + 2002 + ], + [ + 1.0, + 1003, + 2003 + ], + [ + 1.5, + 1004, + 2004 + ], + [ + 2.0, + 1005, + 2005 + ] + ] + }, + "oneOf": [ + { + "title": "SplitFormat", + "type": "object", + "properties": { + "data": { + "title": "Data", + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "array", + "items": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + } + } + ] + }, + "columns": { + "title": "Columns", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + }, + "index": { + "title": "Index", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + } + }, + "required": [ + "data" + ] + }, + { + "title": "IndexFormat", + "type": "object", + "properties": { + "TODO": { + "title": "Todo", + "type": "string" + } + }, + "required": [ + 
"TODO" + ] + }, + { + "title": "ColumnFormat", + "type": "object", + "properties": { + "TODO": { + "title": "Todo", + "type": "string" + } + }, + "required": [ + "TODO" + ] + }, + { + "title": "RecordsFormat", + "type": "object", + "properties": { + "TODO": { + "title": "Todo", + "type": "string" + } + }, + "required": [ + "TODO" + ] + }, + { + "title": "ValuesFormat", + "type": "array", + "items": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + } + } + ] + } + } + } + } + } + }, + "/ddms/v2/logs/{logid}/upload_data": { + "post": { + "tags": [ + "Log" + ], + "summary": "Writes the data to the log. Support json file (then orient must be provided) and parquet", + "description": "Overwrite if exists", + "operationId": "upload_log_data", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "required": false, + "schema": { + "title": "Orient", + "type": "string", + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "default": "split" + }, + "name": "orient", + "in": "query" + }, + { + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.", + "required": false, + "schema": { + "title": "Bulk-Path", + "type": "string", + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log." + }, + "name": "bulk-path", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "multipart/form-data": { + "schema": { + "$ref": "#/components/schemas/Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "invalid request" + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logs/{logid}/statistics": { + "get": { + "tags": [ + "Log" + ], + "summary": "Data statistics", + "description": "This API will return count, mean, std, min, max and percentiles of each column", + "operationId": "get_log_data_statistics_ddms_v2_logs__logid__statistics_get", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.", + "required": false, + "schema": { + "title": "Bulk-Path", + "type": "string", + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log." 
+ }, + "name": "bulk-path", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GetStatisticResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/logs/{logid}/decimated": { + "get": { + "tags": [ + "Log" + ], + "summary": "Returns a decimated version of all data within the specified filters. Eventually consistent.", + "description": "TODO\n Note: row order is not preserved.", + "operationId": "get_log_decimated", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logid", + "type": "string" + }, + "name": "logid", + "in": "path" + }, + { + "description": "Number of division desired", + "required": false, + "schema": { + "title": "Quantiles", + "type": "integer", + "description": "Number of division desired" + }, + "name": "quantiles", + "in": "query" + }, + { + "description": "The start value for the log decimation", + "required": false, + "schema": { + "title": "Start", + "type": "number", + "description": "The start value for the log decimation" + }, + "name": "start", + "in": "query" + }, + { + "description": "The stop value for the log decimation", + "required": false, + "schema": { + "title": "Stop", + "type": "number", + "description": "The stop value for the log decimation" + }, + "name": "stop", + "in": "query" + }, + { + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "required": false, + "schema": { + "title": "Orient", + "type": "string", + "description": "define format when using JSON data is used. Value can be split, index, columns, records, values", + "default": "split" + }, + "name": "orient", + "in": "query" + }, + { + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log.", + "required": false, + "schema": { + "title": "Bulk-Path", + "type": "string", + "description": "The json path to the bulk reference (see https://goessner.net/articles/JsonPath/). Required for non wks:log." 
+ }, + "name": "bulk-path", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "404": { + "description": "log not found" + }, + "422": { + "description": "log is not compatible with decimation" + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets": { + "post": { + "tags": [ + "Dipset" + ], + "summary": "Create or update the DipSets using wks:dipSet:1.0.0 schema", + "operationId": "post_dipset", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Dipsets", + "type": "array", + "items": { + "$ref": "#/components/schemas/dipset" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "400": { + "description": "Missing mandatory parameter or unknown parameter" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets/{dipsetid}/versions/{version}": { + "get": { + "tags": [ + "Dipset" + ], + "summary": "Get the given version of DipSet using wks:dipset:1.0.0 schema", + "description": "\"Get the DipSet object using its **id**.", + "operationId": "get_dipset_version", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Version", + "type": "integer" + }, + "name": "version", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/dipset" + } + } + } + }, + "404": { + "description": "DipSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets/{dipsetid}/versions": { + "get": { + "tags": [ + "Dipset" + ], + "summary": "Get all versions of the dipset", + "operationId": "get_dipset_versions", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "description": "identifier of the data 
partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RecordVersions" + } + } + } + }, + "404": { + "description": "DipSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets/{dipsetid}": { + "get": { + "tags": [ + "Dipset" + ], + "summary": "Get the DipSet using wks:dipSet:1.0.0 schema", + "description": "Get the DipSet object using its **id**", + "operationId": "get_dipset", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/dipset" + } + } + } + }, + "404": { + "description": "DipSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Dipset" + ], + "summary": "Delete the DipSet. The API performs a logical deletion of the given record", + "operationId": "del_dipset", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "description": "Whether or not to delete records children", + "required": false, + "schema": { + "title": "Recursive", + "type": "boolean", + "description": "Whether or not to delete records children", + "default": false + }, + "name": "recursive", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "204": { + "description": "Record deleted successfully" + }, + "404": { + "description": "DipSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets/{dipsetid}/dips": { + "get": { + "tags": [ + "Dips" + ], + "summary": "Get dips", + "description": "Return dips from dipset from the given index until the given number of dips specifed in query parameters. 
\n If not specified returns all dips from dipset.", + "operationId": "get_dips", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Index", + "minimum": 0.0, + "type": "integer" + }, + "name": "index", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Limit", + "minimum": 0.0, + "type": "integer" + }, + "name": "limit", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "title": "Response Get Dips Ddms V2 Dipsets Dipsetid Dips Get", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + } + }, + "404": { + "description": "DipSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "post": { + "tags": [ + "Dips" + ], + "summary": "Define the dips of the dipset", + "description": "Replace previous dips by provided dips. Sort dips by reference and azimuth.", + "operationId": "post_dips", + "parameters": [ + { + "description": "The ID of the dipset", + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string", + "description": "The ID of the dipset" + }, + "name": "dipsetid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Dips", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "title": "Response Post Dips Ddms V2 Dipsets Dipsetid Dips Post", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets/{dipsetid}/dips/insert": { + "post": { + "tags": [ + "Dips" + ], + "summary": "insert dip in a dipset", + "description": "Insert dips in dipset. \n Existing dips are not replaced. \n Several dip can have same reference. 
\n Operation will sort by reference all dips in dipset (may modify dip indexes).", + "operationId": "insert_dips", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Dips", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "title": "Response Insert Dips Ddms V2 Dipsets Dipsetid Dips Insert Post", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets/{dipsetid}/dips/query": { + "get": { + "tags": [ + "Dips" + ], + "summary": "Query dip from dipset", + "description": "Search dip within reference interval and specific classification", + "operationId": "query_dip", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "description": "Min reference for the dips to search in the dipset", + "required": false, + "schema": { + "title": "Minreference", + "type": "number", + "description": "Min reference for the dips to search in the dipset" + }, + "name": "minReference", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Max reference for the dips to search in the dipset", + "type": "number" + }, + "name": "maxReference", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Classification for the dip to search in the dipset", + "type": "string" + }, + "name": "classification", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "title": "Response Query Dip Ddms V2 Dipsets Dipsetid Dips Query Get", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/v2/dipsets/{dipsetid}/dips/{index}": { + "get": { + "tags": [ + "Dips" + ], + "summary": "Get a dip at index", + "description": "\"Return dip from dipset at the given index", + "operationId": "get_dip_by_index", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Index", + "type": "integer" 
+ }, + "name": "index", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Dip" + } + } + } + }, + "404": { + "description": "DipSet or index not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "delete": { + "tags": [ + "Dips" + ], + "summary": "Delete a dip", + "description": "Removes the dip at index", + "operationId": "delete_dip_by_index", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Index", + "type": "integer" + }, + "name": "index", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "title": "Response Delete Dip By Index Ddms V2 Dipsets Dipsetid Dips Index Delete", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + } + }, + "404": { + "description": "DipSet or index not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + }, + "patch": { + "tags": [ + "Dips" + ], + "summary": "Update dip", + "description": "\"Update dip at index \n Operation will sort by reference all dips in dipset (may modify dip indexes).", + "operationId": "patch_dip", + "parameters": [ + { + "required": true, + "schema": { + "title": "Dipsetid", + "type": "string" + }, + "name": "dipsetid", + "in": "path" + }, + { + "required": true, + "schema": { + "title": "Index", + "type": "integer" + }, + "name": "index", + "in": "path" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/Dip" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "title": "Response Patch Dip Ddms V2 Dipsets Dipsetid Dips Index Patch", + "type": "array", + "items": { + "$ref": "#/components/schemas/Dip" + } + } + } + } + }, + "404": { + "description": "DipSet not found" + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + 
}, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query": { + "post": { + "tags": [ + "search" + ], + "summary": "Query", + "operationId": "query_ddms_query_post", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/QueryRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query_with_cursor": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor", + "operationId": "query_ddms_query_with_cursor_post", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/QueryRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbores": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor", + "description": "Get all Wellbores object. <p>The wellbore kind is\n *:wks:wellbore:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbores_post", + "parameters": [ + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbores/bydistance": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, CRS format: data.wellHeadWgs84", + "description": "Get all Wellbores object in a specific area. 
<p>The specific area will be define by a circle\n based on its center coordinates (lat, lon) and radius (meters) </p>\n <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbores_bydistance_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Latitude", + "type": "number" + }, + "name": "latitude", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Longitude", + "type": "number" + }, + "name": "longitude", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Distance", + "type": "integer" + }, + "name": "distance", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbores/byboundingbox": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, CRS format: data.wellHeadWgs84", + "description": "Get all Wellbores object in a specific area. <p>The specific area will be define by a square\n based on its top left coordinates (lat, lon) and its bottom right coordinates (log, lat) </p>\n <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbores_byboundingbox_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Latitude Top Left", + "type": "number" + }, + "name": "latitude_top_left", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Longitude Top Left", + "type": "number" + }, + "name": "longitude_top_left", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Latitude Bottom Right", + "type": "number" + }, + "name": "latitude_bottom_right", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Longitude Bottom Right", + "type": "number" + }, + "name": "longitude_bottom_right", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbores/bygeopolygon": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, CRS format: 
data.wellHeadWgs84", + "description": "Get all Wellbores object in a specific area. <p>The specific area will be define by a \n polygon based on each of its coordinates (lat, lon) with a minimum of three</p>\n <p>The wellbore kind is *:wks:wellbore:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbores_bygeopolygon_post", + "parameters": [ + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Points", + "type": "array", + "items": { + "$ref": "#/components/schemas/Point" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbore/{wellbore_id}/logsets": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, search logSets by wellbore ID", + "description": "Get all LogSets object using its relationship Wellbore ID. <p>All LogSets linked to this\n specific ID will be returned</p>\n <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbore__wellbore_id__logsets_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Id", + "type": "string" + }, + "name": "wellbore_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbores/{wellbore_attribute}/logsets": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, search logSets by wellbore attribute", + "description": "Get all LogSets object using a specific attribute of Wellbores. 
<p>All LogSets linked to Wellbores\n with this specific attribute will be returned</p>\n <p>The LogSet kind is *:wks:logSet:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbores__wellbore_attribute__logsets_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Attribute", + "type": "string" + }, + "name": "wellbore_attribute", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/logs": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, gets logs", + "description": "Get all Logs object. <p>The Logs kind is\n *:wks:log:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_logs_post", + "parameters": [ + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbore/{wellbore_id}/logs": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, search logs by wellbore ID", + "description": "Get all Logs object using its relationship Wellbore ID. 
<p>All Logs linked to this\n specific ID will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbore__wellbore_id__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Id", + "type": "string" + }, + "name": "wellbore_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbores/{wellbore_attribute}/logs": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, search logs by wellbore attribute", + "description": "Get all Logs object using a specific attribute of Wellbores. <p>All Logs linked to Wellbores\n with this specific attribute will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbores__wellbore_attribute__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Attribute", + "type": "string" + }, + "name": "wellbore_attribute", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/logset/{logset_id}/logs": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, search logs by logSet ID", + "description": "Get all Logs object using its relationship Logset ID. 
<p>All Logs linked to this\n specific ID will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_logset__logset_id__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logset Id", + "type": "string" + }, + "name": "logset_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/logsets/{logset_attribute}/logs": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, search logs by logSet attribute", + "description": "Get all Logs object using a specific attribute of LogSets. <p>All Logs linked to LogSets\n with this specific attribute will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_logsets__logset_attribute__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logset Attribute", + "type": "string" + }, + "name": "logset_attribute", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/query/wellbore/{wellbore_id}/markers": { + "post": { + "tags": [ + "search" + ], + "summary": "Query with cursor, search markers by wellbore ID", + "description": "Get all Markers object using its relationship Wellbore ID. 
<p>All Markers linked to this\n specific ID will be returned</p>\n <p>The Marker kind is *:wks:marker:* returns all records directly based on existing schemas</p>", + "operationId": "query_ddms_query_wellbore__wellbore_id__markers_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Id", + "type": "string" + }, + "name": "wellbore_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbores": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor", + "description": "Get all Wellbores IDs object. <p>The wellbore kind is\n *:wks:wellbore:* returns all records IDs IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbores_post", + "parameters": [ + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbores/bydistance": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, CRS format: data.wellHeadWgs84", + "description": "Get all Wellbores IDs IDs objects in a specific area. 
<p>The specific area will be define by a circle\n based on its center coordinates (lat, lon) and radius (meters) </p>\n <p>The wellbore kind is *:wks:wellbore:* returns all records IDs IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbores_bydistance_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Latitude", + "type": "number" + }, + "name": "latitude", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Longitude", + "type": "number" + }, + "name": "longitude", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Distance", + "type": "integer" + }, + "name": "distance", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbores/byboundingbox": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, CRS format: data.wellHeadWgs84", + "description": "Get all Wellbores IDs objects in a specific area. <p>The specific area will be define by a square\n based on its top left coordinates (lat, lon) and its bottom right coordinates (log, lat) </p>\n <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbores_byboundingbox_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Latitude Top Left", + "type": "number" + }, + "name": "latitude_top_left", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Longitude Top Left", + "type": "number" + }, + "name": "longitude_top_left", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Latitude Bottom Right", + "type": "number" + }, + "name": "latitude_bottom_right", + "in": "query" + }, + { + "required": true, + "schema": { + "title": "Longitude Bottom Right", + "type": "number" + }, + "name": "longitude_bottom_right", + "in": "query" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbores/bygeopolygon": { + "post": { + "tags": [ + "fast-search" + ], + 
"summary": "Query with cursor, CRS format: data.wellHeadWgs84", + "description": "Get all Wellbores IDs objects in a specific area. <p>The specific area will be define by a \n polygon based on each of its coordinates (lat, lon) with a minimum of three</p>\n <p>The wellbore kind is *:wks:wellbore:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbores_bygeopolygon_post", + "parameters": [ + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "title": "Points", + "type": "array", + "items": { + "$ref": "#/components/schemas/Point" + } + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbore/{wellbore_id}/logsets": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, search logSets IDs by wellbore ID", + "description": "Get all LogSets IDs objects using its relationship Wellbore ID. <p>All LogSets linked to this\n specific ID will be returned</p>\n <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbore__wellbore_id__logsets_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Id", + "type": "string" + }, + "name": "wellbore_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbores/{wellbore_attribute}/logsets": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, search logSets IDs by wellbore attribute", + "description": "Get all LogSets IDs objects using a specific attribute of Wellbores. 
<p>All LogSets linked to Wellbores\n with this specific attribute will be returned</p>\n <p>The LogSet kind is *:wks:logSet:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbores__wellbore_attribute__logsets_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Attribute", + "type": "string" + }, + "name": "wellbore_attribute", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/logs": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, gets logs", + "description": "Get all Logs object. <p>The Logs kind is\n *:wks:log:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_logs_post", + "parameters": [ + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbore/{wellbore_id}/logs": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, search logs IDs by wellbore ID", + "description": "Get all Logs IDs objects using its relationship Wellbore ID. 
<p>All Logs linked to this\n specific ID will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbore__wellbore_id__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Id", + "type": "string" + }, + "name": "wellbore_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbores/{wellbore_attribute}/logs": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, search logs IDs by wellbore attribute", + "description": "Get all Logs IDs objects using a specific attribute of Wellbores. <p>All Logs linked to Wellbores\n with this specific attribute will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbores__wellbore_attribute__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Attribute", + "type": "string" + }, + "name": "wellbore_attribute", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/logset/{logset_id}/logs": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, search logs IDs by logSet ID", + "description": "Get all Logs IDs objects using its relationship Logset ID. 
<p>All Logs linked to this\n specific ID will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_logset__logset_id__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logset Id", + "type": "string" + }, + "name": "logset_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/logsets/{logset_attribute}/logs": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, search logs IDs by logSet attribute", + "description": "Get all Logs IDs objects using a specific attribute of LogSets. <p>All Logs linked to LogSets\n with this specific attribute will be returned</p>\n <p>The Log kind is *:wks:log:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_logsets__logset_attribute__logs_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Logset Attribute", + "type": "string" + }, + "name": "logset_attribute", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/ddms/fastquery/wellbore/{wellbore_id}/markers": { + "post": { + "tags": [ + "fast-search" + ], + "summary": "Query with cursor, search markers IDs by wellbore ID", + "description": "Get all Markers IDs objects using its relationship Wellbore ID. 
<p>All Markers linked to this\n specific ID will be returned</p>\n <p>The Marker kind is *:wks:marker:* returns all records IDs directly based on existing schemas</p>", + "operationId": "query_ddms_fastquery_wellbore__wellbore_id__markers_post", + "parameters": [ + { + "required": true, + "schema": { + "title": "Wellbore Id", + "type": "string" + }, + "name": "wellbore_id", + "in": "path" + }, + { + "required": false, + "schema": { + "title": "Query", + "type": "string" + }, + "name": "query", + "in": "query" + }, + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": {} + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/log-recognition/family": { + "post": { + "tags": [ + "log-recognition" + ], + "summary": "Recognize family and unit", + "description": "Find the most probable family and unit using family assignment rule based catalogs. User defined catalog will have the priority.", + "operationId": "family", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GuessRequest" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GuessResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + }, + "/log-recognition/upload-catalog": { + "put": { + "tags": [ + "log-recognition" + ], + "summary": "Upload user-defined catalog with family assignment rules", + "description": "Upload user-defined catalog with family assignment rules for specific partition ID. \n If there is an existing catalog, it will be replaced. It takes maximum of 5 mins to replace the existing catalog. 
\n Hence, any call to retrieve the family should be made after 5 mins of uploading the catalog", + "operationId": "upload-catalog", + "parameters": [ + { + "description": "identifier of the data partition to query", + "required": false, + "schema": { + "title": "data partition id", + "minLength": 1, + "type": "string", + "description": "identifier of the data partition to query" + }, + "name": "data-partition-id", + "in": "header" + } + ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CatalogRecord" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateUpdateRecordsResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + }, + "security": [ + { + "OpenDESBearerToken": [] + } + ] + } + } + }, + "components": { + "schemas": { + "AboutResponse": { + "title": "AboutResponse", + "type": "object", + "properties": { + "service": { + "title": "Service", + "type": "string" + }, + "version": { + "title": "Version", + "type": "string" + }, + "buildNumber": { + "title": "Buildnumber", + "type": "string" + }, + "cloudEnvironment": { + "title": "Cloudenvironment", + "type": "string" + } + } + }, + "AboutResponseUser": { + "title": "AboutResponseUser", + "type": "object", + "properties": { + "tenant": { + "title": "Tenant", + "type": "string" + }, + "email": { + "title": "Email", + "type": "string" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post": { + "title": "Body_upload_log_data_file_ddms_v2_logs__logid__upload_data_post", + "required": [ + "file" + ], + "type": "object", + "properties": { + "file": { + "title": "File", + "type": "string", + "format": "binary" + } + } + }, + "ByBoundingBox": { + "title": "ByBoundingBox", + "required": [ + "topLeft", + "bottomRight" + ], + "type": "object", + "properties": { + "topLeft": { + "$ref": "#/components/schemas/Point" + }, + "bottomRight": { + "$ref": "#/components/schemas/Point" + } + } + }, + "ByDistance": { + "title": "ByDistance", + "required": [ + "point" + ], + "type": "object", + "properties": { + "distance": { + "title": "Distance", + "type": "number" + }, + "point": { + "$ref": "#/components/schemas/Point" + } + } + }, + "ByGeoPolygon": { + "title": "ByGeoPolygon", + "type": "object", + "properties": { + "points": { + "title": "Points", + "type": "array", + "items": { + "$ref": "#/components/schemas/Point" + } + } + } + }, + "Catalog": { + "title": "Catalog", + "required": [ + "family_catalog" + ], + "type": "object", + "properties": { + "family_catalog": { + "title": "Family Catalog", + "type": "array", + "items": { + "$ref": "#/components/schemas/CatalogItem" + } + }, + "main_family_catalog": { + "title": "Main Family Catalog", + "type": "array", + "items": { + "$ref": "#/components/schemas/MainFanilyCatalogItem" + } + } + } + }, + "CatalogItem": { + "title": "CatalogItem", + "required": [ + "unit", + "rule" + ], + "type": "object", + "properties": { + "unit": { + "title": "Unit", + "type": "string" + }, + "family": { + "title": "Family", + "type": "string", + "default": "" + }, + "rule": { + "title": "Rule", + "type": "string" + } + } + }, + 
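For orientation, a minimal Python sketch of how a client might exercise the fast-search and log-recognition routes defined above. The paths, the optional 'query' parameter, the 'data-partition-id' header and the GuessRequest fields are taken from this spec; the base URL, token, partition name, record ID and query string are placeholder assumptions (the spec does not fix them), and the requests package is assumed to be available.

# Illustrative sketch only: BASE_URL, the token, the partition name, the record ID and
# the query string below are placeholders, not values defined by this spec.
import requests

BASE_URL = "https://wellbore-ddms.example.com"   # assumed host
HEADERS = {
    "Authorization": "Bearer <token>",           # OpenDESBearerToken security scheme
    "data-partition-id": "opendes",              # optional header declared on every route
}

# Query with cursor: log IDs related to a given wellbore ID.
wellbore_id = "opendes:doc:wellbore-123"         # hypothetical record ID
resp = requests.post(
    f"{BASE_URL}/ddms/fastquery/wellbore/{wellbore_id}/logs",
    params={"query": "data.name:GR*"},           # optional free-text query parameter
    headers=HEADERS,
)
print(resp.status_code, resp.json())

# Recognize family and unit for a channel (GuessRequest body, reusing the example
# values shown in the GuessRequest schema).
resp = requests.post(
    f"{BASE_URL}/log-recognition/family",
    json={"label": "GRD", "log_unit": "GAPI", "description": "LDTD Gamma Ray"},
    headers=HEADERS,
)
print(resp.json())   # GuessResponse: family, family_type, log_unit, base_unit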
"CatalogRecord": { + "title": "CatalogRecord", + "required": [ + "acl", + "legal", + "data" + ], + "type": "object", + "properties": { + "acl": { + "$ref": "#/components/schemas/StorageAcl" + }, + "legal": { + "$ref": "#/components/schemas/odes_storage__models__Legal" + }, + "data": { + "$ref": "#/components/schemas/Catalog" + } + }, + "example": { + "acl": { + "viewers": [ + "abc@example.com, cde@example.com" + ], + "owners": [ + "abc@example.com, cde@example.com" + ] + }, + "legal": { + "legaltags": [ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries": [ + "US" + ] + }, + "data": { + "family_catalog": [ + { + "unit": "ohm.m", + "family": "Medium Resistivity", + "rule": "MEDR" + } + ], + "main_family_catalog": [ + { + "MainFamily": "Resistivity", + "Family": "Medium Resistivity", + "Unit": "OHMM" + } + ] + } + } + }, + "CreateUpdateRecordsResponse": { + "title": "CreateUpdateRecordsResponse", + "type": "object", + "properties": { + "recordCount": { + "title": "Recordcount", + "type": "integer" + }, + "recordIds": { + "title": "Recordids", + "type": "array", + "items": { + "type": "string" + } + }, + "skippedRecordIds": { + "title": "Skippedrecordids", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "DataType": { + "title": "DataType", + "enum": [ + "string", + "number", + "integer", + "boolean" + ], + "description": "An enumeration." + }, + "DataType_1": { + "title": "DataType_1", + "enum": [ + "string", + "number", + "integer", + "boolean" + ], + "description": "An enumeration." + }, + "DataType_2": { + "title": "DataType_2", + "enum": [ + "string", + "number", + "integer", + "boolean", + "date-time" + ], + "description": "An enumeration." + }, + "Dip": { + "title": "Dip", + "required": [ + "reference", + "azimuth", + "inclination" + ], + "type": "object", + "properties": { + "reference": { + "title": "Reference of the dip", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Only Measured Depth in meter is supported for the moment" + }, + "azimuth": { + "title": "Azimuth value of the dip", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Only degrees unit is supported for the moment" + }, + "inclination": { + "title": "Inclination value of the dip", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Only degrees unit is supported for the moment" + }, + "quality": { + "title": "Quality of the dip", + "exclusiveMaximum": 1.0, + "exclusiveMinimum": 0.0, + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Decimal number between 0 and 1" + }, + "xCoordinate": { + "title": "The X coordinate of the dip", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Only meter unit is supported for the moment" + }, + "yCoordinate": { + "title": "The Y coordinate of the dip", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Only meter unit is supported for the moment" + }, + "zCoordinate": { + "title": "The Z coordinate of the dip", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Only meter unit is supported for the moment" + }, + "classification": { + "title": "Classification of the dip", + "type": "string", + "description": "Any string is accepted." 
+ } + }, + "example": { + "reference": { + "unitKey": "meter", + "value": 1000.5 + }, + "azimuth": { + "unitKey": "dega", + "value": 42 + }, + "inclination": { + "unitKey": "dega", + "value": 9 + }, + "quality": { + "unitKey": "unitless", + "value": 0.5 + }, + "xCoordinate": { + "unitKey": "meter", + "value": 2 + }, + "yCoordinate": { + "unitKey": "meter", + "value": 45 + }, + "zCoordinate": { + "unitKey": "meter", + "value": 7 + }, + "classification": "fracture" + } + }, + "DirectionWell": { + "title": "DirectionWell", + "enum": [ + "huff-n-puff", + "injector", + "producer", + "uncertain", + "unknown" + ], + "description": "An enumeration." + }, + "FluidWell": { + "title": "FluidWell", + "enum": [ + "air", + "condensate", + "dry", + "gas", + "gas-water", + "non HC gas", + "non HC gas -- CO2", + "oil", + "oil-gas", + "oil-water", + "steam", + "water", + "water -- brine", + "water -- fresh water", + "unknown" + ], + "description": "An enumeration." + }, + "Format": { + "title": "Format", + "enum": [ + "date", + "date-time", + "time", + "byte", + "binary", + "boolean", + "email", + "uuid", + "uri", + "int8", + "int16", + "int32", + "int64", + "float32", + "float64", + "float128" + ], + "description": "An enumeration." + }, + "Format_1": { + "title": "Format_1", + "enum": [ + "date", + "date-time", + "time", + "byte", + "binary", + "boolean", + "email", + "uuid", + "uri", + "int8", + "int16", + "int32", + "int64", + "float32", + "float64", + "float128" + ], + "description": "An enumeration." + }, + "Format_2": { + "title": "Format_2", + "enum": [ + "date", + "date-time", + "time", + "byte", + "binary", + "email", + "uuid", + "uri", + "int8", + "int16", + "int32", + "int64", + "float32", + "float64", + "float128" + ], + "description": "An enumeration." + }, + "GeoJsonFeature": { + "title": "GeoJsonFeature", + "required": [ + "geometry", + "properties", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + } + }, + "geometry": { + "title": "Geometry", + "anyOf": [ + { + "$ref": "#/components/schemas/GeoJsonPoint" + }, + { + "$ref": "#/components/schemas/GeoJsonMultiPoint" + }, + { + "$ref": "#/components/schemas/GeoJsonLineString" + }, + { + "$ref": "#/components/schemas/GeoJsonMultiLineString" + }, + { + "$ref": "#/components/schemas/Polygon" + }, + { + "$ref": "#/components/schemas/GeoJsonMultiPolygon" + }, + { + "$ref": "#/components/schemas/geometryItem" + } + ] + }, + "properties": { + "title": "Properties", + "type": "object" + }, + "type": { + "$ref": "#/components/schemas/Type_1" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GeoJsonFeatureCollection": { + "title": "GeoJsonFeatureCollection", + "required": [ + "features", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + } + }, + "features": { + "title": "Features", + "type": "array", + "items": { + "$ref": "#/components/schemas/GeoJsonFeature" + } + }, + "type": { + "$ref": "#/components/schemas/Type_2" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GeoJsonLineString": { + "title": "GeoJsonLineString", + "required": [ + "coordinates", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": 
"number" + } + }, + "coordinates": { + "title": "Coordinates", + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + }, + "type": { + "$ref": "#/components/schemas/Type_3" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GeoJsonMultiLineString": { + "title": "GeoJsonMultiLineString", + "required": [ + "coordinates", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + } + }, + "coordinates": { + "title": "Coordinates", + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + }, + "type": { + "$ref": "#/components/schemas/Type_4" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GeoJsonMultiPoint": { + "title": "GeoJsonMultiPoint", + "required": [ + "coordinates", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + } + }, + "coordinates": { + "title": "Coordinates", + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + }, + "type": { + "$ref": "#/components/schemas/Type_5" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GeoJsonMultiPolygon": { + "title": "GeoJsonMultiPolygon", + "required": [ + "coordinates", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + }, + "description": "Bounding box in longitude, latitude WGS 84." + }, + "coordinates": { + "title": "Coordinates", + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + }, + "description": "Array of polygons (minimum 2D), containing an array of point coordinates (longitude, latitude, (optionally elevation and other properties)." 
+ }, + "type": { + "$ref": "#/components/schemas/Type_6" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GeoJsonPoint": { + "title": "GeoJsonPoint", + "required": [ + "coordinates", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + } + }, + "coordinates": { + "title": "Coordinates", + "type": "array", + "items": { + "type": "number" + } + }, + "type": { + "$ref": "#/components/schemas/Type_7" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GetStatisticResponse": { + "title": "GetStatisticResponse", + "required": [ + "columns" + ], + "type": "object", + "properties": { + "columns": { + "title": "Columns", + "type": "array", + "items": { + "$ref": "#/components/schemas/StatsColumn" + } + } + } + }, + "GuessRequest": { + "title": "GuessRequest", + "required": [ + "label" + ], + "type": "object", + "properties": { + "label": { + "title": "Label", + "type": "string" + }, + "log_unit": { + "title": "Log Unit", + "type": "string" + }, + "description": { + "title": "Description", + "type": "string" + } + }, + "example": { + "label": "GRD", + "log_unit": "GAPI", + "description": "LDTD Gamma Ray" + } + }, + "GuessResponse": { + "title": "GuessResponse", + "type": "object", + "properties": { + "family": { + "title": "Family", + "type": "string" + }, + "family_type": { + "title": "Family Type", + "type": "string" + }, + "log_unit": { + "title": "Log Unit", + "type": "string" + }, + "base_unit": { + "title": "Base Unit", + "type": "string" + } + } + }, + "HTTPValidationError": { + "title": "HTTPValidationError", + "type": "object", + "properties": { + "errors": { + "title": "Errors", + "type": "array", + "items": { + "$ref": "#/components/schemas/ValidationError" + } + } + } + }, + "Kind": { + "title": "Kind", + "enum": [ + "CRS", + "Unit", + "Measurement", + "AzimuthReference", + "DateTime" + ], + "type": "string", + "description": "An enumeration." + }, + "LinkList": { + "title": "LinkList", + "type": "object", + "properties": {}, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "MainFanilyCatalogItem": { + "title": "MainFanilyCatalogItem", + "required": [ + "MainFamily", + "Family", + "Unit" + ], + "type": "object", + "properties": { + "MainFamily": { + "title": "Mainfamily", + "type": "string" + }, + "Family": { + "title": "Family", + "type": "string" + }, + "Unit": { + "title": "Unit", + "type": "string" + } + } + }, + "MetaItem": { + "title": "MetaItem", + "required": [ + "kind", + "persistableReference" + ], + "type": "object", + "properties": { + "kind": { + "title": "Reference Kind", + "allOf": [ + { + "$ref": "#/components/schemas/Kind" + } + ], + "description": "The kind of reference, unit, measurement, CRS or azimuth reference." 
+ }, + "name": { + "title": "Name or Symbol", + "type": "string", + "description": "The name of the CRS or the symbol/name of the unit" + }, + "persistableReference": { + "title": "Persistable Reference", + "type": "string", + "description": "The persistable reference string uniquely identifying the CRS or Unit" + }, + "propertyNames": { + "title": "Attribute Names", + "type": "array", + "items": { + "type": "string" + }, + "description": "The list of property names, to which this meta data item provides Unit/CRS context to. Data structures, which come in a single frame of reference, can register the property name, others require a full path like \"data.structureA.propertyB\" to define a unique context." + }, + "propertyValues": { + "title": "Attribute Names", + "type": "array", + "items": { + "type": "string" + }, + "description": "The list of property values, to which this meta data item provides Unit/CRS context to. Typically a unit symbol is a value to a data structure; this symbol is then registered in this propertyValues array and the persistableReference provides the absolute reference." + }, + "uncertainty": { + "title": "Uncertainty", + "type": "number", + "description": "The uncertainty of the values measured given the unit or CRS unit." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "PlssLocation": { + "title": "PlssLocation", + "required": [ + "range", + "section", + "township" + ], + "type": "object", + "properties": { + "aliquotPart": { + "title": "Aliquot Part", + "type": "string", + "description": "A terse, hierarchical reference to a piece of land, in which successive subdivisions of some larger area." + }, + "range": { + "title": "Range", + "type": "string", + "description": "Range, also known as Rng, R; a measure of the distance east or west from a referenced principal meridian, in units of six miles." + }, + "section": { + "title": "Section Number", + "type": "integer", + "description": "Section number (between 1 and 36)" + }, + "township": { + "title": "Township", + "type": "string", + "description": "Township, also known as T or Twp; (1) Synonym for survey township, i.e., a square parcel of land of 36 square miles, or (2) A measure of the distance north or south from a referenced baseline, in units of six miles" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "Point": { + "title": "Point", + "type": "object", + "properties": { + "latitude": { + "title": "Latitude", + "type": "number" + }, + "longitude": { + "title": "Longitude", + "type": "number" + } + } + }, + "Point3dNonGeoJson": { + "title": "Point3dNonGeoJson", + "required": [ + "coordinates", + "crsKey", + "unitKey" + ], + "type": "object", + "properties": { + "coordinates": { + "title": "3D Point", + "type": "array", + "items": { + "type": "number" + }, + "description": "3-dimensional point; the first coordinate is typically pointing east (easting or longitude), the second coordinate typically points north (northing or latitude). The third coordinate is an elevation (upwards positive, downwards negative). The point's CRS is given by the container." + }, + "crsKey": { + "title": "CRS Key", + "type": "string", + "description": "The 'crsKey', which can be looked up in the 'frameOfReference.crs' for further details." 
+ }, + "unitKey": { + "title": "Unit Key", + "type": "string", + "description": "The 'unitKey' for the 3rd coordinate, which can be looked up in the 'frameOfReference.unit' for further details." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "Polygon": { + "title": "Polygon", + "required": [ + "coordinates", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + } + }, + "coordinates": { + "title": "Coordinates", + "type": "array", + "items": { + "type": "array", + "items": { + "type": "array", + "items": { + "type": "number" + } + } + } + }, + "type": { + "$ref": "#/components/schemas/Type_8" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "QueryRequest": { + "title": "QueryRequest", + "required": [ + "kind" + ], + "type": "object", + "properties": { + "kind": { + "title": "Kind", + "type": "string" + }, + "limit": { + "title": "Limit", + "type": "integer" + }, + "query": { + "title": "Query", + "type": "string" + }, + "returnedFields": { + "title": "Returnedfields", + "type": "array", + "items": { + "type": "string" + } + }, + "sort": { + "$ref": "#/components/schemas/SortQuery" + }, + "queryAsOwner": { + "title": "Queryasowner", + "type": "boolean" + }, + "spatialFilter": { + "$ref": "#/components/schemas/SpatialFilter" + }, + "offset": { + "title": "Offset", + "type": "integer" + } + } + }, + "RecordVersions": { + "title": "RecordVersions", + "type": "object", + "properties": { + "recordId": { + "title": "Recordid", + "type": "string" + }, + "versions": { + "title": "Versions", + "type": "array", + "items": { + "type": "integer" + } + } + } + }, + "ReferenceType": { + "title": "ReferenceType", + "enum": [ + "Date", + "Date Time", + "Measured Depth", + "Core depth", + "True Vertical Depth", + "True Vertical Depth Sub Sea", + "One-Way Time", + "Two-Way Time" + ], + "description": "An enumeration." + }, + "Shape": { + "title": "Shape", + "enum": [ + "build and hold", + "deviated", + "double kickoff", + "horizontal", + "S-shaped", + "vertical", + "unknown" + ], + "description": "An enumeration." + }, + "SimpleElevationReference": { + "title": "SimpleElevationReference", + "required": [ + "elevationFromMsl" + ], + "type": "object", + "properties": { + "elevationFromMsl": { + "title": "Elevation from MSL", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The elevation above mean sea level (MSL), at which the vertical origin is 0.0. The 'unitKey' is further defined in 'frameOfReference.units'." + }, + "name": { + "title": "Elevation Reference Name", + "type": "string", + "description": "The name of the Elevation Reference." 
+ } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "SortQuery": { + "title": "SortQuery", + "type": "object", + "properties": { + "field": { + "title": "Field", + "type": "array", + "items": { + "type": "string" + } + }, + "order": { + "title": "Order", + "anyOf": [ + { + "const": "ASC", + "type": "string" + }, + { + "const": "DESC", + "type": "string" + } + ] + } + } + }, + "SpatialFilter": { + "title": "SpatialFilter", + "type": "object", + "properties": { + "field": { + "title": "Field", + "type": "string" + }, + "byBoundingBox": { + "$ref": "#/components/schemas/ByBoundingBox" + }, + "byDistance": { + "$ref": "#/components/schemas/ByDistance" + }, + "byGeoPolygon": { + "$ref": "#/components/schemas/ByGeoPolygon" + } + } + }, + "StatsColumn": { + "title": "StatsColumn", + "required": [ + "count", + "mean", + "std", + "min", + "25%", + "50%", + "75%", + "max" + ], + "type": "object", + "properties": { + "count": { + "title": "Count", + "type": "integer", + "description": "Count number of non-NA/null observations" + }, + "mean": { + "title": "Mean", + "type": "number", + "description": "Mean of the values" + }, + "std": { + "title": "Std", + "type": "number", + "description": "Standard deviation of the observations" + }, + "min": { + "title": "Min", + "type": "number", + "description": "Minimum of the values in the object" + }, + "25%": { + "title": "25%", + "type": "number" + }, + "50%": { + "title": "50%", + "type": "number" + }, + "75%": { + "title": "75%", + "type": "number" + }, + "max": { + "title": "Max", + "type": "number", + "description": "Maximum of the values in the object" + } + } + }, + "StorageAcl": { + "title": "StorageAcl", + "required": [ + "viewers", + "owners" + ], + "type": "object", + "properties": { + "viewers": { + "title": "Viewers", + "type": "array", + "items": { + "type": "string" + } + }, + "owners": { + "title": "Owners", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "TagDictionary": { + "title": "TagDictionary", + "type": "object", + "properties": {}, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "ToOneRelationship": { + "title": "ToOneRelationship", + "type": "object", + "properties": { + "confidence": { + "title": "Relationship Confidence", + "type": "number", + "description": "The confidence of the relationship. If the property is absent a well-known relation is implied." + }, + "id": { + "title": "Related Object Id", + "type": "string", + "description": "The id of the related object in the Data Ecosystem. If set, the id has priority over the natural key in the name property." + }, + "name": { + "title": "Related Object Name", + "type": "string", + "description": "The name or natural key of the related object. This property is required if the target object id could not (yet) be identified." + }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of the related entity. If no version number is specified, the last version is implied." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "Type": { + "title": "Type", + "enum": [ + "GeometryCollection" + ], + "description": "An enumeration." + }, + "Type_1": { + "title": "Type_1", + "enum": [ + "Feature" + ], + "description": "An enumeration." 
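To show how the search-related schemas fit together, a hedged example of a QueryRequest body combining SortQuery, SpatialFilter and ByBoundingBox/Point as defined above. The kind wildcard follows the *:wks:log:* convention used in the route descriptions; the spatial field name, query text and coordinates are illustrative assumptions.

# Hypothetical QueryRequest body; only "kind" is required by the schema, and every
# concrete value below (field names, query text, coordinates) is illustrative.
query_request = {
    "kind": "*:wks:log:*",
    "query": "data.name:GR*",
    "limit": 10,
    "sort": {"field": ["data.name"], "order": "ASC"},          # SortQuery
    "spatialFilter": {                                          # SpatialFilter
        "field": "data.wellboreLocation",                       # assumed indexed geo field
        "byBoundingBox": {                                      # ByBoundingBox of two Points
            "topLeft": {"latitude": 61.0, "longitude": 4.0},
            "bottomRight": {"latitude": 59.0, "longitude": 6.0},
        },
    },
}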
+ }, + "Type_2": { + "title": "Type_2", + "enum": [ + "FeatureCollection" + ], + "description": "An enumeration." + }, + "Type_3": { + "title": "Type_3", + "enum": [ + "LineString" + ], + "description": "An enumeration." + }, + "Type_4": { + "title": "Type_4", + "enum": [ + "MultiLineString" + ], + "description": "An enumeration." + }, + "Type_5": { + "title": "Type_5", + "enum": [ + "MultiPoint" + ], + "description": "An enumeration." + }, + "Type_6": { + "title": "Type_6", + "enum": [ + "MultiPolygon" + ], + "description": "An enumeration." + }, + "Type_7": { + "title": "Type_7", + "enum": [ + "Point" + ], + "description": "An enumeration." + }, + "Type_8": { + "title": "Type_8", + "enum": [ + "Polygon" + ], + "description": "An enumeration." + }, + "V1AboutResponse": { + "title": "V1AboutResponse", + "type": "object", + "properties": { + "user": { + "$ref": "#/components/schemas/AboutResponseUser" + }, + "dmsInfo": { + "$ref": "#/components/schemas/V1DmsInfo" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "V1DmsInfo": { + "title": "V1DmsInfo", + "type": "object", + "properties": { + "kinds": { + "title": "Kinds", + "type": "array", + "items": { + "type": "string" + } + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "ValidationError": { + "title": "ValidationError", + "required": [ + "loc", + "msg", + "type" + ], + "type": "object", + "properties": { + "loc": { + "title": "Location", + "type": "array", + "items": { + "type": "string" + } + }, + "msg": { + "title": "Message", + "type": "string" + }, + "type": { + "title": "Error Type", + "type": "string" + } + } + }, + "ValueWithUnit": { + "title": "ValueWithUnit", + "required": [ + "unitKey", + "value" + ], + "type": "object", + "properties": { + "unitKey": { + "title": "Unit Key", + "type": "string", + "description": "Unit for value of the corresponding attribute for the domain object in question. The key can be looked up in the 'frameOfReference.units' for further details." + }, + "value": { + "title": "Value", + "type": "number", + "description": "Value of the corresponding attribute for the domain object in question." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "VersionDetailsResponse": { + "title": "VersionDetailsResponse", + "type": "object", + "properties": { + "service": { + "title": "Service", + "type": "string" + }, + "version": { + "title": "Version", + "type": "string" + }, + "buildNumber": { + "title": "Buildnumber", + "type": "string" + }, + "details": { + "title": "Details", + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "WellLocationType": { + "title": "WellLocationType", + "enum": [ + "Onshore", + "Offshore", + "unknown" + ], + "description": "An enumeration." 
+ }, + "WellPurpose": { + "title": "WellPurpose", + "enum": [ + "appraisal", + "appraisal -- confirmation appraisal", + "appraisal -- exploratory appraisal", + "exploration", + "exploration -- deeper-pool wildcat", + "exploration -- new-field wildcat", + "exploration -- new-pool wildcat", + "exploration -- outpost wildcat", + "exploration -- shallower-pool wildcat", + "development", + "development -- infill development", + "development -- injector", + "development -- producer", + "fluid storage", + "fluid storage -- gas storage", + "general srvc", + "general srvc -- borehole re-acquisition", + "general srvc -- observation", + "general srvc -- relief", + "general srvc -- research", + "general srvc -- research -- drill test", + "general srvc -- research -- strat test", + "general srvc -- waste disposal", + "mineral", + "unknown" + ], + "description": "An enumeration." + }, + "WellStatus": { + "title": "WellStatus", + "enum": [ + "abandoned", + "active", + "active -- injecting", + "active -- producing", + "completed", + "drilling", + "partially plugged", + "permitted", + "plugged and abandoned", + "proposed", + "sold", + "suspended", + "temporarily abandoned", + "testing", + "tight", + "working over", + "unknown" + ], + "description": "An enumeration." + }, + "WellType": { + "title": "WellType", + "enum": [ + "bypass", + "initial", + "redrill", + "reentry", + "respud", + "sidetrack", + "unknown" + ], + "description": "An enumeration." + }, + "WellborePurpose": { + "title": "WellborePurpose", + "enum": [ + "appraisal", + "appraisal -- confirmation appraisal", + "appraisal -- exploratory appraisal", + "exploration", + "exploration -- deeper-pool wildcat", + "exploration -- new-field wildcat", + "exploration -- new-pool wildcat", + "exploration -- outpost wildcat", + "exploration -- shallower-pool wildcat", + "development", + "development -- infill development", + "development -- injector", + "development -- producer", + "fluid storage", + "fluid storage -- gas storage", + "general srvc", + "general srvc -- borehole re-acquisition", + "general srvc -- observation", + "general srvc -- relief", + "general srvc -- research", + "general srvc -- research -- drill test", + "general srvc -- research -- strat test", + "general srvc -- waste disposal", + "mineral", + "unknown" + ], + "description": "An enumeration." + }, + "WellboreStatus": { + "title": "WellboreStatus", + "enum": [ + "abandoned", + "active", + "active -- injecting", + "active -- producing", + "completed", + "drilling", + "partially plugged", + "permitted", + "plugged and abandoned", + "proposed", + "sold", + "suspended", + "temporarily abandoned", + "testing", + "tight", + "working over", + "unknown" + ], + "description": "An enumeration." + }, + "WellboreType": { + "title": "WellboreType", + "enum": [ + "bypass", + "initial", + "redrill", + "reentry", + "respud", + "sidetrack", + "unknown" + ], + "description": "An enumeration." + }, + "app__model__model_curated__Legal": { + "title": "Legal", + "type": "object", + "properties": { + "legaltags": { + "title": "Legal Tags", + "type": "array", + "items": { + "type": "string" + }, + "description": "The list of legal tags, see compliance API." + }, + "otherRelevantDataCountries": { + "title": "Other Relevant Data Countries", + "type": "array", + "items": { + "type": "string" + }, + "description": "The list of other relevant data countries using the ISO 2-letter codes, see compliance API." + }, + "status": { + "title": "Legal Status", + "type": "string", + "description": "The legal status." 
+ } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "basinContext": { + "title": "basinContext", + "type": "object", + "properties": { + "basinCode": { + "title": "Basin Code", + "type": "string", + "description": "The code of the basin in which the well is located." + }, + "basinName": { + "title": "Basin Name", + "type": "string", + "description": "The name of the basin in which the well is located." + }, + "subBasinCode": { + "title": "Sub-Basin Code", + "type": "string", + "description": "The code of the sub-basin in which the well is located." + }, + "subBasinName": { + "title": "Sub-Basin Name", + "type": "string", + "description": "The name of the sub-basin in which the well is located." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "channel": { + "title": "channel", + "type": "object", + "properties": { + "absentValue": { + "title": "Absent Value", + "type": "string", + "description": "Optional field carrying the absent value as string for this channel." + }, + "dataType": { + "title": "Data Type", + "allOf": [ + { + "$ref": "#/components/schemas/DataType" + } + ], + "description": "The log value type (per log sample). The 'format' property may contain further hints about data type presentation.", + "default": "number" + }, + "dimension": { + "title": "Dimension", + "type": "integer", + "description": "The dimension of this log or channel" + }, + "family": { + "title": "Log Family", + "type": "string", + "description": "The log family code of this log or channel (optional)" + }, + "familyType": { + "title": "Log Family Type", + "type": "string", + "description": "The log family type code of this log or channel. Example: 'Neutron Porosity' for 'Thermal Neutron Porosity Sandstone'. (optional)" + }, + "format": { + "title": "Format Hint", + "allOf": [ + { + "$ref": "#/components/schemas/Format" + } + ], + "description": "Optional format hint how to treat the log values as strings or number of bits per 'dataType'.", + "default": "float32" + }, + "logstoreId": { + "title": "Logstore ID", + "type": "number", + "description": "The id of this log or channel in the Logstore. This property is not present in the index channel." + }, + "bulkURI": { + "title": "bulk URI", + "type": "string", + "description": "bulkURI either URL or URN." + }, + "longName": { + "title": "Log Long Name", + "type": "string", + "description": "The long name of this log or channel" + }, + "mnemonic": { + "title": "Mnemonic", + "type": "string", + "description": "The mnemonic of this log or channel" + }, + "name": { + "title": "Log Name", + "type": "string", + "description": "The name of this log or channel." + }, + "properties": { + "title": "Named Properties", + "type": "array", + "items": { + "$ref": "#/components/schemas/namedProperty" + }, + "description": "The named properties of this log or channel." + }, + "source": { + "title": "Source", + "type": "string", + "description": "The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated." + }, + "unitKey": { + "title": "Unit", + "type": "string", + "description": "The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition." 
+ } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "core_dl_geopoint": { + "title": "core_dl_geopoint", + "required": [ + "latitude", + "longitude" + ], + "type": "object", + "properties": { + "latitude": { + "title": "Latitude", + "maximum": 90.0, + "minimum": -90.0, + "type": "number", + "description": "The latitude value in degrees of arc (dega). Value range [-90, 90]." + }, + "longitude": { + "title": "Longitude", + "maximum": 180.0, + "minimum": -180.0, + "type": "number", + "description": "The longitude value in degrees of arc (dega). Value range [-180, 180]" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "dipSetData": { + "title": "dipSetData", + "type": "object", + "properties": { + "azimuthReference": { + "title": "Azimuth Reference Code", + "type": "string", + "description": "Azimuth reference code defining the type of North. Only used for dipSets with azimuth data" + }, + "classification": { + "title": "Log Set Classification", + "type": "string", + "description": "The well-known log set classification code.", + "default": "Externally Processed LogSet" + }, + "dateCreated": { + "title": "Creation Date and Time", + "type": "string", + "description": "The UTC date time of the entity creation", + "format": "date-time" + }, + "dateModified": { + "title": "Last Modification Date and Time", + "type": "string", + "description": "The UTC date time of the last entity modification", + "format": "date-time" + }, + "elevationReference": { + "$ref": "#/components/schemas/SimpleElevationReference" + }, + "externalIds": { + "title": "Array of External IDs", + "type": "array", + "items": { + "type": "string" + }, + "description": "An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity." + }, + "name": { + "title": "Dip Set Name", + "type": "string", + "description": "The name of this dip set" + }, + "operation": { + "title": "Operation", + "type": "string", + "description": "The operation which created this entity" + }, + "reference": { + "$ref": "#/components/schemas/channel" + }, + "referenceType": { + "title": "Reference Type", + "type": "string", + "description": "The reference index type of the dip set." + }, + "relationships": { + "$ref": "#/components/schemas/dipsetrelationships" + }, + "start": { + "$ref": "#/components/schemas/ValueWithUnit" + }, + "step": { + "$ref": "#/components/schemas/ValueWithUnit" + }, + "stop": { + "$ref": "#/components/schemas/ValueWithUnit" + }, + "bulkURI": { + "title": "bulk URI", + "type": "string", + "description": "bulkURI either URL or URN." + } + }, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "dipset": { + "title": "dipset", + "type": "object", + "properties": { + "acl": { + "title": "Access Control List", + "allOf": [ + { + "$ref": "#/components/schemas/TagDictionary" + } + ], + "description": "The access control tags associated with this entity." + }, + "ancestry": { + "title": "Ancestry", + "allOf": [ + { + "$ref": "#/components/schemas/LinkList" + } + ], + "description": "The links to data, which constitute the inputs." 
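A hedged illustration of a single entry shaped after the 'channel' schema above, as it could appear under a log set's data; the mnemonic, unit and family values are invented, while 'dataType' and 'format' show the defaults declared by the schema.

# Hypothetical channel entry; only the key names and the two defaults come from the schema.
gamma_ray_channel = {
    "name": "Gamma Ray",
    "mnemonic": "GR",          # assumed mnemonic
    "unitKey": "gAPI",         # key to be looked up in frameOfReference.units
    "dataType": "number",      # schema default
    "format": "float32",       # schema default
    "dimension": 1,
    "family": "Gamma Ray",     # assumed log family code
}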
+ }, + "data": { + "title": "Dip Set Data", + "allOf": [ + { + "$ref": "#/components/schemas/dipSetData" + } + ], + "description": "dipset data" + }, + "id": { + "title": "Dip Set ID", + "type": "string", + "description": "The unique identifier of the dip set" + }, + "kind": { + "title": "Dip Set Kind", + "type": "string", + "description": "Kind specification", + "default": "osdu:wks:dipSet:0.0.1" + }, + "legal": { + "title": "Legal Tags", + "allOf": [ + { + "$ref": "#/components/schemas/app__model__model_curated__Legal" + } + ], + "description": "The dip-set's legal tags" + }, + "meta": { + "title": "Frame of Reference Meta Data", + "type": "array", + "items": { + "$ref": "#/components/schemas/MetaItem" + }, + "description": "The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)" + }, + "type": { + "title": "Entity Type", + "type": "string", + "description": "The reference entity type as declared in common:metadata:entity:*." + }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of this dip set; set by the framework." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "dipsetrelationships": { + "title": "dipsetrelationships", + "required": [ + "wellbore" + ], + "type": "object", + "properties": { + "well": { + "title": "Well", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The well to which this dipSet belongs. Only required if the wellbore is unknown." + }, + "wellbore": { + "title": "Wellbore", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The wellbore to which this dipSet belongs." + }, + "wellboreSection": { + "title": "Wellbore Section", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The wellbore section to which this dipSet belongs." + }, + "referenceLog": { + "title": "True dip azimuth log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The true dip azimuth log of the dipset." + }, + "trueDipAzimuthLog": { + "title": "True dip azimuth log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The true dip azimuth log of the dipset." 
+ }, + "trueDipInclinationLog": { + "title": "X-coordinate log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The X-coordinate log of the dipset" + }, + "xCoordinateLog": { + "title": "X-coordinate log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The X-coordinate log of the dipset" + }, + "yCoordinateLog": { + "title": "Y-coordinate log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The Y-coordinate log of the dipset" + }, + "zCoordinateLog": { + "title": "Z-coordinate log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The Z-coordinate log of the dipset" + }, + "qualityLog": { + "title": "Quality log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The quality log of the dipset" + }, + "classificationLog": { + "title": "Classification log", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The classification log of the dipset" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "geographicPosition": { + "title": "geographicPosition", + "required": [ + "crsKey", + "elevationFromMsl", + "latitude", + "longitude" + ], + "type": "object", + "properties": { + "crsKey": { + "title": "CRS Key", + "type": "string", + "description": "The 'crsKey', which can be looked up in the 'frameOfReference.crs' for further details." + }, + "elevationFromMsl": { + "title": "Elevation from MSL", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Elevation from Mean Seal Level, downwards negative. The unit definition is found via 'elevationFromMsl.unitKey' in 'frameOfReference.units' dictionary." 
+ }, + "latitude": { + "title": "Native Latitude", + "type": "number", + "description": "Native or original latitude (unit defined by CRS)" + }, + "longitude": { + "title": "Native Longitude", + "type": "number", + "description": "Native or original longitude (unit defined by CRS)" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "geometryItem": { + "title": "geometryItem", + "required": [ + "geometries", + "type" + ], + "type": "object", + "properties": { + "bbox": { + "title": "Bbox", + "type": "array", + "items": { + "type": "number" + } + }, + "geometries": { + "title": "Geometries", + "type": "array", + "items": { + "anyOf": [ + { + "$ref": "#/components/schemas/GeoJsonPoint" + }, + { + "$ref": "#/components/schemas/GeoJsonMultiPoint" + }, + { + "$ref": "#/components/schemas/GeoJsonLineString" + }, + { + "$ref": "#/components/schemas/GeoJsonMultiLineString" + }, + { + "$ref": "#/components/schemas/Polygon" + }, + { + "$ref": "#/components/schemas/GeoJsonMultiPolygon" + } + ] + } + }, + "type": { + "$ref": "#/components/schemas/Type" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "historyRecord": { + "title": "historyRecord", + "type": "object", + "properties": { + "date": { + "title": "Date and Time", + "type": "string", + "description": "The UTC date time of the log creation/processing", + "format": "date-time" + }, + "description": { + "title": " Description", + "type": "string", + "description": "The description of the context, which produced the log." + }, + "user": { + "title": "User", + "type": "string", + "description": "The user running the log processing." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "log": { + "title": "log", + "type": "object", + "properties": { + "acl": { + "title": "Access Control List", + "allOf": [ + { + "$ref": "#/components/schemas/TagDictionary" + } + ], + "description": "The access control tags associated with this entity." + }, + "ancestry": { + "title": "Ancestry", + "allOf": [ + { + "$ref": "#/components/schemas/LinkList" + } + ], + "description": "The links to data, which constitute the inputs." + }, + "data": { + "title": "Log Data", + "allOf": [ + { + "$ref": "#/components/schemas/logData" + } + ], + "description": "Log data associated with a wellbore" + }, + "id": { + "title": "Log Set ID", + "type": "string", + "description": "The unique identifier of the log" + }, + "kind": { + "title": "Log Kind", + "type": "string", + "description": "Kind specification", + "default": "osdu:wks:log:0.0.1" + }, + "legal": { + "title": "Legal Tags", + "allOf": [ + { + "$ref": "#/components/schemas/app__model__model_curated__Legal" + } + ], + "description": "The log's legal tags" + }, + "meta": { + "title": "Frame of Reference Meta Data", + "type": "array", + "items": { + "$ref": "#/components/schemas/MetaItem" + }, + "description": "The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)" + }, + "status": { + "title": "Entity Status", + "type": "string", + "description": "The status of this log", + "default": "compliant" + }, + "type": { + "title": "Entity Type", + "type": "string", + "description": "The reference entity type as declared in common:metadata:entity:*." 
+ }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of this log; set by the framework." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "logData": { + "title": "logData", + "type": "object", + "properties": { + "azimuthReference": { + "title": "Azimuth Reference Code", + "type": "string", + "description": "Only supplied with azimuth logs: the azimuth reference code defining the type of North, default TN for true north." + }, + "dateCreated": { + "title": "Creation Date and Time", + "type": "string", + "description": "The UTC date time of the entity creation", + "format": "date-time" + }, + "dateModified": { + "title": "Last Modification Date and Time", + "type": "string", + "description": "The UTC date time of the last entity modification", + "format": "date-time" + }, + "elevationReference": { + "title": "Elevation Reference", + "allOf": [ + { + "$ref": "#/components/schemas/SimpleElevationReference" + } + ], + "description": "The wellbore's elevation reference from mean sea level (MSL), positive above MSL. This is where the index, e.g. MD == 0 and TVD == 0." + }, + "externalIds": { + "title": "Array of External IDs", + "type": "array", + "items": { + "type": "string" + }, + "description": "An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity." + }, + "history": { + "title": "History Records", + "type": "array", + "items": { + "$ref": "#/components/schemas/historyRecord" + }, + "description": "An array of historyRecords describing the context for the log's creation or processing." + }, + "log": { + "title": "Log Channel", + "allOf": [ + { + "$ref": "#/components/schemas/logchannel" + } + ], + "description": "The log containing the log meta data and log-store reference." + }, + "name": { + "title": "Log Set Name", + "type": "string", + "description": "The name of this log set" + }, + "operation": { + "title": "Operation", + "type": "string", + "description": "The operation which created this Log" + }, + "reference": { + "title": "Reference Index", + "allOf": [ + { + "$ref": "#/components/schemas/logchannel" + } + ], + "description": "The reference index - only populated for logs, which are member of a logSet and share the reference index." + }, + "referenceType": { + "title": "Index Type", + "allOf": [ + { + "$ref": "#/components/schemas/ReferenceType" + } + ], + "description": "The reference index type of the log set." + }, + "relationships": { + "title": "Relationships", + "allOf": [ + { + "$ref": "#/components/schemas/logRelationships" + } + ], + "description": "The related entities." + }, + "start": { + "title": "Start", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The start index value of the log set." + }, + "step": { + "title": "Step", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The index increment value of the log set. Only populated if the log is regularly sampled." + }, + "stop": { + "title": "Stop", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The stop index value of the log set." 
+ } + }, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "logRelationships": { + "title": "logRelationships", + "type": "object", + "properties": { + "logSet": { + "title": "LogSet", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The logSet to which this log belongs. If the log is not part of a log set this relationship stays empty." + }, + "timeDepthRelation": { + "title": "TimeDepthRelation LogSet", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The timeDepthRelation to which this log belongs. If the log is not part of a timeDepthRelation this relationship stays empty." + }, + "well": { + "title": "Well", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The well to which this log belongs. Only required if the wellbore is unknown." + }, + "wellbore": { + "title": "Wellbore", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The wellbore to which this log belongs. This relationship is the most important; only the wellbore can provide the unique context for the measured depth index." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "logSetData": { + "title": "logSetData", + "type": "object", + "properties": { + "azimuthReference": { + "title": "Azimuth Reference Code", + "type": "string", + "description": "Azimuth reference code defining the type of North. Only used for logSets with azimuth data" + }, + "channelMnemonics": { + "title": "Channel Mnemonics", + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of channel Mnemonics in this log set." + }, + "channelNames": { + "title": "Channel Names", + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of channel long names in this log set." + }, + "classification": { + "title": "Log Set Classification", + "type": "string", + "description": "The well-known log set classification code.", + "default": "Externally Processed LogSet" + }, + "dateCreated": { + "title": "Creation Date and Time", + "type": "string", + "description": "The UTC date time of the entity creation", + "format": "date-time" + }, + "dateModified": { + "title": "Last Modification Date and Time", + "type": "string", + "description": "The UTC date time of the last entity modification", + "format": "date-time" + }, + "elevationReference": { + "$ref": "#/components/schemas/SimpleElevationReference" + }, + "externalIds": { + "title": "Array of External IDs", + "type": "array", + "items": { + "type": "string" + }, + "description": "An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity." + }, + "name": { + "title": "Log Set Name", + "type": "string", + "description": "The name of this log set" + }, + "operation": { + "title": "Operation", + "type": "string", + "description": "The operation which created this entity" + }, + "reference": { + "$ref": "#/components/schemas/channel" + }, + "referenceType": { + "title": "Reference Type", + "type": "string", + "description": "The reference index type of the log set." 
+ }, + "relationships": { + "$ref": "#/components/schemas/logsetrelationships" + }, + "start": { + "$ref": "#/components/schemas/ValueWithUnit" + }, + "step": { + "$ref": "#/components/schemas/ValueWithUnit" + }, + "stop": { + "$ref": "#/components/schemas/ValueWithUnit" + } + }, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "logchannel": { + "title": "logchannel", + "type": "object", + "properties": { + "columnNames": { + "title": "Column Names", + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of names for multi-dimensional logs (dimension>1). The length of this array is expected to be equal to 'dimension'. For one-dimensional this property stays empty as the columnName is by definition the log name." + }, + "dataType": { + "title": "Data Type", + "allOf": [ + { + "$ref": "#/components/schemas/DataType_2" + } + ], + "description": "The log value type (per log sample). The 'format' property may contain further hints about data type presentation.", + "default": "number" + }, + "dimension": { + "title": "Dimension", + "type": "integer", + "description": "The dimension of this log or channel" + }, + "family": { + "title": "Log Family", + "type": "string", + "description": "The log family code of this log or channel (optional)" + }, + "familyType": { + "title": "Log Family Type", + "type": "string", + "description": "The log family type code of this log or channel. Example: 'Neutron Porosity' for 'Thermal Neutron Porosity Sandstone'. (optional)" + }, + "format": { + "title": "Format Hint", + "allOf": [ + { + "$ref": "#/components/schemas/Format_2" + } + ], + "description": "Optional format hint how to treat the log values as strings or number of bits per 'dataType'.", + "default": "float32" + }, + "logstoreId": { + "title": "Logstore ID", + "type": "number", + "description": "The unique id of this log or channel in the Logstore. This property is not present in the index channel." + }, + "bulkURI": { + "title": "bulk URI", + "type": "string", + "description": "bulkURI either URL or URN." + }, + "longName": { + "title": "Log Long Name", + "type": "string", + "description": "The long name of this log or channel" + }, + "mnemonic": { + "title": "Mnemonic", + "type": "string", + "description": "The mnemonic of this log or channel" + }, + "name": { + "title": "Log Name", + "type": "string", + "description": "The name of this log or channel." + }, + "properties": { + "title": "Named Properties", + "type": "array", + "items": { + "$ref": "#/components/schemas/namedProperty" + }, + "description": "The named properties of this log or channel." + }, + "source": { + "title": "Source", + "type": "string", + "description": "The source of this log or channel as a data reference; Typically this refers to the raw LogSet, from which this log WKE is generated." + }, + "unitKey": { + "title": "Unit", + "type": "string", + "description": "The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "logset": { + "title": "logset", + "type": "object", + "properties": { + "acl": { + "title": "Access Control List", + "allOf": [ + { + "$ref": "#/components/schemas/TagDictionary" + } + ], + "description": "The access control tags associated with this entity." 
+ }, + "ancestry": { + "title": "Ancestry", + "allOf": [ + { + "$ref": "#/components/schemas/LinkList" + } + ], + "description": "The links to data, which constitute the inputs." + }, + "data": { + "title": "Log Set Data", + "allOf": [ + { + "$ref": "#/components/schemas/logSetData" + } + ], + "description": "Log channel set associated with a wellbore" + }, + "id": { + "title": "Log Set ID", + "type": "string", + "description": "The unique identifier of the log set" + }, + "kind": { + "title": "Log Set Kind", + "type": "string", + "description": "Kind specification", + "default": "osdu:wks:logSet:0.0.1" + }, + "legal": { + "title": "Legal Tags", + "allOf": [ + { + "$ref": "#/components/schemas/app__model__model_curated__Legal" + } + ], + "description": "The log-set's legal tags" + }, + "meta": { + "title": "Frame of Reference Meta Data", + "type": "array", + "items": { + "$ref": "#/components/schemas/MetaItem" + }, + "description": "The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)" + }, + "type": { + "title": "Entity Type", + "type": "string", + "description": "The reference entity type as declared in common:metadata:entity:*." + }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of this log set; set by the framework." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "logsetrelationships": { + "title": "logsetrelationships", + "required": [ + "wellbore" + ], + "type": "object", + "properties": { + "well": { + "title": "Well", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The well to which this logSet belongs. Only required if the wellbore is unknown." + }, + "wellbore": { + "title": "Wellbore", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The wellbore to which this logSet belongs." + }, + "wellboreSection": { + "title": "Wellbore Section", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The wellboreSection to which this logSet belongs." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "marker": { + "title": "marker", + "required": [ + "acl", + "kind", + "legal" + ], + "type": "object", + "properties": { + "acl": { + "title": "Access Control List", + "allOf": [ + { + "$ref": "#/components/schemas/TagDictionary" + } + ], + "description": "The access control tags associated with this entity." + }, + "ancestry": { + "title": "Ancestry", + "allOf": [ + { + "$ref": "#/components/schemas/LinkList" + } + ], + "description": "The links to data, which constitute the inputs." + }, + "data": { + "title": "Marker Data", + "allOf": [ + { + "$ref": "#/components/schemas/markerData" + } + ], + "description": "Geological marker using a single point-observation, typically along a wellbore." 
+ }, + "id": { + "title": "Marker ID", + "type": "string", + "description": "The unique identifier of the marker" + }, + "kind": { + "title": "Marker Kind", + "type": "string", + "description": "Marker kind specification" + }, + "legal": { + "title": "Legal Tags", + "allOf": [ + { + "$ref": "#/components/schemas/app__model__model_curated__Legal" + } + ], + "description": "The marker's legal tags" + }, + "meta": { + "title": "Frame of Reference Meta Data", + "type": "array", + "items": { + "$ref": "#/components/schemas/MetaItem" + }, + "description": "The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)" + }, + "type": { + "title": "Entity Type", + "type": "string", + "description": "The reference entity type as declared in common:metadata:entity:*." + }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of this marker; set by the framework." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "markerData": { + "title": "markerData", + "required": [ + "md", + "name" + ], + "type": "object", + "properties": { + "age": { + "title": "Age", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The absolute age at the feature boundary. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary." + }, + "boundaryRelation": { + "title": "Interface Boundary Relation", + "type": "string", + "description": "The marker boundary relationship classification" + }, + "classification": { + "title": "Marker Classification", + "type": "string", + "description": "The classification of the marker. Could be client-defined via a catalog, e.g. common:wke:markerClassification:1.0.0 and common:wke:markerClassificationMember:1.0.0" + }, + "dateCreated": { + "title": "Creation Date and Time", + "type": "string", + "description": "The UTC date time of the entity creation", + "format": "date-time" + }, + "dateModified": { + "title": "Last Modification Date and Time", + "type": "string", + "description": "The UTC date time of the last entity modification", + "format": "date-time" + }, + "depth": { + "title": "Marker Depth", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The original marker depth - measured from data.elevationReference in data.depthReferenceType. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary." + }, + "depthReferenceType": { + "title": "Depth Reference Code", + "type": "string", + "description": "Depth reference code defining the type of depth for the marker. Default MD (measured depth). Depth is downwards increasing.", + "default": "MD" + }, + "elevationReference": { + "title": "Elevation Reference Level", + "allOf": [ + { + "$ref": "#/components/schemas/SimpleElevationReference" + } + ], + "description": "The elevation from mean sea level (MSL), where depth, topDepth, baseDepth are zero. Values above MSL are positive." + }, + "externalIds": { + "title": "Array of External IDs", + "type": "array", + "items": { + "type": "string" + }, + "description": "An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity." 
+ }, + "interpreter": { + "title": "Interpreter Name", + "type": "string", + "description": "The name of the interpreter who picked this marker." + }, + "locationWGS84": { + "title": "GeoJSON Marker Location", + "allOf": [ + { + "$ref": "#/components/schemas/GeoJsonFeatureCollection" + } + ], + "description": "The marker's shape as GeoJSON Point." + }, + "markerFeatureType": { + "title": "Marker Feature Type", + "type": "string", + "description": "The marker's type of feature like 'seismic', 'structural', 'stratigraphic'" + }, + "markerGeoDomain": { + "title": "Marker GeoScience Domain", + "type": "string", + "description": "The marker's GeoScience domain like 'geologic', 'reservoir', 'petrophysical'" + }, + "markerSubFeatureAttribute": { + "title": "Marker Sub-feature Attribute", + "type": "string", + "description": "Further specification of the marker's sub-feature, e.g. in sequence stratigraphy." + }, + "markerSubFeatureType": { + "title": "Marker Sub-feature Type", + "type": "string", + "description": "The marker's sub-type of the feature like 'horizon', 'fault', 'fracture'" + }, + "md": { + "title": "Marker Measured Depth", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The marker measured depth (MD) measured from data.elevationReference. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary." + }, + "name": { + "title": "Marker Name", + "type": "string", + "description": "The name of the marker" + }, + "planeOrientationAzimuth": { + "title": "Azimuth Angle", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Azimuth angle. The azimuth reference is given by data.azimuthReference. The 'planeOrientationAzimuth.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition." + }, + "planeOrientationDip": { + "title": "Dip Angle", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Dip angle. The 'planeOrientationDip.unitKey' is to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition." + }, + "relationships": { + "title": "Relationships", + "allOf": [ + { + "$ref": "#/components/schemas/markerrelationships" + } + ], + "description": "The entities related to this marker." + }, + "stratigraphicHierarchyLevel": { + "title": "Column Level", + "type": "integer", + "description": "Optional hierarchical level in the chrono-stratigraphic/litho-stratigraphic catalog table, identified by the data.relationships.chartId" + }, + "tvd": { + "title": "Marker Measured Depth", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The marker true vertical depth (TVD) measured from data.elevationReference. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary." + }, + "wgs84ElevationFromMsl": { + "title": "Elevation from MSL", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Elevation from Mean Sea Level, downwards negative. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary." + }, + "wgs84LatitudeLongitude": { + "title": "WGS 84 Latitude Longitude", + "allOf": [ + { + "$ref": "#/components/schemas/core_dl_geopoint" + } + ], + "description": "The marker's position in WGS 84 latitude and longitude." 
+ } + }, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "markerrelationships": { + "title": "markerrelationships", + "type": "object", + "properties": { + "horizon": { + "title": "Stratigraphic Horizon", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The related stratigraphic horizon" + }, + "stratigraphicTable": { + "title": "Stratigraphic Table", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The related stratigraphic table, which provides the context for the stratigraphic horizon" + }, + "study": { + "title": "Study", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The study, in which this marker was conceived." + }, + "trajectory": { + "title": "Trajectory", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The trajectory used to create the marker position" + }, + "wellbore": { + "title": "Wellbore", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The wellbore entity, to which this marker belongs." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "namedProperty": { + "title": "namedProperty", + "type": "object", + "properties": { + "associations": { + "title": "Associations", + "type": "array", + "items": { + "type": "string" + }, + "description": "The optional associations contains one or more mnemonics found elsewhere in the logSet." + }, + "description": { + "title": "Property Description", + "type": "string", + "description": "The description and role of this property." + }, + "format": { + "title": "Format (LAS)", + "type": "string", + "description": "An optional format declaration for the property values. The 'A' prefix indicates an array; string values are represented by 'S'; floating point values are represented by 'F', optionally followed by a field specification, e.g. 'F10.4'; exponential number representations are represented by 'E'; integer values are represented by 'I'. For further information see the LAS specification http://www.cwls.org/las/." + }, + "name": { + "title": "Property Name", + "type": "string", + "description": "The name of this property." + }, + "unitKey": { + "title": "Property Unit Symbol", + "type": "string", + "description": "The unitKey to be looked up in the 'frameOfReference.units' dictionary to find the self-contained definition." + }, + "value": { + "title": "Property Value", + "anyOf": [ + { + "type": "number" + }, + { + "type": "string" + } + ], + "description": "The value for this property as a string or a number." + }, + "values": { + "title": "Property Values (Interval)", + "type": "array", + "items": { + "type": "number" + }, + "description": "The values, e.g. interval boundaries, for this property." 
+ } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "odes_storage__models__Legal": { + "title": "Legal", + "type": "object", + "properties": { + "legaltags": { + "title": "Legaltags", + "type": "array", + "items": { + "type": "string" + } + }, + "otherRelevantDataCountries": { + "title": "Otherrelevantdatacountries", + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "projectedPosition": { + "title": "projectedPosition", + "required": [ + "crsKey", + "elevationFromMsl", + "x", + "y" + ], + "type": "object", + "properties": { + "crsKey": { + "title": "CRS Key", + "type": "string", + "description": "The 'crsKey', which can be looked up in the 'frameOfReference.crs' for further details." + }, + "elevationFromMsl": { + "title": "Elevation from MSL", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Elevation from Mean Seal Level, downwards negative. The unit definition is found via 'elevationFromMsl.unitKey' in 'frameOfReference.units' dictionary." + }, + "x": { + "title": "X Coordinate", + "type": "number", + "description": "X-coordinate value in native or original projected CRS" + }, + "y": { + "title": "Y Coordinate", + "type": "number", + "description": "Y-coordinate value in native or original projected CRS" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "trajectory": { + "title": "trajectory", + "type": "object", + "properties": { + "acl": { + "title": "Access Control List", + "allOf": [ + { + "$ref": "#/components/schemas/TagDictionary" + } + ], + "description": "The access control tags associated with this entity." + }, + "ancestry": { + "title": "Ancestry", + "allOf": [ + { + "$ref": "#/components/schemas/LinkList" + } + ], + "description": "The links to data, which constitute the inputs." + }, + "data": { + "title": "Trajectory Data", + "allOf": [ + { + "$ref": "#/components/schemas/trajectoryData" + } + ], + "description": "A log set representing a trajectory associated with a wellbore" + }, + "id": { + "title": "Trajectory ID", + "type": "string", + "description": "The unique identifier of the trajectory" + }, + "kind": { + "title": "Trajectory Kind", + "type": "string", + "description": "Kind specification", + "default": "osdu:wks:trajectory:0.0.1" + }, + "legal": { + "title": "Legal Tags", + "allOf": [ + { + "$ref": "#/components/schemas/app__model__model_curated__Legal" + } + ], + "description": "The trajectory's legal tags" + }, + "meta": { + "title": "Frame of Reference Meta Data", + "type": "array", + "items": { + "$ref": "#/components/schemas/MetaItem" + }, + "description": "The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)" + }, + "type": { + "title": "Entity Type", + "type": "string", + "description": "The reference entity type as declared in common:metadata:entity:*." + }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of this trajectory; set by the framework." 
+ } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "trajectoryData": { + "title": "trajectoryData", + "type": "object", + "properties": { + "azimuthReference": { + "title": "Azimuth Reference Code", + "type": "string", + "description": "Azimuth reference code defining the type of North, default TN for true north." + }, + "channelMnemonics": { + "title": "Channel Mnemonics", + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of channel Mnemonics in this trajectory." + }, + "channelNames": { + "title": "Channel Names", + "type": "array", + "items": { + "type": "string" + }, + "description": "A list of channel long names in this trajectory." + }, + "channels": { + "title": "Channels", + "type": "array", + "items": { + "$ref": "#/components/schemas/trajectorychannel" + }, + "description": "The channels associated to the index." + }, + "classification": { + "title": "Trajectory Classification", + "type": "string", + "description": "The well-known trajectory classification code.", + "default": "Raw Deviation Survey" + }, + "dateCreated": { + "title": "Creation Date and Time", + "type": "string", + "description": "The UTC date time of the entity creation", + "format": "date-time" + }, + "dateModified": { + "title": "Last Modification Date and Time", + "type": "string", + "description": "The UTC date time of the last entity modification", + "format": "date-time" + }, + "elevationReference": { + "title": "Elevation Reference", + "allOf": [ + { + "$ref": "#/components/schemas/SimpleElevationReference" + } + ], + "description": "The wellbore's elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0" + }, + "externalIds": { + "title": "Array of External IDs", + "type": "array", + "items": { + "type": "string" + }, + "description": "An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity." + }, + "index": { + "title": "Index Channel", + "allOf": [ + { + "$ref": "#/components/schemas/trajectorychannel" + } + ], + "description": "The index channel or log." + }, + "indexType": { + "title": "Index Type", + "type": "string", + "description": "The index type of the trajectory." + }, + "locationWGS84": { + "title": "Trajectory preview", + "allOf": [ + { + "$ref": "#/components/schemas/GeoJsonFeatureCollection" + } + ], + "description": "The wellbore's trajectory preview shape as GeoJSON LineString." + }, + "name": { + "title": "Trajectory Name", + "type": "string", + "description": "The name of this trajectory" + }, + "referencePosition": { + "title": "Reference Position First Sample", + "allOf": [ + { + "$ref": "#/components/schemas/Point3dNonGeoJson" + } + ], + "description": "The 3D reference position for the first sample (surface location for main wellbores, tie-in point for side-tracks." + }, + "relationships": { + "title": "Relationships", + "allOf": [ + { + "$ref": "#/components/schemas/trajectoryrelationships" + } + ], + "description": "The related entities." + }, + "start": { + "title": "Start", + "type": "number", + "description": "The start index value of the trajectory." + }, + "step": { + "title": "Step", + "type": "number", + "description": "The index increment value of the trajectory." + }, + "stop": { + "title": "Stop", + "type": "number", + "description": "The stop index value of the trajectory." 
+ }, + "wellHeadWgs84": { + "title": "WGS 84 Position", + "allOf": [ + { + "$ref": "#/components/schemas/wgs84Position" + } + ], + "description": "The wellbore's position in WGS 84 latitude and longitude; vertical position is an elevation from mean sea level (MSL), positive above MSL." + } + }, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "trajectorychannel": { + "title": "trajectorychannel", + "type": "object", + "properties": { + "absentValue": { + "title": "Absent Value", + "type": "string", + "description": "Optional field carrying the absent value as string for this channel." + }, + "azimuthKey": { + "title": "Azimuth Reference Key", + "type": "string", + "description": "The azimuth reference of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.azimuth' dictionary." + }, + "crsKey": { + "title": "CRS Key", + "type": "string", + "description": "The CRS key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.crs' dictionary." + }, + "dataType": { + "title": "Data Type", + "allOf": [ + { + "$ref": "#/components/schemas/DataType_1" + } + ], + "description": "The log value type (per log sample). The 'format' property may contain further hints about data type presentation.", + "default": "number" + }, + "dimension": { + "title": "Dimension", + "type": "integer", + "description": "The dimension of this log or channel" + }, + "family": { + "title": "Log Family", + "type": "string", + "description": "The log family code of this log or channel (optional)" + }, + "familyType": { + "title": "Log Family Type", + "type": "string", + "description": "The log family type code of this log or channel. Example: 'Neutron Porosity' for 'Thermal Neutron Porosity Sandstone'. (optional)" + }, + "format": { + "title": "Format Hint", + "allOf": [ + { + "$ref": "#/components/schemas/Format_1" + } + ], + "description": "Optional format hint how to treat the log values as strings or number of bits per 'dataType'.", + "default": "float32" + }, + "logstoreId": { + "title": "Logstore ID", + "type": "number", + "description": "The id of this log or channel in the Logstore. This property is not present in the index channel." + }, + "bulkURI": { + "title": "bulk URI", + "type": "string", + "description": "bulkURI either URL or URN." + }, + "longName": { + "title": "Log Long Name", + "type": "string", + "description": "The long name of this log or channel" + }, + "mnemonic": { + "title": "Mnemonic", + "type": "string", + "description": "The mnemonic of this log or channel" + }, + "name": { + "title": "Log Name", + "type": "string", + "description": "The name of this log or channel." + }, + "properties": { + "title": "Properties", + "type": "array", + "items": { + "type": "string" + }, + "description": "The properties of this log or channel." + }, + "source": { + "title": "Source", + "type": "string", + "description": "The source of this log or channel as a data reference; Typically this refers to the raw trajectory, from which this log WKE is generated." + }, + "unitKey": { + "title": "Unit Key", + "type": "string", + "description": "The unit key of this log or channel. The detailed definition is found as persistable reference in the 'frameOfReference.units' dictionary. Empty units (NoUnit) are not recorded." 
+ } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "trajectoryrelationships": { + "title": "trajectoryrelationships", + "required": [ + "wellbore" + ], + "type": "object", + "properties": { + "wellbore": { + "title": "Wellbore", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The wellbore to which this trajectory belongs." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "well": { + "title": "well", + "type": "object", + "properties": { + "acl": { + "title": "Access Control List", + "allOf": [ + { + "$ref": "#/components/schemas/TagDictionary" + } + ], + "description": "The access control tags associated with this entity." + }, + "ancestry": { + "title": "Ancestry", + "allOf": [ + { + "$ref": "#/components/schemas/LinkList" + } + ], + "description": "The links to data, which constitute the inputs." + }, + "data": { + "title": "Well Data", + "allOf": [ + { + "$ref": "#/components/schemas/wellData" + } + ], + "description": "Well data container" + }, + "id": { + "title": "Well ID", + "type": "string", + "description": "The unique identifier of the well" + }, + "kind": { + "title": "Well Kind", + "type": "string", + "description": "Well-known well kind specification", + "default": "osdu:wks:well:0.0.1" + }, + "legal": { + "title": "Legal Tags", + "allOf": [ + { + "$ref": "#/components/schemas/app__model__model_curated__Legal" + } + ], + "description": "The geological interpretation's legal tags" + }, + "meta": { + "title": "Frame of Reference Meta Data", + "type": "array", + "items": { + "$ref": "#/components/schemas/MetaItem" + }, + "description": "The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)" + }, + "type": { + "title": "Entity Type", + "type": "string", + "description": "The reference entity type as declared in common:metadata:entity:*." + }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of this well; set by the framework." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "wellData": { + "title": "wellData", + "type": "object", + "properties": { + "basinContext": { + "title": "Basin Context", + "allOf": [ + { + "$ref": "#/components/schemas/basinContext" + } + ], + "description": "The basin context details for the well." + }, + "block": { + "title": "Block", + "type": "string", + "description": "The block name, in which the well is located." + }, + "country": { + "title": "Country", + "type": "string", + "description": "The country, in which the well is located. The country name follows the convention in ISO 3166-1 'English short country name', see https://en.wikipedia.org/wiki/ISO_3166-1" + }, + "county": { + "title": "County", + "type": "string", + "description": "The county name, in which the well is located." 
+ }, + "dateCreated": { + "title": "Creation Date and Time", + "type": "string", + "description": "The UTC date time of the entity creation", + "format": "date-time" + }, + "dateLicenseIssued": { + "title": "License Issue Date", + "type": "string", + "description": "The UTC date time when the well license was issued.", + "format": "date-time" + }, + "dateModified": { + "title": "Last Modification Date and Time", + "type": "string", + "description": "The UTC date time of the last entity modification", + "format": "date-time" + }, + "datePluggedAbandoned": { + "title": "Plugged Abandoned Date", + "type": "string", + "description": "The UTC date and time at which the well was plugged and abandoned.", + "format": "date-time" + }, + "dateSpudded": { + "title": "Spud Date", + "type": "string", + "description": "The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began. The format follows ISO 8601 YYYY-MM-DD extended format", + "format": "date-time" + }, + "directionWell": { + "title": "Well Direction", + "allOf": [ + { + "$ref": "#/components/schemas/DirectionWell" + } + ], + "description": "POSC well direction. The direction of the flow of the fluids in a well facility (generally, injected or produced, or some combination)." + }, + "district": { + "title": "District", + "type": "string", + "description": "The district name, to which the well belongs." + }, + "elevationReference": { + "title": "Elevation Reference", + "allOf": [ + { + "$ref": "#/components/schemas/SimpleElevationReference" + } + ], + "description": "The well's elevation reference from mean sea level (MSL), positive above MSL. This is where MD == 0 and TVD == 0" + }, + "externalIds": { + "title": "Array of External IDs", + "type": "array", + "items": { + "type": "string" + }, + "description": "An array of identities (e.g. some kind if URL to be resolved in an external data store), which links to external realizations of the same entity." + }, + "field": { + "title": "Field", + "type": "string", + "description": "The field name, to which the well belongs." + }, + "fluidWell": { + "title": "Well Fluid", + "allOf": [ + { + "$ref": "#/components/schemas/FluidWell" + } + ], + "description": "POSC well fluid. The type of fluid being produced from or injected \\ninto a well facility." + }, + "groundElevation": { + "title": "Ground Elevation", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The well's ground elevation, Values above MSL are positive.." + }, + "locationWGS84": { + "title": "Well Shape WGS 84", + "allOf": [ + { + "$ref": "#/components/schemas/GeoJsonFeatureCollection" + } + ], + "description": "A 2D GeoJSON FeatureCollection defining well location or trajectory in WGS 84 CRS." + }, + "name": { + "title": "Well Name", + "type": "string", + "description": "The well name" + }, + "operator": { + "title": "Well Operator", + "type": "string", + "description": "The operator company name of the well." + }, + "operatorDivision": { + "title": "Operator Division", + "type": "string", + "description": "The operator division of the well." + }, + "operatorInterest": { + "title": "Well Operator Interest", + "type": "number", + "description": "Interest for operator. Commonly in percent." + }, + "operatorOriginal": { + "title": "Original Well Operator", + "type": "string", + "description": "Original operator of the well. This may be different than the current operator." 
+ }, + "plssLocation": { + "title": "US PLSS Location", + "allOf": [ + { + "$ref": "#/components/schemas/PlssLocation" + } + ], + "description": "A location described by the Public Land Survey System (United States)" + }, + "propertyDictionary": { + "title": "Property Dictionary", + "type": "object", + "description": "A dictionary structure, i.e. key/string value pairs, to carry additional well properties." + }, + "region": { + "title": "Region", + "type": "string", + "description": "Geo-political region in which the well is located." + }, + "relationships": { + "title": "Relationships", + "allOf": [ + { + "$ref": "#/components/schemas/wellrelationships" + } + ], + "description": "The related entities." + }, + "state": { + "title": "State", + "type": "string", + "description": "The state name, in which the well is located." + }, + "uwi": { + "title": "Unique Well Identifier", + "type": "string", + "description": "The unique well identifier, aka. API number, US well number or UBHI. Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits)." + }, + "waterDepth": { + "title": "Water Depth", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Depth of water (not land rigs)." + }, + "wellHeadElevation": { + "title": "Well Head Elevation", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The well's vertical position is an elevation from mean sea level (MSL), positive above MSL." + }, + "wellHeadGeographic": { + "title": "Well Head Position, Geographic", + "allOf": [ + { + "$ref": "#/components/schemas/geographicPosition" + } + ], + "description": "The well's well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL." + }, + "wellHeadProjected": { + "title": "Well Head Position, Projected", + "allOf": [ + { + "$ref": "#/components/schemas/projectedPosition" + } + ], + "description": "The well's well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL." + }, + "wellHeadWgs84": { + "title": "WGS 84 Position", + "allOf": [ + { + "$ref": "#/components/schemas/core_dl_geopoint" + } + ], + "description": "The well's position in WGS 84 latitude and longitude." + }, + "wellLocationType": { + "$ref": "#/components/schemas/WellLocationType" + }, + "wellNumberGovernment": { + "title": "Government Number", + "type": "string", + "description": "Government assigned well number." + }, + "wellNumberLicense": { + "title": "Well License Number", + "type": "string", + "description": "License number of the well." + }, + "wellNumberOperator": { + "title": "Operator Number", + "type": "string", + "description": "Operator well number." + }, + "wellPurpose": { + "title": "Well Purpose", + "allOf": [ + { + "$ref": "#/components/schemas/WellPurpose" + } + ], + "description": "POSC well purpose" + }, + "wellStatus": { + "title": "Well Status", + "allOf": [ + { + "$ref": "#/components/schemas/WellStatus" + } + ], + "description": "POSC well status." + }, + "wellType": { + "title": "Well Type", + "allOf": [ + { + "$ref": "#/components/schemas/WellType" + } + ], + "description": "Type of well." 
+ } + }, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "wellbore": { + "title": "wellbore", + "type": "object", + "properties": { + "acl": { + "title": "Access Control List", + "allOf": [ + { + "$ref": "#/components/schemas/TagDictionary" + } + ], + "description": "The access control tags associated with this entity." + }, + "ancestry": { + "title": "Ancestry", + "allOf": [ + { + "$ref": "#/components/schemas/LinkList" + } + ], + "description": "The links to data, which constitute the inputs." + }, + "data": { + "title": "Wellbore Data", + "allOf": [ + { + "$ref": "#/components/schemas/wellboreData" + } + ], + "description": "Wellbore data container" + }, + "id": { + "title": "Wellbore ID", + "type": "string", + "description": "The unique identifier of the wellbore" + }, + "kind": { + "title": "Wellbore Kind", + "type": "string", + "description": "Well-known wellbore kind specification", + "default": "osdu:wks:wellbore:0.0.1" + }, + "legal": { + "title": "Legal Tags", + "allOf": [ + { + "$ref": "#/components/schemas/app__model__model_curated__Legal" + } + ], + "description": "The geological interpretation's legal tags" + }, + "meta": { + "title": "Frame of Reference Meta Data", + "type": "array", + "items": { + "$ref": "#/components/schemas/MetaItem" + }, + "description": "The meta data section linking the 'unitKey', 'crsKey' to self-contained definitions (persistableReference)" + }, + "type": { + "title": "Entity Type", + "type": "string", + "description": "The reference entity type as declared in common:metadata:entity:*." + }, + "version": { + "title": "Entity Version Number", + "type": "number", + "description": "The version number of this wellbore; set by the framework." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "wellboreData": { + "title": "wellboreData", + "type": "object", + "properties": { + "airGap": { + "title": "Air Gap", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The gap between water surface and offshore drilling platform." + }, + "block": { + "title": "Block", + "type": "string", + "description": "The block name, in which the wellbore is located." + }, + "country": { + "title": "Country", + "type": "string", + "description": "The country, in which the wellbore is located. The country name follows the convention in ISO 3166-1 'English short country name', see https://en.wikipedia.org/wiki/ISO_3166-1" + }, + "county": { + "title": "County", + "type": "string", + "description": "The county name, in which the wellbore is located." + }, + "dateCreated": { + "title": "Creation Date and Time", + "type": "string", + "description": "The UTC date time of the entity creation", + "format": "date-time" + }, + "dateModified": { + "title": "Last Modification Date and Time", + "type": "string", + "description": "The UTC date time of the last entity modification", + "format": "date-time" + }, + "drillingDaysTarget": { + "title": "Target Drilling Days", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Target days for drilling wellbore." + }, + "elevationReference": { + "title": "Elevation Reference", + "allOf": [ + { + "$ref": "#/components/schemas/SimpleElevationReference" + } + ], + "description": "The wellbore's elevation reference from mean sea level (MSL), positive above MSL. 
This is where MD == 0 and TVD == 0" + }, + "externalIds": { + "title": "Array of External IDs", + "type": "array", + "items": { + "type": "string" + }, + "description": "An array of identities (e.g. some kind of URL to be resolved in an external data store), which links to external realizations of the same entity." + }, + "field": { + "title": "Field", + "type": "string", + "description": "The field name, to which the wellbore belongs." + }, + "formationAtTd": { + "title": "Formation at TD", + "type": "string", + "description": "The name of the formation at the wellbore's total depth." + }, + "formationProjected": { + "title": "Formation Projected", + "type": "string", + "description": "The name of the formation at the wellbore's projected depth. This property is questionable as there is no precise documentation available." + }, + "hasAchievedTotalDepth": { + "title": "Has Total Depth Been Achieved Flag", + "type": "boolean", + "description": "True (\"true\" or \"1\") indicates that the wellbore has achieved total depth. That is, drilling has completed. False (\"false\" or \"0\") indicates otherwise. Not given indicates that it is not known whether total depth has been reached.", + "default": true + }, + "isActive": { + "title": "Is Active Flag", + "type": "boolean", + "description": "True (=\"1\" or \"true\") indicates that the wellbore is active. False (=\"0\" or \"false\") indicates otherwise. It is the server's responsibility to set this value based on its available internal data (e.g., what objects are changing)." + }, + "kickOffMd": { + "title": "Kick-off MD", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The kick-off point in measured depth (MD); for the main well the kickOffMd is set to 0." + }, + "kickOffTvd": { + "title": "Kick-off TVD", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Kickoff true vertical depth of the wellbore; for the main wellbore the kickOffTvd is set to 0." + }, + "locationWGS84": { + "title": "Wellbore Shape WGS 84", + "allOf": [ + { + "$ref": "#/components/schemas/GeoJsonFeatureCollection" + } + ], + "description": "A 2D GeoJSON FeatureCollection defining wellbore location or trajectory in WGS 84 CRS." + }, + "name": { + "title": "Wellbore Name", + "type": "string", + "description": "The wellbore name" + }, + "operator": { + "title": "Operator", + "type": "string", + "description": "The operator of the wellbore." + }, + "permitDate": { + "title": "Permit Date", + "type": "string", + "description": "The wellbore's permit date.", + "format": "date" + }, + "permitNumber": { + "title": "Permit Number", + "type": "string", + "description": "The wellbore's permit number or permit ID." + }, + "plssLocation": { + "title": "US PLSS Location", + "allOf": [ + { + "$ref": "#/components/schemas/PlssLocation" + } + ], + "description": "A location described by the Public Land Survey System (United States)" + }, + "propertyDictionary": { + "title": "Property Dictionary", + "type": "object", + "description": "A dictionary structure, i.e. key/string value pairs, to carry additional wellbore properties." + }, + "relationships": { + "title": "Relationships", + "allOf": [ + { + "$ref": "#/components/schemas/wellborerelationships" + } + ], + "description": "The related entities." + }, + "shape": { + "title": "Wellbore Shape", + "allOf": [ + { + "$ref": "#/components/schemas/Shape" + } + ], + "description": "POSC wellbore trajectory shape."
+ }, + "spudDate": { + "title": "Spud Date", + "type": "string", + "description": "The date and time when activities to drill the borehole begin to create a hole in the earth. For a sidetrack, this is the date kickoff operations began. The format follows ISO 8601 YYYY-MM-DD extended format", + "format": "date" + }, + "state": { + "title": "State", + "type": "string", + "description": "The state name, in which the wellbore is located." + }, + "totalDepthMd": { + "title": "Total MD", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The measured depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site." + }, + "totalDepthMdDriller": { + "title": "Total MD Drilled", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The total depth along the wellbore as reported by the drilling contractor from 'elevationReference'. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary." + }, + "totalDepthMdPlanned": { + "title": "Total MD Planned", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Planned measured depth for the wellbore total depth." + }, + "totalDepthMdSubSeaPlanned": { + "title": "Total MD Sub Sea Planned", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Planned measured depth for the wellbore total depth - with respect to seabed." + }, + "totalDepthProjectedMd": { + "title": "Total MD Projected", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The projected total measured depth of the borehole. This property is questionable as there is no precise documentation available." + }, + "totalDepthTvd": { + "title": "Total TVD", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The true vertical depth of the borehole. If status is plugged, indicates the maximum depth reached before plugging. It is recommended that this value be updated about every 10 minutes by an assigned raw data provider at a site." + }, + "totalDepthTvdDriller": { + "title": "Total TVD Drilled", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The total true vertical depth as reported by the drilling contractor from 'elevationReference', downwards increasing. The unit definition is found via the property's unitKey' in 'frameOfReference.units' dictionary." + }, + "totalDepthTvdPlanned": { + "title": "Total TVD Planned", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Planned true vertical depth for the wellbore total depth." + }, + "totalDepthTvdSubSeaPlanned": { + "title": "Total TVD Sub Sea Planned", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Planned true vertical depth for the wellbore total depth - with respect to seabed." + }, + "uwi": { + "title": "Unique Wellbore Identifier", + "type": "string", + "description": "The unique wellbore identifier, aka. API number, US well number or UBHI. Codes can have 10, 12 or 14 digits depending on the availability of directional sidetrack (2 digits) and event sequence codes (2 digits)."
+ }, + "wellHeadElevation": { + "title": "Well Head Elevation", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "The wellbore's vertical position is an elevation from mean sea level (MSL), positive above MSL." + }, + "wellHeadGeographic": { + "title": "Well Head Position, Geographic", + "allOf": [ + { + "$ref": "#/components/schemas/geographicPosition" + } + ], + "description": "The wellbore's well head position in the native, geographic CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL." + }, + "wellHeadProjected": { + "title": "Well Head Position, Projected", + "allOf": [ + { + "$ref": "#/components/schemas/projectedPosition" + } + ], + "description": "The wellbore's well head position in the native, projected CRS; vertical position is an elevation from mean sea level (MSL), positive above MSL." + }, + "wellHeadWgs84": { + "title": "WGS 84 Position", + "allOf": [ + { + "$ref": "#/components/schemas/core_dl_geopoint" + } + ], + "description": "The wellbore's position in WGS 84 latitude and longitude." + }, + "wellboreNumberGovernment": { + "title": "Government Number", + "type": "string", + "description": "Government assigned wellbore number." + }, + "wellboreNumberOperator": { + "title": "Operator Number", + "type": "string", + "description": "Operator wellbore number." + }, + "wellborePurpose": { + "title": "Wellbore Purpose", + "allOf": [ + { + "$ref": "#/components/schemas/WellborePurpose" + } + ], + "description": "POSC wellbore purpose" + }, + "wellboreStatus": { + "title": "Wellbore Status", + "allOf": [ + { + "$ref": "#/components/schemas/WellboreStatus" + } + ], + "description": "POSC wellbore status." + }, + "wellboreType": { + "title": "Wellbore Type", + "allOf": [ + { + "$ref": "#/components/schemas/WellboreType" + } + ], + "description": "Type of wellbore." + } + }, + "description": "Used for data model allows extra fields which are not declared initially in the pydantic model" + }, + "wellborerelationships": { + "title": "wellborerelationships", + "type": "object", + "properties": { + "definitiveTimeDepthRelation": { + "title": "Definitive Time-Depth Relation", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The definitive time-depth relation providing the MD to seismic travel-time transformation." + }, + "definitiveTrajectory": { + "title": "Definitive Trajectory", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The definitive trajectory providing the MD to 3D space transformation." + }, + "tieInWellbore": { + "title": "Tie-in Wellbore", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The tie-in wellbore if this wellbore is a side-track." + }, + "well": { + "title": "Well", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The well to which this wellbore belongs." + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "wellrelationships": { + "title": "wellrelationships", + "type": "object", + "properties": { + "asset": { + "title": "Asset", + "allOf": [ + { + "$ref": "#/components/schemas/ToOneRelationship" + } + ], + "description": "The asset this well belongs to."
+ } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "wgs84Position": { + "title": "wgs84Position", + "required": [ + "elevationFromMsl", + "latitude", + "longitude" + ], + "type": "object", + "properties": { + "elevationFromMsl": { + "title": "Elevation from MSL", + "allOf": [ + { + "$ref": "#/components/schemas/ValueWithUnit" + } + ], + "description": "Elevation from Mean Seal Level, downwards negative. The unit definition is found via 'elevationFromMsl.unitKey' in 'frameOfReference.units' dictionary." + }, + "latitude": { + "title": "WGS 84 Latitude", + "type": "number", + "description": "WGS 84 latitude value in degrees (dega)" + }, + "longitude": { + "title": "WGS 84 Longitude", + "type": "number", + "description": "WGS 84 longitude value in degrees (dega)" + } + }, + "additionalProperties": false, + "description": "The base model forbids fields which are not declared initially in the pydantic model" + }, + "GetLogDataResponse": { + "oneOf": [ + { + "title": "SplitFormat", + "type": "object", + "properties": { + "data": { + "title": "Data", + "anyOf": [ + { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + }, + { + "type": "array", + "items": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + } + } + ] + }, + "columns": { + "title": "Columns", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + }, + "index": { + "title": "Index", + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + } + }, + "required": [ + "data" + ] + }, + { + "title": "IndexFormat", + "type": "object", + "properties": { + "TODO": { + "title": "Todo", + "type": "string" + } + }, + "required": [ + "TODO" + ] + }, + { + "title": "ColumnFormat", + "type": "object", + "properties": { + "TODO": { + "title": "Todo", + "type": "string" + } + }, + "required": [ + "TODO" + ] + }, + { + "title": "RecordsFormat", + "type": "object", + "properties": { + "TODO": { + "title": "Todo", + "type": "string" + } + }, + "required": [ + "TODO" + ] + }, + { + "title": "ValuesFormat", + "type": "array", + "items": { + "type": "array", + "items": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + } + ] + } + } + } + ] + } + }, + "securitySchemes": { + "OpenDESBearerToken": { + "type": "http", + "scheme": "bearer" + } + } + } +} \ No newline at end of file diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/tests/dependencies/core_dependencies_test.postman_collection.json b/tests/dependencies/core_dependencies_test.postman_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..8ef98311337898f9da086037ad834772688e320d --- /dev/null +++ b/tests/dependencies/core_dependencies_test.postman_collection.json @@ -0,0 +1,1094 @@ +{ + "info": { + "_postman_id": "5c914854-d2c9-4565-b3ee-cab8d62dd8db", + "name": "Core dependencies test", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "Entitlements", + "item": [ + { + "name": "entitlements - get groups", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "", + " pm.expect(pm.response.json().groups).to.not.be.empty;", + "});", + "", + "", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/entitlements/v1/groups", + "host": [ + "{{base_url}}" + ], + "path": [ + "entitlements", + "v1", + "groups" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Storage", + "item": [ + { + "name": "storage - query all kinds", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + " pm.expect(pm.response.json().results).to.not.be.empty;", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/api/storage/v2/query/kinds", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "query", + "kinds" + ] + } + }, + "response": [] + }, + { + "name": "storage - get schema from logSetKind", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "", + " pm.expect(pm.response.json().schema).to.not.be.empty;", + "});", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/api/storage/v2/schemas/{{logSetKind}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "schemas", + "{{logSetKind}}" + ] + } + }, + "response": [] + }, + { + "name": "storage - create logset record Copy", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "const resobj = pm.response.json();", + "", + "pm.test(pm.info.requestName + \" [\" + 
pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.have.status(201);", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + " pm.expect(resobj.recordCount).to.eql(1);", + " pm.expect(resobj.recordIds.length).to.eql(1);", + " if (resobj.skippedRecordIds)", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);", + "});", + "", + "// stored the record id for the following tests", + "let record_id = resobj.recordIds[0];", + "console.log(\"created-logset-record-id\", record_id)", + "pm.variables.set(\"created-logset-record-id\", record_id);", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\n {\n \"kind\": \"{{logSetKind}}\",\n \"acl\": {\n \"owners\": [\"{{acl_owner}}\"],\n \"viewers\": [\"{{acl_viewer}}\"]\n },\n \"legal\": {\n \"legaltags\": [\"{{legal_tag}}\"],\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\n },\n \"data\": {\n \"msg\": \"WDMS dependency test to Data Ecosystem\"\n }\n }\n]", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/storage/v2/records?skipdupes=false", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "records" + ], + "query": [ + { + "key": "skipdupes", + "value": "false" + } + ] + } + }, + "response": [] + }, + { + "name": "storage - get logset record by id", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/api/storage/v2/records/{{created-logset-record-id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "records", + "{{created-logset-record-id}}" + ] + } + }, + "response": [] + }, + { + "name": "storage - delete logset record Copy", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.have.status(204);", + "});", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/api/storage/v2/records/{{created-logset-record-id}}:delete", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "records", + "{{created-logset-record-id}}:delete" + ] + } + }, + "response": [] + } + ] + }, + { + "name": "Search", + "item": [ + { + "name": "search setUp - dummy records creation", + "item": [ + { + "name": "search - create dummy logset 
record", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "const resobj = pm.response.json();", + "", + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.have.status(201);", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + " pm.expect(resobj.recordCount).to.eql(1);", + " pm.expect(resobj.recordIds.length).to.eql(1);", + " if (resobj.skippedRecordIds)", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);", + "});", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\n {\n \"id\": \"{{data_partition}}:doc:WDMS-dependencies-test-dummy-logset\",\n \"kind\": \"{{logSetKind}}\",\n \"acl\": {\n \"owners\": [\"{{acl_owner}}\"],\n \"viewers\": [\"{{acl_viewer}}\"]\n },\n \"legal\": {\n \"legaltags\": [\"{{legal_tag}}\"],\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\n },\n \"data\": {\n \"msg\": \"WDMS dependency test to Data Ecosystem\"\n }\n }\n]", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/storage/v2/records?skipdupes=false", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "records" + ], + "query": [ + { + "key": "skipdupes", + "value": "false" + } + ] + } + }, + "response": [] + }, + { + "name": "search - create dummy well record", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "const resobj = pm.response.json();", + "", + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.have.status(201);", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + " pm.expect(resobj.recordCount).to.eql(1);", + " pm.expect(resobj.recordIds.length).to.eql(1);", + " if (resobj.skippedRecordIds)", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);", + "});", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\n {\n \"id\": \"{{data_partition}}:doc:WDMS-dependencies-test-dummy-well\",\n \"kind\": \"{{wellKind}}\",\n \"acl\": {\n \"owners\": [\"{{acl_owner}}\"],\n \"viewers\": [\"{{acl_viewer}}\"]\n },\n \"legal\": {\n \"legaltags\": [\"{{legal_tag}}\"],\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\n },\n \"data\": {\n \"msg\": \"WDMS dependency test to Data Ecosystem\"\n }\n }\n]", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/storage/v2/records?skipdupes=false", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "records" + ], + "query": [ + { + "key": "skipdupes", + "value": "false" + } + ] + } + }, + "response": [] + }, + { + "name": "search - create dummy wellbore record", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "const 
resobj = pm.response.json();", + "", + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.have.status(201);", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + " pm.expect(resobj.recordCount).to.eql(1);", + " pm.expect(resobj.recordIds.length).to.eql(1);", + " if (resobj.skippedRecordIds)", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);", + "});", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\n {\n \"id\": \"{{data_partition}}:doc:WDMS-dependencies-test-dummy-wellbore\",\n \"kind\": \"{{wellboreKind}}\",\n \"acl\": {\n \"owners\": [\"{{acl_owner}}\"],\n \"viewers\": [\"{{acl_viewer}}\"]\n },\n \"legal\": {\n \"legaltags\": [\"{{legal_tag}}\"],\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\n },\n \"data\": {\n \"msg\": \"WDMS dependency test to Data Ecosystem\"\n }\n }\n]", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/storage/v2/records?skipdupes=false", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "records" + ], + "query": [ + { + "key": "skipdupes", + "value": "false" + } + ] + } + }, + "response": [] + }, + { + "name": "search - create dummy log record", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "const resobj = pm.response.json();", + "", + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {", + " pm.response.to.have.status(201);", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + " pm.expect(resobj.recordCount).to.eql(1);", + " pm.expect(resobj.recordIds.length).to.eql(1);", + " if (resobj.skippedRecordIds)", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);", + "});", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\n {\n \"id\": \"{{data_partition}}:doc:WDMS-dependencies-test-dummy-log\",\n \"kind\": \"{{logKind}}\",\n \"acl\": {\n \"owners\": [\"{{acl_owner}}\"],\n \"viewers\": [\"{{acl_viewer}}\"]\n },\n \"legal\": {\n \"legaltags\": [\"{{legal_tag}}\"],\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\n },\n \"data\": {\n \"msg\": \"WDMS dependency test to Data Ecosystem\"\n }\n }\n]", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/storage/v2/records?skipdupes=false", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "storage", + "v2", + "records" + ], + "query": [ + { + "key": "skipdupes", + "value": "false" + } + ] + } + }, + "response": [] + } + ], + "description": "This setup phase aims to create records with well known kinds (well, wellbore, logset, log) to ensure API calls to search service will not fail because of missing records kind. 
\r\nIndeed, search APIs will only return records that have been indexed. This indexation is triggered asynchronously after record creation. It could cause the failure of these tests the very first time they are run.", + "event": [ + { + "listen": "prerequest", + "script": { + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ] + }, + { + "name": "search logset", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "\r", + " pm.expect(pm.response.json().results).to.not.be.empty;\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true, + "connection": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "value": "", + "type": "text", + "disabled": true + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"kind\": \"{{logSetKind}}\",\r\n \"limit\": 30,\r\n \"returnedFields\": [\"id\", \"data.name\", \"legal\"]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/search/v2/query", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "search", + "v2", + "query" + ] + } + }, + "response": [] + }, + { + "name": "search wellbores", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "\r", + " pm.expect(pm.response.json().results).to.not.be.empty;\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true, + "connection": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"kind\": \"{{wellboreKind}}\",\r\n \"limit\": 30,\r\n \"returnedFields\": [\"id\", \"data.name\"]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/search/v2/query", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "search", + "v2", + "query" + ] + } + }, + "response": [] + }, + { + "name": "search well", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "\r", + " pm.expect(pm.response.json().results).to.not.be.empty;\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true, + "connection": true + } + }, + "request": { + "auth": { + "type": 
"bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"kind\": \"{{wellKind}}\",\r\n \"limit\": 30,\r\n \"returnedFields\": [\"id\", \"data.name\"]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/search/v2/query", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "search", + "v2", + "query" + ] + } + }, + "response": [] + }, + { + "name": "search logs", + "event": [ + { + "listen": "test", + "script": { + "exec": [ + "pm.test(pm.info.requestName + \" [\" + pm.variables.get('osduBaseUrl') + \"]\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "\r", + " pm.expect(pm.response.json().results).to.not.be.empty;\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true, + "connection": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"kind\": \"{{logKind}}\",\r\n \"limit\": 30,\r\n \"returnedFields\": [\"id\", \"data.name\"]\r\n}", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/api/search/v2/query", + "host": [ + "{{base_url}}" + ], + "path": [ + "api", + "search", + "v2", + "query" + ] + } + }, + "response": [] + } + ] + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "variable": [ + { + "key": "token", + "value": "" + }, + { + "key": "wellKind", + "value": "{{data_partition}}:wks:well:1.0.2" + }, + { + "key": "wellboreKind", + "value": "{{data_partition}}:wks:wellbore:1.0.6" + }, + { + "key": "logSetKind", + "value": "{{data_partition}}:wks:logSet:1.0.5" + }, + { + "key": "logKind", + "value": "{{data_partition}}:wks:log:1.0.5" + }, + { + "key": "data_partition", + "value": "" + }, + { + "key": "base_url", + "value": "" + }, + { + "key": "legal_tag", + "value": "" + }, + { + "key": "acl_domain", + "value": "contoso.com" + }, + { + "key": "acl_owner", + "value": "data.default.owners@{{data_partition}}.{{acl_domain}}" + }, + { + "key": "acl_viewer", + "value": "data.default.viewers@{{data_partition}}.{{acl_domain}}" + } + ] +} \ No newline at end of file diff --git a/tests/integration/README.md b/tests/integration/README.md new file mode 100644 index 0000000000000000000000000000000000000000..5c08eba4ff9419143b395c58add72c44d29d04bc --- /dev/null +++ b/tests/integration/README.md @@ -0,0 +1,274 @@ +# Wellbore DMS - Integration Tests + +## How to run integration tests + +### Requirement +```bash + pip install -r requirements_dev.txt +``` +see application [README.md](../../README.md) how to use a virtual environment. 
+ 
+A valid access token is also required. 
+ 
+## Run integration functional tests 
+ 
+The tests have been migrated from the Postman collection to pytest, and re-use the Postman 
+[variable feature](https://learning.postman.com/docs/sending-requests/variables/): variables are identified in the 
+request using double braces `{{MY_VARIABLE}}`. As in Postman, variables can be overridden at different scopes: 
+* test 'collection' level: see [wdms_variable.py](./functional/request_builders/wdms_variables.py) 
+* local environment (same format as Postman): see the file [local_environment.json](./functional/local_environment.json). 
+It is mainly a convenient way to set up the environment to run the tests locally. 
+* environment file provided on the pytest command line using the parameter `--environment=env_file`. 
+* parameter provided on the pytest command line using the parameter `--param=key:value`. 
+* locally in the test. 
+ 
+The precedence, from lowest to highest, is: 
+ 
+test collection < local environment < environment file in cmd line < parameter in cmd line < test. 
+ 
+ 
+### generate environment file 
+ 
+```bash 
+# Generate env json file 
+python tests/integration/gen_postman_env.py --token ${token} --base_url ${appUrl} --cloud_provider ${cloudProvider} --acl_domain ${acl_domain} --legal_tag ${legal_tag} --data_partition ${data_partition} 
+``` 
+ 
+### running the tests from command line 
+ 
+Tests are regular pytest tests. In addition, some custom parameters have been added (run `pytest ./functional --help` to get the full list): 
+ 
+* `--environment`: (similar to newman) use an environment file to override some variable values. 
+* `--timeout-request`: (similar to newman) specify a timeout for requests (milliseconds), 0 means no timeout. 
+* `--insecure`: (similar to newman) disable cert validation. 
+* `--param`: set one or several variables. The format is `variable_name: variable_value`. 
+* `--retry-on-error`: retry on specific http codes (only >500). 4 attempts max, with a delay of 10s between retries. 
+* `--log-request-level`: add info in the log for any request made. 0 = deactivated, 1 = one-line summary containing the 
+url and response code, 2 = full request and response details. Ideally use with [--log-file](https://docs.pytest.org/en/stable/logging.html). 
+* `--header`: header to set for any request (can be overridden at test level), format `header_name: header_value`. 
+* `--filter-tag`: filter tests in or out based on their tag (set with `@pytest.mark.tag("TAG")`, see the sketch after this list). Separate multiple tags with 
+"|". Prefix a tag with "!" to filter out tests with the given tag. Tags are case insensitive. 
+  * `--filter-tag=TAG1` will run all tests with TAG1. 
+  * `--filter-tag=!TAG2` will run all tests without TAG2. 
+  * `--filter-tag=TAG1|TAG2` will run all tests with TAG1 or TAG2. 
+  * `--filter-tag=TAG1|!TAG2` will run all tests with TAG1 that don't have TAG2. 
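+ 
+In the test code, a tag is just a pytest mark. A minimal sketch of a tagged test (the test name and body are illustrative only; the `tag` mark is the one referred to by `--filter-tag`): 
+ 
+```python 
+import pytest 
+ 
+ 
+@pytest.mark.tag("crud")   # selected by --filter-tag=crud, excluded by --filter-tag=!crud 
+def test_example_tagged(): 
+    # illustrative body only 
+    assert True 
+``` 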
+ 
+Example commands: 
+ 
+`pytest ./functional` => Run all tests. No environment is passed here, so make sure the token is set in the 
+local_environment.json file. 
+ 
+`pytest ./functional --param="token: MY_TOKEN"` => Run all tests, passing the token as a parameter. 
+ 
+`pytest ./functional --environment="./generated/postman_environment.json" --insecure --timeout-request=15000 --filter-tag=crud` 
+=> Run the tests tagged 'crud', using an environment file generated by the script gen_postman_env.py, with SSL validation disabled and 
+a 15-second request timeout. 
+ 
+For better logging in the console use `--log-cli-level=INFO|DEBUG` (by default log_cli is ON and log_cli_level is set to 
+INFO; see [pytest.ini](./functional/pytest.ini)). This produces this output on the console: 
+ 
+``` 
+collected 53 items 
+ 
+functional\tests\test_about.py::test_about 
+----------------------------------------------------------------------------------------------------------------- live log call ----------------------------------------------------------------------------------------------------------------- 
+16:38:40 [INFO] - test_about => GET https://open.opendes.cloud.slb-ds.com/api/os-wellbore-ddms/about 
+16:38:40 [INFO] - test_about <= status_code=200 (298 ms) 
+PASSED [ 1%] 
+functional\tests\test_about.py::test_version 
+----------------------------------------------------------------------------------------------------------------- live log call ----------------------------------------------------------------------------------------------------------------- 
+16:38:40 [INFO] - test_version => GET https://open.opendes.cloud.slb-ds.com/api/os-wellbore-ddms/version 
+16:38:40 [INFO] - test_version <= status_code=200 (245 ms) 
+PASSED [ 3%] 
+functional\tests\test_about.py::test_status SKIPPED [ 5%] 
+functional\tests\test_crud.py::test_crud_create_record[well] SKIPPED [ 7%] 
+functional\tests\test_crud.py::test_crud_create_record[wellbore] SKIPPED 
+... 
+functional\tests\test_search.py::test_search_logs_by_logset_attribute SKIPPED [100%] 
+ 
+========================================================================================================= 2 passed, 51 skipped in 0.73s ========================================================================================================= 
+``` 
+ 
+To export logs into a file use `--log-file=test_log.txt`. The level for the log file can be set independently: 
+`pytest ./functional --log-cli-level=INFO --log-file=test_log.txt --log-file-level=DEBUG` 
+=> the log dumped into the file uses level `DEBUG`, while what is printed on the console uses level `INFO`. 
+ 
+_Note_: `pytest` and `python -m pytest` are equivalent. 
+ 
+ 
+### add test 
+ 
+The tests use [requests](https://requests.readthedocs.io/en/master/) to make the calls to the wdms service. A test can be 
+implemented as any regular pytest test; otherwise, follow the same pattern used when the Postman integration tests were migrated to 
+pytest. 
+ 
+Create a [RequestRunner](./functional/request_runner.py) from a [Request](./functional/request_runner.py) definition: 
+ 
+```python 
+request_definition = Request( 
+    name='any name', 
+    method='POST',  # 'POST', 'GET', 'PATCH', 'DELETE' ... 
+    url='{{base_url}}/ddms/my_api', 
+    headers={ 
+        'accept': 'application/json', 
+        'data-partition-id': '{{data_partition}}', 
+        'Authorization': 'Bearer {{token}}', 
+    }, 
+    payload='{ "json": "{{MY_SPECIFIC_VARIABLE}}" }') 
+ 
+request_runner = RequestRunner(request_definition) 
+``` 
+ 
+#### payload formats 
+The payload can be anything supported by [requests](https://requests.readthedocs.io/en/latest/api/), which is _"Dictionary, list of 
+tuples, bytes, or file-like object"_: 
+ 
+* dictionary (a fuller sketch follows after this list): 
+```python 
+request_definition = Request(..., 
+    payload={ "response": 42 }) 
+``` 
+ 
+* bytes: 
+```python 
+request_definition = Request(..., 
+    payload=b"binary content") 
+``` 
+ 
+* file-like, e.g. StringIO: 
+```python 
+request_definition = Request(..., 
+    payload=StringIO(...)) 
+``` 
+ 
+* file-like, e.g. open file: 
+```python 
+with open("file_path") as file: 
+    rq = RequestRunner(Request(..., payload=file)) 
+    rq.call(...) 
+``` 
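+ 
+Putting the pieces together, here is a minimal sketch of a `Request` with a dictionary payload handed to a `RequestRunner` (the endpoint, headers and payload content below are illustrative only; the `Request`/`RequestRunner` usage follows the definitions above): 
+ 
+```python 
+# Illustrative sketch only: a PUT with a dictionary payload. 
+# Variable expressions such as {{base_url}} are resolved from the environment when the runner performs the call. 
+create_well = Request( 
+    name='create well (sketch)', 
+    method='PUT', 
+    url='{{base_url}}/ddms/v2/wells', 
+    headers={ 
+        'accept': 'application/json', 
+        'data-partition-id': '{{data_partition}}', 
+        'Authorization': 'Bearer {{token}}', 
+    }, 
+    payload={'data': {'name': 'wddms-e2e-record-0000'}})  # dictionary payload 
+ 
+runner = RequestRunner(create_well) 
+``` 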
+ 
+#### Assert for successful response or specific status code 
+```python 
+# build the request runner 
+request_runner = RequestRunner(my_request(...)) 
+ 
+# do the call and get the result 
+call_result = request_runner.call(...) 
+``` 
+ 
+To assert for any successful response: `call_result.assert_ok()`. 
+To assert for a specific status code: `call_result.assert_for_status(404)`. 
+ 
+Alternatively, use the argument `assert_status` in the `request_runner.call` method: 
+ 
+```python 
+# do the call, asserting a specific status code 
+call_result = request_runner.call(..., assert_status=404) 
+``` 
+ 
+_Note: A request is always fully logged in case of an unexpected status code._ 
+ 
+ 
+#### Variable substitution 
+Variable expressions (format `{{VARIABLE_NAME}}`) can be used in the URL, headers and payload. 
+ 
+Use the `with_wdms_env` fixture from [fixtures.py](./functional/tests/fixtures.py) to get the environment variables, with 
+general variables such as token, data_partition, base_url ... set up for the run. Then use the `call` method from 
+`RequestRunner` to do the call. Here is an example where a test-specific variable is also provided at call time: 
+ 
+```python 
+def my_test(with_wdms_env): 
+    request_definition = Request(...) 
+    request_runner = RequestRunner(request_definition) 
+    result = request_runner.call(with_wdms_env, assert_code=200, MY_SPECIFIC_VARIABLE="42") 
+ 
+    response_object = result.get_response_obj() 
+    assert response_object.some_field == "expected" 
+``` 
+ 
+### Traceability 
+ 
+Tests automatically add a `correlation-id` header, which can help to investigate issues if needed. It is formatted as 
+_wdms_e2e\_{TEST_NAME}\_{UUID}_, e.g. `wdms_e2e_test_about_402ec3e0-fd0e-4fee-9ad0-7dee0761036d` 
+ 
+It can be overridden (like any header) by passing it on the pytest command line: 
+ 
+`pytest ./functional --header="correlation-id: MY_CORRELATION_ID"` 
+ 
+To get the test-specific correlation id, the easiest way is to enable full request log verbosity using 
+`--log-request-level=2`: 
+ 
+`pytest ./functional/tests/test_about.py::test_about --log-request-level=2` 
+ 
+=> runs only test_about; here's the output: 
+``` 
+functional\tests\test_about.py::test_about 
+-------------------------------------------------------------------------------------------------------- live log call --------------------------------------------------------------------------------------------------------- 
+11:26:57 [INFO] - test_about => GET https://open.opendes.cloud.slb-ds.com/api/os-wellbore-ddms/about 
+11:26:57 [INFO] - test_about <= 
+11:26:57 [INFO] - [200] - https://open.opendes.cloud.slb-ds.com/api/os-wellbore-ddms/about 
+start: [2020-11-25 11:26:57.413062], end: 2020-11-25 11:26:57.702802, elapsed: 289 ms 
+ 
+============ REQUEST =============== 
+URL: [GET] https://open.opendes.cloud.slb-ds.com/api/os-wellbore-ddms/about) 
+headers: 
+ - User-Agent: python-requests/2.24.0 
+ - Accept-Encoding: gzip, deflate 
+ - accept: application/json 
+ - Connection: close 
+ - correlation-id: wdms_e2e_test_about_a3b54d19-73f9-4047-aadb-11592fc0d949 
+ 
+ 
+============ RESPONSE =============== 
+headers: 
+ - date: Wed, 25 Nov 2020 10:26:57 GMT 
+ - server: istio-envoy 
+ - content-length: 114 
+ - content-type: application/json 
+ - x-envoy-upstream-service-time: 4 
+ - Via: 1.1 google 
+ - Alt-Svc: clear 
+ - Connection: close 
+body: ------------------------------------------- 
+{"service":"Wellbore DDMS OSDU","version":"0.2.20112501","buildNumber":"20112501_master","cloudEnvironment":"gcp"} 
+ 
+PASSED 
+``` 
+ 
+The request headers contain it: 
+ 
+`correlation-id: wdms_e2e_test_about_a3b54d19-73f9-4047-aadb-11592fc0d949` 
+ 
+ 
+## Run integration tests using the `os-wellbore-ddms` pipeline 
+ 
+The integration tests run as part of the test (e2e) stage of the `os-wellbore-ddms` pipeline, when there is a commit to 
+an active PR or to the `master` 
branch. + +## Run integration tests for authentication + +The integration tests for authentication were implemented in Python for the following reasons: + +1. There were previous authentication integration tests written in Python for the v1 version of gcp that allowed the reusability from the existing base. +2. Newman and postman had complexities that were discovered in the previous testing framework for v1 to be able to accept and work with token and validations. +3. If these tests needed to be added for other services, this will allow for an accessible way to reuse the code that is written in python. +4. To keep integration tests simple, python was decided as the route. + +The integration tests for authentication are located in the `tests/integration/security` folder. They can be running by following the guide [here](./security/README.md) + +### Deployment-related record ids + +Some tests use the search APIs and are relying on the indexation service for that matter. As the indexation process is +asynchronous, those records can not be created, checked then deleted by the test script just like the others. + +Here we create records with hardcoded ids, which can trigger other problems when several CICDs are run in parallel. +To mitigate the issue, those records ids follow a `{{data_partition}}:{{cloud_provider}}-{{record_id}}` pattern to avoid collision when different cloud provider deployments access the same data ecosystem backend. + +### Test results + +The deployment pipeline fails if any of the tests fail. The test results can be viewed in the pipeline logs +and further troubleshooting can de done by running the failed tests locally using Newman or Postman. diff --git a/tests/integration/WellboreDDMSv2.postman_collection.json b/tests/integration/WellboreDDMSv2.postman_collection.json new file mode 100644 index 0000000000000000000000000000000000000000..337a0deee25cffac1a2f19799fb33bcb90be82b6 --- /dev/null +++ b/tests/integration/WellboreDDMSv2.postman_collection.json @@ -0,0 +1,6079 @@ +{ + "info": { + "_postman_id": "bf5d12ea-6d52-4458-8cb9-9b00eb7594eb", + "name": "WellboreDDMSv2", + "schema": "https://schema.getpostman.com/json/collection/v2.1.0/collection.json" + }, + "item": [ + { + "name": "CRUD_tests", + "item": [ + { + "name": "recursive_delete", + "item": [ + { + "name": "DELETE well (recursive)", + "event": [ + { + "listen": "test", + "script": { + "id": "6714cc20-5cef-4b07-bac1-ddc9cf898f7b", + "exec": [ + "pm.test(\"status code is 204\", function () {\r", + " pm.expect(pm.response.code).to.eql(204);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wells/{{del_recursive_well_id}}?recursive=true", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wells", + "{{del_recursive_well_id}}" + ], + "query": [ + { + "key": "recursive", + "value": "true" + } + ] + } + }, + "response": [] + }, + { + "name": "GET wellbore (deleted)", + "event": [ + { + "listen": "test", + "script": { + "id": "2f07a3ee-5000-453b-bcbc-e81f42fe4804", + "exec": [ + "pm.test(\"status code is 404\", function () {\r", + " pm.expect(pm.response.code).to.eql(404);\r", + "});\r", + "" + ], + "type": 
"text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wellbores/{{del_recursive_wellbore_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wellbores", + "{{del_recursive_wellbore_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET logset (deleted)", + "event": [ + { + "listen": "test", + "script": { + "id": "a88fc3d6-0f24-4818-bdbe-59f052379161", + "exec": [ + "pm.test(\"status code is 404\", function () {\r", + " pm.expect(pm.response.code).to.eql(404);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logsets/{{del_recursive_logset_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logsets", + "{{del_recursive_logset_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET log (deleted)", + "event": [ + { + "listen": "test", + "script": { + "id": "a08ef8b2-4d84-4dae-8974-8363d5dd4f5e", + "exec": [ + "pm.test(\"status code is 404\", function () {\r", + " pm.expect(pm.response.code).to.eql(404);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{del_recursive_log_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{del_recursive_log_id}}" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "log", + "item": [ + { + "name": "PUT logs", + "event": [ + { + "listen": "test", + "script": { + "id": "67a8e8a7-bb53-4d74-ac6f-4f30d821c489", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(2);\r", + " pm.expect(resobj.recordIds.length).to.eql(2);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0];\r", + "pm.environment.set(\"record_id\", record_id);\r", + "\r", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": 
"52736c89-003f-4ee8-8f75-c35039d2f918", + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0000\", \r\n \"log\": {\r\n \"name\": \"wellbore-ddms-test-log_0000\"\r\n }\r\n },\r\n \"kind\": \"{{data_partition}}:wks:log:1.0.5\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0001\", \r\n \"relationships\": {\r\n \"well\": {\"id\":\"{{del_recursive_well_id}}\"},\r\n \"logset\": {\"id\":\"{{del_recursive_logset_id}}\"}\r\n }\r\n },\r\n \"kind\": \"{{data_partition}}:wks:log:1.0.5\",\r\n \"id\": \"{{del_recursive_log_id}}\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n}\r\n\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs" + ] + } + }, + "response": [] + }, + { + "name": "GET log", + "event": [ + { + "listen": "test", + "script": { + "id": "16b28cdf-57ce-4893-8059-e9ccc172e8d0", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{record_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET log versions", + "event": [ + { + "listen": "test", + "script": { + "id": "e0c6171c-c542-42b3-8ac5-8b3ab701280f", + "exec": [ + "let resobj = pm.response.json();\r", + "let current_id = pm.environment.get(\"record_id\")\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + " 
pm.expect(resobj.recordId).to.eql(current_id);\r", + "});\r", + "\r", + "let record_version = resobj.versions[0]\r", + "pm.environment.set(\"record_version\", record_version)\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}/versions", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{record_id}}", + "versions" + ] + } + }, + "response": [] + }, + { + "name": "GET log version", + "event": [ + { + "listen": "test", + "script": { + "id": "b8705be5-6b48-4228-99b1-d7f8a609754a", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}/versions/{{record_version}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{record_id}}", + "versions", + "{{record_version}}" + ] + } + }, + "response": [] + }, + { + "name": "PUT log data", + "event": [ + { + "listen": "test", + "script": { + "id": "26b3f20e-75d8-49d2-9fb8-400cdab12cbe", + "exec": [ + "let resobj = pm.response.json();\r", + "let current_id = pm.environment.get(\"record_id\")\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.recordCount).to.eql(1);\r", + " pm.expect(resobj.recordIds.length).to.eql(1);\r", + " pm.expect(resobj.recordIds[0]).to.eql(current_id);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n \"columns\": [\r\n 
\"Ref\",\r\n \"col_1\",\r\n \"col_2\"\r\n ],\r\n \"index\": [\r\n 0,\r\n 1,\r\n 2\r\n ],\r\n \"data\": [\r\n [\r\n 1,\r\n 10,\r\n 11\r\n ],\r\n [\r\n 1.5,\r\n 20,\r\n 21\r\n ],\r\n [\r\n 2,\r\n 30,\r\n 31\r\n ]\r\n ]\r\n}" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}/data?orient=split", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{record_id}}", + "data" + ], + "query": [ + { + "key": "orient", + "value": "split" + } + ] + } + }, + "response": [] + }, + { + "name": "GET log data", + "event": [ + { + "listen": "test", + "script": { + "id": "ac2ac5de-7779-4d20-b28a-18761fc9cf2f", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data[0][1]).to.eql(10);\r", + " pm.expect(resobj.data[1][1]).to.eql(20);\r", + " pm.expect(resobj.data[2][1]).to.eql(30);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}/data?orient=split", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{record_id}}", + "data" + ], + "query": [ + { + "key": "orient", + "value": "split" + } + ] + } + }, + "response": [] + }, + { + "name": "DELETE log", + "event": [ + { + "listen": "test", + "script": { + "id": "f9fe87a7-8e7e-42d1-ac2a-337a18e51659", + "exec": [ + "pm.test(\"status code is 204\", function () {\r", + " pm.expect(pm.response.code).to.eql(204);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{record_id}}" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "well", + "item": [ + { + "name": "PUT wells", + "event": [ + { + "listen": "test", + "script": { + "id": "42bae0bd-e201-425f-8904-78ce83e04024", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(2);\r", + " pm.expect(resobj.recordIds.length).to.eql(2);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0];\r", 
+ "pm.environment.set(\"record_id\", record_id);\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0000\"},\r\n \"kind\": \"{{data_partition}}:wks:well:1.0.2\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0001\"},\r\n \"kind\": \"{{data_partition}}:wks:well:1.0.2\",\r\n \"id\": \"{{del_recursive_well_id}}\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/wells", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wells" + ] + } + }, + "response": [] + }, + { + "name": "GET well", + "event": [ + { + "listen": "test", + "script": { + "id": "b25b87b5-f956-48f7-8756-162c6643d3e7", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wells/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wells", + "{{record_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET well versions", + "event": [ + { + "listen": "test", + "script": { + "id": "9ec74ef6-fef0-4063-8058-86c37513b5eb", + "exec": [ + "let resobj = pm.response.json();\r", + "let current_id = pm.environment.get(\"record_id\")\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + " pm.expect(resobj.recordId).to.eql(current_id);\r", + "});\r", + "\r", + "let record_version = resobj.versions[0]\r", + "pm.environment.set(\"record_version\", record_version)" + ], + "type": "text/javascript" + } + } + ], + 
"protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wells/{{record_id}}/versions", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wells", + "{{record_id}}", + "versions" + ] + } + }, + "response": [] + }, + { + "name": "GET well version", + "event": [ + { + "listen": "test", + "script": { + "id": "c0ea9743-4b66-4082-afdc-a4be3cdd5168", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wells/{{record_id}}/versions/{{record_version}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wells", + "{{record_id}}", + "versions", + "{{record_version}}" + ] + } + }, + "response": [] + }, + { + "name": "DELETE well", + "event": [ + { + "listen": "test", + "script": { + "id": "cab4411e-4f51-4eca-a7a6-e0ac4ab2ee12", + "exec": [ + "pm.test(\"status code is 204\", function () {\r", + " pm.expect(pm.response.code).to.eql(204);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wells/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wells", + "{{record_id}}" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "wellbore", + "item": [ + { + "name": "PUT wellbores", + "event": [ + { + "listen": "test", + "script": { + "id": "bc2ef1fb-e7b3-45de-8dd3-cd9a5b286574", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(2);\r", + " pm.expect(resobj.recordIds.length).to.eql(2);\r", 
+ " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0];\r", + "pm.environment.set(\"record_id\", record_id);\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0000\"},\r\n \"kind\": \"{{data_partition}}:wks:wellbore:1.0.6\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0001\", \r\n \"relationships\": {\r\n \"well\": {\"id\":\"{{del_recursive_well_id}}\"}\r\n }\r\n },\r\n \"kind\": \"{{data_partition}}:wks:wellbore:1.0.6\",\r\n \"id\": \"{{del_recursive_wellbore_id}}\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/wellbores", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wellbores" + ] + } + }, + "response": [] + }, + { + "name": "GET wellbore", + "event": [ + { + "listen": "test", + "script": { + "id": "aa07ecd8-c5f7-442f-b1b5-c8cf8363f534", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wellbores/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wellbores", + "{{record_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET wellbore versions", + "event": [ + { + "listen": "test", + "script": { + "id": "b97bc532-3117-4e2f-b46e-9bcdd91848e6", + "exec": [ + "let resobj = pm.response.json();\r", + "let current_id = pm.environment.get(\"record_id\")\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "pm.test(\"status code is 200\", function () 
{\r", + " pm.expect(pm.response.code).to.eql(200);\r", + " pm.expect(resobj.recordId).to.eql(current_id);\r", + "});\r", + "\r", + "let record_version = resobj.versions[0]\r", + "pm.environment.set(\"record_version\", record_version)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wellbores/{{record_id}}/versions", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wellbores", + "{{record_id}}", + "versions" + ] + } + }, + "response": [] + }, + { + "name": "GET wellbore version", + "event": [ + { + "listen": "test", + "script": { + "id": "91f62a45-c5c3-4f87-a886-614444de43e0", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wellbores/{{record_id}}/versions/{{record_version}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wellbores", + "{{record_id}}", + "versions", + "{{record_version}}" + ] + } + }, + "response": [] + }, + { + "name": "DELETE wellbore", + "event": [ + { + "listen": "test", + "script": { + "id": "eba64719-0e2e-4bdf-a323-8621e5510a09", + "exec": [ + "pm.test(\"status code is 204\", function () {\r", + " pm.expect(pm.response.code).to.eql(204);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/wellbores/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wellbores", + "{{record_id}}" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "logset", + "item": [ + { + "name": "PUT logsets", + "event": [ + { + "listen": "test", + "script": { + "id": "5571a449-dea3-4fd6-a356-0fa91cd1b769", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + 
"pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(2);\r", + " pm.expect(resobj.recordIds.length).to.eql(2);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0];\r", + "pm.environment.set(\"record_id\", record_id);\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0000\"},\r\n \"kind\": \"{{dipsetKind}}\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0001\", \r\n \"relationships\": {\r\n \"well\": {\"id\":\"{{del_recursive_well_id}}\"},\r\n \"wellbore\": {\"id\":\"{{del_recursive_wellbore_id}}\"}\r\n }\r\n },\r\n \"kind\": \"{{dipsetKind}}\",\r\n \"id\": \"{{del_recursive_logset_id}}\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/logsets", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logsets" + ] + } + }, + "response": [] + }, + { + "name": "GET logset", + "event": [ + { + "listen": "test", + "script": { + "id": "2bfed0fb-faa8-4f99-93cb-e89acbede791", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logsets/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logsets", + "{{record_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET logset versions", + "event": [ + { + "listen": "test", + "script": { + "id": "80913ca5-0898-4e94-b957-feda6d292f07", + 
"exec": [ + "let resobj = pm.response.json();\r", + "let current_id = pm.environment.get(\"record_id\")\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + " pm.expect(resobj.recordId).to.eql(current_id);\r", + "});\r", + "\r", + "let record_version = resobj.versions[0]\r", + "pm.environment.set(\"record_version\", record_version)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logsets/{{record_id}}/versions", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logsets", + "{{record_id}}", + "versions" + ] + } + }, + "response": [] + }, + { + "name": "GET logset version", + "event": [ + { + "listen": "test", + "script": { + "id": "9381a426-9a06-46f1-aae1-121e2422819a", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logsets/{{record_id}}/versions/{{record_version}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logsets", + "{{record_id}}", + "versions", + "{{record_version}}" + ] + } + }, + "response": [] + }, + { + "name": "DELETE logset", + "event": [ + { + "listen": "test", + "script": { + "id": "e5dbf0e2-02e8-4c5c-915f-c4ddc4f6c49b", + "exec": [ + "pm.test(\"status code is 204\", function () {\r", + " pm.expect(pm.response.code).to.eql(204);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logsets/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logsets", + "{{record_id}}" + ] + } + }, + "response": [] + } + ], 
+ "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "marker", + "item": [ + { + "name": "PUT markers", + "event": [ + { + "listen": "test", + "script": { + "id": "c01be4b3-e242-4306-af7c-06345d391c74", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(1);\r", + " pm.expect(resobj.recordIds.length).to.eql(1);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\r\n \"name\": \"wddms-e2e-record-0000\",\r\n \"md\": {\r\n \"unitKey\": \"Unknown\",\r\n \"value\": 0\r\n }\r\n },\r\n \"kind\": \"{{data_partition}}:wks:marker:1.0.4\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/markers", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "markers" + ] + } + }, + "response": [] + }, + { + "name": "GET marker", + "event": [ + { + "listen": "test", + "script": { + "id": "47a22362-05b6-47ce-ac7f-e2e536ba29b3", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/markers/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "markers", + "{{record_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET marker versions", + "event": [ + { + "listen": "test", + "script": { + "id": "01b6caf9-2072-409d-ac58-caa00d7ccfa4", + "exec": [ + "let resobj = pm.response.json();\r", + "let current_id = pm.environment.get(\"record_id\")\r", + "\r", + "pm.test(\"response must be valid\", 
function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + " pm.expect(resobj.recordId).to.eql(current_id);\r", + "});\r", + "\r", + "let record_version = resobj.versions[0]\r", + "pm.environment.set(\"record_version\", record_version)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/markers/{{record_id}}/versions", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "markers", + "{{record_id}}", + "versions" + ] + } + }, + "response": [] + }, + { + "name": "GET marker version", + "event": [ + { + "listen": "test", + "script": { + "id": "bc15b83d-1fa3-4ad6-8dc8-3114fcc59450", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/markers/{{record_id}}/versions/{{record_version}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "markers", + "{{record_id}}", + "versions", + "{{record_version}}" + ] + } + }, + "response": [] + }, + { + "name": "DELETE marker", + "event": [ + { + "listen": "test", + "script": { + "id": "d3d9a448-db73-464b-9ba2-334555adf7e3", + "exec": [ + "pm.test(\"status code is 204\", function () {\r", + " pm.expect(pm.response.code).to.eql(204);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/markers/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "markers", + "{{record_id}}" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "trajectory", + "item": [ + { + "name": "PUT trajectories", + "event": [ 
+ { + "listen": "test", + "script": { + "id": "09421417-226b-49f7-998d-7383818d9378", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(1);\r", + " pm.expect(resobj.recordIds.length).to.eql(1);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0000\"},\r\n \"kind\": \"{{data_partition}}:wks:trajectory:1.0.5\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/trajectories", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "trajectories" + ] + } + }, + "response": [] + }, + { + "name": "GET trajectory", + "event": [ + { + "listen": "test", + "script": { + "id": "516a930d-fcc3-497b-80e5-4c3c65632670", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/trajectories/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "trajectories", + "{{record_id}}" + ] + } + }, + "response": [] + }, + { + "name": "GET trajectory versions", + "event": [ + { + "listen": "test", + "script": { + "id": "c99ea2af-072a-4684-9b8d-763da6d4f622", + "exec": [ + "let resobj = pm.response.json();\r", + "let current_id = pm.environment.get(\"record_id\")\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "pm.test(\"status code is 200\", function () {\r", + " 
pm.expect(pm.response.code).to.eql(200);\r", + " pm.expect(resobj.recordId).to.eql(current_id);\r", + "});\r", + "\r", + "let record_version = resobj.versions[0]\r", + "pm.environment.set(\"record_version\", record_version)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/trajectories/{{record_id}}/versions", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "trajectories", + "{{record_id}}", + "versions" + ] + } + }, + "response": [] + }, + { + "name": "GET trajectory version", + "event": [ + { + "listen": "test", + "script": { + "id": "287cfc01-9e91-4662-89cb-23f3c86bf50a", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.name).to.eql('wddms-e2e-record-0000');\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/trajectories/{{record_id}}/versions/{{record_version}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "trajectories", + "{{record_id}}", + "versions", + "{{record_version}}" + ] + } + }, + "response": [] + }, + { + "name": "DELETE trajectory", + "event": [ + { + "listen": "test", + "script": { + "id": "8eba982e-7e31-47c3-91db-4e8f91155dbb", + "exec": [ + "pm.test(\"status code is 204\", function () {\r", + " pm.expect(pm.response.code).to.eql(204);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/trajectories/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "trajectories", + "{{record_id}}" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "dips", + "item": [ + { + "name": "Put dipset", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "865c7cb3-be63-4f26-a9d0-ba26be8afbd8", + "exec": [ + "pm.collectionVariables.set(\"dipsetName\", 
\"wddms-test-dipset-\"+pm.variables.replaceIn(\"{{$randomFirstName}}\"))", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "d339d425-c23b-4a26-a87d-4b1ce2528f45", + "exec": [ + "", + "", + "pm.test(\"status code is 200\", function () {", + " pm.response.to.have.status(200);", + "});", + "", + "", + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "const jsonData = pm.response.json();", + "", + "pm.test(\"response is as expected\", () => {", + " pm.expect(jsonData.recordCount).to.eql(1); ", + " pm.expect(jsonData.recordIds.length).to.eql(1);", + "});", + "", + "let record_id = jsonData.recordIds[0]", + "pm.collectionVariables.set(\"dipsetId\", record_id);" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "PUT", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "[\n {\n \"acl\": {\n \"owners\": [\n \"{{acl_owner}}\"\n ],\n \"viewers\": [\n \"{{acl_viewer}}\"\n ]\n },\n \"data\": {\n \"name\": \"{{dipsetName}}\"\n },\n \"kind\": \"{{dipsetKind}}\",\n \"legal\": {\n \"legaltags\": [\n \"{{legal_tag}}\"\n ],\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\n }\n }\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets" + ] + } + }, + "response": [] + }, + { + "name": "Get dipset", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "671ed146-31c6-477d-8ee0-8c3090621b11", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "9b8d67b4-b826-4b06-9252-9f4844d3321a", + "exec": [ + "pm.test(\"status code is 200\", function () {", + " pm.response.to.have.status(200);", + " // pm.expect(pm.response.code).to.be.oneOf([201, 202]);", + " // pm.response.to.have.status(\"Created\");", + "});", + "", + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "", + "", + "pm.test(\"Response validate schema\", () => {", + " const schema = {", + " \"type\": \"object\",", + " \"properties\":{", + " \"acl\": {", + " \"type\": \"object\",", + " \"properties\":{", + " \"viewers\": {\"type\":\"array\"},", + " \"owners\": {\"type\":\"array\"}", + " }", + " },", + " \"data\": {", + " \"type\": \"object\",", + " \"properties\":{", + "", + " }", + "", + " },", + " \"id\": {\"type\": \"string\"},", + " \"kind\": {\"type\": \"string\"},", + " \"legal\": {", + " \"type\": \"object\",", + " \"properties\":{", + " \"legaltags\": {\"type\":\"array\"},", + " \"otherRelevantDataCountries\": {\"type\":\"array\"}", + " }", + " },", + " },", + " \"required\": [ \"acl\", \"data\", \"id\", \"kind\", \"legal\" ]", + " };", + " pm.response.to.have.jsonSchema(schema);", + "});", + "", + "", + "pm.test(\"response data is as expected\", () => {", + " const jsonData = pm.response.json();", + " pm.expect(jsonData.data.name).to.eql(pm.collectionVariables.get(\"dipsetName\"));", + " pm.expect(jsonData.kind).to.eql(Common.resolve_variable(\"dipsetKind\"));", + "});", + "", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": 
"{{base_url}}/ddms/v2/dipsets/{{dipsetId}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}" + ] + } + }, + "response": [] + }, + { + "name": "Create dips", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "6b800a03-7463-4463-bf7d-6449f528ff43", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "06958657-7cc8-4aff-a871-91efa5a3cd20", + "exec": [ + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "", + "pm.test(\"response data is as expected\", () => { ", + " const expected = [", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 0.12345678912121212", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 12.0", + " },", + " \"quality\": {", + " \"unitKey\": \"unitless\",", + " \"value\": 1.0", + " },", + " \"xCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1.0", + " },", + " \"yCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2.0", + " },", + " \"zCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 3.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 34.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 27.0", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 3000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 3.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 1.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 4000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 4.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 2.0", + " },", + " \"classification\": \"breakout\"", + " }", + " ]", + "", + " pm.expect(_.isEqual(pm.response.json(), expected)).to.be.true", + "});", + "", + "", + "// Saving the value for later use", + "pm.collectionVariables.set(\"expected_dips\", JSON.stringify(pm.response.json(), null, 2));", + "", + "", + "", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "[\n {\n \"reference\": {\"unitKey\":\"meter\", \"value\":2000.0},\n \"azimuth\": {\"unitKey\":\"dega\", \"value\":34},\n \"inclination\": {\"unitKey\":\"dega\", \"value\":27}\n },\n {\n \"reference\": {\"unitKey\":\"meter\", \"value\":1000.0},\n \"azimuth\": {\"unitKey\":\"dega\", \"value\":0.123456789121212121212},\n \"inclination\": {\"unitKey\":\"dega\", \"value\":12},\n \"quality\": {\"unitKey\":\"unitless\", \"value\":1},\n \"xCoordinate\": {\"unitKey\":\"m\", \"value\":1},\n \"yCoordinate\": {\"unitKey\":\"m\", \"value\":2},\n \"zCoordinate\": {\"unitKey\":\"m\", \"value\":3},\n \"classification\": \"fracture\"\n },\n {\n \"reference\": {\"unitKey\":\"meter\", \"value\":4000.0},\n 
\"azimuth\": {\"unitKey\":\"dega\", \"value\":4},\n \"inclination\": {\"unitKey\":\"dega\", \"value\":2},\n \"classification\": \"breakout\"\n } ,\n {\n \"reference\": {\"unitKey\":\"meter\", \"value\":3000.0},\n \"azimuth\": {\"unitKey\":\"dega\", \"value\":3},\n \"inclination\": {\"unitKey\":\"dega\", \"value\":1},\n \"classification\": \"fracture\"\n } \n]\n", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}", + "dips" + ] + } + }, + "response": [] + }, + { + "name": "Get dips", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "77a5c99f-d1db-4cbd-927c-8321567231d2", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "a6175ac9-bed8-4093-bcc8-e3dc7752b8cd", + "exec": [ + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "", + "pm.test(\"response data is as expected\", () => {", + " const jsonData = pm.response.json();", + " pm.expect(jsonData).to.eql(JSON.parse(pm.collectionVariables.get(\"expected_dips\")));", + "});", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}", + "dips" + ], + "query": [ + { + "key": "index", + "value": "12", + "disabled": true + }, + { + "key": "limit", + "value": "-1", + "disabled": true + } + ] + } + }, + "response": [] + }, + { + "name": "Get dip from index", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "408e79bd-7a01-4e8b-8f04-381ff0e7ffc4", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "c0b0a3c7-fb17-4f0a-9fb0-a51bc90717c4", + "exec": [ + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "", + "pm.test(\"response data is as expected\", () => { ", + " const expected = {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 34.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 27.0", + " }", + " } ", + "", + " pm.expect(_.isEqual(pm.response.json(), expected)).to.be.true", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disableBodyPruning": true + }, + "request": { + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/1", + "host": [ 
+ "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}", + "dips", + "1" + ] + } + }, + "response": [] + }, + { + "name": "Insert dips", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "8a4d3608-f52d-42ce-957f-4b54dc377436", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "525e20be-249c-455a-9a1c-84acb551f6af", + "exec": [ + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "", + "", + "pm.test(\"response data is as expected\", () => { ", + " const expected = [", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 888.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 666.66", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 99.99", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 0.12345678912121212", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 12.0", + " },", + " \"quality\": {", + " \"unitKey\": \"unitless\",", + " \"value\": 1.0", + " },", + " \"xCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1.0", + " },", + " \"yCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2.0", + " },", + " \"zCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 3.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1500.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 77.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 81.0", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 34.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 27.0", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 3000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 3.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 1.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 4000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 4.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 2.0", + " },", + " \"classification\": \"breakout\"", + " }", + " ]", + "", + " pm.expect(_.isEqual(pm.response.json(), expected)).to.be.true", + "});", + "", + "", + "" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "POST", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "[\n\n {\n \"reference\": {\"unitKey\":\"meter\", \"value\":1500.0},\n \"azimuth\": {\"unitKey\":\"dega\", \"value\":77},\n \"inclination\": {\"unitKey\":\"dega\", \"value\":81}\n },\n {\n \"reference\": {\"unitKey\":\"meter\", \"value\":888.0},\n \"azimuth\": {\"unitKey\":\"dega\", \"value\":666.66},\n \"inclination\": {\"unitKey\":\"dega\", 
\"value\":99.99}\n }\n \n]\n", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/insert", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}", + "dips", + "insert" + ] + } + }, + "response": [] + }, + { + "name": "Patch dip", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "92cfff94-a54f-472b-b35f-7114b1f54428", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "9b37f7f8-0b14-4d56-80f0-acfcdbb7bdfd", + "exec": [ + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "", + "", + "pm.test(\"response data is as expected\", () => { ", + " const expected = [", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 0.12345678912121212", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 12.0", + " },", + " \"quality\": {", + " \"unitKey\": \"unitless\",", + " \"value\": 1.0", + " },", + " \"xCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1.0", + " },", + " \"yCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2.0", + " },", + " \"zCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 3.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 8.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 12.0", + " },", + " \"xCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 12.0", + " },", + " \"yCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 12.0", + " },", + " \"zCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 12.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1500.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 77.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 81.0", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 34.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 27.0", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 3000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 3.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 1.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 4000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 4.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 2.0", + " },", + " \"classification\": \"breakout\"", + " }", + " ]", + "", + " pm.expect(_.isEqual(pm.response.json(), expected)).to.be.true", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "PATCH", + "header": [ + { + "key": 
"data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "\n{\n \"reference\": {\"unitKey\":\"meter\", \"value\":1000.0},\n \"azimuth\": {\"unitKey\":\"dega\", \"value\":8},\n \"inclination\": {\"unitKey\":\"dega\", \"value\":12},\n \"classification\": \"fracture\",\n \"quality\" : {\"unitKey\":\"unitless\", \"value\":0},\n \"xCoordinate\" : {\"unitKey\":\"meter\", \"value\":12},\n \"yCoordinate\" : {\"unitKey\":\"meter\", \"value\":12},\n \"zCoordinate\" : {\"unitKey\":\"meter\", \"value\":12}\n\n}\n", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/0?=", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}", + "dips", + "0" + ], + "query": [ + { + "key": "", + "value": "" + } + ] + } + }, + "response": [] + }, + { + "name": "Delete dip", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "076e65ce-75ab-46d2-8b39-e41e2466d52a", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "f1bfc7d0-7e2d-46fa-be35-8d30ee994690", + "exec": [ + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "pm.test(\"response data is as expected\", () => { ", + " const expected = [", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 8.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 12.0", + " },", + " \"xCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 12.0", + " },", + " \"yCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 12.0", + " },", + " \"zCoordinate\": {", + " \"unitKey\": \"meter\",", + " \"value\": 12.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 1500.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 77.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 81.0", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 2000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 34.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 27.0", + " }", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 3000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 3.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 1.0", + " },", + " \"classification\": \"fracture\"", + " },", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 4000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 4.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 2.0", + " },", + " \"classification\": \"breakout\"", + " }", + " ]", + "", + " pm.expect(_.isEqual(pm.response.json(), expected)).to.be.true", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "DELETE", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "body": { + 
"mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "json" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/0", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}", + "dips", + "0" + ] + } + }, + "response": [] + }, + { + "name": "Query dips", + "event": [ + { + "listen": "prerequest", + "script": { + "id": "62bf724a-bdce-4351-b56d-d30d05d9c027", + "exec": [ + "", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "test", + "script": { + "id": "57524cbb-5c43-45cd-9550-d7843f703b1b", + "exec": [ + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "", + "pm.test(\"response must be valid\", function () {", + " pm.response.to.be.ok;", + " pm.response.to.be.withBody;", + " pm.response.to.be.json;", + "});", + "", + "pm.test(\"status code is 200\", function () {", + " pm.expect(pm.response.code).to.eql(200);", + "});", + "", + "pm.test(\"response data is as expected\", () => { ", + " const expected = [", + " {", + " \"reference\": {", + " \"unitKey\": \"meter\",", + " \"value\": 4000.0", + " },", + " \"azimuth\": {", + " \"unitKey\": \"dega\",", + " \"value\": 4.0", + " },", + " \"inclination\": {", + " \"unitKey\": \"dega\",", + " \"value\": 2.0", + " },", + " \"classification\": \"breakout\"", + " }", + " ]; ", + "", + " pm.expect(_.isEqual(pm.response.json(), expected)).to.be.true", + "});" + ], + "type": "text/javascript" + } + } + ], + "request": { + "method": "GET", + "header": [ + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/query?minReference=3500&maxReference=8000&classification=breakout", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "dipsets", + "{{dipsetId}}", + "dips", + "query" + ], + "query": [ + { + "key": "minReference", + "value": "3500" + }, + { + "key": "maxReference", + "value": "8000" + }, + { + "key": "classification", + "value": "breakout" + } + ] + } + }, + "response": [] + } + ], + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "event": [ + { + "listen": "prerequest", + "script": { + "id": "fc370605-7faa-4c7e-a8ba-e11ec5b3183f", + "type": "text/javascript", + "exec": [ + "" + ] + } + }, + { + "listen": "test", + "script": { + "id": "0064bc72-e85a-40d8-a2a8-2adac97641de", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "model_extensibility", + "item": [ + { + "name": "create_log_with_extra_fields", + "event": [ + { + "listen": "test", + "script": { + "id": "a190f098-a939-4cb6-bdcb-cb92d110d850", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)\r", + "\r", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "e592fa49-55f7-4257-9f78-5b3913cf6e21", + "exec": [ + "" + ], + "type": "text/javascript" 
+ } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\"name\": \"wddms-e2e-record-0000\", \r\n \"xxx_extra_at_data\": \"value_at_data\", \r\n \"log\": {\r\n \"name\": \"wellbore-ddms-test-log_0000\"\r\n }\r\n },\r\n \"kind\": \"{{data_partition}}:wks:log:1.0.5\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\"]\r\n }\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs" + ] + } + }, + "response": [] + }, + { + "name": "get_log_check_for_extra_fields", + "event": [ + { + "listen": "test", + "script": { + "id": "046caedb-d706-4294-b449-ce3656a61a1d", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + " pm.expect(resobj.data.xxx_extra_at_data).to.eql('value_at_data');\r", + " pm.expect(resobj.legal.otherRelevantDataCountries[0]).to.eql('US');\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "74de7718-241c-4e2b-b6ec-aa070c266475", + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs", + "{{record_id}}" + ] + } + }, + "response": [] + }, + { + "name": "clean_up_delete_log", + "event": [ + { + "listen": "test", + "script": { + "id": "bdf306ce-6c85-47bf-b76b-743279345ce5", + "exec": [ + "\r", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "eed550b6-f8d3-4692-8fa3-1dd4f817fc48", + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "DELETE", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/logs/{{record_id}}", + "host": [ + "{{base_url}}" + ], + 
"path": [ + "ddms", + "v2", + "logs", + "{{record_id}}" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "error_cases", + "item": [ + { + "name": "create_log_with_invalid_data_should_422", + "event": [ + { + "listen": "test", + "script": { + "id": "1dde0b62-5554-48fb-9ba4-d57dc30672b4", + "exec": [ + "pm.test(\"response must be a 422 unprocessable entity error\", () => {\r", + " pm.expect(pm.response.code).to.eql(422);\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + }, + { + "listen": "prerequest", + "script": { + "id": "1ee368e3-b819-41b6-86ce-72f3242e1e83", + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "\"[{\"data\":{\"name\":\"incomplete_data\"}}]\"" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "Search_APIs", + "item": [ + { + "name": "Create", + "item": [ + { + "name": "PUT wells", + "event": [ + { + "listen": "test", + "script": { + "id": "49fe95ab-2a3e-4446-b949-a2a69d30830c", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(1);\r", + " pm.expect(resobj.recordIds.length).to.eql(1);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\r\n \"basinContext\": {\r\n \"basinCode\": \"C5031\", \r\n \"basinName\": \"Williston Basin\", \r\n \"subBasinCode\": \"C50310104\", \r\n \"subBasinName\": \"Three Forks Formation and Jefferson Group\"\r\n }, \r\n \"block\": \"Block 11/8\", \r\n \"country\": \"United States of America\", \r\n \"county\": \"Stark\", \r\n \"dateCreated\": \"2013-03-22T11:16:03Z\", \r\n \"dateLicenseIssued\": \"2012-10-21T18:00:00Z\", \r\n \"dateModified\": \"2013-03-22T11:16:03Z\", \r\n \"datePluggedAbandoned\": \"2019-02-21T18:00:00Z\", \r\n \"dateSpudded\": \"2014-02-21T19:00:00Z\", \r\n \"directionWell\": \"producer\", \r\n \"district\": \"Fryburg\", \r\n \"elevationReference\": {\r\n 
\"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"name\": \"GL\"\r\n }, \r\n \"field\": \"Bell\", \r\n \"fluidWell\": \"oil-gas\", \r\n \"groundElevation\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2645.6\r\n }, \r\n \"locationWGS84\": {\r\n \"features\": [\r\n {\r\n \"geometry\": {\r\n \"coordinates\": [\r\n -103.2380248, \r\n 46.8925081, \r\n 2650.5\r\n ], \r\n \"type\": \"Point\"\r\n }, \r\n \"properties\": {\r\n \"name\": \"Newton 2-31\"\r\n }, \r\n \"type\": \"Feature\"\r\n }\r\n ], \r\n \"type\": \"FeatureCollection\"\r\n }, \r\n \"name\": \"wddms-e2e-search-test-0000\", \r\n \"operator\": \"Don E. Beckert\", \r\n \"operatorDivision\": \"Division ND\", \r\n \"plssLocation\": {\r\n \"range\": \"99W\", \r\n \"section\": 31, \r\n \"township\": \"140N\"\r\n }, \r\n \"propertyDictionary\": {\r\n \"API Number\": \"33003000080000\", \r\n \"Activity Code\": \"E\", \r\n \"Basin\": \"WILLISTON BASIN\", \r\n \"Basin Code\": \"713200\", \r\n \"Class Initial Code\": \"WF\", \r\n \"Class Initial Name\": \"NEW FIELD WILDCAT\", \r\n \"Country Name\": \"UNITED STATES\", \r\n \"County Name\": \"BARNES\", \r\n \"Current Operator City\": \"BILLINGS\", \r\n \"Current Operator Name\": \"NYVATEX MONTANA\", \r\n \"Date First Report\": \"11-12-1982\", \r\n \"Date Last Activity\": \"06-03-2016\", \r\n \"Depth Total Projected\": \"1800\", \r\n \"Elevation Reference Datum\": \"GR\", \r\n \"Elevation Reference Value\": \"1407\", \r\n \"Field Name\": \"WILDCAT\", \r\n \"Final Status\": \"ABANDON LOCATION\", \r\n \"Formation Projected Name\": \"PRECAMBRIAN\", \r\n \"Ground Elevation\": \"1407\", \r\n \"Hole Direction\": \"VERTICAL\", \r\n \"Lease Acres\": \"40\", \r\n \"Lease Name\": \"TRIEBOLD\", \r\n \"Operator City\": \"BILLINGS\", \r\n \"Operator Name\": \"NYVATEX MONTANA\", \r\n \"Permit Date\": \"11-10-1982\", \r\n \"Permit Filer Long\": \";PRESIDENT;;;;;;;\", \r\n \"Permit Number\": \"9896\", \r\n \"Permit Status\": \"APPROVED\", \r\n \"Source\": \"PI\", \r\n \"State Name\": \"NORTH DAKOTA\", \r\n \"Status Final Code\": \"A\", \r\n \"Sub Basin\": \"EASTERN SHELF (WILLISTON BASIN)\", \r\n \"Sub Basin Code\": \"100000004313\", \r\n \"Surface LL Source\": \"IH\", \r\n \"Surface Latitude\": \"+47.1981919\", \r\n \"Surface Longitude\": \" -97.8621697\", \r\n \"UWI\": \"33003000080000\", \r\n \"Unit of Measure\": \"ACRE\", \r\n \"Well Num\": \"34-14\"\r\n }, \r\n \"region\": \"North America\", \r\n \"relationships\": {\r\n \"asset\": {\r\n \"name\": \"Bell Stark\"\r\n }\r\n }, \r\n \"state\": \"North Dakota\", \r\n \"uwi\": \"33-089-00300-00\", \r\n \"wellHeadElevation\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"wellHeadGeographic\": {\r\n \"crsKey\": \"geographic\", \r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"latitude\": 46.89249512931594, \r\n \"longitude\": -103.23756979739804\r\n }, \r\n \"wellHeadProjected\": {\r\n \"crsKey\": \"projected\", \r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"x\": 1315694.366039069, \r\n \"y\": 458966.7531300551\r\n }, \r\n \"wellHeadWgs84\": {\r\n \"latitude\": 46.8925081, \r\n \"longitude\": -103.2380248\r\n }, \r\n \"wellLocationType\": \"Onshore\", \r\n \"wellNumberGovernment\": \"42-501-20130-P\", \r\n \"wellNumberLicense\": \"42-501-20130-P\", \r\n \"wellNumberOperator\": \"12399-001\", \r\n \"wellPurpose\": \"development -- producer\", \r\n \"wellStatus\": \"active -- producing\", \r\n \"wellType\": \"reentry\"\r\n }, \r\n 
\"id\": \"{{search_well_id}}\",\r\n \"kind\": \"{{data_partition}}:wks:well:2.0.0\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\r\n \"US\", \r\n \"BE\", \r\n \"NO\", \r\n \"IN\"\r\n ]\r\n }, \r\n \"version\": 1631210408153985\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/wells", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wells" + ] + } + }, + "response": [] + }, + { + "name": "PUT wellbores", + "event": [ + { + "listen": "test", + "script": { + "id": "3a726857-180b-45cf-ad93-c8421efaa60d", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(1);\r", + " pm.expect(resobj.recordIds.length).to.eql(1);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\r\n \"basinContext\": {\r\n \"basinCode\": \"C5031\", \r\n \"basinName\": \"Williston Basin\", \r\n \"subBasinCode\": \"C50310104\", \r\n \"subBasinName\": \"Three Forks Formation and Jefferson Group\"\r\n }, \r\n \"block\": \"Block 11/8\", \r\n \"country\": \"United States of America\", \r\n \"county\": \"Stark\", \r\n \"dateCreated\": \"2013-03-22T11:16:03Z\", \r\n \"dateLicenseIssued\": \"2012-10-21T18:00:00Z\", \r\n \"dateModified\": \"2013-03-22T11:16:03Z\", \r\n \"datePluggedAbandoned\": \"2019-02-21T18:00:00Z\", \r\n \"dateSpudded\": \"2014-02-21T19:00:00Z\", \r\n \"directionWell\": \"producer\", \r\n \"district\": \"Fryburg\", \r\n \"elevationReference\": {\r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"name\": \"GL\"\r\n }, \r\n \"field\": \"Bell\", \r\n \"fluidWell\": \"oil-gas\", \r\n \"groundElevation\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2645.6\r\n }, \r\n \"locationWGS84\": {\r\n \"features\": [\r\n {\r\n \"geometry\": {\r\n \"coordinates\": [\r\n -103.2380248, \r\n 46.8925081, \r\n 2650.5\r\n ], \r\n \"type\": \"Point\"\r\n }, \r\n \"properties\": {\r\n \"name\": \"Newton 2-31\"\r\n }, \r\n \"type\": \"Feature\"\r\n }\r\n ], \r\n \"type\": \"FeatureCollection\"\r\n }, \r\n \"name\": \"wddms-e2e-search-test-0000\", \r\n \"operator\": \"Don E. 
Beckert\", \r\n \"operatorDivision\": \"Division ND\", \r\n \"plssLocation\": {\r\n \"range\": \"99W\", \r\n \"section\": 31, \r\n \"township\": \"140N\"\r\n }, \r\n \"propertyDictionary\": {\r\n \"API Number\": \"33003000080000\", \r\n \"Activity Code\": \"E\", \r\n \"Basin\": \"WILLISTON BASIN\", \r\n \"Basin Code\": \"713200\", \r\n \"Class Initial Code\": \"WF\", \r\n \"Class Initial Name\": \"NEW FIELD WILDCAT\", \r\n \"Country Name\": \"UNITED STATES\", \r\n \"County Name\": \"BARNES\", \r\n \"Current Operator City\": \"BILLINGS\", \r\n \"Current Operator Name\": \"NYVATEX MONTANA\", \r\n \"Date First Report\": \"11-12-1982\", \r\n \"Date Last Activity\": \"06-03-2016\", \r\n \"Depth Total Projected\": \"1800\", \r\n \"Elevation Reference Datum\": \"GR\", \r\n \"Elevation Reference Value\": \"1407\", \r\n \"Field Name\": \"WILDCAT\", \r\n \"Final Status\": \"ABANDON LOCATION\", \r\n \"Formation Projected Name\": \"PRECAMBRIAN\", \r\n \"Ground Elevation\": \"1407\", \r\n \"Hole Direction\": \"VERTICAL\", \r\n \"Lease Acres\": \"40\", \r\n \"Lease Name\": \"TRIEBOLD\", \r\n \"Operator City\": \"BILLINGS\", \r\n \"Operator Name\": \"NYVATEX MONTANA\", \r\n \"Permit Date\": \"11-10-1982\", \r\n \"Permit Filer Long\": \";PRESIDENT;;;;;;;\", \r\n \"Permit Number\": \"9896\", \r\n \"Permit Status\": \"APPROVED\", \r\n \"Source\": \"PI\", \r\n \"State Name\": \"NORTH DAKOTA\", \r\n \"Status Final Code\": \"A\", \r\n \"Sub Basin\": \"EASTERN SHELF (WILLISTON BASIN)\", \r\n \"Sub Basin Code\": \"100000004313\", \r\n \"Surface LL Source\": \"IH\", \r\n \"Surface Latitude\": \"+47.1981919\", \r\n \"Surface Longitude\": \" -97.8621697\", \r\n \"UWI\": \"33003000080000\", \r\n \"Unit of Measure\": \"ACRE\", \r\n \"Well Num\": \"34-14\"\r\n }, \r\n \"region\": \"North America\", \r\n \"relationships\": {\r\n \"asset\": {\r\n \"name\": \"Bell Stark\"\r\n }\r\n }, \r\n \"state\": \"North Dakota\", \r\n \"uwi\": \"33-089-00300-00\", \r\n \"wellHeadElevation\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"wellHeadGeographic\": {\r\n \"crsKey\": \"geographic\", \r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"latitude\": 46.89249512931594, \r\n \"longitude\": -103.23756979739804\r\n }, \r\n \"wellHeadProjected\": {\r\n \"crsKey\": \"projected\", \r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2650.5\r\n }, \r\n \"x\": 1315694.366039069, \r\n \"y\": 458966.7531300551\r\n }, \r\n \"wellHeadWgs84\": {\r\n \"latitude\": 46.8925081, \r\n \"longitude\": -103.2380248\r\n }, \r\n \"wellLocationType\": \"Onshore\", \r\n \"wellNumberGovernment\": \"42-501-20130-P\", \r\n \"wellNumberLicense\": \"42-501-20130-P\", \r\n \"wellNumberOperator\": \"12399-001\", \r\n \"wellPurpose\": \"development -- producer\", \r\n \"wellStatus\": \"active -- producing\", \r\n \"wellType\": \"reentry\"\r\n }, \r\n \"id\": \"{{search_wellbore_id}}\",\r\n \"kind\": \"{{data_partition}}:wks:wellbore:1.0.6\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\r\n \"US\", \r\n \"BE\", \r\n \"NO\", \r\n \"IN\"\r\n ]\r\n }, \r\n \"version\": 1631210408153985\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/wellbores", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "wellbores" + ] + } + }, + "response": [] + }, + { + "name": "PUT logsets", + "event": [ + { + "listen": "test", + "script": { + "id": "68dce21d-0f9b-44db-8a04-a0c138ea87b8", + "exec": [ + "const resobj = 
pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(2);\r", + " pm.expect(resobj.recordIds.length).to.eql(2);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\r\n \"azimuthReference\": \"TN\", \r\n \"channelMnemonics\": [\r\n \"DCAL\", \r\n \"DPOR\", \r\n \"GR\", \r\n \"NPOR\", \r\n \"RHOB\", \r\n \"DT\"\r\n ], \r\n \"channelNames\": [\r\n \"Differential Caliper\", \r\n \"Density Porosity\", \r\n \"Gamma Ray\", \r\n \"Enhanced Thermal Neutron Porosity in Selected Lithology\", \r\n \"CDL Bulk Density\", \r\n \"Delta-T (also called Slowness or Interval Transit Time)\"\r\n ], \r\n \"classification\": \"Quad-Combo\", \r\n \"dateCreated\": \"2013-03-22T11:16:03Z\", \r\n \"dateModified\": \"2013-03-22T11:16:03Z\", \r\n \"elevationReference\": {\r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2680.5\r\n }, \r\n \"name\": \"KB\"\r\n }, \r\n \"externalIds\": [\r\n \"Petrel:tenant1/ProjectLouisiana/3764913/a9b46fc4-1840-450a-ac01-d15bdaa086ba:9190e417-8d42-4994-9e6a-9a327b4f47b1\"\r\n ], \r\n \"operation\": \"Harmonization\", \r\n \"properties\": [\r\n {\r\n \"description\": \"Run 1 date {DD/MM/YYYY}\", \r\n \"name\": \"RUN_DATE-RUN1\", \r\n \"unitKey\": \"\", \r\n \"value\": \"22/09/1998\"\r\n }, \r\n {\r\n \"description\": \"Run 1 depth interval\", \r\n \"name\": \"RUN_DEPTH-TOP-RUN1\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 0\r\n }, \r\n {\r\n \"description\": \"Run 1 depth interval\", \r\n \"name\": \"RUN_DEPTH-BASE-RUN1\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 1500\r\n }, \r\n {\r\n \"description\": \"Run 2 date {DD/MM/YYYY}\", \r\n \"name\": \"RUN_DATE-RUN2\", \r\n \"unitKey\": \"\", \r\n \"value\": \"23/10/1998\"\r\n }, \r\n {\r\n \"description\": \"Run 2 depth interval\", \r\n \"name\": \"RUN_DEPTH-TOP-RUN2\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 1500\r\n }, \r\n {\r\n \"description\": \"Run 2 depth interval\", \r\n \"name\": \"RUN_DEPTH-BASE-RUN2\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 2513\r\n }, \r\n {\r\n \"associations\": [\r\n \"ENSEMBLE_TOOLELEMENT\", \r\n \"EDTC-B_8612\", \r\n \"EDTC-B_8612\"\r\n ], \r\n \"description\": \"from Toolstring_Parameter\", \r\n \"name\": \"EDTC-B_8612\", \r\n \"value\": 8612\r\n }, \r\n {\r\n \"description\": \"zone range\", \r\n \"format\": \"{AF}\", \r\n \"name\": \"ERRBND_Zone[1]\", \r\n \"values\": [\r\n -999.25, \r\n 43474.6413266435\r\n ]\r\n }\r\n ], \r\n \"reference\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Measured Depth\", \r\n 
\"familyType\": \"Depth\", \r\n \"format\": \"float32\", \r\n \"mnemonic\": \"MD\", \r\n \"name\": \"Measured Depth\", \r\n \"unitKey\": \"ft\"\r\n }, \r\n \"referenceType\": \"Measured Depth\", \r\n \"relationships\": {\r\n \"well\": {\r\n \"name\": \"Newton 2-31\"\r\n }, \r\n \"wellbore\": {\r\n \"confidence\": 1.0, \r\n \"id\": \"{{search_wellbore_id}}\", \r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }\r\n }, \r\n \"start\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 1234.56\r\n }, \r\n \"step\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 0.1\r\n }, \r\n \"stop\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 13856.25\r\n }\r\n }, \r\n \"id\": \"{{search_logset_id}}\", \r\n \"kind\": \"{{dipsetKind}}\", \r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ], \r\n \"otherRelevantDataCountries\": [\r\n \"US\", \r\n \"BE\", \r\n \"NO\", \r\n \"IN\"\r\n ]\r\n }, \r\n \"meta\": [\r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"ft\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":0.3048,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"ft\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"stop.value\", \r\n \"elevationReference.elevationFromMsl.value\", \r\n \"start.value\", \r\n \"step.value\", \r\n \"reference.unitKey\"\r\n ], \r\n \"propertyValues\": [\r\n \"ft\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"DateTime\", \r\n \"name\": \"datetime\", \r\n \"persistableReference\": \"{\\\"format\\\":\\\"yyyy-MM-ddTHH:mm:ssZ\\\",\\\"timeZone\\\":\\\"UTC\\\",\\\"type\\\":\\\"DTM\\\"}\", \r\n \"propertyNames\": [\r\n \"dateModified\", \r\n \"dateCreated\"\r\n ]\r\n }\r\n ], \r\n \"version\": 1161040831539285\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\r\n \"azimuthReference\": \"TN\", \r\n \"channelMnemonics\": [\r\n \"DCAL\", \r\n \"DPOR\", \r\n \"GR\", \r\n \"NPOR\", \r\n \"RHOB\", \r\n \"DT\"\r\n ], \r\n \"channelNames\": [\r\n \"Differential Caliper\", \r\n \"Density Porosity\", \r\n \"Gamma Ray\", \r\n \"Enhanced Thermal Neutron Porosity in Selected Lithology\", \r\n \"CDL Bulk Density\", \r\n \"Delta-T (also called Slowness or Interval Transit Time)\"\r\n ], \r\n \"classification\": \"Quad-Combo\", \r\n \"dateCreated\": \"2013-03-22T11:16:03Z\", \r\n \"dateModified\": \"2013-03-22T11:16:03Z\", \r\n \"elevationReference\": {\r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2680.5\r\n }, \r\n \"name\": \"KB\"\r\n }, \r\n \"externalIds\": [\r\n \"Petrel:tenant1/ProjectLouisiana/3764913/a9b46fc4-1840-450a-ac01-d15bdaa086ba:9190e417-8d42-4994-9e6a-9a327b4f47b1\"\r\n ], \r\n \"operation\": \"Harmonization\", \r\n \"properties\": [\r\n {\r\n \"description\": \"Run 1 date {DD/MM/YYYY}\", \r\n \"name\": \"RUN_DATE-RUN1\", \r\n \"unitKey\": \"\", \r\n \"value\": \"22/09/1998\"\r\n }, \r\n {\r\n \"description\": \"Run 1 depth interval\", \r\n \"name\": \"RUN_DEPTH-TOP-RUN1\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 0\r\n }, \r\n {\r\n \"description\": \"Run 1 depth interval\", \r\n \"name\": \"RUN_DEPTH-BASE-RUN1\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 1500\r\n }, \r\n {\r\n \"description\": \"Run 2 date {DD/MM/YYYY}\", \r\n \"name\": \"RUN_DATE-RUN2\", \r\n \"unitKey\": \"\", \r\n \"value\": \"23/10/1998\"\r\n }, \r\n {\r\n \"description\": \"Run 2 depth interval\", \r\n \"name\": \"RUN_DEPTH-TOP-RUN2\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 1500\r\n }, \r\n {\r\n 
\"description\": \"Run 2 depth interval\", \r\n \"name\": \"RUN_DEPTH-BASE-RUN2\", \r\n \"unitKey\": \"ft\", \r\n \"value\": 2513\r\n }, \r\n {\r\n \"associations\": [\r\n \"ENSEMBLE_TOOLELEMENT\", \r\n \"EDTC-B_8612\", \r\n \"EDTC-B_8612\"\r\n ], \r\n \"description\": \"from Toolstring_Parameter\", \r\n \"name\": \"EDTC-B_8612\", \r\n \"value\": 8612\r\n }, \r\n {\r\n \"description\": \"zone range\", \r\n \"format\": \"{AF}\", \r\n \"name\": \"ERRBND_Zone[1]\", \r\n \"values\": [\r\n -999.25, \r\n 43474.6413266435\r\n ]\r\n }\r\n ], \r\n \"reference\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Measured Depth\", \r\n \"familyType\": \"Depth\", \r\n \"format\": \"float32\", \r\n \"mnemonic\": \"MD\", \r\n \"name\": \"Measured Depth\", \r\n \"unitKey\": \"ft\"\r\n }, \r\n \"referenceType\": \"Measured Depth\", \r\n \"relationships\": {\r\n \"well\": {\r\n \"name\": \"Newton 2-31\"\r\n }, \r\n \"wellbore\": {\r\n \"confidence\": 1.0, \r\n \"id\": \"{{search_wellbore_id}}\", \r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }\r\n }, \r\n \"start\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 1234.56\r\n }, \r\n \"step\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 0.1\r\n }, \r\n \"stop\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 13856.25\r\n }\r\n }, \r\n \"id\": \"{{search_logset2_id}}\", \r\n \"kind\": \"{{dipsetKind}}\", \r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ], \r\n \"otherRelevantDataCountries\": [\r\n \"US\", \r\n \"BE\", \r\n \"NO\", \r\n \"IN\"\r\n ]\r\n }, \r\n \"meta\": [\r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"ft\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":0.3048,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"ft\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"stop.value\", \r\n \"elevationReference.elevationFromMsl.value\", \r\n \"start.value\", \r\n \"step.value\", \r\n \"reference.unitKey\"\r\n ], \r\n \"propertyValues\": [\r\n \"ft\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"DateTime\", \r\n \"name\": \"datetime\", \r\n \"persistableReference\": \"{\\\"format\\\":\\\"yyyy-MM-ddTHH:mm:ssZ\\\",\\\"timeZone\\\":\\\"UTC\\\",\\\"type\\\":\\\"DTM\\\"}\", \r\n \"propertyNames\": [\r\n \"dateModified\", \r\n \"dateCreated\"\r\n ]\r\n }\r\n ], \r\n \"version\": 1161040831539285\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/logsets", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logsets" + ] + } + }, + "response": [] + }, + { + "name": "PUT markers", + "event": [ + { + "listen": "test", + "script": { + "id": "87c86847-7b38-4e2d-bea6-5de6b03b256d", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(2);\r", + " pm.expect(resobj.recordIds.length).to.eql(2);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": 
"string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\r\n \"name\": \"wddms-e2e-search-0000\",\r\n \"md\": {\r\n \"unitKey\": \"Unknown\",\r\n \"value\": 0\r\n },\r\n \"relationships\": {\r\n \"well\": {\r\n \"name\": \"Newton 2-31\"\r\n }, \r\n \"wellbore\": {\r\n \"confidence\": 1.0, \r\n \"id\": \"{{search_wellbore_id}}\", \r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }\r\n }\r\n },\r\n \"id\": \"{{search_marker_id}}\", \r\n \"kind\": \"{{data_partition}}:wks:marker:1.0.4\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n },\r\n \"data\": {\r\n \"name\": \"wddms-e2e-search-0000\",\r\n \"md\": {\r\n \"unitKey\": \"Unknown\",\r\n \"value\": 0\r\n },\r\n \"relationships\": {\r\n \"well\": {\r\n \"name\": \"Newton 2-31\"\r\n }, \r\n \"wellbore\": {\r\n \"confidence\": 1.0, \r\n \"id\": \"{{search_wellbore_id}}\", \r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }\r\n }\r\n },\r\n \"id\": \"{{search_marker2_id}}\", \r\n \"kind\": \"{{data_partition}}:wks:marker:1.0.4\",\r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ],\r\n \"otherRelevantDataCountries\": [\"US\",\"FR\"]\r\n }\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/markers", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "markers" + ] + } + }, + "response": [] + }, + { + "name": "PUT logs", + "event": [ + { + "listen": "test", + "script": { + "id": "f7c25a2f-2e60-4f23-952b-2500de24b565", + "exec": [ + "const resobj = pm.response.json();\r", + "\r", + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"response is as expected\", () => {\r", + " pm.expect(resobj.recordCount).to.eql(3);\r", + " pm.expect(resobj.recordIds.length).to.eql(3);\r", + " if (resobj.skippedRecordIds)\r", + " pm.expect(resobj.skippedRecordIds.length).to.eql(0);\r", + "});\r", + "\r", + "let record_id = resobj.recordIds[0]\r", + "pm.environment.set(\"record_id\", record_id)\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "PUT", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n }, \r\n \"data\": {\r\n \"azimuthReference\": \"TN\", \r\n \"dateCreated\": \"2013-03-22T11:16:03Z\", \r\n \"dateModified\": \"2013-03-22T11:16:03Z\", \r\n\t\"basin\": \"Feda Graben (Central Graben)\", \r\n \"elevationReference\": {\r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2680.5\r\n }, \r\n \"name\": \"KB\"\r\n }, 
\r\n \"externalIds\": [\r\n \"Petrel:tenant1/ProjectLouisiana/3764913/a9b46fc4-1840-450a-ac01-d15bdaa086ba:438c35f3-fb59-4581-bb21-93b591d7cd1f\"\r\n ], \r\n \"history\": [\r\n {\r\n \"date\": \"2019-02-01T11:16:03Z\", \r\n \"description\": \"Created by Quanti_ Borehole computation; \\nFamilies: True Vertical Depth; \\nVariables: TVD, \\nZonation: ZONATION_ALL; Unit: ft; \\nMudType: Water; BSALinput: 0; Unit: ppk; BFHIinput: -9999; Unit: unitless; BPressCompute: Compute from mud weight and TVD; AirGap: 2; Unit: m; MudWeight: 1.1; Unit: g/m3; BTempCompute: Compute from depth tie point and gradient; BTEMPinput: 75; Unit: degC; BTEMPreferenceTVD: 2438.4; Unit: m; BTEMPgradient: 2; Unit: degC/100m; RmCompute: Compute from zoned variables; RMinput: 0.1; Unit: ohm.m; RMtemperature: 20; Unit: degC; RMFinput: 0.08; Unit: ohm.m; RMFtemperature: 20; Unit: degC; RMCinput: 0.16; Unit: ohm.m; RMCtemperature: 20; Unit: degC; RWinput: 0.1; Unit: ohm.m; RWtemperature: 100; Unit: degC; FormationSalinity: -9999; Unit: ppk;\", \r\n \"user\": \"Ddahan\"\r\n }\r\n ], \r\n \"log\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Density Porosity\", \r\n \"familyType\": \"Porosity\", \r\n \"format\": \"float32\", \r\n \"logstoreId\": 2156256839304115, \r\n \"mnemonic\": \"DPOR\", \r\n \"name\": \"Density Porosity\", \r\n \"properties\": [\r\n {\r\n \"description\": \"Linear depth offset of the channel sensor relative to some reference point, typically the toolstring zero\", \r\n \"name\": \"MEASURE_POINT_OFFSET\", \r\n \"unitKey\": \"m\", \r\n \"value\": 0.264922\r\n }\r\n ], \r\n \"unitKey\": \"%\"\r\n }, \r\n \"name\": \"DPOR\", \r\n \"reference\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Measured Depth\", \r\n \"familyType\": \"Depth\", \r\n \"format\": \"float32\", \r\n \"mnemonic\": \"MD\", \r\n \"name\": \"Measured Depth\", \r\n \"unitKey\": \"ft\"\r\n }, \r\n \"referenceType\": \"Measured Depth\", \r\n \"relationships\": {\r\n \"logSet\": {\r\n \"id\": \"{{search_logset_id}}\"\r\n }, \r\n \"well\": {\r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }, \r\n \"wellbore\": {\r\n \"confidence\": 1.0, \r\n \"id\": \"{{search_wellbore_id}}\", \r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }\r\n }, \r\n \"start\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 1234.56\r\n }, \r\n \"step\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 0.1\r\n }, \r\n \"stop\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 13856.25\r\n }\r\n }, \r\n \"id\": \"{{search_log_id}}\", \r\n \"kind\": \"{{data_partition}}:wks:log:1.0.5\", \r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ], \r\n \"otherRelevantDataCountries\": [\r\n \"US\", \r\n \"BE\", \r\n \"NO\", \r\n \"IN\"\r\n ]\r\n }, \r\n \"meta\": [\r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"ft\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":0.3048,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"ft\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"reference.unitKey\", \r\n \"stop.value\", \r\n \"elevationReference.elevationFromMsl.value\", \r\n \"start.value\", \r\n \"step.value\"\r\n ], \r\n \"propertyValues\": [\r\n \"ft\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"%\", \r\n \"persistableReference\": 
\"{\\\"scaleOffset\\\":{\\\"scale\\\":0.01,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"%\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Dimensionless\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"log.unitKey\"\r\n ], \r\n \"propertyValues\": [\r\n \"%\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"m\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":1.0,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"m\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"log.properties.namedProperty.value\"\r\n ]\r\n }\r\n ]\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n }, \r\n \"data\": {\r\n \"azimuthReference\": \"TN\", \r\n \"dateCreated\": \"2013-03-22T11:16:03Z\", \r\n \"dateModified\": \"2013-03-22T11:16:03Z\", \r\n\t\"basin\": \"Feda Graben (Central Graben)\", \r\n \"elevationReference\": {\r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2680.5\r\n }, \r\n \"name\": \"KB\"\r\n }, \r\n \"externalIds\": [\r\n \"Petrel:tenant1/ProjectLouisiana/3764913/a9b46fc4-1840-450a-ac01-d15bdaa086ba:438c35f3-fb59-4581-bb21-93b591d7cd1f\"\r\n ], \r\n \"history\": [\r\n {\r\n \"date\": \"2019-02-01T11:16:03Z\", \r\n \"description\": \"Created by Quanti_ Borehole computation; \\nFamilies: True Vertical Depth; \\nVariables: TVD, \\nZonation: ZONATION_ALL; Unit: ft; \\nMudType: Water; BSALinput: 0; Unit: ppk; BFHIinput: -9999; Unit: unitless; BPressCompute: Compute from mud weight and TVD; AirGap: 2; Unit: m; MudWeight: 1.1; Unit: g/m3; BTempCompute: Compute from depth tie point and gradient; BTEMPinput: 75; Unit: degC; BTEMPreferenceTVD: 2438.4; Unit: m; BTEMPgradient: 2; Unit: degC/100m; RmCompute: Compute from zoned variables; RMinput: 0.1; Unit: ohm.m; RMtemperature: 20; Unit: degC; RMFinput: 0.08; Unit: ohm.m; RMFtemperature: 20; Unit: degC; RMCinput: 0.16; Unit: ohm.m; RMCtemperature: 20; Unit: degC; RWinput: 0.1; Unit: ohm.m; RWtemperature: 100; Unit: degC; FormationSalinity: -9999; Unit: ppk;\", \r\n \"user\": \"Ddahan\"\r\n }\r\n ], \r\n \"log\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Density Porosity\", \r\n \"familyType\": \"Porosity\", \r\n \"format\": \"float32\", \r\n \"logstoreId\": 2156256839304115, \r\n \"mnemonic\": \"DPOR\", \r\n \"name\": \"Density Porosity\", \r\n \"properties\": [\r\n {\r\n \"description\": \"Linear depth offset of the channel sensor relative to some reference point, typically the toolstring zero\", \r\n \"name\": \"MEASURE_POINT_OFFSET\", \r\n \"unitKey\": \"m\", \r\n \"value\": 0.264922\r\n }\r\n ], \r\n \"unitKey\": \"%\"\r\n }, \r\n \"name\": \"DPOR\", \r\n \"reference\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Measured Depth\", \r\n \"familyType\": \"Depth\", \r\n \"format\": \"float32\", \r\n \"mnemonic\": \"MD\", \r\n \"name\": \"Measured Depth\", \r\n \"unitKey\": \"ft\"\r\n }, \r\n \"referenceType\": \"Measured Depth\", \r\n \"relationships\": {\r\n \"logSet\": {\r\n \"id\": \"{{search_logset_id}}\"\r\n }, \r\n \"well\": {\r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }, \r\n \"wellbore\": {\r\n \"confidence\": 1.0, \r\n \"id\": \"{{search_wellbore_id}}\", \r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }\r\n }, \r\n \"start\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 1234.56\r\n }, \r\n \"step\": {\r\n \"unitKey\": \"ft\", \r\n 
\"value\": 0.1\r\n }, \r\n \"stop\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 13856.25\r\n }\r\n }, \r\n \"id\": \"{{search_log2_id}}\", \r\n \"kind\": \"{{data_partition}}:wks:log:1.0.5\", \r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ], \r\n \"otherRelevantDataCountries\": [\r\n \"US\", \r\n \"BE\", \r\n \"NO\", \r\n \"IN\"\r\n ]\r\n }, \r\n \"meta\": [\r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"ft\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":0.3048,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"ft\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"reference.unitKey\", \r\n \"stop.value\", \r\n \"elevationReference.elevationFromMsl.value\", \r\n \"start.value\", \r\n \"step.value\"\r\n ], \r\n \"propertyValues\": [\r\n \"ft\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"%\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":0.01,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"%\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Dimensionless\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"log.unitKey\"\r\n ], \r\n \"propertyValues\": [\r\n \"%\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"m\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":1.0,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"m\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"log.properties.namedProperty.value\"\r\n ]\r\n }\r\n ]\r\n},\r\n{\r\n \"acl\": {\r\n \"owners\": [\r\n \"{{acl_owner}}\"\r\n ],\r\n \"viewers\": [\r\n \"{{acl_viewer}}\"\r\n ]\r\n }, \r\n \"data\": {\r\n \"azimuthReference\": \"TN\", \r\n \"dateCreated\": \"2013-03-22T11:16:03Z\", \r\n \"dateModified\": \"2013-03-22T11:16:03Z\", \r\n\t\"basin\": \"Feda Graben (Central Graben)\", \r\n \"elevationReference\": {\r\n \"elevationFromMsl\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 2680.5\r\n }, \r\n \"name\": \"KB\"\r\n }, \r\n \"externalIds\": [\r\n \"Petrel:tenant1/ProjectLouisiana/3764913/a9b46fc4-1840-450a-ac01-d15bdaa086ba:438c35f3-fb59-4581-bb21-93b591d7cd1f\"\r\n ], \r\n \"history\": [\r\n {\r\n \"date\": \"2019-02-01T11:16:03Z\", \r\n \"description\": \"Created by Quanti_ Borehole computation; \\nFamilies: True Vertical Depth; \\nVariables: TVD, \\nZonation: ZONATION_ALL; Unit: ft; \\nMudType: Water; BSALinput: 0; Unit: ppk; BFHIinput: -9999; Unit: unitless; BPressCompute: Compute from mud weight and TVD; AirGap: 2; Unit: m; MudWeight: 1.1; Unit: g/m3; BTempCompute: Compute from depth tie point and gradient; BTEMPinput: 75; Unit: degC; BTEMPreferenceTVD: 2438.4; Unit: m; BTEMPgradient: 2; Unit: degC/100m; RmCompute: Compute from zoned variables; RMinput: 0.1; Unit: ohm.m; RMtemperature: 20; Unit: degC; RMFinput: 0.08; Unit: ohm.m; RMFtemperature: 20; Unit: degC; RMCinput: 0.16; Unit: ohm.m; RMCtemperature: 20; Unit: degC; RWinput: 0.1; Unit: ohm.m; RWtemperature: 100; Unit: degC; FormationSalinity: -9999; Unit: ppk;\", \r\n \"user\": \"Ddahan\"\r\n }\r\n ], \r\n \"log\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Density Porosity\", \r\n \"familyType\": \"Porosity\", \r\n \"format\": \"float32\", \r\n \"logstoreId\": 2156256839304115, \r\n \"mnemonic\": \"DPOR\", \r\n \"name\": \"Density Porosity\", \r\n \"properties\": [\r\n {\r\n \"description\": \"Linear depth offset of the channel 
sensor relative to some reference point, typically the toolstring zero\", \r\n \"name\": \"MEASURE_POINT_OFFSET\", \r\n \"unitKey\": \"m\", \r\n \"value\": 0.264922\r\n }\r\n ], \r\n \"unitKey\": \"%\"\r\n }, \r\n \"name\": \"DPOR\", \r\n \"reference\": {\r\n \"dataType\": \"number\", \r\n \"dimension\": 1, \r\n \"family\": \"Measured Depth\", \r\n \"familyType\": \"Depth\", \r\n \"format\": \"float32\", \r\n \"mnemonic\": \"MD\", \r\n \"name\": \"Measured Depth\", \r\n \"unitKey\": \"ft\"\r\n }, \r\n \"referenceType\": \"Measured Depth\", \r\n \"relationships\": {\r\n \"logSet\": {\r\n \"id\": \"{{search_logset_id}}\"\r\n }, \r\n \"well\": {\r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }, \r\n \"wellbore\": {\r\n \"confidence\": 1.0, \r\n \"id\": \"{{search_wellbore_id}}\", \r\n \"name\": \"wddms-e2e-search-test-0000\"\r\n }\r\n }, \r\n \"start\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 1234.56\r\n }, \r\n \"step\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 0.1\r\n }, \r\n \"stop\": {\r\n \"unitKey\": \"ft\", \r\n \"value\": 13856.25\r\n }\r\n }, \r\n \"id\": \"{{search_log3_id}}\", \r\n \"kind\": \"{{data_partition}}:wks:log:1.0.5\", \r\n \"legal\": {\r\n \"legaltags\": [\r\n \"{{legal_tag}}\"\r\n ], \r\n \"otherRelevantDataCountries\": [\r\n \"US\", \r\n \"BE\", \r\n \"NO\", \r\n \"IN\"\r\n ]\r\n }, \r\n \"meta\": [\r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"ft\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":0.3048,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"ft\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"reference.unitKey\", \r\n \"stop.value\", \r\n \"elevationReference.elevationFromMsl.value\", \r\n \"start.value\", \r\n \"step.value\"\r\n ], \r\n \"propertyValues\": [\r\n \"ft\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"%\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":0.01,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"%\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Dimensionless\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"log.unitKey\"\r\n ], \r\n \"propertyValues\": [\r\n \"%\"\r\n ]\r\n }, \r\n {\r\n \"kind\": \"Unit\", \r\n \"name\": \"m\", \r\n \"persistableReference\": \"{\\\"scaleOffset\\\":{\\\"scale\\\":1.0,\\\"offset\\\":0.0},\\\"symbol\\\":\\\"m\\\",\\\"baseMeasurement\\\":{\\\"ancestry\\\":\\\"Length\\\",\\\"type\\\":\\\"UM\\\"},\\\"type\\\":\\\"USO\\\"}\", \r\n \"propertyNames\": [\r\n \"log.properties.namedProperty.value\"\r\n ]\r\n }\r\n ]\r\n}\r\n]" + }, + "url": { + "raw": "{{base_url}}/ddms/v2/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "logs" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + }, + { + "name": "Search", + "item": [ + { + "name": "search wellbores", + "event": [ + { + "listen": "test", + "script": { + "id": "7dc1252a-ed71-46da-92b4-1b258efff61e", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": 
"token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + }, + { + "key": "appkey", + "value": "test", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbores", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbores" + ] + } + }, + "response": [] + }, + { + "name": "search wellbore using /query", + "event": [ + { + "listen": "test", + "script": { + "id": "ea307b91-4e2a-4abe-b0f1-cf0bceef31e2", + "exec": [ + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + }, + { + "key": "appkey", + "value": "test", + "type": "text" + } + ], + "body": { + "mode": "raw", + "raw": "{\r\n\t\"kind\":\"*:osdu:wellbore:*\",\r\n \"query\":\"id:\\\"{{data_partition}}:osdu:wellbore-Search-Tests-DatazEQZYboexFRuDdZ\\\"\"\r\n}", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/query", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "query" + ] + } + }, + "response": [] + }, + { + "name": "search wellbores by distance", + "event": [ + { + "listen": "test", + "script": { + "id": "b46e8599-4469-4db6-9530-b2af424461a0", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.be.gte(1);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbores/bydistance?latitude=46.8&longitude=-103.2&distance=15000", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbores", + "bydistance" + ], + "query": [ + { + "key": "latitude", + "value": "46.8" + }, + { + "key": "longitude", + "value": "-103.2" + }, + { + "key": "distance", + "value": "15000" + } + ] + } + }, + "response": [] + }, + { + "name": "search wellbores by bounding box", + "event": [ + { + "listen": "test", + "script": { + "id": 
"fd64295b-a82b-402d-8238-df28b24454ed", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.be.gte(1);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbores/byboundingbox?latitude_top_left=48&longitude_top_left=-104&latitude_bottom_right=45&longitude_bottom_right=-101", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbores", + "byboundingbox" + ], + "query": [ + { + "key": "latitude_top_left", + "value": "48" + }, + { + "key": "longitude_top_left", + "value": "-104" + }, + { + "key": "latitude_bottom_right", + "value": "45" + }, + { + "key": "longitude_bottom_right", + "value": "-101" + } + ] + } + }, + "response": [] + }, + { + "name": "search wellbores by geo polygon", + "event": [ + { + "listen": "test", + "script": { + "id": "bc03c222-59c2-41c1-8b1d-eea1a5e517bc", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.be.gte(1);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "[\r\n {\r\n \"latitude\": 46,\r\n \"longitude\": -101\r\n },\r\n {\r\n \"latitude\": 49,\r\n \"longitude\": -102\r\n },\r\n {\r\n \"latitude\": 45,\r\n \"longitude\": -105\r\n }\r\n]", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbores/bygeopolygon", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbores", + "bygeopolygon" + ] + } + }, + "response": [] + }, + { + "name": "search logset by wellbore id", + "event": [ + { + "listen": "test", + "script": { + "id": 
"11dd295d-7800-4adb-a370-755e644fc7df", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.eql(2);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbore/{{search_wellbore_id}}/logsets", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbore", + "{{search_wellbore_id}}", + "logsets" + ] + } + }, + "response": [] + }, + { + "name": "search logset by wellbores attribute", + "event": [ + { + "listen": "test", + "script": { + "id": "ec0a5576-a7ef-4bd3-bf15-78b2eaaf2239", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.be.gte(2);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbores/{{search_wellbore_attribute}}/logsets", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbores", + "{{search_wellbore_attribute}}", + "logsets" + ] + } + }, + "response": [] + }, + { + "name": "search logs", + "event": [ + { + "listen": "test", + "script": { + "id": "0e1419fc-a8db-4d9f-8d25-c5682c350f73", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + 
"request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "logs" + ] + } + }, + "response": [] + }, + { + "name": "search logs by wellbore id", + "event": [ + { + "listen": "test", + "script": { + "id": "937960d7-307e-4c05-b408-29d13ef9c144", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.eql(3);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbore/{{search_wellbore_id}}/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbore", + "{{search_wellbore_id}}", + "logs" + ] + } + }, + "response": [] + }, + { + "name": "search logs by wellbores attribute", + "event": [ + { + "listen": "test", + "script": { + "id": "8d86d89d-2e8c-4610-bb2a-c5d9bc0f0e45", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.be.gte(3);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": 
"{{base_url}}/ddms/{{search_query_type}}/wellbores/{{search_wellbore_attribute}}/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbores", + "{{search_wellbore_attribute}}", + "logs" + ] + } + }, + "response": [] + }, + { + "name": "search logs by logset id", + "event": [ + { + "listen": "test", + "script": { + "id": "5a65af2a-4a67-4517-891b-5fe4d917be65", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.eql(3);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/logset/{{search_logset_id}}/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "logset", + "{{search_logset_id}}", + "logs" + ] + } + }, + "response": [] + }, + { + "name": "search logs by logsets attribute", + "event": [ + { + "listen": "test", + "script": { + "id": "735fc570-5014-4d05-86f3-d5b7295c5ff4", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.be.gte(3);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/logsets/{{search_logset_attribute}}/logs", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "logsets", + "{{search_logset_attribute}}", + "logs" + ] + } + }, + "response": [] + }, + { + "name": "search markers by wellbore id", + "event": [ + { + "listen": "test", + "script": { + "id": "563bd1bf-8023-4eaf-89ce-3f04bc077c2b", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " 
pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "pm.test(\"response data is as expected\", () => {\r", + " const resobj = pm.response.json();\r", + "\r", + " pm.expect(resobj.totalCount).to.eql(2);\r", + "});" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "POST", + "header": [ + { + "key": "accept", + "type": "text", + "value": "application/json" + }, + { + "key": "data-partition-id", + "type": "text", + "value": "{{data_partition}}" + }, + { + "key": "appkey", + "type": "text", + "value": "test" + } + ], + "body": { + "mode": "raw", + "raw": "", + "options": { + "raw": { + "language": "text" + } + } + }, + "url": { + "raw": "{{base_url}}/ddms/{{search_query_type}}/wellbore/{{search_wellbore_id}}/markers", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "{{search_query_type}}", + "wellbore", + "{{search_wellbore_id}}", + "markers" + ] + } + }, + "response": [] + } + ], + "protocolProfileBehavior": {}, + "_postman_isSubFolder": true + } + ], + "protocolProfileBehavior": {} + }, + { + "name": "about", + "event": [ + { + "listen": "test", + "script": { + "id": "43ca901e-b902-4a38-a89a-b75f466524b0", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "\r", + "pm.test(\"Check response's content\", function () {\r", + "\r", + " var jsonData = pm.response.json();\r", + "\r", + " pm.expect(jsonData).to.include.all.keys([\"service\", \"version\", \"buildNumber\", \"cloudEnvironment\"])\r", + "\r", + " pm.expect(jsonData.service).to.be.a('string');\r", + " pm.expect(jsonData.version).to.be.a('string');\r", + " pm.expect(jsonData.buildNumber).to.be.a('string');\r", + " pm.expect(jsonData.cloudEnvironment).to.be.a('string');\r", + "\r", + " currentCloudEnv = pm.variables.get(\"cloud_provider\");\r", + " pm.expect(jsonData.cloudEnvironment).to.equal(currentCloudEnv);\r", + "}); " + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "noauth" + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/about", + "host": [ + "{{base_url}}" + ], + "path": [ + "about" + ] + } + }, + "response": [] + }, + { + "name": "version", + "event": [ + { + "listen": "test", + "script": { + "id": "18609a40-e670-455a-886c-80b4097ebe76", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + 
"key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/version", + "host": [ + "{{base_url}}" + ], + "path": [ + "version" + ] + } + }, + "response": [] + }, + { + "name": "status", + "event": [ + { + "listen": "test", + "script": { + "id": "fe59794f-878e-4c0a-8cd2-d76a285cbd48", + "exec": [ + "pm.test(\"response must be valid\", function () {\r", + " pm.response.to.be.ok;\r", + " pm.response.to.be.withBody;\r", + " pm.response.to.be.json;\r", + "});\r", + "\r", + "pm.test(\"status code is 200\", function () {\r", + " pm.expect(pm.response.code).to.eql(200);\r", + "});\r", + "\r", + "" + ], + "type": "text/javascript" + } + } + ], + "protocolProfileBehavior": { + "disabledSystemHeaders": { + "accept": true + } + }, + "request": { + "auth": { + "type": "bearer", + "bearer": [ + { + "key": "token", + "value": "{{token}}", + "type": "string" + } + ] + }, + "method": "GET", + "header": [ + { + "key": "accept", + "value": "application/json", + "type": "text" + }, + { + "key": "data-partition-id", + "value": "{{data_partition}}", + "type": "text" + } + ], + "url": { + "raw": "{{base_url}}/ddms/v2/status", + "host": [ + "{{base_url}}" + ], + "path": [ + "ddms", + "v2", + "status" + ] + } + }, + "response": [] + } + ], + "event": [ + { + "listen": "prerequest", + "script": { + "id": "7cbef63d-84ca-4add-a9f2-c0427f6b220d", + "type": "text/javascript", + "exec": [ + "Common = {", + " debug: function(msg, origin) {", + " prefix = (typeof origin !== 'undefined') ? '[' + origin + '] ' : ''", + " console.info(prefix + msg)", + " },", + "", + " resolve_variable: function(name) {", + " let value = pm.variables.get(name)", + " for(let idx = value.indexOf('{{', 0); idx >= 0; idx = value.indexOf('{{', idx+2)) {", + " let idx_end = value.indexOf('}}', idx)", + " if(idx_end > idx +2) {", + " let nested_var = value.substring(idx+2, idx_end)", + " let nested_value = Common.resolve_variable(nested_var)", + " value = value.replace('{{'+nested_var+'}}', nested_value)", + " idx=0", + " }", + " }", + " return value", + " },", + "", + " base_url: function() {", + " return Common.resolve_variable('base_url')", + " }", + "};" + ] + } + }, + { + "listen": "test", + "script": { + "id": "8250f5ab-46ea-4999-84e2-f2d840d8c9a7", + "type": "text/javascript", + "exec": [ + "" + ] + } + } + ], + "variable": [ + { + "id": "c015aac0-dbd0-46ae-901e-f7e4413856a1", + "key": "base_url", + "value": "" + }, + { + "id": "e3692127-889a-4e11-9576-ec748d13f906", + "key": "token", + "value": "xxx" + }, + { + "id": "28c4c463-bd06-426c-bbf9-35e0003c1d68", + "key": "data_partition", + "value": "" + }, + { + "id": "dc7508c4-0f71-4c5f-848e-1b251e1b313e", + "key": "record_id", + "value": "" + }, + { + "id": "8f6c0400-23b1-432f-895f-eafc71948223", + "key": "cloud_provider", + "value": "local" + }, + { + "id": "e2122138-ac38-48d0-ad63-7c70cab7d0e1", + "key": "del_recursive_well_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-del-well:0000" + }, + { + "id": "f1f2e7b0-382d-4f58-9399-a3db06cc1139", + "key": "del_recursive_wellbore_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-del-wellbore:0000" + }, + { + "id": "ed0d74cd-4a4b-4eb5-a2f4-07cc2f82b0e4", + "key": "del_recursive_logset_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-del-logset:0000" + }, + { + "id": "b9eb773f-d37d-437b-ac9b-19a685b4f00e", + "key": "del_recursive_log_id", + "value": 
"{{data_partition}}:{{cloud_provider}}-wddms-del-log:0000" + }, + { + "id": "f02afb29-8b55-4530-8fd3-97e2bd85750c", + "key": "search_query_type", + "value": "query" + }, + { + "id": "5d4e7445-119f-4ba4-b2f1-7b055debb6ee", + "key": "search_well_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-well:0001" + }, + { + "id": "f9ccb2d9-85c0-4e1f-8913-7707ee994edc", + "key": "search_wellbore_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-wellbore:0001" + }, + { + "id": "232174b5-fe54-4405-85ae-0f44932bcffe", + "key": "search_wellbore_attribute", + "value": "data.state:\"North Dakota\"" + }, + { + "id": "6eca8af8-eade-4d1c-af37-e3375f4ad9bf", + "key": "search_logset_attribute", + "value": "data.classification:\"Quad-Combo\"" + }, + { + "id": "8fb98253-89fc-45e7-873d-ec492b77d412", + "key": "search_logset_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-logset:0001" + }, + { + "id": "0197d20b-38a8-488d-a979-c2b87f69721e", + "key": "search_logset2_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-logset:0002" + }, + { + "id": "c078b560-e658-417a-a5e7-ee14ec1cc73b", + "key": "search_marker_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-marker:0001" + }, + { + "id": "c2611e63-b768-4213-85f6-7ccd1a6e6f9c", + "key": "search_marker2_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-marker:0002" + }, + { + "id": "ae71d630-50fb-41bb-a40d-651ff0e437f0", + "key": "search_log_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-log:0001" + }, + { + "id": "19d9cc44-f72f-4ed0-93c4-dae7ebd10c82", + "key": "search_log2_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-log:0002" + }, + { + "id": "31cc99cb-08aa-49a9-b548-004e7307261f", + "key": "search_log3_id", + "value": "{{data_partition}}:{{cloud_provider}}-wddms-search-log:0003" + }, + { + "id": "eb89d8f0-0361-4387-86cd-0dfff30717e6", + "key": "dipsetKind", + "value": "{{data_partition}}:wks:dipSet:1.0.0" + }, + { + "id": "1faebae0-bc08-4e0e-aa4a-f51649f20904", + "key": "dipsetName", + "value": "" + }, + { + "id": "3321bda3-4185-4fda-883c-b77a1522f678", + "key": "dipsetId", + "value": "" + }, + { + "id": "0fd28758-a3a6-4b0b-a3d5-e27bfbc12ee5", + "key": "expected_dips", + "value": "" + }, + { + "id": "69005070-6c2d-4f7c-a74a-69056043f8c4", + "key": "acl_domain", + "value": "" + }, + { + "id": "69005070-6c2d-4f7c-a74a-69056043f8c4", + "key": "acl_owner", + "value": "data.default.owners@{{data_partition}}.{{acl_domain}}" + }, + { + "id": "fe87f63e-ffbe-4c8b-b317-b0b04d213e3b", + "key": "acl_viewer", + "value": "data.default.viewers@{{data_partition}}.{{acl_domain}}" + }, + { + "id": "4c8c1979-7ceb-4822-9b61-1bfd513ebff5", + "key": "legal_tag", + "value": "" + } + ], + "protocolProfileBehavior": {} +} \ No newline at end of file diff --git a/tests/integration/functional/__init__.py b/tests/integration/functional/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/integration/functional/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/tests/integration/functional/conftest.py b/tests/integration/functional/conftest.py
new file mode 100644
index 0000000000000000000000000000000000000000..5286c23579b03c24c535ea7cf42c605b0ea722fd
--- /dev/null
+++ b/tests/integration/functional/conftest.py
@@ -0,0 +1,159 @@
+# Copyright 2021 Schlumberger
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+import pathlib
+import pytest
+import logging
+import warnings
+
+
+logger = logging.getLogger()
+
+# disable pywin32 deprecation warning, imported by azure lib and artifacts-keyring
+warnings.filterwarnings("ignore", "the imp module is deprecated")
+
+integration_test_base_path = str(pathlib.Path(__file__).parent.absolute())
+sys.path.append(integration_test_base_path)  # set the current directory into the python path
+
+# put after sys.path update
+from .variables import Variables, CmdLineSpecialVar
+from .tests.fixtures import WDMS_Variables
+
+FILTER_IN_TAGS = set()
+FILTER_OUT_TAGS = set()
+
+
+def pytest_addoption(parser):
+    parser.addoption('--filter-tag', default='',
+                     help='include or exclude tests based on tag(s). Separate multiple tags with "|". '
+                          'Prefix a tag with "!" to filter out tests carrying that tag. Tags are case insensitive')
+
+    parser.addoption('--environment', default='',
+                     help='Specify an environment as a JSON file (postman format)')
+
+    parser.addoption('--timeout-request', type=int, default=0,
+                     help='Specify a timeout for requests (milliseconds), 0 means no timeout')
+
+    parser.addoption('--log-request-level', type=int, default=1,
+                     help='add info in the log for every request call: 0 nothing, 1 url & status_code, 2 full')
+
+    parser.addoption('--insecure', action='store_true',
+                     help='Disables SSL validations')
+
+    parser.addoption(
+        '--retry-on-error', default='',
+        help='retry up to 4 times on the specific error code (>=500). Separate multiple codes with "|"')
+
+    parser.addoption(
+        '--header', action='append',
+        help='header to set for any request (can be overridden at test level), format header_name: header_value')
+
+    parser.addoption(
+        '--param', action='append',
+        help='set a parameter value (it overrides environ file ones but can be overridden at test level), format param_name: param_value')
+
+
+def set_environment_from_config(pytest_config, variables: Variables):
+    local_env = Variables.load_env(integration_test_base_path + '/local_environment.json', ignore_empty=True)
+    variables.update_env(local_env)
+
+    env_file = pytest_config.getoption('environment', default=None)
+    if env_file:
+        loaded_env = Variables.load_env(env_file)
+        variables.update_env(loaded_env)
+
+    # timeout
+    timeout_request = pytest_config.getoption('timeout_request', default=0)
+    if timeout_request:
+        CmdLineSpecialVar.set_timeout_request(variables, int(timeout_request))
+
+    # log request level
+    log_request_level = pytest_config.getoption('log_request_level', default=1)
+    if log_request_level:
+        CmdLineSpecialVar.set_log_request_level(variables, int(log_request_level))
+
+    # disable ssl validation
+    disable_ssl_validation = pytest_config.getoption('insecure', default=False)
+    CmdLineSpecialVar.set_disable_ssl_validation(variables, disable_ssl_validation)
+    if disable_ssl_validation:
+        # only trigger this warning once, not on each call
+        warnings.filterwarnings("ignore", "Unverified HTTPS request is being made")
+
+    retry_on_error = pytest_config.getoption('retry_on_error', default=None)
+    if retry_on_error:
+        CmdLineSpecialVar.set_retry_on_error(variables, [int(e) for e in retry_on_error.split('|')])
+
+    # custom header
+    headers = pytest_config.getoption('header', default=None) or []
+    header_dict = {}
+    for header in headers:
+        if not isinstance(header, str):
+            continue
+        idx = header.find(':')
+        if idx > 1:
+            header_dict[header[: idx].strip()] = header[idx + 1:].strip()
+    if header_dict:
+        CmdLineSpecialVar.set_headers(variables, header_dict)
+
+    # custom param
+    params = pytest_config.getoption('param', default=None) or []
+
+    if params:
+        for param in params:
+            if not isinstance(param, str):
+                continue
+            idx = param.find(':')
+            if idx > 1:
+                variables.set(param[: idx].strip(), param[idx + 1:].strip())
+
+
+def pytest_configure(config):
+    set_environment_from_config(config, WDMS_Variables)
+
+    config.addinivalue_line("markers", "tag: add tags to a test to extend filtering capability")
+
+    if CmdLineSpecialVar.get_disable_ssl_validation(WDMS_Variables):
+        # filter the warning emitted when ssl validation is disabled, so the log is not spammed and real ones stay visible.
+        # it would be better to use action=once so it is reported only a single time, but that does not work properly,
+        # so just ignore this warning
+        config.addinivalue_line("filterwarnings", "ignore::urllib3.exceptions.InsecureRequestWarning")
+
+    # filter tags
+    tag_sequence = config.getoption('filter_tag', default=None)
+    for tag in tag_sequence.split('|'):
+        if len(tag) < 2:
+            continue
+        if tag[0] == '!':
+            FILTER_OUT_TAGS.add(tag[1:].lower())
+        else:
+            FILTER_IN_TAGS.add(tag.lower())
+
+
+def pytest_runtest_setup(item):
+    if FILTER_IN_TAGS or FILTER_OUT_TAGS:
+        item_tags = set()
+        for mark in item.iter_markers(name="tag"):
+            for arg in mark.args:
+                if isinstance(arg, list):
+                    item_tags.update({t.lower() for t in arg})
+                else:
+                    item_tags.add(arg.lower())
+        if FILTER_IN_TAGS:
+            if not FILTER_IN_TAGS.intersection(item_tags):
+                pytest.skip('unmatched tags: ' + '|'.join(FILTER_IN_TAGS))
+
+        if FILTER_OUT_TAGS:
+            if FILTER_OUT_TAGS.intersection(item_tags):
+                pytest.skip('matched tags: ' + '|'.join(FILTER_OUT_TAGS))
diff --git a/tests/integration/functional/local_environment.json b/tests/integration/functional/local_environment.json
new file mode 100644
index 0000000000000000000000000000000000000000..905e7cda6a2048509523480d29e4dcadca1ae0cd
--- /dev/null
+++ b/tests/integration/functional/local_environment.json
@@ -0,0 +1,26 @@
+{
+  "description": "This is just a convenience file to make it easier to run the integration/functional tests locally. Empty values will be ignored",
+  "name": "wdms integration/functional local environment",
+  "values": [
+    {
+      "enabled": true,
+      "key": "token", "value": ""
+    },
+    {
+      "enabled": true,
+      "key": "cloud_provider", "value": ""
+    },
+    {
+      "enabled": true,
+      "key": "base_url", "value": ""
+    },
+    {
+      "enabled": false,
+      "key": "acl_domain", "value": ""
+    },
+    {
+      "enabled": false,
+      "key": "legal_tag", "value": ""
+    }
+  ]
+}
diff --git a/tests/integration/functional/pytest.ini b/tests/integration/functional/pytest.ini
new file mode 100644
index 0000000000000000000000000000000000000000..f5284fc1c7fcb50d31aa1ee3cf5c0d92c6221cc9
--- /dev/null
+++ b/tests/integration/functional/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+log_format = %(asctime)s [%(levelname)s] - %(message)s
+log_cli_level = 20
+log_cli = 1
\ No newline at end of file
diff --git a/tests/integration/functional/request_builders/__init__.py b/tests/integration/functional/request_builders/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..ea2c210f2cd267e9af0ebbee4a92aea2b7b1fb3b
--- /dev/null
+++ b/tests/integration/functional/request_builders/__init__.py
@@ -0,0 +1,184 @@
+# Copyright 2021 Schlumberger
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
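The conftest.py above registers a custom `tag` marker and compares it against `--filter-tag` during test setup; a minimal sketch of how a test module might use it (the test name and tags are illustrative only, and the example invocations are given as comments):

    import pytest

    # Hypothetical tagged test: pytest_runtest_setup above accepts plain strings
    # or lists as marker arguments and compares them case-insensitively.
    @pytest.mark.tag('smoke', 'crud')
    def test_create_well_smoke():
        assert True

    # Example invocations, with the semantics implemented in conftest.py above:
    #   pytest --filter-tag smoke         -> keep only tests tagged "smoke"
    #   pytest --filter-tag '!smoke'      -> skip tests tagged "smoke"
    #   pytest --filter-tag 'smoke|crud'  -> keep tests tagged "smoke" or "crud"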
+ +from typing import Union, List +from .wdms import about +from .wdms import version +from .wdms import status +from .wdms import crud +from .wdms import error_cases +from .wdms import model_extensibility +from .wdms import recursive_delete +from .wdms import search_apis + + +def build_request(path: Union[str, List[str]], sep: str = ".") -> "RequestRunner": + path = path.split(sep) if isinstance(path, str) else path + n_path = ".".join([p.lower().replace(" ", "_") for p in path]) + if n_path == "crud.well.get_well": + return crud.well.build_request_get_well() + if n_path == "crud.well.delete_well": + return crud.well.build_request_delete_well() + if n_path == "crud.well.get_well_specific_version": + return crud.well.build_request_get_well_specific_version() + if n_path == "crud.well.get_versions_of_well": + return crud.well.build_request_get_versions_of_well() + if n_path == "crud.well.create_well": + return crud.well.build_request_create_well() + if n_path == "crud.wellbore.delete_wellbore": + return crud.wellbore.build_request_delete_wellbore() + if n_path == "crud.wellbore.get_wellbore_specific_version": + return crud.wellbore.build_request_get_wellbore_specific_version() + if n_path == "crud.wellbore.get_wellbore": + return crud.wellbore.build_request_get_wellbore() + if n_path == "crud.wellbore.get_versions_of_wellbore": + return crud.wellbore.build_request_get_versions_of_wellbore() + if n_path == "crud.wellbore.create_wellbore": + return crud.wellbore.build_request_create_wellbore() + if n_path == "crud.logset.get_versions_of_logset": + return crud.logset.build_request_get_versions_of_logset() + if n_path == "crud.logset.get_logset_specific_version": + return crud.logset.build_request_get_logset_specific_version() + if n_path == "crud.logset.get_logset": + return crud.logset.build_request_get_logset() + if n_path == "crud.logset.delete_logset": + return crud.logset.build_request_delete_logset() + if n_path == "crud.logset.create_logset": + return crud.logset.build_request_create_logset() + if n_path == "crud.marker.delete_marker": + return crud.marker.build_request_delete_marker() + if n_path == "crud.marker.get_versions_of_marker": + return crud.marker.build_request_get_versions_of_marker() + if n_path == "crud.marker.get_marker": + return crud.marker.build_request_get_marker() + if n_path == "crud.marker.get_marker_specific_version": + return crud.marker.build_request_get_marker_specific_version() + if n_path == "crud.marker.create_marker": + return crud.marker.build_request_create_marker() + if n_path == "crud.trajectory.get_versions_of_trajectory": + return crud.trajectory.build_request_get_versions_of_trajectory() + if n_path == "crud.trajectory.get_trajectory": + return crud.trajectory.build_request_get_trajectory() + if n_path == "crud.trajectory.get_trajectory_specific_version": + return crud.trajectory.build_request_get_trajectory_specific_version() + if n_path == "crud.trajectory.delete_trajectory": + return crud.trajectory.build_request_delete_trajectory() + if n_path == "crud.trajectory.create_trajectory": + return crud.trajectory.build_request_create_trajectory() + if n_path == "crud.log.delete_log": + return crud.log.build_request_delete_log() + if n_path == "crud.log.get_versions_of_log": + return crud.log.build_request_get_versions_of_log() + if n_path == "crud.log.get_log_bulk_data": + return crud.log.build_request_get_log_bulk_data() + if n_path == "crud.log.get_log": + return crud.log.build_request_get_log() + if n_path == "crud.log.get_log_specific_version": + 
return crud.log.build_request_get_log_specific_version() + if n_path == "crud.log.create_log": + return crud.log.build_request_create_log() + if n_path == "crud.log.add_log_bulk_data": + return crud.log.build_request_add_log_bulk_data() + if n_path == "crud.dips.get_dipset": + return crud.dips.build_request_get_dipset() + if n_path == "crud.dips.query_dips": + return crud.dips.build_request_query_dips() + if n_path == "crud.dips.delete_dip": + return crud.dips.build_request_delete_dip() + if n_path == "crud.dips.create_dips": + return crud.dips.build_request_create_dips() + if n_path == "crud.dips.create__dipset": + return crud.dips.build_request_create__dipset() + if n_path == "crud.dips.insert_dips": + return crud.dips.build_request_insert_dips() + if n_path == "crud.dips.get_dip_from_index": + return crud.dips.build_request_get_dip_from_index() + if n_path == "crud.dips.delete_dipset": + return crud.dips.build_request_delete_dipset() + if n_path == "crud.dips.patch_dip": + return crud.dips.build_request_patch_dip() + if n_path == "crud.dips.get_dips": + return crud.dips.build_request_get_dips() + if n_path == "error_cases.create_log_with_invalid_data_should_422": + return error_cases.build_request_create_log_with_invalid_data_should_422() + if n_path == "model_extensibility.get_log_check_for_extra_fields": + return model_extensibility.build_request_get_log_check_for_extra_fields() + if n_path == "model_extensibility.clean_up_delete_log": + return model_extensibility.build_request_clean_up_delete_log() + if n_path == "model_extensibility.create_log_with_extra_fields": + return model_extensibility.build_request_create_log_with_extra_fields() + if n_path == "recursive_delete.setup.recusive_del_setup_end": + return recursive_delete.setup.build_request_recursive_del_setup_end() + if n_path == "recursive_delete.setup.recusive_del_setup_create_well": + return recursive_delete.setup.build_request_recursive_del_setup_create_well() + if n_path == "recursive_delete.setup.recusive_del_setup_check_state_start": + return recursive_delete.setup.build_request_recursive_del_setup_check_state_start() + if n_path == "recursive_delete.setup.recusive_del_setup_create_logs": + return recursive_delete.setup.build_request_recursive_del_setup_create_logs() + if n_path == "recursive_delete.setup.recusive_del_setup_create_logset": + return recursive_delete.setup.build_request_recursive_del_setup_create_logset() + if n_path == "recursive_delete.setup.recusive_del_setup_create_wellbore": + return recursive_delete.setup.build_request_recursive_del_setup_create_wellbore() + if n_path == "recursive_delete.setup.recusive_del_setup_create_record_refs": + return recursive_delete.setup.build_request_recursive_del_setup_create_record_refs() + if n_path == "recursive_delete.delete_well.check_logset_is_deleted": + return recursive_delete.delete_well.build_request_check_logset_is_deleted() + if n_path == "recursive_delete.delete_well.recursive_delete_well": + return recursive_delete.delete_well.build_request_recursive_delete_well() + if n_path == "recursive_delete.delete_well.check_log_is_deleted": + return recursive_delete.delete_well.build_request_check_log_is_deleted() + if n_path == "recursive_delete.delete_well.check_wellbore_is_deleted": + return recursive_delete.delete_well.build_request_check_wellbore_is_deleted() + if n_path == "search_apis.setup.seach_tests_setup_end": + return search_apis.setup.build_request_seach_tests_setup_end() + if n_path == "search_apis.setup.seach_tests_setup_create_logsets": + return 
search_apis.setup.build_request_seach_tests_setup_create_logsets() + if n_path == "search_apis.setup.seach_tests_setup_create_record_refs": + return search_apis.setup.build_request_seach_tests_setup_create_record_refs() + if n_path == "search_apis.setup.seach_tests_setup_create_logs": + return search_apis.setup.build_request_seach_tests_setup_create_logs() + if n_path == "search_apis.setup.seach_tests_setup_create_wellbore": + return search_apis.setup.build_request_seach_tests_setup_create_wellbore() + if n_path == "search_apis.setup.seach_tests_setup_create_markers": + return search_apis.setup.build_request_seach_tests_setup_create_markers() + if n_path == "search_apis.setup.seach_tests_setup_start": + return search_apis.setup.build_request_seach_tests_setup_start() + if n_path == "search_apis.search.search_logs_by_logset_id": + return search_apis.search.build_request_search_logs_by_logset_id() + if n_path == "search_apis.search.search_markers_by_wellbore_id": + return search_apis.search.build_request_search_markers_by_wellbore_id() + if n_path == "search_apis.search.search_wellbores_by_geo_polygon": + return search_apis.search.build_request_search_wellbores_by_geo_polygon() + if n_path == "search_apis.search.search_logs_by_wellbore_id": + return search_apis.search.build_request_search_logs_by_wellbore_id() + if n_path == "search_apis.search.search_logset_by_wellbores_attribute": + return search_apis.search.build_request_search_logset_by_wellbores_attribute() + if n_path == "search_apis.search.search_logs_by_wellbores_attribute": + return search_apis.search.build_request_search_logs_by_wellbores_attribute() + if n_path == "search_apis.search.search_wellbores_by_bounding_box": + return search_apis.search.build_request_search_wellbores_by_bounding_box() + if n_path == "search_apis.search.search_wellbores_by_distance": + return search_apis.search.build_request_search_wellbores_by_distance() + if n_path == "search_apis.search.search_logs_by_logsets_attribute": + return search_apis.search.build_request_search_logs_by_logsets_attribute() + if n_path == "search_apis.search.search_logset_by_wellbore_id": + return search_apis.search.build_request_search_logset_by_wellbore_id() + if n_path == "about": + return about.build_request_about() + if n_path == "version": + return version.build_request_version() + if n_path == "status": + return status.build_request_status() + + raise ValueError(f'No request matches the path {n_path}') diff --git a/tests/integration/functional/request_builders/wdms/__init__.py b/tests/integration/functional/request_builders/wdms/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..017c361b066364ade62d2a5fd5e6766e16f310fc --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
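A minimal usage sketch for the `build_request` dispatcher defined above: the path is lower-cased and spaces are folded to underscores before matching, and unknown paths raise a ValueError. The flat import mirrors the `from request_runner import ...` style used by the builders, but is an assumption here:

    from request_builders import build_request  # assumed flat import, as with request_runner

    # Dotted-string form, normalized to "crud.well.create_well" before dispatch.
    runner = build_request("CRUD.Well.Create Well")

    # Equivalent list form; each element is normalized the same way.
    runner = build_request(["crud", "wellbore", "get_wellbore"])

    # build_request(...) returns a RequestRunner wrapping the prototype Request.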
+ +from ..wdms import about, version, status, crud, error_cases, model_extensibility, recursive_delete, search_apis + +__all__ = ["about", "version", "status", "crud", "error_cases", "model_extensibility", "recursive_delete", "search_apis"] diff --git a/tests/integration/functional/request_builders/wdms/about.py b/tests/integration/functional/request_builders/wdms/about.py new file mode 100644 index 0000000000000000000000000000000000000000..a748ff4139c0b876ae9445fbc2776f58240dcbbe --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/about.py @@ -0,0 +1,29 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_about() -> RequestRunner: + rq_proto = Request( + name='about', + method='GET', + url='{{base_url}}/ddms/v2/about', + headers={ + 'accept': 'application/json', + 'Connection': '{{header_connection}}', + }, + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/crud/__init__.py b/tests/integration/functional/request_builders/wdms/crud/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..9e4d98e06a110fc27952cb551f730b494e86bd78 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ..crud import well, wellbore, logset, marker, trajectory, log, dips + +__all__ = ["well", "wellbore", "logset", "marker", "trajectory", "log", "dips"] diff --git a/tests/integration/functional/request_builders/wdms/crud/dips.py b/tests/integration/functional/request_builders/wdms/crud/dips.py new file mode 100644 index 0000000000000000000000000000000000000000..f4d863e11f1e090e60a55ad7c66dfff1f4088685 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/dips.py @@ -0,0 +1,229 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
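Every builder in this package follows the prototype pattern shown in about.py: a Request is declared with `{{...}}` placeholders (base_url, token, data_partition, record ids) and handed to a RequestRunner, which presumably resolves those placeholders from the test environment when the call is executed. A sketch for a hypothetical endpoint to make the pattern explicit (the route and variable names are illustrative only):

    from request_runner import RequestRunner, Request


    def build_request_get_example() -> RequestRunner:
        # Illustrative only: "examples" is not a real WDMS route.
        rq_proto = Request(
            name='Get example',
            method='GET',
            url='{{base_url}}/ddms/v2/examples/{{example_record_id}}',
            headers={
                'accept': 'application/json',
                'data-partition-id': '{{data_partition}}',
                'Connection': '{{header_connection}}',
                'Authorization': 'Bearer {{token}}',
            },
        )
        return RequestRunner(rq_proto)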
+ +from request_runner import RequestRunner, Request + + +def build_request_get_dipset() -> RequestRunner: + rq_proto = Request( + name='Get dipset', + method='GET', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_query_dips() -> RequestRunner: + rq_proto = Request( + name='Query dips', + method='GET', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/query?minReference=3500&maxReference=8000&classification=breakout', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_delete_dip() -> RequestRunner: + rq_proto = Request( + name='Delete dip', + method='DELETE', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/0', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_dips() -> RequestRunner: + rq_proto = Request( + name='Create dips', + method='POST', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ + { + "reference": {"unitKey":"meter", "value":2000.0}, + "azimuth": {"unitKey":"dega", "value":34}, + "inclination": {"unitKey":"dega", "value":27} + }, + { + "reference": {"unitKey":"meter", "value":1000.0}, + "azimuth": {"unitKey":"dega", "value":0.123456789121212121212}, + "inclination": {"unitKey":"dega", "value":12}, + "quality": {"unitKey":"unitless", "value":1}, + "xCoordinate": {"unitKey":"m", "value":1}, + "yCoordinate": {"unitKey":"m", "value":2}, + "zCoordinate": {"unitKey":"m", "value":3}, + "classification": "fracture" + }, + { + "reference": {"unitKey":"meter", "value":4000.0}, + "azimuth": {"unitKey":"dega", "value":4}, + "inclination": {"unitKey":"dega", "value":2}, + "classification": "breakout" + } , + { + "reference": {"unitKey":"meter", "value":3000.0}, + "azimuth": {"unitKey":"dega", "value":3}, + "inclination": {"unitKey":"dega", "value":1}, + "classification": "fracture" + } +] + +""" + ) + return RequestRunner(rq_proto) + + +def build_request_create__dipset() -> RequestRunner: + rq_proto = Request( + name='Create dipset', + method='POST', + url='{{base_url}}/ddms/v2/dipsets', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": { "name": "wdms_e2e_dipset_Keon" }, + "kind": "{{dipsetKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_insert_dips() -> RequestRunner: + rq_proto = Request( + name='Insert dips', + method='POST', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/insert', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ + + { + "reference": {"unitKey":"meter", "value":1500.0}, + "azimuth": {"unitKey":"dega", "value":77}, + "inclination": {"unitKey":"dega", "value":81} + }, + { + "reference": {"unitKey":"meter", "value":888.0}, + "azimuth": {"unitKey":"dega", "value":666.66}, + 
"inclination": {"unitKey":"dega", "value":99.99} + } + +] + +""" + ) + return RequestRunner(rq_proto) + + +def build_request_get_dip_from_index() -> RequestRunner: + rq_proto = Request( + name='Get dip from index', + method='GET', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/1', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_delete_dipset() -> RequestRunner: + rq_proto = Request( + name='Delete dipset', + method='DELETE', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_patch_dip() -> RequestRunner: + rq_proto = Request( + name='Patch dip', + method='PATCH', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips/0?=', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" + +{ + "reference": {"unitKey":"meter", "value":1000.0}, + "azimuth": {"unitKey":"dega", "value":8}, + "inclination": {"unitKey":"dega", "value":12}, + "classification": "fracture", + "quality" : {"unitKey":"unitless", "value":0}, + "xCoordinate" : {"unitKey":"meter", "value":12}, + "yCoordinate" : {"unitKey":"meter", "value":12}, + "zCoordinate" : {"unitKey":"meter", "value":12} + +} + +""" + ) + return RequestRunner(rq_proto) + + +def build_request_get_dips() -> RequestRunner: + rq_proto = Request( + name='Get dips', + method='GET', + url='{{base_url}}/ddms/v2/dipsets/{{dipsetId}}/dips', + headers={ + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/crud/log.py b/tests/integration/functional/request_builders/wdms/crud/log.py new file mode 100644 index 0000000000000000000000000000000000000000..ba125c89b41457d805a274e8d2a1a32fe9215f5c --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/log.py @@ -0,0 +1,142 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from request_runner import RequestRunner, Request + + +def build_request_delete_log() -> RequestRunner: + rq_proto = Request( + name='Delete log', + method='DELETE', + url='{{base_url}}/ddms/v2/logs/{{log_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_versions_of_log() -> RequestRunner: + rq_proto = Request( + name='Get versions of log', + method='GET', + url='{{base_url}}/ddms/v2/logs/{{log_record_id}}/versions', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_log_bulk_data() -> RequestRunner: + rq_proto = Request( + name='Get log bulk data', + method='GET', + url='{{base_url}}/ddms/v2/logs/{{log_record_id}}/data?orient=split', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_log() -> RequestRunner: + rq_proto = Request( + name='Get log', + method='GET', + url='{{base_url}}/ddms/v2/logs/{{log_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_log_specific_version() -> RequestRunner: + rq_proto = Request( + name='Get log specific version', + method='GET', + url='{{base_url}}/ddms/v2/logs/{{log_record_id}}/versions/{{log_record_version}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_log() -> RequestRunner: + rq_proto = Request( + name='Create log', + method='POST', + url='{{base_url}}/ddms/v2/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": {"name": "wdms_e2e_log"}, + "kind": "{{logKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_add_log_bulk_data() -> RequestRunner: + rq_proto = Request( + name='Add log bulk data', + method='POST', + url='{{base_url}}/ddms/v2/logs/{{log_record_id}}/data?orient=split', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload={ + "columns": [ + "Ref", + "col_1", + "col_2" + ], + "index": [0, 1, 2], + "data": [ + [1, 10, 11], + [1.5, 20, 21], + [2, 30, 31] + ] + } + ) + return RequestRunner(rq_proto) diff --git a/tests/integration/functional/request_builders/wdms/crud/logset.py b/tests/integration/functional/request_builders/wdms/crud/logset.py new file mode 100644 index 0000000000000000000000000000000000000000..09e79ea04acc6638bb17304a223e9e04947a554c --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/logset.py @@ -0,0 +1,100 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_get_versions_of_logset() -> RequestRunner: + rq_proto = Request( + name='Get versions of logset', + method='GET', + url='{{base_url}}/ddms/v2/logsets/{{logset_record_id}}/versions', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_logset_specific_version() -> RequestRunner: + rq_proto = Request( + name='Get logset specific version', + method='GET', + url='{{base_url}}/ddms/v2/logsets/{{logset_record_id}}/versions/{{logset_record_version}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_logset() -> RequestRunner: + rq_proto = Request( + name='Get logset', + method='GET', + url='{{base_url}}/ddms/v2/logsets/{{logset_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_delete_logset() -> RequestRunner: + rq_proto = Request( + name='Delete logset', + method='DELETE', + url='{{base_url}}/ddms/v2/logsets/{{logset_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_logset() -> RequestRunner: + rq_proto = Request( + name='Create logset', + method='POST', + url='{{base_url}}/ddms/v2/logsets', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": {"name": "wdms_e2e_logset"}, + "kind": "{{logSetKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/crud/marker.py b/tests/integration/functional/request_builders/wdms/crud/marker.py new file mode 100644 index 0000000000000000000000000000000000000000..47ab786e5f5af673e0098f3c7678245516feb573 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/marker.py @@ -0,0 +1,103 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_delete_marker() -> RequestRunner: + rq_proto = Request( + name='Delete marker', + method='DELETE', + url='{{base_url}}/ddms/v2/markers/{{marker_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_versions_of_marker() -> RequestRunner: + rq_proto = Request( + name='Get versions of marker', + method='GET', + url='{{base_url}}/ddms/v2/markers/{{marker_record_id}}/versions', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_marker() -> RequestRunner: + rq_proto = Request( + name='Get marker', + method='GET', + url='{{base_url}}/ddms/v2/markers/{{marker_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_marker_specific_version() -> RequestRunner: + rq_proto = Request( + name='Get marker specific version', + method='GET', + url='{{base_url}}/ddms/v2/markers/{{marker_record_id}}/versions/{{marker_record_version}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_marker() -> RequestRunner: + rq_proto = Request( + name='Create marker', + method='POST', + url='{{base_url}}/ddms/v2/markers', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": { + "name": "wdms_e2e_marker", + "md": { "unitKey": "Unknown", "value": 0 } + }, + "kind": "{{markerKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/crud/trajectory.py b/tests/integration/functional/request_builders/wdms/crud/trajectory.py new file mode 100644 index 0000000000000000000000000000000000000000..60d771279d3a27593312fffb404cd741af06a571 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/trajectory.py @@ -0,0 +1,169 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_get_versions_of_trajectory() -> RequestRunner: + rq_proto = Request( + name='Get versions of trajectory', + method='GET', + url='{{base_url}}/ddms/v2/trajectories/{{trajectory_record_id}}/versions', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_trajectory() -> RequestRunner: + rq_proto = Request( + name='Get trajectory', + method='GET', + url='{{base_url}}/ddms/v2/trajectories/{{trajectory_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_trajectory_specific_version() -> RequestRunner: + rq_proto = Request( + name='Get trajectory specific version', + method='GET', + url='{{base_url}}/ddms/v2/trajectories/{{trajectory_record_id}}/versions/{{trajectory_record_version}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_delete_trajectory() -> RequestRunner: + rq_proto = Request( + name='Delete trajectory', + method='DELETE', + url='{{base_url}}/ddms/v2/trajectories/{{trajectory_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_trajectory_with_id() -> RequestRunner: + rq_proto = Request( + name='Create trajectory', + method='POST', + url='{{base_url}}/ddms/v2/trajectories', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ + { + "acl": {{record_acl}}, + "legal": {{record_legal}}, + "kind":"{{trajectoryKind}}", + "id":"{{trajectory_record_id}}", + "data": {{trajectory_data}} + } +] +""" + ) + return RequestRunner(rq_proto) + +def build_request_create_trajectory() -> RequestRunner: + rq_proto = Request( + name='Create trajectory', + method='POST', + url='{{base_url}}/ddms/v2/trajectories', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ + { + "acl": {{record_acl}}, + "legal": {{record_legal}}, + "data": {{trajectory_data}}, + "kind":"{{trajectoryKind}}" + } +] +""" + ) + return RequestRunner(rq_proto) + +def build_request_get_trajectory_bulk_data() -> RequestRunner: + rq_proto = Request( + name='Create trajectory data', + method='GET', + url='{{base_url}}/ddms/v2/trajectories/{{trajectory_record_id}}/data?orient=split', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + } + ) + return RequestRunner(rq_proto) + + +def build_request_add_trajectory_bulk_data() -> RequestRunner: + rq_proto = Request( + name='Add trajectory bulk data', + method='POST', + 
url='{{base_url}}/ddms/v2/trajectories/{{trajectory_record_id}}/data?orient=split', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload={ + "columns": [ + "MD", + "X", + "Y" + ], + "index": [0, 1, 2, 3, 4], + "data": [ + [0.0, 1001, 2001], + [0.5, 1002, 2002], + [1.0, 1003, 2003], + [1.5, 1004, 2004], + [2.0, 1005, 2005] + ] + } + ) + return RequestRunner(rq_proto) diff --git a/tests/integration/functional/request_builders/wdms/crud/well.py b/tests/integration/functional/request_builders/wdms/crud/well.py new file mode 100644 index 0000000000000000000000000000000000000000..34d6bcb82f6954daf85d886ba613c3029edf4f73 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/well.py @@ -0,0 +1,100 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_get_well() -> RequestRunner: + rq_proto = Request( + name='Get well', + method='GET', + url='{{base_url}}/ddms/v2/wells/{{well_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_delete_well() -> RequestRunner: + rq_proto = Request( + name='Delete well', + method='DELETE', + url='{{base_url}}/ddms/v2/wells/{{well_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_well_specific_version() -> RequestRunner: + rq_proto = Request( + name='Get well specific version', + method='GET', + url='{{base_url}}/ddms/v2/wells/{{well_record_id}}/versions/{{well_record_version}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_versions_of_well() -> RequestRunner: + rq_proto = Request( + name='Get versions of well', + method='GET', + url='{{base_url}}/ddms/v2/wells/{{well_record_id}}/versions', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_well() -> RequestRunner: + rq_proto = Request( + name='Create well', + method='POST', + url='{{base_url}}/ddms/v2/wells', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": {"name": "wdms_e2e_well"}, + "kind": 
"{{wellKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/crud/wellbore.py b/tests/integration/functional/request_builders/wdms/crud/wellbore.py new file mode 100644 index 0000000000000000000000000000000000000000..5597066484051ee8047fa8e902eabd2e496c4820 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/crud/wellbore.py @@ -0,0 +1,101 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_delete_wellbore() -> RequestRunner: + rq_proto = Request( + name='Delete wellbore', + method='DELETE', + url='{{base_url}}/ddms/v2/wellbores/{{wellbore_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_wellbore_specific_version() -> RequestRunner: + rq_proto = Request( + name='Get wellbore specific version', + method='GET', + url='{{base_url}}/ddms/v2/wellbores/{{wellbore_record_id}}/versions/{{wellbore_record_version}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_wellbore() -> RequestRunner: + rq_proto = Request( + name='Get wellbore', + method='GET', + url='{{base_url}}/ddms/v2/wellbores/{{wellbore_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_get_versions_of_wellbore() -> RequestRunner: + rq_proto = Request( + name='Get versions of wellbore', + method='GET', + url='{{base_url}}/ddms/v2/wellbores/{{wellbore_record_id}}/versions', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_wellbore() -> RequestRunner: + rq_proto = Request( + name='Create wellbore', + method='POST', + url='{{base_url}}/ddms/v2/wellbores', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": {"name": "wdms_e2e_wellbore"}, + "kind": "{{wellboreKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/error_cases.py b/tests/integration/functional/request_builders/wdms/error_cases.py new file mode 100644 index 0000000000000000000000000000000000000000..8921d531ea63191503480400faa0df07f480dfa2 --- /dev/null +++ 
b/tests/integration/functional/request_builders/wdms/error_cases.py @@ -0,0 +1,34 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_create_log_with_invalid_data_should_422() -> RequestRunner: + rq_proto = Request( + name='create_log_with_invalid_data_should_422', + method='POST', + url='{{base_url}}/ddms/v2/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[{"data":{"name":"incomplete_data"}}] +""" + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/model_extensibility.py b/tests/integration/functional/request_builders/wdms/model_extensibility.py new file mode 100644 index 0000000000000000000000000000000000000000..1602b757e67fc564b1094b8dc82e2fc3adff21a9 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/model_extensibility.py @@ -0,0 +1,96 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
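The invalid payload above carries only data.name and omits the acl, legal and kind blocks that every other create payload in these builders supplies, which is why the request name expects a 422 validation error. For contrast, a minimal sketch of both shapes (placeholder strings stand in for the objects resolved at run time):

    # Expected to be rejected with 422: no acl / legal / kind.
    invalid_log = [{"data": {"name": "incomplete_data"}}]

    # Accepted shape, as used by build_request_create_log; in the real payload the
    # {{record_acl}} and {{record_legal}} placeholders expand to JSON objects.
    valid_log = [{
        "acl": "{{record_acl}}",
        "legal": "{{record_legal}}",
        "data": {"name": "wdms_e2e_log"},
        "kind": "{{logKind}}",
    }]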
+ +from request_runner import RequestRunner, Request + + +def build_request_get_record_check_for_extra_fields() -> RequestRunner: + rq_proto = Request( + name='get_record_check_for_extra_fields', + method='GET', + url='{{base_url}}/ddms/v2/{{base_url_entity}}/{{record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_clean_up_delete_log() -> RequestRunner: + rq_proto = Request( + name='clean_up_delete_log', + method='DELETE', + url='{{base_url}}/ddms/v2/logs/{{record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_create_log_with_extra_fields() -> RequestRunner: + rq_proto = Request( + name='create_log_with_extra_fields', + method='POST', + url='{{base_url}}/ddms/v2/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": { + "name": "wdms_e2e_well", + "xxx_extra_at_data": "value_at_data" + }, + "kind": "{{logKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_create_data_extra_fields() -> RequestRunner: + rq_proto = Request( + name='build_request_create_data_extra_fields', + method='POST', + url='{{base_url}}/ddms/v2/{{base_url_entity}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": {{data}}, + "kind": "{{entity_kind}}" +} +] +""" + ) + return RequestRunner(rq_proto) \ No newline at end of file diff --git a/tests/integration/functional/request_builders/wdms/recursive_delete/__init__.py b/tests/integration/functional/request_builders/wdms/recursive_delete/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8e16cc0e90568be88041e76cf53bfcac2c09d8e5 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/recursive_delete/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
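The extensibility builders above create a log whose data block carries a non-schema field (xxx_extra_at_data) and then fetch the record back; the apparent intent is to verify that unknown fields survive the round trip instead of being stripped by the model. A hedged sketch of such a check, assuming the GET response has already been parsed into a dict (the runner/response API is not part of this change):

    def assert_extra_field_round_trips(record: dict) -> None:
        # `record` is the parsed body returned by get_record_check_for_extra_fields.
        assert record["data"]["xxx_extra_at_data"] == "value_at_data", \
            "extra field was stripped by the model"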
+ +from ..recursive_delete import setup, delete_well + +__all__ = ["setup", "delete_well"] diff --git a/tests/integration/functional/request_builders/wdms/recursive_delete/delete_well.py b/tests/integration/functional/request_builders/wdms/recursive_delete/delete_well.py new file mode 100644 index 0000000000000000000000000000000000000000..99e647d29ee3e25473f60f03c3754dccbba6efe6 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/recursive_delete/delete_well.py @@ -0,0 +1,76 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_check_logset_is_deleted() -> RequestRunner: + rq_proto = Request( + name='check logset is deleted', + method='GET', + url='{{base_url}}/ddms/v2/logsets/{{recursive_del_logset_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_recursive_delete_well() -> RequestRunner: + rq_proto = Request( + name='recursive delete well', + method='DELETE', + url='{{base_url}}/ddms/v2/wells/{{recursive_del_well_id}}?recursive=true', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_check_log_is_deleted() -> RequestRunner: + rq_proto = Request( + name='check log is deleted', + method='GET', + url='{{base_url}}/ddms/v2/logs/{{recursive_del_log_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_check_wellbore_is_deleted() -> RequestRunner: + rq_proto = Request( + name='check wellbore is deleted', + method='GET', + url='{{base_url}}/ddms/v2/wellbores/{{recursive_del_wellbore_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/recursive_delete/setup.py b/tests/integration/functional/request_builders/wdms/recursive_delete/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..d6ded7e819009e7a0d55f0d08d1b8d3f19f30408 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/recursive_delete/setup.py @@ -0,0 +1,197 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_recursive_del_setup_end() -> RequestRunner: + rq_proto = Request( + name='recursive_del_setup_end', + method='DELETE', + url='{{base_url}}/ddms/v2/logsets/{{recursive_del_ref_record_id}}', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_recursive_del_setup_create_well() -> RequestRunner: + rq_proto = Request( + name='recursive_del_setup_create_well', + method='POST', + url='{{base_url}}/ddms/v2/wells', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": {"name": "wdms_e2e_recursive_del_well"}, + "kind": "{{wellKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_recursive_del_setup_check_state_start() -> RequestRunner: + rq_proto = Request( + name='recursive_del_setup_check_state_start', + method='POST', + url='{{base_url}}/ddms/query', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +{ + "kind": "{{logSetKind}}", + "query": "data.name:\"wdms_e2e_recursive_del_refs\"", + "returnedFields": ["id", "data.channelNames"] +} + +""" + ) + return RequestRunner(rq_proto) + + +def build_request_recursive_del_setup_create_logs() -> RequestRunner: + rq_proto = Request( + name='recursive_del_setup_create_logs', + method='POST', + url='{{base_url}}/ddms/v2/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": { + "name": "wdms_e2e_recursive_del_log", + "relationships": { + "well": {"id":"{{recursive_del_well_id}}"}, + "logset": {"id":"{{recursive_del_logset_id}}"} + } + }, + "kind": "{{logKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_recursive_del_setup_create_logset() -> RequestRunner: + rq_proto = Request( + name='recursive_del_setup_create_logset', + method='POST', + url='{{base_url}}/ddms/v2/logsets', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": { + "name": "wdms_e2e_recursive_del_logset", + "relationships": { + "well": {"id":"{{recursive_del_well_id}}"}, + "wellbore": {"id":"{{recursive_del_wellbore_id}}"} + } + }, + "kind": "{{logSetKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_recursive_del_setup_create_wellbore() -> RequestRunner: + rq_proto = Request( + name='recursive_del_setup_create_wellbore', + method='POST', + 
url='{{base_url}}/ddms/v2/wellbores', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": { + "name": "wdms_e2e_recursive_del_wellbore", + "relationships": { "well": {"id":"{{recursive_del_well_id}}"} } + }, + "kind": "{{wellboreKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_recursive_del_setup_create_record_refs() -> RequestRunner: + rq_proto = Request( + name='recursive_del_setup_create_record_refs', + method='POST', + url='{{base_url}}/ddms/v2/logsets', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "kind": "{{logSetKind}}", + "data": { + "name": "wdms_e2e_recursive_del_refs", + "description": "this is not an actual logset, but a record used in wdms integration tests to ref some other records. Purpose is for testing only.", + "channelNames": [ + "{{recursive_del_well_id}}", + "{{recursive_del_wellbore_id}}", + "{{recursive_del_logset_id}}", + "{{recursive_del_log_id}}" + ] + } +} +] +""" + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/search_apis/__init__.py b/tests/integration/functional/request_builders/wdms/search_apis/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..f68dbd5f7d1255e43ef2c720f62772ffd4c3af80 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/search_apis/__init__.py @@ -0,0 +1,17 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ..search_apis import setup, search + +__all__ = ["setup", "search"] diff --git a/tests/integration/functional/request_builders/wdms/search_apis/search.py b/tests/integration/functional/request_builders/wdms/search_apis/search.py new file mode 100644 index 0000000000000000000000000000000000000000..dbe5021f6ba27fa0336887085dc509f0084ca8ba --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/search_apis/search.py @@ -0,0 +1,227 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
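The recursive-delete setup builders above create a small hierarchy (well → wellbore → logset → log) plus a "refs" record whose channelNames field lists the created ids for later cleanup. A hedged sketch of the presumed call order, with each id stored under the {{recursive_del_*}} variable that the next builder's payload references (the helper name is illustrative):

def setup_recursive_delete_records(env):
    well = build_request_recursive_del_setup_create_well().call(env)
    well.assert_ok()
    env.set('recursive_del_well_id', well.get_response_obj().recordIds[0])

    wellbore = build_request_recursive_del_setup_create_wellbore().call(env)
    wellbore.assert_ok()
    env.set('recursive_del_wellbore_id', wellbore.get_response_obj().recordIds[0])

    logset = build_request_recursive_del_setup_create_logset().call(env)
    logset.assert_ok()
    env.set('recursive_del_logset_id', logset.get_response_obj().recordIds[0])

    log = build_request_recursive_del_setup_create_logs().call(env)
    log.assert_ok()
    env.set('recursive_del_log_id', log.get_response_obj().recordIds[0])

    # the "refs" record only lists the ids above, so the teardown request
    # (build_request_recursive_del_setup_end) can find and delete it afterwards
    refs = build_request_recursive_del_setup_create_record_refs().call(env)
    refs.assert_ok()
    env.set('recursive_del_ref_record_id', refs.get_response_obj().recordIds[0])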
+ +from request_runner import RequestRunner, Request + + +def build_request_search_logs_by_logset_id() -> RequestRunner: + rq_proto = Request( + name='search logs by logset id', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/logset/{{search_logset_id}}/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_markers_by_wellbore_id() -> RequestRunner: + rq_proto = Request( + name='search markers by wellbore id', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbore/{{search_wellbore_id}}/markers', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_wellbores_by_geo_polygon() -> RequestRunner: + rq_proto = Request( + name='search wellbores by geo polygon', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbores/bygeopolygon', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" + { + "points": [ + { + "latitude": 46, + "longitude": -101 + }, + { + "latitude": 49, + "longitude": -102 + }, + { + "latitude": 45, + "longitude": -105 + } + ], + "query": { + "query": "" + } + } + """ + ) + return RequestRunner(rq_proto) + + +def build_request_search_logs_by_wellbore_id() -> RequestRunner: + rq_proto = Request( + name='search logs by wellbore id', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbore/{{search_wellbore_id}}/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_logset_by_wellbores_attribute() -> RequestRunner: + rq_proto = Request( + name='search logset by wellbores attribute', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbores/data.state:"North Dakota"/logsets', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_logs_by_wellbores_attribute() -> RequestRunner: + rq_proto = Request( + name='search logs by wellbores attribute', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbores/data.state:"North Dakota"/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_wellbores_by_bounding_box() -> RequestRunner: + rq_proto = Request( + name='search wellbores by bounding box', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbores/byboundingbox?latitude_top_left=48&longitude_top_left=-104&latitude_bottom_right=45&longitude_bottom_right=-101', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': 
'{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_wellbores_by_distance() -> RequestRunner: + rq_proto = Request( + name='search wellbores by distance', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbores/bydistance?latitude=46.8&longitude=-103.2&distance=15000', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_logs_by_logsets_attribute() -> RequestRunner: + rq_proto = Request( + name='search logs by logsets attribute', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/logsets/data.classification:"Quad-Combo"/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_logset_by_wellbore_id() -> RequestRunner: + rq_proto = Request( + name='search logset by wellbore id', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbore/{{search_wellbore_id}}/logsets', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_wellbores() -> RequestRunner: + rq_proto = Request( + name='search wellbores', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/wellbores', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) + + +def build_request_search_logs() -> RequestRunner: + rq_proto = Request( + name='search logs', + method='POST', + url='{{base_url}}/ddms/{{search_query_type}}/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload='{ "query": "" }' + ) + return RequestRunner(rq_proto) \ No newline at end of file diff --git a/tests/integration/functional/request_builders/wdms/search_apis/setup.py b/tests/integration/functional/request_builders/wdms/search_apis/setup.py new file mode 100644 index 0000000000000000000000000000000000000000..344f20939c57b168beb73fb1f8baa1640591f6dd --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/search_apis/setup.py @@ -0,0 +1,591 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
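Each search builder above posts a minimal '{ "query": "" }' body: the actual filter lives in the URL, either as a path attribute (e.g. data.state:"North Dakota") or as query parameters for the geographic searches, while {{search_query_type}} selects the query flavour. A small call sketch, assuming the setup requests defined below have already created the records and that {{search_wellbore_id}} is fed from the setup's wellbore id (that variable mapping is an assumption):

def smoke_check_search_apis(env):
    # geo search: the bounding box is carried entirely by the URL query parameters
    build_request_search_wellbores_by_bounding_box().call(env, assert_status=200)

    # relationship search: point {{search_wellbore_id}} at the wellbore created by the setup
    build_request_search_logs_by_wellbore_id().call(
        env, search_wellbore_id=env.get('setup_search_wellbore_id'), assert_status=200)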
+ +from request_runner import RequestRunner, Request + + +def build_request_seach_tests_setup_end() -> RequestRunner: + rq_proto = Request( + name='seach_tests_setup_end', + method='GET', + url='{{base_url}}/ddms/v2/about', + headers={ + 'accept': 'application/json', + }, + ) + return RequestRunner(rq_proto) + + +def build_request_seach_tests_setup_create_logsets() -> RequestRunner: + rq_proto = Request( + name='seach_tests_setup_create_logsets', + method='POST', + url='{{base_url}}/ddms/v2/logsets', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "kind": "{{logSetKind}}", + "data": { + "name": "wdms_e2e_search_record_v{{search_record_version}}", + "azimuthReference": "TN", + "channelMnemonics": [ + "DCAL", + "DPOR", + "GR", + "NPOR", + "RHOB", + "DT" + ], + "channelNames": [ + "Differential Caliper", + "Density Porosity", + "Gamma Ray", + "Enhanced Thermal Neutron Porosity in Selected Lithology", + "CDL Bulk Density", + "Delta-T (also called Slowness or Interval Transit Time)" + ], + "classification": "Quad-Combo", + "dateCreated": "2013-03-22T11:16:03Z", + "dateModified": "2013-03-22T11:16:03Z", + "elevationReference": { + "elevationFromMsl": { + "unitKey": "ft", + "value": 2680.5 + }, + "name": "KB" + }, + "externalIds": [ + "Petrel:tenant1/ProjectLouisiana/3764913/a9b46fc4-1840-450a-ac01-d15bdaa086ba:9190e417-8d42-4994-9e6a-9a327b4f47b1" + ], + "operation": "Harmonization", + "properties": [ + { + "description": "Run 1 date {DD/MM/YYYY}", + "name": "RUN_DATE-RUN1", + "unitKey": "", + "value": "22/09/1998" + }, + { + "description": "Run 1 depth interval", + "name": "RUN_DEPTH-TOP-RUN1", + "unitKey": "ft", + "value": 0 + }, + { + "description": "Run 1 depth interval", + "name": "RUN_DEPTH-BASE-RUN1", + "unitKey": "ft", + "value": 1500 + }, + { + "description": "Run 2 date {DD/MM/YYYY}", + "name": "RUN_DATE-RUN2", + "unitKey": "", + "value": "23/10/1998" + }, + { + "description": "Run 2 depth interval", + "name": "RUN_DEPTH-TOP-RUN2", + "unitKey": "ft", + "value": 1500 + }, + { + "description": "Run 2 depth interval", + "name": "RUN_DEPTH-BASE-RUN2", + "unitKey": "ft", + "value": 2513 + }, + { + "associations": [ + "ENSEMBLE_TOOLELEMENT", + "EDTC-B_8612", + "EDTC-B_8612" + ], + "description": "from Toolstring_Parameter", + "name": "EDTC-B_8612", + "value": 8612 + }, + { + "description": "zone range", + "format": "{AF}", + "name": "ERRBND_Zone[1]", + "values": [ + -999.25, + 43474.6413266435 + ] + } + ], + "reference": { + "dataType": "number", + "dimension": 1, + "family": "Measured Depth", + "familyType": "Depth", + "format": "float32", + "mnemonic": "MD", + "name": "Measured Depth", + "unitKey": "ft" + }, + "referenceType": "Measured Depth", + "relationships": { + "well": { + "name": "Newton 2-31" + }, + "wellbore": { + "confidence": 1.0, + "id": "{{setup_search_wellbore_id}}", + "name": "wddms-e2e-search-test-0000" + } + }, + "start": { + "unitKey": "ft", + "value": 1234.56 + }, + "step": { + "unitKey": "ft", + "value": 0.1 + }, + "stop": { + "unitKey": "ft", + "value": 13856.25 + } + }, + "meta": [ + { + "kind": "Unit", + "name": "ft", + "persistableReference": "{\"scaleOffset\":{\"scale\":0.3048,\"offset\":0.0},\"symbol\":\"ft\",\"baseMeasurement\":{\"ancestry\":\"Length\",\"type\":\"UM\"},\"type\":\"USO\"}", + "propertyNames": [ + "stop.value", + 
"elevationReference.elevationFromMsl.value", + "start.value", + "step.value", + "reference.unitKey" + ], + "propertyValues": [ + "ft" + ] + }, + { + "kind": "DateTime", + "name": "datetime", + "persistableReference": "{\"format\":\"yyyy-MM-ddTHH:mm:ssZ\",\"timeZone\":\"UTC\",\"type\":\"DTM\"}", + "propertyNames": [ + "dateModified", + "dateCreated" + ] + } + ] +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_seach_tests_setup_create_record_refs() -> RequestRunner: + rq_proto = Request( + name='seach_tests_setup_create_record_refs', + method='POST', + url='{{base_url}}/ddms/v2/logsets', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "kind": "{{logSetKind}}", + "data": { + "name": "wdms_e2e_search_refs_v{{search_record_version}}", + "description": "this is not an actual logset, but a record used in wdms integration tests to ref some other records. Purpose is for testing only.", + "channelNames": [ + "{{setup_search_wellbore_id}}", + "{{setup_search_logset_id}}" + ] + } +} +] + +""" + ) + return RequestRunner(rq_proto) + + +def build_request_seach_tests_setup_create_logs() -> RequestRunner: + rq_proto = Request( + name='seach_tests_setup_create_logs', + method='POST', + url='{{base_url}}/ddms/v2/logs', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "kind": "{{logKind}}", + "data": { + "azimuthReference": "TN", + "dateCreated": "2013-03-22T11:16:03Z", + "dateModified": "2013-03-22T11:16:03Z", + "basin": "Feda Graben (Central Graben)", + "elevationReference": { + "elevationFromMsl": { + "unitKey": "ft", + "value": 2680.5 + }, + "name": "KB" + }, + "externalIds": [ + "Petrel:tenant1/ProjectLouisiana/3764913/a9b46fc4-1840-450a-ac01-d15bdaa086ba:438c35f3-fb59-4581-bb21-93b591d7cd1f" + ], + "history": [ + { + "date": "2019-02-01T11:16:03Z", + "description": "Created by Quanti_ Borehole computation; \nFamilies: True Vertical Depth; \nVariables: TVD, \nZonation: ZONATION_ALL; Unit: ft; \nMudType: Water; BSALinput: 0; Unit: ppk; BFHIinput: -9999; Unit: unitless; BPressCompute: Compute from mud weight and TVD; AirGap: 2; Unit: m; MudWeight: 1.1; Unit: g/m3; BTempCompute: Compute from depth tie point and gradient; BTEMPinput: 75; Unit: degC; BTEMPreferenceTVD: 2438.4; Unit: m; BTEMPgradient: 2; Unit: degC/100m; RmCompute: Compute from zoned variables; RMinput: 0.1; Unit: ohm.m; RMtemperature: 20; Unit: degC; RMFinput: 0.08; Unit: ohm.m; RMFtemperature: 20; Unit: degC; RMCinput: 0.16; Unit: ohm.m; RMCtemperature: 20; Unit: degC; RWinput: 0.1; Unit: ohm.m; RWtemperature: 100; Unit: degC; FormationSalinity: -9999; Unit: ppk;", + "user": "Ddahan" + } + ], + "log": { + "dataType": "number", + "dimension": 1, + "family": "Density Porosity", + "familyType": "Porosity", + "format": "float32", + "logstoreId": 2156256839304115, + "mnemonic": "DPOR", + "name": "Density Porosity", + "properties": [ + { + "description": "Linear depth offset of the channel sensor relative to some reference point, typically the toolstring zero", + "name": "MEASURE_POINT_OFFSET", + "unitKey": "m", + "value": 0.264922 + } + ], + "unitKey": "%" + }, + "name": "DPOR", + "reference": { + "dataType": "number", + 
"dimension": 1, + "family": "Measured Depth", + "familyType": "Depth", + "format": "float32", + "mnemonic": "MD", + "name": "Measured Depth", + "unitKey": "ft" + }, + "referenceType": "Measured Depth", + "relationships": { + "logSet": { + "id": "{{setup_search_logset_id}}" + }, + "well": { + "name": "wddms-e2e-search-test-0000" + }, + "wellbore": { + "confidence": 1.0, + "id": "{{setup_search_wellbore_id}}", + "name": "wddms-e2e-search-test-0000" + } + }, + "start": { + "unitKey": "ft", + "value": 1234.56 + }, + "step": { + "unitKey": "ft", + "value": 0.1 + }, + "stop": { + "unitKey": "ft", + "value": 13856.25 + } + }, + "meta": [ + { + "kind": "Unit", + "name": "ft", + "persistableReference": "{\"scaleOffset\":{\"scale\":0.3048,\"offset\":0.0},\"symbol\":\"ft\",\"baseMeasurement\":{\"ancestry\":\"Length\",\"type\":\"UM\"},\"type\":\"USO\"}", + "propertyNames": [ + "reference.unitKey", + "stop.value", + "elevationReference.elevationFromMsl.value", + "start.value", + "step.value" + ], + "propertyValues": [ + "ft" + ] + }, + { + "kind": "Unit", + "name": "%", + "persistableReference": "{\"scaleOffset\":{\"scale\":0.01,\"offset\":0.0},\"symbol\":\"%\",\"baseMeasurement\":{\"ancestry\":\"Dimensionless\",\"type\":\"UM\"},\"type\":\"USO\"}", + "propertyNames": [ + "log.unitKey" + ], + "propertyValues": [ + "%" + ] + }, + { + "kind": "Unit", + "name": "m", + "persistableReference": "{\"scaleOffset\":{\"scale\":1.0,\"offset\":0.0},\"symbol\":\"m\",\"baseMeasurement\":{\"ancestry\":\"Length\",\"type\":\"UM\"},\"type\":\"USO\"}", + "propertyNames": [ + "log.properties.namedProperty.value" + ] + } + ] +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_seach_tests_setup_create_wellbore() -> RequestRunner: + rq_proto = Request( + name='seach_tests_setup_create_wellbore', + method='POST', + url='{{base_url}}/ddms/v2/wellbores', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "data": { + "name": "wdms_e2e_search_record_v{{search_record_version}}", + "basinContext": { + "basinCode": "C5031", + "basinName": "Williston Basin", + "subBasinCode": "C50310104", + "subBasinName": "Three Forks Formation and Jefferson Group" + }, + "block": "Block 11/8", + "country": "United States of America", + "county": "Stark", + "dateCreated": "2013-03-22T11:16:03Z", + "dateLicenseIssued": "2012-10-21T18:00:00Z", + "dateModified": "2013-03-22T11:16:03Z", + "datePluggedAbandoned": "2019-02-21T18:00:00Z", + "dateSpudded": "2014-02-21T19:00:00Z", + "directionWell": "producer", + "district": "Fryburg", + "elevationReference": { + "elevationFromMsl": { + "unitKey": "ft", + "value": 2650.5 + }, + "name": "GL" + }, + "field": "Bell", + "fluidWell": "oil-gas", + "groundElevation": { + "unitKey": "ft", + "value": 2645.6 + }, + "locationWGS84": { + "features": [ + { + "geometry": { + "coordinates": [ + -103.2380248, + 46.8925081, + 2650.5 + ], + "type": "Point" + }, + "properties": { + "name": "Newton 2-31" + }, + "type": "Feature" + } + ], + "type": "FeatureCollection" + }, + "operator": "Don E. 
Beckert", + "operatorDivision": "Division ND", + "plssLocation": { + "range": "99W", + "section": 31, + "township": "140N" + }, + "propertyDictionary": { + "API Number": "33003000080000", + "Activity Code": "E", + "Basin": "WILLISTON BASIN", + "Basin Code": "713200", + "Class Initial Code": "WF", + "Class Initial Name": "NEW FIELD WILDCAT", + "Country Name": "UNITED STATES", + "County Name": "BARNES", + "Current Operator City": "BILLINGS", + "Current Operator Name": "NYVATEX MONTANA", + "Date First Report": "11-12-1982", + "Date Last Activity": "06-03-2016", + "Depth Total Projected": "1800", + "Elevation Reference Datum": "GR", + "Elevation Reference Value": "1407", + "Field Name": "WILDCAT", + "Final Status": "ABANDON LOCATION", + "Formation Projected Name": "PRECAMBRIAN", + "Ground Elevation": "1407", + "Hole Direction": "VERTICAL", + "Lease Acres": "40", + "Lease Name": "TRIEBOLD", + "Operator City": "BILLINGS", + "Operator Name": "NYVATEX MONTANA", + "Permit Date": "11-10-1982", + "Permit Filer Long": ";PRESIDENT;;;;;;;", + "Permit Number": "9896", + "Permit Status": "APPROVED", + "Source": "PI", + "State Name": "NORTH DAKOTA", + "Status Final Code": "A", + "Sub Basin": "EASTERN SHELF (WILLISTON BASIN)", + "Sub Basin Code": "100000004313", + "Surface LL Source": "IH", + "Surface Latitude": "+47.1981919", + "Surface Longitude": " -97.8621697", + "UWI": "33003000080000", + "Unit of Measure": "ACRE", + "Well Num": "34-14" + }, + "region": "North America", + "relationships": { + "well": { + "name": "Newton 2-31" + } + }, + "state": "North Dakota", + "uwi": "33-089-00300-00", + "wellHeadElevation": { + "unitKey": "ft", + "value": 2650.5 + }, + "wellHeadGeographic": { + "crsKey": "geographic", + "elevationFromMsl": { + "unitKey": "ft", + "value": 2650.5 + }, + "latitude": 46.89249512931594, + "longitude": -103.23756979739804 + }, + "wellHeadProjected": { + "crsKey": "projected", + "elevationFromMsl": { + "unitKey": "ft", + "value": 2650.5 + }, + "x": 1315694.366039069, + "y": 458966.7531300551 + }, + "wellHeadWgs84": { + "latitude": 46.8925081, + "longitude": -103.2380248 + }, + "wellLocationType": "Onshore", + "wellNumberGovernment": "42-501-20130-P", + "wellNumberLicense": "42-501-20130-P", + "wellNumberOperator": "12399-001", + "wellPurpose": "development -- producer", + "wellStatus": "active -- producing", + "wellType": "reentry" + }, + "kind": "{{wellboreKind}}", + "acl": {{record_acl}}, "legal": {{record_legal}} +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_seach_tests_setup_create_markers() -> RequestRunner: + rq_proto = Request( + name='seach_tests_setup_create_markers', + method='POST', + url='{{base_url}}/ddms/v2/markers', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +[ +{ + "acl": {{record_acl}}, "legal": {{record_legal}}, + "data": { + "name": "wdms_e2e_search_record_v{{search_record_version}}", + "md": {"unitKey": "Unknown", "value": 0}, + "relationships": { + "wellbore": { + "confidence": 1.0, + "id": "{{setup_search_wellbore_id}}", + "name": "wdms_e2e_search_record_v{{search_record_version}}" + } + } + }, + "kind": "{{markerKind}}" +} +] +""" + ) + return RequestRunner(rq_proto) + + +def build_request_seach_tests_setup_start() -> RequestRunner: + rq_proto = Request( + name='seach_tests_setup_start', + method='POST', + url='{{base_url}}/ddms/query', + headers={ + 'accept': 'application/json', + 
'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + payload=r""" +{ + "kind": "{{logSetKind}}", + "query": "data.name:\"wdms_e2e_search_refs_v{{search_record_version}}\"", + "returnedFields": ["id", "data.channelNames"] +} + +""" + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/status.py b/tests/integration/functional/request_builders/wdms/status.py new file mode 100644 index 0000000000000000000000000000000000000000..e6ad80c2a00b39ea0953c29952279218b1c78658 --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/status.py @@ -0,0 +1,31 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_status() -> RequestRunner: + rq_proto = Request( + name='status', + method='GET', + url='{{base_url}}/ddms/v2/status', + headers={ + 'accept': 'application/json', + 'data-partition-id': '{{data_partition}}', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms/version.py b/tests/integration/functional/request_builders/wdms/version.py new file mode 100644 index 0000000000000000000000000000000000000000..058f541132417dbd373ccd8116a1ebdeb8e344cd --- /dev/null +++ b/tests/integration/functional/request_builders/wdms/version.py @@ -0,0 +1,30 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from request_runner import RequestRunner, Request + + +def build_request_version() -> RequestRunner: + rq_proto = Request( + name='version', + method='GET', + url='{{base_url}}/ddms/v2/version', + headers={ + 'accept': 'application/json', + 'Connection': '{{header_connection}}', + 'Authorization': 'Bearer {{token}}', + }, + ) + return RequestRunner(rq_proto) + diff --git a/tests/integration/functional/request_builders/wdms_variables.py b/tests/integration/functional/request_builders/wdms_variables.py new file mode 100644 index 0000000000000000000000000000000000000000..b0e095239d5bf45d57cd7fb11386fc600690e30c --- /dev/null +++ b/tests/integration/functional/request_builders/wdms_variables.py @@ -0,0 +1,47 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + + +variables_dict = { + "base_url": "https://open.opendes.cloud.slb-ds.com/api/os-wellbore-ddms", + "token": "", + "cloud_provider": "local", + "base_url_entity": "logs", + "entity_kind": "{{logKind}}", + "dipsetKind": "{{data_partition}}:wks:dipSet:1.0.0", + "wellKind": "{{data_partition}}:wks:well:1.0.2", + "wellboreKind": "{{data_partition}}:wks:wellbore:1.0.6", + "logSetKind": "{{data_partition}}:wks:logSet:1.0.5", + "markerKind": "{{data_partition}}:wks:marker:1.0.4", + "trajectoryKind": "{{data_partition}}:wks:trajectory:1.0.5", + "trajectory_data": {"name": "wdms_e2e_trajectory"}, + "logKind": "{{data_partition}}:wks:log:1.0.5", + "acl_domain": "p4d.cloud.slb-ds.com", + "acl_owner": "data.default.owners@{{data_partition}}.{{acl_domain}}", + "acl_viewer": "data.default.viewers@{{data_partition}}.{{acl_domain}}", + "legal_tag": "opendes-public-usa-dataset-1", + "data_partition": "", + "data": {}, + "search_record_version": "0001", + "record_acl": { + "owners": ["{{acl_owner}}"], + "viewers": ["{{acl_viewer}}"] + }, + "record_legal": { + "legaltags": ["{{legal_tag}}"], + "otherRelevantDataCountries": ["US", "FR"] + }, + "header_connection": "close" +} + diff --git a/tests/integration/functional/request_runner.py b/tests/integration/functional/request_runner.py new file mode 100644 index 0000000000000000000000000000000000000000..92f479c7aa209a90cc3bca8bd2d4fc805eaefb86 --- /dev/null +++ b/tests/integration/functional/request_runner.py @@ -0,0 +1,241 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
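variables_dict above is deliberately self-referential: acl_owner expands through data_partition and acl_domain, and record_acl / record_legal are dicts whose string values still contain placeholders. A hedged sketch of the expected expansion, assuming Variables.resolve() substitutes placeholders recursively; the partition value is illustrative and the import wiring is the one used in tests/fixtures.py:

env = Variables.from_dict(variables_dict)
env.set('data_partition', 'opendes')   # illustrative value, normally injected at run time

# assuming recursive substitution, record_acl should expand to something like:
#   {'owners':  ['data.default.owners@opendes.p4d.cloud.slb-ds.com'],
#    'viewers': ['data.default.viewers@opendes.p4d.cloud.slb-ds.com']}
resolved_acl = env.resolve(variables_dict['record_acl'])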
+ +import requests +from variables import Variables, CmdLineSpecialVar +from typing import Any, Dict, List +from dataclasses import dataclass, field +from datetime import datetime +from os import linesep +from math import floor +from munch import munchify +import warnings +import logging +import os +import json +import uuid +from io import BytesIO + +logger = logging.getLogger() + + +def current_test(): + cur_test = os.environ.get('PYTEST_CURRENT_TEST', '') + return cur_test.split('::')[-1].replace(' (call)', '').strip() + + +def make_correlation_id(prefix: str = None): + if not prefix: + prefix = 'wdms_e2e_' + current_test() + + return f'{prefix}_{uuid.uuid4()}' + + +@dataclass +class Request: + method: str + url: str + name: str = '' + description: str = '' + payload: Any = None + """ support same than requests.request, Dictionary, list of tuples, bytes, or file-like object but variables + resolution will only occurs for string or dict """ + + headers: Dict[str, str] = field(default_factory=dict) + + def __str__(self): + r_str = f'{self.name}{linesep}' if self.name else '' + r_str += f'URL: [{self.method.upper()}] {self.url}){linesep}' + r_str += f'headers: {linesep}' + for k, v in self.headers.items(): + if k.lower() in ['authorization', 'apikey', 'appkey']: + v = v[:12] + '******' + v[-5:] + r_str += f' - {k}: {v}{linesep}' + if self.payload: + r_str += f'body: -------------------------------------------{linesep}' + r_str += str(self.payload) + r_str += f'-------------------------------------------{linesep}' + r_str += linesep + return r_str + + def get_body_obj(self): + """ only valid for json body """ + if not self.payload: + return munchify({}) + if isinstance(self.payload, str): + return munchify(json.loads(self.payload)) + if isinstance(self.payload, dict): + return munchify(self.payload) + if isinstance(self.payload, bytes): + return munchify(json.load(BytesIO(self.payload))) + if hasattr(self.payload, 'read'): + return munchify(json.load(self.payload)) + return munchify({}) + + +@dataclass +class RunResult: + start_ts: datetime + end_ts: datetime + request: Request + response: Any + + def assert_ok(self): + """ assert for any error (see requests.raise_for_status() )""" + try: + self.response.raise_for_status() + except requests.HTTPError: + logger.error('Error on call:') + logger.error(str(self)) + raise + + def assert_status_code(self, expected_code): + """ assert for a specific code """ + if int(expected_code) != self.response.status_code: + logger.error(f'Unexpected status code: actual={self.response.status_code}, expected={expected_code}') + logger.error(str(self)) + + assert int(expected_code) == self.response.status_code,\ + f'unexpected status code, actual={self.response.status_code}, expected={expected_code}' + + @property + def ok(self): + return self.response.ok + + @property + def elapsed(self): + return floor((self.end_ts - self.start_ts).total_seconds() * 1000) + + def get_response_obj(self): + return munchify(self.response.json()) + + @property + def summary(self): + return f'{self.start_ts} ({self.elapsed} ms) [code={self.response.status_code}] - {self.request.method.upper()} {self.request.url}' + + def __str__(self): + r_str = f'[{self.response.status_code}] - {self.request.name or self.request.url}{linesep}' + r_str += f'start: [{self.start_ts}], end: {self.end_ts}, elapsed: {self.elapsed} ms{linesep}' + r_str += f'{linesep}============ REQUEST =============== {linesep}' + r_str += str(self.request) + r_str += f'{linesep}============ RESPONSE =============== 
{linesep}' + r_str += f'headers: {linesep}' + for k, v in self.response.headers.items(): + if k.lower() in ['authorization', 'apikey', 'appkey']: + v = v[:12] + '******' + v[-5:] + r_str += f' - {k}: {v}{linesep}' + r_str += f'body: -------------------------------------------{linesep}' + response_text = self.response.text + if response_text: + r_str += response_text + else: + r_str += '(no body)' + r_str += linesep + return r_str + + +class RequestRunner: + + def __init__(self, rq: Request): + self.request_prototype = rq + self.runs: List[RunResult] = [] + self._no_env = Variables() + + def call(self, env: Variables = None, headers=None, *, assert_status=None, **kwargs) -> RunResult: + """ + :param env: variables to use and substituted in the request + :param headers: additional headers to set, will update and replace the ones in the original if same + :param assert_status: If not None, will assert the http status code is the one provided + :param kwargs: any variables to set for this call only, with override the one in 'env' parameter. + :return: RunResult, contains both request and response objects + """ + if kwargs: + env = env.copy() + env.update(**kwargs) + + error_for_retry = CmdLineSpecialVar.get_retry_on_error(env) or [] + nb_attempt = 4 + for _ in range(nb_attempt): + result = self._inner_call(env, headers) + if result.response.status_code in error_for_retry and result.response.status_code >= 500: + from time import sleep + warnings.warn(UserWarning(f'{result.response.status_code} returned from ' + result.response.url)) + logger.warning(f'{result.response.status_code} status code, retry in 10s') + sleep(10) + continue + break + + if assert_status: + result.assert_status_code(assert_status) + return result + + def _make_headers(self, env: Variables = None, headers=None): + result_hrd = self.request_prototype.headers.copy() + + # update from hrd from cmd line + result_hrd.update(CmdLineSpecialVar.get_headers(env)) + + # put correlation_id + result_hrd['correlation-id'] = make_correlation_id() + + # override by headers provided + result_hrd.update(headers or {}) + + # resolve from environment + return env.resolve(result_hrd) + + def _inner_call(self, env: Variables = None, headers=None) -> RunResult: + env = env or self._no_env + rq = Request(method=self.request_prototype.method, + url=env.resolve(self.request_prototype.url)) + + rq.headers = self._make_headers(env, headers) + + if self.request_prototype.payload: + rq.payload = env.resolve(self.request_prototype.payload) + + timeout = CmdLineSpecialVar.get_timeout_request(env) or 0 + timeout = None if timeout == 0 else float(timeout) / 1000. 
# input is in ms, requests expected float seconds + + start_ts = datetime.now() + log_level = CmdLineSpecialVar.get_log_request_level(env) + if log_level >= 1: + logger.info(f'{current_test()} => {rq.name} {rq.method.upper()} {rq.url}') + + verify = not CmdLineSpecialVar.get_disable_ssl_validation(env) + + if isinstance(rq.payload, dict): + rq.payload = json.dumps(rq.payload) + response = requests.request(rq.method, rq.url, data=rq.payload, headers=rq.headers, timeout=timeout, verify=verify) + rq.headers = response.request.headers + result = RunResult(start_ts=start_ts, end_ts=datetime.now(), request=rq, response=response) + + if log_level == 1: + logger.info(f'{current_test()} <= {rq.name} status_code={result.response.status_code} ({result.elapsed} ms)') + elif log_level > 1: + logger.info(f'{current_test()} <= ') + logger.info(result) + + self.runs.append(result) + return result + + def __str__(self): + if not self.runs: + return 'no run, request prototype =' + linesep + str(self.request_prototype) + r_str = f'{len(self.runs)} run(s) for {self.request_prototype.name or self.request_prototype.url}:{linesep}{linesep}' + for count, run in enumerate(self.runs): + r_str += f'# run {count +1}: {linesep}' + r_str += str(run) + r_str += linesep + linesep + linesep + return r_str diff --git a/tests/integration/functional/tests/__init__.py b/tests/integration/functional/tests/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/integration/functional/tests/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/integration/functional/tests/fixtures.py b/tests/integration/functional/tests/fixtures.py new file mode 100644 index 0000000000000000000000000000000000000000..df56e38d872ffa7fe3c9c84f8729736a5b03d588 --- /dev/null +++ b/tests/integration/functional/tests/fixtures.py @@ -0,0 +1,24 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
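RequestRunner above wraps requests with placeholder resolution, a per-call correlation id, masked Authorization headers in logs, and a retry loop for the 5xx codes listed in the retry-on-error command-line variable. A minimal usage sketch; the localhost base URL and dummy token are illustrative values only:

from request_runner import Request, RequestRunner
from variables import Variables

env = Variables.from_dict({
    'base_url': 'http://localhost:8080/api/os-wellbore-ddms',  # illustrative
    'token': 'dummy-token',                                     # illustrative
})

runner = RequestRunner(Request(
    name='about',
    method='GET',
    url='{{base_url}}/ddms/v2/about',
    headers={'Authorization': 'Bearer {{token}}'},
))

result = runner.call(env, assert_status=200)  # asserts the HTTP status, returns a RunResult
print(result.summary)                         # one-line timing + status report
about = result.get_response_obj()             # munchified JSON body, attribute access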
+ +import pytest +from variables import Variables +from ..request_builders.wdms_variables import variables_dict + +WDMS_Variables = Variables.from_dict(variables_dict) + + +@pytest.fixture(scope='session') +def with_wdms_env(): + return WDMS_Variables diff --git a/tests/integration/functional/tests/test_about.py b/tests/integration/functional/tests/test_about.py new file mode 100644 index 0000000000000000000000000000000000000000..738116783ba8e9a9bdf3b812f91b9042e7633e77 --- /dev/null +++ b/tests/integration/functional/tests/test_about.py @@ -0,0 +1,47 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from ..request_builders.wdms.about import build_request_about +from ..request_builders.wdms.status import build_request_status +from ..request_builders.wdms.version import build_request_version +from .fixtures import with_wdms_env +import pytest + + +@pytest.mark.tag('basic', 'smoke') +def test_about(with_wdms_env): + result = build_request_about().call(with_wdms_env) + result.assert_ok() + + resobj = result.get_response_obj() + fields = ["service", "version", "buildNumber", "cloudEnvironment"] + for f in fields: + assert f in resobj, f"missing {f} in body" + assert isinstance(resobj[f], str), f"{f} should be a string" + + assert resobj.cloudEnvironment == with_wdms_env['cloud_provider'] + + +@pytest.mark.tag('basic', 'smoke') +def test_version(with_wdms_env): + result = build_request_version().call(with_wdms_env) + result.assert_ok() + assert result.get_response_obj() + + +@pytest.mark.tag('basic', 'smoke') +def test_status(with_wdms_env): + result = build_request_status().call(with_wdms_env) + result.assert_ok() + assert result.get_response_obj() diff --git a/tests/integration/functional/tests/test_crud.py b/tests/integration/functional/tests/test_crud.py new file mode 100644 index 0000000000000000000000000000000000000000..817a68f0b41f3d531be04de344614a667b77957c --- /dev/null +++ b/tests/integration/functional/tests/test_crud.py @@ -0,0 +1,77 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
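The CRUD tests below lean on two conventions: build_request('crud.<kind>.<action>_<kind>') resolves the matching builder, and each create test stores '<kind>_record_id' in the session-scoped env so the later get/versions/delete tests of the same kind can reuse it, with pytest-dependency skipping them if creation failed. For illustration, one parametrized case is roughly equivalent to this hand-written pair (sketch only):

@pytest.mark.dependency(name='test_create_record_well')
def test_create_well_record(with_wdms_env):
    result = build_request('crud.well.create_well').call(with_wdms_env)
    result.assert_ok()
    with_wdms_env.set('well_record_id', result.get_response_obj().recordIds[0])


@pytest.mark.dependency(depends=['test_create_record_well'])
def test_get_well_record(with_wdms_env):
    result = build_request('crud.well.get_well').call(with_wdms_env)
    result.assert_ok()
    assert result.get_response_obj().data.name == 'wdms_e2e_well'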
+ +import pytest +from .fixtures import with_wdms_env +from ..request_builders import build_request + + +kind_list = ['well', 'wellbore', 'logset', 'marker', 'trajectory', 'log'] + + +# parametrize of kind + dependency on the create_record +param_kind_depend_on_create = [ + pytest.param(k, marks=pytest.mark.dependency(depends=[f'test_create_record_{k}'])) for k in kind_list +] + + +@pytest.mark.tag('basic', 'crud', 'smoke') +@pytest.mark.parametrize( + 'kind', [pytest.param(k, marks=pytest.mark.dependency(name=f'test_create_record_{k}')) for k in kind_list]) +def test_crud_create_record(with_wdms_env, kind): + result = build_request(f'crud.{kind}.create_{kind}').call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.recordCount == 1 + assert len(resobj.recordIds) == 1 + with_wdms_env.set(f'{kind}_record_id', resobj.recordIds[0]) # stored the record id for the following tests + + +@pytest.mark.tag('basic', 'crud', 'smoke') +@pytest.mark.parametrize('kind', param_kind_depend_on_create) +def test_crud_get_record(with_wdms_env, kind): + result = build_request(f'crud.{kind}.get_{kind}').call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.data.name == f'wdms_e2e_{kind}' + + +@pytest.mark.tag('basic', 'crud', 'smoke') +@pytest.mark.parametrize('kind', param_kind_depend_on_create) +def test_crud_record_versions(with_wdms_env, kind): + # get all version of the record + result = build_request(f'crud.{kind}.get_versions_of_{kind}').call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + + record_id = with_wdms_env.get(f'{kind}_record_id') + assert resobj.recordId == record_id + assert len(resobj.versions) >= 1 + + # get specific version of the record + result = build_request(f'crud.{kind}.get_{kind}_specific_version').call( + with_wdms_env, + **{f'{kind}_record_version': resobj.versions[0]} # set/pass version to fetch + ) + + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.data.name == f'wdms_e2e_{kind}' + + +@pytest.mark.tag('basic', 'crud', 'smoke') +@pytest.mark.parametrize('kind', param_kind_depend_on_create) +def test_crud_delete_record(with_wdms_env, kind): + result = build_request(f'crud.{kind}.delete_{kind}').call(with_wdms_env) + result.assert_status_code(204) diff --git a/tests/integration/functional/tests/test_dips.py b/tests/integration/functional/tests/test_dips.py new file mode 100644 index 0000000000000000000000000000000000000000..63aa720358edfb7041752b443131e7ace700d363 --- /dev/null +++ b/tests/integration/functional/tests/test_dips.py @@ -0,0 +1,564 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
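The dip tests below run as one ordered chain against a single dipset (its id is shared through the dipsetId variable) and compare responses verbatim against literal payloads. Those payloads consistently come back ordered by reference depth, which looks like service behaviour rather than coincidence; a small helper making that assumption explicit could sit alongside the exact-equality asserts (a sketch only, not a documented guarantee):

def assert_dips_sorted_by_reference(dips):
    """Check the presumed invariant that dips are returned ordered by reference depth."""
    depths = [d['reference']['value'] for d in dips]
    assert depths == sorted(depths), f'dips not ordered by reference depth: {depths}'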
+ +import pytest +from .fixtures import with_wdms_env +from ..request_builders.wdms.crud.dips import * +from jsonschema import validate + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency() +def test_create_dipset(with_wdms_env): + result = build_request_create__dipset().call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.recordCount == 1 + assert len(resobj.recordIds) == 1 + with_wdms_env.set('dipsetId', resobj.recordIds[0]) + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_create_dipset"]) +def test_get_dipset(with_wdms_env): + result = build_request_get_dipset().call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + + dip_schema = { + "type": "object", + "properties": { + "acl": { + "type": "object", + "properties": { + "viewers": {"type": "array"}, + "owners": {"type": "array"} + } + }, + "data": { + "type": "object", + "properties": { + + } + + }, + "id": {"type": "string"}, + "kind": {"type": "string"}, + "legal": { + "type": "object", + "properties": { + "legaltags": {"type": "array"}, + "otherRelevantDataCountries": {"type": "array"} + } + }, + }, + "required": ["acl", "data", "id", "kind", "legal"] + } + + validate(resobj, schema=dip_schema) + + assert resobj.data.name == "wdms_e2e_dipset_Keon" + assert resobj.kind == with_wdms_env['dipsetKind'] + + +expected_dips = [ + { + "reference": { + "unitKey": "meter", + "value": 1000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 0.12345678912121212 + }, + "inclination": { + "unitKey": "dega", + "value": 12.0 + }, + "quality": { + "unitKey": "unitless", + "value": 1.0 + }, + "xCoordinate": { + "unitKey": "meter", + "value": 1.0 + }, + "yCoordinate": { + "unitKey": "meter", + "value": 2.0 + }, + "zCoordinate": { + "unitKey": "meter", + "value": 3.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 2000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 34.0 + }, + "inclination": { + "unitKey": "dega", + "value": 27.0 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 3000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 3.0 + }, + "inclination": { + "unitKey": "dega", + "value": 1.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 4000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 4.0 + }, + "inclination": { + "unitKey": "dega", + "value": 2.0 + }, + "classification": "breakout" + } +] + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_create_dipset"]) +def test_create_dips(with_wdms_env): + result = build_request_create_dips().call(with_wdms_env) + result.assert_ok() + + assert result.response.json() == expected_dips + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_create_dips"]) +def test_get_dips(with_wdms_env): + result = build_request_get_dips().call(with_wdms_env) + result.assert_ok() + + assert result.response.json() == expected_dips + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_create_dips"]) +def test_get_dip_from_index(with_wdms_env): + result = build_request_get_dip_from_index().call(with_wdms_env) + result.assert_ok() + expected = { + "reference": { + "unitKey": "meter", + "value": 2000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 34.0 + }, + "inclination": { + "unitKey": "dega", + 
"value": 27.0 + } + } + assert result.response.json() == expected + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_get_dips", "test_get_dip_from_index"]) +def test_insert_dips(with_wdms_env): + result = build_request_insert_dips().call(with_wdms_env) + result.assert_ok() + assert result.response.json() == [ + { + "reference": { + "unitKey": "meter", + "value": 888.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 666.66 + }, + "inclination": { + "unitKey": "dega", + "value": 99.99 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 1000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 0.12345678912121212 + }, + "inclination": { + "unitKey": "dega", + "value": 12.0 + }, + "quality": { + "unitKey": "unitless", + "value": 1.0 + }, + "xCoordinate": { + "unitKey": "meter", + "value": 1.0 + }, + "yCoordinate": { + "unitKey": "meter", + "value": 2.0 + }, + "zCoordinate": { + "unitKey": "meter", + "value": 3.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 1500.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 77.0 + }, + "inclination": { + "unitKey": "dega", + "value": 81.0 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 2000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 34.0 + }, + "inclination": { + "unitKey": "dega", + "value": 27.0 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 3000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 3.0 + }, + "inclination": { + "unitKey": "dega", + "value": 1.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 4000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 4.0 + }, + "inclination": { + "unitKey": "dega", + "value": 2.0 + }, + "classification": "breakout" + } + ] + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_insert_dips"]) +def test_patch_dip(with_wdms_env): + result = build_request_patch_dip().call(with_wdms_env) + result.assert_ok() + + assert result.response.json() == [ + { + "reference": { + "unitKey": "meter", + "value": 1000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 0.12345678912121212 + }, + "inclination": { + "unitKey": "dega", + "value": 12.0 + }, + "quality": { + "unitKey": "unitless", + "value": 1.0 + }, + "xCoordinate": { + "unitKey": "meter", + "value": 1.0 + }, + "yCoordinate": { + "unitKey": "meter", + "value": 2.0 + }, + "zCoordinate": { + "unitKey": "meter", + "value": 3.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 1000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 8.0 + }, + "inclination": { + "unitKey": "dega", + "value": 12.0 + }, + "xCoordinate": { + "unitKey": "meter", + "value": 12.0 + }, + "yCoordinate": { + "unitKey": "meter", + "value": 12.0 + }, + "zCoordinate": { + "unitKey": "meter", + "value": 12.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 1500.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 77.0 + }, + "inclination": { + "unitKey": "dega", + "value": 81.0 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 2000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 34.0 + }, + "inclination": { + "unitKey": "dega", + "value": 27.0 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 3000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 3.0 + }, + "inclination": { + "unitKey": "dega", + "value": 1.0 + 
}, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 4000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 4.0 + }, + "inclination": { + "unitKey": "dega", + "value": 2.0 + }, + "classification": "breakout" + } + ] + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_patch_dip"]) +def test_delete_dip(with_wdms_env): + result = build_request_delete_dip().call(with_wdms_env) + result.assert_ok() + + assert result.get_response_obj() == [ + { + "reference": { + "unitKey": "meter", + "value": 1000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 8.0 + }, + "inclination": { + "unitKey": "dega", + "value": 12.0 + }, + "xCoordinate": { + "unitKey": "meter", + "value": 12.0 + }, + "yCoordinate": { + "unitKey": "meter", + "value": 12.0 + }, + "zCoordinate": { + "unitKey": "meter", + "value": 12.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 1500.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 77.0 + }, + "inclination": { + "unitKey": "dega", + "value": 81.0 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 2000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 34.0 + }, + "inclination": { + "unitKey": "dega", + "value": 27.0 + } + }, + { + "reference": { + "unitKey": "meter", + "value": 3000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 3.0 + }, + "inclination": { + "unitKey": "dega", + "value": 1.0 + }, + "classification": "fracture" + }, + { + "reference": { + "unitKey": "meter", + "value": 4000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 4.0 + }, + "inclination": { + "unitKey": "dega", + "value": 2.0 + }, + "classification": "breakout" + } + ] + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +@pytest.mark.dependency(depends=["test_delete_dip"]) +def test_query_dips(with_wdms_env): + result = build_request_query_dips().call(with_wdms_env) + result.assert_ok() + + assert result.get_response_obj() == [ + { + "reference": { + "unitKey": "meter", + "value": 4000.0 + }, + "azimuth": { + "unitKey": "dega", + "value": 4.0 + }, + "inclination": { + "unitKey": "dega", + "value": 2.0 + }, + "classification": "breakout" + } + ] + + +# @pytest.mark.dependency(depends=["test_query_dips"]) +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip', 'bulk') +def test_delete_dipset(with_wdms_env): + if with_wdms_env.get('dipsetId', None): + result = build_request_delete_dipset().call(with_wdms_env) + result.assert_status_code(204) + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'dip') +def test_dip_error_code(with_wdms_env): + env = with_wdms_env + env.set('dipsetId', 'opendes:doc:00000000000000000000000000000000000') + build_request_get_dips().call(with_wdms_env, assert_status=404).get_response_obj() \ No newline at end of file diff --git a/tests/integration/functional/tests/test_log_data.py b/tests/integration/functional/tests/test_log_data.py new file mode 100644 index 0000000000000000000000000000000000000000..7aec866c86f1931eeca744a04525ae414fa68d37 --- /dev/null +++ b/tests/integration/functional/tests/test_log_data.py @@ -0,0 +1,52 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from .fixtures import with_wdms_env +from ..request_builders.wdms.crud.log import * + + +@pytest.fixture(scope='module') +def env_with_log_record(with_wdms_env): + # create the log + result = build_request_create_log().call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.recordCount == 1 + assert len(resobj.recordIds) == 1 + + with with_wdms_env.scoped_update(log_record_id=resobj.recordIds[0]): + yield with_wdms_env + + # teardown: delete the log + build_request_delete_log().call(with_wdms_env).assert_status_code(204) + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'bulk') +@pytest.mark.dependency() +def test_add_log_bulk_data(env_with_log_record): + result = build_request_add_log_bulk_data().call(env_with_log_record) + result.assert_ok() + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'bulk') +@pytest.mark.dependency(depends=["test_add_log_bulk_data"]) +def test_get_log_bulk_data(env_with_log_record): + result = build_request_get_log_bulk_data().call(env_with_log_record) + result.assert_ok() + + resobj = result.get_response_obj() + assert resobj.data[0][1] == 10 + assert resobj.data[1][1] == 20 + assert resobj.data[2][1] == 30 diff --git a/tests/integration/functional/tests/test_log_version_data.py b/tests/integration/functional/tests/test_log_version_data.py new file mode 100644 index 0000000000000000000000000000000000000000..774ff2ca1504c227e373311dfe591c7a85da5e3e --- /dev/null +++ b/tests/integration/functional/tests/test_log_version_data.py @@ -0,0 +1,73 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from ..request_builders.wdms.crud.log import * +from request_runner import RequestRunner, Request +from .fixtures import with_wdms_env + +@pytest.fixture(scope="module") +def env_with_log_record(with_wdms_env): + # create the log + result = build_request_create_log().call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.recordCount == 1 + assert len(resobj.recordIds) == 1 + + with with_wdms_env.scoped_update(log_record_id=resobj.recordIds[0]): + result = build_request_add_log_bulk_data().call(with_wdms_env) + result.assert_ok() + + yield with_wdms_env + + # teardown: delete the log + build_request_delete_log().call(with_wdms_env).assert_status_code(204) + + +def test_get_log_version_data(env_with_log_record): + result = build_request_get_versions_of_log().call(env_with_log_record) + result.assert_ok() + resobj = result.get_response_obj() + assert len(resobj.versions) >= 1 + versions = resobj.versions[1] + + request_runner = RequestRunner( + Request( + name="Get log version data", + method="GET", + url="{{base_url}}/ddms/v2/logs/{{log_record_id}}/versions/"+str(versions)+"/data", + headers={ + "accept": "application/json", + "data-partition-id": "{{data_partition}}", + "Connection": "{{header_connection}}", + "Authorization": "Bearer {{token}}", + }, + ) + ) + result = request_runner.call( + env_with_log_record, assert_code=200 + ) + result.assert_ok() + resobj = result.get_response_obj() + + assert resobj.data + assert len(resobj.data) >= 3 + assert resobj.data[0] + assert resobj.data[1] + assert resobj.data[2] + + assert resobj.data[0][1] == 10 + assert resobj.data[1][1] == 20 + assert resobj.data[2][1] == 30 diff --git a/tests/integration/functional/tests/test_model_validation.py b/tests/integration/functional/tests/test_model_validation.py new file mode 100644 index 0000000000000000000000000000000000000000..d26a50bd70ad5cdde5788fd132bd0538b16dcbda --- /dev/null +++ b/tests/integration/functional/tests/test_model_validation.py @@ -0,0 +1,63 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from .fixtures import with_wdms_env +from ..request_builders.wdms.error_cases import build_request_create_log_with_invalid_data_should_422 +from ..request_builders.wdms.model_extensibility import * + + +def test_create_log_with_invalid_data_should_422(with_wdms_env): + build_request_create_log_with_invalid_data_should_422().call(with_wdms_env).assert_status_code(422) + + +@pytest.fixture +def env_with_record_extra_created(with_wdms_env): + with_wdms_env.set("entity", 'logs') + result = build_request_create_log_with_extra_fields().call(with_wdms_env) + result.assert_ok() + with_wdms_env.set("record_id", result.get_response_obj().recordIds[0]) + yield with_wdms_env + + build_request_clean_up_delete_log().call(with_wdms_env).assert_ok() + + +@pytest.mark.tag('basic', 'smoke', 'error') +def test_record_should_keep_extra_field(env_with_record_extra_created): + result = build_request_get_record_check_for_extra_fields().call(env_with_record_extra_created) + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.data.xxx_extra_at_data == 'value_at_data' + assert 'US' in resobj.legal.otherRelevantDataCountries + + +tests_parameters_for_relationship_extra_field = [ + ('logs', "{{logKind}}", {"relationships": {"extra_field": "EXTRA_VALUE"}}), + ('logsets', "{{logSetKind}}", {"relationships": {"wellbore": "", "extra_field": "EXTRA_VALUE"}}), + ('markers', "{{markerKind}}", {"name": "foo", "md": {"value": 1, "unitKey": "m"}, "relationships": {"wellbore": "", "extra_field": "EXTRA_VALUE"}}), + ('trajectories', "{{trajectoryKind}}", {"relationships": {"wellbore": "", "extra_field": "EXTRA_VALUE"}}), + ('dipsets', "{{dipsetKind}}", {"relationships": {"wellbore": "", "extra_field": "EXTRA_VALUE"}}), + ('wellbores', "{{wellboreKind}}", {"relationships": {"extra_field": "EXTRA_VALUE"}}), + ('wells', "{{wellKind}}", {"relationships": {"extra_field": "EXTRA_VALUE"}}) +] +@pytest.mark.parametrize('entities, entities_kind, data_extra_field', tests_parameters_for_relationship_extra_field) +def test_relationships_extra_field(with_wdms_env, entities, entities_kind, data_extra_field): + with_wdms_env.set("base_url_entity", entities) + with_wdms_env.set("entity_kind", entities_kind) + with_wdms_env.set("data", data_extra_field) + result = build_request_create_data_extra_fields().call(with_wdms_env, assert_status=200) + with_wdms_env.set("record_id", result.get_response_obj().recordIds[0]) + result = build_request_get_record_check_for_extra_fields().call(with_wdms_env, assert_status=200) + resobj = result.get_response_obj() + assert resobj.data.relationships.extra_field == 'EXTRA_VALUE' \ No newline at end of file diff --git a/tests/integration/functional/tests/test_recursive_delete.py b/tests/integration/functional/tests/test_recursive_delete.py new file mode 100644 index 0000000000000000000000000000000000000000..0f09780b552e279708aece3eb1fc265fe2c82629 --- /dev/null +++ b/tests/integration/functional/tests/test_recursive_delete.py @@ -0,0 +1,129 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from .fixtures import with_wdms_env +from ..request_builders.wdms.recursive_delete.setup import * +from ..request_builders.wdms.recursive_delete.delete_well import * +from time import sleep +from random import randint + + +def query_for_record_set_available(env): + result = build_request_recursive_del_setup_check_state_start().call(env) + result.assert_ok() + return result.get_response_obj() + + +def create_and_set_id(rq, env, env_key): + rec_id = rq.call(env, assert_status=200).get_response_obj().recordIds[0] + env.set(env_key, rec_id) + + +@pytest.fixture +def env_with_records(with_wdms_env): + """ + There is a bit of logic in this setup because recursive delete relies on search to find + any children of a record and delete them along with the record itself. Given this hierarchy (the relation goes + from child to parent): + + [well] <-- [wellbore] <-- [logset] <-- [log] + + recursively deleting the well should also delete the wellbore, logset and log. + + The search results depend on indexation having run over the records, so creating the records and immediately + running the recursive delete won't work. Moreover, there is no way to know when indexation will be done. The + logic here is therefore to create several record sets for future runs and, if needed, wait until a record set + becomes available (up to 10 minutes). + + To prevent parallel test runs on the same environment from using the same record set, we use a single + record (see build_request_recursive_del_setup_create_record_refs) that references all the records + (well, wellbore, logset, log) involved in the recursive delete. Successfully deleting this single record ensures + no other run will use the same set of records. + """ + + threshold_enough_record = 6 + number_of_record_to_create = 8 + wait_attempt = 10 + env = with_wdms_env + + check_query_obj = query_for_record_set_available(env) + nb_record_set = check_query_obj.totalCount + print(f'{nb_record_set} record set found') + + if nb_record_set < threshold_enough_record: # not enough records + print(f'recursive delete setup: Create {number_of_record_to_create} new record set ...') + # create some records to delete + for _ in range(number_of_record_to_create): # chain well->wellbore->logset->logs + create_and_set_id(build_request_recursive_del_setup_create_well(), env, 'recursive_del_well_id') + create_and_set_id(build_request_recursive_del_setup_create_wellbore(), env, 'recursive_del_wellbore_id') + create_and_set_id(build_request_recursive_del_setup_create_logset(), env, 'recursive_del_logset_id') + create_and_set_id(build_request_recursive_del_setup_create_logs(), env, 'recursive_del_log_id') + + # create the record that references the ids of a record set + build_request_recursive_del_setup_create_record_refs().call(with_wdms_env).assert_ok() + + # might need to wait + while nb_record_set < 2 and wait_attempt >= 0: + print('not enough record indexed => Wait 1 minute ... 
attempt countdown=' + str(wait_attempt)) + sleep(60) + check_query_obj = query_for_record_set_available(with_wdms_env) + nb_record_set = check_query_obj.totalCount + wait_attempt -= 1 + + assert nb_record_set >= 1, 'maximum attempt reached' + + # randomly pick a set of record for the current tests + while check_query_obj.results: + idx = randint(0, len(check_query_obj.results)-1) + selected_set = check_query_obj.results.pop(idx) + assert selected_set.id + with_wdms_env.set('recursive_del_ref_record_id', selected_set.id) + # delete the ref_record to 'reserve' it, + if build_request_recursive_del_setup_end().call(with_wdms_env).response.status_code == 204: + # delete response with 204, records successfully reserved + with with_wdms_env.scoped_update( + recursive_del_well_id=selected_set.data.channelNames[0], + recursive_del_wellbore_id=selected_set.data.channelNames[1], + recursive_del_logset_id=selected_set.data.channelNames[2], + recursive_del_log_id=selected_set.data.channelNames[3]): + yield with_wdms_env + + break + + assert check_query_obj.results, 'fail to select record set for delete recursive tests' + + +def clean_up_all_ref_record(with_wdms_env): + for _ in range(10): + check_query_obj = query_for_record_set_available(with_wdms_env) + nb_record_set = check_query_obj.totalCount + for result in check_query_obj.results: + with_wdms_env.set('recursive_del_ref_record_id', result.id) + build_request_recursive_del_setup_end().call(with_wdms_env) + + if nb_record_set == len(check_query_obj.results): # not need for another loop + break + +@pytest.mark.skip(reason="Temporary disable as search indexing is failing, and this block our tests") +@pytest.mark.tag('recursive_delete', 'search') +def test_recursive_delete_well(env_with_records): + # when + result = build_request_recursive_delete_well().call(env_with_records) + result.assert_status_code(204) + + # check children record are gone + build_request_check_wellbore_is_deleted().call(env_with_records).assert_status_code(404) + build_request_check_log_is_deleted().call(env_with_records).assert_status_code(404) + build_request_check_logset_is_deleted().call(env_with_records).assert_status_code(404) diff --git a/tests/integration/functional/tests/test_sample.py b/tests/integration/functional/tests/test_sample.py new file mode 100644 index 0000000000000000000000000000000000000000..332b58bad65accb3fb3df6299f0b1c3e7d7fc29b --- /dev/null +++ b/tests/integration/functional/tests/test_sample.py @@ -0,0 +1,101 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +""" + This file contains test sample showing how use RequestRunner +""" +import pytest +from ..request_runner import Request, RequestRunner +from ..variables import Variables + + +def sample_of_test(): + # define some variables + environment = Variables.from_dict({ + "echo_url": "https://postman-echo.com/post", + "param1": "value1", + "param2": "{{param1}} - value2", # use a nested variable, + "int_value": 42, + "param_dict": {"param1": "{{param1}}", "int_value": "{{int_value}}"} # use a nested in dict + }) + + # basic get set + assert environment['echo_url'] == "https://postman-echo.com/post" + assert environment['param2'] == 'value1 - value2' + assert environment['int_value'] == 42 + assert environment.get('not_exist', 'default_value') == 'default_value' + environment['not_exist'] = 'now exists' + assert environment.get('not_exist', 'default_value') == 'now exists' + + # substitution in dict + assert environment['param_dict']['param1'] == 'value1' + assert environment['param_dict']['int_value'] == '42' # substituted but converted to str + + # declare a request + request_prototype = Request(method='POST', + url='{{echo_url}}?param1={{param1}}', + headers={ + 'accept': 'application/json', + 'Content-Type': 'application/json', + 'x-param1': '{{param1}}', + 'x-param2': '{{param2}}', + 'x-custom-param': '{{custom_param}}' + }, + payload={ + 'param1': '{{param1}}', + 'param_mix': 'param1={{param1}} & int_value={{int_value}}', + }) + + runner = RequestRunner(request_prototype) + + # run the call and assert 200 + result = runner.call( + env=environment, # [optional] environment to use + headers={'x-additional-header': 'ok'}, # [optional] additional headers + assert_status=200, # [optional] assert on a specific status code + custom_param='custom_param_value', # add a parameter + param2='param2_override' # add parameter, here override param2 value from environment + ) + + # some other way to assert on status code + result.assert_status_code(200) + result.assert_ok() + result.response.raise_for_status() + assert result.ok + assert result.response.status_code == 200 + + # check the input request + assert result.request.url == 'https://postman-echo.com/post?param1=value1' + assert result.request.headers['x-param2'] == 'param2_override' + assert result.request.headers['x-custom-param'] == 'custom_param_value' + request_body_obj = result.request.get_body_obj() + assert request_body_obj['param1'] == 'value1' + assert request_body_obj.param1 == 'value1' # as attribute thanks to Munch lib + + # check the response (response is a Requests.response) + response = result.response + print(response.headers) + + response_obj = result.get_response_obj() + assert response_obj.args.param1 == 'value1' # postman echo par every request query param in response.args + assert response_obj.headers.accept == 'application/json' + assert response_obj.headers['x-param2'] == 'param2_override' + + # other info + print('call took', result.elapsed, 'seconds') + result.assert_status_code(200) # assert on a specific status code + + + + diff --git a/tests/integration/functional/tests/test_search.py b/tests/integration/functional/tests/test_search.py new file mode 100644 index 0000000000000000000000000000000000000000..13a57baec21e82d264529809a92fd9ebe06fa58d --- /dev/null +++ b/tests/integration/functional/tests/test_search.py @@ -0,0 +1,170 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from .fixtures import with_wdms_env +from ..request_builders.wdms.search_apis.setup import * +from ..request_builders.wdms.search_apis.search import * +from time import sleep + + + +def query_for_record_set_available(env): + result = build_request_seach_tests_setup_start().call(env) + result.assert_ok() + return result.get_response_obj() + + +@pytest.fixture(params=['query', 'fastquery']) +def set_search_query_type(with_wdms_env, request): + env = with_wdms_env + env.set('search_query_type', request.param) + +@pytest.mark.tag('search') +@pytest.mark.dependency() +def test_setup_for_search(with_wdms_env): + # TODO this must be revisited to have independent setup for each needed record + wait_attempt = 10 + + # set here the version the data set of record needed for the tests + env = with_wdms_env + env.set('search_record_version', '0001') + # this value is use in the query to fetch the record + + env.set('search_query_type', 'fastquery') + + query_result = query_for_record_set_available(env) + nb_record = len(query_result.results) + + if nb_record == 0: + # create one wellbore + record_id = build_request_seach_tests_setup_create_wellbore().call( + env, assert_status=200).get_response_obj().recordIds[0] + env.set("setup_search_wellbore_id", record_id) + + for _ in range(2): # create 2 logset + record_id = build_request_seach_tests_setup_create_logsets().call( + env, assert_status=200).get_response_obj().recordIds[0] + env.set("setup_search_logset_id", record_id) # it doesn't matter which logset id is set + + for _ in range(2): # create 2 marker + build_request_seach_tests_setup_create_markers().call(env, assert_status=200) + + for _ in range(3): # create 3 logs + build_request_seach_tests_setup_create_logs().call(env, assert_status=200) + + # create the ref record + build_request_seach_tests_setup_create_record_refs().call(env, assert_status=200) + + # wait for the record to be searchable + while nb_record <= 0 and wait_attempt >= 0: + print('not enough record indexed => Wait 1 minute ... 
attempt countdown=' + str(wait_attempt)) + sleep(60) + query_result = query_for_record_set_available(env) + nb_record = len(query_result.results) + wait_attempt -= 1 + + assert nb_record > 0, 'search setup failure, get the record set' + + # pick the first + ref_data = query_result.results[0] + env.set('search_wellbore_id', ref_data.data.channelNames[0]) + env.set('search_logset_id', ref_data.data.channelNames[1]) + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_wellbores_by_distance(with_wdms_env, set_search_query_type): + resobj = build_request_search_wellbores_by_distance().call(with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 1 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_wellbores_by_bounding_box(with_wdms_env, set_search_query_type): + resobj = build_request_search_wellbores_by_bounding_box().call(with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 1 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_wellbores_by_geo_polygon(with_wdms_env, set_search_query_type): + resobj = build_request_search_wellbores_by_geo_polygon().call(with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 1 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_logset_by_wellbore_id(with_wdms_env, set_search_query_type): + resobj = build_request_search_logset_by_wellbore_id().call(with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 2 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_markers_by_wellbore_id(with_wdms_env, set_search_query_type): + resobj = build_request_search_markers_by_wellbore_id().call(with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 2 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_logset_by_wellbore_attribute(with_wdms_env, set_search_query_type): + resobj = build_request_search_logset_by_wellbores_attribute().call( + with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 2 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_logs_by_wellbore_id(with_wdms_env, set_search_query_type): + resobj = build_request_search_logs_by_wellbore_id().call(with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 3 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_logs_by_wellbore_attribute(with_wdms_env, set_search_query_type): + resobj = build_request_search_logs_by_wellbores_attribute().call( + with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 3 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_logs_by_logset_id(with_wdms_env, set_search_query_type): + resobj = build_request_search_logs_by_logset_id().call(with_wdms_env, assert_status=200).get_response_obj() + assert resobj.totalCount >= 3 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_logs_by_logset_attribute(with_wdms_env, set_search_query_type): + resobj = build_request_search_logs_by_logsets_attribute().call( + with_wdms_env, 
assert_status=200).get_response_obj() + assert resobj.totalCount >= 3 + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_wellbores(with_wdms_env, set_search_query_type): + build_request_search_wellbores().call( + with_wdms_env, assert_status=200) + + +@pytest.mark.tag('search') +@pytest.mark.dependency(depends=["test_setup_for_search"]) +def test_search_logs(with_wdms_env, set_search_query_type): + build_request_search_logs().call( + with_wdms_env, assert_status=200) \ No newline at end of file diff --git a/tests/integration/functional/tests/test_traj_data.py b/tests/integration/functional/tests/test_traj_data.py new file mode 100644 index 0000000000000000000000000000000000000000..5fe1847a1e5530660c4898bea59785a3b05d2ae1 --- /dev/null +++ b/tests/integration/functional/tests/test_traj_data.py @@ -0,0 +1,38 @@ +import pytest +from .fixtures import with_wdms_env +from ..request_builders.wdms.crud.trajectory import build_request_create_trajectory, build_request_delete_trajectory, build_request_add_trajectory_bulk_data, build_request_get_trajectory_bulk_data + + +@pytest.fixture(scope='module') +def env_with_traj_record(with_wdms_env): + # create the traj + result = build_request_create_trajectory().call(with_wdms_env) + result.assert_ok() + resobj = result.get_response_obj() + assert resobj.recordCount == 1 + assert len(resobj.recordIds) == 1 + + with with_wdms_env.scoped_update(trajectory_record_id=resobj.recordIds[0]): + yield with_wdms_env + + # teardown: delete the traj + build_request_delete_trajectory().call(with_wdms_env).assert_status_code(204) + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'bulk') +@pytest.mark.dependency() +def test_add_traj_bulk_data(env_with_traj_record): + result = build_request_add_trajectory_bulk_data().call(env_with_traj_record) + result.assert_ok() + + +@pytest.mark.tag('basic', 'crud', 'smoke', 'bulk') +@pytest.mark.dependency(depends=["test_add_traj_bulk_data"]) +def test_get_log_bulk_data(env_with_traj_record): + result = build_request_get_trajectory_bulk_data().call(env_with_traj_record) + result.assert_ok() + + resobj = result.get_response_obj() + assert resobj.data[0][1] == 1001 + assert resobj.data[1][1] == 1002 + assert resobj.data[2][1] == 1003 diff --git a/tests/integration/functional/tests/test_trajectory.py b/tests/integration/functional/tests/test_trajectory.py new file mode 100644 index 0000000000000000000000000000000000000000..1c2e1a5e54a42df1d8d33d420fe8ccf1a304405b --- /dev/null +++ b/tests/integration/functional/tests/test_trajectory.py @@ -0,0 +1,39 @@ +import pytest +from .fixtures import with_wdms_env + +from jsonschema import validate + +from ..request_builders.wdms.crud.trajectory import build_request_create_trajectory_with_id, \ + build_request_get_trajectory, build_request_get_trajectory_bulk_data, build_request_add_trajectory_bulk_data, \ + build_request_create_trajectory, build_request_delete_trajectory + + +@pytest.mark.tag('trajectory') +@pytest.mark.dependency() +def test_trajectory_error_code(with_wdms_env): + env = with_wdms_env + # set data and id variables to use them for the trajectory creation + env.set('trajectory_data', '{"name":"trajectory_test-trajectory_name"}') + # create one trajectory and check the response code 200 + resobj = build_request_create_trajectory().call(with_wdms_env, assert_status=200).get_response_obj() + # set record id variable to use it to get trajectory + env.set('trajectory_record_id', resobj.recordIds[0]) + # get trajectory and check the 
response code 200 + build_request_get_trajectory().call(with_wdms_env, assert_status=200) + # get data trajectory without adding any data to it and check the response code 204 + build_request_get_trajectory_bulk_data().call(with_wdms_env, assert_status=204) + # Add data to trajectory and check the response code 200 + build_request_add_trajectory_bulk_data().call(with_wdms_env, assert_status=200) + # get data trajectory and check the response code 200 + build_request_get_trajectory_bulk_data().call(with_wdms_env, assert_status=200) + # get trajectory and check the response code 200 + resobj=build_request_get_trajectory().call(with_wdms_env, assert_status=200).get_response_obj() + # set trajectory data json variable to use it to create trajectory with fake bulkURI + env.set('trajectory_data', + '{"name":"trajectory_test_CLA_traj-trajectory_name", "bulkURI":"urn:uuid:00000000-0000-0000-0000-000000000000"}') + # create one trajectory and check the response code 200 + build_request_create_trajectory_with_id().call(with_wdms_env, assert_status=200).get_response_obj() + # get trajectory and check the response code 500 and the error response message about the invalid bulkURI + build_request_get_trajectory_bulk_data().call(with_wdms_env, assert_status=500).get_response_obj() + # delete trajectory + build_request_delete_trajectory().call(with_wdms_env).assert_status_code(204) diff --git a/tests/integration/functional/variables.py b/tests/integration/functional/variables.py new file mode 100644 index 0000000000000000000000000000000000000000..4ac6db395bd11b198bf88fee79d28ae070bb7973 --- /dev/null +++ b/tests/integration/functional/variables.py @@ -0,0 +1,197 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from typing import Optional +from contextlib import contextmanager +import json + + +class Variables: + """ + Manage variables and nested substitution. 
Quite similar to a dict: + vars['var1'] = 'value1' + + nested variable must are inside double embraces '{{' '}}': + vars['var2'] = 'the value of var1 = {{var1}}' + + then + assert vars['my_var2'] == 'the value of var1 = value1' + + limitation, in case of nested variable, is only resolve in dict or string + the value is always + converted to string + + """ + def __init__(self): + self._variables = {} + + def __iter__(self): + for v in self._variables.values(): + yield v + + @classmethod + def load_env(cls, file_path, ignore_empty=False): + with open(file_path) as file: + return cls.from_pm_env_obj(json.load(file), ignore_empty) + + @classmethod + def from_dict(cls, variables_dict): + inst = cls() + for k, v in variables_dict.items(): + inst.set(k, v) + return inst + + def update(self, **kwargs): + self._variables.update(kwargs) + + def update_env(self, other: 'Variables'): + self._variables.update(other._variables) + + @classmethod + def from_pm_env_obj(cls, data, ignore_empty=False): + inst = cls() + values = data['values'] + for var_data in values: + if var_data.get('enabled', True): + value = var_data['value'] + if value or not ignore_empty: + inst._variables[var_data['key']] = var_data['value'] + return inst + + def resolve(self, d): + if isinstance(d, dict): + return {k: self.resolve(v) for k, v in d.items()} + + if isinstance(d, str): + return self._resolve_value(d) + + return d + + def _resolve_value(self, value: Optional[str]) -> Optional[str]: + if not value or not isinstance(value, str): + return value + + idx = value.find('{{', 0) + while idx >= 0: + idx_end = value.find('}}', idx) + if idx_end > idx + 2: + nested_var = value[idx+2: idx_end] + if nested_var in self: + nested_value = self.get(nested_var) + if isinstance(nested_value, dict): + nested_value = json.dumps(nested_value, indent=0) + value = value.replace('{{' + str(nested_var) + '}}', str(nested_value)) + idx = 0 + + idx = value.find('{{', idx + 2) + return value + + def get(self, key: str, default=None) -> Optional: + if key not in self._variables: + return self.resolve(default) + return self.resolve(self._variables[key]) + + def __getitem__(self, key: str): + return self.resolve(self._variables[key]) + + def __contains__(self, item): + return self._variables.__contains__(item) + + def set(self, key: str, value): + self._variables[key] = value + + def __setitem__(self, key: str, value): + self._variables[key] = value + + def print(self): + for k, v in self._variables.items(): + print(f'{k}={v}') + + def copy(self): + new_inst = Variables() + new_inst._variables = self._variables.copy() + return new_inst + + @contextmanager + def scoped_update(self, **kwargs): + origin_variables = self._variables.copy() + self.update(**kwargs) + yield self + self._variables = origin_variables + + def __str__(self): + return self._variables.__str__() + + def __repr__(self): + return self._variables.__repr__() + + +class CmdLineSpecialVar: + """ + These are internal variables used to setup parameters for the run + """ + + timeout_request_key = '___param_timeout_request' + """ timeout in seconds for the server to issue a response """ + + headers_key = '___param_headers' + """ custom to put for each request make (can be overridden add test level) """ + + log_request_level_key = '___param_log_request' + """ 0 nothing, 1: one line summary, 2: complete request/response headers and payload """ + + disable_ssl_validation_key = '___param_disable_ssl_validation' + """ boolean, to disable or not the ssl validation """ + + retry_on_error_key = 
'___param_retry_on_error' + """ list of error code on to enable retry strategy (limited to 4 attempts) """ + + @staticmethod + def set_timeout_request(variables: Variables, value: int): + variables.set(CmdLineSpecialVar.timeout_request_key, value) + + @staticmethod + def get_timeout_request(variables: Variables): + return variables.get(CmdLineSpecialVar.timeout_request_key, default=0) + + @staticmethod + def set_retry_on_error(variables: Variables, list_of_status_code): + variables.set(CmdLineSpecialVar.retry_on_error_key, [int(s) for s in list_of_status_code]) + + @staticmethod + def get_retry_on_error(variables: Variables): + return variables.get(CmdLineSpecialVar.retry_on_error_key, default=[]) + + @staticmethod + def set_disable_ssl_validation(variables: Variables, value: bool): + variables.set(CmdLineSpecialVar.disable_ssl_validation_key, value) + + @staticmethod + def get_disable_ssl_validation(variables: Variables): + return variables.get(CmdLineSpecialVar.disable_ssl_validation_key, default=False) + + @staticmethod + def set_log_request_level(variables: Variables, value: int): + variables.set(CmdLineSpecialVar.log_request_level_key, value) + + @staticmethod + def get_log_request_level(variables: Variables): + return variables.get(CmdLineSpecialVar.log_request_level_key, default=1) + + @staticmethod + def set_headers(variables: Variables, value: dict): + variables.set(CmdLineSpecialVar.headers_key, value) + + @staticmethod + def get_headers(variables: Variables): + return variables.get(CmdLineSpecialVar.headers_key, default={}) diff --git a/tests/integration/gen_postman_env.py b/tests/integration/gen_postman_env.py new file mode 100644 index 0000000000000000000000000000000000000000..9916c25723158c6cfa7efbe8a2365a0ce0f0b65c --- /dev/null +++ b/tests/integration/gen_postman_env.py @@ -0,0 +1,80 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
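For reference, a minimal sketch (not part of the patch) of how the CmdLineSpecialVar helpers above are meant to pair with a Variables instance from tests/integration/functional/variables.py; the URL, header and status codes used here are placeholders, and the import path depends on how the functional test package is loaded.

```python
# Illustrative sketch only -- values are made up; adjust the import to match
# how tests/integration/functional is packaged in your checkout.
from variables import CmdLineSpecialVar, Variables

run_vars = Variables.from_dict({"base_url": "https://example.invalid/api"})
CmdLineSpecialVar.set_timeout_request(run_vars, 30)
CmdLineSpecialVar.set_retry_on_error(run_vars, ["502", "503"])
CmdLineSpecialVar.set_headers(run_vars, {"x-run-id": "local"})

assert CmdLineSpecialVar.get_timeout_request(run_vars) == 30
assert CmdLineSpecialVar.get_retry_on_error(run_vars) == [502, 503]  # coerced to int
assert CmdLineSpecialVar.get_headers(run_vars) == {"x-run-id": "local"}
```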
+ +from datetime import datetime +import time +import json +import argparse +import os + +def get_environment_data(env_name, token): + return { + "name": env_name, + "values": [ + { + "enabled": True, + "key": "token", + "value": token, + "type": "text" + }, + ], + "date" : datetime.now().strftime("%Y-%m-%d %H:%M:%S"), + "timestamp": time.time(), + "_postman_variable_scope": "environment", + } + +def add_environment_data(environment, key, value, type): + environment['values'].append( + { + "enabled": True, + "key": key, + "value": value, + "type": type + } + ) + +def create_env_file(file_path, env_data): + with open(file_path, 'w') as outfile: + json.dump(env_data, outfile) + + +if __name__ == "__main__": + # execute only if run as a script + + parser = argparse.ArgumentParser(description='Generates postman environment file for test') + parser.add_argument('--token', dest="token", help="auth token") + parser.add_argument('--base_url', dest="base_url", help="service base url", default=None) + parser.add_argument('--data_partition', dest="data_partition", help="data partition name", default=None) + parser.add_argument('--cloud_provider', dest="cloud_provider", help="Name of cloud provider in which tests are run") + parser.add_argument('--acl_domain', dest="acl_domain", help="acl_domain name", default=None) + parser.add_argument('--legal_tag', dest="legal_tag", help="legal_tag", default=None) + args = parser.parse_args() + + try: + os.mkdir("./generated") + except FileExistsError: + pass + + env_data = get_environment_data(env_name="wellboredms_continuous_tests", token=args.token) + if args.base_url: + add_environment_data(env_data, "base_url", args.base_url, "string") + if args.data_partition: + add_environment_data(env_data, "data_partition", args.data_partition, "string") + if args.cloud_provider: + add_environment_data(env_data, "cloud_provider", args.cloud_provider, "string") + if args.acl_domain: + add_environment_data(env_data, "acl_domain", args.acl_domain, "string") + if args.legal_tag: + add_environment_data(env_data, "legal_tag", args.legal_tag, "string") + create_env_file("generated/postman_environment.json", env_data) \ No newline at end of file diff --git a/tests/integration/security/conftest.py b/tests/integration/security/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..a923c79db1926bc3240ec31ef768dbe43045c266 --- /dev/null +++ b/tests/integration/security/conftest.py @@ -0,0 +1,29 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
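As a usage note (not part of the patch), the helpers in tests/integration/gen_postman_env.py can also be driven programmatically; the sketch below mirrors what the script's __main__ block does, with placeholder values.

```python
# Sketch only: assumes get_environment_data, add_environment_data and
# create_env_file are imported from tests/integration/gen_postman_env.py,
# and that the ./generated directory exists (the script's __main__ creates it).
# Token, URL and partition values below are placeholders.
env_data = get_environment_data(env_name="wellboredms_continuous_tests", token="<valid token>")
add_environment_data(env_data, "base_url", "<service base url>", "string")
add_environment_data(env_data, "data_partition", "<data partition name>", "string")
add_environment_data(env_data, "legal_tag", "<legal tag>", "string")
create_env_file("generated/postman_environment.json", env_data)
```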
+ + + +def pytest_addoption(parser): + parser.addoption("--base_url", action="store") + parser.addoption("--check_cert", action="store", default=True) + parser.addoption("--token", action="store") + + +def pytest_generate_tests(metafunc): + base_url = metafunc.config.getoption("base_url") + verify_cert = bool(metafunc.config.getoption('check_cert')) + token = metafunc.config.getoption("token") + metafunc.parametrize( + 'base_url, check_cert, token', + [(base_url, verify_cert, token)]) diff --git a/tests/integration/security/readme.md b/tests/integration/security/readme.md new file mode 100644 index 0000000000000000000000000000000000000000..600df64b79067724a84de3671d46fb45ebb398cb --- /dev/null +++ b/tests/integration/security/readme.md @@ -0,0 +1,51 @@ +## Authentication (JWT) tests + +Run integration tests for authentication which checks for common misconfigurations in JWT signing and validation: + +- expired token +- missing token +- unsigned token +- self-signed token +- invalid token + +## Setup Pre-Requisities + +```bash +pip install -r requirements_dev.txt +``` + +## Run Security Tests Locally + +### Pass Options to Test + +The tests expect the following arguments in order to run and these can be set when running `pytest test_auth.py` +- base_url: url of the api that is being tested +- check_cert: boolean to skip the cert validation + - For False - pass in an empty string + - For True - pass in "True" +- token: valid token + +### Run Tests + +Run the python script with arguments: + +```bash +# set options +export base_url="<appurl>" +export check_cert="<boolean to skip the cert validation>" +export token="<valid token>" + +# navigate to the security integration tests directory +cd tests/integration/security + +# run the tests +pytest test_auth.py --base_url $base_url --check_cert $check_cert --token $token +``` + +### Notes + +Issues encountered: + +- Depending on the run environment, the pyjwt and jwt can have conflicts. https://github.com/jpadilla/pyjwt/issues/374 +- pyjwt needs cryptography package to use RS256 signing algorithm https://github.com/jpadilla/pyjwt/issues/230 +- pip does not collect the correct packages with older python versions or when "use python version 3.X" task is missing \ No newline at end of file diff --git a/tests/integration/security/test_auth.py b/tests/integration/security/test_auth.py new file mode 100644 index 0000000000000000000000000000000000000000..5906e5d7ed2701d71f6080191c9996178ef85c7c --- /dev/null +++ b/tests/integration/security/test_auth.py @@ -0,0 +1,99 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import requests +import pytest + +payload = {} + +wellbore_api_group_prefix = 'ddms/v2' + +def build_url(base_url: str, path: str): + return f"{base_url}/{wellbore_api_group_prefix}{path}" + + +@pytest.fixture +def skip_if_gcp_environment(base_url): + """ + In GCP environment there is no AuthorizationPolicy set. 
Certain tests may fail on GCP + and this fixture aims to skip a test case when targeted environment is GCP. + """ + response = requests.request("GET", build_url(base_url, "/about"), verify=False) + assert response.status_code == 200 + about_response = response.json() + + if about_response.get("cloudEnvironment") == "gcp": + pytest.skip('skipped on this cloud provider because no AuthorizationPolicy in place') + + +# Test for expired token +def test_expired_token_returns_40X(base_url, check_cert, token): + url = build_url(base_url, "/about") + headers = { + 'Authorization': 'REMOVED_FOR_CICD_SCAN' + } + response = requests.request("GET", url, headers=headers, data=payload, verify=check_cert) + assert response.status_code == 401 + +# Test for no token on some paths where JWT token is NOT required due to the AuthorizationPolicy. Test to ensure headers are present for docs endpoint +def test_notoken_paths_returns_20X_docs(base_url, check_cert, token): + + url = f"{base_url}/docs" + headers = {} + response = requests.request("GET", url, headers=headers, data=payload, verify=check_cert) + assert response.status_code == 200 + assert 'content-security-policy' in response.headers + +# Test for no token on some paths where JWT token is NOT required due to the AuthorizationPolicy +@pytest.mark.parametrize("path", ["docs", "openapi.json", f"{wellbore_api_group_prefix}/about"]) +def test_notoken_paths_returns_20X(base_url, check_cert, token, path): + + url = f"{base_url}/{path}" + headers = {} + response = requests.request("GET", url, headers=headers, data=payload, verify=check_cert) + assert response.status_code == 200 + +# Test for no token on some paths where JWT token is required due to the AuthorizationPolicy +@pytest.mark.parametrize("path", ["version", "nonExistingPath"]) +def test_notoken_returns_40X(base_url, check_cert, token, skip_if_gcp_environment, path): + + url = build_url(base_url, f"/{path}") + headers = {} + response = requests.request("GET", url, headers=headers, data=payload, verify=check_cert) + assert response.status_code == 403 + assert "access denied" in response.text + + +# Test for invalid token +def test_invalid_token_returns_40X(base_url, check_cert, token): + url = build_url(base_url, "/about") + blank = {} + token_invalid = token[0:len(token) - 10] + headers = { + 'Authorization': f"REMOVED_FOR_CICD_SCAN' + } + + response = requests.request("GET", url, headers=headers, data=blank, verify=check_cert) + assert response.status_code == 401 + + +# Test for unauthorized issuer +def test_invalid_issuer_token_returns_40X(base_url, check_cert, token): + url = build_url(base_url, "/about") + blank = {} + headers = { + 'Authorization': 'REMOVED_FOR_CICD_SCAN' + } + response = requests.request("GET", url, headers=headers, data=blank, verify=check_cert) + assert response.status_code == 401 diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/app_conf_test.py b/tests/unit/app_conf_test.py new file mode 100644 index 0000000000000000000000000000000000000000..5d3ba13c8fa426d632377470b9d91a0db98104b8 --- /dev/null +++ b/tests/unit/app_conf_test.py @@ -0,0 +1,258 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import os +import uuid +import mock +from app.utils import Context +from tests.unit.test_utils import NopeLogger +import app.conf as conf +from app.helper.traces import create_exporter +from app.conf import ConfigurationContainer, Config, check_environment, validator_path_must_exist, \ + cloud_provider_additional_environment + + +@pytest.fixture +def testing_context(): + """ This is a basic Context initialized with empty logger to ensure tested methods can contains logging calls """ + ctx = Context(logger=NopeLogger(), request_id='this is a test') + Context.set_current(ctx) + return ctx + + +@pytest.fixture +def testing_config(): + return Config + + +@pytest.fixture() +def gcp_config_fixture(monkeypatch): + provider_name = "gcp" + + environment_dict = os.environ.copy() + environment_dict[ConfigurationContainer.cloud_provider.key] = provider_name + environment_dict['SERVICE_HOST_ENTITLEMENTS'] = 'https://test-endpoint/api/entitlements' + environment_dict['SERVICE_HOST_STORAGE'] = 'https://test-endpoint/api/storage' + environment_dict['SERVICE_HOST_SEARCH'] = 'https://test-endpoint/api/search' + + conf.Config = ConfigurationContainer.with_load_all( + environment_dict=environment_dict, + contextual_loader=cloud_provider_additional_environment) + + yield conf.Config, provider_name + + # restore initial config + ConfigurationContainer.with_load_all(environment_dict=os.environ, contextual_loader=None) + + +@pytest.fixture() +def azure_config_fixture(monkeypatch): + provider_name = "az" + + environment_dict = os.environ.copy() + environment_dict[ConfigurationContainer.cloud_provider.key] = provider_name + environment_dict['AZ_AI_INSTRUMENTATION_KEY'] = 'ffffffff-1111-2222-aaaa-ffffffffffff' + environment_dict['SERVICE_HOST_ENTITLEMENTS'] = 'https://test-endpoint/api/entitlements' + environment_dict['SERVICE_HOST_STORAGE'] = 'https://test-endpoint/api/storage' + environment_dict['SERVICE_HOST_SEARCH'] = 'https://test-endpoint/api/search' + environment_dict['USE_PARTITION_SERVICE'] = 'disabled' + + conf.Config = ConfigurationContainer.with_load_all( + environment_dict=environment_dict, + contextual_loader=cloud_provider_additional_environment) + + yield conf.Config, provider_name + + # restore initial config + ConfigurationContainer.with_load_all(environment_dict=os.environ, contextual_loader=None) + + +def test_gcp_configuration_checker(gcp_config_fixture): + gcp_config, provider_name = gcp_config_fixture + + assert gcp_config.cloud_provider.value == provider_name + variables_dict = gcp_config.as_printable_dict() + + assert "default_data_tenant_project_id" in 
variables_dict.keys() + assert "default_data_tenant_credentials" in variables_dict.keys() + + with pytest.raises(RuntimeError, match=".*Incorrect .* env var OS_WELLBORE_DDMS_DATA_PROJECT_CREDENTIALS.*"): + check_environment(gcp_config) + + +def test_azure_configuration_checker(azure_config_fixture): + azure_config, provider_name = azure_config_fixture + + assert azure_config.cloud_provider.value == 'az' + variables_dict = azure_config.as_printable_dict().keys() + + check_environment(azure_config) + + # below attribute are gcp only + assert "default_data_tenant_project_id" not in variables_dict + assert "default_data_tenant_credentials" not in variables_dict + + +def test_azure_trace_exporter_created(azure_config_fixture): + exporter_name = 'AzureExporter' + + mock_exporter = mock.MagicMock() + mock_exporter.configure_mock(**{'exporter_name': exporter_name}) + + with mock.patch('app.helper.traces._create_azure_exporter', mock.Mock(return_value=mock_exporter)): + exporter = create_exporter('test-service') + assert len(exporter.exporters) == 1 + # ensure called method is azure exporter + azure_exporter = exporter.exporters[0] + assert azure_exporter.exporter_name == exporter_name + + +def test_gcp_trace_exporter_created(gcp_config_fixture): + exporter_name = 'StackdriverExporter' + + mock_exporter = mock.MagicMock() + mock_exporter.configure_mock(**{'exporter_name': exporter_name}) + + with mock.patch('app.helper.traces._create_gcp_exporter', mock.Mock(return_value=mock_exporter)): + exporter = create_exporter('test-service') + assert len(exporter.exporters) == 1 + # ensure called method is gcp exporter + gcp_exporter = exporter.exporters[0] + assert gcp_exporter.exporter_name == exporter_name + + +def test_config_get_by_name(testing_config): + assert testing_config.dev_mode.value == testing_config['dev_mode'] + + +def test_add(testing_config): + assert testing_config.get('custom', 42) == 42 + assert 'custom' not in testing_config + testing_config.add('custom', 1337) + assert testing_config['custom'] == 1337 + assert testing_config.custom == 1337 + assert testing_config.get('custom', 42) == 1337 + + with pytest.raises(Exception): + testing_config.add('custom', 42) + assert testing_config.custom == 1337 + + # with override + testing_config.add('custom', 42, override=True) + assert testing_config.custom == 42 + + +def test_contains(testing_config): + env_var_key = str(uuid.uuid4()) + assert env_var_key not in testing_config + testing_config.add_from_env(env_var_key=env_var_key, attribute_name='custom_var'+env_var_key, default='Dummy') + + assert env_var_key in testing_config + assert 'custom_var'+env_var_key in testing_config + + +def test_get_as_env(testing_config): + env_var_key = str(uuid.uuid4()) + assert env_var_key not in testing_config + testing_config.add_from_env(env_var_key=env_var_key, attribute_name='custom_var'+env_var_key, default='Dummy') + testing_config.add('Not_an_env'+env_var_key, 'value') + + assert 'Not_an_env'+env_var_key in testing_config + + +def test_add_from_env(testing_config): + expected_path = os.getenv('PATH') + testing_config.add_from_env(env_var_key='PATH', attribute_name='env_var_path') + assert expected_path == testing_config.env_var_path.value + assert expected_path == testing_config['env_var_path'] + assert expected_path == testing_config['PATH'] + + with pytest.raises(Exception): + testing_config.add_from_env(env_var_key='NO_EXISTING_VAR', attribute_name='env_var_path', default='Dummy') + assert expected_path == testing_config.env_var_path.value + + 
testing_config.add_from_env(env_var_key='NO_EXISTING_VAR', attribute_name='env_var_path', default='Dummy', override=True) + assert 'Dummy' == testing_config.env_var_path.value + + +def test_get_fall_back_to_env_if_not_declared(testing_config): + expected_path = os.getenv('PATH') + assert expected_path == testing_config['PATH'] + assert expected_path == testing_config.get('PATH') + + +def test_secret_value_must_not_be_printed(testing_config): + testing_config.add_from_env(env_var_key='DUMMY_VAR', default='ThisIsSecret', secret=True) + + assert 'ThisIsSecret' not in testing_config.DUMMY_VAR.printable_value + assert 'ThisIsSecret' not in str(testing_config) + + +def test_check_environment_must_throw_for_undefined_envvar(testing_config): + # given undefined (overriding it to ensure undefined) + env_var_key = str(uuid.uuid4()) # substitute of the real one + testing_config.add_from_env(env_var_key=env_var_key, + attribute_name=env_var_key, + override=True, + is_mandatory=True) + + testing_config.dev_mode.value = False + + # then + with pytest.raises(RuntimeError) as e: + # when + check_environment(testing_config) + + # and then expect something meaningful in the error description + assert env_var_key in str(e) + + +def test_check_environment_must_throw_for_invalid_path(testing_config): + # given defined with invalid path (overriding it to ensure undefined) + env_var_key = str(uuid.uuid4()) # substitute of the real one + testing_config.add_from_env(env_var_key=env_var_key, + attribute_name=env_var_key, + override=True, + default=env_var_key + '.not_exist.txt', + is_mandatory=True, + validator=validator_path_must_exist) + + testing_config.dev_mode.value = False + + # then + with pytest.raises(RuntimeError) as e: + # when + check_environment(testing_config) + + # and then expect something meaningful in the error description + assert env_var_key in str(e) + + +@pytest.mark.parametrize('input_value,expected_value', [ + ('false', False), + ('0', False), + ('dummy_value', False), + ('TrUe', True), + ('1', True)]) +def test_config_dev_mode(input_value: str, expected_value: bool, testing_config): + # update dev_mode input value without effecting env + testing_config.dev_mode.default = input_value + testing_config.dev_mode.key = str(uuid.uuid4()) + testing_config.dev_mode.load(os.environ) + + # then + assert type(testing_config.dev_mode.value) == bool + assert testing_config.dev_mode.value == expected_value + diff --git a/tests/unit/bulk_persistence/__init__.py b/tests/unit/bulk_persistence/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/bulk_persistence/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
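As a compact recap (not part of the patch) of the container semantics exercised by the tests above; 'feature_x' is a made-up key used purely for illustration.

```python
# Illustrative recap only -- 'feature_x' is not a real configuration setting.
import os
from app.conf import Config

Config.add('feature_x', 1337)               # register an ad-hoc value
assert Config['feature_x'] == 1337          # item access returns the value
assert Config.get('feature_x', 42) == 1337  # the default only applies when the key is missing

# keys that were never declared fall back to the process environment
assert Config['PATH'] == os.getenv('PATH')
```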
+ diff --git a/tests/unit/bulk_persistence/blob_storage_test.py b/tests/unit/bulk_persistence/blob_storage_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2110c011bccd0dc021862b9e9655178242c95ddb --- /dev/null +++ b/tests/unit/bulk_persistence/blob_storage_test.py @@ -0,0 +1,125 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import concurrent.futures +from app.bulk_persistence.blob_storage import create_and_write_blob, BlobFileExporters, load_from_parquet +import pytest +import pandas as pd +from os import path + +Process_pool_executor = concurrent.futures.ProcessPoolExecutor(2) +Thread_pool_executor = concurrent.futures.ThreadPoolExecutor(2) + +DUMMY_CONTENT_BYTES = b'dummy content' +CONSTANT_PLAIN_TEXT = "plain/text" + + +def fake_exporter(file_path: str, *args, **kwargs): + # print('fake exporter received id=', file_id) + with open(file_path, 'wb') as file: + file.write(DUMMY_CONTENT_BYTES) + return file_path, {'content-type': CONSTANT_PLAIN_TEXT} + + +def fake_exporter_as_bytes(file_id: str, *args, **kwargs): + return DUMMY_CONTENT_BYTES, {'content-type': CONSTANT_PLAIN_TEXT} + + +async def async_fake_exporter(file_id: str, *args, **kwargs): + return fake_exporter(file_id) + + +def sync_to_async(sync): + async def sync_wrapped_in_async(*args, **kwargs): + sync(*args, **kwargs) + + return sync_wrapped_in_async + + +VALID_VALUES_FORMS = [ + ([[10, 11], [20, 21], [30, 31]]), +] + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + "executor,exporter", [ + (None, async_fake_exporter), + (None, fake_exporter), + (None, fake_exporter_as_bytes), + (Process_pool_executor, fake_exporter), + (Thread_pool_executor, fake_exporter) + ]) +async def test_executor_exported_combination(executor, exporter): + table = pd.DataFrame([[10, 11], [20, 21], [30, 31]], index=[1, 2, 3], columns=['c1', 'c2']) + + async with create_and_write_blob(table, + executor=executor, + custom_export_to_file_fn=exporter) as blob: + assert blob.content_type == CONSTANT_PLAIN_TEXT + assert blob.data.read() == DUMMY_CONTENT_BYTES + assert blob.metadata['content-type'] == CONSTANT_PLAIN_TEXT + + +@pytest.mark.asyncio +@pytest.mark.parametrize("values_form", VALID_VALUES_FORMS) +async def test_create_blob_various_valid_values(values_form): + table = pd.DataFrame(values_form, index=[1, 2, 3], columns=['c1', 'c2']) + + async with create_and_write_blob(table, + executor=None, + file_exporter=BlobFileExporters.PARQUET) as bulk_blob: + df = load_from_parquet(bulk_blob.data) + assert df.columns.tolist() == table.columns.tolist() + assert df.index.tolist() == table.index.tolist() + assert df['c1'].tolist() == [10, 20, 30] + assert df['c2'].tolist() == [11, 21, 31] + + +@pytest.mark.asyncio +@pytest.mark.parametrize("values_form", VALID_VALUES_FORMS) +async def test_create_blob_various_valid_values_no_column(values_form): + table = pd.DataFrame(values_form, index=[1, 2, 3]) + + async with create_and_write_blob(table, + executor=None, + 
file_exporter=BlobFileExporters.PARQUET) as bulk_blob: + df = load_from_parquet(bulk_blob.data) + assert df.columns.tolist() == [0, 1] + assert df.index.tolist() == table.index.tolist() + assert df[0].tolist() == [10, 20, 30] + assert df[1].tolist() == [11, 21, 31] + + +@pytest.mark.asyncio +async def test_create_blob_should_forward_filename_and_df(): + def capture_it(*args, **kwargs): + capture_it.args = args + capture_it.kwargs = kwargs + return b'', {} + + table = pd.DataFrame([[10, 11], [20, 21], [30, 31]], index=[1, 2, 3]) + async with create_and_write_blob(table, + executor=None, + custom_export_to_file_fn=capture_it, + blob_id='my_custom_filename') as blob: + assert blob.id == 'my_custom_filename' + assert capture_it.args[0] is not None + dir_path, file_name = path.split(capture_it.args[0]) + assert path.exists(dir_path) + df = capture_it.args[1] + assert df.columns.tolist() == [0, 1] + assert df.index.tolist() == [1, 2, 3] + assert df[0].tolist() == [10, 20, 30] + assert df[1].tolist() == [11, 21, 31] diff --git a/tests/unit/bulk_persistence/bulk_id_test.py b/tests/unit/bulk_persistence/bulk_id_test.py new file mode 100644 index 0000000000000000000000000000000000000000..eeddec7d3fcdc7c6591e5a7d0d99c80de171ab93 --- /dev/null +++ b/tests/unit/bulk_persistence/bulk_id_test.py @@ -0,0 +1,21 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.bulk_persistence import BulkId +import uuid + + +def test_bulk_id_is_an_uuid(): + uuid.UUID(BulkId.new_bulk_id()) + diff --git a/tests/unit/bulk_persistence/dataframe_serializer_test.py b/tests/unit/bulk_persistence/dataframe_serializer_test.py new file mode 100644 index 0000000000000000000000000000000000000000..934fa7c79e7e45cd974f912a5992682c0a7330b2 --- /dev/null +++ b/tests/unit/bulk_persistence/dataframe_serializer_test.py @@ -0,0 +1,177 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from app.bulk_persistence.dataframe_serializer import DataframeSerializer, JSONOrient +from tests.unit.test_utils import temp_directory +import pandas as pd +import json +import pytest +from io import StringIO, BytesIO + +from tempfile import SpooledTemporaryFile + +Reference_df = pd.DataFrame([[1., 10, 11], [2., 20, 21], [3., 30, 31]], columns=['ref', 'a', 'b']) +CONSTANT_DATA_JSON = '/data.json' + + +# we're building it manually as we want to spot any change from anywhere that could occur (in pandas for instance) +# we want format to be stable +dataframe_dict = { + 'split': {'index': Reference_df.index.tolist(), + 'columns': Reference_df.columns.tolist(), + 'data': Reference_df.values.tolist()}, + 'index': { + str(row_val): { + str(col_val): Reference_df[col_val].tolist()[count] for col_val in Reference_df.columns.tolist() + } for count, row_val in enumerate(Reference_df.index.tolist()) + }, + 'columns': { + str(col_val): { + str(row_val): Reference_df[col_val].tolist()[count] for count, row_val in enumerate(Reference_df.index.tolist()) + } for col_val in Reference_df.columns.tolist() + }, + 'values': Reference_df.values.tolist(), # WARNING losing index and column info + 'records': [{c: v for c, v in zip(Reference_df.columns, row_values)} for row_values in Reference_df.values] +} + + +def assert_dataframe_equals(lhs: pd.DataFrame, rhs: pd.DataFrame): + assert lhs.columns.tolist() == rhs.columns.tolist() + assert lhs.index.tolist() == rhs.index.tolist() + assert lhs.values.tolist() == rhs.values.tolist() + + +def check_dataframe(df: pd.DataFrame): + """ check against ref dataframe """ + assert_dataframe_equals(df, Reference_df) + + +@pytest.mark.parametrize("orient", list(JSONOrient.__iter__())) +def test_schema(orient): + assert DataframeSerializer.get_schema(orient) + + +@pytest.mark.parametrize("data_dict,orient", [(d, o) for o, d in dataframe_dict.items()]) +def test_load_from_str_various_orient(data_dict, orient): + print(orient) + dataframe_json = json.dumps(data_dict) + print(dataframe_json) + df = DataframeSerializer.read_json(dataframe_json, orient=orient) + if orient == 'values': + assert df.values.tolist() == Reference_df.values.tolist() + else: + check_dataframe(df) + + +def test_load_from_path(temp_directory): + orient = 'split' + data_dict = dataframe_dict[orient] + path = temp_directory + CONSTANT_DATA_JSON + with open(path, 'w') as file: + json.dump(data_dict, file) + + df = DataframeSerializer.read_json(path, orient=orient) + check_dataframe(df) + + +def test_load_from_file_like(temp_directory): + orient = 'split' + data_dict = dataframe_dict[orient] + path = temp_directory + CONSTANT_DATA_JSON + with open(path, 'w') as file: + json.dump(data_dict, file) + + with open(path, 'r') as file: + df = DataframeSerializer.read_json(file, orient=orient) + check_dataframe(df) + + +def test_load_parquet_from_file_like(temp_directory): + path = temp_directory + '/data.parquet' + Reference_df.to_parquet(path) + + with open(path, 'rb') as file: + df = DataframeSerializer.read_parquet(file) + check_dataframe(df) + + buffer = BytesIO() + Reference_df.to_parquet(buffer) + df = DataframeSerializer.read_parquet(buffer) + check_dataframe(df) + + +def test_load_parquet_from_spooled_file(): + max_size = 2000 + + # small one + spooled_file = SpooledTemporaryFile(max_size=max_size) + frame = pd.DataFrame([1], columns=['r']) + frame.to_parquet(spooled_file) + assert not spooled_file._rolled # ensure on buffer mode + df = DataframeSerializer.read_parquet(spooled_file) + assert df.equals(frame) 
+ + # bigger one + spooled_file = SpooledTemporaryFile(max_size=max_size) + frame = pd.DataFrame(list(range(max_size)), columns=['r']) + frame.to_parquet(spooled_file) + assert spooled_file._rolled # ensure on file mode + df = DataframeSerializer.read_parquet(spooled_file) + assert df.equals(frame) + + +@pytest.mark.parametrize("data_dict,orient", [(d, o) for o, d in dataframe_dict.items()]) +def test_to_json_str_various_orient(data_dict, orient): + result = DataframeSerializer.to_json(Reference_df, orient=orient) + actual_dict = json.loads(result) + assert actual_dict == data_dict + + +def test_to_json_to_path(temp_directory): + orient = 'split' + data_dict = dataframe_dict[orient] + path = temp_directory + CONSTANT_DATA_JSON + + result = DataframeSerializer.to_json(Reference_df, path_or_buf=path, orient=orient) + assert result is None + + with open(path, 'r') as file: + actual_dict = json.load(file) + assert actual_dict == data_dict + + +def test_to_json_to_file(temp_directory): + orient = 'split' + data_dict = dataframe_dict[orient] + path = temp_directory + CONSTANT_DATA_JSON + + with open(path, 'w') as file: + result = DataframeSerializer.to_json(Reference_df, path_or_buf=file, orient=orient) + assert result is None + + with open(path, 'r') as file: + actual_dict = json.load(file) + assert actual_dict == data_dict + + +def test_to_json_to_file_like(): + orient = 'split' + data_dict = dataframe_dict[orient] + str_buf = StringIO() + result = DataframeSerializer.to_json(Reference_df, path_or_buf=str_buf, orient=orient) + assert result is None + + str_buf.seek(0) + actual_dict = json.loads(str_buf.read()) + assert actual_dict == data_dict diff --git a/tests/unit/bulk_persistence/mime_types_test.py b/tests/unit/bulk_persistence/mime_types_test.py new file mode 100644 index 0000000000000000000000000000000000000000..fb441385414ed7853a1d078216278b23421628ab --- /dev/null +++ b/tests/unit/bulk_persistence/mime_types_test.py @@ -0,0 +1,43 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from app.bulk_persistence import MimeTypes +import pytest + + +def test_list_all_types(): + assert len(list(MimeTypes.types())) > 0 + + +@pytest.mark.parametrize( + "value,expected", [ + ('application/x-parquet', MimeTypes.PARQUET), + ('APPLICATION/X-PARQUET', MimeTypes.PARQUET), + ('application/parquet', MimeTypes.PARQUET), + ('APPLICATION/PARQUET', MimeTypes.PARQUET), + ('parquet', MimeTypes.PARQUET), + ('.parquet', MimeTypes.PARQUET), + ('PARQUET', MimeTypes.PARQUET), + ('application/json', MimeTypes.JSON), + ('application/messagepack', MimeTypes.MSGPACK) + ]) +def test_mime_from_valid_string(value, expected): + assert MimeTypes.from_str(value) == expected + + +def test_mime_from_invalid_string(): + with pytest.raises(ValueError) as e: + MimeTypes.from_str('unknown_type') + assert 'unknown_type' in str(e.value) + diff --git a/tests/unit/clients/__init__.py b/tests/unit/clients/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/clients/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/clients/client_factory_test.py b/tests/unit/clients/client_factory_test.py new file mode 100644 index 0000000000000000000000000000000000000000..ef6ebe9d1aed9b58901cb7edc7287b7ab5db1827 --- /dev/null +++ b/tests/unit/clients/client_factory_test.py @@ -0,0 +1,85 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest +from pytest_httpx import HTTPXMock +import httpx + +from app.clients import ( + make_storage_record_client, + make_search_client, + make_entitlements_auth_client, + StorageRecordServiceClient, + SearchServiceClient, + EntitlementsAuthServiceClient) +from app.utils import get_or_create_ctx +from tests.unit.test_utils import make_record +import odes_storage.exceptions +import odes_search.exceptions +import odes_entitlements.exceptions +from tests.unit.test_utils import ctx_fixture as test_context + + +@pytest.mark.asyncio +async def test_make_storage_client(httpx_mock: HTTPXMock, test_context): + host = 'http://my_host:81234' + async with make_storage_record_client(host) as client: + assert isinstance(client, StorageRecordServiceClient) + + # ensure host + assert client.api_client.host == host + # using literal here to make config change visible + assert client.api_client._async_client.timeout == httpx.Timeout(timeout=45) + + httpx_mock.add_response(status_code=500) + + # expect the correct exception - ie. composition do not mix several clients + with pytest.raises(odes_storage.exceptions.UnexpectedResponse): + await client.create_or_update_records(data_partition_id="dp", record=[make_record(id='123')]) + + +@pytest.mark.asyncio +async def test_make_search_client(httpx_mock: HTTPXMock, test_context): + host = 'http://my_host:81234' + async with make_search_client(host) as client: + assert isinstance(client, SearchServiceClient) + + # ensure host + assert client.api_client.host == host + assert client.api_client._async_client.timeout == httpx.Timeout(timeout=45) + get_or_create_ctx() + + httpx_mock.add_response(status_code=500) + + # expect the correct exception - ie. composition do not mix several clients + with pytest.raises(odes_search.exceptions.UnexpectedResponse): + await client.get_index_schema(kind='kind', data_partition_id="dp") + + +@pytest.mark.asyncio +async def test_make_entitlement_client(httpx_mock: HTTPXMock, test_context): + host = 'http://my_host:81234' + async with make_entitlements_auth_client(host) as client: + assert isinstance(client, EntitlementsAuthServiceClient) + + # ensure host + assert client.api_client.host == host + assert client.api_client._async_client.timeout == httpx.Timeout(timeout=45) + get_or_create_ctx() + + httpx_mock.add_response(status_code=500) + + # expect the correct exception - ie. composition do not mix several clients + with pytest.raises(odes_entitlements.exceptions.UnexpectedResponse): + await client.auth() diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 0000000000000000000000000000000000000000..e57a488446cc48017a2b8cc7930cd699d72a48a6 --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,54 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.conf import ConfigurationContainer +import app.conf as conf +import os +import pytest + + +@pytest.fixture(autouse=True) +def top_fixture(monkeypatch): + """ + Hooks mechanism from PyTest. 
+ This fixture will be called after `pytest_configure` and can use fixture such as monkeypatch + """ + + provider_name = 'local' + monkeypatch.setenv(name=ConfigurationContainer.cloud_provider.key, value=provider_name) + + environment_dict = os.environ.copy() + conf.Config = ConfigurationContainer.with_load_all( + environment_dict=environment_dict, + contextual_loader=None + ) + + +def pytest_configure(config): + """ + Pytest Hook, called before loading fixtures and test cases. + """ + # Env vars used by client lib configuration. + # Required to be set before fixtures as all tests are currently loading dependencies at import time. + os.environ.setdefault('KEYVAULT_URL', 'non-empty-name') + os.environ.setdefault('SERVICE_HOST_PARTITION', 'https://test-endpoint/api/partition') + + +def pytest_unconfigure(config): + """ + Pytest Hook, called after running all test cases. + """ + del os.environ['KEYVAULT_URL'] + del os.environ['SERVICE_HOST_PARTITION'] + diff --git a/tests/unit/context_test.py b/tests/unit/context_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e7fcc8b068b00657e6aa12703a8326dd8bb16ef5 --- /dev/null +++ b/tests/unit/context_test.py @@ -0,0 +1,119 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from app.utils import Context +import pytest +import uuid +import asyncio +import time + + +def get_context(): + return Context(logger='logger', correlation_id='correlation_id', request_id='request_id', + dev_mode=True, auth='auth', partition_id='partition_id', + app_key='app_key', api_key='api_key', custom1='c1', custom2='c2') + + +@pytest.fixture +def context_base(): + return get_context() + + +def test_context_repr(context_base): + expected = '{"tracer": null, "logger": "logger", "correlation_id": "correlation_id", "request_id": "request_id", "dev_mode": true, "partition_id": "partition_id", "app_key": "app_key", "api_key": "api_key"}' + + assert str(context_base) == expected + assert repr(context_base) == expected + + +def test_context_basic(context_base): + assert context_base.logger == 'logger' + assert context_base['logger'] == 'logger' + + assert context_base.correlation_id == 'correlation_id' + assert context_base.request_id == 'request_id' + assert context_base.dev_mode + assert context_base.auth == 'auth' + assert context_base.partition_id == 'partition_id' + assert context_base.app_key == 'app_key' + assert context_base.api_key == 'api_key' + + assert context_base['custom1'] == 'c1' + assert context_base.get('custom1') == 'c1' + + assert context_base['custom2'] == 'c2' + assert context_base.get('custom2') == 'c2' + + assert context_base.get('unknown', 'default') == 'default' + + with pytest.raises(KeyError): + context_base['unknown'] + + +def test_context_clone(context_base): + new_context = context_base.with_value(correlation_id='new_correlation_id', custom1='new_c1', custom3='added_c3') + + assert new_context.logger == context_base.logger + assert new_context.correlation_id == 'new_correlation_id' + assert new_context.request_id == context_base.request_id + assert new_context.dev_mode == context_base.dev_mode + assert new_context.auth == context_base.auth + assert new_context.partition_id == context_base.partition_id + assert new_context.app_key == context_base.app_key + assert new_context.api_key == context_base.api_key + + assert new_context['custom1'] == 'new_c1' + assert new_context['custom2'] == context_base['custom2'] + assert new_context['custom3'] == 'added_c3' + + +async def context_assert_current_rq_id(expected_request_id): + assert Context.current().request_id == expected_request_id + + +async def context_assigned_and_check(): + id = str(uuid.uuid4()) + Context.set_current(get_context().with_value(request_id=id)) + await asyncio.sleep(1) + await context_assert_current_rq_id(id) + + +@pytest.mark.asyncio +async def test_set_current_with_value(context_base): + context_base.set_current() + Context.set_current_with_value(correlation_id='new_correlation_id') + assert Context.current().correlation_id == 'new_correlation_id' + + +@pytest.mark.asyncio +async def test_context_current(): + size = 100 + coros = [context_assigned_and_check() for _ in range(size)] + assert len(coros) == size + await asyncio.gather(*coros) + + +def sync_context_assigned_and_check(): + id = str(uuid.uuid4()) + Context.set_current(get_context().with_value(request_id=id)) + time.sleep(0.01) + assert Context.current().request_id == id + + +@pytest.mark.asyncio +async def test_context_current_in_thread_executor(): + size = 30 + coros = [asyncio.get_event_loop().run_in_executor(None, sync_context_assigned_and_check) for _ in range(size)] + assert len(coros) == size + await asyncio.gather(*coros) diff --git a/tests/unit/errors/__init__.py b/tests/unit/errors/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/errors/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/errors/error_handler_test.py b/tests/unit/errors/error_handler_test.py new file mode 100644 index 0000000000000000000000000000000000000000..fcde5aecda12e7a92f317b9d3eecf8ac4d78f4e7 --- /dev/null +++ b/tests/unit/errors/error_handler_test.py @@ -0,0 +1,150 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json +import pytest +import mock +from fastapi import Header + +from fastapi.testclient import TestClient +import starlette.status as status + +from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage +from app.middleware import require_data_partition_id +from app.utils import Context +from app.wdms_app import wdms_app +from app.clients import * +from app.helper import traces +from app.auth.auth import require_opendes_authorized_user + +from tests.unit.test_utils import patch_async, create_mock_class, nope_logger_fixture +from odes_storage.exceptions import ( + UnexpectedResponse as OSDUStorageUnexpectedResponse, + ResponseValidationError as OSDUStorageResponseValidationError, + ResponseHandlingException as OSDUStorageResponseHandlingException +) + +from osdu_az.exceptions.data_access_error import DataAccessError as OSDUPartitionError + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + +StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) +SearchServiceClientMock = create_mock_class(SearchServiceClient) +StorageRecordServiceBlobStorageMock = create_mock_class(StorageRecordServiceBlobStorage) + + +@pytest.fixture +def client(nope_logger_fixture): + async def bypass_authorization(): + pass + + async def set_default_partition(data_partition_id: str = Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + mock_storage = mock.AsyncMock(return_value=StorageRecordServiceClientMock()) + mock_search = mock.AsyncMock(return_value=SearchServiceClientMock()) + mock_storage_blob = mock.AsyncMock(return_value=StorageRecordServiceBlobStorageMock()) + + with mock.patch('app.routers.ddms_v2.logset_ddms_v2.get_storage_record_service', mock_storage): + with mock.patch('app.routers.ddms_v2.logset_ddms_v2.get_search_service', 
mock_search): + with mock.patch('app.routers.ddms_v2.log_ddms_v2.get_storage_record_service', mock_storage_blob): + wdms_app.dependency_overrides[require_opendes_authorized_user] = bypass_authorization + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + client = TestClient(wdms_app) + yield client + wdms_app.dependency_overrides = {} + + +header = {"Content-Type": "application/json, charset=utf-16"} + + +def _error_content(code: int, msg: str) -> str: + return json.dumps({ + "error": { + "code": code, + "message": msg + } + }) + + +# This test should work also for other exceptions +def test_storage_client_raise_api_exception(client): + exception = OSDUStorageUnexpectedResponse( + status_code=status.HTTP_401_UNAUTHORIZED, + content=_error_content(status.HTTP_401_UNAUTHORIZED, "Not authorized").encode('utf-8'), + headers=header, + reason_phrase="An unexpected response") + + with StorageRecordServiceClientMock.set_throw('delete_record', exception): + # when + response = client.delete("/ddms/v2/logsets/123456") + json_res = response.json() + assert json_res['origin'] == 'osdu-data-ecosystem-storage' + assert json_res['errors'][0] == "An unexpected response" + assert response.status_code == status.HTTP_401_UNAUTHORIZED + + +def test_storage_client_raise_response_handling_exception(client): + exception = OSDUStorageResponseHandlingException(KeyError("Exception")) + + with StorageRecordServiceClientMock.set_throw('delete_record', exception): + response = client.delete("/ddms/v2/logsets/123456") + json_res = response.json() + + assert json_res['origin'] == 'osdu-data-ecosystem-storage' + assert json_res['errors'][0] == "Exception" + assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + + +def test_storage_client_raise_response_validation_error(client): + exception = OSDUStorageResponseValidationError( + source=ArithmeticError("Cannot divide by zero"), + status_code=403, + content="Cannot divide by zero") + + with StorageRecordServiceClientMock.set_throw('delete_record', exception): + response = client.delete("/ddms/v2/logsets/123456") + json_res = response.json() + + assert json_res['origin'] == 'osdu-data-ecosystem-storage' + assert json_res['errors'][0] == "Cannot divide by zero" + assert response.status_code == status.HTTP_403_FORBIDDEN + + +def test_validation_error_exception(client): + response = client.post("/ddms/v2/logsets", data={'test': 'test'}) + json_res = response.json() + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY + + +@mock.patch.object(StorageRecordServiceClientMock, + 'delete_record', + mock.AsyncMock(side_effect=KeyError("Error"))) +def test_unhandled_exception(client): + with pytest.raises(KeyError): + client.delete("/ddms/v2/logsets/123456") + + +def test_partition_client_raise_api_exception(client): + exception = OSDUPartitionError( + status_code=status.HTTP_404_NOT_FOUND, + message='Failed to retrieve partition. Not found.') + + with StorageRecordServiceBlobStorageMock.set_throw('get_record', exception): + response = client.get("/ddms/v2/logs/123456/data") + json_res = response.json() + + assert response.status_code == status.HTTP_404_NOT_FOUND + assert json_res['errors'][0] == 'Failed to retrieve partition. Not found.' 
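The error handler tests above all assert the same translation contract: exceptions raised by the storage, search or partition clients surface as a JSON body carrying an 'origin' and an 'errors' list, with the upstream status code preserved when one is available and HTTP 500 otherwise. A minimal sketch of that kind of handler, assuming a plain FastAPI app and assuming UnexpectedResponse exposes the status_code and reason_phrase it is constructed with; this is illustrative only, not the service's actual middleware:

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from odes_storage.exceptions import UnexpectedResponse

app = FastAPI()

@app.exception_handler(UnexpectedResponse)
async def storage_unexpected_response_handler(request: Request, exc: UnexpectedResponse):
    # Keep the upstream status code and expose the reason phrase, matching the
    # assertions on json_res['origin'] and json_res['errors'][0] in the tests above.
    return JSONResponse(
        status_code=exc.status_code,
        content={"origin": "osdu-data-ecosystem-storage", "errors": [exc.reason_phrase]},
    )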
diff --git a/tests/unit/injector/__init__.py b/tests/unit/injector/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/injector/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/injector/app_injector_test.py b/tests/unit/injector/app_injector_test.py new file mode 100644 index 0000000000000000000000000000000000000000..59b2453a2c0efdb1d71510aef85ef0614829693b --- /dev/null +++ b/tests/unit/injector/app_injector_test.py @@ -0,0 +1,131 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app.injector.app_injector import AppInjector, AppInjectorModule, WithLifeTime +import pytest +import uuid + + +class A: + def foo(self) -> str: + return 'A' + + +class B(A): + def foo(self) -> str: + return 'B' + + +class Custom(A): + def __init__(self, value: str = 'Custom'): + self.value = value + + def foo(self) -> str: + return self.value + + +async def builder_b(): + return B() + + +async def builder_custom(value: str): + return Custom(value) + + +class AppInjectorModuleTesting(AppInjectorModule): + def __init__(self, coro): + self.coro = coro + + def configure(self, injector: AppInjector): + injector.register(A, self.coro) + + +@pytest.mark.asyncio +async def test_app_injector2(): + injector = AppInjector() + + async def builder(): + return 'tt' + injector.register(str, builder) + + obj = await injector.get(str) + print(obj) + + +@pytest.mark.asyncio +async def test_app_injector(): + injector = AppInjector() + + async def builder(): + return B() + injector.register(A, builder) + + instance: A = await injector.get(A) + assert instance.foo() == B().foo() + + +@pytest.mark.asyncio +async def test_app_injector_known_should_raise(): + with pytest.raises(Exception): + injector = AppInjector() + await injector.get(A) + + +@pytest.mark.asyncio +async def test_app_injector_module(): + injector = AppInjector() + AppInjectorModuleTesting(builder_b).configure(injector) + instance: A = await injector.get(A) + assert instance.foo() == B().foo() + + AppInjectorModuleTesting(builder_custom).configure(injector) + instance = await injector.get(A, value='my_value') + assert instance.foo() == 'my_value' + + +@pytest.mark.asyncio +async def test_app_injector_lifetime(): + class Inner: + def __init__(self): + self.value = uuid.uuid4() + + async def build_fn(): + return Inner() + + # default is transient + 
injector_default = AppInjector() + injector_default.register(Inner, build_fn) + + i1 = await injector_default.get(Inner) + i2 = await injector_default.get(Inner) + + assert i1.value != i2.value + + # transient + injector_transient = AppInjector() + injector_transient.register(Inner, build_fn, WithLifeTime.Transient()) + + i1 = await injector_transient.get(Inner) + i2 = await injector_transient.get(Inner) + + assert i1.value != i2.value + + # singleton + injector_singleton = AppInjector() + injector_singleton.register(Inner, build_fn, WithLifeTime.Singleton()) + + i1 = await injector_singleton.get(Inner) + i2 = await injector_singleton.get(Inner) + + assert i1.value == i2.value diff --git a/tests/unit/middleware/__init__.py b/tests/unit/middleware/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/middleware/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/model/__init__.py b/tests/unit/model/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/model/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/model/entity_utils_test.py b/tests/unit/model/entity_utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f36772dd022688ef4419e7b3c3115191ce4087ca --- /dev/null +++ b/tests/unit/model/entity_utils_test.py @@ -0,0 +1,49 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pytest + +from app.model import entity_utils, schema_version +from app.model.entity_utils import Entity, KindMetaData + + +def test_get_version(): + assert entity_utils.get_version(Entity.LOG) == schema_version.log_version + assert entity_utils.get_version(Entity.LOGSET) == schema_version.logset_version + assert entity_utils.get_version(Entity.MARKER) == schema_version.marker_version + assert entity_utils.get_version(Entity.TRAJECTORY) == schema_version.trajectory_version + assert entity_utils.get_version(Entity.WELL) == schema_version.well_version + assert entity_utils.get_version(Entity.WELLBORE) == schema_version.wellbore_version + assert entity_utils.get_version(Entity.DIP) == schema_version.dip_version + assert entity_utils.get_version(Entity.DIPSET) == schema_version.dipset_version + + +def test_get_kind(): + expected_kind = 'my-data-partition:source-1:well:1.0.2' + actual_kind = entity_utils.get_kind(data_partition='my-data-partition', source='source-1', entity=Entity.WELL) + assert actual_kind == expected_kind + + +def test_get_kind_meta(): + expected_meta = KindMetaData(data_partition_id='other-data-partition', + source='source-1', + entity_type='my-entity', + version='0.0.8') + actual_meta = entity_utils.get_kind_meta('other-data-partition:source-1:my-entity:0.0.8') + assert actual_meta == expected_meta + + +def test_get_kind_meta_invalid(): + with pytest.raises(ValueError, match=f"Invalid kind format in entity:version"): + entity_utils.get_kind_meta('entity:version') diff --git a/tests/unit/model/log_bulk_test.py b/tests/unit/model/log_bulk_test.py new file mode 100644 index 0000000000000000000000000000000000000000..8afac8a16947025ff53c2fc3f50b02ff3afb1fa5 --- /dev/null +++ b/tests/unit/model/log_bulk_test.py @@ -0,0 +1,66 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from app.model.log_bulk import LogBulkHelper +from app.bulk_persistence import BulkId +from tests.unit.test_utils import basic_record +import uuid +import pytest + + +@pytest.fixture +def record_with_bulkURI(basic_record): + basic_record.data = {'custombulkid':'toto', 'log': {'bulkURI': str(uuid.uuid4())}} + return basic_record + + +def test_bulk_id_is_an_uuid(): + uuid.UUID(BulkId.new_bulk_id()) + + +def test_update_bulk_id(record_with_bulkURI): + b_id = str(uuid.uuid4()) + LogBulkHelper.update_bulk_id(record_with_bulkURI, b_id) + assert record_with_bulkURI.data['log']['bulkURI'] == uuid.UUID(b_id).urn + + +def test_update_bulk_id_with_path(record_with_bulkURI): + b_id = str(uuid.uuid4()) + LogBulkHelper.update_bulk_id(record_with_bulkURI, b_id, "data.custombulkid") + assert record_with_bulkURI.data['custombulkid'] == uuid.UUID(b_id).urn + + +def test_update_bulk_id_on_not_valid_data_should_throw(basic_record): + basic_record.data = 'not a dict data' + + with pytest.raises(Exception): + LogBulkHelper.update_bulk_id(basic_record, str(uuid.uuid4())) + + +def test_get_update_bulk_id(record_with_bulkURI): + assert LogBulkHelper.get_bulk_id(record_with_bulkURI) == record_with_bulkURI.data['log']['bulkURI'] + + +def test_update_bulk_id_on_empty_record(basic_record): + b_id = str(uuid.uuid4()) + LogBulkHelper.update_bulk_id(basic_record, b_id) + assert basic_record.data['log']['bulkURI'] == uuid.UUID(b_id).urn + + +def test_get_bulk_id_on_empty_record(basic_record): + assert LogBulkHelper.get_bulk_id(basic_record) is None + + + + diff --git a/tests/unit/model/model_utils_test.py b/tests/unit/model/model_utils_test.py new file mode 100644 index 0000000000000000000000000000000000000000..34221b2365829689e6c2a35db35b266378c66787 --- /dev/null +++ b/tests/unit/model/model_utils_test.py @@ -0,0 +1,307 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import pydantic + +import json + +import pydantic +import pytest +from odes_storage.models import Record +from pydantic import Extra, ValidationError + +import app.model.model_curated as models +import app.model.model_utils as utils + + +@pytest.mark.parametrize("ddms_model", [models.log, + models.logset, + models.well, + models.wellbore, + models.trajectory, + models.marker, + models.dipset]) +def test_list_record_models(ddms_model): + # the main goal here is to spot any new record model + # If OK and a valid 'record' kind model, add it to the expected list + # otherwise the list function must be reviewed + assert 'data' in ddms_model.__fields__.keys() + assert Extra.forbid == ddms_model.Config.extra + assert Extra.allow == ddms_model.__fields__['data'].type_.__config__.extra + + +def test_check_record_model_base_config(): + # this the base config make sure that this has not changed + assert Record.__config__.extra == Extra.ignore + + +wellbore_str = """ +{ + "kind": "opendes:osdu:wellbore:2.0.0", + "acl": { + "viewers": ["data.default.viewers@opendes.p4d.cloud.slb-ds.com"], + "owners": ["data.default.owners@opendes.p4d.cloud.slb-ds.com"] + }, + "legal": { + "legaltags": ["opendes-public-usa-dataset-1"], + "otherRelevantDataCountries": ["US"] + }, + "data": { + "country": "FR", + "name": "toto", + "airGap": { + "unitKey": "m", + "value": 123 + }, + "xxx_color": "#56AF8E" + } +} +""" + +# This object should be serialized correctly by Record model +# And also by wdms models (log, logset, ...) +wellbore_str_with_meta_valid_ddms = """ +{ + "kind":"opendes:osdu:wellbore:2.0.0", + "acl":{ + "viewers":[ + "data.default.viewers@opendes.p4d.cloud.slb-ds.com" + ], + "owners":[ + "data.default.owners@opendes.p4d.cloud.slb-ds.com" + ] + }, + "legal":{ + "legaltags":[ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries":[ + "US" + ] + }, + "data":{ + "country":"FR", + "name":"toto", + "airGap":{ + "unitKey":"m", + "value":123 + }, + "md":{ + "unitKey":"some unit", + "value":12.0 + }, + "xxx_color":"#56AF8E" + }, + "meta":[ + { + "name":"Some name", + "kind":"CRS", + "persistableReference":"ref", + "propertyNames":[ + "name" + ], + "propertyValues":[ + "value" + ], + "uncertainty":0.9 + } + ] +} +""" + +# This object should be serialized correctly by Record model +# But not by wdms models (log, logset, ...) 
+wellbore_str_with_meta_not_valid_ddms = """ +{ + "kind": "opendes:osdu:wellbore:2.0.0", + "acl": { + "viewers": ["data.default.viewers@opendes.p4d.cloud.slb-ds.com"], + "owners": ["data.default.owners@opendes.p4d.cloud.slb-ds.com"] + }, + "legal": { + "legaltags": ["opendes-public-usa-dataset-1"], + "otherRelevantDataCountries": ["US"] + }, + "data": { + "country": "FR", + "name": "toto", + "airGap": { + "unitKey": "m", + "value": 123 + }, + "xxx_color": "#56AF8E" + }, + "meta": [ + {"notValidMetaKey": "Some value"} + ] +} +""" + + +def test_extra_allow_in_data_on_wellbore_record(): + wellbore_obj = models.wellbore.parse_raw(wellbore_str) + wellbore_dict = wellbore_obj.dict(exclude_unset=True) + + assert wellbore_dict['data']['xxx_color'] == '#56AF8E' + + +@pytest.mark.parametrize("ddms_model", + [models.log, models.logset, models.well, models.wellbore, models.trajectory, models.marker, + models.dipset]) +def test_record_should_not_serialize_known_meta(ddms_model): + expected_keys = ["kind", "name", "persistableReference", "propertyNames", "propertyValues", "uncertainty"] + expected_values = ["CRS", "Some name", "ref", ["name"], ["value"], 0.9] + + parsed = ddms_model.parse_raw(wellbore_str_with_meta_valid_ddms) + parsed_meta_dict = parsed.meta[0].dict() + + for index in range(len(expected_keys)): + key = expected_keys[index] + value = expected_values[index] + assert parsed_meta_dict[key] == value + + record = utils.to_record(parsed) + assert "meta" in record.__fields__.keys() + + for index in range(len(expected_keys)): + key = expected_keys[index] + value = expected_values[index] + assert record.meta[0][key] == value + + +@pytest.mark.parametrize("ddms_model", + [models.log, models.logset, models.well, models.wellbore, models.trajectory, models.marker, + models.dipset]) +def test_record_should_not_serialize_unknown_meta(ddms_model): + # Record model should handle unknown meta fields + record = Record.parse_raw(wellbore_str_with_meta_not_valid_ddms) + meta_obj_keys = record.meta[0].keys() + + assert "notValidMetaKey" in meta_obj_keys + assert "Some value" == record.meta[0]['notValidMetaKey'] + + # But wellbore ddms model should not handle unknown meta fields + with pytest.raises(pydantic.ValidationError) as execinfo: + ddms_model.parse_obj(record) + assert "extra fields not permitted" in str(execinfo) + + +def test_extra_forbidden_at_root_on_wellbore_record(): + # given json string with extra at root level + json_with_root_extra_str = json.dumps({'extra': '', **json.loads(wellbore_str)}) + with pytest.raises(ValidationError): + models.wellbore.parse_raw(json_with_root_extra_str) + + +def test_no_data_lost_after_convert_to_record(): + # testing whether the model conversion is correct or not + wellbore_obj = models.wellbore.parse_raw(wellbore_str) + wellbore_obj.type = 'dummy_type' + + # from dict + record_dict = Record(**wellbore_obj.dict()).dict() + # we check if the record model has ignored the extra passed to it + assert 'type' not in record_dict.keys() + + # from json + record_dict_from_json = Record.parse_raw(wellbore_obj.json()).dict() + assert 'type' not in record_dict_from_json.keys() + + parsed_record_dict = Record.parse_raw(wellbore_obj.json()).dict() + assert 'xxx_color' in parsed_record_dict['data'].keys() + + assert '#56AF8E' == parsed_record_dict['data']['xxx_color'] + + +def test_to_record_do_not_populate_unset(): + wellbore_obj = models.wellbore.parse_raw(wellbore_str) + record = utils.to_record(wellbore_obj) + + reloaded_wellbore_dict = json.loads(record.json()) + 
origin_wellbore_dict = json.loads(wellbore_str) + # checking the data part, must be same as the inputs + assert origin_wellbore_dict['data'] == reloaded_wellbore_dict['data'] + + +def test_to_record_compatibility_snake_camel_case(): + # as otherRelevantDataCountries is declared in camel case in wdms model + # as other_relevant_data_countries in Record with 'otherRelevantDataCountries' as alias + + wellbore_obj = models.wellbore.parse_raw(wellbore_str) + assert wellbore_obj.legal.otherRelevantDataCountries == ["US"] + + record = utils.to_record(wellbore_obj) + assert record.legal.other_relevant_data_countries == ["US"] + + assert json.loads(record.json(by_alias=True))['legal']['otherRelevantDataCountries'] == ["US"] + + +def test_from_record_compatibility_snake_camel_case(): + # as otherRelevantDataCountries is declared in camel case in wdms model + # as other_relevant_data_countries in Record with 'otherRelevantDataCountries' as alias + + record = Record.parse_raw(wellbore_str) + wellbore_from_record = utils.from_record(models.wellbore, record) + assert wellbore_from_record.legal.otherRelevantDataCountries == ["US"] + + +def test_back_and_forth_from_to_record(): + expected_dict = json.loads(wellbore_str) + + # case wellbore -> record -> wellbore + wellbore = utils.from_record(models.wellbore, + utils.to_record(models.wellbore.parse_raw(wellbore_str))) + + assert utils.record_to_dict(wellbore) == expected_dict + + # case record -> wellbore -> record + record = utils.to_record( + utils.from_record(models.wellbore, Record.parse_raw(wellbore_str))) + + assert utils.record_to_dict(record) == expected_dict + + # compare json outputs + assert json.loads(utils.record_to_json(record)) == json.loads(utils.record_to_json(wellbore)) + + +@pytest.mark.parametrize('model_cls, data_content', + [ + (models.log, {"relationships": {}}), + (models.logset, {"relationships": {"wellbore": ""}}), + (models.well, {"relationships": {}}), + (models.wellbore, {"relationships": {}}), + (models.trajectory, {"relationships": {"wellbore": ""}}), + (models.marker, + {"name": "foo", "md": {"value": 1, "unitKey": "m"}, "relationships": {"wellbore": ""}}), + (models.dipset, {"relationships": {"wellbore": ""}}) + ]) +def test_model_allow_extra_field_in_relationship_success(model_cls, data_content): + data_content["relationships"]["extra_field_in_relationship"] = "extra_value" + raw_base_dict = { + "acl": {"viewers": [], "owners": []}, + "legal": {"legaltags": []}, + "id": "123456", + "kind": "opened:osdu:dummy", + "data": data_content + } + parsed_obj = model_cls.parse_raw(json.dumps(raw_base_dict)) + # deserialized should keep it extra field + assert parsed_obj.data.relationships.extra_field_in_relationship == "extra_value" + # serialized should keep it + assert parsed_obj.dict()['data']['relationships']['extra_field_in_relationship'] == "extra_value" + # using utils from/to record + record_obj = utils.to_record(parsed_obj) + assert record_obj.data['relationships']['extra_field_in_relationship'] == "extra_value" + parsed_obj = utils.from_record(model_cls, record_obj) + assert parsed_obj.data.relationships.extra_field_in_relationship == "extra_value" \ No newline at end of file diff --git a/tests/unit/model/root_model_test.py b/tests/unit/model/root_model_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7d9eb7ef7ee5cf784ade5cf3b43f628e511f9fa8 --- /dev/null +++ b/tests/unit/model/root_model_test.py @@ -0,0 +1,112 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import app.model.model_curated as model +import pytest + + +def test_removing_root_value_as_number(): + json = '{"associations": null, ' \ + '"description": null, ' \ + '"format": null, ' \ + '"name": "testfloatvalue", ' \ + '"unitKey": null, ' \ + '"value": 456.0, ' \ + '"values": [123.0, 456.0]' \ + '}' + my_model = model.namedProperty.parse_raw(json) + result = my_model.json() + assert (result == json) + + +def test_removing_root_value_as_string(): + json = '{"associations": null, ' \ + '"description": null, ' \ + '"format": null, ' \ + '"name": "teststringvalue", ' \ + '"unitKey": null, ' \ + '"value": "stringiam", ' \ + '"values": null' \ + '}' + my_model = model.namedProperty.parse_raw(json) + result = my_model.json() + assert (result == json) + + +def test_removing_root_linestring(): + json = '{"bbox": null, ' \ + '"coordinates": [[1.1, 1.2], [2.1, 2.2]], ' \ + '"type": "LineString"' \ + '}' + my_model = model.GeoJsonLineString.parse_raw(json) + result = my_model.json() + assert (result == json) + + +def test_removing_root_multilinestring(): + json = '{"bbox": null, ' \ + '"coordinates": [[[1.1, 1.2], [2.1, 2.2]], [[3.1, 3.2], [4.1, 4.2]]], ' \ + '"type": "MultiLineString"' \ + '}' + my_model = model.GeoJsonMultiLineString.parse_raw(json) + result = my_model.json() + assert (result == json) + + +def test_removing_root_polygon_coords(): + json = '{"bbox": null, ' \ + '"coordinates": [[1.1, 1.2], [2.1, 2.2]], ' \ + '"type": "MultiPoint"' \ + '}' + my_model = model.GeoJsonMultiPoint.parse_raw(json) + result = my_model.json() + assert (result == json) + + +def test_removing_root_polygon_points(): + json = '{"bbox": null, ' \ + '"coordinates": [[[1.1, 1.2], [2.1, 2.2]], [[3.1, 3.2], [4.1, 4.2]]], ' \ + '"type": "Polygon"' \ + '}' + my_model = model.Polygon.parse_raw(json) + result = my_model.json() + assert (result == json) + + +def test_removing_root_polygon_array(): + json = '{"bbox": null, ' \ + '"coordinates": [[[[1.1, 1.2], [2.1, 2.2]], [[3.1, 3.2], [4.1, 4.2]]]], ' \ + '"type": "MultiPolygon"' \ + '}' + my_model = model.GeoJsonMultiPolygon.parse_raw(json) + result = my_model.json() + assert (result == json) + + +@pytest.mark.parametrize("sub_type", [ + '{"bbox": null, "coordinates": [1.0, 2.1], "type": "Point"}', + '{"bbox": null, "coordinates": [[1.0, 2.1]], "type": "MultiPoint"}', + '{"bbox": null, "coordinates": [[1.0, 2.1]], "type": "LineString"}', + '{"bbox": null, "coordinates": [[[1.0, 2.1]]], "type": "MultiLineString"}', + '{"bbox": null, "coordinates": [[[1.0, 2.1]]], "type": "Polygon"}', + '{"bbox": null, "coordinates": [[[[1.0, 2.1]]]], "type": "MultiPolygon"}', +]) +def test_removing_root_GeoJsonFeatureGeometryItem(sub_type): + json = '{"bbox": null, ' \ + f'"geometries": [{sub_type}], ' \ + '"type": "GeometryCollection"' \ + '}' + my_model = model.geometryItem.parse_raw(json) + result = my_model.json() + assert (result == json) diff --git a/tests/unit/routers/__init__.py b/tests/unit/routers/__init__.py new file mode 100755 index 
0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/routers/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/routers/about_test.py b/tests/unit/routers/about_test.py new file mode 100644 index 0000000000000000000000000000000000000000..b3b58911093bbcf7a6377243488f34ec3130ac94 --- /dev/null +++ b/tests/unit/routers/about_test.py @@ -0,0 +1,83 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from fastapi.testclient import TestClient +import pytest +from app.wdms_app import wdms_app, wellbore_api_group_prefix +from app.auth.auth import require_opendes_authorized_user +from app.helper import traces +from app.conf import Config +from tests.unit.test_utils import ctx_fixture + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + + +@pytest.fixture +def client(ctx_fixture): + yield TestClient(wdms_app) + wdms_app.dependency_overrides = {} + + +@pytest.fixture +def client_with_authenticated_user(): + async def mock_require_opendes_authorized_user(): + # empty method + pass + + client = TestClient(wdms_app) + wdms_app.dependency_overrides[require_opendes_authorized_user] = mock_require_opendes_authorized_user + yield client + wdms_app.dependency_overrides = {} + + +def build_url(path: str): + return wellbore_api_group_prefix + path + + +def test_about_contains_build_n_version(client): + + response = client.get(build_url("/about")) + assert response.status_code == 200 + + response_json = response.json() + assert response_json['buildNumber'] + assert response_json['version'] + + +@pytest.mark.parametrize("cloud_provider", ['Azure', 'gcp', 'unknown', None]) +def test_about_with_cloud_provider(client, cloud_provider): + + Config.cloud_provider.value = cloud_provider + + response = client.get(build_url("/about")) + assert response.status_code == 200 + json_response = response.json() + assert json_response['cloudEnvironment'] == cloud_provider + + +def test_version_requires_authentication(client): + response = client.get(build_url("/version")) + assert response.status_code == 403 + + +def test_version_properly_read_details(client_with_authenticated_user, monkeypatch): + # override value of build details + Config.build_details.value = 'key1=value1; key2=value2' + + response = client_with_authenticated_user.get(build_url("/version")) 
+ assert response.status_code == 200 + response_json = response.json() + assert response_json['details']['key1'] == 'value1' + assert response_json['details']['key2'] == 'value2' diff --git a/tests/unit/routers/ddms_v2/__init__.py b/tests/unit/routers/ddms_v2/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/routers/ddms_v2/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/routers/ddms_v2/common_ddms_v2_test.py b/tests/unit/routers/ddms_v2/common_ddms_v2_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c4aa0a4fa7db715c054c4eb8eade2ef2edf85bfb --- /dev/null +++ b/tests/unit/routers/ddms_v2/common_ddms_v2_test.py @@ -0,0 +1,351 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import json + +import mock +import pytest +import starlette.status as status +from fastapi import HTTPException, Header +from fastapi.testclient import TestClient +from odes_search.models import CursorQueryResponse +from odes_storage.models import RecordVersions, CreateUpdateRecordsResponse + +from app.auth.auth import require_opendes_authorized_user +from app.clients import * +from app.helper import traces +from app.middleware import require_data_partition_id +from app.model.entity_utils import Entity +from app.model.model_curated import * +from app.routers.ddms_v2.storage_helper import StorageHelper +from app.routers.search.search_wrapper import SearchWrapper +from app.utils import Context +from app.wdms_app import wdms_app, app_injector +from tests.unit.test_utils import create_mock_class, make_record, nope_logger_fixture + +""" +Contains unified common tests for the different kind. 
Mainly CRUD test cases +""" + +tests_parameters = [ + ('/ddms/v2/logs', log(id='123456', data={})), + ('/ddms/v2/logsets', logset(id='123456', data={})), + ('/ddms/v2/dipsets', dipset(id="123456", data={})), + ('/ddms/v2/markers', marker(acl={}, + kind='opendes:wks:marker:1.0.4', + legal=Legal(), + data=markerData(md=ValueWithUnit(value=1.0, unitKey='m'), name='name'), + id='123456')), + ('/ddms/v2/trajectories', trajectory(id='123456', data={})), + ('/ddms/v2/wellbores', wellbore(id='123456', data={})), + ('/ddms/v2/wells', well(id='123456', data={})) +] + + +tests_parameters_for_recursive = [ + ('/ddms/v2/logsets', logset(id='123456', data={})), + ('/ddms/v2/dipsets', dipset(id="123456", data={})), + ('/ddms/v2/wellbores', wellbore(id='123456', data={})), + ('/ddms/v2/wells', well(id='123456', data={})) +] + +StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) +SearchServiceClientMock = create_mock_class(SearchServiceClient) +StorageHelperMock = create_mock_class(StorageHelper) +SearchWrapperMock = create_mock_class(SearchWrapper) + + +@pytest.fixture +def client(nope_logger_fixture): + async def bypass_authorization(): + # empty method + pass + + async def set_default_partition(data_partition_id: str = Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + async def build_mock_storage(): + return StorageRecordServiceClientMock() + + async def build_mock_search(): + return SearchServiceClientMock() + + app_injector.register(StorageRecordServiceClient, build_mock_storage) + app_injector.register(SearchServiceClient, build_mock_search) + + # override authentication dependency + previous_overrides = wdms_app.dependency_overrides + + try: + wdms_app.dependency_overrides[require_opendes_authorized_user] = bypass_authorization + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + client = TestClient(wdms_app) + yield client + finally: + wdms_app.dependency_overrides = previous_overrides # clean up + + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_success(client, base_url, record_obj): + record_id = record_obj.id + moc = mock.AsyncMock(return_value=record_obj) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc): + # when + response = client.get(f'{base_url}/{record_id}', headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == status.HTTP_200_OK + + # then assert storage is called with the proper id and data_partition + moc.assert_called_with(id=record_id, data_partition_id='testing_partition') + + # assert it validates the input object schema + record_obj.validate(response.json()) + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_without_default_values(client, base_url, record_obj): + record_id = record_obj.id + moc = mock.AsyncMock(return_value=record_obj) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc): + # when + response = client.get(f'{base_url}/{record_id}', headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == status.HTTP_200_OK + + # assert we retrieve only the input fields + assert(response.json() == record_obj.dict(exclude_unset=True)) + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def 
test_get_record_not_found_case(client, base_url, record_obj): + record_id = record_obj.id + exception = HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='Not found') + + # the following doesn't work, do not raise but return AsyncMock instead: + # with mock.patch.object(StorageRecordServiceClientMock, 'get_record', mock.AsyncMock(side_effect=record_obj)): + + with StorageRecordServiceClientMock.set_throw('get_record', exception): + # when + response = client.get(f'{base_url}/{record_id}', headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == status.HTTP_404_NOT_FOUND + assert 'not found' in response.text.lower() + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_delete_record_successful(client, base_url, record_obj): + record_id = record_obj.id + moc = mock.AsyncMock() + + with mock.patch.object(StorageRecordServiceClientMock, 'delete_record', moc): + response = client.delete(f'{base_url}/{record_id}', headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == status.HTTP_204_NO_CONTENT + + # then assert storage is called with the proper id and data_partition + moc.assert_called_with(id=record_id, data_partition_id='testing_partition') + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_delete_recursive_record_with_recursive_not_in_query_successful(client, base_url, record_obj): + record_id = record_obj.id + + with mock.patch.object(StorageRecordServiceClientMock, 'delete_record', + wraps=StorageRecordServiceClientMock.delete_record) as mock_storage: + with mock.patch.object(SearchServiceClientMock, 'query_with_cursor', + wraps=SearchServiceClientMock.query_with_cursor) as mock_search: + # when + response = client.delete(f'{base_url}/{record_id}', headers={'data-partition-id': 'testing_partition'}) + # then + mock_storage.assert_called_with(id=record_id, data_partition_id='testing_partition') + assert not mock_search.called + assert response.status_code == status.HTTP_204_NO_CONTENT + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_delete_recursive_record_with_recursive_false_successful(client, base_url, record_obj): + record_id = record_obj.id + with mock.patch.object(StorageRecordServiceClientMock, 'delete_record', + wraps=StorageRecordServiceClientMock.delete_record) as mock_storage: + with mock.patch.object(SearchServiceClientMock, 'query_with_cursor', + wraps=SearchServiceClientMock.query_with_cursor) as mock_search: + # when + response = client.delete(f'{base_url}/{record_id}', + headers={'data-partition-id': 'testing_partition'}, params={'recursive': False}) + # then + mock_storage.assert_called_with(id=record_id, data_partition_id='testing_partition') + assert not mock_search.called + assert response.status_code == status.HTTP_204_NO_CONTENT + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters_for_recursive) +def test_delete_recursive_record_with_recursive_true_successful(client, base_url, record_obj): + record_id = record_obj.id + + with mock.patch('app.routers.ddms_v2.storage_helper.StorageHelper.delete_recursively') as moc_delete_rec: + client.delete(f'{base_url}/{record_id}', + headers={'data-partition-id': 'testing_partition'}, params={'recursive': True}) + moc_delete_rec.assert_called_once() + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters_for_recursive) +def test_delete_recursive_record_with_recursive_true_successful_delete_multiple_records(client, base_url, record_obj): + 
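+    # the search wrapper is mocked below to report two child records for the parent being deleted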
record_id = record_obj.id + mocked_query_response = CursorQueryResponse(**{'results': [{'id': 'id:one', 'kind': 'data-partition:wks:log:1.0.5'}, + {'id': 'id:two', + 'kind': 'data-partition:wks:log:1.0.5'}]}) + with mock.patch( + 'app.routers.search.search_wrapper.SearchWrapper.query_cursorless', + return_value=mocked_query_response + ): + with mock.patch.object( + StorageRecordServiceClientMock, 'get_record', + return_value=record_obj + ): + with mock.patch.object( + StorageRecordServiceClientMock, 'delete_record', + wraps=StorageRecordServiceClientMock.delete_record) as moc_storage_delete_record: + client.delete(f'{base_url}/{record_id}', + headers={'data-partition-id': 'testing_partition'}, + params={'recursive': True}) + # the number of calls to delete_record is 3 because the record has 2 children: + # the recursive delete removes the record itself and its 2 children, which makes 3 calls + assert moc_storage_delete_record.call_count == 3 + + +@pytest.mark.parametrize('base_url, sub_entity_list', [ + ('/ddms/v2/logsets', [Entity.LOG]), + ('/ddms/v2/dipsets', [Entity.LOG]), + ('/ddms/v2/wellbores', [Entity.LOGSET, + Entity.LOG, + Entity.MARKER]), + ('/ddms/v2/wells', [Entity.WELLBORE, + Entity.LOGSET, + Entity.LOG, + Entity.MARKER, + Entity.TRAJECTORY, + Entity.DIPSET]) +]) +def test_delete_recursive_check_sub_deleted_type(client, base_url, sub_entity_list): + with mock.patch( + 'app.routers.ddms_v2.storage_helper.StorageHelper.delete_recursively', + return_value=None + ) as moc_delete_recursively: + client.delete(f'{base_url}/123', + headers={'data-partition-id': 'dp'}, + params={'recursive': True}) + assert set(moc_delete_recursively.call_args.kwargs['entity_list']) == set(sub_entity_list) + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_versions_successful(client, base_url, record_obj): + record_id = record_obj.id + expect_response = RecordVersions(recordId='123456', versions=["12356", "89693"]) + moc_get_all_record_versions = mock.AsyncMock(return_value=expect_response) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_all_record_versions', moc_get_all_record_versions): + # when + response = client.get(f'{base_url}/{record_id}/versions', headers={'data-partition-id': 'testing_partition'}) + + # then + assert response.status_code == status.HTTP_200_OK + assert RecordVersions.parse_raw(response.text) == expect_response + moc_get_all_record_versions.assert_called_with(id=record_id, data_partition_id='testing_partition') + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_versions_errors(client, base_url, record_obj): + exception = HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='Not found') + + with StorageRecordServiceClientMock.set_throw('get_all_record_versions', exception): + # when + response = client.get(f'{base_url}/{record_obj.id}/versions', + headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == exception.status_code + assert exception.detail in response.text + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_at_version_successful(client, base_url, record_obj): + record_id = record_obj.id + record_obj.version = 1337 + + moc_get_record_version = mock.AsyncMock(return_value=record_obj) + with mock.patch.object(StorageRecordServiceClientMock, 'get_record_version', moc_get_record_version): + # when + response = client.get(f'{base_url}/{record_id}/versions/{record_obj.version}', + headers={'data-partition-id': 
'testing_partition'}) + assert response.status_code == status.HTTP_200_OK + + # then assert storage is called with the proper id and data_partition + moc_get_record_version.assert_called_with(id=record_id, + version=record_obj.version, + data_partition_id='testing_partition') + + # assert it validates the input object schema + response_obj = record_obj.validate(response.json()) + assert response_obj.version == record_obj.version + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_at_version_successful_without_default_values(client, base_url, record_obj): + record_id = record_obj.id + record_obj.version = 1337 + + moc_get_record_version = mock.AsyncMock(return_value=record_obj) + with mock.patch.object(StorageRecordServiceClientMock, 'get_record_version', moc_get_record_version): + # when + response = client.get(f'{base_url}/{record_id}/versions/{record_obj.version}', + headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == status.HTTP_200_OK + + # assert we retrieve only the input fields + assert(response.json() == record_obj.dict(exclude_unset=True)) + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_at_version_errors(client, base_url, record_obj): + exception = HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail='Not found') + + with StorageRecordServiceClientMock.set_throw('get_record_version', exception): + # when + response = client.get(f'{base_url}/{record_obj.id}/versions/1337', + headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == exception.status_code + assert exception.detail in response.text + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_post_records_successful(client, base_url, record_obj): + expected_response = CreateUpdateRecordsResponse(recordCount=2, recordIds=['rec1', 'rec2']) + + # done this way because of the current inconsistency of root fields between wdms model vs storage client model + record_dict_list = [ + make_record(True, data=record_obj.data.dict()) for _ in expected_response.record_ids + ] + + moc_create_or_update_records = mock.AsyncMock(return_value=expected_response) + + with mock.patch.object(StorageRecordServiceClientMock, 'create_or_update_records', moc_create_or_update_records): + # when + response = client.post(base_url, data=json.dumps(record_dict_list)) + + # then + assert response.status_code == status.HTTP_200_OK + assert CreateUpdateRecordsResponse.parse_raw(response.text) == expected_response + + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_post_records_error_invalid_data(client, base_url, record_obj): + response = client.post(base_url, data=json.dumps([{"invalid": "data"}])) + assert response.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY diff --git a/tests/unit/routers/ddms_v2/log_data_test.py b/tests/unit/routers/ddms_v2/log_data_test.py new file mode 100644 index 0000000000000000000000000000000000000000..d908f5570839013d195a8aeed67330ef2205dc05 --- /dev/null +++ b/tests/unit/routers/ddms_v2/log_data_test.py @@ -0,0 +1,211 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os + +from tests.unit.test_utils import create_mock_class, nope_logger_fixture + +from tempfile import TemporaryDirectory + +from fastapi import HTTPException, Header +from fastapi.testclient import TestClient +import pytest + +from osdu.core.api.storage.blob_storage_base import BlobStorageBase +from osdu.core.api.storage.blob_storage_local_fs import LocalFSBlobStorage + + +from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage +from app.helper import traces +from app.auth.auth import require_opendes_authorized_user +from app.middleware import require_data_partition_id + +from app.utils import Context +from app.wdms_app import wdms_app, app_injector +from app.clients import * + +from starlette.status import HTTP_422_UNPROCESSABLE_ENTITY + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + +data_partition_id = 'test_partition' + +log_payload = { + "acl": { + "owners": [ + "data.default.owners@opendes.p4d.cloud.slb-ds.com" + ], + "viewers": [ + "data.default.viewers@opendes.p4d.cloud.slb-ds.com" + ] + }, + "data": { + "name": "13223135351" + }, + "kind": "opendes:wks:log:0.0.1", + "legal": { + "legaltags": [ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries": ["US", "FR"] + } + } + +headers = {"data-partition-id": data_partition_id} + +prev_data = {"columns": ["col_100X"], "data": [[0], [1], [2]], 'index': [0, 1, 2]} + +StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) + + +@pytest.fixture +def client(): + with TemporaryDirectory() as tmpdir: + async def storage_service_builder(*args, **kwargs): + return StorageRecordServiceBlobStorage(LocalFSBlobStorage(directory=tmpdir), 'p1', 'c1') + + async def blob_storage_builder(*args, **kwargs): + return LocalFSBlobStorage(directory=tmpdir) + + async def set_default_partition(data_partition_id: str = Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + app_injector.register(BlobStorageBase, blob_storage_builder) + app_injector.register(StorageRecordServiceClient, storage_service_builder) + + async def do_nothing(): + # empty method + pass + + wdms_app.dependency_overrides[require_opendes_authorized_user] = do_nothing + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + + yield TestClient(wdms_app) + + wdms_app.dependency_overrides = {} # clean up + + +@pytest.fixture +def client_with_log(client): + # Create or update a log record + response = client.post("/ddms/v2/logs", json=[log_payload], headers=headers) + assert response.status_code in range(200, 209), "Create or update log failed" + + log_id = response.json()["recordIds"][0] + + # add data to the log + response = client.post(f"/ddms/v2/logs/{log_id}/data", params={"orient": "split"}, json=prev_data, headers=headers) + assert response.status_code in range(200, 209), "PUT log data failed" + + # get data + response = client.get(f"/ddms/v2/logs/{log_id}/data", headers=headers) + assert response.status_code in range(200, 209), "GET log 
data by channels failed" + assert response.json() == prev_data, "GET log data response json body should match data for latest version" + + # get versions + response = client.get(f"/ddms/v2/logs/{log_id}/versions", headers=headers) + assert response.status_code == 200, "GET log data failed" + + version_id = response.json()["versions"][0] + + yield client, log_id, version_id + + response = client.delete(f"/ddms/v2/logs/{log_id}", headers=headers) + assert response.status_code in range(200, 209), "Delete test log failed" + + +@pytest.mark.parametrize("orient_value", ["split", "index", "columns", "records", "values"]) +def test_log_get_data_orient_param_validation(client_with_log, orient_value): + client, log_id, _ = client_with_log + response = client.get(f"/ddms/v2/logs/{log_id}/data", params={"orient":orient_value}, headers=headers) + assert response.status_code == 200 + + +def test_log_get_orient_param_validation_negative(client_with_log): + client, log_id, _ = client_with_log + response = client.get(f"/ddms/v2/logs/{log_id}/data", params={"orient":"wrong_orient"}, headers=headers) + assert response.status_code == HTTP_422_UNPROCESSABLE_ENTITY + + +@pytest.mark.parametrize("orient_value, data", +[ + ( + "split", + { + "columns": ["Ref", "col_100X"], + "index": [0, 1, 2], + "data": [[0.0, 1001], [0.5, 1002], [1.0, 1003]] + } + ), + ( + "index", + { + "0": {"Ref": 0.0, "col_100X": 1001}, + "1": {"Ref": 0.5, "col_100X": 1002}, + "2": {"Ref": 1.0, "col_100X": 1003}, + } + ), + ( + "columns", + { + "Ref": {"0": 0.0, "1": 0.5, "2": 1.0}, + "col_100X": {"0": 1001, "1": 1002, "2": 1003}, + } + ), + ( + "records", + [ + {"Ref": 0.0, "col_100X": 1001}, + {"Ref": 0.5, "col_100X": 1002}, + {"Ref": 1.0, "col_100X": 1003} + ] + ) +]) +def test_log_post_data_orient_param_validation(client_with_log, orient_value, data): + client, log_id, _ = client_with_log + response = client.post(f"/ddms/v2/logs/{log_id}/data", params={"orient": orient_value}, json=data, headers=headers) + assert response.ok + + +def test_log_post_data_orient_param_validation_negative(client_with_log): + client, log_id, _ = client_with_log + response = client.post(f"/ddms/v2/logs/{log_id}/data", params={"orient": "wrong_orient"}, json={}, headers=headers) + assert response.status_code == HTTP_422_UNPROCESSABLE_ENTITY + + +def test_log_version_data(client_with_log): + client, log_id, version_id = client_with_log + + # get data for previous version + response = client.get(f"/ddms/v2/logs/{log_id}/versions/{version_id}/data", headers=headers) + assert response.status_code == 200, "GET data for previous version failed" + assert response.json() == prev_data, "response json body should match previous version data" + + +@pytest.mark.parametrize("orient_value", ["split", "index", "columns", "records", "values"]) +def test_log_version_data_orient_param_validation(client_with_log, orient_value): + client, log_id, version_id = client_with_log + + # get data for previous version + response = client.get(f"/ddms/v2/logs/{log_id}/versions/{version_id}/data", params={"orient": orient_value}, headers=headers) + assert response.ok + + +def test_log_version_data_orient_param_validation_negative(client_with_log): + client, log_id, version_id = client_with_log + + # get data for previous version + response = client.get(f"/ddms/v2/logs/{log_id}/versions/{version_id}/data", params={"orient": "wrong"}, headers=headers) + assert response.status_code == HTTP_422_UNPROCESSABLE_ENTITY \ No newline at end of file diff --git 
a/tests/unit/routers/ddms_v2/log_ddms_v2_private_test.py b/tests/unit/routers/ddms_v2/log_ddms_v2_private_test.py new file mode 100644 index 0000000000000000000000000000000000000000..2223523419746c2ac68208d1fd0461d977cf0897 --- /dev/null +++ b/tests/unit/routers/ddms_v2/log_ddms_v2_private_test.py @@ -0,0 +1,281 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import mock +import pytest +import uuid + +import pandas as pd +from pandas.util.testing import assert_frame_equal + +from fastapi import Header + +from app.routers.ddms_v2.log_ddms_v2 import (fetch_record, + update_records, + _write_log_data, + get_persistence, + _get_log_data) + +from app.clients.storage_service_client import StorageRecordServiceClient + +from app.helper import traces +from app.auth.auth import require_opendes_authorized_user +from app.middleware import require_data_partition_id +from app.wdms_app import wdms_app, app_injector +from app.utils import Context +from tests.unit.test_utils import create_mock_class, nope_logger_fixture, ctx_fixture + +from odes_storage.models import CreateUpdateRecordsResponse, Record + +data_partition_id = 'test_partition' + +log_payload = { + "acl": { + "owners": [ + "data.default.owners@opendes.p4d.cloud.slb-ds.com" + ], + "viewers": [ + "data.default.viewers@opendes.p4d.cloud.slb-ds.com" + ] + }, + "legal": { + "legaltags": [ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries": ["US", "FR"], + "status": None + }, + "kind": f"{data_partition_id}:wks:log:1.0.5", + "data": { + "name": f"{os.path.basename(__file__)}" + } +} + + +StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) + + +@pytest.fixture +def with_test_setup(ctx_fixture): + async def bypass_authorization(): + # empty method + pass + + async def set_default_partition(data_partition_id: str = Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + async def build_mock_storage(): + return StorageRecordServiceClientMock() + + app_injector.register(StorageRecordServiceClient, build_mock_storage) + + # override authentication dependency + previous_overrides = wdms_app.dependency_overrides + + try: + wdms_app.dependency_overrides[require_opendes_authorized_user] = bypass_authorization + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + Context.set_current_with_value(app_injector=app_injector) + yield + finally: + wdms_app.dependency_overrides = previous_overrides # clean up + + +@pytest.fixture +def mock_persistence(): + class MockPersistence: + + def __init__(self): + self.dataframe = None + self.id = None + + async def read_bulk(self, ctx, record: Record, bulk_id_path: str) -> pd.DataFrame: + return self.dataframe + + async def write_bulk(self, ctx, dataframe) -> str: + self.dataframe = dataframe + self.id = str(uuid.uuid4()) + return self.id + + mock = MockPersistence() + + async def override_get_persistence(): + return mock + + previous_overrides = wdms_app.dependency_overrides 
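+        # keep a reference to the current overrides so they can be restored in the finally block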
+ + try: + wdms_app.dependency_overrides[get_persistence] = override_get_persistence + yield mock + finally: + wdms_app.dependency_overrides = previous_overrides # clean up + + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + + +@pytest.mark.asyncio +async def test_fetch_record(with_test_setup): + expected_record = Record.parse_obj(log_payload) + moc_get_record = mock.AsyncMock(return_value=expected_record) + moc_get_record_version = mock.AsyncMock(return_value=expected_record) + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc_get_record): + with mock.patch.object(StorageRecordServiceClientMock, 'get_record_version', moc_get_record_version): + + computed_record = await fetch_record(ctx=Context.set_current_with_value(partition_id=data_partition_id), + record_id="132") + + assert computed_record == expected_record + moc_get_record.assert_called_with(id="132", data_partition_id=data_partition_id) + moc_get_record_version.assert_not_called() + + +@pytest.mark.asyncio +async def test_fetch_record_version(with_test_setup): + expected_record = Record.parse_obj(log_payload) + moc_get_record = mock.AsyncMock(return_value=expected_record) + moc_get_record_version = mock.AsyncMock(return_value=expected_record) + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc_get_record): + with mock.patch.object(StorageRecordServiceClientMock, 'get_record_version', moc_get_record_version): + + computed_record = await fetch_record(ctx=Context.set_current_with_value(partition_id=data_partition_id), + record_id="132", version="1") + + assert computed_record == expected_record + moc_get_record_version.assert_called_with(id="132", data_partition_id=data_partition_id, version="1") + moc_get_record.assert_not_called() + + +@pytest.mark.asyncio +async def test_update_records(with_test_setup): + expected_response = CreateUpdateRecordsResponse(record_count=2, record_ids=["1", "2"], skipped_record_ids=["1"]) + moc = mock.AsyncMock(return_value=expected_response) + + with mock.patch.object(StorageRecordServiceClientMock, 'create_or_update_records', moc): + record = Record.parse_obj(log_payload) + computed_response = await update_records(ctx=Context.set_current_with_value(partition_id=data_partition_id), + records=[record]) + + assert computed_response == expected_response + moc.assert_called_with(record=[record], data_partition_id=data_partition_id) + + +@pytest.mark.asyncio +async def test_write_log_data(with_test_setup, mock_persistence): + expected_response = CreateUpdateRecordsResponse(record_count=2, record_ids=["1", "2"], skipped_record_ids=["1"]) + + expected_record = Record.parse_obj(log_payload) + get_record_moc = mock.AsyncMock(return_value=expected_record) + create_or_update_records_moc = mock.AsyncMock(return_value=expected_response) + + data = pd.DataFrame.from_dict({'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']}) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', get_record_moc): + with mock.patch.object(StorageRecordServiceClientMock, 'create_or_update_records', + create_or_update_records_moc): + + computed_response = await _write_log_data( + ctx=Context.set_current_with_value(partition_id=data_partition_id), + persistence=mock_persistence, + logid="1234", + bulk_path=None, + dataframe=data) + + assert computed_response == expected_response + get_record_moc.assert_called_once_with(id="1234", data_partition_id=data_partition_id) + 
create_or_update_records_moc.assert_called_once_with(record=[expected_record], + data_partition_id=data_partition_id) + assert_frame_equal(mock_persistence.dataframe, data) + + +@pytest.mark.asyncio +async def test_write_log_data_with_bulk_path(with_test_setup, mock_persistence): + expected_response = CreateUpdateRecordsResponse(record_count=2, record_ids=["1", "2"], skipped_record_ids=["1"]) + + expected_record = Record.parse_obj(log_payload) + expected_record.data["custom_bulkid"] = "default" + get_record_moc = mock.AsyncMock(return_value=expected_record) + create_or_update_records_moc = mock.AsyncMock(return_value=expected_response) + + data = pd.DataFrame.from_dict({'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']}) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', get_record_moc): + with mock.patch.object(StorageRecordServiceClientMock, 'create_or_update_records', create_or_update_records_moc): + computed_response = await _write_log_data( + ctx=Context.set_current_with_value(partition_id=data_partition_id), + persistence=mock_persistence, + logid="1234", + bulk_path="data.custom_bulkid", + dataframe=data) + + assert computed_response == expected_response + get_record_moc.assert_called_once_with(id="1234", data_partition_id=data_partition_id) + create_or_update_records_moc.assert_called_once_with(record=[expected_record], + data_partition_id=data_partition_id) + assert_frame_equal(mock_persistence.dataframe, data) + + +@pytest.mark.asyncio +async def test_get_log_data(with_test_setup, mock_persistence): + expected_record = Record.parse_obj(log_payload) + expected_data = {'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']} + + get_record_moc = mock.AsyncMock(return_value=expected_record) + mock_persistence.dataframe = pd.DataFrame.from_dict(expected_data) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', get_record_moc): + + computed_response = await _get_log_data( + ctx=Context.set_current_with_value(partition_id=data_partition_id), + persistence=mock_persistence, + logid="1234", + version=None, + orient="columns", + bulk_id_path=None) + + get_record_moc.assert_called_once_with(id="1234", data_partition_id=data_partition_id) + + assert computed_response.status_code == 200 + assert computed_response.body == b'{"col_1":{"0":3,"1":2,"2":1,"3":0},"col_2":{"0":"a","1":"b","2":"c","3":"d"}}' + assert computed_response.media_type == 'application/json' + + +@pytest.mark.asyncio +async def test_get_log_data_with_bulk_path(with_test_setup, mock_persistence): + expected_record = Record.parse_obj(log_payload) + expected_record.data["custom_bulkid"] = "424242" + expected_data = {'col_1': [3, 2, 1, 0], 'col_2': ['a', 'b', 'c', 'd']} + + get_record_moc = mock.AsyncMock(return_value=expected_record) + mock_persistence.dataframe = pd.DataFrame.from_dict(expected_data) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', get_record_moc): + + computed_response = await _get_log_data( + ctx=Context.set_current_with_value(partition_id=data_partition_id), + persistence=mock_persistence, + logid="1234", + version=None, + orient="columns", + bulk_id_path="data.custom_bulkid") + + get_record_moc.assert_called_once_with(id="1234", data_partition_id=data_partition_id) + + assert computed_response.status_code == 200 + assert computed_response.body == b'{"col_1":{"0":3,"1":2,"2":1,"3":0},"col_2":{"0":"a","1":"b","2":"c","3":"d"}}' + assert computed_response.media_type == 'application/json' + + diff --git 
a/tests/unit/routers/ddms_v2/log_ddms_v2_test.py b/tests/unit/routers/ddms_v2/log_ddms_v2_test.py new file mode 100644 index 0000000000000000000000000000000000000000..e389225fe6e15f7d7026aec851c6d3c5b7e86ca0 --- /dev/null +++ b/tests/unit/routers/ddms_v2/log_ddms_v2_test.py @@ -0,0 +1,480 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +tests specific to logset APIs. Common tests implemented in common_ddms_v2_test +""" + +import asyncio +import json +from tempfile import TemporaryDirectory +from io import BytesIO + +import numpy as np +import pandas as pd +import pyarrow as pa +import pyarrow.parquet as pq +from fastapi import HTTPException, Header +from fastapi.testclient import TestClient +import pytest + +from osdu.core.api.storage.blob_storage_base import BlobStorageBase +from osdu.core.api.storage.blob_storage_local_fs import LocalFSBlobStorage +from odes_storage.models import Record, CreateUpdateRecordsResponse + +from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage +from app.helper import traces +from app.auth.auth import require_opendes_authorized_user +from app.middleware import require_data_partition_id +from app.model.log_bulk import LogBulkHelper +from app.bulk_persistence import MimeTypes +from app.utils import Context +from app.wdms_app import wdms_app, app_injector +from app.clients import * +from tests.unit.test_utils import assert_dict_contained, make_record + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + + +class TestHelper: + DATA_PARTITION_ID = 'test_partition' + BASE_HEADERS = {'data-partition-id': DATA_PARTITION_ID} + URL_PREFIX = '/ddms/v2' + + @staticmethod + def build_url(path: str): + return TestHelper.URL_PREFIX + path + + @staticmethod + def get_record_from_storage(record_id): + async def _fetcher(_id): + storage: StorageRecordServiceClient = await app_injector.get(StorageRecordServiceClient) + return await storage.get_record(_id, TestHelper.DATA_PARTITION_ID) + + loop = asyncio.get_event_loop() + return loop.run_until_complete(_fetcher(record_id)) + + @staticmethod + def post_record_to_storage(one_record_or_list_of_records): + """ return one id it single record else list of ids """ + + async def _putter(record_or_list): + records = record_or_list if type(record_or_list) == list else [record_or_list] + storage: StorageRecordServiceClient = await app_injector.get(StorageRecordServiceClient) + response = await storage.create_or_update_records(record=records, + data_partition_id=TestHelper.DATA_PARTITION_ID) + ids = response.record_ids + return ids if type(record_or_list) == list else ids[0] + + loop = asyncio.get_event_loop() + return loop.run_until_complete(_putter(one_record_or_list_of_records)) + + @staticmethod + def make_minimal_log_dict(name: str, id: str = None) -> dict: + return TestHelper.make_minimal_log_record(name, id).dict() + + @staticmethod + def 
make_minimal_log_record(name: str, id: str = None) -> Record: + record = make_record() + record.data = {"log": {"name": name}} + + if id: + record.id = id + return record + + @staticmethod + def get_bulk_id_from_record(record) -> str: + return LogBulkHelper.get_bulk_id(record) + + +@pytest.fixture +def client(): + with TemporaryDirectory() as tmpdir: + async def storage_service_builder(*args, **kwargs): + return StorageRecordServiceBlobStorage(LocalFSBlobStorage(directory=tmpdir), 'p1', 'c1') + + async def blob_storage_builder(*args, **kwargs): + return LocalFSBlobStorage(directory=tmpdir) + + async def set_default_partition(data_partition_id: str = Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + app_injector.register(BlobStorageBase, blob_storage_builder) + app_injector.register(StorageRecordServiceClient, storage_service_builder) + + async def do_nothing(): + # empty method + pass + + wdms_app.dependency_overrides[require_opendes_authorized_user] = do_nothing + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + + yield TestClient(wdms_app) + + wdms_app.dependency_overrides = {} # clean up + + +log_data = [ + pytest.param([[2.0, 10.1], [2.2, 20.1], [2.4, 30.1], [2.6, 40.]], id="double"), + pytest.param([[2.0, 10.1], [2.2, 20.1], [2.4, np.NaN], [2.6, 40.1]], id="double with nan"), + pytest.param([[2.0, [1.0, 10.0]], [2.2, [2.0, 20.0]], [2.4, [3.0, 30.0]], [2.6, [4.0, 40.0]]], id="double array"), + pytest.param([[2.0, [1.0, 10.0]], [2.2, np.NaN], [2.4, [2.0, 20.0]], [2.6, [3.0, 30.0]]], + id="double array with nan"), + pytest.param([[2.0, "ZONE 1"], [2.2, "Zone 2"], [2.4, "ZONE 3"], [2.6, "ZONE 4"]], id="text"), + pytest.param([[2.0, "ZONE 1"], [2.2, np.NaN], [2.4, "ZONE 2"], [2.6, "ZONE 3"]], id="text with nan"), + pytest.param([[2.0, ["ZONE 1", "AAA"]], [2.2, ["ZONE 2", "BBB"]], [2.4, ["ZONE 3", ]]], id="text array"), + pytest.param([[2.0, ["ZONE 1", "AAA"]], [2.2, np.NaN], [2.4, ["ZONE 3", ]]], id="text array with NaN"), + pytest.param([[2.4, 10.1], [2.2, 20.1], [2.0, 30.1]], id="decreasing index"), + pytest.param([[2.4, 10.1], [2.3, np.NaN], [2.2, 20.], [2.0, 30.1]], id="decreasing index with nan"), + pytest.param([[2.0, 10.1], [1.8, 20.1], [1.8, 20.1], [2.0, 30.1]], id="duplicate index"), + pytest.param([[2.0, 10.1], [1.8, 20.1], [1.8, np.NaN], [2.0, 30.1]], id="duplicate index with nan"), +] +log_data_orient = 'values' + + +def logs_write(client, test_data, nan_conversion): + # given + log_id = '1337' + record = TestHelper.make_minimal_log_record('test_logs_write_data_log', id=log_id) + TestHelper.post_record_to_storage(record) + df = pd.DataFrame(test_data) + content = df + if nan_conversion: + content = content.fillna("NaN") + + byte_stream = BytesIO(str.encode(content.to_json(orient=log_data_orient))) + # when WRITE ---------------------------------------------------------- + response = client.post(TestHelper.build_url(f'/logs/{log_id}/upload_data?orient=' + log_data_orient), + files={'file': ('test_file_data.json', byte_stream, 'application/json')}, + headers=TestHelper.BASE_HEADERS) + return log_id, df, response, client + + +@pytest.mark.parametrize("nan_conversion", [pytest.param(True, id="nan_string"), + pytest.param(False, id="native_nan")]) +@pytest.mark.parametrize("test_data", log_data) +def test_logs_write_then_read_data(client, test_data, nan_conversion): + log_id, df, response, client = logs_write(client, test_data, nan_conversion) + + # then + assert response.status_code == 200 + store_response = 
CreateUpdateRecordsResponse.parse_raw(response.content) + assert store_response.record_count == 1 + assert store_response.record_ids[0] == log_id + + # check + actual = TestHelper.get_record_from_storage(log_id) + bulk_id = TestHelper.get_bulk_id_from_record(actual) + assert bulk_id + + # when READ ----------------------------------------------------------- + response = client.get(TestHelper.build_url(f'/logs/{log_id}/data?orient=' + log_data_orient), + headers=TestHelper.BASE_HEADERS) + + data = response.json() + actual_df = pd.DataFrame(data).replace("NaN", np.NaN) + pd.testing.assert_frame_equal(df, actual_df) + + +@pytest.mark.parametrize("nan_conversion", [pytest.param(True, id="nan_string"), + pytest.param(False, id="native_nan")]) +@pytest.mark.parametrize("test_data", log_data) +def test_logs_write_then_read_data_statistics(client, test_data, nan_conversion): + log_id, df, response, client = logs_write(client, test_data, nan_conversion) + + # when READ ----------------------------------------------------------- + response = client.get(TestHelper.build_url(f'/logs/{log_id}/statistics'), + headers=TestHelper.BASE_HEADERS) + + df_stat = df.describe(include="all").to_json() + data = response.json() + actual_df_stat = pd.DataFrame(data).to_json() + assert df_stat == actual_df_stat + + +@pytest.mark.parametrize("nan_conversion", [pytest.param(True, id="nan_string"), + pytest.param(False, id="native_nan")]) +@pytest.mark.parametrize("test_data", log_data) +def test_logs_upload_file_then_read_data(client, test_data, nan_conversion): + + log_id, df, response, client = logs_write(client, test_data, nan_conversion) + + # then + assert response.status_code == 200 + store_response = CreateUpdateRecordsResponse.parse_raw(response.content) + assert store_response.record_count == 1 + assert store_response.record_ids[0] == log_id + + # check + actual = TestHelper.get_record_from_storage(log_id) + bulk_id = TestHelper.get_bulk_id_from_record(actual) + assert bulk_id + + # when READ ----------------------------------------------------------- + response = client.get(TestHelper.build_url(f'/logs/{log_id}/data?orient=' + log_data_orient), + headers=TestHelper.BASE_HEADERS) + + data = response.json() + actual_df = pd.DataFrame(data).replace("NaN", np.NaN) + pd.testing.assert_frame_equal(df, actual_df) + + +def test_logs_upload_parquet_read_json(client): + # given + record = TestHelper.make_minimal_log_record('test_logs_upload_parquet_read_json', id='1337') + TestHelper.post_record_to_storage(record) + + df = pd.DataFrame([[1, [1, 4]], [2, [2, 5]], [3, [3, 6]]]) + buffer = BytesIO() + pq.write_table(pa.Table.from_pandas(df), buffer, compression='none') + buffer.seek(0) + + # df.to_hdf(byte_stream, key='df') + + # when WRITE ---------------------------------------------------------- + response = client.post(TestHelper.build_url('/logs/1337/upload_data?orient=' + log_data_orient), + files={'file': ('test_file_data.parquet', buffer, MimeTypes.PARQUET.type)}, + headers=TestHelper.BASE_HEADERS) + + # then + assert response.status_code == 200 + store_response = CreateUpdateRecordsResponse.parse_raw(response.content) + assert store_response.record_count == 1 + assert store_response.record_ids[0] == '1337' + + # check + actual = TestHelper.get_record_from_storage('1337') + bulk_id = TestHelper.get_bulk_id_from_record(actual) + assert bulk_id + + # when READ ----------------------------------------------------------- + response = client.get(TestHelper.build_url('/logs/1337/data?orient=' + log_data_orient), + 
headers=TestHelper.BASE_HEADERS) + + data = response.json() + actual_df = pd.DataFrame(data).replace("NaN", np.NaN) + pd.testing.assert_frame_equal(df, actual_df) + + +@pytest.mark.parametrize("nan_conversion", [pytest.param(True, id="nan_string"), + pytest.param(False, id="native_nan")]) +@pytest.mark.parametrize("test_data", log_data) +def test_logs_write_twice_then_read_data(client, test_data, nan_conversion): + # given + record = TestHelper.make_minimal_log_record('test_logs_write_twice_then_read_data', id='1337') + TestHelper.post_record_to_storage(record) + + initial_df = pd.DataFrame(test_data) + initial_df_json = initial_df + if nan_conversion: + initial_df_json = initial_df_json.fillna("NaN") + initial_df_json = initial_df_json.to_json(orient='values') + + # when WRITE twice ---------------------------------------------------------- + client.post(TestHelper.build_url('/logs/1337/data?orient=values'), + initial_df_json, + headers=TestHelper.BASE_HEADERS) + + client.post(TestHelper.build_url('/logs/1337/data?orient=values'), + initial_df_json, + headers=TestHelper.BASE_HEADERS) + + # when READ ----------------------------------------------------------- + response = client.get(TestHelper.build_url('/logs/1337/data?orient=values'), headers=TestHelper.BASE_HEADERS) + + assert response.status_code == 200 + data = response.json() + actual_df = pd.DataFrame(data).replace("NaN", np.NaN) + pd.testing.assert_frame_equal(initial_df, actual_df) + + +def double_frame_with_nan(): + df = pd.DataFrame(np.linspace(20., 40., 50)) + x = pd.DataFrame(10 * np.cos(np.linspace(20., 40., 50))) + x = x.mask(x > 8.) + df[1] = x + return df + + +def decreasing_index_with_nan(): + df = pd.DataFrame({0: np.linspace(40., 20., 50), + 1: 10 * np.cos(np.linspace(20., 40., 50))}) + x = pd.DataFrame(10 * np.cos(np.linspace(20., 40., 50))) + x = x.mask(x > 8.) + df[1] = x + return df + + +def duplicate_index(): + df = pd.concat([pd.DataFrame(np.linspace(20., 35., 25)), pd.DataFrame(np.linspace(35., 50., 25))]) + x = pd.DataFrame(10 * np.cos(np.linspace(20., 40., 50))) + df[1] = x + return df + + +def duplicate_index_with_nan(): + df = pd.concat([pd.DataFrame(np.linspace(20., 35., 25)), pd.DataFrame(np.linspace(35., 50., 25))]) + x = pd.DataFrame(10 * np.cos(np.linspace(20., 40., 50))) + x = x.mask(x > 8.) 
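+    # values greater than 8 are masked to NaN to introduce missing samples into the test frame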
+ df[1] = x + return df + + +decimated_log_data = [ + pytest.param(pd.DataFrame({0: np.linspace(20., 40., 50), + 1: 10 * np.cos(np.linspace(20., 40., 50))}), + pd.DataFrame([[20.8163265306, -3.2441897554], [22.8571428571, -5.4493543395], + [24.8979591837, 8.1802597798], [26.9387755102, -1.9603923158], + [28.9795918367, -6.4045202806], [31.0204081633, 7.7616595133], + [33.0612244898, -0.6260548071], [35.1020408163, -7.1945739149], + [37.1428571429, 7.1429590909], [39.1836734694, 0.7244227635]]), + None, None, 10, id="double - 10 quantiles - no start and stop"), + pytest.param(pd.DataFrame({0: np.linspace(20., 40., 50), + 1: 10 * np.cos(np.linspace(20., 40., 50))}), + pd.DataFrame([[24.0816326531, 4.176987118], [25.9183673469, 6.3540531412], + [27.5510204081, -6.7375832739], [29.1836734694, -5.5210547907], + [30.8163265307, 7.4201766234], [32.4489795918, 4.6036642907], + [34.0816326531, -7.9893487738], [35.7142857142, -3.6159044249], + [37.3469387756, 8.4363996374], [39.1836734694, 0.7244227635]]), + 23., 47., 10, id="double - 10 quantiles"), + pytest.param(pd.DataFrame({0: np.linspace(20., 40., 50), + 1: 10 * np.cos(np.linspace(20., 40., 50))}), + pd.DataFrame([[25.9183673469, 0.6992074799], [31.6326530612, 0.9659168348], + [37.3469387755, 0.9283513371]]), + 23., 47., 3, id="double - 3 quantiles"), + pytest.param(double_frame_with_nan(), + pd.DataFrame([[25.9183673469, -2.61463028], [31.6326530612, -1.3483568721], + [37.3469387755, -1.3681570385]]), + 23., 47., 3, id="double - 3 quantiles"), + # Note: apparently pandas does not preserve row order when using groupby. + # That should not be an issue. + pytest.param(pd.DataFrame({0: np.linspace(40., 20., 50), + 1: 10 * np.cos(np.linspace(20., 40., 50))}), + pd.DataFrame([[24.0816326531, -1.7529330587], [25.9183673469, -7.9893487738], + [27.5510204082, 4.6036642907], [29.1836734693, 7.4201766234], + [30.8163265306, -5.5210547907], [32.4489795918, -6.7375832739], + [34.0816326531, 6.3540531412], [35.7142857142, 5.9520025144], + [37.3469387756, -7.0899265361], [39.1836734694, -3.2441897554]]), + 47., 23., 10, id="decreasing index - 10 quantiles"), + pytest.param(pd.DataFrame({0: np.linspace(40., 20., 50), + 1: 10 * np.cos(np.linspace(20., 40., 50))}), + pd.DataFrame([[25.9183673469, -0.8791573344], [31.6326530612, -0.9847789041], + [37.3469387755, -0.7801838536]]), + 47., 23., 3, id="decreasing index - 3 quantiles"), + pytest.param(decreasing_index_with_nan(), + pd.DataFrame([[24.0816326531, -1.7529330587], [25.9183673469, -7.9893487738], + [27.5510204082, 3.0956886966], [29.1836734693, 5.226768143], + [30.8163265306, -5.5210547907], [32.4489795918, -6.7375832739], + [34.0816326531, 3.6049946383], [35.7142857142, 3.0395129137], + [37.3469387756, -7.0899265361], [39.1836734694, -3.2441897554]]), + 47., 23., 10, id="decreasing index with nan - 10 quantiles"), + pytest.param(decreasing_index_with_nan(), + pd.DataFrame([[25.9183673469, -2.6195828582], [31.6326530612, -2.6142522246], + [37.3469387755, -3.5001480995]]), + 47., 23., 3, id="decreasing index with nan - 3 quantiles"), + # It is not sure the decimation behavior with index that has duplicated indexes is the one expected for directional wells + # Right now pandas will average based on the index value even if there are other index values in-between + # we could reconsider it if there is a need with a precise requirement + pytest.param(duplicate_index(), + pd.DataFrame([[24.0625, -7.0899265361], [26.5625, 5.9520025144], + [29.0625, 6.3540531412], [31.5625, -6.7375832739], + [34.25, 
-3.6006797089], [36.25, -3.6458372785], + [38.4375, -8.7002223092], [40.9375, 2.7898212287], + [43.4375, 8.3553039034], [45.9375, -3.8228258073]]), + 23., 47., 10, id="duplicate index - 10 quantiles"), + pytest.param(duplicate_index_with_nan(), + pd.DataFrame([[24.0625, -7.0899265361], [26.5625, 3.0395129137], + [29.0625, 3.6049946383], [31.5625, -6.7375832739], + [34.25, -3.6006797089], [36.25, -3.6458372785], + [38.4375, -8.7002223092], [40.9375, 1.0519837867], + [43.4375, 5.4893416501], [45.9375, -3.8228258073]]), + 23., 47., 10, id="duplicate index with nan - 10 quantiles"), + pytest.param(pd.DataFrame([[2.0, [1.0, 10.0]], [2.2, [2.0, 20.0]], [2.4, [3.0, 30.0]], [2.6, [4.0, 40.0]]]), + HTTPException(status_code=422), + 2.2, 2.6, 2, + id="double array"), + pytest.param(pd.DataFrame([[2.0, [1.0, 10.0]], [2.2, np.NaN], [2.4, [2.0, 20.0]], [2.6, [3.0, 30.0]]]), + HTTPException(status_code=422), + 2.2, 2.6, 2, + id="double array with nan"), + pytest.param(pd.DataFrame([[2.0, "ZONE 1"], [2.2, "Zone 2"], [2.4, "ZONE 3"], [2.6, "ZONE 4"]]), + HTTPException(status_code=422), + 2.2, 2.6, 2, + id="text"), + pytest.param(pd.DataFrame([[2.0, "ZONE 1"], [2.2, np.NaN], [2.4, "ZONE 2"], [2.6, "ZONE 3"]]), + HTTPException(status_code=422), + 2.2, 2.6, 2, + id="text with nan"), + pytest.param(pd.DataFrame([[2.0, ["ZONE 1", "AAA"]], [2.2, ["ZONE 2", "BBB"]], [2.4, ["ZONE 3", ]]]), + HTTPException(status_code=422), + 2.2, 2.6, 2, + id="text array"), + pytest.param(pd.DataFrame([[2.0, ["ZONE 1", "AAA"]], [2.2, np.NaN], [2.4, ["ZONE 3", ]]]), + HTTPException(status_code=422), + 2.2, 2.6, 2, + id="text array with NaN"), + pytest.param(pd.DataFrame([1.2, 1.5, 2.3, 2.4, 4.6, 5.8]), + HTTPException(status_code=400), + 2.2, 2.6, 2, + id="data with one column, bulk data must have an index"), + pytest.param(pd.DataFrame([[2.0], [2.2], [2.4], [2.6]]), + HTTPException(status_code=400), + 2.2, 2.6, 2, + id="data with one column, bulk data must have an index"), +] +decimated_log_data_orient = 'values' + + +@pytest.mark.parametrize("nan_conversion", [pytest.param(True, id="nan_string"), + pytest.param(False, id="native_nan")]) +@pytest.mark.parametrize("decimated_test_data, expected_result, start, stop, quantile", decimated_log_data) +def test_decimated_logs(client, decimated_test_data, expected_result, start, stop, quantile, nan_conversion): + # given + record = TestHelper.make_minimal_log_record('test_decimated_logs', id='1337') + TestHelper.post_record_to_storage(record) + content = decimated_test_data + if nan_conversion: + content = content.fillna("NaN") + content = content.to_json(orient='values') + + response = client.post(TestHelper.build_url('/logs/1337/data?orient=' + decimated_log_data_orient), + content, + headers=TestHelper.BASE_HEADERS) + + assert response.status_code == 200 + store_response = CreateUpdateRecordsResponse.parse_raw(response.content) + assert store_response.record_count == 1 + assert store_response.record_ids[0] == '1337' + + # when read decimated + actual = TestHelper.get_record_from_storage('1337') + bulk_id = TestHelper.get_bulk_id_from_record(actual) + assert bulk_id + + params = {'quantiles': quantile} + if start is not None: + params.update({'start': start}) + if stop is not None: + params.update({'stop': stop}) + response = client.get(TestHelper.build_url('/logs/1337/decimated?orient=' + decimated_log_data_orient), + params=params, + headers=TestHelper.BASE_HEADERS) + data = response.json() + if isinstance(expected_result, HTTPException): + assert response.status_code == 
expected_result.status_code + else: + assert response.status_code == 200 + actual_df = pd.DataFrame(data).replace("NaN", np.NaN) + pd.testing.assert_frame_equal(expected_result, actual_df) diff --git a/tests/unit/routers/delete_recursive_test.py b/tests/unit/routers/delete_recursive_test.py new file mode 100644 index 0000000000000000000000000000000000000000..f59280ad15915090c1e0f1d9cf1bf100f7ee6ff4 --- /dev/null +++ b/tests/unit/routers/delete_recursive_test.py @@ -0,0 +1,308 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +import mock +from odes_search.models import CursorQueryResponse +from starlette import status +from starlette.exceptions import HTTPException as starletteHTTPException +from fastapi import HTTPException as fastApiHTTPException +from odes_search.exceptions import UnexpectedResponse as clientHTTPException + +from app.clients import StorageRecordServiceClient +from app.routers.ddms_v2.storage_helper import StorageHelper +from app.model.entity_utils import Entity, get_kind, format_kind +from app.utils import Context +from tests.unit.test_utils import create_mock_class, make_record +from tests.unit.app_conf_test import testing_context + +StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) + + +@pytest.fixture +def data_partition(): + return 'test_data_partition' + + +@pytest.fixture +def entity_source(): + return 'test_source' + + +@pytest.fixture +def well_record(data_partition, entity_source): + return make_record( + id='id:source_id', + kind=get_kind(data_partition, entity_source, Entity.WELL)) + + +@pytest.fixture +def with_patched_get_record(well_record): + """ patch storage storage to return well_record of get_record call """ + with mock.patch.object( + StorageRecordServiceClientMock, 'get_record', + return_value=well_record + ): + yield + + +@pytest.mark.asyncio +async def test_delete_recursive_only_delete_entity_provided(testing_context, + data_partition, + entity_source, + well_record, + with_patched_get_record): + expect_delete_ids = ['id:sub1', 'id:sub2', 'id:sub3'] + entity_types = [Entity.LOGSET, Entity.MARKER] + + mocked_query_response_dict = { + 'results': [ + # expected to be delete + {'id': expect_delete_ids[0], 'kind': get_kind(data_partition, entity_source, entity_types[0])}, + {'id': expect_delete_ids[1], 'kind': get_kind(data_partition, entity_source, entity_types[0])}, + {'id': expect_delete_ids[2], 'kind': get_kind(data_partition, entity_source, entity_types[1])}, + + # expected to NOT be delete + {'id': 'id:no_delete_1', 'kind': format_kind(data_partition, entity_source, 'otherEntity', '1')}, + {'id': 'id:no_delete_2', 'kind': format_kind(data_partition, entity_source, 'otherEntity', '1')}, + ] + } + expect_delete_ids.append(well_record.id) + mocked_query_response = CursorQueryResponse(**mocked_query_response_dict) + + with mock.patch( + 'app.routers.search.search_wrapper.SearchWrapper.query_cursorless', + return_value=mocked_query_response + ): + with mock.patch.object( + 
StorageRecordServiceClientMock, 'delete_record', + wraps=StorageRecordServiceClientMock.delete_record) as moc_storage_delete_record: + # when + await StorageHelper.delete_recursively( + testing_context, + well_record.id, 'well', + [Entity.LOGSET, Entity.MARKER], + data_partition, + None, + StorageRecordServiceClientMock + ) + + # then + actual_deleted_id = set([call.kwargs['id'] for call in moc_storage_delete_record.call_args_list]) + assert set(expect_delete_ids) == actual_deleted_id + + +@pytest.mark.asyncio +async def test_delete_failure_on_parent_dont_delete_children(testing_context, + data_partition, + entity_source, + well_record, + with_patched_get_record): + # if deleting the parent record fails, none of its children should be deleted + + sub_ids = [f'id:{i}' for i in range(10)] + sub_kind = get_kind(data_partition, entity_source, Entity.LOGSET) + expect_delete_ids = sub_ids + [well_record.id] + with mock.patch( + 'app.routers.search.search_wrapper.SearchWrapper.query_cursorless', + return_value=CursorQueryResponse(**{ + 'results': [ + {'id': rid, 'kind': sub_kind} for rid in sub_ids + ] + }) + ): + with mock.patch.object( + StorageRecordServiceClientMock, 'delete_record', + wraps=StorageRecordServiceClientMock.delete_record, + side_effect=RuntimeError('simulate error')) as moc_storage_delete_record: + with pytest.raises(RuntimeError): # expect to raise + await StorageHelper.delete_recursively( + testing_context, + well_record.id, 'well', + [Entity.LOGSET], + data_partition, + None, + StorageRecordServiceClientMock + ) + + # only the parent delete is attempted before the error propagates + assert moc_storage_delete_record.call_count == 1 + assert moc_storage_delete_record.call_args_list[0].kwargs['id'] == well_record.id + + +@pytest.mark.asyncio +async def test_delete_should_keep_delete_heterogeneous_failure( + testing_context, + data_partition, + entity_source, + well_record, + with_patched_get_record): + moc_logger = mock.MagicMock() + ctx = Context.set_current_with_value(logger=moc_logger) + + # in case of exception on delete call, should still call delete on all of them + + sub_ids = [f'id:{i}' for i in range(10)] + sub_kind = get_kind(data_partition, entity_source, Entity.LOGSET) + expect_delete_ids = sub_ids + [well_record.id] + with mock.patch( + 'app.routers.search.search_wrapper.SearchWrapper.query_cursorless', + return_value=CursorQueryResponse(**{ + 'results': [ + {'id': rid, 'kind': sub_kind} for rid in sub_ids + ] + }) + ): + async def delete_success_only_well(*args, **kwargs): + if kwargs['id'] == 'id:0': + raise starletteHTTPException(status_code=401, detail='UNAUTHORIZED') + if kwargs['id'] != well_record.id: + raise RuntimeError('simulate error') + + with mock.patch.object( + StorageRecordServiceClientMock, 'delete_record', + wraps=StorageRecordServiceClientMock.delete_record, + side_effect=delete_success_only_well) as moc_storage_delete_record: + with pytest.raises(fastApiHTTPException) as exp_info: # expect to raise + await StorageHelper.delete_recursively( + ctx, + well_record.id, 'well', + [Entity.LOGSET], + data_partition, + None, + StorageRecordServiceClientMock + ) + + # mixed failure types are reported as a single 500 status code + assert exp_info.value.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + + # but still expected to call delete on each + actual_deleted_id = set([call.kwargs['id'] for call in moc_storage_delete_record.call_args_list]) + assert set(expect_delete_ids) == actual_deleted_id + + # check errors are logged + assert moc_logger.error.call_count == len(sub_ids) + + +@pytest.mark.asyncio 
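+# every child delete fails with the same 403 error here, so that status code is expected to be propagated as-is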
+async def test_delete_should_keep_delete_homogenous_failure(
+        testing_context,
+        data_partition,
+        entity_source,
+        well_record,
+        with_patched_get_record):
+
+    moc_logger = mock.MagicMock()
+    ctx = Context.set_current_with_value(logger=moc_logger)
+
+    # in case of exception on delete call, should still call delete on all of them
+
+    sub_ids = [f'id:{i}' for i in range(10)]
+    sub_kind = get_kind(data_partition, entity_source, Entity.LOGSET)
+    expect_delete_ids = sub_ids + [well_record.id]
+    with mock.patch(
+            'app.routers.search.search_wrapper.SearchWrapper.query_cursorless',
+            return_value=CursorQueryResponse(**{
+                'results': [
+                    {'id': rid, 'kind': sub_kind} for rid in sub_ids
+                ]
+            })
+    ):
+        async def delete_success_only_well(*args, **kwargs):
+            if kwargs['id'] != well_record.id:
+                raise starletteHTTPException(status_code=403, detail="Forbidden")
+
+        with mock.patch.object(
+                StorageRecordServiceClientMock, 'delete_record',
+                wraps=StorageRecordServiceClientMock.delete_record,
+                side_effect=delete_success_only_well) as moc_storage_delete_record:
+            with pytest.raises(fastApiHTTPException) as exp_info:  # expect to raise
+                await StorageHelper.delete_recursively(
+                    ctx,
+                    well_record.id, 'well',
+                    [Entity.LOGSET],
+                    data_partition,
+                    None,
+                    StorageRecordServiceClientMock
+                )
+
+            # the original status is kept
+            assert exp_info.value.status_code == status.HTTP_403_FORBIDDEN
+
+            # but still expected to call delete on each
+            actual_deleted_id = set([call.kwargs['id'] for call in moc_storage_delete_record.call_args_list])
+            assert set(expect_delete_ids) == actual_deleted_id
+
+            # check errors are logged
+            assert moc_logger.error.call_count == len(sub_ids)
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize('exception', [starletteHTTPException(status_code=status.HTTP_404_NOT_FOUND),
+                                       fastApiHTTPException(status_code=status.HTTP_404_NOT_FOUND),
+                                       clientHTTPException(status_code=status.HTTP_404_NOT_FOUND,
+                                                           reason_phrase='',
+                                                           content=b'',
+                                                           headers={})])
+async def test_delete_404_of_sub_delete_is_valid(testing_context,
+                                                 data_partition,
+                                                 entity_source,
+                                                 well_record,
+                                                 with_patched_get_record,
+                                                 exception):
+    with mock.patch(
+            'app.routers.search.search_wrapper.SearchWrapper.query_cursorless',
+            return_value=CursorQueryResponse(**{
+                'results': [
+                    {'id': 'id:sub',
+                     'kind': get_kind(data_partition, entity_source, Entity.LOGSET)}]
+            })
+    ):
+        async def delete_success_only_well(*args, **kwargs):
+            print(args)
+            print(kwargs)
+            if kwargs['id'] != well_record.id:
+                raise exception
+
+        with mock.patch.object(
+                StorageRecordServiceClientMock, 'delete_record',
+                wraps=StorageRecordServiceClientMock.delete_record,
+                side_effect=delete_success_only_well):
+            # no exception raised
+            await StorageHelper.delete_recursively(
+                testing_context,
+                well_record.id, 'well',
+                [Entity.LOGSET],
+                data_partition,
+                None,
+                StorageRecordServiceClientMock)
+
+
+@pytest.mark.asyncio
+@pytest.mark.parametrize('exception',
+                         [fastApiHTTPException(status_code=status.HTTP_403_FORBIDDEN),
+                          fastApiHTTPException(status_code=status.HTTP_404_NOT_FOUND),
+                          fastApiHTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR),
+                          RuntimeError()])
+async def test_delete_failure_get_record(testing_context,
+                                         data_partition,
+                                         entity_source,
+                                         well_record,
+                                         exception):
+    with StorageRecordServiceClientMock.set_throw('get_record', exception):
+        with pytest.raises(exception.__class__):
+            await StorageHelper.delete_recursively(
+                testing_context,
+                well_record.id, 'well', [],
+                data_partition, None,
StorageRecordServiceClientMock) diff --git a/tests/unit/routers/dipset/__init__.py b/tests/unit/routers/dipset/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/routers/dipset/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/routers/dipset/dip_ddms_v2.py b/tests/unit/routers/dipset/dip_ddms_v2.py new file mode 100644 index 0000000000000000000000000000000000000000..d215591dd53a3c124d4e7f9f70360d11ca910cf6 --- /dev/null +++ b/tests/unit/routers/dipset/dip_ddms_v2.py @@ -0,0 +1,67 @@ +import json +import pytest +import mock +from fastapi import Header, HTTPException + +from fastapi.testclient import TestClient +import starlette.status as status + +from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage +from app.middleware import require_data_partition_id +from app.model.model_curated import dipset +from app.utils import Context +from app.wdms_app import wdms_app, app_injector +from app.clients import * +from app.auth.auth import require_opendes_authorized_user +from tests.unit.errors.error_handler_test import StorageRecordServiceBlobStorageMock + +from tests.unit.test_utils import patch_async, create_mock_class, nope_logger_fixture +from odes_storage.exceptions import UnexpectedResponse + +StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) +SearchServiceClientMock = create_mock_class(SearchServiceClient) +StorageRecordServiceBlobStorageMock = create_mock_class(StorageRecordServiceBlobStorage) + +tests_parameters = [ + ('/ddms/v2/dipsets', dipset(id="opendes:doc:00000000000000000000000000000000000", data={})), +] + +@pytest.fixture +def client(nope_logger_fixture): + async def bypass_authorization(): + # empty method + pass + + async def set_default_partition(data_partition_id: str =Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + async def build_mock_storage(): + return StorageRecordServiceClientMock() + + async def build_mock_search(): + return SearchServiceClientMock() + + app_injector.register(StorageRecordServiceClient, build_mock_storage) + app_injector.register(SearchServiceClient, build_mock_search) + + # override authentication dependency + previous_overrides = wdms_app.dependency_overrides + + try: + wdms_app.dependency_overrides[require_opendes_authorized_user] = bypass_authorization + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + client = TestClient(wdms_app) + yield client + finally: + wdms_app.dependency_overrides = previous_overrides # clean up + +@pytest.mark.parametrize('base_url, record_obj', tests_parameters) +def test_get_record_not_found_case_dipset(client, base_url, record_obj): + record_id = record_obj.id + exception = UnexpectedResponse(status_code=status.HTTP_404_NOT_FOUND, reason_phrase="not found", content=b'', headers=Header('test')) + + with 
StorageRecordServiceClientMock.set_throw('get_record', exception): + # when + response = client.get(f'{base_url}/{record_id}/dips', headers={'data-partition-id': 'testing_partition'}) + assert response.status_code == status.HTTP_404_NOT_FOUND + assert 'not found' in response.text.lower() \ No newline at end of file diff --git a/tests/unit/routers/dipset/dip_model_test.py b/tests/unit/routers/dipset/dip_model_test.py new file mode 100644 index 0000000000000000000000000000000000000000..c6b29c2d1596132c84d68f34239d908acd99389f --- /dev/null +++ b/tests/unit/routers/dipset/dip_model_test.py @@ -0,0 +1,162 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import pytest +from pydantic import ValidationError + +from app.model.model_curated import ValueWithUnit +from app.routers.dipset.dip_model import Dip + + +def test_dip_model_nominal(): + + try: + Dip( + reference=ValueWithUnit(unitKey="meter", value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + quality=ValueWithUnit(unitKey="unitless", value=0.5), + xCoordinate=ValueWithUnit(unitKey="meter", value=18.0), + yCoordinate=ValueWithUnit(unitKey="meter", value=18.0), + zCoordinate=ValueWithUnit(unitKey="meter", value=18.0), + classification="fracture", + ) + except Exception: + pytest.fail("should not fail") + + +def test_dip_model_optional_fields(): + Dip( + reference=ValueWithUnit(unitKey="meter", value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + ) + + +def test_dip_model_missing_all_mandatory_field(): + with pytest.raises(ValidationError): + Dip( + quality=ValueWithUnit(unitKey="unitless", value=0.5), + xCoordinate=ValueWithUnit(unitKey="meter", value=18.0), + yCoordinate=ValueWithUnit(unitKey="meter", value=18.0), + zCoordinate=ValueWithUnit(unitKey="meter", value=18.0), + classification="fracture", + ) + + +def test_dip_model_missing_reference(): + with pytest.raises(ValidationError): + Dip(azimuth=ValueWithUnit(unitKey="dega", value=34), inclination=ValueWithUnit(unitKey="dega", value=18.0)) + + +def test_dip_model_missing_azimuth(): + with pytest.raises(ValidationError): + Dip( + reference=ValueWithUnit(unitKey="meter", value=1000.0), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + ) + + +def test_dip_model_missing_inclination(): + with pytest.raises(ValidationError): + Dip( + reference=ValueWithUnit(unitKey="meter", value=1000.0), azimuth=ValueWithUnit(unitKey="dega", value=18.0), + ) + + +def test_dip_model_missing_reference_and_azimuth(): + with pytest.raises(ValidationError): + Dip(inclination=ValueWithUnit(unitKey="dega", value=18.0)) + + +@pytest.mark.parametrize("unit", ["m", "meter", "Meter", "METER", "meters", "Meters", "METERS"]) +def test_dip_model_validator_unit(unit): + Dip( + reference=ValueWithUnit(unitKey=unit, value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + ) + + 
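+# The negative cases below pair with the positive ones above: these tests encode that
+# reference is accepted in meters (the aliases listed above), azimuth and inclination
+# in "dega", and quality as a unitless value within [0, 1].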
+@pytest.mark.parametrize("unit", ["ft", "", None]) +def test_dip_model_reference_with_wrong_unit(unit): + with pytest.raises(ValidationError) as excinfo: + Dip( + reference=ValueWithUnit(unitKey=unit, value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + ) + + +@pytest.mark.parametrize("unit", ["rad", "", None]) +def test_dip_model_azimuth_with_wrong_unit(unit): + with pytest.raises(ValidationError) as excinfo: + Dip( + reference=ValueWithUnit(unitKey="m", value=1000.0), + azimuth=ValueWithUnit(unitKey=unit, value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + ) + + +@pytest.mark.parametrize("unit", ["rad", "", None]) +def test_dip_model_inclination_with_wrong_unit(unit): + with pytest.raises(ValidationError) as excinfo: + Dip( + reference=ValueWithUnit(unitKey="m", value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey=unit, value=18.0), + ) + + +@pytest.mark.parametrize("unit", ["unitless", "Unitless", "UNITLESS", "UnitLess"]) +def test_dip_model_quality_unit(unit): + Dip( + reference=ValueWithUnit(unitKey="m", value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + quality=ValueWithUnit(unitKey=unit, value=0.5), + ) + + +@pytest.mark.parametrize("unit", ["m", "", None]) +def test_dip_model_quality_unit_negative(unit): + with pytest.raises(ValidationError) as excinfo: + Dip( + reference=ValueWithUnit(unitKey="m", value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + quality=ValueWithUnit(unitKey=unit, value=0.5), + ) + + +@pytest.mark.parametrize("value", [0, 0.42, 1]) +def test_dip_model_quality_value_validation(value): + Dip( + reference=ValueWithUnit(unitKey="m", value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + quality=ValueWithUnit(unitKey="unitless", value=value), + ) + + +@pytest.mark.parametrize("value", [-1, 1.0001, 42]) +def test_dip_model_quality_value_validation_negative(value): + with pytest.raises(ValidationError) as excinfo: + Dip( + reference=ValueWithUnit(unitKey="m", value=1000.0), + azimuth=ValueWithUnit(unitKey="dega", value=34), + inclination=ValueWithUnit(unitKey="dega", value=18.0), + quality=ValueWithUnit(unitKey="unitless", value=value), + ) + diff --git a/tests/unit/routers/log_recognition/__init__.py b/tests/unit/routers/log_recognition/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/routers/log_recognition/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ diff --git a/tests/unit/routers/log_recognition/log_recognition_test.py b/tests/unit/routers/log_recognition/log_recognition_test.py new file mode 100644 index 0000000000000000000000000000000000000000..7e6223f637986dac9fe9d43093e178c51dfa7ffd --- /dev/null +++ b/tests/unit/routers/log_recognition/log_recognition_test.py @@ -0,0 +1,368 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import time +import mock +import pytest +import starlette.status as status +from fastapi import Header +from fastapi.testclient import TestClient +from odes_storage import models as m +from odes_storage.exceptions import UnexpectedResponse +from odes_storage.models import CreateUpdateRecordsResponse + +from app.auth.auth import require_opendes_authorized_user +from app.clients import * +from app.helper import traces +from app.middleware import require_data_partition_id +from app.routers.logrecognition.log_recognition import family_processor_manager +from app.utils import Context +from app.wdms_app import wdms_app +from tests.unit.test_utils import create_mock_class, nope_logger_fixture + +StorageRecordServiceClientMock = create_mock_class(StorageRecordServiceClient) +SearchServiceClientMock = create_mock_class(SearchServiceClient) + + +@pytest.fixture +def client(nope_logger_fixture): + async def bypass_authorization(): + pass + + async def set_default_partition(data_partition_id: str = Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + mock_storage = mock.AsyncMock(return_value=StorageRecordServiceClientMock()) + with mock.patch('app.routers.logrecognition.family_processor_manager.get_storage_record_service', mock_storage): + with mock.patch('app.routers.logrecognition.log_recognition.get_storage_record_service', mock_storage): + # override authentication dependency + previous_overrides = wdms_app.dependency_overrides + + try: + wdms_app.dependency_overrides[require_opendes_authorized_user] = bypass_authorization + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + client = TestClient(wdms_app) + yield client + finally: + wdms_app.dependency_overrides = previous_overrides # clean up + + +@pytest.fixture(autouse=True) +def setup_teardown(): + # setup + # run the test + yield + # teardown + family_processor_manager._processors["opendes"] = None + family_processor_manager._catalog_lifetime = 200 + + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + + +@pytest.mark.parametrize("label, unit, expected", [ + ('GR', 'gAPI', {'family': 'Gamma Ray', 'family_type': ['Gamma Ray'], 'log_unit': 'gAPI', 'base_unit': 'gAPI'}), + ('TVD', 'cm', {'family': 'True Vertical Depth', 'family_type': ['Reference'], 'log_unit': 'cm', 'base_unit': 'ft'}), + ('DTC', 'us/cm', + {'family': 'Compressional Slowness', 'family_type': ['Slowness'], 'log_unit': 'us/cm', 'base_unit': 'us/ft'}), + ('TRUESTRATIGRAPHICTHICKNESS1', 'ft', + 
{"family": "Thickness", "family_type": ["Formation Geometry", "Rock Quality"], "log_unit": "ft", "base_unit": "ft"}) +]) +def test_family_assignment_rules(client, label, unit, expected): + with StorageRecordServiceClientMock.set_throw( + 'get_record', + UnexpectedResponse(status_code=status.HTTP_404_NOT_FOUND, reason_phrase="", content=None, headers=None)): + response = client.post("/log-recognition/family", + json={"label": label, + "log_unit": unit}) + assert response.status_code == status.HTTP_200_OK + response_json = response.json() + assert response_json == expected + + +def test_family_assignment_rules_not_found(client): + with StorageRecordServiceClientMock.set_throw( + 'get_record', + UnexpectedResponse(status_code=status.HTTP_404_NOT_FOUND, reason_phrase="", content=None, headers=None)): + response = client.post("/log-recognition/family", + json={"label": "unknown", + "log_unit": ""}) + assert response.status_code == status.HTTP_400_BAD_REQUEST + + +def test_upload_good_catalog(client): + good_catalog = { + "data": { + "family_catalog": [{"unit": "f", "family": "fake family", "rule": "FF"}, + {"unit": "g", "family": "other fake family", "rule": "OF"}], + "main_family_catalog": [{"MainFamily": "Fake", "Family": "fake family", "Unit": "ef"}, + {"MainFamily": "Other Fake", "Family": "other fake family", "Unit": "jai"}] + }, + "legal": { + "legaltags": [ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries": [ + "US" + ], + "status": "compliant" + }, + "acl": { + "viewers": [ + "data.default.viewers@opendes.p4d.cloud.slb-ds.com" + ], + "owners": [ + "data.default.owners@opendes.p4d.cloud.slb-ds.com" + ] + } + } + + expected_response = CreateUpdateRecordsResponse(recordCount=1, recordIds=['rec1']) + moc_create_or_update_records = mock.AsyncMock(return_value=expected_response) + + with mock.patch.object(StorageRecordServiceClientMock, 'create_or_update_records', moc_create_or_update_records): + response = client.put("/log-recognition/upload-catalog", json=good_catalog) + assert response.status_code == status.HTTP_200_OK + assert CreateUpdateRecordsResponse.parse_raw(response.text) == expected_response + + +@pytest.mark.parametrize("label, unit, code, expected", [ + ('fantomas', '', status.HTTP_400_BAD_REQUEST, {}), + ('FF', 'f', status.HTTP_200_OK, + {'family': 'fake family', 'family_type': ['Fake'], 'log_unit': 'f', 'base_unit': 'ef'}), + ('OF', 'g', status.HTTP_200_OK, + {'family': 'other fake family', 'family_type': ['Other Fake'], 'log_unit': 'g', 'base_unit': 'jai'}), + ('AOF', 'gg', status.HTTP_200_OK, + {'family': "another fake family", 'family_type': ["Other Fake", "Another fake family"], + 'log_unit': 'gg', 'base_unit': 'jaijai'}), + ('DTC', 'us/cm', status.HTTP_200_OK, + {'family': 'Compressional Slowness', 'family_type': ['Slowness'], 'log_unit': 'us/cm', 'base_unit': 'us/ft'}) +]) +def test_family_assignment_rules_custom(client, label, unit, code, expected): + record_obj = m.Record( + data={ + "family_catalog": [ + {"unit": "f", "family": "fake family", "rule": "FF"}, + {"unit": "g", "family": "other fake family", "rule": "OF"}, + {"unit": "gg", "family": "another fake family", "rule": "AOF"} + ], + "main_family_catalog": [ + {"MainFamily": "Fake", "Family": "fake family", "Unit": "ef"}, + {"MainFamily": "Other Fake", "Family": "other fake family", "Unit": "jai"}, + {"MainFamily": ["Other Fake", "Another fake family"], "Family": "another fake family", "Unit": "jaijai"} + ] + }, + kind="", + acl=m.StorageAcl(viewers=[], owners=[]), + legal={} + ) + + 
moc_storage = mock.AsyncMock(return_value=record_obj) + + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc_storage): + response = client.post("/log-recognition/family", + json={"label": label, + "log_unit": unit}) + + assert response.status_code == code + if code == status.HTTP_200_OK: + response_json = response.json() + assert response_json == expected + + +@pytest.mark.parametrize("label, unit, code, expected", [ + ('OF', 'F', status.HTTP_400_BAD_REQUEST, {}), + ('DTC', 'us/cm', status.HTTP_200_OK, + {'family': 'Compressional Slowness', 'family_type': ['Slowness'], 'log_unit': 'us/cm', 'base_unit': 'us/ft'}) +]) +def test_family_assignment_rules_custom_catalog_not_found(client, label, unit, code, expected): + with StorageRecordServiceClientMock.set_throw( + 'get_record', + UnexpectedResponse(status_code=status.HTTP_404_NOT_FOUND, reason_phrase="", content=None, headers=None)): + response = client.post("/log-recognition/family", + json={"label": label, + "log_unit": unit}) + + assert response.status_code == code + if code == status.HTTP_200_OK: + response_json = response.json() + assert response_json == expected + + +def test_failing_storage(client): + unexpected_response = UnexpectedResponse(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, + reason_phrase="", content=b'content', headers=None) + + with StorageRecordServiceClientMock.set_throw('get_record', unexpected_response): + response = client.post("/log-recognition/family", + json={"label": "MD", + "log_unit": "M"}) + + assert response.status_code == status.HTTP_500_INTERNAL_SERVER_ERROR + + +def test_unvalidate_catalogs(client): + record_obj = m.Record( + data={ + "family_catalog": [{"unit": "f", "family": "fake family", "rule": "FF"}, + {"unit": "g", "family": "other fake family", "rule": "OF"}], + "main_family_catalog": [{"MainFamily": "Fake", "Family": "fake family", "Unit": "ef"}, + {"MainFamily": "Other Fake", "Family": "other fake family", "Unit": "jai"}] + }, + kind="", + acl=m.StorageAcl(viewers=[], owners=[]), + legal={} + ) + + moc_storage = mock.AsyncMock(return_value=record_obj) + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc_storage): + # Force a big catalog_lifetime + family_processor_manager._catalog_lifetime = 1000 + + moc_storage.assert_not_called() + response = client.post("/log-recognition/family", + json={"label": "FF", + "log_unit": "f"}) + assert response.status_code == status.HTTP_200_OK + assert moc_storage.call_count == 1 + + response = client.post("/log-recognition/family", + json={"label": "FF", + "log_unit": "f"}) + assert response.status_code == status.HTTP_200_OK + assert moc_storage.call_count == 1 + + # Force a small catalog_lifetime + family_processor_manager._catalog_lifetime = 1 + # Sorry we need to sleep 1 second + time.sleep(1) + response = client.post("/log-recognition/family", + json={"label": "FF", + "log_unit": "f"}) + assert response.status_code == status.HTTP_200_OK + assert moc_storage.call_count == 2 + + time.sleep(1) + response = client.post("/log-recognition/family", + json={"label": "FF", + "log_unit": "f"}) + assert response.status_code == status.HTTP_200_OK + assert moc_storage.call_count == 3 + + +nb_storage_call = 0 + + +def test_invalidate_default_catalogs(client): + def response_fn(*args, **kwargs): + global nb_storage_call + nb_storage_call += 1 + raise UnexpectedResponse(status_code=status.HTTP_404_NOT_FOUND, reason_phrase='', content=b'', headers=None) + + with StorageRecordServiceClientMock.set_answer('get_record', 
response_fn): + global nb_storage_call + response = client.post("/log-recognition/family", + json={"label": "GR", + "log_unit": "gApi"}) + assert response.status_code == status.HTTP_200_OK + assert nb_storage_call == 1 + + response = client.post("/log-recognition/family", + json={"label": "GR", + "log_unit": "gApi"}) + assert response.status_code == status.HTTP_200_OK + assert nb_storage_call == 1 # we are inside the catalog_lifetime so no call to DE + + family_processor_manager._catalog_lifetime = 1 + time.sleep(1) + response = client.post("/log-recognition/family", + json={"label": "GR", + "log_unit": "gApi"}) + assert response.status_code == status.HTTP_200_OK + assert nb_storage_call == 2 # catalog_lifetime excedeed, one more call to DE expected + + +def test_no_catalog(client): + record_obj = m.Record( + data={ + "main_family_catalog": [{"MainFamily": "Fake", "Family": "fake family", "Unit": "ef"}, + {"MainFamily": "Other Fake", "Family": "other fake family", "Unit": "jai"}] + }, + kind="", + acl=m.StorageAcl(viewers=[], owners=[]), + legal={} + ) + + moc_storage = mock.AsyncMock(return_value=record_obj) + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc_storage): + response = client.post("/log-recognition/family", + json={"label": "MD", + "log_unit": "m"}) + assert response.status_code == status.HTTP_200_OK + + +def test_no_main_family_catalog(client): + record_obj = m.Record( + data={ + "family_catalog": [{"unit": "f", "family": "fake family", "rule": "FF"}, + {"unit": "g", "family": "other fake family", "rule": "OF"}] + }, + kind="", + acl=m.StorageAcl(viewers=[], owners=[]), + legal={} + ) + + moc_storage = mock.AsyncMock(return_value=record_obj) + with mock.patch.object(StorageRecordServiceClientMock, 'get_record', moc_storage): + response = client.post("/log-recognition/family", + json={"label": "OF", + "log_unit": "g"}) + assert response.status_code == status.HTTP_200_OK + response_json = response.json() + assert response_json == {"family": "other fake family", "family_type": None, "log_unit": "g", "base_unit": None} + + +def test_swagger_generation(): + swagger_dict = wdms_app.openapi() + assert swagger_dict["paths"].get("/log-recognition/family", None) is not None + assert swagger_dict["paths"]["/log-recognition/family"]["post"]["summary"] == 'Recognize family and unit' + assert swagger_dict["paths"]["/log-recognition/family"]["post"][ + "description"] == 'Find the most probable family and ' \ + 'unit using family assignment rule based catalogs. User defined catalog will have the priority.' + assert \ + swagger_dict["paths"]["/log-recognition/family"]["post"]["requestBody"]["content"]["application/json"][ + "schema"][ + "$ref"] == '#/components/schemas/GuessRequest' + assert swagger_dict["components"]["schemas"]["GuessRequest"]["example"] == {'label': 'GRD', 'log_unit': 'GAPI', + 'description': 'LDTD Gamma Ray'} + + assert swagger_dict["paths"].get("/log-recognition/upload-catalog", None) is not None + assert swagger_dict["paths"]["/log-recognition/upload-catalog"]["put"][ + "summary"] == 'Upload user-defined catalog with family assignment rules' + assert swagger_dict["paths"]["/log-recognition/upload-catalog"]["put"]["description"] == """Upload user-defined catalog with family assignment rules for specific partition ID. + If there is an existing catalog, it will be replaced. It takes maximum of 5 mins to replace the existing catalog. 
+ Hence, any call to retrieve the family should be made after 5 mins of uploading the catalog""" + assert \ + swagger_dict["paths"]["/log-recognition/upload-catalog"]["put"]["requestBody"]["content"]["application/json"][ + "schema"][ + "$ref"] == '#/components/schemas/CatalogRecord' + assert swagger_dict["components"]["schemas"]["CatalogRecord"]["example"] == { + 'acl': {'viewers': ['abc@example.com, cde@example.com'], 'owners': ['abc@example.com, cde@example.com']}, + 'legal': {'legaltags': ['opendes-public-usa-dataset-1'], 'otherRelevantDataCountries': ['US']}, + 'data': {'family_catalog': [{'unit': 'ohm.m', 'family': 'Medium Resistivity', 'rule': 'MEDR'}], + 'main_family_catalog': [ + {'MainFamily': 'Resistivity', 'Family': 'Medium Resistivity', 'Unit': 'OHMM'}]}} + + assert swagger_dict is not None diff --git a/tests/unit/routers/trajectory/__init__.py b/tests/unit/routers/trajectory/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/routers/trajectory/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/routers/trajectory/trajectory_ddms_v2_test.py b/tests/unit/routers/trajectory/trajectory_ddms_v2_test.py new file mode 100644 index 0000000000000000000000000000000000000000..45c6f8c63fccc0b24c6ac09e10fac7a91d10b367 --- /dev/null +++ b/tests/unit/routers/trajectory/trajectory_ddms_v2_test.py @@ -0,0 +1,226 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
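+
+# Unit tests for the trajectory ddms_v2 endpoints: record creation and deletion, bulk
+# data upload/retrieval in the supported pandas orients, per-channel reads, and
+# validation of the 'orient' query parameter.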
+ +import json +from tempfile import TemporaryDirectory + +from fastapi.testclient import TestClient +from fastapi import Header +from starlette.status import HTTP_204_NO_CONTENT, HTTP_422_UNPROCESSABLE_ENTITY, HTTP_400_BAD_REQUEST +import pytest + +from osdu.core.api.storage.blob_storage_base import BlobStorageBase +from osdu.core.api.storage.blob_storage_local_fs import LocalFSBlobStorage + +from app.clients.storage_service_blob_storage import StorageRecordServiceBlobStorage +from app.clients.storage_service_client import StorageRecordServiceClient + +from app.helper import traces +from app.auth.auth import require_opendes_authorized_user +from app.middleware import require_data_partition_id +from app.wdms_app import wdms_app, app_injector + +from app.utils import Context +from tests.unit.test_utils import nope_logger_fixture + +# Initialize traces exporter in app, like it is in app's startup decorator +wdms_app.trace_exporter = traces.CombinedExporter(service_name='tested-ddms') + +DATA_PARTITION_ID = 'test_partition' +BASE_HEADERS = {'data-partition-id': DATA_PARTITION_ID} +URL_PREFIX = '/ddms/v2' + +acl = { + "owners": [ + "data.default.owners@opendes.p4d.cloud.slb-ds.com" + ], + "viewers": [ + "data.default.viewers@opendes.p4d.cloud.slb-ds.com" + ] +} + +legal = { + "legaltags": [ + "opendes-public-usa-dataset-1" + ], + "otherRelevantDataCountries": ["US", "FR"], +} + +trajectory_kind = "opendes:wks:trajectory:1.0.5" +trajectory_id = "opendes:wddms-test_CLA_traj-trajectory:0000" +trajectory_name = "trajectory_test_CLA_traj-trajectory_name" + + +traj = { + "acl": acl, + "legal": legal, + "kind": trajectory_kind, + "id": trajectory_id, + "data": { + "name": trajectory_name + } + } + +headers = { "data-partition-id": "DATA_PARTITION_ID" } + + + +@pytest.fixture +def client(nope_logger_fixture): + with TemporaryDirectory() as tmpdir: + async def storage_service_builder(*args, **kwargs): + return StorageRecordServiceBlobStorage(LocalFSBlobStorage(directory=tmpdir), 'p1', 'c1') + + async def blob_storage_builder(*args, **kwargs): + return LocalFSBlobStorage(directory=tmpdir) + + async def set_default_partition(data_partition_id: str = Header('opendes')): + Context.set_current_with_value(partition_id=data_partition_id) + + app_injector.register(BlobStorageBase, blob_storage_builder) + app_injector.register(StorageRecordServiceClient, storage_service_builder) + + async def do_nothing(): + # empty method + pass + + wdms_app.dependency_overrides[require_opendes_authorized_user] = do_nothing + wdms_app.dependency_overrides[require_data_partition_id] = set_default_partition + + yield TestClient(wdms_app) + + wdms_app.dependency_overrides = {} # clean up + + +@pytest.mark.parametrize("orient_value, data", +[ + ( + "split", + { + "columns": ["MD", "X", "Y", "Z"], + "index": [0, 1, 2], + "data": [[1.0, 10, 11, 12], [1.5, 20, 21, 22], [2.0, 30, 31, 32]] + } + ), + ( + "index", + { + "0": {"MD": 1.0, "X": 10, "Y": 11, "Z": 12}, + "1": {"MD": 1.5, "X": 20, "Y": 21, "Z": 22}, + "2": {"MD": 2.0, "X": 30, "Y": 31, "Z": 32}, + } + ), + ( + "columns", + { + "MD": {"0": 1.0, "1": 1.5, "2": 2.0}, + "X": {"0": 10, "1": 20, "2": 30}, + "Y": {"0": 11, "1": 21, "2": 31}, + "Z": {"0": 12, "1": 22, "2": 32}, + } + ), + ( + "records", + [ + {"MD": 1.0, "X": 10, "Y": 11, "Z": 12}, + {"MD": 1.5, "X": 20, "Y": 21, "Z": 22}, + {"MD": 2.0, "X": 30, "Y": 31, "Z": 32} + ] + ) +]) +def test_traj_bulk(client, orient_value, data): + # Create or update a traj record + response = client.post("/ddms/v2/trajectories", 
json=[traj], headers=headers)
+    assert response.ok, "Create or update trajectory failed"
+
+    # get data
+    response = client.get(f"/ddms/v2/trajectories/{trajectory_id}/data?orient={orient_value}", headers=headers)
+    assert response.status_code == HTTP_204_NO_CONTENT, "GET trajectory data should return 204 when trajectory doesn't have data"
+
+    # add data to the traj
+    response = client.post(f"/ddms/v2/trajectories/{trajectory_id}/data?orient={orient_value}", json=data, headers=headers)
+    assert response.ok, "PUT trajectory data failed"
+
+    # check record
+    response = client.get(f"/ddms/v2/trajectories/{trajectory_id}", headers=headers)
+    assert response.ok, "GET trajectory record failed"
+    computed_record = response.json()
+
+    assert computed_record["id"] == trajectory_id, "id in the record should match trajectory id"
+    assert computed_record["kind"] == trajectory_kind, "kind in the record should match trajectory kind"
+    assert computed_record["data"]["name"] == trajectory_name, "data.name in the record should match trajectory name"
+    assert computed_record["acl"] == acl, "acl in the record should match trajectory acl"
+    assert computed_record["legal"] == legal, "legal in the record should match trajectory legal"
+    assert computed_record["data"]["bulkURI"], "trajectory record should have a bulkid"
+
+    computed_channel = {channel["name"]: channel["bulkURI"] for channel in computed_record["data"]["channels"]}
+
+    assert computed_channel["MD"] == computed_record["data"]["bulkURI"] + ":MD", "bulkid for channel MD should match {data.bulkid}:MD"
+    assert computed_channel["X"] == computed_record["data"]["bulkURI"] + ":X", "bulkid for channel X should match {data.bulkid}:X"
+    assert computed_channel["Y"] == computed_record["data"]["bulkURI"] + ":Y", "bulkid for channel Y should match {data.bulkid}:Y"
+    assert computed_channel["Z"] == computed_record["data"]["bulkURI"] + ":Z", "bulkid for channel Z should match {data.bulkid}:Z"
+
+    # get data
+    response = client.get(f"/ddms/v2/trajectories/{trajectory_id}/data?orient={orient_value}", headers=headers)
+    assert response.ok, "GET trajectory data failed"
+    assert response.json() == data, "GET trajectory data response json should match trajectory data"
+
+    # get specific channels data
+    response = client.get(f"/ddms/v2/trajectories/{trajectory_id}/data", headers=headers, params={'orient': 'columns', 'channels': ['X', 'Y']})
+    assert response.ok, "GET trajectory data by channels failed"
+    assert response.json() == {
+        "X": {
+            "0": 10,
+            "1": 20,
+            "2": 30
+        },
+        "Y": {
+            "0": 11,
+            "1": 21,
+            "2": 31
+        },
+    }, "GET trajectory data by channels response json body should match trajectory channels data"
+
+    # get unknown channels
+    response = client.get(f"/ddms/v2/trajectories/{trajectory_id}/data?orient=columns&channels=X&channels=Wrong",
+                          headers=headers)
+    assert response.status_code == HTTP_400_BAD_REQUEST, "Get unknown channels data should fail with code 400"
+    assert response.reason == "Bad Request"
+    assert response.text == '{"detail":"\\"[\'Wrong\'] not in index\\""}'
+
+
+def test_traj_create_and_delete(client):
+    response = client.post("/ddms/v2/trajectories", json=[traj], headers=headers)
+    assert response.ok
+    data = response.json()
+    assert data == {'recordCount': 1, 'recordIds': [f'{trajectory_id}'], 'skippedRecordIds': None}
+
+    response = client.delete(f"/ddms/v2/trajectories/{trajectory_id}", headers=headers)
+    assert response.ok
+
+
+@pytest.mark.parametrize("orient_value, data", [("wrong_orient", {}), ("values", {})])
+def
test_get_data_orient_param_validation_negative(client, orient_value, data): + # Create or update a traj record + response = client.post("/ddms/v2/trajectories", json=[traj], headers=headers) + assert response.ok, "Create or update trajectory failed" + + # get data + response = client.get(f"/ddms/v2/trajectories/{trajectory_id}/data?orient={orient_value}", headers=headers) + assert response.status_code == HTTP_422_UNPROCESSABLE_ENTITY + + # add data to the traj + response = client.post(f"/ddms/v2/trajectories/{trajectory_id}/data?orient={orient_value}", json=data, + headers=headers) + assert response.status_code == HTTP_422_UNPROCESSABLE_ENTITY \ No newline at end of file diff --git a/tests/unit/storage/__init__.py b/tests/unit/storage/__init__.py new file mode 100755 index 0000000000000000000000000000000000000000..300d2c3c2a24680c1f4efe15f790d5bbb89d9b7c --- /dev/null +++ b/tests/unit/storage/__init__.py @@ -0,0 +1,14 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..11ad922700753a9d91ce90e7ba71dcfdf38d2127 --- /dev/null +++ b/tests/unit/test_utils.py @@ -0,0 +1,232 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
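+
+# Shared helpers for the unit test suite: a no-op logger, async mock utilities, an
+# auto-mock generator for service client classes, and record factories.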
+ +import pytest +from tempfile import TemporaryDirectory +from odes_storage.models import Record, StorageAcl, Legal +import mock +import asyncio +from contextlib import contextmanager +from app.model.model_utils import record_to_dict +from app.utils import get_or_create_ctx + + +def from_env(key, default=None): + import os + result = os.environ.get(key, default) + # assert result, "Failed to get {} env variable".format(key) + return result + + +@pytest.fixture() +def ctx_fixture(): + """ Create context with a fake tracer in it """ + ctx = get_or_create_ctx().set_current_with_value(tracer=mock.MagicMock(), logger=NopeLogger()) + yield ctx + + +@pytest.fixture +def nope_logger_fixture(): + from app.helper import logger + logger._LOGGER = NopeLogger() + yield + + +class NopeLogger: + def __init__(self): + # empty method + pass + + def debug(*arg, **kargs): + # empty method + pass + + def info(*arg, **kargs): + # empty method + pass + + def warning(*arg, **kargs): + # empty method + pass + + def error(*arg, **kargs): + # empty method + pass + + def exception(*arg, **kargs): + # empty method + pass + + def critical(*arg, **kargs): + # empty method + pass + + +class AsyncMock: + def __init__(self, *, return_value=None, forward_input_name: str = None, forward_input_index: int = 0): + self._return_value = return_value + self._from_input = forward_input_name + self._from_input_index = forward_input_index + + async def __call__(self, *args, **kwargs): + if self._return_value is not None: + return self._return_value + + if self._from_input: + return kwargs[self._from_input] + + if self._from_input_index < 0: + return None + + return args[self._from_input_index] + + +def patch_async(target: str, return_value, mocker=mock): + future = asyncio.Future() + future.set_result(return_value) + return mocker.patch(target, return_value=future) + + +def create_mock_class(cls_to_mock): + cls_name = cls_to_mock.__name__ + 'AutoMock' + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb): + pass + + @classmethod + async def _async_method(cls, *args, **kwargs): + # empty method + pass + + @classmethod + async def _sync_method(cls, *args, **kwargs): + # empty method + pass + + @classmethod + @contextmanager + def set_answer(cls, method_name, fn): + previous_fn = getattr(cls, method_name) + + def _patch_sync(self_or_cls, *args, **kwargs): + return fn(*args, **kwargs) + + async def _patch_async(self_or_cls, *args, **kwargs): + if asyncio.iscoroutinefunction(fn): + return await fn(*args, **kwargs) + return fn(*args, **kwargs) + + if asyncio.iscoroutinefunction(previous_fn): + setattr(cls, method_name, _patch_async) + elif callable(previous_fn): + setattr(cls, method_name, _patch_sync) + + try: + yield + finally: + setattr(cls, method_name, previous_fn) + + @classmethod + def set_return_value(cls, method_name, return_value): + return cls.set_answer(method_name, lambda *args, **kwargs: return_value) + + @classmethod + def set_throw(cls, method_name, exception): + def _do_throw(*args, **kwargs): + raise exception + return cls.set_answer(method_name, _do_throw) + + m_dict = { + 'set_return_value': set_return_value, + 'set_answer': set_answer, + 'set_throw': set_throw, + '__aenter__': __aenter__, + '__aexit__': __aexit__, + } + for name, _ in cls_to_mock.__dict__.items(): + if name.startswith('_'): + continue + + attr = getattr(cls_to_mock, name) + + if asyncio.iscoroutinefunction(attr): + m_dict[name] = _async_method + elif callable(attr): + m_dict[name] = _sync_method + + 
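+    # Build the mock type dynamically: every public method of the mocked class is
+    # replaced by a no-op stub, alongside the set_answer/set_return_value/set_throw helpers.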
_new_class_ = type(cls_name, (object, ), m_dict) + return _new_class_ + + +def assert_dict_contained(dict_to_check: dict, ref_dict: dict, path=''): + """ + check actual dict contained ref_dict + path param use the default value + """ + for key, value in ref_dict.items(): + current_path = path + '.' + key if path else key + assert key in dict_to_check + sub_item = dict_to_check[key] + assert type(sub_item) == type(value), f'type of {current_path} ({type(sub_item)}) != ref {type(value)}' + if type(value) == dict: + assert_dict_contained(sub_item, value, current_path) + else: + assert sub_item == value, f'{current_path}: actual {sub_item} != ref {value}' + + +@pytest.fixture +async def temp_directory() -> str: + with TemporaryDirectory() as tmpdir: + yield tmpdir + + +def make_record(as_dict=False, **kwargs): + kwargs.setdefault('kind', 'opendes:osdu:raw:2.0.0') + kwargs.setdefault('acl', StorageAcl( + viewers=['data.default.viewers@opendes.p4d.cloud.ds.com'], + owners=['data.default.owners@opendes.p4d.cloud.ds.com'])) + kwargs.setdefault('legal', Legal()) + kwargs.setdefault('data', {}) + record = Record(**kwargs) + return record_to_dict(record) if as_dict else record + + +@pytest.fixture +def basic_record(kind: str = None): + return make_record() if kind is None else make_record(kind=kind) + + +def make_fn_return_value(value_to_return, as_coroutine: bool = False): + if not as_coroutine: + return lambda *args, **kwargs: value_to_return + + async def return_async_fn(*args, **kwargs): + return value_to_return + + return return_async_fn + + +def make_fn_do_nothing(as_coroutine: bool = False): + return make_fn_return_value(None, as_coroutine) + + +def make_async_return_value(value_to_return): + return make_fn_return_value(value_to_return, True) + + +def make_async_do_nothing(): + return make_fn_do_nothing(True) \ No newline at end of file diff --git a/tests/unit/version_test.py b/tests/unit/version_test.py new file mode 100644 index 0000000000000000000000000000000000000000..4b7cc3f368e30cbf93951c1c902eb42bbdffffc7 --- /dev/null +++ b/tests/unit/version_test.py @@ -0,0 +1,32 @@ +# Copyright 2021 Schlumberger +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from app import __version__, __build_number__, __app_name__ +import re + + +# to ensure info are ok +def test_version_info(): + assert __version__ is not None + assert type(__version__) == str + + #NOSONAR + regex = re.compile('^(\\d+)(.\\d+)*$') + assert regex.match(__version__) + + assert type(__build_number__) == str + assert __build_number__ is not None + + assert type(__app_name__) == str + assert __app_name__ is not None