Commit 124ec52b authored by Sumra Zafar's avatar Sumra Zafar
Browse files

Azure fix m13

parent 89ea06e6
......@@ -44,7 +44,7 @@ include:
ref: "master"
file: "cloud-providers/osdu-gcp-dag.yaml"
- local: "/devops/azure/override-stages.yml"
- local: "/devops/azure/override-stages_bk.yml"
- local: "/devops/gcp/override-stages.yml"
......
......@@ -14,25 +14,20 @@ class CreateDag:
self.namespace = os.environ.get('NAMESPACE')
self.file_path = file_path
self.service_name = "segy-to-zgy-conversion"
self.build_version = os.environ.get('BUILD_VERSION')
self.build_version = os.environ.get('BUILD_VERSION', 'latest')
self.dag_name = f"{self.service_name}-{self.build_version}"
self.dag_name = self.dag_name[:50]
self.env_vars = {
"STORAGE_SVC_URL": f"https://{self.dns_host_name}/api/storage/v2",
"SD_SVC_URL": f"https://{self.dns_host_name}/api/schema-service/v1",
"search_service_endpoint": f"https://{self.dns_host_name}/api/search/v2",
"partition_service_endpoint": f"https://{self.dns_host_name}/api/partition/v1",
"unit_service_endpoint": f"https://{self.dns_host_name}/api/unit/v2/unit/symbol",
"file_service_endpoint": f"https://{self.dns_host_name}/api/file/v2",
"KEYVAULT_URI": "{{ var.value.keyvault_uri }}",
"appinsights_key": "{{ var.value.appinsights_key }}",
"azure_paas_podidentity_isEnabled": "{{ var.value.azure_enable_msi }}",
"JAVA_OPTS": "-XX:InitialRAMPercentage=25.0 -XX:MaxRAMPercentage=50.0 -XX:+HeapDumpOnOutOfMemoryError",
"APPINSIGHTS_LOGGING_ENABLED": "false",
"SD_READ_CACHE_PAGE_SIZE": "4195024",
"OSDU_DATAPARTITIONID": "PARTITION_PLACEHOLDER",
"SD_SVC_URL": f"https://{self.dns_host_name}/seistore-svc/api/v3",
"STORAGE_SVC_URL": f"https://{self.dns_host_name}/api/storage/v2/",
"SD_SVC_TOKEN": "TOKEN_PLACEHOLDER",
"STORAGE_SVC_TOKEN": "TOKEN_PLACEHOLDER",
"SD_SVC_API_KEY": "NA",
"SD_READ_CACHE_MAX_PAGES": "256",
"SD_READ_CACHE_PAGE_SIZE": "4195024",
"SEGYTOZGY_VERBOSITY": "3",
"SEGYTOZGY_GENERATE_INDEX": "1"
}
......@@ -58,15 +53,15 @@ class CreateDag:
print(f"service_name: {self.service_name}")
print(f"build_version: {self.build_version}")
print(f"dag_name: {self.dag_name}")
print(f"folder: {self.dag_folder_path}")
print(f"docker_tag: {self.docker_tag}")
def _render_template(self, file) -> str:
env = Environment(
loader=FileSystemLoader(searchpath=os.getenv("ZGY_DAG_FOLDER_PATH", default="airflow/workflow-svc-v2")),
variable_start_string='{|', variable_end_string='|}',
)
template = env.get_template("/" + file.name)
template = env.get_template(os.path.basename(file.name))
params = {
"DAG_NAME": self.dag_name,
......@@ -74,7 +69,7 @@ class CreateDag:
"DNS_HOST": self.dns_host_name,
"NAMESPACE": self.namespace,
"ENV_VARS": json.dumps(self.env_vars, indent=4),
"K8S_POD_OPERATOR_KWARGS": json.dumps(self.kubernetes_pod_operator_options).replace('','{}')
"K8S_POD_OPERATOR_KWARGS": json.dumps(self.kubernetes_pod_operator_options, indent=2)
}
return template.render(**params)
......
# Override the containerize stage so that we can build the dag load container as well.
.azure_variables:
variables:
WORKFLOW_URL: https://${AZURE_DNS_NAME}/api/workflow/v1/
---
.azure_variables:
variables:
AZURE_AD_APP_RESOURCE_ID: $AZURE_APP_ID
AZURE_AD_TENANT_ID: $AZURE_TENANT_ID
AZURE_CLIENT_ID: $AZURE_PRINCIPAL_ID
AZURE_CLIENT_SECRET: $AZURE_PRINCIPAL_SECRET
AZURE_AD_APP_RESOURCE_ID: $AZURE_APP_ID
SHARED_TENANT: opendes
DATA_PARTITION_ID: opendes
azure_containerize:
image: danielscholl/azure-build-image
tags: ['osdu-medium']
stage: containerize
variables:
DAG_TASK_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
DAG_LOAD_IMAGE: ${CI_PROJECT_NAME}-dag-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
extends:
SHARED_TENANT: opendes
WORKFLOW_URL: "https://${AZURE_DNS_NAME}/api/workflow/v1/"
azure_build_dag:
artifacts:
expire_in: "2 days"
paths:
- output_dags
before_script:
- "az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID"
- "az aks get-credentials -g $AZURE_UNIQUE-rg -n $AZURE_UNIQUE-aks"
- "docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY"
extends:
- .azure_variables
before_script:
- echo -n $CI_JOB_TOKEN | docker login -u $CI_REGISTRY_USER --password-stdin $CI_REGISTRY
- az --version
- az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
script:
# Build Docker Image
- docker build -t $CI_REGISTRY_IMAGE/$DAG_TASK_IMAGE --file Dockerfile .
- docker build -t $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE --file deployments/scripts/azure/Dockerfile .
- docker push $CI_REGISTRY_IMAGE/$DAG_TASK_IMAGE
- docker push $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE
# Azure Container Registry
- az acr login -n $AZURE_REGISTRY
- docker tag $CI_REGISTRY_IMAGE/$DAG_TASK_IMAGE ${AZURE_REGISTRY}.azurecr.io/$DAG_TASK_IMAGE
- docker tag $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE ${AZURE_REGISTRY}.azurecr.io/$DAG_LOAD_IMAGE
- docker push ${AZURE_REGISTRY}.azurecr.io/$DAG_TASK_IMAGE
- docker push ${AZURE_REGISTRY}.azurecr.io/$DAG_LOAD_IMAGE
only:
variables:
- $AZURE == 'true'
azure_deploy:
stage: deploy
image: danielscholl/azure-build-image
needs: ["azure_containerize"]
tags: ['osdu-medium']
variables:
DAG_TASK_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
DAG_LOAD_IMAGE: ${CI_PROJECT_NAME}-dag-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
extends:
only:
variables:
- "$AZURE == 'true'"
script:
- |
cat > .env << EOF
DAG_TASK_IMAGE=${AZURE_REGISTRY}.azurecr.io/$DAG_TASK_IMAGE
SHARED_TENANT=$SHARED_TENANT
AZURE_TENANT_ID=$AZURE_TENANT_ID
AZURE_DNS_NAME=$AZURE_DNS_NAME
AZURE_AD_APP_RESOURCE_ID=$AZURE_AD_APP_RESOURCE_ID
AZURE_CLIENT_ID=$AZURE_CLIENT_ID
AZURE_CLIENT_SECRET=$AZURE_CLIENT_SECRET
BUILD_VERSION=$CI_COMMIT_SHA
EOF
docker build -t $CI_REGISTRY_IMAGE/$DAG_TASK_IMAGE --file Dockerfile .
docker build -t $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE --file deployments/scripts/azure/Dockerfile .
docker push $CI_REGISTRY_IMAGE/$DAG_TASK_IMAGE
docker push $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE
az acr login -n $AZURE_REGISTRY
docker tag $CI_REGISTRY_IMAGE/$DAG_TASK_IMAGE ${AZURE_REGISTRY}.azurecr.io/$DAG_TASK_IMAGE
docker tag $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE ${AZURE_REGISTRY}.azurecr.io/$DAG_LOAD_IMAGE
docker push ${AZURE_REGISTRY}.azurecr.io/$DAG_TASK_IMAGE
docker push ${AZURE_REGISTRY}.azurecr.io/$DAG_LOAD_IMAGE
if [ "$(docker ps -a | grep docker_generate_dags)" ]; then
docker stop docker_generate_dags
docker rm docker_generate_dags
fi
docker run --name "docker_generate_dags" --env-file .env $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE
stage: containerize
tags:
- osdu-medium
variables:
DAG_LOAD_IMAGE: "${CI_PROJECT_NAME}-dag-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}"
DAG_TASK_IMAGE: "${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}"
azure_register_dag:
artifacts:
expire_in: "2 days"
paths:
- output_dags
extends:
- .azure_variables
before_script:
- az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
- az aks get-credentials -g $AZURE_UNIQUE-rg -n $AZURE_UNIQUE-aks
script:
image: "python:3.8"
needs:
- azure_build_dag
only:
variables:
- "$AZURE == 'true'"
script:
- |
cat > .env << EOF
DAG_TASK_IMAGE=${AZURE_REGISTRY}.azurecr.io/$DAG_TASK_IMAGE
SHARED_TENANT=$SHARED_TENANT
AZURE_TENANT_ID=$AZURE_TENANT_ID
AZURE_DNS_NAME=$AZURE_DNS_NAME
AZURE_AD_APP_RESOURCE_ID=$AZURE_AD_APP_RESOURCE_ID
AZURE_CLIENT_ID=$AZURE_CLIENT_ID
AZURE_CLIENT_SECRET=$AZURE_CLIENT_SECRET
BUILD_VERSION=$CI_COMMIT_SHA
EOF
cat .env
if [ "$(docker ps -a | grep docker_container)" ]; then
docker stop docker_container
docker rm docker_container
fi
docker run --name "docker_container" --env-file .env $CI_REGISTRY_IMAGE/$DAG_LOAD_IMAGE
output_dag_folder_path="/home/osdu/scripts/output_dags/"
docker cp "docker_container:${output_dag_folder_path}" .
artifacts:
paths:
- "output_dags"
expire_in: 2 days
only:
variables:
- $AZURE == 'true'
azure_copy_dag:
image: danielscholl/azure-build-image
tags: ["osdu-medium"]
cat > requirements.txt << EOF
msal
Jinja2==2.10.1
EOF
- |
export ZGY_DAG_FOLDER_PATH="airflow/workflow-svc-v2"
export DOCKER_TAG=${CI_COMMIT_REF_SLUG}
export BUILD_VERSION=$(echo ${CI_COMMIT_SHA} | cut -c -5)
- |
# Python script for generating the Bearer Token
cat > Token.py << EOF
import os
import msal
class AzureToken(object):
def get_azure_id_token(self):
tenant_id = os.getenv('AZURE_TENANT_ID')
resource_id = os.getenv('AZURE_AD_APP_RESOURCE_ID')
client_id = os.getenv('AZURE_CLIENT_ID')
client_secret = os.getenv('AZURE_CLIENT_SECRET')
if tenant_id is None:
print('Please pass tenant Id to generate token')
exit(1)
if resource_id is None:
print('Please pass resource Id to generate token')
exit(1)
if client_id is None:
print('Please pass client Id to generate token')
exit(1)
if client_secret is None:
print('Please pass client secret to generate token')
exit(1)
try:
authority_host_uri = 'https://login.microsoftonline.com'
authority_uri = authority_host_uri + '/' + tenant_id
scope = [resource_id + '/.default']
app = msal.ConfidentialClientApplication(client_id=client_id, authority=authority_uri, client_credential=client_secret)
result = app.acquire_token_for_client(scopes=scope)
token = 'Bearer ' + result.get('access_token')
print(token)
return token
except Exception as e:
print(e)
if __name__ == '__main__':
AzureToken().get_azure_id_token()
EOF
- |
# Python script for registering the DAG by calling Workflow service API
cat > registeration_dag.py << EOF
import json
import requests
import os
class RegisterDag:
def __init__(self):
self.token = os.environ.get('BEARER_TOKEN')
self.data_partition_id = os.environ.get('SHARED_TENANT')
self.workflow_service_url = os.environ.get('WORKFLOW_URL') + "workflow"
def register(self):
self.register_dag()
def register_dag(self):
data = '{"workflowName":"$DAG_NAME","description":"$DAG_NAME","registrationInstructions":{"dagName":"$DAG_NAME"}}'
headers = {
'Content-Type': 'application/json',
'Authorization': self.token,
'data-partition-id': self.data_partition_id
}
response = requests.post(self.workflow_service_url, headers=headers, data=data)
if response.status_code == 200:
workflow_id = response.json().get('workflowId')
print("DAG registered with workflowId: {0}".format(workflow_id))
elif response.status_code == 409:
workflow_id = response.json().get('workflowId')
print("DAG is already registered with workflowId: {0}".format(workflow_id))
else:
print("Error while registering DAG {0}".format(response.raise_for_status()))
if __name__ == "__main__":
RegisterDag().register()
EOF
- "pip install -r ./requirements.txt"
- "export BEARER_TOKEN=$(python ./Token.py)"
- "echo \"$(echo $BEARER_TOKEN | cut -c -20)***********\""
- "python ./registeration_dag.py"
- "python deployments/scripts/azure/output_dag_folder.py -f airflow/workflow-svc-v2/segy_to_zgy_ingestion_dag.py"
- "cd output_dags/dags"
stage: deploy
needs: ["azure_deploy"]
extends:
tags:
- osdu-medium
variables:
DAG_NAME: "${CI_PROJECT_NAME}-dag-${CI_COMMIT_REF_SLUG}"
azure_copy_dag:
artifacts:
expire_in: "2 days"
paths:
- output_dags
before_script:
- "az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID"
- "az aks get-credentials -g $AZURE_UNIQUE-rg -n $AZURE_UNIQUE-aks"
extends:
- .azure_variables
before_script:
- az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
- az aks get-credentials -g $AZURE_UNIQUE-rg -n $AZURE_UNIQUE-aks
script:
image: danielscholl/azure-build-image
needs:
- azure_register_dag
only:
variables:
- "$AZURE == 'true'"
script:
- |
# Installing the Azcopy utility
apk add --update coreutils && rm -rf /var/cache/apk/*
mkdir -p tmp
cd tmp
wget -O azcopy_v10.tar.gz https://aka.ms/downloadazcopy-v10-linux && tar -xf azcopy_v10.tar.gz --strip-components=1
cp ./azcopy /usr/bin/
cd ..
- EXPIRE=$(date -u -d "59 minutes" '+%Y-%m-%dT%H:%M:%SZ')
- START=$(date -u -d "-1 minute" '+%Y-%m-%dT%H:%M:%SZ')
- accountKey=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountkey}' | base64 -d)
- accountName=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountname}' | base64 -d)
# Generating the SAS Token required for Authorization
- AZURE_STORAGE_SAS_TOKEN=$(az storage account generate-sas --account-name $accountName --account-key $accountKey --start $START --expiry $EXPIRE --https-only --resource-types sco --services f --permissions cwdlur -o tsv)
- cd output_dags
# Installing the Azcopy utility
apk add --update coreutils && rm -rf /var/cache/apk/*
mkdir -p tmp
cd tmp
wget -O azcopy_v10.tar.gz https://aka.ms/downloadazcopy-v10-linux && tar -xf azcopy_v10.tar.gz --strip-components=1
cp ./azcopy /usr/bin/
cd ..
- "EXPIRE=$(date -u -d \"59 minutes\" '+%Y-%m-%dT%H:%M:%SZ')"
- "START=$(date -u -d \"-1 minute\" '+%Y-%m-%dT%H:%M:%SZ')"
- "accountKey=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountkey}' | base64 -d)"
- "accountName=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountname}' | base64 -d)"
- "AZURE_STORAGE_SAS_TOKEN=$(az storage account generate-sas --account-name $accountName --account-key $accountKey --start $START --expiry $EXPIRE --https-only --resource-types sco --services f --permissions cwdlur -o tsv)"
- "cd output_dags/dags"
- |
if [ "$AZURE_DEPLOY_PACKAGED_DAG" == "true" ]; then
echo "Packaged Dags are enabled"
if [ -d "./dags/" ]; then
echo "Uploading to: ${accountName}"
# Copying the zipped dag inside the dags folder
azcopy cp "./dags/*.zip" "https://${accountName}.file.core.windows.net/airflowdags/dags?${AZURE_STORAGE_SAS_TOKEN}"
azcopy cp "./dags/*.zip" "https://${accountName}.file.core.windows.net/airflow2dags/dags?${AZURE_STORAGE_SAS_TOKEN}"
fi
else
echo "Packaged Dags are disabled"
if [ -d "./dags/" ]; then
# Copying all the contents inside the dags folder
azcopy cp "./dags/*" "https://${accountName}.file.core.windows.net/airflowdags/dags?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true
azcopy cp "./dags/*" "https://${accountName}.file.core.windows.net/airflow2dags/dags?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true
cd dags
# Now syncing only the folders which are part of source to remove the deleted files
for directory in *; do
if [ -d "$directory" ]; then
azcopy sync "./$directory/" "https://${accountName}.file.core.windows.net/airflowdags/dags/$directory?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true --delete-destination=true
azcopy sync "./$directory/" "https://${accountName}.file.core.windows.net/airflow2dags/dags/$directory?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true --delete-destination=true
fi
done
cd ..
fi
fi
artifacts:
paths:
- "output_dags"
expire_in: 2 days
only:
variables:
- $AZURE == 'true'
azure_register_dag:
tags: ["osdu-medium"]
image: python:3.8
stage: bootstrap
needs: ["azure_copy_dag"]
variables:
PARSER_IMAGE: ${AZURE_REGISTRY}.azurecr.io/${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
extends:
- .azure_variables
script:
- |
cat > requirements.txt << EOF
msal
EOF
- |
# Python script for generating the Bearer Token
cat > Token.py << EOF
import os
import msal
class AzureToken(object):
def get_azure_id_token(self):
tenant_id = os.getenv('AZURE_TENANT_ID')
resource_id = os.getenv('AZURE_AD_APP_RESOURCE_ID')
client_id = os.getenv('AZURE_CLIENT_ID')
client_secret = os.getenv('AZURE_CLIENT_SECRET')
if tenant_id is None:
print('Please pass tenant Id to generate token')
exit(1)
if resource_id is None:
print('Please pass resource Id to generate token')
exit(1)
if client_id is None:
print('Please pass client Id to generate token')
exit(1)
if client_secret is None:
print('Please pass client secret to generate token')
exit(1)
try:
authority_host_uri = 'https://login.microsoftonline.com'
authority_uri = authority_host_uri + '/' + tenant_id
scope = [resource_id + '/.default']
app = msal.ConfidentialClientApplication(client_id=client_id, authority=authority_uri, client_credential=client_secret)
result = app.acquire_token_for_client(scopes=scope)
token = 'Bearer ' + result.get('access_token')
print(token)
return token
except Exception as e:
print(e)
if __name__ == '__main__':
AzureToken().get_azure_id_token()
EOF
- |
# Python script for registering the DAG by calling Workflow service API
cat > register_dag.py << EOF
import json
import requests
import os
class RegisterDag:
def __init__(self):
self.token = os.environ.get('BEARER_TOKEN')
self.data_partition_id = os.environ.get('SHARED_TENANT')
self.workflow_service_url = os.environ.get('WORKFLOW_URL') + "workflow"
def register(self):
self.register_dag()
def register_dag(self):
with open('./output_dags/workflow_request_body.json', 'r', encoding='utf-8') as f:
file_data = json.load(f)
for data in file_data:
data = json.dumps(data)
headers = {
'Content-Type': 'application/json',
'Authorization': self.token,
'data-partition-id': self.data_partition_id
}
response = requests.post(self.workflow_service_url, headers=headers, data=data)
if response.status_code == 200:
workflow_id = response.json().get('workflowId')
print("DAG registered with workflowId: {0}".format(workflow_id))
elif response.status_code == 409:
workflow_id = response.json().get('workflowId')
print("DAG is already registered with workflowId: {0}".format(workflow_id))
else:
print("Error while registering DAG {0}".format(response.raise_for_status()))
exit(1)
if __name__ == "__main__":
RegisterDag().register()
EOF
- pip install -r ./requirements.txt
- export BEARER_TOKEN=$(python ./Token.py)
- echo $BEARER_TOKEN
- python ./register_dag.py
artifacts:
paths:
- "output_dags"
expire_in: 2 days
only:
variables:
- $AZURE == 'true'
tags:
- osdu-medium
variables:
AZURE_DEPLOY_PACKAGED_DAG: "true"
\ No newline at end of file
This diff is collapsed.
......@@ -32,7 +32,7 @@
"FileCollectionPath": "sd://opendes/sntc/volve/",
"FileSourceInfos": [
{
"FileSource": "sd://opendes/sntc/volve/ST10010ZC11_PZ_PSDM_KIRCH_FULL_D.MIG_FIN.POST_STACK.3D.JS-017536.segy",
"FileSource": "sd://suzafardftest26-opendes/suzafar/ST10010ZC11_PZ_PSDM_KIRCH_FULL_T.MIG_FIN.POST_STACK.3D.JS-017536.segy",
"PreloadFilePath": "MyDocuments\\seismic-to-osdu\\Volve_SEGY_Depth\\ST10010ZC11_PZ_PSDM_KIRCH_FULL_D.MIG_FIN.POST_STACK.3D.JS-017536.segy",
"Name": "Volve_SEGY_Depth",
"FileSize": "1368413160"
......
#!/bin/bash
# Settings
DATA_PARTITION_ID=opendes
ACL_OWNER=data.default.owners@opendes.enterprisedata.cloud.slb-ds.com
ACL_VIEWER=data.default.viewers@opendes.enterprisedata.cloud.slb-ds.com
LEGAL_TAG=opendes-default-legal
DATA_PARTITION_ID=suzafardftest26-opendes
ACL_OWNER=data.default.owners@suzafardftest26-opendes.contoso.com
ACL_VIEWER=data.default.viewers@suzafardftest26-opendes.contoso.com
LEGAL_TAG=suzafardftest26-opendes-Seismic-Legal-Tag-Test999544286722
# Code
set -e
......
......@@ -28,7 +28,7 @@
"FileCollectionPath": "sd://opendes/sntc/volve/",
"FileSourceInfos": [
{
"FileSource": "sd://opendes/sntc/volve/ST10010ZC11_PZ_PSDM_KIRCH_FULL_T.MIG_FIN.POST_STACK.3D.JS-017536.segy",
"FileSource": "sd://suzafardftest26-opendes/suzafar/ST10010ZC11_PZ_PSDM_KIRCH_FULL_T.MIG_FIN.POST_STACK.3D.JS-017536.segy",
"PreloadFilePath": "MyDocuments\\seismic-to-osdu\\Volve_SEGY_Time\\ST10010ZC11_PZ_PSDM_KIRCH_FULL_T.MIG_FIN.POST_STACK.3D.JS-017536.segy",
"Name": "Volve_SEGY_Time",
"FileSize": "1050849760"
......
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment