---
# EXPECTED PIPELINE INHERITED GROUP VARIABLES
# --------------------------------------------------------------------------------
# AZURE                     (Protected Branch)
# AZURE_APP_ID              (Protected Branch)
# AZURE_APP_ID_OTHER        (Protected Branch)
# AZURE_APP_OID_OTHER       (Protected Branch)
# AZURE_BASE                (Protected Branch)
# AZURE_BASENAME_21         (Protected Branch)
# AZURE_DNS_NAME            (Protected Branch)
# AZURE_ELASTIC_HOST        (Protected Branch)
# AZURE_ELASTIC_PASSWORD    (Protected Branch/Masked Variable)
# AZURE_INVALID_JWT         (Protected Branch)
# AZURE_NO_ACCESS_ID        (Protected Branch)
# AZURE_NO_ACCESS_SECRET    (Protected Branch/Masked Variable)
# AZURE_PRINCIPAL_ID        (Protected Branch)
# AZURE_PRINCIPAL_SECRET    (Protected Branch/Masked Variable)
# AZURE_REGISTRY            (Protected Branch)
# AZURE_SERVICEBUS_KEY      (Protected Branch/Masked Variable)
# AZURE_STORAGE_KEY         (Protected Branch/Masked Variable)
# AZURE_SUBSCRIPTION_ID     (Protected Branch)
# AZURE_SUBSCRIPTION_NAME   (Protected Branch)
# AZURE_TENANT_ID           (Protected Branch)
# AZURE_APPINSIGHTS_KEY     (Protected Branch/Masked Variable)

# EXPECTED PIPELINE VARIABLES
# --------------------------------------------------------------------------------
# AZURE_TEST_SUBDIR

# Hidden job providing the common Azure variable set; jobs pull it in via `extends`.
.azure_variables:
  variables:
    WORKFLOW_URL: https://${AZURE_DNS_NAME}/api/workflow/v1/
    AZURE_AD_TENANT_ID: $AZURE_TENANT_ID
    AZURE_CLIENT_ID: $AZURE_PRINCIPAL_ID
    AZURE_CLIENT_SECRET: $AZURE_PRINCIPAL_SECRET
    AZURE_AD_APP_RESOURCE_ID: $AZURE_APP_ID
    SHARED_TENANT: opendes
    DATA_PARTITION_ID: opendes

# JOBS
# --------------------------------------------------------------------------------

# This stage generates the output dag folder by running a docker container which will
# be built via the Dockerfile present in the DAG repository root. This Dockerfile should
# contain the logic to generate all the prerequisite artifacts for the DAG and output
# all the DAG related files in a folder.
azure_create_dag:
  tags: ["osdu-medium"]
  image: danielscholl/azure-build-image
  stage: containerize
  needs: ["compile-and-unit-test"]
  variables:
    PARSER_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
    DAG_IMAGE: ${CI_PROJECT_NAME}-dag-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
  extends:
    - .azure_variables
  before_script:
    - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
    - az --version
    - az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
  script:
    - |
      # Generating environment file to be passed while running the docker container
      cat > .env << EOF
      CI_REGISTRY_USER=${CI_REGISTRY_USER}
      CI_REGISTRY_PASSWORD=${CI_REGISTRY_PASSWORD}
      CI_REGISTRY=${CI_REGISTRY}
      AZURE_REGISTRY=${AZURE_REGISTRY}
      AZURE_PRINCIPAL_ID=${AZURE_PRINCIPAL_ID}
      AZURE_PRINCIPAL_SECRET=${AZURE_PRINCIPAL_SECRET}
      AZURE_TENANT_ID=${AZURE_TENANT_ID}
      CI_PROJECT_NAME=${CI_PROJECT_NAME}
      CI_COMMIT_REF_SLUG=${CI_COMMIT_REF_SLUG}
      CI_COMMIT_SHA=${CI_COMMIT_SHA}
      CI_REGISTRY_IMAGE=${CI_REGISTRY_IMAGE}
      AZURE_DEPLOYMENTS_SCRIPTS_SUBDIR=${AZURE_DEPLOYMENTS_SCRIPTS_SUBDIR}
      PARSER_IMAGE=${PARSER_IMAGE}
      DAG_IMAGE=${DAG_IMAGE}
      SHARED_TENANT=$SHARED_TENANT
      AZURE_DNS_NAME=$AZURE_DNS_NAME
      EOF
    - docker build -t $CI_REGISTRY_IMAGE/$PARSER_IMAGE-orchestrator --file $AZURE_DEPLOYMENTS_SCRIPTS_SUBDIR/dockerFolder/output_dags_dockerfile .
    - |
      # Checking if container with name docker_generate_dags exists then delete
      # Container with a name is required to perform the copy operation below
      if [ "$(docker ps -a | grep docker_generate_dags)" ]; then
        docker stop docker_generate_dags
        docker rm docker_generate_dags
      fi
    - docker run --name "docker_generate_dags" -v /var/run/docker.sock:/var/run/docker.sock --env-file .env $CI_REGISTRY_IMAGE/$PARSER_IMAGE-orchestrator
    # Copying the dag folder from the docker container to the host
    - docker cp "docker_generate_dags:$AZURE_OUTPUT_DAG_FOLDER" .
  artifacts:
    paths:
      - "output_dags"
  only:
    variables:
      - $AZURE == 'true'

# Pushes the generated dags/plugins onto the Airflow file share via azcopy.
azure_copy_dag:
  image: danielscholl/azure-build-image
  tags: ["osdu-medium"]
  stage: deploy
  needs: ["azure_create_dag"]
  extends:
    - .azure_variables
  before_script:
    - az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
    - az aks get-credentials -g $AZURE_UNIQUE-rg -n $AZURE_UNIQUE-aks
  script:
    - |
      # Installing the Azcopy utility
      apk add --update coreutils && rm -rf /var/cache/apk/*
      mkdir -p tmp
      cd tmp
      wget -O azcopy_v10.tar.gz https://aka.ms/downloadazcopy-v10-linux && tar -xf azcopy_v10.tar.gz --strip-components=1
      cp ./azcopy /usr/bin/
      cd ..
    - EXPIRE=$(date -u -d "59 minutes" '+%Y-%m-%dT%H:%M:%SZ')
    - START=$(date -u -d "-1 minute" '+%Y-%m-%dT%H:%M:%SZ')
    - accountKey=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountkey}' | base64 -d)
    - accountName=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountname}' | base64 -d)
    # Generating the SAS Token required for Authorization
    - AZURE_STORAGE_SAS_TOKEN=$(az storage account generate-sas --account-name $accountName --account-key $accountKey --start $START --expiry $EXPIRE --https-only --resource-types sco --services f --permissions cwdlur -o tsv)
    - cd output_dags
    - |
      if [ -d "./dags/" ]; then
        # Copying all the contents inside the dags folder
        azcopy cp "./dags/*" "https://${accountName}.file.core.windows.net/airflowdags/dags?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true
        cd dags
        # Now syncing only the folders which are part of source to remove the deleted files
        for directory in *; do
          if [ -d "$directory" ]; then
            azcopy sync "./$directory/" "https://${accountName}.file.core.windows.net/airflowdags/dags/$directory?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true --delete-destination=true
          fi
        done
        cd ..
      fi
    - |
      if [ -d "./plugins/" ]; then
        # Copying all the contents inside the plugins folder
        azcopy cp "./plugins/*" "https://${accountName}.file.core.windows.net/airflowdags/plugins?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true
        cd plugins
        if [ -d "operators" ]; then
          cd operators
          # Syncing only the sub folders which are part of source in operators folder
          for directory in *; do
            if [ -d "$directory" ]; then
              azcopy sync "./$directory/" "https://${accountName}.file.core.windows.net/airflowdags/plugins/operators/$directory?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true --delete-destination=true
            fi
          done
          cd ..
        fi
        if [ -d "hooks" ]; then
          cd hooks
          # Syncing only the sub folders which are part of source in hooks folder
          for directory in *; do
            if [ -d "$directory" ]; then
              azcopy sync "./$directory/" "https://${accountName}.file.core.windows.net/airflowdags/plugins/hooks/$directory?${AZURE_STORAGE_SAS_TOKEN}" --recursive=true --delete-destination=true
            fi
          done
          cd ..
        fi
        cd ..
      fi
  artifacts:
    paths:
      - "output_dags"
  only:
    variables:
      - $AZURE == 'true'

# Registers the generated DAG(s) with the Workflow service using an AAD bearer token.
azure_register_dag:
  tags: ["osdu-medium"]
  image: python:3.8
  stage: bootstrap
  needs: ["azure_copy_dag"]
  variables:
    PARSER_IMAGE: ${AZURE_REGISTRY}.azurecr.io/${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
  extends:
    - .azure_variables
  script:
    - |
      # register_dag.py imports requests directly, so list it explicitly rather
      # than relying on it being a transitive dependency of msal.
      cat > requirements.txt << EOF
      msal
      requests
      EOF
    - |
      # Python script for generating the Bearer Token
      # Heredoc delimiter is quoted so the shell never expands anything inside
      # the embedded Python source.
      cat > Token.py << 'EOF'
      import os
      import msal


      class AzureToken(object):

          def get_azure_id_token(self):
              tenant_id = os.getenv('AZURE_TENANT_ID')
              resource_id = os.getenv('AZURE_AD_APP_RESOURCE_ID')
              client_id = os.getenv('AZURE_CLIENT_ID')
              client_secret = os.getenv('AZURE_CLIENT_SECRET')

              if tenant_id is None:
                  print('Please pass tenant Id to generate token')
                  exit(1)
              if resource_id is None:
                  print('Please pass resource Id to generate token')
                  exit(1)
              if client_id is None:
                  print('Please pass client Id to generate token')
                  exit(1)
              if client_secret is None:
                  print('Please pass client secret to generate token')
                  exit(1)

              try:
                  authority_host_uri = 'https://login.microsoftonline.com'
                  authority_uri = authority_host_uri + '/' + tenant_id
                  scope = [resource_id + '/.default']
                  app = msal.ConfidentialClientApplication(client_id=client_id, authority=authority_uri, client_credential=client_secret)
                  result = app.acquire_token_for_client(scopes=scope)
                  token = 'Bearer ' + result.get('access_token')
                  print(token)
                  return token
              except Exception as e:
                  print(e)


      if __name__ == '__main__':
          AzureToken().get_azure_id_token()
      EOF
    - |
      # Python script for registering the DAG by calling Workflow service API
      cat > register_dag.py << 'EOF'
      import json
      import requests
      import os


      class RegisterDag:

          def __init__(self):
              self.token = os.environ.get('BEARER_TOKEN')
              self.data_partition_id = os.environ.get('SHARED_TENANT')
              self.workflow_service_url = os.environ.get('WORKFLOW_URL') + "workflow"

          def register(self):
              self.register_dag()

          def register_dag(self):
              with open('./output_dags/workflow_request_body.json', 'r', encoding='utf-8') as f:
                  file_data = json.load(f)
              for data in file_data:
                  data = json.dumps(data)
                  headers = {
                      'Content-Type': 'application/json',
                      'Authorization': self.token,
                      'data-partition-id': self.data_partition_id
                  }
                  response = requests.post(self.workflow_service_url, headers=headers, data=data)
                  if response.status_code == 200:
                      workflow_id = response.json().get('workflowId')
                      print("DAG registered with workflowId: {0}".format(workflow_id))
                  elif response.status_code == 409:
                      workflow_id = response.json().get('workflowId')
                      print("DAG is already registered with workflowId: {0}".format(workflow_id))
                  else:
                      print("Error while registering DAG {0}".format(response.raise_for_status()))
                      exit(1)


      if __name__ == "__main__":
          RegisterDag().register()
      EOF
    - pip install -r ./requirements.txt
    - export BEARER_TOKEN=$(python ./Token.py)
    # NOTE(review): echoing the token printed the credential into the CI job log;
    # disabled for security. Re-enable locally only if debugging token acquisition.
    # - echo $BEARER_TOKEN
    - python ./register_dag.py
  only:
    variables:
      - $AZURE == 'true'

# Runs the maven integration-test suite against the freshly registered DAG.
azure_test_dag:
  tags: ["osdu-medium"]
  image: $CI_REGISTRY/danielscholl/azure-maven/azure-maven:v1.0
  stage: testDag
  needs: ["azure_register_dag"]
  variables:
    AZURE_SERVICE: csv-parser
    AZURE_TENANT_ID: $AZURE_TENANT_ID
    KEYVAULT_URI: $AZURE_KEYVAULT_URI
    partition_service_endpoint: https://${AZURE_DNS_NAME}/api/partition/v1
    storage_service_endpoint: https://${AZURE_DNS_NAME}/api/storage/v2
    schema_service_endpoint: https://${AZURE_DNS_NAME}/api/schema-service/v1
    search_service_endpoint: https://${AZURE_DNS_NAME}/api/search/v2
    unit_service_endpoint: https://${AZURE_DNS_NAME}/api/unit/v2/unit/symbol
    legal_service_endpoint: https://${AZURE_DNS_NAME}/api/legal/v1
    file_service_endpoint: https://${AZURE_DNS_NAME}/api/file/v2
    # Quoted: value contains multiple colons and must stay a plain string.
    DMS_KIND: "opendes:osdudemo:wellbore:1.0.0"
    DMS_ACL: data.test1@opendes.contoso.com
    DMS_LEGAL_TAG: opendes-public-usa-dataset-7643990
    AZURE_STORAGE_CONTAINER: opendes
    aad_client_id: $AZURE_APP_ID
  extends:
    - .azure_variables
  script:
    - mvn clean install -pl $AZURE_PARSER_BUILD_SUBDIR -am
    - mvn clean verify -f $AZURE_TEST_SUBDIR/pom.xml
  only:
    variables:
      - $AZURE == 'true'
  except:
    variables:
      - $AZURE_SKIP_TEST == 'true'