Commit 12fbfbf6 authored by Ankit Sharma [Microsoft]

Merge branch 'master' into azure-integration-test-report-generation

# Conflicts:
#	cloud-providers/azure.yml
parents ee855d5a 9798417a
Pipeline #24002 passed in 9 seconds
compile-and-unit-test:
image: node
tags: ["osdu-medium"]
stage: build
artifacts:
reports:
junit: test-results.xml
cobertura: coverage/cobertura-coverage.xml
paths:
- coverage/
expire_in: 15 days
script:
- npm install -g typescript
- chmod +x devops/scripts/utest_build_run.sh
- devops/scripts/utest_build_run.sh
.aws_mongodb_variables:
variables:
ACCESS_KEY_ID: $AWS_MONGODB_ACCESS_KEY_ID
SECRET_ACCESS_KEY: $AWS_MONGODB_SECRET_ACCESS_KEY
AWS_ACCESS_KEY_ID: $AWS_MONGODB_ACCESS_KEY_ID
AWS_SECRET_ACCESS_KEY: $AWS_MONGODB_SECRET_ACCESS_KEY
INTEGRATION_TEST_DIR: $AWS_MONGODB_TEST_SUBDIR
SERVICE_NAME: $AWS_MONGODB_SERVICE
BUILD_DIR: $AWS_MONGODB_BUILD_SUBDIR
ENVIRONMENT: $AWS_MONGODB_ENVIRONMENT
APPLICATION_NAME: os-$AWS_MONGODB_SERVICE
LOCAL_IMAGE_TAG: $CI_REGISTRY_IMAGE/$CI_COMMIT_REF_SLUG:$CI_COMMIT_SHA
AWS_IMAGE_TAG_BASE: $AWS_MONGODB_ACCOUNT_ID.dkr.ecr.$AWS_MONGODB_REGION.amazonaws.com/os-$AWS_MONGODB_SERVICE
S3_DATA_BUCKET: $AWS_MONGODB_S3_DATA_BUCKET
LEGALTAG_BASE_URL: $AWS_MONGODB_LEGALTAG_BASE_URL
SNS_TOPIC_NAME: $AWS_MONGODB_SNS_TOPIC_NAME
OTHER_RELEVANT_DATA_COUNTRIES: $AWS_MONGODB_OTHER_RELEVANT_DATA_COUNTRIES
LEGAL_TAG: $AWS_MONGODB_LEGAL_TAG
TENANT_NAME: $AWS_MONGODB_TENANT_NAME
PRIVATE_TENANT1: $AWS_MONGODB_DEFAULT_DATA_PARTITION_ID_TENANT1
PRIVATE_TENANT2: tenant2
SHARED_TENANT: $AWS_MONGODB_DEFAULT_DATA_PARTITION_ID_TENANT2
VENDOR: aws
STORAGE_URL: $AWS_MONGODB_STORAGE_URL
DOMAIN: $AWS_MONGODB_TESTING_DOMAIN
LEGAL_URL: $AWS_MONGODB_LEGAL_URL
AWS_COGNITO_CLIENT_ID: $AWS_MONGODB_COGNITO_CLIENT_ID
AWS_COGNITO_USER_POOL_ID: $AWS_MONGODB_COGNITO_USER_POOL_ID
AWS_COGNITO_AUTH_FLOW: $AWS_MONGODB_COGNITO_AUTH_FLOW
AWS_COGNITO_AUTH_PARAMS_PASSWORD: $AWS_MONGODB_COGNITO_AUTH_PARAMS_PASSWORD
AWS_COGNITO_AUTH_PARAMS_USER: $AWS_MONGODB_COGNITO_AUTH_PARAMS_USER
AWS_COGNITO_AUTH_PARAMS_USER_NO_ACCESS: $AWS_MONGODB_COGNITO_AUTH_PARAMS_USER_NO_ACCESS
AWS_ACCOUNT_ID: $AWS_MONGODB_ACCOUNT_ID
AWS_REGION: $AWS_MONGODB_REGION
DEPLOY_ENV: $AWS_MONGODB_DEPLOY_ENV
CACHE_CLUSTER_GROUP_ENDPOINT: $AWS_MONGODB_CACHE_CLUSTER_GROUP_ENDPOINT
CACHE_CLUSTER_GROUP_PORT: $AWS_MONGODB_CACHE_CLUSTER_GROUP_PORT
CACHE_CLUSTER_LEGALTAG_ENDPOINT: $AWS_MONGODB_CACHE_CLUSTER_LEGALTAG_ENDPOINT
CACHE_CLUSTER_LEGALTAG_PORT: $AWS_MONGODB_CACHE_CLUSTER_LEGALTAG_PORT
CACHE_CLUSTER_SCHEMA_ENDPOINT: $AWS_MONGODB_CACHE_CLUSTER_SCHEMA_ENDPOINT
CACHE_CLUSTER_SCHEMA_PORT: $AWS_MONGODB_CACHE_CLUSTER_SCHEMA_PORT
APPLICATION_PORT: $AWS_MONGODB_APPLICATION_PORT
HOST_URL: $AWS_MONGODB_LEGAL_URL
MY_TENANT: $AWS_MONGODB_TENANT_NAME
ENTITLEMENTS_TEST_TENANT: $AWS_MONGODB_ENTITLEMENTS_TEST_TENANT
AWS_S3_ENDPOINT: $AWS_MONGODB_S3_ENDPOINT
AWS_S3_REGION: $AWS_MONGODB_REGION
LOG_LEVEL: INFO
SKIP_HTTP_TESTS: $AWS_MONGODB_SKIP_HTTP_TESTS
S3_LEGAL_CONFIG_BUCKET: $AWS_MONGODB_S3_LEGAL_CONFIG_BUCKET
LEGAL_QUEUE: $AWS_MONGODB_LEGAL_QUEUE
TABLE_PREFIX: $AWS_MONGODB_TABLE_PREFIX
RESOURCE_PREFIX: $AWS_MONGODB_TABLE_PREFIX
DYNAMO_DB_REGION: $AWS_MONGODB_DYNAMO_DB_REGION
DYNAMO_DB_ENDPOINT: $AWS_MONGODB_DYNAMO_DB_ENDPOINT
DELIVERY_INT_TEST_BUCKET_NAME: $AWS_MONGODB_DELIVERY_INT_TEST_BUCKET_NAME
DEFAULT_DATA_PARTITION_ID_TENANT1: $AWS_MONGODB_DEFAULT_DATA_PARTITION_ID_TENANT1
DEFAULT_DATA_PARTITION_ID_TENANT2: $AWS_MONGODB_DEFAULT_DATA_PARTITION_ID_TENANT2
SEARCH_HOST: $AWS_MONGODB_SEARCH_HOST
STORAGE_HOST: $AWS_MONGODB_STORAGE_HOST
LEGAL_HOST: $AWS_MONGODB_LEGAL_HOST
DELIVERY_HOST: $AWS_MONGODB_DELIVERY_HOST
ENTITLEMENTS_DOMAIN: $AWS_MONGODB_ENTITLEMENTS_DOMAIN
ELASTIC_HOST: $AWS_MONGODB_ELASTIC_HOST
DEFAULT_ELASTIC_USER_NAME: es
DEFAULT_ELASTIC_PASSWORD: $AWS_MONGODB_ELASTIC_PASSWORD
ELASTIC_PORT: 443
INDEXER_HOST: $AWS_MONGODB_INDEXER_HOST
AWS_CLUSTER_NAME: $AWS_MONGODB_CLUSTER_NAME
ENTITLEMENTS_URL: $AWS_MONGODB_ENTITLEMENTS_URL
VIRTUAL_SERVICE_HOST_NAME: $AWS_MONGODB_API_GATEWAY_HOST
HOST: $AWS_MONGODB_API_GATEWAY_URL
PARTITION_BASE_URL: $AWS_MONGODB_API_GATEWAY_URL/ #needs trailing slash
WORKFLOW_HOST: $AWS_MONGODB_WORKFLOW_URL
INT_TEST_DAG_NAME: my_first_dag
.aws_mongodb:
tags: ['osdu-medium']
image: $CI_REGISTRY/divido/aws-maven/aws-maven:v1.1
environment:
name: AWS
variables:
MAVEN_REPO_PATH: "$CI_PROJECT_DIR/.m2/repository"
MAVEN_CLI_OPTS: "--batch-mode --settings=$CI_PROJECT_DIR/.mvn/community-maven.settings.xml"
cache:
paths:
- $MAVEN_REPO_PATH
extends:
- .aws_mongodb_variables
before_script:
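# Drop any AWS credentials inherited from the runner environment, then write this account's keys to the default AWS CLI profile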
- unset AWS_ACCESS_KEY_ID
- unset AWS_SECRET_ACCESS_KEY
- mkdir -p ~/.aws
- |
cat > ~/.aws/credentials <<EOF
[default]
aws_access_key_id = $ACCESS_KEY_ID
aws_secret_access_key = $SECRET_ACCESS_KEY
EOF
aws-mongodb-containerize:
extends:
- .aws_mongodb
- .aws_mongodb_variables
stage: containerize
needs: ['compile-and-unit-test']
script:
- docker build -f $BUILD_DIR/Dockerfile -t $LOCAL_IMAGE_TAG .
# Push to the local container registry
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
- docker push $LOCAL_IMAGE_TAG
# Push to Amazon's container registry
- unset AWS_ACCESS_KEY_ID
- unset AWS_SECRET_ACCESS_KEY
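# 'aws ecr get-login' (AWS CLI v1) prints a docker login command; the $(...) wrapper executes it to authenticate with the ECR registry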
- $(aws ecr get-login --no-include-email --region $AWS_REGION)
- docker tag $LOCAL_IMAGE_TAG $AWS_IMAGE_TAG_BASE:$CI_COMMIT_SHA
- docker tag $LOCAL_IMAGE_TAG $AWS_IMAGE_TAG_BASE:latest
- docker push $AWS_IMAGE_TAG_BASE:$CI_COMMIT_SHA
- docker push $AWS_IMAGE_TAG_BASE:latest
only:
variables:
- $AWS == 'true'
aws-mongodb-update-ecs:
extends:
- .aws_mongodb
- .aws_mongodb_variables
stage: deploy
needs: ['aws-mongodb-containerize']
script:
- ECS_SERVICE_NAME=$(aws ssm get-parameter --name ecs-$SERVICE_NAME --query Parameter.Value --output text --region $AWS_REGION)
- aws ecs update-service --cluster osdu-mongo-env-core-cluster --service $ECS_SERVICE_NAME --region $AWS_REGION --force-new-deployment
only:
variables:
- $AWS == 'true'
aws-mongodb-test-java:
extends:
- .aws_mongodb
- .aws_mongodb_variables
stage: integration
needs: ['aws-mongodb-update-ecs']
script:
- ls -ltr
- cd $INTEGRATION_TEST_DIR
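# AWS_MAVEN_TEST_COMMAND_OVERRIDE, when set, replaces the default 'test' goal via shell parameter expansion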
- mvn $MAVEN_CLI_OPTS -Dmaven.repo.local=$MAVEN_REPO_PATH ${AWS_MAVEN_TEST_COMMAND_OVERRIDE:-test} --update-snapshots -Dorg.slf4j.simpleLogger.defaultLogLevel=info
only:
variables:
- $AWS_MONGODB_SKIP_TESTS != 'true' && $AWS == 'true' && ($AWS_INT_TEST_TYPE == 'java' || $AWS_INT_TEST_TYPE == null) #Default if not defined
artifacts:
when: on_failure
paths:
- $INTEGRATION_TEST_DIR
expire_in: 1 week
aws-mongodb-test-python:
extends:
- .aws_mongodb
- .aws_mongodb_variables
stage: integration
needs: ['aws-mongodb-update-ecs']
script:
- ls -ltr
- cd $INTEGRATION_TEST_DIR
- chmod +x ./run-integration-tests.sh
- ./run-integration-tests.sh
only:
variables:
- $AWS_MONGODB_SKIP_TESTS != 'true' && $AWS == 'true' && $AWS_INT_TEST_TYPE == 'python'
artifacts:
when: on_failure
paths:
- $INTEGRATION_TEST_DIR
expire_in: 1 week
@@ -85,7 +85,12 @@
NOTIFICATION_BASE_URL: $AWS_NOTIFICATION_BASE_URL
NOTIFICATION_REGISTER_BASE_URL: $AWS_NOTIFICATION_REGISTER_BASE_URL
FILE_SERVICE_HOST: $AWS_FILE_SERVICE_HOST
STORAGE_BASE_URL: $AWS_API_GATEWAY_URL/api/storage/v2/
DATASET_BASE_URL: $AWS_API_GATEWAY_URL/api/dataset/v1/
PROVIDER_KEY: AWS_S3
LEGAL_BASE_URL: $AWS_LEGAL_URL
ENTITLEMENTS_BASE_URL: $AWS_ENTITLEMENTS_URL
FILEDMS_BASE_URL: $AWS_FILEDMS_BASE_URL
.aws:
tags: ['osdu-medium']
image: $CI_REGISTRY/divido/aws-maven/aws-maven:v1.1
variables:
CHART_PATH: devops/azure/chart
CLOUD_PROVIDER: azure
E2E_ADMIN_EMAIL: integration.test@azureglobal1.onmicrosoft.com
E2E_DATA_PARTITION: opendes
E2E_LEGALTAG01: opendes-public-usa-dataset-7643990
E2E_LEGALTAG02: opendes-dps-integration-test-valid2-legal-tag
E2E_SUBPROJECT: demosandbox01
E2E_SUBPROJECT_LONG_NAME: looooooooooooooooooooooooooooooooooooooooooooooooooooongnaaaaaaaaaaaaaaaaaaaameeeeeee
E2E_TENANT: opendes
POD_IDENTITY: osdu-identity
REDIS_INSTANCE_PORT: 6380
azure_deploy:
tags: ["osdu-medium"]
image: danielscholl/azure-build-image
stage: deploy
needs: ["push_runtime_image_azure"]
variables:
AZURE_KEYVAULT: osdu-svc-properties
DES_URL: ${AZURE_DNS_NAME}
IMAGE: ${AZURE_REGISTRY}.azurecr.io/${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}
SERVICE_NAME: ${AZURE_SERVICE}
TAG: ${CI_COMMIT_SHA}
before_script:
- az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
- az aks get-credentials -g $AZURE_UNIQUE-rg -n $AZURE_UNIQUE-aks
script:
# Replace values in config file
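# helm-config.yaml carries #{NAME}# placeholder tokens; each sed call below substitutes one of them into values.yaml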
- cp ${CHART_PATH}/helm-config.yaml ${CHART_PATH}/values.yaml
- sed -i 's,#{CONTAINER_REGISTRY_NAME}#,'$IMAGE',' ${CHART_PATH}/values.yaml
- sed -i 's,#{DNS_HOST}#,'$DES_URL',' ${CHART_PATH}/values.yaml
- sed -i 's/#{ENVIRONMENT_NAME}#/'$ENVIRONMENT'/' ${CHART_PATH}/values.yaml
- sed -i 's/#{IMAGE_TAG}#/'$TAG'/' ${CHART_PATH}/values.yaml
- sed -i 's/#{KEYVAULT_NAME}#/'$AZURE_KEYVAULT'/' ${CHART_PATH}/values.yaml
- sed -i 's/#{PORT}#/'${PORT}'/' ${CHART_PATH}/values.yaml
- sed -i 's/#{PROVIDER_NAME}#/'$CLOUD_PROVIDER'/' ${CHART_PATH}/values.yaml
- sed -i 's/#{REDIS_HOST}#/'$REDIS_INSTANCE_ADDRESS'/' ${CHART_PATH}/values.yaml
- sed -i 's/#{REDIS_PORT}#/'$REDIS_INSTANCE_PORT'/' ${CHART_PATH}/values.yaml
- sed -i 's/#{REPLICA_COUNT}#/'$REPLICA'/' ${CHART_PATH}/values.yaml
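# Print the state of the current pod for debugging before the chart is upgraded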
- pod=$(kubectl get pod -n osdu|grep $CI_PROJECT_NAME |tail -1 |awk '{print $1}')
- kubectl describe pod $pod -n osdu
# Install helm chart
- helm upgrade $SERVICE_NAME ${CHART_PATH} --install --dry-run --values $CHART_PATH/values.yaml
- helm upgrade $SERVICE_NAME ${CHART_PATH} --install --values $CHART_PATH/values.yaml
# Wait for service to be running to start
- kubectl rollout status deployment.v1.apps/$SERVICE_NAME -n osdu --timeout=900s
- pod=$(kubectl get pod -n osdu|grep $CI_PROJECT_NAME |tail -1 |awk '{print $1}')
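# 'kubectl wait' prints '<pod> condition met' on success; any other output is treated as a failed start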
- status=$(kubectl wait -n osdu --for=condition=Ready pod/$pod --timeout=300s)
- if [[ "$status" != *"met"* ]]; then echo "POD didn't start correctly" ; exit 1 ; fi
only:
variables:
- $AZURE == 'true'
azure_test:
tags: ["osdu-medium"]
image: node
stage: integration
needs: ["azure_deploy"]
variables:
AZURE_AD_APP_RESOURCE_ID: $AZURE_APP_ID
AZURE_AD_TENANT_ID: $AZURE_TENANT_ID
AZURE_TESTER_SERVICEPRINCIPAL_SECRET: $AZURE_PRINCIPAL_SECRET
INTEGRATION_TESTER: $AZURE_PRINCIPAL_ID
script:
- apt-get update
- apt-get install -y python
- apt-get install -y python-pip
- pip install msal
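# azure_jwt_client.py uses the msal package installed above to obtain a service-principal token, which is passed to the e2e suite as the id-token arguments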
- svctoken=$(python devops/scripts/azure_jwt_client.py)
- echo $svctoken
- npm install -g newman
- chmod +x ./tests/e2e/run_e2e_tests.sh
- ./tests/e2e/run_e2e_tests.sh --seistore-svc-url="https://${AZURE_DNS_NAME}/seistore-svc/api/v3" --seistore-svc-api-key="NA" --user-idtoken="$svctoken" --user1-idtoken="$svctoken" --tenant="${E2E_TENANT}" --subproject="${E2E_SUBPROJECT}" --admin-email="${E2E_ADMIN_EMAIL}" --datapartition="${E2E_DATA_PARTITION}" --legaltag01="${E2E_LEGALTAG01}" --legaltag02="${E2E_LEGALTAG02}" --subproject-long-name="${E2E_SUBPROJECT_LONG_NAME}" --VCS-Provider="${ISGITLAB}"
only:
variables:
- $AZURE == 'true'
@@ -31,6 +31,7 @@
LOG_LEVEL: INFO
# Common Section
ENTITLEMENT_URL: https://${AZURE_DNS_NAME}/entitlements/v1/
ENTITLEMENT_V2_URL: https://${AZURE_DNS_NAME}/entitlements/v2/
LEGAL_URL: https://${AZURE_DNS_NAME}/api/legal/v1/
STORAGE_URL: https://${AZURE_DNS_NAME}/api/storage/v2/
SEARCH_URL: https://${AZURE_DNS_NAME}/api/search/v2/
@@ -43,7 +44,7 @@
AZURE_AD_APP_RESOURCE_ID: $AZURE_APP_ID
AZURE_STORAGE_ACCOUNT: ${AZURE_BASE}data
MY_TENANT: opendes
-SHARED_TENANT: common
+SHARED_TENANT: opendes
DOMAIN: contoso.com
ELASTIC_HOST: $AZURE_ELASTIC_HOST
ELASTIC_PORT: 9243
@@ -181,6 +182,18 @@ azure_deploy:
- az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
- az aks get-credentials -g $AZURE_UNIQUE-rg -n $AZURE_UNIQUE-aks
script:
# CRS Conversion Service: upload the SIS_DATA dataset to the Azure file share
- |
search_dir="apachesis_setup/SIS_DATA"
if [ -d "$search_dir" ]; then
echo "Starting to upload files for CRS Conversion Service"
accountKey=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountkey}' | base64 -d)
accountName=$(kubectl get secret airflow -n osdu -o jsonpath='{.data.azurestorageaccountname}' | base64 -d)
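# Upload every file under SIS_DATA to the $SHARE_NAME Azure file share; the null-delimited find/read pair handles file names with spaces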
find "$search_dir/" -type f -print0 | while read -d $'\0' file; do
az storage file upload --account-name $accountName --account-key $accountKey --share-name $SHARE_NAME --source "$file"
done
echo "File upload successfully completed for CRS Conversion Service"
fi
- cd devops/azure
- echo "--set image.branch=$BRANCH --set image.tag=$TAG"
@@ -202,6 +215,9 @@ azure_deploy:
only:
variables:
- $AZURE == 'true'
except:
variables:
- $AZURE_SKIP_DEPLOY == 'true'
azure_test:
image: $CI_REGISTRY/danielscholl/azure-maven/azure-maven:v1.0
@@ -224,7 +240,7 @@ azure_test:
- $AZURE_TEST_SUBDIR/target/*/TEST-*.xml
only:
variables:
-- $AZURE == 'true' && $AZURE_TEST_TYPE != 'python'
+- $AZURE == 'true' && $AZURE_SKIP_DEPLOY != 'true' && $AZURE_TEST_TYPE != 'python'
except:
variables:
- $AZURE_SKIP_TEST == 'true'
@@ -243,7 +259,7 @@ azure_test_py:
./run-integration-tests.sh
only:
variables:
-- $AZURE == 'true' && $AZURE_TEST_TYPE == 'python'
+- $AZURE == 'true' && $AZURE_SKIP_DEPLOY != 'true' && $AZURE_TEST_TYPE == 'python'
except:
variables:
- $AZURE_SKIP_TEST == 'true'
variables:
REDIS_INSTANCE_PORT: 6380
gcp_deploy:
image: google/cloud-sdk
stage: deploy
needs: ["push_runtime_image_gcp"]
variables:
AZURE_KEYVAULT: osdu-svc-properties
ENTITLEMENT_URL: ${AZURE_DNS_NAME}/entitlements/v1
IMAGE: ${AZURE_REGISTRY}.azurecr.io/${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}
SERVICE_NAME: ${AZURE_SERVICE}
TAG: ${CI_COMMIT_SHA}
before_script:
- gcloud auth activate-service-account --key-file="$GCP_DEPLOY_FILE"
script:
# Replace values in config file
- gcloud config set project $GCP_PROJECT
only:
variables:
- $GCP == 'true'
gcp_test:
image: node
stage: integration
needs: ["gcp_deploy"]
script:
# Replace values in config file
- echo "test"
only:
variables:
- $GCP == 'true'
variables:
REDIS_INSTANCE_PORT: 6380
ibm_deploy:
image: openshift/origin-cli
stage: deploy
needs: ["compile-and-unit-test"]
before_script:
- oc login $IBM_OPENSHIFT_URL -u apikey -p $IBM_OPENSHIFT_TOKEN
script:
# Replace values in config file
- oc project $IBM_OPENSHIFT_PROJECT
only:
variables:
- $IBM == 'true'
ibm_test:
image: node
stage: integration
needs: ["ibm_deploy"]
script:
# Replace values in config file
- echo "test"
only:
variables:
- $IBM == 'true'
@@ -43,7 +43,7 @@
OSDU_GCP_SEARCH_QUERY_URL: https://os-search-attcrcktoa-uc.a.run.app/api/search/v2/query/
OSDU_GCP_INDEXER_HOST: https://os-indexer-attcrcktoa-uc.a.run.app/
OSDU_GCP_INDEXER_HOST_SEARCH: https://os-indexer-attcrcktoa-uc.a.run.app/api/indexer/v2/
-OSDU_GCP_FILE_URL: https://os-file-attcrcktoa-uc.a.run.app/
+OSDU_GCP_FILE_URL: https://os-file-attcrcktoa-uc.a.run.app
OSDU_GCP_NOTIFICATION_URL: https://os-notification-attcrcktoa-uc.a.run.app/
OSDU_GCP_STORAGE_QUERY_RECORD_HOST: https://os-storage-attcrcktoa-uc.a.run.app/api/storage/v2/query/records
OSDU_GCP_STORAGE_SCHEMA_HOST: https://os-storage-attcrcktoa-uc.a.run.app/api/storage/v2/schemas
@@ -73,10 +73,13 @@
OTHER_RELEVANT_DATA_COUNTRIES: US
DEFAULT_DATA_PARTITION_ID_TENANT1: osdu
GCLOUD_PROJECT: nice-etching-277309
ACL_OWNERS: data.test1
ACL_VIEWERS: data.test1
# variables for storage tests
TENANT_NAME: osdu
# variables for workflow tests
FINISHED_WORKFLOW_ID: fad778da-fbc4-4261-8b3e-deb48be44969
OSDU_GCP_TEST_DAG_NAME: airflow_monitoring
# variables for ingestion tests
TEST_OSDU_FILE_PATH: gs://nice-etching-277309-file/r1/data/provided/well-logs/1013_akm11_1978_comp.las
# variables for file tests
@@ -85,6 +88,7 @@
USER_ID: common-user
TIME_ZONE: UTC+0
FILE_BUCKET: nice-etching-277309-file
CLOUD_VENDOR: gcp
# variables for delivery tests
ENTITLEMENTS_DOMAIN: osdu-gcp.go3-nrg.projects.epam.com
TENANT: osdu
@@ -126,6 +130,8 @@
PRIVATE_TENANT1: osdu
PRIVATE_TENANT2: osdu
SHARED_TENANT: osdu
# variables for partition tests
PARTITION_BASE_URL: https://os-partition-attcrcktoa-uc.a.run.app/
osdu-gcp-containerize:
stage: containerize
@@ -196,6 +202,7 @@ osdu-gcp-test:
HOST_URL: $OSDU_GCP_LEGAL_HOST_URL
INDEXER_HOST: $OSDU_GCP_INDEXER_HOST_SEARCH
OSDU_GCP_TESTING_DIR: testing
TEST_DAG_NAME: $OSDU_GCP_TEST_DAG_NAME
script:
- export GCP_DEPLOY_FILE=$(echo $OSDU_GCP_DEPLOY_FILE)
- >
push_runtime_image:
image: docker:19.03.12
services:
- docker:19.03.12-dind
tags: ["osdu-medium"]
stage: containerize
variables:
SHA_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
LATEST_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:latest
before_script:
- docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
script:
# Gitlab Container Registry
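# Two-stage build: builder.Dockerfile produces the builder_sdms image, which runtime.Dockerfile consumes via the docker_builder_image build-arg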
- docker build -t builder_sdms:latest --file docker/builder.Dockerfile .
- docker build -t $UTEST_RUNTIME_IMAGE --file docker/runtime.Dockerfile --build-arg docker_builder_image=builder_sdms .
- docker tag $UTEST_RUNTIME_IMAGE $CI_REGISTRY_IMAGE/$SHA_IMAGE
- docker push $CI_REGISTRY_IMAGE/$SHA_IMAGE
- docker tag $UTEST_RUNTIME_IMAGE $CI_REGISTRY_IMAGE/$LATEST_IMAGE
- docker push $CI_REGISTRY_IMAGE/$LATEST_IMAGE
push_runtime_image_azure:
image: danielscholl/azure-build-image
tags: ["osdu-medium"]
stage: containerize
variables:
SHA_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
LATEST_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:latest
before_script:
- az login --service-principal -u $AZURE_PRINCIPAL_ID -p $AZURE_PRINCIPAL_SECRET --tenant $AZURE_TENANT_ID
- az acr login -n $AZURE_REGISTRY
script:
# Azure Container Registry
- docker build -t builder_sdms:latest --file docker/builder.Dockerfile .
- docker build -t $UTEST_RUNTIME_IMAGE --file docker/runtime.Dockerfile --build-arg docker_builder_image=builder_sdms .
- docker tag $UTEST_RUNTIME_IMAGE ${AZURE_REGISTRY}.azurecr.io/$SHA_IMAGE
- docker push ${AZURE_REGISTRY}.azurecr.io/$SHA_IMAGE
- docker tag $UTEST_RUNTIME_IMAGE ${AZURE_REGISTRY}.azurecr.io/$LATEST_IMAGE
- docker push ${AZURE_REGISTRY}.azurecr.io/$LATEST_IMAGE
only:
variables:
- $AZURE == 'true'
push_runtime_image_gcp:
image: gcr.io/google.com/cloudsdktool/cloud-sdk
tags: ["osdu-medium"]
stage: containerize
variables:
SHA_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:${CI_COMMIT_SHA}
LATEST_IMAGE: ${CI_PROJECT_NAME}-${CI_COMMIT_REF_SLUG}:latest
before_script:
- gcloud auth activate-service-account --key-file="$GCP_DEPLOY_FILE"
- gcloud config set project $GCP_PROJECT
script:
# GCP Container Registry
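# Cloud Build runs the build remotely; --substitutions pass the service, application name, provider, commit SHA and port into cloudbuild.yaml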
- gcloud builds submit --config provider/$GCP_VENDOR/cloudbuild/cloudbuild.yaml --substitutions=_GCP_SERVICE=$GCP_SERVICE,_APPLICATION_NAME=$GCP_APPLICATION_NAME,_PROVIDER_NAME=$GCP_VENDOR,_SHORT_SHA=$CI_COMMIT_SHORT_SHA,_PORT=$PORT
only:
variables:
- $GCP == 'true'
include:
- local: 'scanners/fossa-global.yml'
# --------------------------------------------------------------------------------
fossa-analyze:
image: node
stage: scan
needs: ['compile-and-unit-test']
tags: ['osdu-medium']
variables:
fossa_url: https://raw.githubusercontent.com/fossas/fossa-cli/master/install.sh
fossa_url_setting: "Cache-Control: no-cache"
only:
variables:
- $FOSSA_API_KEY
script:
# fossa-check-for-licensing-issues needs a CI_COMMIT_BRANCH defined to know how to parse the FOSSA API results
# When building tags, this isn't defined by GitLab. In that case, we use the tag name instead. If that's not defined
# then things will fail and we'll have to make this smarter
- test -z "$CI_COMMIT_BRANCH" && export CI_COMMIT_BRANCH="$CI_COMMIT_TAG"
- curl -H "${fossa_url_setting}" "${fossa_url}" | bash
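# npm install --production materializes the runtime dependency tree that FOSSA analyzes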
- npm install --production
- fossa analyze --project "${CI_PROJECT_TITLE}" --project-url "${CI_PROJECT_URL}" --branch "${CI_COMMIT_BRANCH}"
@@ -3,6 +3,7 @@ include:
- local: 'build/maven.yml'
- local: 'scanners/gitlab-ultimate.yml'
- local: 'cloud-providers/aws.yml'
- local: 'cloud-providers/aws-mongodb.yml'
- local: 'cloud-providers/gcp.yml'
- local: 'cloud-providers/ibm.yml'
- local: 'scanners/fossa.yml'