Commit d8d52a64 authored by Komal Makkar's avatar Komal Makkar
Browse files

Merge branch 'master' of...

Merge branch 'master' of https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning into users/komakkar/automationFix
parents 4bde9ca0 95b5c92e
Pipeline #29660 passed with stages
in 27 seconds
......@@ -5,7 +5,7 @@
k8-gitops-manifests/
osdu-charts/
custom.tfvars
custom*.tfvars
dev.tfvars
.envrc*
......
......@@ -5,6 +5,14 @@
__Bug Fixes__
- [Bug 119 - Add Airflow python package `python-keycloak`](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning/-/issues/119)
__Infra Changes__
- [Arch 123 - Obsolete Cosmos DB Tables and add new v2 tables](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning/-/issues/123)
- [Issue 127](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning/-/issues/127)
- [Feature 126 - Add support to pass JAVA_OPTS to java command in DockerFile](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning/-/issues/126)
- [Feature 125 - Enable Manifest Ingestion](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning/-/issues/125)
# v0.5 (2021-2-11)
__Service Onboarded__
......
......@@ -192,10 +192,9 @@ __Azure AD Admin Consent__
For more information on Azure identity and authorization, see the official Microsoft documentation [here](https://docs.microsoft.com/en-us/azure/active-directory/develop/v2-permissions-and-consent).
## Elastic Search Setup
Infrastructure requires a bring your own Elastic Search Instance of a version of 6.8.x with a valid https endpoint and the access information must now be stored in the Common KeyVault. The recommended method of Elastic Search is to use the [Elastic Cloud Managed Service from the Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/elastic.ec-azure?tab=Overview).
Infrastructure requires a bring your own Elastic Search Instance of a version of 7.x (ie: 7.11.1) with a valid https endpoint and the access information must now be stored in the Common KeyVault. The recommended method of Elastic Search is to use the [Elastic Cloud Managed Service from the Marketplace](https://azuremarketplace.microsoft.com/en-us/marketplace/apps/elastic.ec-azure?tab=Overview).
> Note: Elastic Cloud Managed Service requires a Credit Card to be associated to the subscription for billing purposes.
......@@ -221,6 +220,10 @@ EOF
cp .envrc .envrc_${UNIQUE}
```
## Configure Back Up
Backup is enabled by default. To set the backup policies, utilize the script
[here](https://community.opengroup.org/osdu/platform/deployment-and-operations/infra-azure-provisioning/-/tree/master/tools).
The script should be run whenever you bring up a Resource Group in your deployment.
## Configure Key Access in Manifest Repository
......@@ -260,6 +263,8 @@ __Manual Installation__
1. Register your partition with the Data Partition API by following the instructions [here](./tools/rest/README.md) to configure your IDE to make authenticated requests to your OSDU instance and send the API request located [here](./tools/rest/partition.http) (createPartition).
1. Load Service Data following directions [here](./docs/service-data.md).
__Automated Pipeline Installation__
......
......@@ -331,6 +331,7 @@ git clone https://community.opengroup.org/osdu/platform/system/schema-service.gi
git clone https://community.opengroup.org/osdu/platform/data-flow/ingestion/ingestion-workflow.git $SRC_DIR/ingestion-workflow
git clone https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/seismic/seismic-dms-suite/seismic-store-service.git $SRC_DIR/seismic-store-service
git clone https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/wellbore/wellbore-domain-services.git $SRC_DIR/wellbore-domain-services
git clone https://community.opengroup.org/osdu/platform/data-flow/ingestion/ingestion-service.git $SRC_DIR/ingestion-service
```
__Additional Manual Steps__
......@@ -433,7 +434,8 @@ SERVICE_LIST="infra-azure-provisioning \
register \
notification \
schema-service \
ingestion-workflow"
ingestion-workflow \
ingestion-service"
for SERVICE in $SERVICE_LIST;
do
......
......@@ -99,3 +99,7 @@ spec:
serviceName: seismic-store-service
servicePort: 80
path: /seistore-svc/api/v3/*
- backend:
serviceName: ingestion-service
servicePort: 80
path: /api/ingestion/*
\ No newline at end of file
# EnvoyFilter: strips any client-supplied x-user-id / x-app-id headers on every
# sidecar-inbound request in the osdu namespace, so callers cannot spoof
# identity headers. Companion filters (inserted after this one) re-add the
# headers from verified JWT claims.
# NOTE(review): indentation restored to conventional k8s YAML; the scraped
# source had leading whitespace stripped.
apiVersion: networking.istio.io/v1alpha3
kind: EnvoyFilter
metadata:
  name: header-remove-user-appid-from-default
  namespace: osdu
spec:
  configPatches:
  - applyTo: HTTP_FILTER
    match:
      context: SIDECAR_INBOUND
      listener:
        filterChain:
          filter:
            name: envoy.http_connection_manager
            subFilter:
              name: envoy.router
    patch:
      # Insert ahead of the router filter so headers are scrubbed before
      # the request is routed to the application container.
      operation: INSERT_BEFORE
      value:
        name: envoy.lua.remove-user-appid-header
        typed_config:
          "@type": "type.googleapis.com/envoy.config.filter.http.lua.v2.Lua"
          inlineCode: |
            function envoy_on_request(request_handle)
              request_handle:headers():remove("x-user-id")
              request_handle:headers():remove("x-app-id")
            end
---
# EnvoyFilter: after JWT verification, derives x-app-id / x-user-id request
# headers from claims of tokens issued by the Azure AD v1 issuer
# (sts.windows.net/<tenant>). x-app-id is taken from the "aud" claim;
# x-user-id falls back through upn -> unique_name -> appid, so app-only
# (client-credential) tokens still yield an identity.
# NOTE(review): indentation restored to conventional k8s YAML; the scraped
# source had leading whitespace stripped.
apiVersion: networking.istio.io/v1alpha3
kind: EnvoyFilter
metadata:
  name: header-add-user-from-msft-aad-token
  namespace: osdu
spec:
  configPatches:
  - applyTo: HTTP_FILTER
    match:
      context: SIDECAR_INBOUND
      listener:
        filterChain:
          filter:
            name: envoy.http_connection_manager
            subFilter:
              # Anchor on the header-scrubbing Lua filter so this filter runs
              # only after client-supplied headers have been removed.
              name: envoy.lua.remove-user-appid-header
    patch:
      operation: INSERT_AFTER
      value:
        name: envoy.lua.user-from-msft-aad-token
        typed_config:
          "@type": "type.googleapis.com/envoy.config.filter.http.lua.v2.Lua"
          # Verified claims are read from the jwt_authn filter's dynamic
          # metadata, keyed by issuer URL (templated with the AAD tenant id).
          inlineCode: |
            msft_issuer = "https://sts.windows.net/{{ .Values.global.azure.tenant }}/"
            function envoy_on_request(request_handle)
              local jwt_authn = request_handle:streamInfo():dynamicMetadata():get("envoy.filters.http.jwt_authn")
              if jwt_authn then
                if jwt_authn[msft_issuer] then
                  request_handle:headers():add("x-app-id", jwt_authn[msft_issuer]["aud"])
                  if jwt_authn[msft_issuer]["upn"] then
                    request_handle:headers():add("x-user-id", jwt_authn[msft_issuer]["upn"])
                  elseif jwt_authn[msft_issuer]["unique_name"] then
                    request_handle:headers():add("x-user-id", jwt_authn[msft_issuer]["unique_name"])
                  elseif jwt_authn[msft_issuer]["appid"] then
                    request_handle:headers():add("x-user-id", jwt_authn[msft_issuer]["appid"])
                  end
                end
              end
            end
---
# EnvoyFilter: counterpart to header-add-user-from-msft-aad-token for tokens
# issued by the Azure AD v2 issuer (login.microsoftonline.com/<tenant>/v2.0).
# x-app-id is taken from the "aud" claim; x-user-id falls back through
# oid -> azp (v2 tokens carry oid/azp rather than upn/appid).
# NOTE(review): indentation restored to conventional k8s YAML; the scraped
# source had leading whitespace stripped.
apiVersion: networking.istio.io/v1alpha3
kind: EnvoyFilter
metadata:
  name: header-add-user-from-msftonline-token
  namespace: osdu
spec:
  configPatches:
  - applyTo: HTTP_FILTER
    match:
      context: SIDECAR_INBOUND
      listener:
        filterChain:
          filter:
            name: envoy.http_connection_manager
            subFilter:
              # Anchor on the header-scrubbing Lua filter so this filter runs
              # only after client-supplied headers have been removed.
              name: envoy.lua.remove-user-appid-header
    patch:
      operation: INSERT_AFTER
      value:
        name: envoy.lua.user-from-msftonline-token
        typed_config:
          "@type": "type.googleapis.com/envoy.config.filter.http.lua.v2.Lua"
          # Verified claims are read from the jwt_authn filter's dynamic
          # metadata, keyed by issuer URL (templated with the AAD tenant id).
          inlineCode: |
            msft_issuer = "https://login.microsoftonline.com/{{ .Values.global.azure.tenant }}/v2.0"
            function envoy_on_request(request_handle)
              local jwt_authn = request_handle:streamInfo():dynamicMetadata():get("envoy.filters.http.jwt_authn")
              if jwt_authn then
                if jwt_authn[msft_issuer] then
                  request_handle:headers():add("x-app-id", jwt_authn[msft_issuer]["aud"])
                  if jwt_authn[msft_issuer]["oid"] then
                    request_handle:headers():add("x-user-id", jwt_authn[msft_issuer]["oid"])
                  elseif jwt_authn[msft_issuer]["azp"] then
                    request_handle:headers():add("x-user-id", jwt_authn[msft_issuer]["azp"])
                  end
                end
              end
            end
......@@ -43,7 +43,7 @@ steps:
VOLUME /tmp
ARG JAR_FILE
COPY ${JAR_FILE} app.jar
ENTRYPOINT ["java","-jar","/app.jar"]' > Dockerfile
ENTRYPOINT exec java ${JAVA_OPTS} -jar /app.jar' > Dockerfile
}
curl -L https://aka.ms/acr/installaad/bash | /bin/bash
......
......@@ -4,7 +4,7 @@ Date: 2021-02-11
## Status
Proposed
Approved
## Context
......
# 4. Add environment template for dashboard
Date: 2021-02-16
## Status
Accepted
## Context
A dashboard is used for monitoring other resources or pinning them on a tile for quick access. Dashboards reference frequently the fully qualified ID of the resource,
including the resource name and resource type. Format: `/subscriptions/{guid}/resourceGroups/{resource-group-name}/{resource-provider-namespace}/{resource-type}/{resource-name}`. By virtue of this, it requires those resource names to exist and be passed to it at the time of creation. Resources are created with 3 separate terraform templates that also have a creation order.
1. Central Resources -- No Dependencies.
2. Data Partition -- Depends on Central Resources
3. Service Resources -- Depends on Central Resources
A dashboard created by Terraform would be required to be created last in order to bring in the state of other templates to have available the naming conventions, or it could be a fully independent item and naming conventions then submitted as criteria.
## Decision
A new terraform template will be created for dashboards.
## Consequences
Dashboards will be an opt in feature.
Dashboards can only be created after resources exist.
Dashboards will break and have to be updated if the resourceId of the resources displayed are changed.
......@@ -27,6 +27,7 @@ Empty repositories need to be created that will be used by a pipeline to mirror
| ingestion-workflow | https://community.opengroup.org/osdu/platform/data-flow/ingestion/ingestion-workflow.git |
| seismic-store-service | https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/seismic/seismic-dms-suite/seismic-store-service.git |
| wellbore-domain-services | https://community.opengroup.org/osdu/platform/domain-data-mgmt-services/wellbore/wellbore-domain-services.git |
| ingestion-service | https://community.opengroup.org/osdu/platform/data-flow/ingestion/ingestion-service.git |
```bash
export ADO_ORGANIZATION=<organization_name>
......@@ -55,7 +56,8 @@ SERVICE_LIST="infra-azure-provisioning \
schema-service \
ingestion-workflow \
seismic-store-service \
wellbore-domain-services"
wellbore-domain-services \
ingestion-service"
for SERVICE in $SERVICE_LIST;
......@@ -95,6 +97,7 @@ Variable Group Name: `Mirror Variables`
| INGESTION_WORKFLOW_REPO | https://dev.azure.com/osdu-demo/osdu/_git/ingestion-workflow |
| SEISMIC_STORE_SERVICE_REPO | https://dev.azure.com/osdu-demo/osdu/_git/seismic-store-service |
| WELLBORE_DOMAIN_SERVICSE_REPO | https://dev.azure.com/osdu-demo/osdu/_git/wellbore-domain-services |
| INGESTION_SERVICE_REPO | https://dev.azure.com/osdu-demo/osdu/_git/ingestion-service |
| ACCESS_TOKEN | <your_personal_access_token> |
......@@ -129,6 +132,7 @@ az pipelines variable-group create \
INGESTION_WORKFLOW_REPO=https://dev.azure.com/${ADO_ORGANIZATION}/$ADO_PROJECT/_git/ingestion-workflow \
SEISMIC_STORE_SERVICE_REPO=https://dev.azure.com/${ADO_ORGANIZATION}/$ADO_PROJECT/_git/seismic-store-service \
WELLBORE_DOMAIN_SERVICSE_REPO=https://dev.azure.com/${ADO_ORGANIZATION}/$ADO_PROJECT/_git/wellbore-domain-services \
INGESTION_SERVICE_REPO=https://dev.azure.com/${ADO_ORGANIZATION}/$ADO_PROJECT/_git/ingestion-service \
ACCESS_TOKEN=$ACCESS_TOKEN \
-ojson
```
......@@ -324,6 +328,13 @@ jobs:
destinationGitRepositoryUri: '$(WELLBORE_DOMAIN_SERVICSE_REPO)'
destinationGitRepositoryPersonalAccessToken: $(ACCESS_TOKEN)
- task: swellaby.mirror-git-repository.mirror-git-repository-vsts-task.mirror-git-repository-vsts-task@1
displayName: 'ingestion-service'
inputs:
sourceGitRepositoryUri: 'https://community.opengroup.org/osdu/platform/data-flow/ingestion/ingestion-service.git'
destinationGitRepositoryUri: '$(INGESTION_SERVICE_REPO)'
destinationGitRepositoryPersonalAccessToken: $(ACCESS_TOKEN)
EOF
......
......@@ -62,3 +62,27 @@ az storage file upload-batch \
--source ${PROJECT_FOLDER} \
--pattern ${SOURCE_FOLDER}
```
_Ingest Manifest DAGS_
```bash
FILE_SHARE="airflowdags"
PROJECT_FOLDER=$(realpath ../ingestion-dags/src)
GROUP=$(az group list --query "[?contains(name, 'cr${UNIQUE}')].name" -otsv)
ENV_VAULT=$(az keyvault list --resource-group $GROUP --query [].name -otsv)
az storage file upload-batch \
--account-name $(az keyvault secret show --id https://${ENV_VAULT}.vault.azure.net/secrets/airflow-storage --query value -otsv) \
--account-key $(az keyvault secret show --id https://${ENV_VAULT}.vault.azure.net/secrets/airflow-storage-key --query value -otsv) \
--destination $FILE_SHARE \
--source ${PROJECT_FOLDER} \
--pattern "*.ini"
az storage file upload-batch \
--account-name $(az keyvault secret show --id https://${ENV_VAULT}.vault.azure.net/secrets/airflow-storage --query value -otsv) \
--account-key $(az keyvault secret show --id https://${ENV_VAULT}.vault.azure.net/secrets/airflow-storage-key --query value -otsv) \
--destination $FILE_SHARE \
--source ${PROJECT_FOLDER} \
--pattern "*.py"
```
......@@ -77,7 +77,7 @@ ENVIRONMENT="demo"
REGION="centralus"
REGION_PAIR="eastus2"
PARTITION_NAME="opendes"
ELASTIC_VERSION="6.8.12"
ELASTIC_VERSION="7.11.1"
GIT_REPO=git@ssh.dev.azure.com:v3/${ADO_ORGANIZATION}/${ADO_PROJECT}/k8-gitops-manifests
az pipelines variable-group create \
......
......@@ -44,6 +44,7 @@ This variable group will be used to hold the common values for the services to b
| NOTIFICATION_BASE_URL | `https://<your_fqdn>/api/notification/v1/` |
| REGISTER_CUSTOM_PUSH_URL_HMAC | `https://<your_fqdn>/api/register/v1/test/challenge/1`|
| AGENT_IMAGE | `ubuntu-latest` |
| PROVIDER_NAME | `azure` |
```bash
......@@ -51,6 +52,7 @@ ADMIN_EMAIL="<your_cert_admin>" # ie: admin@email.com
DNS_HOST="<your_ingress_hostname>" # ie: osdu.contoso.com
SERVICE_CONNECTION_NAME=osdu-mvp-$UNIQUE
INVALID_TOKEN="<an_invalid_token>"
PROVIDER_NAME=azure
az pipelines variable-group create \
--name "Azure - OSDU" \
......@@ -94,6 +96,7 @@ az pipelines variable-group create \
NOTIFICATION_BASE_URL="https://${DNS_HOST}/api/notification/v1/" \
REGISTER_CUSTOM_PUSH_URL_HMAC="https://${DNS_HOST}/api/register/v1/test/challenge/1" \
AGENT_IMAGE="ubuntu-latest" \
PROVIDER_NAME="$PROVIDER_NAME" \
-ojson
```
......@@ -138,7 +141,6 @@ This variable group will be used to hold the specific environment values necessa
| INTEGRATION_TESTER | `$(app-dev-sp-username)` |
| MY_TENANT | `opendes` |
| LEGAL_TAG | `opendes-public-usa-dataset-7643990` |
| PROVIDER_NAME | `azure` |
| REDIS_PORT | `6380` |
| STORAGE_ACCOUNT | `$(opendes-storage)` |
| STORAGE_ACCOUNT_KEY | `$(opendes-storage-key)` |
......@@ -156,7 +158,6 @@ DATA_PARTITION_NAME=opendes
LEGAL_TAG=opendes-public-usa-dataset-7643990
DNS_HOST="<your_ingress_hostname>" # ie: osdu.contoso.com
ENVIRONMENT_NAME=$UNIQUE
PROVIDER_NAME=azure
REDIS_PORT="6380"
......@@ -180,7 +181,6 @@ az pipelines variable-group create \
INTEGRATION_TESTER='$(app-dev-sp-username)' \
MY_TENANT="$DATA_PARTITION_NAME" \
LEGAL_TAG="$LEGAL_TAG" \
PROVIDER_NAME="$PROVIDER_NAME" \
REDIS_PORT="$REDIS_PORT" \
STORAGE_ACCOUNT='$('${DATA_PARTITION_NAME}'-storage)' \
STORAGE_ACCOUNT_KEY='$('${DATA_PARTITION_NAME}'-storage-key)' \
......@@ -627,8 +627,9 @@ This variable group is the service specific variables necessary for testing and
| PORT | `80` |
| REPLICA_COUNT | `1` |
| serviceUrlSuffix | `seistore-svc/api/v3` |
| utest.mount.dir | `/service` |
| utest.runtime.image | `seistore-svc-runtime` |
| hldRegPath | `providers/azure/hld-registry` |
| utest_mount_dir | `/service` |
| utest_runtime_image | `seistore-svc-runtime` |
```bash
e2eAdminEmail="<your_cert_admin>" # ie: admin@email.com
......@@ -641,8 +642,9 @@ e2eTenant=opendes
PORT="80"
REPLICA_COUNT="1"
serviceUrlSuffix="seistore-svc/api/v3"
utest.mount.dir="/service"
utest.runtime.image=seistore-svc-runtime
hldRegPath="providers/azure/hld-registry"
utest_mount_dir="/service"
utest_runtime_image=seistore-svc-runtime
az pipelines variable-group create \
--name "Azure Service Release - seismic-store-service" \
......@@ -658,11 +660,35 @@ az pipelines variable-group create \
PORT='${PORT}' \
REPLICA_COUNT='${REPLICA_COUNT}' \
serviceUrlSuffix='${serviceUrlSuffix}' \
utest.mount.dir='${utest.mount.dir}' \
utest.runtime.image=${utest.runtime.image} \
hldRegPath='${hldRegPath}' \
utest_mount_dir='${utest_mount_dir}' \
utest_runtime_image=${utest_runtime_image} \
-ojson
```
__Setup and Configure the ADO Library `Azure Service Release - ingestion-service`__
This variable group is the service specific variables necessary for testing and deploying the `ingestion` service.
| Variable | Value |
|----------|-------|
| MAVEN_DEPLOY_POM_FILE_PATH | `drop/provider/ingest-azure` |
No Test Path is needed since the service has python tests
```bash
az pipelines variable-group create \
--name "Azure Service Release - ingestion-service" \
--authorize true \
--variables \
MAVEN_DEPLOY_POM_FILE_PATH="drop/provider/ingest-azure" \
MAVEN_INTEGRATION_TEST_OPTIONS='-DargLine=""' \
MAVEN_INTEGRATION_TEST_POM_FILE_PATH="drop/deploy/testing/ingest-test-azurepom.xml" \
SERVICE_RESOURCE_NAME='$(AZURE_INGESTION_SERVICE_NAME)' \
-ojson
```
__Create the Chart Pipelines__
Create the pipelines and run things in this exact order.
......@@ -1059,4 +1085,20 @@ az pipelines create \
--repository-type tfsgit \
--yaml-path /devops/azure/pipeline.yml \
-ojson
```
21. Add a Pipeline for __ingestion-service__ to deploy the Ingestion Service.
_Repo:_ `ingestion-service`
_Path:_ `/devops/azure/pipeline.yml`
_Validate:_ https://<your_dns_name>/api/ingestion/docs is alive.
```bash
az pipelines create \
--name 'ingestion-service' \
--repository ingestion-service \
--branch master \
--repository-type tfsgit \
--yaml-path /devops/azure/pipeline.yml \
-ojson
```
\ No newline at end of file
# Load Service Data
## Service Schema Loading
Schema Service has standard shared schemas that have to be loaded.
```bash
# Setup Variables
UNIQUE="<your_osdu_unique>" # ie: demo
AZURE_DNS_NAME="<your_osdu_fqdn>" # ie: osdu-$UNIQUE.contoso.com
DATA_PARTITION="<your_partition>" # ie:opendes
# This logs your local Azure CLI in using the configured service principal.
az login --service-principal -u $ARM_CLIENT_ID -p $ARM_CLIENT_SECRET --tenant $ARM_TENANT_ID
GROUP=$(az group list --query "[?contains(name, 'cr${UNIQUE}')].name" -otsv)
ENV_VAULT=$(az keyvault list --resource-group $GROUP --query [].name -otsv)
cat > .env << EOF
DATA_PARTITION=$DATA_PARTITION
AZURE_TENANT_ID=$ARM_TENANT_ID
AZURE_DNS_NAME=$AZURE_DNS_NAME
AZURE_AD_APP_RESOURCE_ID=$(az keyvault secret show --id https://${ENV_VAULT}.vault.azure.net/secrets/aad-client-id --query value -otsv)
AZURE_CLIENT_ID=$(az keyvault secret show --id https://${ENV_VAULT}.vault.azure.net/secrets/app-dev-sp-username --query value -otsv)
AZURE_CLIENT_SECRET=$(az keyvault secret show --id https://${ENV_VAULT}.vault.azure.net/secrets/app-dev-sp-password --query value -otsv)
EOF
# Execute container to load the schema's
docker run --env-file .env msosdu.azurecr.io/osdu-azure-core-load:latest
```
......@@ -34,4 +34,144 @@ cosmos_databases = [
}
]
blob_cors_rule = [
{
allowed_headers = ["*"]
allowed_origins = ["https://osdu-demo.contoso.org"]
allowed_methods = ["GET","HEAD","POST","PUT","DELETE"]
exposed_headers = ["*"]
max_age_in_seconds = 3600
}
]
cosmos_sql_collections = [
{
name = "LegalTag"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "StorageRecord"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "StorageSchema"
database_name = "osdu-db"
partition_key_path = "/kind"
partition_key_version = null
},
{
name = "TenantInfo"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "UserInfo"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "Authority"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "EntityType"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "SchemaInfo"
database_name = "osdu-db"
partition_key_path = "/partitionId"
partition_key_version = null
},
{
name = "Source"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "RegisterAction"
database_name = "osdu-db"
partition_key_path = "/dataPartitionId"
partition_key_version = null
},
{
name = "RegisterDdms"
database_name = "osdu-db"
partition_key_path = "/dataPartitionId"
partition_key_version = null
},
{
name = "RegisterSubscription"
database_name = "osdu-db"
partition_key_path = "/dataPartitionId"
partition_key_version = null
},
{
name = "IngestionStrategy"
database_name = "osdu-db"
partition_key_path = "/workflowType"
partition_key_version = null
},
{
name = "RelationshipStatus"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "MappingInfo"
database_name = "osdu-db"
partition_key_path = "/sourceSchemaKind"
partition_key_version = null
},
{
name = "FileLocationEntity"
database_name = "osdu-db"
partition_key_path = "/id"
partition_key_version = null
},
{
name = "WorkflowCustomOperatorInfo"
database_name = "osdu-db"
partition_key_path = "/operatorId"
partition_key_version = null
},
{
name = "WorkflowV2"
database_name = "osdu-db"
partition_key_path = "/partitionKey"
partition_key_version = 2