diff --git a/README.md b/README.md
index cdb2dc7faae8a7296ec49c1f34808ad2f46045f6..ea0a96526af1338b4a0c2556b471e1a62b504f73 100644
--- a/README.md
+++ b/README.md
@@ -198,17 +198,18 @@ Infrastructure requires a bring your own Elastic Search Instance of a version of
 ES_ENDPOINT=""
 ES_USERNAME=""
 ES_PASSWORD=""
-az keyvault secret set --vault-name $COMMON_VAULT --name "elastic-endpoint-dp1-demo" --value $ES_ENDPOINT
-az keyvault secret set --vault-name $COMMON_VAULT --name "elastic-username-dp1-demo" --value $ES_USERNAME
-az keyvault secret set --vault-name $COMMON_VAULT --name "elastic-password-dp1-demo" --value $ES_PASSWORD
+az keyvault secret set --vault-name $COMMON_VAULT --name "elastic-endpoint-dp1-${UNIQUE}" --value $ES_ENDPOINT
+az keyvault secret set --vault-name $COMMON_VAULT --name "elastic-username-dp1-${UNIQUE}" --value $ES_USERNAME
+az keyvault secret set --vault-name $COMMON_VAULT --name "elastic-password-dp1-${UNIQUE}" --value $ES_PASSWORD
+
 cat >> .envrc << EOF
 # https://cloud.elastic.co
 # ------------------------------------------------------------------------------------------------------
-export TF_VAR_elasticsearch_endpoint="$(az keyvault secret show --id https://$COMMON_VAULT.vault.azure.net/secrets/elastic-endpoint-dp1-demo --query value -otsv)"
-export TF_VAR_elasticsearch_username="$(az keyvault secret show --id https://$COMMON_VAULT.vault.azure.net/secrets/elastic-username-dp1-demo --query value -otsv)"
-export TF_VAR_elasticsearch_password="$(az keyvault secret show --id https://$COMMON_VAULT.vault.azure.net/secrets/elastic-password-dp1-demo --query value -otsv)"
+export TF_VAR_elasticsearch_endpoint="$ES_ENDPOINT"
+export TF_VAR_elasticsearch_username="$ES_USERNAME"
+export TF_VAR_elasticsearch_password="$ES_PASSWORD"
 EOF
diff --git a/infra/templates/osdu-r3-mvp/data_partition/main.tf b/infra/templates/osdu-r3-mvp/data_partition/main.tf
index 5ca2db87cec4cb65864508fbc7af8eff31898826..4bc433c4ad1d42bb6783ece5bd4ba8d4e986191d 100644
--- a/infra/templates/osdu-r3-mvp/data_partition/main.tf
+++ b/infra/templates/osdu-r3-mvp/data_partition/main.tf
@@ -171,6 +171,7 @@ resource "azurerm_role_assignment" "storage_access" {
 // Add Data Contributor Role to Principal
 resource "azurerm_role_assignment" "storage_data_contributor" {
   count      = length(local.rbac_principals)
+  depends_on = [azurerm_role_assignment.storage_access]
 
   role_definition_name = "Storage Blob Data Contributor"
   principal_id         = local.rbac_principals[count.index]
@@ -201,6 +202,7 @@ resource "azurerm_role_assignment" "sdms_storage_access" {
 // Add Data Contributor Role to Principal
 resource "azurerm_role_assignment" "sdms_storage_data_contributor" {
   count      = length(local.rbac_principals)
+  depends_on = [azurerm_role_assignment.sdms_storage_access]
 
   role_definition_name = "Storage Blob Data Contributor"
   principal_id         = local.rbac_principals[count.index]
diff --git a/infra/templates/osdu-r3-mvp/service_resources/README.md b/infra/templates/osdu-r3-mvp/service_resources/README.md
index 65f4826cb72331b15c941c132dc578baea3924f8..e32a6bb0cccecc2407b49186c91e55003470e839 100644
--- a/infra/templates/osdu-r3-mvp/service_resources/README.md
+++ b/infra/templates/osdu-r3-mvp/service_resources/README.md
@@ -49,6 +49,8 @@ storage_queues = [
   "airflowlogqueue"
 ]
 __Provision__
+> Please run `helm repo update` prior to executing in case you have helm charts locally that need updates.
+
 Execute the following commands to set up your terraform workspace.
 
 ```bash