Commit 74b5092a authored by Kishore Battula

Moved airflow resource creation to airflow.tf

parent 21e9bbd2
@@ -25,7 +25,8 @@
 # Airflow
 #-------------------------------
 locals {
-  airflow_admin_password = coalesce(var.airflow_admin_password, random_password.airflow_admin_password[0].result)
+  airflow_admin_password = coalesce(var.airflow_admin_password, random_password.airflow_admin_password[0].result)
+  airflow_log_queue_name = "airflowlogqueue"
 }
 resource "random_password" "airflow_admin_password" {
@@ -66,6 +67,45 @@ resource "azurerm_key_vault_secret" "airflow_admin_password" {
   key_vault_id = data.terraform_remote_state.central_resources.outputs.keyvault_id
 }
+
+// Create Fileshare and folder structure
+resource "azurerm_storage_share" "airflow_share" {
+  name                 = "airflowdags"
+  storage_account_name = module.storage_account.name
+  quota                = 50
+}
+
+resource "azurerm_storage_share_directory" "dags" {
+  name                 = "dags"
+  share_name           = azurerm_storage_share.airflow_share.name
+  storage_account_name = module.storage_account.name
+}
+
+resource "azurerm_storage_share_directory" "plugins" {
+  name                 = "plugins"
+  share_name           = azurerm_storage_share.airflow_share.name
+  storage_account_name = module.storage_account.name
+}
+
+resource "azurerm_storage_share_directory" "operators" {
+  name                 = "plugins/operators"
+  share_name           = azurerm_storage_share.airflow_share.name
+  storage_account_name = module.storage_account.name
+  depends_on           = [azurerm_storage_share_directory.plugins]
+}
+
+// Airflow log container
+resource "azurerm_storage_container" "main" {
+  name                  = "airflow-logs"
+  storage_account_name  = module.storage_account.name
+  container_access_type = "private"
+}
+
+// Airflow queue for blob create event
+resource "azurerm_storage_queue" "main" {
+  name                 = local.airflow_log_queue_name
+  storage_account_name = module.storage_account.name
+}
+
 // Add the Airflow Log Connection to the Vault
 resource "azurerm_key_vault_secret" "airflow_remote_log_connection" {
   name         = "airflow-remote-log-connection"
@@ -80,7 +120,7 @@ resource "azurerm_eventgrid_event_subscription" "airflow_log_event_subscription"
   storage_queue_endpoint {
     storage_account_id = module.storage_account.id
-    queue_name         = "airflowlogqueue"
+    queue_name         = local.airflow_log_queue_name
   }
   included_event_types = ["Microsoft.Storage.BlobCreated"]
@@ -34,14 +34,10 @@ subnet_aks_prefix = "10.10.2.0/23"
 # Storage Settings
 storage_replication_type = "LRS"
 storage_containers = [
-  "azure-webjobs-hosts",
-  "airflow-logs"
+  "azure-webjobs-hosts"
 ]
 storage_shares = [
-  "airflowdags",
   "unit",
   "crs"
 ]
-storage_queues = [
-  "airflowlogqueue"
-]
+storage_queues = []
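
The hunk that touches azurerm_eventgrid_event_subscription above shows only the fragment that changed. A minimal, self-contained sketch of how such a subscription delivers BlobCreated events into the new queue might look like the following; the name, scope, and subject_filter values are assumptions for illustration, since the commit does not show them:

// Sketch (not part of this commit): an Event Grid subscription that pushes
// Microsoft.Storage.BlobCreated events from the storage account into the
// Airflow log queue. "name", "scope" and the subject_filter are assumed values.
resource "azurerm_eventgrid_event_subscription" "airflow_log_event_subscription" {
  name  = "airflow-log-event-subscription" // assumed name
  scope = module.storage_account.id        // assumed scope: the storage account emitting the events

  storage_queue_endpoint {
    storage_account_id = module.storage_account.id
    queue_name         = local.airflow_log_queue_name
  }

  included_event_types = ["Microsoft.Storage.BlobCreated"]

  // Assumed filter: only react to blobs written into the airflow-logs container.
  subject_filter {
    subject_begins_with = "/blobServices/default/containers/${azurerm_storage_container.main.name}/"
  }
}

Factoring the queue name into local.airflow_log_queue_name, as this commit does, keeps the azurerm_storage_queue resource and the subscription endpoint pointing at the same queue instead of repeating the "airflowlogqueue" literal in two places.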