Commit e86bfbb7 authored by Daniel Scholl

Merge branch 'users/kiveerap/AddingAirflowDeploymentPipeline' into 'master'

Adding airflow pipeline

See merge request osdu/platform/deployment-and-operations/infra-azure-provisioning!59
parents 895bd19e a0d5f59d
......@@ -139,6 +139,16 @@ helm template osdu-flux ${INFRA_SRC}/charts/osdu-istio-auth -f ${INFRA_SRC}/char
&& git commit -m "Initialize Istio Auth Chart" \
&& git push origin $UNIQUE)
# Extract manifests from the airflow chart.
helm template airflow ${INFRA_SRC}/charts/airflow -f ${INFRA_SRC}/charts/config.yaml | ${INFRA_SRC}/charts/airflow/add-namespace.py > ${FLUX_SRC}/providers/azure/hld-registry/airflow.yaml
# Commit and Checkin to Deploy
(cd $FLUX_SRC \
&& git switch $UNIQUE \
&& git add ${FLUX_SRC}/providers/azure/hld-registry/airflow.yaml \
&& git commit -m "Initialize Airflow Chart" \
&& git push origin $UNIQUE)
# Extract manifests from each service chart.
for SERVICE in partition entitlements-azure legal storage indexer-queue indexer-service search-service;
......
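A quick sanity check on the rendered airflow.yaml (hypothetical, not part of the original instructions) can confirm the namespace injection worked before the push in the airflow extraction step above triggers Flux, assuming INFRA_SRC and FLUX_SRC are still set:

# Hypothetical check: every manifest emitted by add-namespace.py should carry the osdu namespace.
grep -c "namespace: osdu" ${FLUX_SRC}/providers/azure/hld-registry/airflow.yaml
# Optionally confirm the file still parses as multi-document YAML before committing.
python -c "import sys, yaml; list(yaml.safe_load_all(open(sys.argv[1]))); print('ok')" \
  ${FLUX_SRC}/providers/azure/hld-registry/airflow.yaml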
......@@ -13,7 +13,7 @@
# limitations under the License.
apiVersion: v2
name: osdu-airflow
name: airflow
appVersion: "latest"
description: "Installs Airflow and the required components for OSDU on Azure"
version: 0.1.0
......
# Copyright © Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
trigger:
batch: true
branches:
include:
- master
paths:
include:
- /charts/airflow/*
exclude:
- /**/*.md
pr:
autoCancel: false
branches:
include:
- "*"
exclude:
- master
paths:
include:
- /charts/airflow/*
exclude:
- /**/*.md
resources:
repositories:
- repository: FluxRepo
type: git
name: k8-gitops-manifests
variables:
- group: 'Azure - OSDU'
- group: 'Azure - OSDU secrets'
- name: serviceName
value: "airflow"
- name: chartPath
value: "charts/airflow"
- name: valuesFile
value: "charts/airflow/values.yaml"
- name: 'MANIFEST_REPO'
value: $[ resources.repositories['FluxRepo'].name ]
stages:
- template: /devops/chart-stages.yml
parameters:
serviceName: ${{ variables.serviceName }}
chartPath: ${{ variables.chartPath }}
valuesFile: ${{ variables.valuesFile }}
skipDeploy: ${{ variables.SKIP_DEPLOY }}
skipCheck: true
chartModificationScript: "scripts/add-namespace.py"
# Add multiple chart folder paths, separated with a pipe (|)
extractedChartFolder: "templates|charts/airflow/templates"
providers:
- name: Azure
environments: ["dev"]
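The pipe-separated extractedChartFolder value above is consumed by the helm-template task further down in this diff; a minimal bash sketch (assumed values, not part of the pipeline itself) of how such a value splits:

# Sketch only: mirrors the IFS-based loop added to the helm-template task below.
CHART_FOLDERS="templates|charts/airflow/templates"
IFS="|"; for folder in $CHART_FOLDERS; do
  echo "manifests would be copied from: $folder"
done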
#!/usr/bin/env python
import yaml
import sys

def addingNamespace(namespace):
    # Read every document produced by `helm template` on stdin and inject the
    # target namespace wherever one is not already set.
    for manifest in yaml.load_all(sys.stdin, Loader=yaml.FullLoader):
        if manifest:
            if 'metadata' in manifest and 'namespace' not in manifest['metadata'] and 'Namespace' not in manifest['kind']:
                manifest['metadata']['namespace'] = namespace
            # RoleBinding-style objects also carry a namespace on their subjects.
            if 'subjects' in manifest:
                manifest['subjects'][0]['namespace'] = namespace
            print('---')
            print(yaml.dump(manifest, default_flow_style=False, sort_keys=False))

namespace = "osdu"
addingNamespace(namespace)
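For illustration, a minimal example of the script's behavior, assuming it is saved as charts/airflow/add-namespace.py and marked executable; the ConfigMap name is a placeholder:

cat <<'EOF' | ./charts/airflow/add-namespace.py
apiVersion: v1
kind: ConfigMap
metadata:
  name: example-config
---
apiVersion: v1
kind: Namespace
metadata:
  name: osdu
EOF
# Expected output: the ConfigMap gains "namespace: osdu" under metadata,
# while the Namespace document itself is left untouched.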
......@@ -94,7 +94,7 @@ airflow:
cert-manager.io/acme-challenge-type: http01
path: "/airflow"
## DNS name mapping to Application Gateway Public IP
host: appgatewayhostfqdn
host: #{DNS_HOST}#
livenessPath: "/airflow/health"
tls:
enabled: true
......@@ -113,11 +113,11 @@ airflow:
externalDatabase:
type: postgres
## Azure PostgreSQL Database username, formatted as {username}@{hostname}
user: dbadmin@dbhost
user: osdu_admin@#{base-name-sr}#-pg
passwordSecret: "postgres"
passwordSecretKey: "postgres-password"
## Azure PostgreSQL Database host
host: dbhostfqdn
host: #{base-name-sr}#-pg.postgres.database.azure.com
port: 5432
properties: "?sslmode=require"
database: airflow
......@@ -125,7 +125,7 @@ airflow:
enabled: false
externalRedis:
## Azure Redis Cache host
host: redishost
host: #{base-name-sr}#-cache.redis.cache.windows.net
port: 6380
passwordSecret: "redis"
passwordSecretKey: "redis-password"
appinsightstatsd:
aadpodidbinding: "osdu-identity"
airflowLogin:
name: admin
airflow:
airflow:
image:
repository: apache/airflow
tag: 1.10.12-python3.6
pullPolicy: IfNotPresent
pullSecret: ""
config:
AIRFLOW__SCHEDULER__STATSD_ON: "True"
AIRFLOW__SCHEDULER__STATSD_HOST: "appinsights-statsd"
AIRFLOW__SCHEDULER__STATSD_PORT: 8125
AIRFLOW__SCHEDULER__STATSD_PREFIX: "osdu_airflow"
AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: "False"
## Enable for Debug purpose
AIRFLOW__WEBSERVER__EXPOSE_CONFIG: "False"
AIRFLOW__WEBSERVER__AUTHENTICATE: "True"
AIRFLOW__WEBSERVER__AUTH_BACKEND: "airflow.contrib.auth.backends.password_auth"
AIRFLOW__API__AUTH_BACKEND: "airflow.contrib.auth.backends.password_auth"
AIRFLOW__CORE__REMOTE_LOGGING: "True"
AIRFLOW__CORE__REMOTE_LOG_CONN_ID: "az_log"
AIRFLOW__CORE__REMOTE_BASE_LOG_FOLDER: "wasb-airflowlog"
AIRFLOW__CORE__LOGGING_CONFIG_CLASS: "log_config.DEFAULT_LOGGING_CONFIG"
AIRFLOW__CORE__LOG_FILENAME_TEMPLATE: "{{ run_id }}/{{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log"
AIRFLOW__CELERY__SSL_ACTIVE: "True"
AIRFLOW__WEBSERVER__ENABLE_PROXY_FIX: "True"
extraEnv:
- name: AIRFLOW__CORE__FERNET_KEY
valueFrom:
secretKeyRef:
name: airflow
key: airflow-fernet-key
- name: AIRFLOW_CONN_AZ_LOG
valueFrom:
secretKeyRef:
name: airflow
key: airflow-remote-log-connection
extraConfigmapMounts:
- name: remote-log-config
mountPath: /opt/airflow/config
configMap: airflow-remote-log-config
readOnly: true
extraPipPackages: [
"flask-bcrypt",
"apache-airflow[statsd]",
"apache-airflow[kubernetes]",
"apache-airflow-backport-providers-microsoft-azure"
]
extraVolumeMounts:
# - name: airflow-kubernetes-config
# mountPath: "/home/airflow/.kube"
# readOnly: true
- name: azure-keyvault
mountPath: "/mnt/azure-keyvault"
readOnly: true
extraVolumes:
# - name: airflow-kubernetes-config
# secret:
# secretName: airflow-kubernetes-config
# items:
# - key: airflow-kubernetes-config
# path: config
- name: azure-keyvault
csi:
driver: secrets-store.csi.k8s.io
readOnly: true
volumeAttributes:
secretProviderClass: azure-keyvault
dags:
installRequirements: true
persistence:
enabled: true
existingClaim: airflowdagpvc
scheduler:
podLabels:
aadpodidbinding: "osdu-identity"
variables: |
{}
web:
podLabels:
aadpodidbinding: "osdu-identity"
baseUrl: "http://localhost/airflow"
ingress:
enabled: true
web:
annotations:
kubernetes.io/ingress.class: azure/application-gateway
cert-manager.io/cluster-issuer: letsencrypt
cert-manager.io/acme-challenge-type: http01
path: "/airflow"
host: osdu-weisun.msft-osdu-test.org
livenessPath: "/airflow/health"
tls:
enabled: true
secretName: osdu-certificate
precedingPaths:
- path: "/airflow/*"
serviceName: airflow-web
servicePort: 8080
workers:
podLabels:
aadpodidbinding: "osdu-identity"
flower:
enabled: false
postgresql:
enabled: false
externalDatabase:
type: postgres
user: osdu_admin@osdu-mvp-weisr-7heu-pg
passwordSecret: "postgres"
passwordSecretKey: "postgres-password"
host: "osdu-mvp-weisr-7heu-pg.postgres.database.azure.com"
port: 5432
properties: "?sslmode=require"
database: airflow
redis:
enabled: false
externalRedis:
host: "osdu-mvp-weisr-7heu-cache.redis.cache.windows.net"
port: 6380
passwordSecret: "redis"
passwordSecretKey: "redis-password"
......@@ -19,6 +19,7 @@ parameters:
skipDeploy: false
hldRegPath: "providers/azure/hld-registry"
checkoutRepo: self
extractedChartFolder: "templates"
stages:
- ${{ each provider in parameters.providers }}:
......@@ -51,6 +52,8 @@ stages:
skipDeploy: ${{ parameters.skipDeploy }}
hldRegPath: ${{ parameters.hldRegPath }}
checkoutRepo: ${{ parameters.checkoutRepo }}
chartModificationScript: ${{ parameters.chartModificationScript }}
extractedChartFolder: ${{ parameters.extractedChartFolder }}
- template: tasks/flux-chart-wait.yml
parameters:
......
......@@ -22,6 +22,8 @@ parameters:
hldRegPath: ""
generationPath: "generated"
checkoutRepo: self
chartModificationScript: ""
extractedChartFolder: ""
steps:
- checkout: FluxRepo
......@@ -50,6 +52,7 @@ steps:
chartPath: ${{ parameters.chartPath }}
valuesFile: ${{ parameters.valuesFile }}
generationPath: ${{parameters.generationPath}}
chartModificationScript: ${{parameters.chartModificationScript}}
- template: gitops.yml
parameters:
serviceName: ${{parameters.serviceName}}
......@@ -58,3 +61,4 @@ steps:
branchName: ${{parameters.environment}}
hldRegPath: ${{parameters.hldRegPath}}
skipDeploy: ${{parameters.skipDeploy}}
extractedChartFolder: ${{parameters.extractedChartFolder}}
......@@ -26,6 +26,8 @@ steps:
SERVICE_NAME: ${{parameters.serviceName}}
HLD_REG_PATH: ${{parameters.hldRegPath}}
GENERATION_PATH: ${{parameters.generationPath}}
CHART_FOLDERS: ${{parameters.extractedChartFolder}}
inputs:
targetType: "inline"
script: |
......@@ -72,9 +74,14 @@ steps:
echo "COPYING YAML FILES TO $HLD_REG_PATH/$SERVICE_NAME"
mkdir -p ./$HLD_REG_PATH/$SERVICE_NAME
rm -rf ./$HLD_REG_PATH/$SERVICE_NAME/*
cp -rf $GITOPS_MANIFEST_DIRECTORY/$SERVICE_NAME/templates/* ./$HLD_REG_PATH/$SERVICE_NAME
IFS="|"; for folder in $CHART_FOLDERS; do
echo "COPYING YAML FILES OF EXTRACTED CHARTS FOLDER $folder"
cp -rf $GITOPS_MANIFEST_DIRECTORY/$SERVICE_NAME/$folder/* ./$HLD_REG_PATH/$SERVICE_NAME
done
fi
git add -A
if [[ $(git status --porcelain) ]]; then
......