Commit afba1082 authored by harshit aggarwal

init

parent 88c7be84
Pipeline #46160 passed with stages in 1 minute and 1 second
CHANGELOG.md
*envoy-airflow-authentication.yaml
.*\.http$
@@ -108,6 +108,13 @@ image:
airflowLogin:
name: admin
###############################################################################
# Specify the configuration required to authenticate API calls to the webserver
airflowAuthentication:
username: admin
keyvaultMountPath: /mnt/azure-keyvault/
passwordKey: airflow-admin-password
################################################################################
# Specify any custom configs/environment values
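The airflowAuthentication block above is consumed by the Envoy filter added later in this commit: the Basic-auth username must equal airflowAuthentication.username, and the password must equal the contents of the file at keyvaultMountPath + passwordKey inside the webserver pod. A minimal Python sketch of the header a client would have to send (the password string is a placeholder, not the real secret):

    import base64

    # Values taken from the airflowAuthentication block above; the password is a
    # placeholder for the keyvault secret mounted at
    # /mnt/azure-keyvault/airflow-admin-password inside the pod.
    username = "admin"
    password = "<value of the airflow-admin-password keyvault secret>"

    token = base64.b64encode(f"{username}:{password}".encode()).decode()
    auth_header = {"Authorization": "Basic " + token}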
@@ -206,8 +213,20 @@ airflow:
# Airflow - WebUI Configs
###################################
web:
replicas: 1
livenessProbe:
timeoutSeconds: 60
resources:
requests:
cpu: "2000m"
memory: "2Gi"
limits:
cpu: "3000m"
memory: "2Gi"
podLabels:
aadpodidbinding: "osdu-identity"
podAnnotations:
sidecar.istio.io/userVolumeMount: '[{"name": "azure-keyvault", "mountPath": "/mnt/azure-keyvault", "readonly": true}]'
baseUrl: "http://localhost/airflow"
###################################
@@ -266,7 +285,7 @@ airflow:
AIRFLOW__WEBSERVER__AUTHENTICATE: "True"
AIRFLOW__WEBSERVER__AUTH_BACKEND: "airflow.contrib.auth.backends.password_auth"
AIRFLOW__WEBSERVER__RBAC: "True"
AIRFLOW__API__AUTH_BACKEND: "airflow.contrib.auth.backends.password_auth"
AIRFLOW__API__AUTH_BACKEND: "airflow.api.auth.backend.default"
AIRFLOW__CORE__REMOTE_LOGGING: "True"
AIRFLOW__CORE__REMOTE_LOG_CONN_ID: "az_log"
AIRFLOW__CORE__REMOTE_BASE_LOG_FOLDER: "wasb-airflowlog"
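The two AIRFLOW__API__AUTH_BACKEND lines above are the removed and added values of the same setting. Airflow maps environment variables of the form AIRFLOW__<SECTION>__<KEY> onto airflow.cfg options, so the new value sets [api] auth_backend to airflow.api.auth.backend.default, which performs no authentication of its own; with this commit, API authentication is enforced by the Envoy filter defined below instead. A small sketch of that naming convention (the helper function is hypothetical, the convention is standard Airflow behaviour):

    # Hypothetical helper illustrating Airflow's AIRFLOW__<SECTION>__<KEY> override convention.
    def airflow_env_var(section: str, key: str) -> str:
        return f"AIRFLOW__{section.upper()}__{key.upper()}"

    assert airflow_env_var("api", "auth_backend") == "AIRFLOW__API__AUTH_BACKEND"
    assert airflow_env_var("webserver", "rbac") == "AIRFLOW__WEBSERVER__RBAC"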
@@ -283,6 +302,11 @@ airflow:
AIRFLOW_VAR_CORE__SERVICE__STORAGE__URL: "http://storage.osdu.svc.cluster.local/api/storage/v2/records"
AIRFLOW_VAR_CORE__SERVICE__FILE__HOST: "http://file.osdu.svc.cluster.local/api/file/v2"
AIRFLOW_VAR_CORE__SERVICE__WORKFLOW__HOST: "http://ingestion-workflow.osdu.svc.cluster.local/api/workflow"
AIRFLOW__WEBSERVER__WORKERS: 15
AIRFLOW__WEBSERVER__WORKER_REFRESH_BATCH_SIZE: 0
AIRFLOW__CORE__STORE_SERIALIZED_DAGS: True # Whether to serialize DAGs and persist them in the DB
AIRFLOW__CORE__STORE_DAG_CODE: True # Whether to persist DAG file code in the DB
AIRFLOW__WEBSERVER__WORKER_CLASS: gevent
AIRFLOW_VAR_CORE__SERVICE__SEARCH_WITH_CURSOR__URL: "http://search-service.osdu.svc.cluster.local/api/search/v2/query_with_cursor"
extraEnv:
- name: AIRFLOW__CORE__FERNET_KEY
@@ -15,6 +15,11 @@ image:
airflowLogin:
name: admin
airflowAuthentication:
username: admin
keyvaultMountPath: /mnt/azure-keyvault/
passwordKey: airflow-admin-password
################################################################################
# Specify any custom configs/environment values
#
@@ -108,8 +113,20 @@ airflow:
# Airflow - WebUI Configs
###################################
web:
replicas: 1
livenessProbe:
timeoutSeconds: 60
resources:
requests:
cpu: "2000m"
memory: "2Gi"
limits:
cpu: "3000m"
memory: "2Gi"
podLabels:
aadpodidbinding: "osdu-identity"
podAnnotations:
sidecar.istio.io/userVolumeMount: '[{"name": "azure-keyvault", "mountPath": "/mnt/azure-keyvault", "readonly": true}]'
baseUrl: "http://localhost/airflow"
###################################
@@ -123,12 +140,12 @@ airflow:
## minReplicas is picked from Values.workers.replicas; the default value is 1
maxReplicas: 3
metrics:
- type: Resource
resource:
name: memory
target:
type: Utilization
averageUtilization: 60
- type: Resource
resource:
name: memory
target:
type: Utilization
averageUtilization: 60
labels:
# DO NOT DELETE THIS LABEL. SET IT TO "false" WHEN AUTOSCALING IS DISABLED, SET IT TO "true" WHEN AUTOSCALING IS ENABLED
autoscalingEnabled: "false"
@@ -171,7 +188,7 @@ airflow:
AIRFLOW__WEBSERVER__AUTHENTICATE: "True"
AIRFLOW__WEBSERVER__AUTH_BACKEND: "airflow.contrib.auth.backends.password_auth"
AIRFLOW__WEBSERVER__RBAC: "True"
AIRFLOW__API__AUTH_BACKEND: "airflow.contrib.auth.backends.password_auth"
AIRFLOW__API__AUTH_BACKEND: "airflow.api.auth.backend.default"
AIRFLOW__CORE__REMOTE_LOGGING: "True"
AIRFLOW__CORE__REMOTE_LOG_CONN_ID: "az_log"
AIRFLOW__CORE__REMOTE_BASE_LOG_FOLDER: "wasb-airflowlog"
@@ -188,55 +205,60 @@ airflow:
AIRFLOW_VAR_CORE__SERVICE__STORAGE__URL: "http://storage.osdu.svc.cluster.local/api/storage/v2/records"
AIRFLOW_VAR_CORE__SERVICE__FILE__HOST: "http://file.osdu.svc.cluster.local/api/file/v2"
AIRFLOW_VAR_CORE__SERVICE__WORKFLOW__HOST: "http://ingestion-workflow.osdu.svc.cluster.local/api/workflow"
AIRFLOW__WEBSERVER__WORKERS: 15
AIRFLOW__WEBSERVER__WORKER_REFRESH_BATCH_SIZE: 0
AIRFLOW__CORE__STORE_SERIALIZED_DAGS: True # Whether to serialize DAGs and persist them in the DB
AIRFLOW__CORE__STORE_DAG_CODE: True # Whether to persist DAG file code in the DB
AIRFLOW__WEBSERVER__WORKER_CLASS: gevent
AIRFLOW_VAR_CORE__SERVICE__SEARCH_WITH_CURSOR__URL: "http://search-service.osdu.svc.cluster.local/api/search/v2/query_with_cursor"
extraEnv:
- name: CLOUD_PROVIDER
value: "azure"
- name: AIRFLOW_VAR_KEYVAULT_URI
valueFrom:
configMapKeyRef:
name: osdu-svc-properties
key: ENV_KEYVAULT
- name: AIRFLOW__CORE__FERNET_KEY
valueFrom:
secretKeyRef:
name: airflow
key: fernet-key
- name: AIRFLOW_CONN_AZ_LOG
valueFrom:
secretKeyRef:
name: airflow
key: remote-log-connection
- name: AIRFLOW_VAR_AZURE_TENANT_ID
valueFrom:
secretKeyRef:
name: active-directory
key: tenantid
- name: AIRFLOW_VAR_AZURE_CLIENT_ID
valueFrom:
secretKeyRef:
name: active-directory
key: principal-clientid
- name: AIRFLOW_VAR_AZURE_CLIENT_SECRET
valueFrom:
secretKeyRef:
name: active-directory
key: principal-clientpassword
- name: AIRFLOW_VAR_AAD_CLIENT_ID
valueFrom:
secretKeyRef:
name: active-directory
key: application-appid
- name: AIRFLOW_VAR_APPINSIGHTS_KEY
valueFrom:
secretKeyRef:
name: central-logging
key: appinsights
- name: CLOUD_PROVIDER
value: "azure"
- name: AIRFLOW_VAR_KEYVAULT_URI
valueFrom:
configMapKeyRef:
name: osdu-svc-properties
key: ENV_KEYVAULT
- name: AIRFLOW__CORE__FERNET_KEY
valueFrom:
secretKeyRef:
name: airflow
key: fernet-key
- name: AIRFLOW_CONN_AZ_LOG
valueFrom:
secretKeyRef:
name: airflow
key: remote-log-connection
- name: AIRFLOW_VAR_AZURE_TENANT_ID
valueFrom:
secretKeyRef:
name: active-directory
key: tenantid
- name: AIRFLOW_VAR_AZURE_CLIENT_ID
valueFrom:
secretKeyRef:
name: active-directory
key: principal-clientid
- name: AIRFLOW_VAR_AZURE_CLIENT_SECRET
valueFrom:
secretKeyRef:
name: active-directory
key: principal-clientpassword
- name: AIRFLOW_VAR_AAD_CLIENT_ID
valueFrom:
secretKeyRef:
name: active-directory
key: application-appid
- name: AIRFLOW_VAR_APPINSIGHTS_KEY
valueFrom:
secretKeyRef:
name: central-logging
key: appinsights
extraConfigmapMounts:
- name: remote-log-config
mountPath: /opt/airflow/config
configMap: airflow-remote-log-config
readOnly: true
- name: remote-log-config
mountPath: /opt/airflow/config
configMap: airflow-remote-log-config
readOnly: true
extraPipPackages: [
"flask-bcrypt==0.7.1",
"apache-airflow[statsd]",
@@ -255,16 +277,16 @@ airflow:
"https://azglobalosdutestlake.blob.core.windows.net/pythonsdk/osdu_api-0.0.4.tar.gz"
]
extraVolumeMounts:
- name: azure-keyvault
mountPath: "/mnt/azure-keyvault"
readOnly: true
- name: dags-data
mountPath: /opt/airflow/plugins
subPath: plugins
- name: azure-keyvault
mountPath: "/mnt/azure-keyvault"
readOnly: true
- name: dags-data
mountPath: /opt/airflow/plugins
subPath: plugins
extraVolumes:
- name: azure-keyvault
csi:
driver: secrets-store.csi.k8s.io
readOnly: true
volumeAttributes:
secretProviderClass: azure-keyvault
- name: azure-keyvault
csi:
driver: secrets-store.csi.k8s.io
readOnly: true
volumeAttributes:
secretProviderClass: azure-keyvault
apiVersion: networking.istio.io/v1alpha3
kind: EnvoyFilter
metadata:
name: basic-auth-for-airflow
namespace: osdu
spec:
workloadSelector:
labels:
app: airflow
configPatches:
- applyTo: HTTP_FILTER
match:
context: SIDECAR_INBOUND
listener:
filterChain:
filter:
name: envoy.http_connection_manager
subFilter:
name: envoy.router
patch:
operation: INSERT_BEFORE
value:
name: envoy.lua.basic-auth-for-airflow
typed_config:
"@type": "type.googleapis.com/envoy.config.filter.http.lua.v2.Lua"
inlineCode: |
function starts_with(str, start)
return str:sub(1, #start) == start
end
function decode(data)
-- Useful links http://lua-users.org/wiki/BaseSixtyFour, http://lua-users.org/wiki/StringLibraryTutorial
-- Supported characters for Base64
local base64Characters ='ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'
-- Remove any characters other than the ones supported in Base64
data = string.gsub(data, '[^'..base64Characters..'=]', '')
return (data:gsub('.', function(x)
-- In gsub, each match of the pattern (in this case any single character) is passed as an argument to this function
if (x == '=') then return '' end
-- finding the position of the character in the base64Characters string which will be used to generate the binary representation of the character 'x'
local r,f='',(base64Characters:find(x)-1)
-- this loop generates the binary representation and stores it in variable r
for i=6,1,-1 do r=r..(f%2^i-f%2^(i-1)>0 and '1' or '0') end
-- the value returned is substituted back into the string
return r;
end):gsub('%d%d%d?%d?%d?%d?%d?%d?', function(x)
-- Here the input will be received in binary format as per the above pattern
if (#x ~= 8) then return '' end
local c=0
-- Generating characters from the binary format
for i=1,8 do c=c+(x:sub(i,i)=='1' and 2^(8-i) or 0) end
return string.char(c)
end))
end
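-- Illustrative example: decode("YWRtaW46cGFzc3dvcmQ=") returns "admin:password",
-- i.e. the reverse of base64-encoding the "username:password" pair carried in the Basic auth header.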
function split(inputStr, separator)
-- Useful Links http://lua-users.org/wiki/StringLibraryTutorial
local Table={}
-- string.gmatch method returns an iterator function that, each time it is called, returns the next captures from pattern over the string inputStr
-- Here we are using the gmatch function to capture strings which contain at least one character of anything other than the desired separator.
for str in string.gmatch(inputStr, "([^"..separator.."]+)") do
table.insert(Table, str)
end
return Table
end
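-- Illustrative example: split("admin:secret", ":") returns { "admin", "secret" };
-- note that a password which itself contains ":" would be split into further pieces.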
function read_file(path)
local file = io.open(path, "rb") -- r read mode and b binary mode
if not file then return nil end
local content = file:read "*a" -- *a or *all reads the whole file
file:close()
return content
end
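-- Illustrative example with the values rendered above:
-- read_file("/mnt/azure-keyvault/airflow-admin-password") returns the mounted secret value, or nil if the file cannot be opened.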
function authenticationFailure(request_handle)
request_handle:logErr("Airflow authentication failed")
request_handle:respond({[":status"] = "401"}, "Airflow authentication failed")
end
function authenticationSuccess(request_handle)
request_handle:logInfo("Airflow authentication successful")
end
function authenticateRequest(request_handle)
local path = request_handle:headers():get(":path")
if (starts_with(path, "/airflow/api/experimental")) then
local authHeader = request_handle:headers():get("Authorization")
if starts_with(authHeader, "Basic") then
local encodedCredential = string.sub(authHeader, 7)
local decodedCredential = decode(encodedCredential)
local credsList = split(decodedCredential, ":")
local username = credsList[1]
local password = credsList[2]
local airflowUsername = "{{ .Values.airflowAuthentication.username }}"
local airflowPasswordFilepath = "{{ .Values.airflowAuthentication.keyvaultMountPath }}" .. "{{ .Values.airflowAuthentication.passwordKey }}"
local airflowPassword = read_file(airflowPasswordFilepath)
if (username == airflowUsername and password == airflowPassword) then
authenticationSuccess(request_handle)
else
authenticationFailure(request_handle)
end
else
authenticationFailure(request_handle)
end
end
end
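-- Only requests whose :path starts with /airflow/api/experimental are challenged here;
-- all other requests pass through to the Airflow webserver unchanged.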
function envoy_on_request(request_handle)
local status, result = pcall(authenticateRequest, request_handle)
if (not status) then
authenticationFailure(request_handle)
end
end
\ No newline at end of file
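With this filter in place, a client calling the experimental API must present the credentials configured earlier in this commit. A minimal end-to-end sketch using only the Python standard library; the hostname follows the baseUrl value above, and /api/experimental/test is Airflow 1.10's connectivity-check route, but both are assumptions about the deployed environment:

    import base64
    import urllib.request

    # Placeholder credentials; the real password is the airflow-admin-password keyvault secret.
    creds = base64.b64encode(b"admin:<airflow-admin-password secret value>").decode()

    req = urllib.request.Request(
        "http://localhost/airflow/api/experimental/test",
        headers={"Authorization": "Basic " + creds},
    )
    # The Lua filter answers 401 itself when the credentials do not match;
    # otherwise the request is forwarded to the Airflow webserver.
    with urllib.request.urlopen(req) as resp:
        print(resp.status, resp.read().decode())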
@@ -44,4 +44,4 @@ global:
defaultCpuRequests: "0.5"
defaultMemoryRequests: "4Gi"
defaultCpuLimits: "1"
defaultMemoryLimits: "4Gi"
defaultMemoryLimits: "4Gi"
\ No newline at end of file