Commit be39c9ee authored by Daniel Scholl's avatar Daniel Scholl
Browse files

Initial Code

parent a9e743e4
local.settings.json
samplequeuemsg.json
*.yaml
## Ignore Visual Studio temporary files, build results, and
## files generated by popular Visual Studio add-ons.
# Azure Functions localsettings file
local.settings.json
samplequeuemsg.json
keda.yaml
deploy.yaml
deploy-queue.yaml
# User-specific files
*.suo
*.user
*.userosscache
*.sln.docstates
# User-specific files (MonoDevelop/Xamarin Studio)
*.userprefs
# Build results
[Dd]ebug/
[Dd]ebugPublic/
[Rr]elease/
[Rr]eleases/
x64/
x86/
bld/
[Bb]in/
[Oo]bj/
[Ll]og/
# Visual Studio 2015 cache/options directory
.vs/
# Uncomment if you have tasks that create the project's static files in wwwroot
#wwwroot/
# MSTest test Results
[Tt]est[Rr]esult*/
[Bb]uild[Ll]og.*
# NUNIT
*.VisualState.xml
TestResult.xml
# Build Results of an ATL Project
[Dd]ebugPS/
[Rr]eleasePS/
dlldata.c
# DNX
project.lock.json
project.fragment.lock.json
artifacts/
*_i.c
*_p.c
*_i.h
*.ilk
*.meta
*.obj
*.pch
*.pdb
*.pgc
*.pgd
*.rsp
*.sbr
*.tlb
*.tli
*.tlh
*.tmp
*.tmp_proj
*.log
*.vspscc
*.vssscc
.builds
*.pidb
*.svclog
*.scc
# Chutzpah Test files
_Chutzpah*
# Visual C++ cache files
ipch/
*.aps
*.ncb
*.opendb
*.opensdf
*.sdf
*.cachefile
*.VC.db
*.VC.VC.opendb
# Visual Studio profiler
*.psess
*.vsp
*.vspx
*.sap
# TFS 2012 Local Workspace
$tf/
# Guidance Automation Toolkit
*.gpState
# ReSharper is a .NET coding add-in
_ReSharper*/
*.[Rr]e[Ss]harper
*.DotSettings.user
# JustCode is a .NET coding add-in
.JustCode
# TeamCity is a build add-in
_TeamCity*
# DotCover is a Code Coverage Tool
*.dotCover
# NCrunch
_NCrunch_*
.*crunch*.local.xml
nCrunchTemp_*
# MightyMoose
*.mm.*
AutoTest.Net/
# Web workbench (sass)
.sass-cache/
# Installshield output folder
[Ee]xpress/
# DocProject is a documentation generator add-in
DocProject/buildhelp/
DocProject/Help/*.HxT
DocProject/Help/*.HxC
DocProject/Help/*.hhc
DocProject/Help/*.hhk
DocProject/Help/*.hhp
DocProject/Help/Html2
DocProject/Help/html
# Click-Once directory
publish/
# Publish Web Output
*.[Pp]ublish.xml
*.azurePubxml
# TODO: Comment the next line if you want to checkin your web deploy settings
# but database connection strings (with potential passwords) will be unencrypted
#*.pubxml
*.publishproj
# Microsoft Azure Web App publish settings. Comment the next line if you want to
# checkin your Azure Web App publish settings, but sensitive information contained
# in these scripts will be unencrypted
PublishScripts/
# NuGet Packages
*.nupkg
# The packages folder can be ignored because of Package Restore
**/packages/*
# except build/, which is used as an MSBuild target.
!**/packages/build/
# Uncomment if necessary however generally it will be regenerated when needed
#!**/packages/repositories.config
# NuGet v3's project.json files produces more ignoreable files
*.nuget.props
*.nuget.targets
# Microsoft Azure Build Output
csx/
*.build.csdef
# Microsoft Azure Emulator
ecf/
rcf/
# Windows Store app package directories and files
AppPackages/
BundleArtifacts/
Package.StoreAssociation.xml
_pkginfo.txt
# Visual Studio cache files
# files ending in .cache can be ignored
*.[Cc]ache
# but keep track of directories ending in .cache
!*.[Cc]ache/
# Others
ClientBin/
~$*
*~
*.dbmdl
*.dbproj.schemaview
*.jfm
*.pfx
*.publishsettings
node_modules/
orleans.codegen.cs
# Since there are multiple workflows, uncomment next line to ignore bower_components
# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
#bower_components/
# RIA/Silverlight projects
Generated_Code/
# Backup & report files from converting an old project file
# to a newer Visual Studio version. Backup files are not needed,
# because we have git ;-)
_UpgradeReport_Files/
Backup*/
UpgradeLog*.XML
UpgradeLog*.htm
# SQL Server files
*.mdf
*.ldf
# Business Intelligence projects
*.rdl.data
*.bim.layout
*.bim_*.settings
# Microsoft Fakes
FakesAssemblies/
# GhostDoc plugin setting file
*.GhostDoc.xml
# Node.js Tools for Visual Studio
.ntvs_analysis.dat
# Visual Studio 6 build log
*.plg
# Visual Studio 6 workspace options file
*.opt
# Visual Studio LightSwitch build output
**/*.HTMLClient/GeneratedArtifacts
**/*.DesktopClient/GeneratedArtifacts
**/*.DesktopClient/ModelManifest.xml
**/*.Server/GeneratedArtifacts
**/*.Server/ModelManifest.xml
_Pvt_Extensions
# Paket dependency manager
.paket/paket.exe
paket-files/
# FAKE - F# Make
.fake/
# JetBrains Rider
.idea/
*.sln.iml
# CodeRush
.cr/
# Python Tools for Visual Studio (PTVS)
__pycache__/
*.pyc
\ No newline at end of file
using Microsoft.Extensions.Logging;
using System;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Security.Cryptography;
using System.Text;
using System.Threading.Tasks;
using ILogger = Microsoft.Extensions.Logging.ILogger;
namespace MyFunctionProj
{
class ApiHelper
{
    // Optional field naming the timestamp property inside the submitted JSON.
    // Left empty: Azure Monitor then uses the message ingestion time.
    static string TimeStampField = "";

    // A single shared HttpClient for the process. The original created a new
    // HttpClient per request, which exhausts sockets under load. Per-request
    // headers are now set on HttpRequestMessage instead of DefaultRequestHeaders,
    // so concurrent invocations cannot clobber each other's Authorization/x-ms-date.
    private static readonly HttpClient Client = new HttpClient();

    /// <summary>
    /// Send Logs into Azure Log Workspace (Azure Monitor HTTP Data Collector API).
    /// </summary>
    /// <param name="json">JSON payload to submit.</param>
    /// <param name="customerId">customerId is your Log Analytics workspace ID</param>
    /// <param name="sharedKey">For sharedKey, use either the primary or the secondary Connected Sources client authentication key</param>
    /// <param name="logName">LogName is name of the event type that is being submitted to Azure Monitor</param>
    /// <param name="log">Function logger.</param>
    public static void SendLogs(string json, string customerId, string sharedKey, string logName, ILogger log)
    {
        // Create a hash for the API signature. The signed string must match the
        // request exactly: POST, UTF-8 byte length of the body, content type,
        // x-ms-date header, and the /api/logs resource.
        var datestring = DateTime.UtcNow.ToString("r");
        var jsonBytes = Encoding.UTF8.GetBytes(json);
        string stringToHash = "POST\n" + jsonBytes.Length + "\napplication/json\n" + "x-ms-date:" + datestring + "\n/api/logs";
        string hashedString = BuildSignature(stringToHash, sharedKey);
        string signature = "SharedKey " + customerId + ":" + hashedString;
        PostData(signature, datestring, json, customerId, logName, log);
    }

    /// <summary>
    /// Build the API signature: HMAC-SHA256 over the ASCII bytes of
    /// <paramref name="message"/>, keyed with the base64-decoded shared key.
    /// </summary>
    protected static string BuildSignature(string message, string secret)
    {
        byte[] keyByte = Convert.FromBase64String(secret);
        byte[] messageBytes = Encoding.ASCII.GetBytes(message);
        using (var hmacsha256 = new HMACSHA256(keyByte))
        {
            return Convert.ToBase64String(hmacsha256.ComputeHash(messageBytes));
        }
    }

    /// <summary>
    /// Send a request to the POST API endpoint.
    /// </summary>
    /// <param name="signature">"SharedKey {workspaceId}:{hash}" authorization value.</param>
    /// <param name="date">RFC1123 date string; must equal the value signed into the signature.</param>
    /// <param name="json">JSON payload.</param>
    /// <param name="customerId">Log Analytics workspace ID (forms the host name).</param>
    /// <param name="LogName">Custom log type name.</param>
    /// <param name="log">Function logger.</param>
    protected static void PostData(string signature, string date, string json, string customerId, string LogName, ILogger log)
    {
        try
        {
            string url = "https://" + customerId + ".ods.opinsights.azure.com/api/logs?api-version=2016-04-01";
            using (var request = new HttpRequestMessage(HttpMethod.Post, new Uri(url)))
            {
                request.Headers.Add("Accept", "application/json");
                request.Headers.Add("Log-Type", LogName);
                // "SharedKey" is a non-standard auth scheme; skip strict validation.
                request.Headers.TryAddWithoutValidation("Authorization", signature);
                request.Headers.Add("x-ms-date", date);
                request.Headers.Add("time-generated-field", TimeStampField);

                // Set the content type without a charset parameter so it matches the
                // exact "application/json" value that was signed into the signature.
                var httpContent = new StringContent(json, Encoding.UTF8);
                httpContent.Headers.ContentType = new MediaTypeHeaderValue("application/json");
                request.Content = httpContent;

                // Method is synchronous (void), so block here; GetAwaiter().GetResult()
                // unwraps AggregateException, unlike .Result.
                HttpResponseMessage response = Client.SendAsync(request).GetAwaiter().GetResult();

                // BUG FIX: the original logged ReadAsStringAsync().Status (the Task's
                // status, e.g. "WaitingForActivation"), never the actual response.
                string body = response.Content.ReadAsStringAsync().GetAwaiter().GetResult();
                log.LogInformation("Return Result: " + (int)response.StatusCode + " " + body);
            }
        }
        catch (Exception excep)
        {
            log.LogError($"API Post Exception: {excep.Message}");
        }
    }
}
}
\ No newline at end of file
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netcoreapp3.1</TargetFramework>
<AzureFunctionsVersion>v3</AzureFunctionsVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Azure.Storage.Blobs" Version="12.6.0" />
<PackageReference Include="Microsoft.Azure.WebJobs.Extensions.Storage" Version="3.0.10" />
<PackageReference Include="Microsoft.NET.Sdk.Functions" Version="3.0.3" />
<PackageReference Include="Newtonsoft.Json" Version="12.0.3" />
</ItemGroup>
<ItemGroup>
<None Update="host.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
</None>
<None Update="local.settings.json">
<CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
<CopyToPublishDirectory>Never</CopyToPublishDirectory>
</None>
</ItemGroup>
</Project>
\ No newline at end of file
# Build stage: use the full .NET Core 3.1 SDK to publish the function app.
FROM mcr.microsoft.com/dotnet/core/sdk:3.1 AS installer-env
COPY . /src/dotnet-function-app
RUN cd /src/dotnet-function-app && \
mkdir -p /home/site/wwwroot && \
dotnet publish *.csproj --output /home/site/wwwroot
# To enable ssh & remote debugging on app service change the base image to the one below
# FROM mcr.microsoft.com/azure-functions/dotnet:3.0-appservice
# Runtime stage: slim Azure Functions dotnet host image; only the published
# output is copied over, keeping the SDK out of the final image.
FROM mcr.microsoft.com/azure-functions/dotnet:3.0
ENV AzureWebJobsScriptRoot=/home/site/wwwroot \
AzureFunctionsJobHost__Logging__Console__IsEnabled=true
COPY --from=installer-env ["/home/site/wwwroot", "/home/site/wwwroot"]
\ No newline at end of file
# Use the command below to regenerate deploy.yaml
func kubernetes deploy --name airflow-logs-to-workspace --registry binroon --dotnet --dry-run > deploy.yaml
{
"version": "2.0",
"logging": {
"applicationInsights": {
"samplingExcludedTypes": "Request",
"samplingSettings": {
"isEnabled": true
}
},
"logLevel": {
"default": "Warning",
"Host.Results": "Error",
"Function": "Trace",
"Host.Aggregator": "Trace"
}
}
}
\ No newline at end of file
using System;
using Microsoft.Azure.WebJobs;
using Microsoft.Azure.WebJobs.Host;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Azure.Storage;
using Azure.Storage.Blobs;
using Azure.Storage.Blobs.Models;
using System.IO;
using System.ComponentModel.Design.Serialization;
using System.Text.RegularExpressions;
using System.Text;
namespace MyFunctionProj
{
public static class QueueTrigger
{
    // Airflow log line format:
    // [2020-08-24 03:22:52,180] {taskinstance.py:881} INFO - Starting attempt 1 of 2
    // PERF FIX: the original constructed all four Regex objects inside the read
    // loop, once per line; they are hoisted here and compiled once.
    private static readonly Regex TimestampRegex = new Regex(@"(?<=\[)\d+\-\d+\-\d+\s\d+:\d+:\d+,\d+(?=\])", RegexOptions.Compiled); // timestamp, start of the line
    private static readonly Regex TaskRegex = new Regex(@"(?<=\s\{).+(?=\}\s)", RegexOptions.Compiled);
    private static readonly Regex LogLevelRegex = new Regex(@"(?<=\}\s)\w+(?=\s\-)", RegexOptions.Compiled);
    private static readonly Regex ContentRegex = new Regex(@"(?<=\}\s\w+\s\-\s).*", RegexOptions.Compiled);

    /// <summary>
    /// Queue-triggered entry point. The message is a blob-created event payload;
    /// the referenced Airflow log blob is downloaded, parsed line-by-line into
    /// <see cref="LogLineEntity"/> records (a record starts at a timestamped line
    /// and absorbs continuation lines), and each record is forwarded to Azure
    /// Log Analytics via ApiHelper.SendLogs.
    /// </summary>
    //[StorageAccount("AzureWebJobsStorage")]
    [FunctionName("queueTrigger")]
    public static void Run([QueueTrigger("airflowlogqueue")] string myQueueItem, ILogger log)
    {
        log.LogInformation($"C# Queue trigger function processed: {myQueueItem}");

        // get blob url from the event payload
        JObject o = JObject.Parse(myQueueItem);
        string blobUrl = (string)o["data"]["url"];
        string runID = blobUrl.Split("/")[5]; // the 2nd part after containerName
        log.LogInformation($"C# Queue trigger function processed: blobUrl - {blobUrl}");

        string connection = GetEnvironmentVariable("AzureWebJobsStorage");
        string customerId = GetEnvironmentVariable("AzureLogWorkspaceCustomerId");
        string sharedKey = GetEnvironmentVariable("AzureLogWorkspaceSharedKey");
        string logName = GetEnvironmentVariable("AzureLogWorkspaceLogName");
        // SECURITY FIX: the original did log.LogInformation(connection), writing the
        // raw storage connection string (including the account key) into the logs.

        // Parse the URL only to extract container/blob names, then rebuild an
        // authenticated client from the connection string.
        BlobClient blob = new BlobClient(new Uri(blobUrl));
        BlobContainerClient container = new BlobContainerClient(connection, blob.BlobContainerName);
        blob = container.GetBlobClient(blob.Name);

        // Download the blob into memory. FIX: using-blocks dispose the streams the
        // original leaked; CopyTo replaces the manual 1 KB read/write loop.
        Azure.Response<BlobDownloadInfo> blobDownloadInfo = blob.Download();
        using (var memoryStream = new MemoryStream())
        {
            blobDownloadInfo.Value.Content.CopyTo(memoryStream);
            memoryStream.Position = 0;
            using (var sr = new StreamReader(memoryStream))
            {
                LogLineEntity logLineEntity = null;
                int lineNumber = 0;
                while (true)
                {
                    var line = sr.ReadLine();
                    lineNumber++;

                    // Reached the end of the document (or a blank line): flush the
                    // pending record to Log Analytics and stop.
                    if (string.IsNullOrEmpty(line))
                    {
                        if (logLineEntity != null)
                        {
                            SendRecord(logLineEntity, customerId, sharedKey, logName, log);
                        }
                        log.LogInformation($"Congrats!!! Job finished with {lineNumber} lines!");
                        break;
                    }

                    Match m = TimestampRegex.Match(line);
                    if (m.Success)
                    {
                        // A timestamped line starts a new record; post the previous
                        // record before starting the next one.
                        if (logLineEntity != null)
                        {
                            SendRecord(logLineEntity, customerId, sharedKey, logName, log);
                        }
                        logLineEntity = new LogLineEntity
                        {
                            LogFileName = blobUrl,
                            RunID = runID,
                            LogTimestamp = m.Value,
                            Task = TaskRegex.Match(line).Value,
                            LogLevel = LogLevelRegex.Match(line).Value,
                            Content = ContentRegex.Match(line).Value,
                            LineNumber = lineNumber,
                        };
                    }
                    else if (logLineEntity != null)
                    {
                        // Line not starting with a timestamp: continuation of the
                        // current record's content.
                        // BUG FIX: the original dereferenced logLineEntity without a
                        // null check, throwing NullReferenceException whenever the
                        // first non-empty line of a file carried no timestamp.
                        logLineEntity.Content += "\r\n" + line;
                    }
                }
            }
        }
    }

    // Serialize one parsed record and submit it to the Log Analytics workspace.
    private static void SendRecord(LogLineEntity entity, string customerId, string sharedKey, string logName, ILogger log)
    {
        string json = JsonConvert.SerializeObject(entity);
        ApiHelper.SendLogs(json: json, customerId: customerId, sharedKey: sharedKey, logName: logName, log: log);
    }

    /// <summary>Read a process-scoped environment variable (Function App setting).</summary>
    public static string GetEnvironmentVariable(string name)
    {
        return System.Environment.GetEnvironmentVariable(name, EnvironmentVariableTarget.Process);
    }
}
/// <summary>
/// One record parsed from an Airflow log blob; serialized to JSON and submitted
/// to Azure Log Analytics. A record starts at a line carrying a
/// "[yyyy-MM-dd HH:mm:ss,fff]" timestamp and absorbs following continuation lines.
/// </summary>
public class LogLineEntity
{
// Timestamp text captured from the leading "[...]" of the log line.
public string LogTimestamp { get; set; }
// The "{file.py:line}" source marker captured from the log line.
public string Task { get; set; }
// Severity token (e.g. INFO) between the source marker and " - ".
public string LogLevel { get; set; }
// Message text; continuation lines are appended with "\r\n" separators.
public string Content { get; set; }
// Full URL of the source log blob.
public string LogFileName { get; set; }
// Run identifier taken from the blob URL path (segment after the container name).
public string RunID { get; set; }
// 1-based line number in the log file where this record started.
public int LineNumber { get; set; }
}
}
\ No newline at end of file
# Dotfiles
.git*
.pkgr.yml
.travis.yml
# Non-app files
*.md
*.txt
exampleProxyConfig.js
# Non-app directories
debian
node_modules
packager
examples
docs
.vscode
# Local dev files
config.js
# Generated files
# The "*.js" pattern is deliberately left out: it would ignore all *.js files, including needed ones such as lib/appinsights.js
*.js.map
# Logs
logs
*.log
npm-debug.log*
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directory
node_modules
typings