diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..92947ba
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,41 @@
+# EditorConfig is awesome: http://EditorConfig.org
+
+# top-most EditorConfig file
+root = true
+
+# Default settings:
+# A newline ending every file
+# Use 2 spaces as indentation by default
+[*]
+insert_final_newline = true
+indent_style = space
+indent_size = 2
+trim_trailing_whitespace = true
+charset = utf-8
+
+[*.json]
+indent_size = 4
+
+[*.cs]
+indent_size = 4
+# IDE0090: Use 'new(...)'
+csharp_style_implicit_object_creation_when_type_is_apparent = false
+
+
+# Xml project files
+[*.{csproj,vcxproj,vcxproj.filters,proj,nativeproj,locproj,xproj}]
+indent_size = 2
+
+# Xml files
+[*.{xml,xsd,stylecop,resx,ruleset}]
+indent_size = 2
+
+# Xml config files
+[*.{props,targets,config,nuspec}]
+indent_size = 2
+
+# Shell scripts
+[*.sh]
+end_of_line = lf
+[*.{cmd,bat}]
+end_of_line = crlf
\ No newline at end of file
diff --git a/.github/workflows/build-deploy-workflow.yml b/.github/workflows/build-deploy-workflow.yml
new file mode 100644
index 0000000..9582fa6
--- /dev/null
+++ b/.github/workflows/build-deploy-workflow.yml
@@ -0,0 +1,22 @@
+name: Deploy dan-plugin-DATASOURCENAME
+
+on:
+  push:
+    branches: [ main ]
+    paths-ignore:
+      - '**/README.md'
+      - '**/*.yml'
+  workflow_dispatch:
+
+jobs:
+  run:
+    uses: data-altinn-no/deploy-actions/.github/workflows/dan-deploy-flow.yml@main
+    with:
+      artifact_name: 'dan-plugin-DATASOURCENAME' # Can be omitted, defaults to 'artifact'
+      function_project_path: 'src/Dan.Plugin.DATASOURCENAME'
+    secrets:
+      function_app_name: ${{ secrets.FUNCTIONAPP_NAME }}
+      publish_profile: ${{ secrets.AZURE_FUNCTION_PUBLISH_CREDS }}
+      azure_artifact_pat: ${{ secrets.AZURE_ARTIFACTS_PAT }}
+      azure_credentials: ${{ secrets.AZURE_CREDENTIALS }}
+      resource_group_prod: ${{ secrets.RESOURCE_GROUP_PROD }}
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..bc5d5ba
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,353 @@
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+# Azure Functions localsettings file
+local.settings.json
+
+# User-specific files
+*.rsuser
+*.suo
+*.user
+*.userosscache
+*.sln.docstates
+
+# User-specific files (MonoDevelop/Xamarin Studio)
+*.userprefs
+
+# Mono auto generated files
+mono_crash.*
+
+# Build results
+[Dd]ebug/
+[Dd]ebugPublic/
+[Rr]elease/
+[Rr]eleases/
+x64/
+x86/
+[Aa][Rr][Mm]/
+[Aa][Rr][Mm]64/
+bld/
+[Bb]in/
+[Oo]bj/
+[Ll]og/
+[Ll]ogs/
+
+# Visual Studio 2015/2017 cache/options directory
+.vs/
+# Uncomment if you have tasks that create the project's static files in wwwroot
+#wwwroot/
+
+# Visual Studio 2017 auto generated files
+Generated\ Files/
+
+# MSTest test Results
+[Tt]est[Rr]esult*/
+[Bb]uild[Ll]og.*
+
+# NUnit
+*.VisualState.xml
+TestResult.xml
+nunit-*.xml
+
+# Build Results of an ATL Project
+[Dd]ebugPS/
+[Rr]eleasePS/
+dlldata.c
+
+# Benchmark Results
+BenchmarkDotNet.Artifacts/
+
+# .NET Core
+project.lock.json
+project.fragment.lock.json
+artifacts/
+
+# StyleCop
+StyleCopReport.xml
+
+# Files built by Visual Studio
+*_i.c
+*_p.c
+*_h.h
+*.ilk
+*.meta
+*.obj
+*.iobj
+*.pch
+*.pdb
+*.ipdb
+*.pgc
+*.pgd
+*.rsp
+*.sbr
+*.tlb
+*.tli
+*.tlh
+*.tmp
+*.tmp_proj
+*_wpftmp.csproj
+*.log
+*.vspscc
+*.vssscc
+.builds
+*.pidb
+*.svclog
+*.scc
+
+# Chutzpah Test files
+_Chutzpah*
+
+# Visual C++ cache files
+ipch/
+*.aps
+*.ncb
+*.opendb
+*.opensdf
+*.sdf
+*.cachefile
+*.VC.db
+*.VC.VC.opendb
+
+# Visual Studio profiler
+*.psess
+*.vsp
+*.vspx
+*.sap
+
+# Visual Studio Trace Files
+*.e2e
+
+# TFS 2012 Local Workspace
+$tf/
+
+# Guidance Automation Toolkit
+*.gpState
+
+# ReSharper is a .NET coding add-in
+_ReSharper*/
+*.[Rr]e[Ss]harper
+*.DotSettings.user
+
+# TeamCity is a build add-in
+_TeamCity*
+
+# DotCover is a Code Coverage Tool
+*.dotCover
+
+# AxoCover is a Code Coverage Tool
+.axoCover/*
+!.axoCover/settings.json
+
+# Visual Studio code coverage results
+*.coverage
+*.coveragexml
+
+# NCrunch
+_NCrunch_*
+.*crunch*.local.xml
+nCrunchTemp_*
+
+# MightyMoose
+*.mm.*
+AutoTest.Net/
+
+# Web workbench (sass)
+.sass-cache/
+
+# Installshield output folder
+[Ee]xpress/
+
+# DocProject is a documentation generator add-in
+DocProject/buildhelp/
+DocProject/Help/*.HxT
+DocProject/Help/*.HxC
+DocProject/Help/*.hhc
+DocProject/Help/*.hhk
+DocProject/Help/*.hhp
+DocProject/Help/Html2
+DocProject/Help/html
+
+# Click-Once directory
+publish/
+
+# Publish Web Output
+*.[Pp]ublish.xml
+*.azurePubxml
+# Note: Comment the next line if you want to checkin your web deploy settings,
+# but database connection strings (with potential passwords) will be unencrypted
+*.pubxml
+*.publishproj
+
+# Microsoft Azure Web App publish settings. Comment the next line if you want to
+# checkin your Azure Web App publish settings, but sensitive information contained
+# in these scripts will be unencrypted
+PublishScripts/
+
+# NuGet Packages
+*.nupkg
+# NuGet Symbol Packages
+*.snupkg
+# The packages folder can be ignored because of Package Restore
+**/[Pp]ackages/*
+# except build/, which is used as an MSBuild target.
+!**/[Pp]ackages/build/
+# Uncomment if necessary however generally it will be regenerated when needed
+#!**/[Pp]ackages/repositories.config
+# NuGet v3's project.json files produces more ignorable files
+*.nuget.props
+*.nuget.targets
+
+# Microsoft Azure Build Output
+csx/
+*.build.csdef
+
+# Microsoft Azure Emulator
+ecf/
+rcf/
+
+# Windows Store app package directories and files
+AppPackages/
+BundleArtifacts/
+Package.StoreAssociation.xml
+_pkginfo.txt
+*.appx
+*.appxbundle
+*.appxupload
+
+# Visual Studio cache files
+# files ending in .cache can be ignored
+*.[Cc]ache
+# but keep track of directories ending in .cache
+!?*.[Cc]ache/
+
+# Others
+ClientBin/
+~$*
+*~
+*.dbmdl
+*.dbproj.schemaview
+*.jfm
+*.pfx
+*.publishsettings
+orleans.codegen.cs
+
+# Including strong name files can present a security risk
+# (https://github.com/github/gitignore/pull/2483#issue-259490424)
+#*.snk
+
+# Since there are multiple workflows, uncomment next line to ignore bower_components
+# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
+#bower_components/
+
+# RIA/Silverlight projects
+Generated_Code/
+
+# Backup & report files from converting an old project file
+# to a newer Visual Studio version. Backup files are not needed,
+# because we have git ;-)
+_UpgradeReport_Files/
+Backup*/
+UpgradeLog*.XML
+UpgradeLog*.htm
+ServiceFabricBackup/
+*.rptproj.bak
+
+# SQL Server files
+*.mdf
+*.ldf
+*.ndf
+
+# Business Intelligence projects
+*.rdl.data
+*.bim.layout
+*.bim_*.settings
+*.rptproj.rsuser
+*- [Bb]ackup.rdl
+*- [Bb]ackup ([0-9]).rdl
+*- [Bb]ackup ([0-9][0-9]).rdl
+
+# Microsoft Fakes
+FakesAssemblies/
+
+# GhostDoc plugin setting file
+*.GhostDoc.xml
+
+# Node.js Tools for Visual Studio
+.ntvs_analysis.dat
+node_modules/
+
+# Visual Studio 6 build log
+*.plg
+
+# Visual Studio 6 workspace options file
+*.opt
+
+# Visual Studio 6 auto-generated workspace file (contains which files were open etc.)
+*.vbw
+
+# Visual Studio LightSwitch build output
+**/*.HTMLClient/GeneratedArtifacts
+**/*.DesktopClient/GeneratedArtifacts
+**/*.DesktopClient/ModelManifest.xml
+**/*.Server/GeneratedArtifacts
+**/*.Server/ModelManifest.xml
+_Pvt_Extensions
+
+# Paket dependency manager
+.paket/paket.exe
+paket-files/
+
+# FAKE - F# Make
+.fake/
+
+# CodeRush personal settings
+.cr/personal
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# Cake - Uncomment if you are using it
+# tools/**
+# !tools/packages.config
+
+# Tabs Studio
+*.tss
+
+# Telerik's JustMock configuration file
+*.jmconfig
+
+# BizTalk build output
+*.btp.cs
+*.btm.cs
+*.odx.cs
+*.xsd.cs
+
+# OpenCover UI analysis results
+OpenCover/
+
+# Azure Stream Analytics local run output
+ASALocalRun/
+
+# MSBuild Binary and Structured Log
+*.binlog
+
+# NVidia Nsight GPU debugger configuration file
+*.nvuser
+
+# MFractors (Xamarin productivity tool) working folder
+.mfractor/
+
+# Local History for Visual Studio
+.localhistory/
+
+# BeatPulse healthcheck temp database
+healthchecksdb
+
+# Backup folder for Package Reference Convert tool in Visual Studio 2017
+MigrationBackup/
+
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
diff --git a/Dan.Plugin.DATASOURCENAME.sln b/Dan.Plugin.DATASOURCENAME.sln
new file mode 100644
index 0000000..0a3970b
--- /dev/null
+++ b/Dan.Plugin.DATASOURCENAME.sln
@@ -0,0 +1,31 @@
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31717.71
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Dan.Plugin.DATASOURCENAME.Test", "test\Dan.Plugin.DATASOURCENAME.Test\Dan.Plugin.DATASOURCENAME.Test.csproj", "{B3D46531-F347-4F9E-AD08-6A93E02A7E48}"
+EndProject
+Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Dan.Plugin.DATASOURCENAME", "src\Dan.Plugin.DATASOURCENAME\Dan.Plugin.DATASOURCENAME.csproj", "{EC1D3A58-C881-442C-8C4A-B1D2A2535F6B}"
+EndProject
+Global
+    GlobalSection(SolutionConfigurationPlatforms) = preSolution
+        Debug|Any CPU = Debug|Any CPU
+        Release|Any CPU = Release|Any CPU
+    EndGlobalSection
+    GlobalSection(ProjectConfigurationPlatforms) = postSolution
+        {B3D46531-F347-4F9E-AD08-6A93E02A7E48}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {B3D46531-F347-4F9E-AD08-6A93E02A7E48}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {B3D46531-F347-4F9E-AD08-6A93E02A7E48}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {B3D46531-F347-4F9E-AD08-6A93E02A7E48}.Release|Any CPU.Build.0 = Release|Any CPU
+        {EC1D3A58-C881-442C-8C4A-B1D2A2535F6B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
+        {EC1D3A58-C881-442C-8C4A-B1D2A2535F6B}.Debug|Any CPU.Build.0 = Debug|Any CPU
+        {EC1D3A58-C881-442C-8C4A-B1D2A2535F6B}.Release|Any CPU.ActiveCfg = Release|Any CPU
+        {EC1D3A58-C881-442C-8C4A-B1D2A2535F6B}.Release|Any CPU.Build.0 = Release|Any CPU
+    EndGlobalSection
+    GlobalSection(SolutionProperties) = preSolution
+        HideSolutionNode = FALSE
+    EndGlobalSection
+    GlobalSection(ExtensibilityGlobals) = postSolution
+        SolutionGuid = {9C136489-16DA-43F4-A16A-9038157D6FDF}
+    EndGlobalSection
+EndGlobal
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..4ec23d1
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 Digitaliseringsdirektoratet
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..e69de29
diff --git a/plugin.DATASOURCENAME.code-workspace b/plugin.DATASOURCENAME.code-workspace
new file mode 100644
index 0000000..f4043fd
--- /dev/null
+++ b/plugin.DATASOURCENAME.code-workspace
@@ -0,0 +1,16 @@
+{
+    "folders": [
+        {
+            "path": "src/Dan.Plugin.DATASOURCENAME"
+        },
+        {
+            "path": "test/Dan.Plugin.DATASOURCENAME.Test"
+        }
+    ],
+    "extensions": {
+        "recommendations": [
+            "ms-dotnettools.csharp",
+            "github.vscode-pull-request-github"
+        ]
+    }
+}
\ No newline at end of file
diff --git a/src/Dan.Plugin.DATASOURCENAME/Dan.Plugin.DATASOURCENAME.csproj b/src/Dan.Plugin.DATASOURCENAME/Dan.Plugin.DATASOURCENAME.csproj
new file mode 100644
index 0000000..f99047f
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/Dan.Plugin.DATASOURCENAME.csproj
@@ -0,0 +1,22 @@
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net6.0</TargetFramework>
+    <AzureFunctionsVersion>v4</AzureFunctionsVersion>
+    <OutputType>Exe</OutputType>
+  </PropertyGroup>
+
+  <ItemGroup>
+  </ItemGroup>
+
+  <ItemGroup>
+    <None Update="host.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+    <None Update="local.settings.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+      <CopyToPublishDirectory>Never</CopyToPublishDirectory>
+    </None>
+  </ItemGroup>
+</Project>
diff --git a/src/Dan.Plugin.DATASOURCENAME/Metadata.cs b/src/Dan.Plugin.DATASOURCENAME/Metadata.cs
new file mode 100644
index 0000000..2d29a1e
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/Metadata.cs
@@ -0,0 +1,91 @@
+using System.Collections.Generic;
+using System.Net;
+using System.Threading.Tasks;
+using Dan.Common;
+using Dan.Common.Enums;
+using Dan.Common.Interfaces;
+using Dan.Common.Models;
+using Dan.Plugin.DATASOURCENAME.Models;
+using Microsoft.Azure.Functions.Worker;
+using Microsoft.Azure.Functions.Worker.Http;
+using Newtonsoft.Json.Schema.Generation;
+
+namespace Dan.Plugin.DATASOURCENAME;
+
+/// <summary>
+/// All plugins must implement IEvidenceSourceMetadata, which describes the datasets returned by this plugin. An example is implemented below.
+/// +public class Metadata : IEvidenceSourceMetadata +{ + /// + /// + /// + /// + public List GetEvidenceCodes() + { + JSchemaGenerator generator = new JSchemaGenerator(); + + return new List() + { + new() + { + EvidenceCodeName = global::Dan.Plugin.DATASOURCENAME.Plugin.SimpleDatasetName, + EvidenceSource = global::Dan.Plugin.DATASOURCENAME.Plugin.SourceName, + Values = new List() + { + new() + { + EvidenceValueName = "field1", + ValueType = EvidenceValueType.String + }, + new() + { + EvidenceValueName = "field2", + ValueType = EvidenceValueType.String + } + } + }, + new() + { + EvidenceCodeName = global::Dan.Plugin.DATASOURCENAME.Plugin.RichDatasetName, + EvidenceSource = global::Dan.Plugin.DATASOURCENAME.Plugin.SourceName, + Values = new List() + { + new() + { + // Convention for rich datasets with a single JSON model is to use the value name "default" + EvidenceValueName = "default", + ValueType = EvidenceValueType.JsonSchema, + JsonSchemaDefintion = generator.Generate(typeof(ExampleModel)).ToString() + } + }, + AuthorizationRequirements = new List + { + new MaskinportenScopeRequirement + { + RequiredScopes = new List { "altinn:dataaltinnno/somescope" } + } + } + } + }; + } + + + /// + /// This function must be defined in all DAN plugins, and is used by core to enumerate the available datasets across all plugins. + /// Normally this should not be changed. + /// + /// + /// + /// + [Function(Constants.EvidenceSourceMetadataFunctionName)] + public async Task GetMetadataAsync( + [HttpTrigger(AuthorizationLevel.Anonymous, "get", Route = null)] HttpRequestData req, + FunctionContext context) + { + var response = req.CreateResponse(HttpStatusCode.OK); + await response.WriteAsJsonAsync(GetEvidenceCodes()); + return response; + } + +} diff --git a/src/Dan.Plugin.DATASOURCENAME/Models/ExampleModel.cs b/src/Dan.Plugin.DATASOURCENAME/Models/ExampleModel.cs new file mode 100644 index 0000000..e123b49 --- /dev/null +++ b/src/Dan.Plugin.DATASOURCENAME/Models/ExampleModel.cs @@ -0,0 +1,12 @@ +using Newtonsoft.Json; + +namespace Dan.Plugin.DATASOURCENAME.Models; + +public class ExampleModel +{ + [JsonRequired] + public string ResponseField1 { get; set; } + + [JsonRequired] + public string ResponseField2 { get; set; } +} diff --git a/src/Dan.Plugin.DATASOURCENAME/Plugin.cs b/src/Dan.Plugin.DATASOURCENAME/Plugin.cs new file mode 100644 index 0000000..986d122 --- /dev/null +++ b/src/Dan.Plugin.DATASOURCENAME/Plugin.cs @@ -0,0 +1,146 @@ +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Threading.Tasks; +using Dan.Common; +using Dan.Common.Exceptions; +using Dan.Common.Interfaces; +using Dan.Common.Models; +using Dan.Common.Util; +using Dan.Plugin.DATASOURCENAME.Models; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Newtonsoft.Json; + +namespace Dan.Plugin.DATASOURCENAME; + +public class Plugin +{ + private readonly IEvidenceSourceMetadata _evidenceSourceMetadata; + private readonly ILogger _logger; + private readonly HttpClient _client; + private readonly Settings _settings; + + // The datasets must supply a human-readable source description from which they originate. Individual fields might come from different sources, and this string should reflect that (ie. name all possible sources). + public const string SourceName = "Digitaliseringsdirektoratet"; + + // The function names (ie. 
+    // The function names (i.e. HTTP endpoint names) and the dataset names must match. Using constants to avoid errors.
+    public const string SimpleDatasetName = "SimpleDataset";
+    public const string RichDatasetName = "RichDataset";
+
+    // These are not mandatory, but there should be a distinct error code (any integer) for each type of error that can occur. The error codes do not have to be globally
+    // unique. They should be used within either transient or permanent exceptions; see MakeRequest below for examples.
+    private const int ERROR_UPSTREAM_UNAVAILABLE = 1001;
+    private const int ERROR_INVALID_INPUT = 1002;
+    private const int ERROR_NOT_FOUND = 1003;
+    private const int ERROR_UNABLE_TO_PARSE_RESPONSE = 1004;
+
+    public Plugin(
+        IHttpClientFactory httpClientFactory,
+        ILoggerFactory loggerFactory,
+        IOptions<Settings> settings,
+        IEvidenceSourceMetadata evidenceSourceMetadata)
+    {
+        _client = httpClientFactory.CreateClient(Constants.SafeHttpClient);
+        _logger = loggerFactory.CreateLogger<Plugin>();
+        _settings = settings.Value;
+        _evidenceSourceMetadata = evidenceSourceMetadata;
+
+        _logger.LogDebug("Initialized plugin! This should be visible in the console");
+    }
+
+    [Function(SimpleDatasetName)]
+    public async Task<HttpResponseData> GetSimpleDatasetAsync(
+        [HttpTrigger(AuthorizationLevel.Function, "post", Route = null)] HttpRequestData req,
+        FunctionContext context)
+    {
+
+        _logger.LogDebug("debug HERE");
+        _logger.LogWarning("warning HERE");
+        _logger.LogError("error HERE");
+
+        var evidenceHarvesterRequest = await req.ReadFromJsonAsync<EvidenceHarvesterRequest>();
+
+        return await EvidenceSourceResponse.CreateResponse(req,
+            () => GetEvidenceValuesSimpleDataset(evidenceHarvesterRequest));
+    }
+
+    [Function(RichDatasetName)]
+    public async Task<HttpResponseData> GetRichDatasetAsync(
+        [HttpTrigger(AuthorizationLevel.Function, "post", Route = null)] HttpRequestData req,
+        FunctionContext context)
+    {
+        var evidenceHarvesterRequest = await req.ReadFromJsonAsync<EvidenceHarvesterRequest>();
+
+        return await EvidenceSourceResponse.CreateResponse(req,
+            () => GetEvidenceValuesRichDataset(evidenceHarvesterRequest));
+    }
+
+    private async Task<List<EvidenceValue>> GetEvidenceValuesSimpleDataset(EvidenceHarvesterRequest evidenceHarvesterRequest)
+    {
+        var url = _settings.EndpointUrl + "?someparameter=" + evidenceHarvesterRequest.OrganizationNumber;
+        var exampleModel = await MakeRequest<ExampleModel>(url);
+
+        var ecb = new EvidenceBuilder(_evidenceSourceMetadata, SimpleDatasetName);
+        ecb.AddEvidenceValue("field1", exampleModel.ResponseField1, SourceName);
+        ecb.AddEvidenceValue("field2", exampleModel.ResponseField2, SourceName);
+
+        return ecb.GetEvidenceValues();
+    }
+
+    private async Task<List<EvidenceValue>> GetEvidenceValuesRichDataset(EvidenceHarvesterRequest evidenceHarvesterRequest)
+    {
+
+        var url = _settings.EndpointUrl + "?someparameter=" + evidenceHarvesterRequest.OrganizationNumber;
+        var exampleModel = await MakeRequest<ExampleModel>(url);
+
+        var ecb = new EvidenceBuilder(_evidenceSourceMetadata, RichDatasetName);
+
+        // Here we reserialize the model. While it is possible to merely send the received JSON string directly through without parsing it,
+        // the extra step of deserializing it to a known model ensures that the JSON schema supplied in the metadata always matches the
+        // dataset model.
+        //
+        // Another way to do this is to not generate the schema from the model, but "hand code" the schema in the metadata and validate the
+        // received JSON against it, throwing e.g. an EvidenceSourcePermanentServerException if it fails to match.
+        ecb.AddEvidenceValue("default", JsonConvert.SerializeObject(exampleModel), SourceName);
+
+        return ecb.GetEvidenceValues();
+    }
+
+    private async Task<T> MakeRequest<T>(string target)
+    {
+        HttpResponseMessage result;
+        try
+        {
+            var request = new HttpRequestMessage(HttpMethod.Get, target);
+            result = await _client.SendAsync(request);
+        }
+        catch (HttpRequestException ex)
+        {
+            throw new EvidenceSourceTransientException(ERROR_UPSTREAM_UNAVAILABLE, "Error communicating with upstream source", ex);
+        }
+
+        if (!result.IsSuccessStatusCode)
+        {
+            throw result.StatusCode switch
+            {
+                HttpStatusCode.NotFound => new EvidenceSourcePermanentClientException(ERROR_NOT_FOUND, "Upstream source could not find the requested entity (404)"),
+                HttpStatusCode.BadRequest => new EvidenceSourcePermanentClientException(ERROR_INVALID_INPUT, "Upstream source indicated an invalid request (400)"),
+                _ => new EvidenceSourceTransientException(ERROR_UPSTREAM_UNAVAILABLE, $"Upstream source returned an HTTP error code ({(int)result.StatusCode})")
+            };
+        }
+
+        try
+        {
+            return JsonConvert.DeserializeObject<T>(await result.Content.ReadAsStringAsync());
+        }
+        catch (Exception ex)
+        {
+            _logger.LogError("Unable to parse data returned from upstream source: {exceptionType}: {exceptionMessage}", ex.GetType().Name, ex.Message);
+            throw new EvidenceSourcePermanentServerException(ERROR_UNABLE_TO_PARSE_RESPONSE, "Could not parse the data model returned from upstream source", ex);
+        }
+    }
+}
diff --git a/src/Dan.Plugin.DATASOURCENAME/Program.cs b/src/Dan.Plugin.DATASOURCENAME/Program.cs
new file mode 100644
index 0000000..00e9243
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/Program.cs
@@ -0,0 +1,23 @@
+using Dan.Plugin.DATASOURCENAME;
+using Microsoft.Extensions.Hosting;
+using Dan.Common.Extensions;
+using Microsoft.Extensions.DependencyInjection;
+
+var host = new HostBuilder()
+    .ConfigureDanPluginDefaults()
+    .ConfigureAppConfiguration((context, configuration) =>
+    {
+        // Add more configuration sources if necessary. ConfigureDanPluginDefaults will load environment variables, which include
+        // local.settings.json (if developing locally) and the application settings for the Azure Function.
+    })
+    .ConfigureServices((context, services) =>
+    {
+        // Add any additional services here
+
+        // This makes IOptions<Settings> available in the DI container
+        var configurationRoot = context.Configuration;
+        services.Configure<Settings>(configurationRoot);
+    })
+    .Build();
+
+await host.RunAsync();
diff --git a/src/Dan.Plugin.DATASOURCENAME/Properties/serviceDependencies.json b/src/Dan.Plugin.DATASOURCENAME/Properties/serviceDependencies.json
new file mode 100644
index 0000000..df4dcc9
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/Properties/serviceDependencies.json
@@ -0,0 +1,11 @@
+{
+    "dependencies": {
+        "appInsights1": {
+            "type": "appInsights"
+        },
+        "storage1": {
+            "type": "storage",
+            "connectionId": "AzureWebJobsStorage"
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/Dan.Plugin.DATASOURCENAME/Properties/serviceDependencies.local.json b/src/Dan.Plugin.DATASOURCENAME/Properties/serviceDependencies.local.json
new file mode 100644
index 0000000..b804a28
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/Properties/serviceDependencies.local.json
@@ -0,0 +1,11 @@
+{
+    "dependencies": {
+        "appInsights1": {
+            "type": "appInsights.sdk"
+        },
+        "storage1": {
+            "type": "storage.emulator",
+            "connectionId": "AzureWebJobsStorage"
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/Dan.Plugin.DATASOURCENAME/Settings.cs b/src/Dan.Plugin.DATASOURCENAME/Settings.cs
new file mode 100644
index 0000000..7f215be
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/Settings.cs
@@ -0,0 +1,10 @@
+namespace Dan.Plugin.DATASOURCENAME;
+
+public class Settings
+{
+    public int DefaultCircuitBreakerOpenCircuitTimeSeconds { get; set; }
+    public int DefaultCircuitBreakerFailureBeforeTripping { get; set; }
+    public int SafeHttpClientTimeout { get; set; }
+
+    public string EndpointUrl { get; set; }
+}
diff --git a/src/Dan.Plugin.DATASOURCENAME/host.json b/src/Dan.Plugin.DATASOURCENAME/host.json
new file mode 100644
index 0000000..748e3a0
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/host.json
@@ -0,0 +1,14 @@
+{
+    "version": "2.0",
+    "logging": {
+        "applicationInsights": {
+            "samplingSettings": {
+                "isEnabled": false
+            }
+        },
+        "logLevel": {
+            "default": "Information",
+            "Microsoft": "Warning"
+        }
+    }
+}
diff --git a/src/Dan.Plugin.DATASOURCENAME/local.settings.json.template b/src/Dan.Plugin.DATASOURCENAME/local.settings.json.template
new file mode 100644
index 0000000..43d2a82
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/local.settings.json.template
@@ -0,0 +1,15 @@
+{
+    "IsEncrypted": false,
+    "Values": {
+        "AzureWebJobsStorage": "UseDevelopmentStorage=true",
+        "FUNCTIONS_WORKER_RUNTIME": "dotnet-isolated",
+
+
+        "DefaultCircuitBreakerOpenCircuitTimeSeconds": "10",
+        "DefaultCircuitBreakerFailureBeforeTripping": "4",
+        "SafeHttpClientTimeout": "30",
+
+        "EndpointUrl": "https://example.com"
+
+    }
+}
diff --git a/src/Dan.Plugin.DATASOURCENAME/worker.json b/src/Dan.Plugin.DATASOURCENAME/worker.json
new file mode 100644
index 0000000..56eb4e5
--- /dev/null
+++ b/src/Dan.Plugin.DATASOURCENAME/worker.json
@@ -0,0 +1,13 @@
+{
+    "Logging": {
+        "LogLevel": {
+            "Default": "Information",
+            "Function": "Debug"
+        },
+        "ApplicationInsights": {
+            "SamplingSettings": {
+                "IsEnabled": false
+            }
+        }
+    }
+}
diff --git a/test/Dan.Plugin.DATASOURCENAME.Test/Dan.Plugin.DATASOURCENAME.Test.csproj b/test/Dan.Plugin.DATASOURCENAME.Test/Dan.Plugin.DATASOURCENAME.Test.csproj
new file mode 100644
index 0000000..2eb5620
--- /dev/null
+++ b/test/Dan.Plugin.DATASOURCENAME.Test/Dan.Plugin.DATASOURCENAME.Test.csproj
@@ -0,0 +1,24 @@
+<Project Sdk="Microsoft.NET.Sdk">
+
+  <PropertyGroup>
+    <TargetFramework>net6.0</TargetFramework>
+    <Nullable>enable</Nullable>
+
+    <IsPackable>false</IsPackable>
+  </PropertyGroup>
+
+  <ItemGroup>
+    <PackageReference Include="Microsoft.NET.Test.Sdk" />
+    <PackageReference Include="xunit" />
+    <PackageReference Include="xunit.runner.visualstudio">
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+      <PrivateAssets>all</PrivateAssets>
+    </PackageReference>
+    <PackageReference Include="coverlet.collector">
+      <PrivateAssets>all</PrivateAssets>
+      <IncludeAssets>runtime; build; native; contentfiles; analyzers; buildtransitive</IncludeAssets>
+    </PackageReference>
+  </ItemGroup>
+
+</Project>
diff --git a/test/Dan.Plugin.DATASOURCENAME.Test/UnitTest1.cs b/test/Dan.Plugin.DATASOURCENAME.Test/UnitTest1.cs
new file mode 100644
index 0000000..52c615e
--- /dev/null
+++ b/test/Dan.Plugin.DATASOURCENAME.Test/UnitTest1.cs
@@ -0,0 +1,12 @@
+using Xunit;
+
+namespace Dan.Plugin.DATASOURCENAME.Test
+{
+    public class UnitTest1
+    {
+        [Fact]
+        public void TestMethod1()
+        {
+        }
+    }
+}
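The UnitTest1 class above is an empty placeholder. As a rough starting point — a minimal sketch, not part of the diff, assuming the test project adds a ProjectReference to the plugin project (and thereby its dependencies such as Dan.Common and Newtonsoft.Json.Schema); the class and method names here are illustrative — a test like the following exercises Metadata.GetEvidenceCodes and checks that both datasets declared in Plugin.cs are present:

using Xunit;

namespace Dan.Plugin.DATASOURCENAME.Test
{
    public class MetadataTests
    {
        // Assumes the test project references src/Dan.Plugin.DATASOURCENAME,
        // which the template csproj above does not yet include.
        [Fact]
        public void GetEvidenceCodes_ContainsBothDatasets()
        {
            var metadata = new Metadata();

            var evidenceCodes = metadata.GetEvidenceCodes();

            Assert.Equal(2, evidenceCodes.Count);
            Assert.Contains(evidenceCodes, ec => ec.EvidenceCodeName == Plugin.SimpleDatasetName);
            Assert.Contains(evidenceCodes, ec => ec.EvidenceCodeName == Plugin.RichDatasetName);
        }
    }
}

Checking the metadata in a test like this catches renamed datasets or value fields early, since the function names and dataset names must stay in sync for harvesting to work.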