diff --git a/.github/workflows/continuous-integration.yml b/.github/workflows/continuous-integration.yml
index 25c730e..7fa3c31 100644
--- a/.github/workflows/continuous-integration.yml
+++ b/.github/workflows/continuous-integration.yml
@@ -25,8 +25,8 @@ jobs:
       - name: Set up dotnet
         uses: actions/setup-dotnet@v1
         with:
-          dotnet-version: '3.1.x'
-
+          dotnet-version: '8.0.x'
+
       - name: Build dotnet solution
         run: dotnet build source/gpconnect-analytics.sln
diff --git a/.gitignore b/.gitignore
index 7dbaf57..7b4683f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -4,8 +4,12 @@
 .vs
 *.ps1
+.vscode
+**/CoverageReport/
+**/TestResults/
 database/flyway-7.0.0/
 serviceDependencies.json
 serviceDependencies.local.json
 *.arm.json
-/source/gpconnect-analytics.Functions/Properties/PublishProfiles/GPConnectAnalytics - Zip Deploy.pubxml
+/source/gpconnect-analytics.Functions/Properties/PublishProfiles/GPConnectAnalytics - Zip Deploy.pubxml.DS_Store
+.DS_Store
\ No newline at end of file
diff --git a/README.md b/README.md
index 579c36a..fea345c 100644
--- a/README.md
+++ b/README.md
@@ -7,6 +7,7 @@
 [![Build Actions Status](https://github.com/nhsconnect/gpconnect-analytics/workflows/continuous-integration/badge.svg)](https://github.com/nhsconnect/gpconnect-analytics/actions?)
 ## End-to-end data flow
+
 ![End-to-end diagram](documentation/end-to-end-data-flow.png)
 ## Data extracts
@@ -41,13 +42,13 @@ General format:
 `PROJECTNAME-EXTRACTNAME-QUERYFROMDATE-QUERYTODATE-SPLUNKINSTANCE-EXTRACTDATE.csv`
-Where
- - PROJECTNAME is `gpcanalytics`
- - EXTRACTNAME is `asidlookup`, `ssptrans` (MESH transactions TBC)
- - QUERYDATEFROM and QUERYDATETO is `YYYYMMDDTHHmmss`
- - SPLUNKINSTANCE is `cloud`, `spinea`, `spineb`
- - EXTRACTDATE is `YYYYMMDDTHHmmss`
+Where
+- PROJECTNAME is `gpcanalytics`
+- EXTRACTNAME is `asidlookup`, `ssptrans` (MESH transactions TBC)
+- QUERYDATEFROM and QUERYDATETO is `YYYYMMDDTHHmmss`
+- SPLUNKINSTANCE is `cloud`, `spinea`, `spineb`
+- EXTRACTDATE is `YYYYMMDDTHHmmss`
 Examples:
@@ -55,7 +56,8 @@ Examples:
 - `gpcanalytics-ssptrans-20200101T000000-20200107T000000-cloud-20210105T103000.csv`
 - `gpcanalytics-ssptrans-20200107T000000-2020014T000000-spinea-20210105T103000.csv`
-Note: The QUERYDATEFROM and QUERYDATETO don't affect the output of the ASID lookup data query from Splunk, however are included for consistency.
+Note: The QUERYDATEFROM and QUERYDATETO don't affect the output of the ASID lookup data query from Splunk, however are
+included for consistency.
 ## Run a local SQL Server instance
@@ -67,3 +69,44 @@ To run the instance on the default port:
 `docker run -e 'ACCEPT_EULA=Y' -e 'SA_PASSWORD=StrongP@ssword1' -p 1433:1433 -d mcr.microsoft.com/mssql/server`
+## Testing
+
+Tests were added to the project on 8th Feb 2025 by Grant Riordan, covering:
+
+- Core Project
+- Functions Project
+- Integration test for Hierarchy repository
+
+### How to Run a Coverage Report
+
+**Run the coverage tests**:
+
+If you do not own a DotCover license or equivalent, you can use `coverlet`, a free tool for running coverage reports.
+
+- navigate to the `/source` directory
+- open a terminal and paste one of the following:
+
+```bash
+dotnet test --collect:"XPlat Code Coverage" -m:1
+```
+
+or
+
+```bash
+dotnet test /p:CollectCoverage=true /p:CoverletOutputFormat=lcov
+
+```
+
+**Install Report Generator Globally**
+Report Generator allows us to build an HTML report of the coverage, making it easier to view.
+ +run the following to install: +```bash +dotnet tool install -g dotnet-reportgenerator-globaltool + +``` +then run the following to merge the coverage results into 1 report file + +```bash +reportgenerator -reports:"../**/coverage.cobertura.xml" -reporttypes:"html" -targetdir:"./CoverageReport" +- ``` diff --git a/database/schema/V1.6__AddHierarchyProviderTable.sql b/database/schema/V1.6__AddHierarchyProviderTable.sql new file mode 100644 index 0000000..b67e2fc --- /dev/null +++ b/database/schema/V1.6__AddHierarchyProviderTable.sql @@ -0,0 +1,20 @@ +SET + ANSI_NULLS ON; + +SET + QUOTED_IDENTIFIER ON; + +CREATE TABLE Data.HierarchyProviderConsumers ( + OdsCode NVARCHAR(450) NOT NULL, + PracticeName NVARCHAR(MAX) NULL, + RegisteredPatientCount INT NOT NULL, + RegionCode NVARCHAR(MAX) NULL, + RegionName NVARCHAR(MAX) NULL, + Icb22Name NVARCHAR(MAX) NULL, + PcnName NVARCHAR(MAX) NULL, + Appointments13000 INT NOT NULL, + CONSTRAINT PK_HierarchyProviderConsumers PRIMARY KEY CLUSTERED (OdsCode ASC) +) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY]; + +SET + ANSI_PADDING ON; \ No newline at end of file diff --git a/source/.gitignore b/source/.gitignore index ffdf0c2..9824b04 100644 --- a/source/.gitignore +++ b/source/.gitignore @@ -44,6 +44,7 @@ dlldata.c project.lock.json project.fragment.lock.json artifacts/ +coverage/* *_i.c *_p.c @@ -254,4 +255,4 @@ paket-files/ *.sln.iml # CodeRush -.cr/ \ No newline at end of file +.cr/.DS_Store diff --git a/source/.runsettings b/source/.runsettings new file mode 100644 index 0000000..3a90f83 --- /dev/null +++ b/source/.runsettings @@ -0,0 +1,26 @@ + + + + + + + cobertura,opencover + + [Core.DTOs]*, + [Core.DTOs.*]* + + + **/*.g.cs, + **/Generated*.cs, + **/Program.cs, + **/Microsoft.Azure.Functions.Worker.Sdk.Generators/**/*.cs + + + GeneratedCodeAttribute + + true + + + + + \ No newline at end of file diff --git a/source/Core.Tests/.gitignore b/source/Core.Tests/.gitignore new file mode 100644 index 0000000..c74c428 --- /dev/null +++ b/source/Core.Tests/.gitignore @@ -0,0 +1,264 @@ +/coveragereport +/TestResults + +# Azure Functions localsettings file +local.settings.json + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# DNX +project.lock.json +project.fragment.lock.json +artifacts/ + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +_NCrunch_* +.*crunch*.local.xml 
+nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +#*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. +!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignoreable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. 
Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc \ No newline at end of file diff --git a/source/Core.Tests/Core.Tests.csproj b/source/Core.Tests/Core.Tests.csproj new file mode 100644 index 0000000..6891900 --- /dev/null +++ b/source/Core.Tests/Core.Tests.csproj @@ -0,0 +1,38 @@ + + + + net8.0 + enable + enable + + false + true + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + + + + + all + runtime; build; native; contentfiles; analyzers; buildtransitive + + + + + + + + + + diff --git a/source/Core.Tests/Helpers/ApplicationHelperTests.cs b/source/Core.Tests/Helpers/ApplicationHelperTests.cs new file mode 100644 index 0000000..dc23801 --- /dev/null +++ b/source/Core.Tests/Helpers/ApplicationHelperTests.cs @@ -0,0 +1,104 @@ +using System.Reflection; +using Core.Helpers; +using FluentAssertions; +using Moq; +using Xunit; + +namespace Core.Tests.Helpers; + +public class ApplicationHelperTests +{ + public ApplicationHelperTests() + { + // Reset the environment variable before each test + Environment.SetEnvironmentVariable("BUILD_TAG", null); + } + + [Fact] + public void GetAssemblyVersionInternal_ShouldReturnAssemblyFullName_WhenBuildTagIsNull() + { + // Arrange + Environment.SetEnvironmentVariable("BUILD_TAG", null); + var mockAssembly = new Mock>(); + mockAssembly.Setup(x => x.Invoke()).Returns(typeof(ApplicationHelper).Assembly); + + // Act + var result = ApplicationHelper.ApplicationVersion.GetAssemblyVersionInternal(mockAssembly.Object); + + // Assert + result.Should().Be(typeof(ApplicationHelper).Assembly.GetName().FullName); + } + + [Fact] + public void GetAssemblyVersion_ShouldReturnAssemblyFullName_WhenBuildTagIsNull() + { + // Arrange + Environment.SetEnvironmentVariable("BUILD_TAG", null); + + // Act + var result = ApplicationHelper.ApplicationVersion.GetAssemblyVersion(); + + // Assert + result.Should().Be(typeof(ApplicationHelper).Assembly.GetName().FullName); + } + + [Fact] + public void GetAssemblyVersionInternal_ShouldReturnAssemblyFullName_WhenBuildTagIsEmpty() + { + // Arrange + Environment.SetEnvironmentVariable("BUILD_TAG", ""); + var mockAssembly = new Mock>(); + mockAssembly.Setup(x => x.Invoke()).Returns(typeof(ApplicationHelper).Assembly); + + // Act + var result = ApplicationHelper.ApplicationVersion.GetAssemblyVersionInternal(mockAssembly.Object); + + // Assert + result.Should().Be(typeof(ApplicationHelper).Assembly.GetName().FullName); + } + + [Fact] + public void GetAssemblyVersionInternal_ShouldReturnAssemblyFullName_WhenBuildTagIsWhitespace() + { + // Arrange + Environment.SetEnvironmentVariable("BUILD_TAG", " "); + var mockAssembly = new Mock>(); + 
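// The mocked assembly factory supplies the value that GetAssemblyVersionInternal falls back to when BUILD_TAG is blank.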
mockAssembly.Setup(x => x.Invoke()).Returns(typeof(ApplicationHelper).Assembly); + + // Act + var result = ApplicationHelper.ApplicationVersion.GetAssemblyVersionInternal(mockAssembly.Object); + + // Assert + result.Should().Be(typeof(ApplicationHelper).Assembly.GetName().FullName); + } + + [Fact] + public void GetAssemblyVersionInternal_ShouldReturnBuildTag_WhenBuildTagIsSet() + { + // Arrange + string expectedBuildTag = "1.0.0-Build123"; + Environment.SetEnvironmentVariable("BUILD_TAG", expectedBuildTag); + var mockAssembly = new Mock>(); + + // Act + var result = ApplicationHelper.ApplicationVersion.GetAssemblyVersionInternal(mockAssembly.Object); + + // Assert + result.Should().Be(expectedBuildTag); + } + + [Fact] + public void GetAssemblyVersionInternal_ShouldHandleNullAssembly() + { + // Arrange + Environment.SetEnvironmentVariable("BUILD_TAG", null); + var mockAssembly = new Mock>(); + mockAssembly.Setup(x => x.Invoke()).Returns((Assembly)null); + + // Act + var result = ApplicationHelper.ApplicationVersion.GetAssemblyVersionInternal(mockAssembly.Object); + + // Assert + result.Should().BeNull(); + } +} \ No newline at end of file diff --git a/source/Core.Tests/Helpers/AttributeExtensionsTests.cs b/source/Core.Tests/Helpers/AttributeExtensionsTests.cs new file mode 100644 index 0000000..32974bb --- /dev/null +++ b/source/Core.Tests/Helpers/AttributeExtensionsTests.cs @@ -0,0 +1,87 @@ +using Core.Helpers; +using FluentAssertions; +using Xunit; + +namespace Core.Tests.Helpers +{ + public class AttributeExtensionsTests + { + [Fact] + public void GetFileType_ShouldReturnDocument_ForDocumentPath() + { + // Arrange + const string filePath = "/asid-lookup-data/report.docx"; + + // Act + var fileType = filePath.GetFileType(); + + // Assert + fileType.Should().Be(FileTypes.asidlookup); + } + + [Fact] + public void GetFileType_ShouldReturnImage_ForImagePath() + { + // Arrange + const string filePath = "/ssp-transactions/file1.csv"; + + // Act + var fileType = filePath.GetFileType(); + + // Assert + fileType.Should().Be(FileTypes.ssptrans); + } + + [Fact] + public void GetFileType_ShouldReturnVideo_ForVideoPath() + { + // Arrange + const string filePath = "/mesh-transactions/file1.csv"; + + // Act + var fileType = filePath.GetFileType(); + + // Assert + fileType.Should().Be(FileTypes.meshtrans); + } + + [Fact] + public void GetFileType_ShouldReturnNull_ForUnknownPath() + { + // Arrange + const string filePath = "/unknown/path/file.txt"; + + // Act + var fileType = filePath.GetFileType(); + + // Assert + fileType.Should().BeNull(); + } + + [Fact] + public void GetFileType_ShouldReturnNull_ForEmptyPath() + { + // Arrange + const string filePath = ""; + + // Act + var fileType = filePath.GetFileType(); + + // Assert + fileType.Should().BeNull(); + } + + [Fact] + public void GetFileType_ShouldReturnNull_ForNullPath() + { + // Arrange + const string filePath = null; + + // Act + var fileType = filePath.GetFileType(); + + // Assert + fileType.Should().BeNull(); + } + } +} \ No newline at end of file diff --git a/source/Core.Tests/Helpers/ConnectionStringsTests.cs b/source/Core.Tests/Helpers/ConnectionStringsTests.cs new file mode 100644 index 0000000..b9e01d4 --- /dev/null +++ b/source/Core.Tests/Helpers/ConnectionStringsTests.cs @@ -0,0 +1,20 @@ +using Core.Helpers; +using FluentAssertions; +using Xunit; + +namespace Core.Tests.Helpers; + +public class ConnectionStringsTests +{ + [Fact] + public void GpConnectAnalytics_ShouldNotBeEmpty() + { + // Arrange + // Act + var result = 
ConnectionStrings.GpConnectAnalytics; + + // Assert + result.Should().NotBeEmpty(); + result.Should().Be("GpConnectAnalytics"); + } +} \ No newline at end of file diff --git a/source/Core.Tests/Helpers/DateFormatConstantsTests.cs b/source/Core.Tests/Helpers/DateFormatConstantsTests.cs new file mode 100644 index 0000000..8ed0af8 --- /dev/null +++ b/source/Core.Tests/Helpers/DateFormatConstantsTests.cs @@ -0,0 +1,19 @@ +using Core.Helpers; +using FluentAssertions; +using Xunit; + +namespace Core.Tests.Helpers; + +public class DateFormatConstantsTests +{ + [Fact] + public void ValidateDateFormatConstants() + { + DateFormatConstants.FilePathQueryDate.Should().Be("yyyyMMdd"); + DateFormatConstants.FilePathQueryHour.Should().Be("hhmmss"); + DateFormatConstants.FilePathQueryDateYearMonth.Should().Be("yyyy-MM"); + DateFormatConstants.FilePathNowDate.Should().Be("yyyyMMddTHHmmss"); + DateFormatConstants.SplunkQueryDate.Should().Be("MM/dd/yyyy:HH:mm:ss"); + DateFormatConstants.SplunkQueryHour.Should().Be("hhmm"); + } +} \ No newline at end of file diff --git a/source/Core.Tests/Helpers/DateTimeHelperTests.cs b/source/Core.Tests/Helpers/DateTimeHelperTests.cs new file mode 100644 index 0000000..d8bc089 --- /dev/null +++ b/source/Core.Tests/Helpers/DateTimeHelperTests.cs @@ -0,0 +1,59 @@ +using Core.Helpers; +using FluentAssertions; +using Xunit; + +namespace Core.Tests.Helpers +{ + public class DateTimeHelperTests + { + [Fact] + public void EachDay_ShouldReturnAllDaysBetweenTwoDates_Inclusive() + { + // Arrange + var fromDate = new DateTime(2025, 1, 1); + var toDate = new DateTime(2025, 1, 5); + + // Act + var result = DateTimeHelper.EachDay(fromDate, toDate).ToList(); + + // Assert + result.Should().HaveCount(5); + result.Should().ContainInOrder( + new DateTime(2025, 1, 1), + new DateTime(2025, 1, 2), + new DateTime(2025, 1, 3), + new DateTime(2025, 1, 4), + new DateTime(2025, 1, 5) + ); + } + + [Fact] + public void EachDay_ShouldReturnSingleDay_WhenFromAndToDatesAreSame() + { + // Arrange + var fromDate = new DateTime(2025, 1, 1); + var toDate = new DateTime(2025, 1, 1); + + // Act + var result = DateTimeHelper.EachDay(fromDate, toDate).ToList(); + + // Assert + result.Should().HaveCount(1); + result.First().Should().Be(fromDate); + } + + [Fact] + public void EachDay_ShouldReturnEmpty_WhenFromDateIsAfterToDate() + { + // Arrange + var fromDate = new DateTime(2025, 1, 5); + var toDate = new DateTime(2025, 1, 1); + + // Act + var result = DateTimeHelper.EachDay(fromDate, toDate).ToList(); + + // Assert + result.Should().BeEmpty(); + } + } +} \ No newline at end of file diff --git a/source/Core.Tests/Helpers/FilePathTests.cs b/source/Core.Tests/Helpers/FilePathTests.cs new file mode 100644 index 0000000..d64db8c --- /dev/null +++ b/source/Core.Tests/Helpers/FilePathTests.cs @@ -0,0 +1,47 @@ +using Core.Helpers; +using FluentAssertions; +using Xunit; + +namespace Core.Tests.Helpers; + +public class FilePathTests +{ + [Fact] + public void FilePathValue_Returns_Value_WhenEmptyIsProvidedToConstructor() + { + // Arrange + var filePathAttribute = new FilePathAttribute("test_file_path"); + + // Act + var filePath = filePathAttribute.FilePath; + + // Assert + filePath.Should().Be("test_file_path"); + } + + [Fact] + public void FilePathValue_Returns_Empty_WhenEmptyIsProvidedToConstructor() + { + // Arrange + var filePathAttribute = new FilePathAttribute(""); + + // Act + var filePath = filePathAttribute.FilePath; + + // Assert + filePath.Should().Be(""); + } + + [Fact] + public void 
FilePathValue_Returns_Null_WhenNullIsProvidedToConstructor() + { + // Arrange + var filePathAttribute = new FilePathAttribute(null!); + + // Act + var filePath = filePathAttribute.FilePath; + + // Assert + filePath.Should().Be(null); + } +} \ No newline at end of file diff --git a/source/Core.Tests/Services/DataServiceTests.cs b/source/Core.Tests/Services/DataServiceTests.cs new file mode 100644 index 0000000..5d8fc49 --- /dev/null +++ b/source/Core.Tests/Services/DataServiceTests.cs @@ -0,0 +1,209 @@ +using System.Data; +using System.Data.Common; +using Core.Repositories; +using Core.Services.Interfaces; +using Dapper; +using FluentAssertions; +using Functions.Services; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Moq; +using Xunit; + +namespace Core.Tests.Services +{ + public class DataServiceTests + { + private readonly Mock _mockConfigService; + private readonly Mock _mockDapper; + private readonly DataService _dataService; + private readonly string _testConnectionString = "Server=localhost;Database=TestDb;User Id=test;Password=test;"; + private readonly Mock _mockConnectionFactory; + private readonly FakeLogger _fakeLogger; + private readonly Mock _mockConnection; + + public DataServiceTests() + { + _fakeLogger = new FakeLogger(); + _mockConfigService = new Mock(); + _mockDapper = new Mock(); + _mockConnectionFactory = new Mock(); + + _mockDapper.Setup(d => d.ExecuteAsync( + It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(1); + + _mockConfigService.Setup(c => c.GetConnectionString(It.IsAny())) + .Returns(_testConnectionString); + + _mockConnection = new Mock(); + _mockConnectionFactory.Setup(cf => cf.CreateConnection(_testConnectionString)) + .Returns(_mockConnection.Object); + + _dataService = new DataService(_fakeLogger, _mockConfigService.Object, _mockDapper.Object, + _mockConnectionFactory.Object); + } + + [Fact] + public async Task ExecuteRawUpsertSqlAsync_ShouldReturnRowsAffected() + { + // Arrange + const string sqlCommand = "INSERT INTO TestTable (Id, Name) VALUES (@Id, @Name);"; + var parameters = new { Id = 1, Name = "Test" }; + + _mockDapper.Setup(d => d.ExecuteAsync( + It.IsAny(), sqlCommand, parameters, It.IsAny())) + .ReturnsAsync(1); + + // Act + var result = await _dataService.ExecuteRawUpsertSqlAsync(sqlCommand, parameters); + + // Assert + result.Should().Be(1); + } + + [Fact] + public async Task ExecuteRawUpsertSqlAsync_ShouldLogAndThrowOnError() + { + // Arrange + const string sqlCommand = "INVALID SQL COMMAND"; + var parameters = new { Id = 1, Name = "Test" }; + + _mockDapper.Setup(d => d.ExecuteAsync( + It.IsAny(), sqlCommand, parameters, It.IsAny())) + .ThrowsAsync(new Exception("Simulated SQL error")); + + // Act + var act = async () => await _dataService.ExecuteRawUpsertSqlAsync(sqlCommand, parameters); + + // Assert + await act.Should().ThrowAsync(); + var r = _fakeLogger.LatestRecord; + Assert.Equal(LogLevel.Error, r.Level); + Assert.Contains($"An error has occurred while executing the raw SQL command: {sqlCommand}", r.Message); + } + + [Fact] + public async Task ExecuteStoreProcedure_ShouldReturn_Integer() + { + // Arrange + const string procedureName = "TestProcedure"; + var parameters = new DynamicParameters(); + var expectedResult = 1; + + _mockDapper.Setup(d => d.ExecuteAsync( + It.IsAny(), procedureName, parameters, null)) + .ReturnsAsync(expectedResult); + + // Act + var result = await _dataService.ExecuteStoredProcedure(procedureName, parameters); + + // 
Assert + result.Should().Be(expectedResult); + } + + [Fact] + public async Task ExecuteStoreProcedure_Should_LogErrorAndThrow() + { + // Arrange + const string procedureName = "TestProcedure"; + var parameters = new DynamicParameters(); + var expectedResult = 1; + + _mockDapper.Setup(d => d.ExecuteAsync( + It.IsAny(), procedureName, parameters, null)) + .ThrowsAsync(new Exception("Database went wrong")); + + // Act + await _dataService.Invoking(x => x.ExecuteStoredProcedure(procedureName, parameters)) + .Should().ThrowAsync(); + + var log = _fakeLogger.LatestRecord; + log.Message.Should().Be($"An error has occurred while attempting to execute the function {procedureName}"); + } + + [Fact] + public async Task ExecuteQueryStoredProcedure_ShouldReturnResults() + { + // Arrange + const string procedureName = "TestProcedure"; + var parameters = new DynamicParameters(); + var expectedResult = new List { "Result1", "Result2" }; + + _mockDapper.Setup(d => d.QueryStoredProcedureAsync( + It.IsAny(), procedureName, parameters, 0)) + .ReturnsAsync(expectedResult); + + // Act + var result = await _dataService.ExecuteQueryStoredProcedure(procedureName, parameters); + + // Assert + result.Should().BeEquivalentTo(expectedResult); + } + + [Fact] + public async Task ExecuteQueryStoredProcedure_ShouldLogAndThrowOnError() + { + // Arrange + const string procedureName = "TestProcedure"; + var parameters = new DynamicParameters(); + + _mockDapper.Setup(d => d.QueryStoredProcedureAsync( + It.IsAny(), procedureName, parameters, 0)) + .ThrowsAsync(new Exception("Simulated SQL error")); + + // Act + var act = async () => await _dataService.ExecuteQueryStoredProcedure(procedureName, parameters); + + // Assert + await act.Should().ThrowAsync(); + var r = _fakeLogger.LatestRecord; + Assert.Equal(LogLevel.Error, r.Level); + Assert.Contains($"An error has occurred while attempting to execute the function {procedureName}", + r.Message); + } + + [Fact] + public async Task ExecuteStoredProcedureWithOutputParameters_ShouldReturnParameters() + { + // Arrange + const string procedureName = "TestProcedure"; + var parameters = new DynamicParameters(); + parameters.Add("OutputParam", dbType: DbType.String, direction: ParameterDirection.Output); + + _mockDapper.Setup(d => d.ExecuteAsync( + It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny())) + .ReturnsAsync(1); + + // Act + var result = await _dataService.ExecuteStoredProcedureWithOutputParameters(procedureName, parameters); + + // Assert + result.Should().BeSameAs(parameters); + } + + [Fact] + public async Task ExecuteStoredProcedureWithOutputParameters_ShouldLogAndThrowOnError() + { + // Arrange + const string procedureName = "TestProcedure"; + var parameters = new DynamicParameters(); + + _mockDapper.Setup(d => d.ExecuteStoredProcedureAsync( + It.IsAny(), procedureName, parameters, 0)) + .ThrowsAsync(new Exception("Simulated SQL error")); + + // Act + var act = async () => + await _dataService.ExecuteStoredProcedureWithOutputParameters(procedureName, parameters); + + // Assert + await act.Should().ThrowAsync(); + var r = _fakeLogger.LatestRecord; + Assert.Equal(LogLevel.Error, r.Level); + Assert.Contains($"An error has occurred while attempting to execute the function {procedureName}", + r.Message); + } + } +} \ No newline at end of file diff --git a/source/Core/.gitignore b/source/Core/.gitignore new file mode 100644 index 0000000..b5b508a --- /dev/null +++ b/source/Core/.gitignore @@ -0,0 +1,265 @@ +## Ignore Visual Studio temporary files, build results, and +## files generated 
by popular Visual Studio add-ons. + +# Azure Functions localsettings file +local.settings.json + +# User-specific files +*.suo +*.user +*.userosscache +*.sln.docstates + +# User-specific files (MonoDevelop/Xamarin Studio) +*.userprefs + +# Build results +[Dd]ebug/ +[Dd]ebugPublic/ +[Rr]elease/ +[Rr]eleases/ +x64/ +x86/ +bld/ +[Bb]in/ +[Oo]bj/ +[Ll]og/ + +# Visual Studio 2015 cache/options directory +.vs/ +# Uncomment if you have tasks that create the project's static files in wwwroot +#wwwroot/ + +# MSTest test Results +[Tt]est[Rr]esult*/ +[Bb]uild[Ll]og.* + + +# NUNIT +*.VisualState.xml +TestResult.xml + +# Build Results of an ATL Project +[Dd]ebugPS/ +[Rr]eleasePS/ +dlldata.c + +# DNX +project.lock.json +project.fragment.lock.json +artifacts/ + +*_i.c +*_p.c +*_i.h +*.ilk +*.meta +*.obj +*.pch +*.pdb +*.pgc +*.pgd +*.rsp +*.sbr +*.tlb +*.tli +*.tlh +*.tmp +*.tmp_proj +*.log +*.vspscc +*.vssscc +.builds +*.pidb +*.svclog +*.scc + +# Chutzpah Test files +_Chutzpah* + +# Visual C++ cache files +ipch/ +*.aps +*.ncb +*.opendb +*.opensdf +*.sdf +*.cachefile +*.VC.db +*.VC.VC.opendb + +# Visual Studio profiler +*.psess +*.vsp +*.vspx +*.sap + +# TFS 2012 Local Workspace +$tf/ + +# Guidance Automation Toolkit +*.gpState + +# ReSharper is a .NET coding add-in +_ReSharper*/ +*.[Rr]e[Ss]harper +*.DotSettings.user + +# JustCode is a .NET coding add-in +.JustCode + +# TeamCity is a build add-in +_TeamCity* + +# DotCover is a Code Coverage Tool +*.dotCover + +# NCrunch +_NCrunch_* +.*crunch*.local.xml +nCrunchTemp_* + +# MightyMoose +*.mm.* +AutoTest.Net/ + +# Web workbench (sass) +.sass-cache/ + +# Installshield output folder +[Ee]xpress/ + +# DocProject is a documentation generator add-in +DocProject/buildhelp/ +DocProject/Help/*.HxT +DocProject/Help/*.HxC +DocProject/Help/*.hhc +DocProject/Help/*.hhk +DocProject/Help/*.hhp +DocProject/Help/Html2 +DocProject/Help/html + +# Click-Once directory +publish/ + +# Publish Web Output +*.[Pp]ublish.xml +*.azurePubxml +# TODO: Comment the next line if you want to checkin your web deploy settings +# but database connection strings (with potential passwords) will be unencrypted +#*.pubxml +*.publishproj + +# Microsoft Azure Web App publish settings. Comment the next line if you want to +# checkin your Azure Web App publish settings, but sensitive information contained +# in these scripts will be unencrypted +PublishScripts/ + +# NuGet Packages +*.nupkg +# The packages folder can be ignored because of Package Restore +**/packages/* +# except build/, which is used as an MSBuild target. 
+!**/packages/build/ +# Uncomment if necessary however generally it will be regenerated when needed +#!**/packages/repositories.config +# NuGet v3's project.json files produces more ignoreable files +*.nuget.props +*.nuget.targets + +# Microsoft Azure Build Output +csx/ +*.build.csdef + +# Microsoft Azure Emulator +ecf/ +rcf/ + +# Windows Store app package directories and files +AppPackages/ +BundleArtifacts/ +Package.StoreAssociation.xml +_pkginfo.txt + +# Visual Studio cache files +# files ending in .cache can be ignored +*.[Cc]ache +# but keep track of directories ending in .cache +!*.[Cc]ache/ + +# Others +ClientBin/ +~$* +*~ +*.dbmdl +*.dbproj.schemaview +*.jfm +*.pfx +*.publishsettings +node_modules/ +orleans.codegen.cs + +# Since there are multiple workflows, uncomment next line to ignore bower_components +# (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) +#bower_components/ + +# RIA/Silverlight projects +Generated_Code/ + +# Backup & report files from converting an old project file +# to a newer Visual Studio version. Backup files are not needed, +# because we have git ;-) +_UpgradeReport_Files/ +Backup*/ +UpgradeLog*.XML +UpgradeLog*.htm + +# SQL Server files +*.mdf +*.ldf + +# Business Intelligence projects +*.rdl.data +*.bim.layout +*.bim_*.settings + +# Microsoft Fakes +FakesAssemblies/ + +# GhostDoc plugin setting file +*.GhostDoc.xml + +# Node.js Tools for Visual Studio +.ntvs_analysis.dat + +# Visual Studio 6 build log +*.plg + +# Visual Studio 6 workspace options file +*.opt + +# Visual Studio LightSwitch build output +**/*.HTMLClient/GeneratedArtifacts +**/*.DesktopClient/GeneratedArtifacts +**/*.DesktopClient/ModelManifest.xml +**/*.Server/GeneratedArtifacts +**/*.Server/ModelManifest.xml +_Pvt_Extensions + +# Paket dependency manager +.paket/paket.exe +paket-files/ + +# FAKE - F# Make +.fake/ + +# JetBrains Rider +.idea/ +*.sln.iml + +# CodeRush +.cr/ + +# Python Tools for Visual Studio (PTVS) +__pycache__/ +*.pyc \ No newline at end of file diff --git a/source/Core/Core.csproj b/source/Core/Core.csproj new file mode 100644 index 0000000..57aafe7 --- /dev/null +++ b/source/Core/Core.csproj @@ -0,0 +1,23 @@ + + + + net8.0 + enable + enable + Core + Core + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/source/Core/DTOs/AppConfiguration.cs b/source/Core/DTOs/AppConfiguration.cs new file mode 100644 index 0000000..4aee1f4 --- /dev/null +++ b/source/Core/DTOs/AppConfiguration.cs @@ -0,0 +1,7 @@ +namespace Core.DTOs +{ + public class AppConfiguration + { + public string GpConnectAnalyticsConnectionString { get; set; } + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Request/AsidLookup.cs b/source/Core/DTOs/Request/AsidLookup.cs similarity index 87% rename from source/gpconnect-analytics.DTO/Request/AsidLookup.cs rename to source/Core/DTOs/Request/AsidLookup.cs index dd8deaf..5b65270 100644 --- a/source/gpconnect-analytics.DTO/Request/AsidLookup.cs +++ b/source/Core/DTOs/Request/AsidLookup.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.DTO.Request +namespace Core.DTOs.Request { public class AsidLookup { @@ -10,4 +10,4 @@ public class AsidLookup public string PName { get; set; } public string PostCode { get; set; } } -} +} \ No newline at end of file diff --git a/source/Core/DTOs/Request/AsidLookupRun.cs b/source/Core/DTOs/Request/AsidLookupRun.cs new file mode 100644 index 0000000..79c43fd --- /dev/null +++ b/source/Core/DTOs/Request/AsidLookupRun.cs @@ -0,0 +1,6 @@ +namespace Core.DTOs.Request +{ + 
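// Marker type for ASID lookup runs; it adds nothing of its own and inherits all of its properties from BaseRun.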
public class AsidLookupRun : BaseRun + { + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Request/BaseRun.cs b/source/Core/DTOs/Request/BaseRun.cs similarity index 86% rename from source/gpconnect-analytics.DTO/Request/BaseRun.cs rename to source/Core/DTOs/Request/BaseRun.cs index 359d1f2..12cb663 100644 --- a/source/gpconnect-analytics.DTO/Request/BaseRun.cs +++ b/source/Core/DTOs/Request/BaseRun.cs @@ -1,6 +1,4 @@ -using System; - -namespace gpconnect_analytics.DTO.Request +namespace Core.DTOs.Request { public class BaseRun { @@ -12,4 +10,4 @@ public class BaseRun public int FileRowCount { get; set; } public string ErrorMessage { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Request/File.cs b/source/Core/DTOs/Request/File.cs similarity index 72% rename from source/gpconnect-analytics.DTO/Request/File.cs rename to source/Core/DTOs/Request/File.cs index 59a8821..36f19ab 100644 --- a/source/gpconnect-analytics.DTO/Request/File.cs +++ b/source/Core/DTOs/Request/File.cs @@ -1,8 +1,8 @@ -namespace gpconnect_analytics.DTO.Request +namespace Core.DTOs.Request { public class File { public int FileTypeId { get; set; } public string FilePath { get; set; } } -} +} \ No newline at end of file diff --git a/source/Core/DTOs/Request/OrganisationHeirarchyProvider.cs b/source/Core/DTOs/Request/OrganisationHeirarchyProvider.cs new file mode 100644 index 0000000..5841837 --- /dev/null +++ b/source/Core/DTOs/Request/OrganisationHeirarchyProvider.cs @@ -0,0 +1,15 @@ +#nullable enable +namespace Core.DTOs.Request +{ + public class OrganisationHierarchyProvider + { + public required string OdsCode { get; set; } + public string? PracticeName { get; set; } + public int RegisteredPatientCount { get; set; } + public string? RegionCode { get; set; } + public string? RegionName { get; set; } + public string? Icb22Name { get; set; } + public string? 
PcnName { get; set; } + public int Appointments13000 { get; set; } + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Request/RequestUri.cs b/source/Core/DTOs/Request/RequestUri.cs similarity index 78% rename from source/gpconnect-analytics.DTO/Request/RequestUri.cs rename to source/Core/DTOs/Request/RequestUri.cs index e4798d6..6b52c97 100644 --- a/source/gpconnect-analytics.DTO/Request/RequestUri.cs +++ b/source/Core/DTOs/Request/RequestUri.cs @@ -1,6 +1,4 @@ -using System; - -namespace gpconnect_analytics.DTO.Request +namespace Core.DTOs.Request { public class UriRequest { @@ -9,4 +7,4 @@ public class UriRequest public DateTime LatestDate { get; set; } public TimeSpan Hour { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Request/SspTransaction.cs b/source/Core/DTOs/Request/SspTransaction.cs similarity index 91% rename from source/gpconnect-analytics.DTO/Request/SspTransaction.cs rename to source/Core/DTOs/Request/SspTransaction.cs index 3e16793..955d7a0 100644 --- a/source/gpconnect-analytics.DTO/Request/SspTransaction.cs +++ b/source/Core/DTOs/Request/SspTransaction.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.DTO.Request +namespace Core.DTOs.Request { public class SspTransaction { @@ -13,4 +13,4 @@ public class SspTransaction public string responseErrorMessage { get; set; } public string method { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Request/SspTransactionRun.cs b/source/Core/DTOs/Request/SspTransactionRun.cs similarity index 72% rename from source/gpconnect-analytics.DTO/Request/SspTransactionRun.cs rename to source/Core/DTOs/Request/SspTransactionRun.cs index 05183d2..bb64c47 100644 --- a/source/gpconnect-analytics.DTO/Request/SspTransactionRun.cs +++ b/source/Core/DTOs/Request/SspTransactionRun.cs @@ -1,10 +1,8 @@ -using System; - -namespace gpconnect_analytics.DTO.Request +namespace Core.DTOs.Request { public class SspTransactionRun : BaseRun { public DateTime QueryFromDate { get; set; } public DateTime QueryToDate { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Configuration/BlobStorage.cs b/source/Core/DTOs/Response/Configuration/BlobStorage.cs similarity index 83% rename from source/gpconnect-analytics.DTO/Response/Configuration/BlobStorage.cs rename to source/Core/DTOs/Response/Configuration/BlobStorage.cs index 75736d1..ce4588d 100644 --- a/source/gpconnect-analytics.DTO/Response/Configuration/BlobStorage.cs +++ b/source/Core/DTOs/Response/Configuration/BlobStorage.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.DTO.Response.Configuration +namespace Core.DTOs.Response.Configuration { public class BlobStorage { @@ -8,4 +8,4 @@ public class BlobStorage public string QueueName { get; set; } public string SqlExternalDataSourceName { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Configuration/Email.cs b/source/Core/DTOs/Response/Configuration/Email.cs similarity index 88% rename from source/gpconnect-analytics.DTO/Response/Configuration/Email.cs rename to source/Core/DTOs/Response/Configuration/Email.cs index c00a4e4..bc79e50 100644 --- a/source/gpconnect-analytics.DTO/Response/Configuration/Email.cs +++ b/source/Core/DTOs/Response/Configuration/Email.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.DTO.Response.Configuration +namespace Core.DTOs.Response.Configuration { public class Email { @@ -12,4 +12,4 @@ public class Email public 
string DefaultSubject { get; set; } public string RecipientAddress { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Configuration/FilePathConstants.cs b/source/Core/DTOs/Response/Configuration/FilePathConstants.cs similarity index 83% rename from source/gpconnect-analytics.DTO/Response/Configuration/FilePathConstants.cs rename to source/Core/DTOs/Response/Configuration/FilePathConstants.cs index 921e4f2..3dcaa54 100644 --- a/source/gpconnect-analytics.DTO/Response/Configuration/FilePathConstants.cs +++ b/source/Core/DTOs/Response/Configuration/FilePathConstants.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.DTO.Response.Configuration +namespace Core.DTOs.Response.Configuration { public class FilePathConstants { @@ -8,4 +8,4 @@ public class FilePathConstants public string FileExtension { get; set; } public string EmptyDateCharacter { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Configuration/FileType.cs b/source/Core/DTOs/Response/Configuration/FileType.cs similarity index 84% rename from source/gpconnect-analytics.DTO/Response/Configuration/FileType.cs rename to source/Core/DTOs/Response/Configuration/FileType.cs index c201267..68a3144 100644 --- a/source/gpconnect-analytics.DTO/Response/Configuration/FileType.cs +++ b/source/Core/DTOs/Response/Configuration/FileType.cs @@ -1,6 +1,4 @@ -using System; - -namespace gpconnect_analytics.DTO.Response.Configuration +namespace Core.DTOs.Response.Configuration { public class FileType { @@ -13,4 +11,4 @@ public class FileType public int QueryPeriodHours { get; set; } public bool Enabled { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Configuration/SplunkClient.cs b/source/Core/DTOs/Response/Configuration/SplunkClient.cs similarity index 85% rename from source/gpconnect-analytics.DTO/Response/Configuration/SplunkClient.cs rename to source/Core/DTOs/Response/Configuration/SplunkClient.cs index a378baf..684150b 100644 --- a/source/gpconnect-analytics.DTO/Response/Configuration/SplunkClient.cs +++ b/source/Core/DTOs/Response/Configuration/SplunkClient.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.DTO.Response.Configuration +namespace Core.DTOs.Response.Configuration { public class SplunkClient { @@ -10,4 +10,4 @@ public class SplunkClient public int QueryTimeout { get; set; } public string ApiToken { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Configuration/SplunkInstance.cs b/source/Core/DTOs/Response/Configuration/SplunkInstance.cs similarity index 68% rename from source/gpconnect-analytics.DTO/Response/Configuration/SplunkInstance.cs rename to source/Core/DTOs/Response/Configuration/SplunkInstance.cs index f8ecc16..9560903 100644 --- a/source/gpconnect-analytics.DTO/Response/Configuration/SplunkInstance.cs +++ b/source/Core/DTOs/Response/Configuration/SplunkInstance.cs @@ -1,8 +1,8 @@ -namespace gpconnect_analytics.DTO.Response.Configuration +namespace Core.DTOs.Response.Configuration { public class SplunkInstance { public string Source { get; set; } public string SourceGroup { get; set; } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Queue/Message.cs b/source/Core/DTOs/Response/Queue/Message.cs similarity index 76% rename from source/gpconnect-analytics.DTO/Response/Queue/Message.cs rename to source/Core/DTOs/Response/Queue/Message.cs index 0f5e84f..a2e554d 100644 --- 
a/source/gpconnect-analytics.DTO/Response/Queue/Message.cs +++ b/source/Core/DTOs/Response/Queue/Message.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.DTO.Response.Queue +namespace Core.DTOs.Response.Queue { public class Message { @@ -6,4 +6,4 @@ public class Message public string BlobName { get; set; } public bool Override { get; set; } = false; } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Splunk/Extract.cs b/source/Core/DTOs/Response/Splunk/Extract.cs similarity index 80% rename from source/gpconnect-analytics.DTO/Response/Splunk/Extract.cs rename to source/Core/DTOs/Response/Splunk/Extract.cs index 1935562..1e74fb1 100644 --- a/source/gpconnect-analytics.DTO/Response/Splunk/Extract.cs +++ b/source/Core/DTOs/Response/Splunk/Extract.cs @@ -1,6 +1,4 @@ -using System; - -namespace gpconnect_analytics.DTO.Response.Splunk +namespace Core.DTOs.Response.Splunk { public class Extract { @@ -10,4 +8,4 @@ public class Extract public TimeSpan QueryHour { get; set; } public bool Override { get; set; } = false; } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DTO/Response/Splunk/ExtractResponse.cs b/source/Core/DTOs/Response/Splunk/ExtractResponse.cs similarity index 71% rename from source/gpconnect-analytics.DTO/Response/Splunk/ExtractResponse.cs rename to source/Core/DTOs/Response/Splunk/ExtractResponse.cs index 49e5e39..46b9343 100644 --- a/source/gpconnect-analytics.DTO/Response/Splunk/ExtractResponse.cs +++ b/source/Core/DTOs/Response/Splunk/ExtractResponse.cs @@ -1,8 +1,6 @@ -using gpconnect_analytics.DTO.Request; -using System.IO; -using System.Net.Http; +using Core.DTOs.Request; -namespace gpconnect_analytics.DTO.Response.Splunk +namespace Core.DTOs.Response.Splunk { public class ExtractResponse { @@ -12,4 +10,4 @@ public class ExtractResponse public string FilePath { get; set; } public UriRequest UriRequest { get; set; } } -} +} \ No newline at end of file diff --git a/source/Core/Helpers/ApplicationHelper.cs b/source/Core/Helpers/ApplicationHelper.cs new file mode 100644 index 0000000..283f1d4 --- /dev/null +++ b/source/Core/Helpers/ApplicationHelper.cs @@ -0,0 +1,23 @@ +using System.Reflection; + +namespace Core.Helpers +{ + public class ApplicationHelper + { + public static class ApplicationVersion + { + public static string GetAssemblyVersion() + { + return GetAssemblyVersionInternal(Assembly.GetCallingAssembly); + } + + // Internal method to allow dependency injection for testing + public static string GetAssemblyVersionInternal(Func getAssembly) + { + var buildTag = System.Environment.GetEnvironmentVariable("BUILD_TAG"); + + return string.IsNullOrWhiteSpace(buildTag) ? getAssembly()?.GetName().FullName : buildTag; + } + } + } +} \ No newline at end of file diff --git a/source/Core/Helpers/AttributeExtensions.cs b/source/Core/Helpers/AttributeExtensions.cs new file mode 100644 index 0000000..d8b2572 --- /dev/null +++ b/source/Core/Helpers/AttributeExtensions.cs @@ -0,0 +1,27 @@ +using System.Reflection; + +namespace Core.Helpers +{ + public static class AttributeExtensions + { + public static FileTypes? GetFileType(this string filePath) + { + return GetValueFromPath(filePath); + } + + private static FileTypes? 
GetValueFromPath(string filePath) + { + if (string.IsNullOrEmpty(filePath)) + { + return null; + } + + var fileType = typeof(T).GetFields() + .FirstOrDefault(field => + field.GetCustomAttribute() is { } attribute && + filePath.Contains(attribute.FilePath)); + + return fileType?.GetValue(null) as FileTypes?; + } + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/ConnectionStrings.cs b/source/Core/Helpers/ConnectionStrings.cs similarity index 65% rename from source/gpconnect-analytics.DAL/ConnectionStrings.cs rename to source/Core/Helpers/ConnectionStrings.cs index dcc1cc6..fa09863 100644 --- a/source/gpconnect-analytics.DAL/ConnectionStrings.cs +++ b/source/Core/Helpers/ConnectionStrings.cs @@ -1,7 +1,7 @@ -namespace gpconnect_analytics.DAL +namespace Core.Helpers { public static class ConnectionStrings { - public static string GpConnectAnalytics { get; } = "GpConnectAnalytics"; + public static string GpConnectAnalytics { get; } = "GpConnectAnalytics"; } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.Helpers/DateFormatConstants.cs b/source/Core/Helpers/DateFormatConstants.cs similarity index 82% rename from source/gpconnect-analytics.Helpers/DateFormatConstants.cs rename to source/Core/Helpers/DateFormatConstants.cs index b30d318..a6f6dc9 100644 --- a/source/gpconnect-analytics.Helpers/DateFormatConstants.cs +++ b/source/Core/Helpers/DateFormatConstants.cs @@ -1,6 +1,6 @@ -namespace gpconnect_analytics.Helpers +namespace Core.Helpers { - public class DateFormatConstants + public static class DateFormatConstants { public const string FilePathQueryDate = "yyyyMMdd"; public const string FilePathQueryHour = "hhmmss"; @@ -9,4 +9,4 @@ public class DateFormatConstants public const string SplunkQueryDate = "MM/dd/yyyy:HH:mm:ss"; public const string SplunkQueryHour = "hhmm"; } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.Helpers/DateTimeHelper.cs b/source/Core/Helpers/DateTimeHelper.cs similarity index 74% rename from source/gpconnect-analytics.Helpers/DateTimeHelper.cs rename to source/Core/Helpers/DateTimeHelper.cs index c4db187..92d7241 100644 --- a/source/gpconnect-analytics.Helpers/DateTimeHelper.cs +++ b/source/Core/Helpers/DateTimeHelper.cs @@ -1,7 +1,4 @@ -using System; -using System.Collections.Generic; - -namespace gpconnect_analytics.Helpers +namespace Core.Helpers { public static class DateTimeHelper { diff --git a/source/gpconnect-analytics.Helpers/FilePath.cs b/source/Core/Helpers/FilePath.cs similarity index 57% rename from source/gpconnect-analytics.Helpers/FilePath.cs rename to source/Core/Helpers/FilePath.cs index cc28f59..85b4e83 100644 --- a/source/gpconnect-analytics.Helpers/FilePath.cs +++ b/source/Core/Helpers/FilePath.cs @@ -1,14 +1,12 @@ -using System; - -namespace gpconnect_analytics.Helpers +namespace Core.Helpers { public class FilePathAttribute : Attribute { - public string FilePath { get; protected set; } = ""; + public string FilePath { get; } = ""; public FilePathAttribute(string value) { FilePath = value; } } -} +} \ No newline at end of file diff --git a/source/Core/Helpers/FileTypes.cs b/source/Core/Helpers/FileTypes.cs new file mode 100644 index 0000000..cf68883 --- /dev/null +++ b/source/Core/Helpers/FileTypes.cs @@ -0,0 +1,9 @@ +namespace Core.Helpers +{ + public enum FileTypes + { + [FilePath("asid-lookup-data")] asidlookup, + [FilePath("ssp-transactions")] ssptrans, + [FilePath("mesh-transactions")] meshtrans + } +} \ No newline at end of file diff --git 
a/source/gpconnect-analytics.Helpers/SplunkInstance.cs b/source/Core/Helpers/SplunkInstance.cs similarity index 68% rename from source/gpconnect-analytics.Helpers/SplunkInstance.cs rename to source/Core/Helpers/SplunkInstance.cs index fd57a09..83f7227 100644 --- a/source/gpconnect-analytics.Helpers/SplunkInstance.cs +++ b/source/Core/Helpers/SplunkInstance.cs @@ -1,4 +1,4 @@ -namespace gpconnect_analytics.Helpers +namespace Core.Helpers { public enum SplunkInstances { @@ -6,4 +6,4 @@ public enum SplunkInstances spinea, spineb } -} +} \ No newline at end of file diff --git a/source/Core/Helpers/TimeProvider.cs b/source/Core/Helpers/TimeProvider.cs new file mode 100644 index 0000000..65defc9 --- /dev/null +++ b/source/Core/Helpers/TimeProvider.cs @@ -0,0 +1,9 @@ +using Core; + +public class TimeProvider : ITimeProvider +{ + public DateTime UtcDateTime() => DateTime.UtcNow; + + + public DateTime CurrentDate() => DateTime.Today; +} \ No newline at end of file diff --git a/source/Core/IConnectionFactory.cs b/source/Core/IConnectionFactory.cs new file mode 100644 index 0000000..46b2cf2 --- /dev/null +++ b/source/Core/IConnectionFactory.cs @@ -0,0 +1,8 @@ +using System.Data.Common; + +namespace Core; + +public interface IConnectionFactory +{ + DbConnection CreateConnection(string connectionString); +} \ No newline at end of file diff --git a/source/Core/ITimeProvider.cs b/source/Core/ITimeProvider.cs new file mode 100644 index 0000000..669d433 --- /dev/null +++ b/source/Core/ITimeProvider.cs @@ -0,0 +1,16 @@ +namespace Core; + +public interface ITimeProvider +{ + /// + /// Returns the Current Date and Time as UTC + /// + /// + DateTime UtcDateTime(); + + /// + /// Gets current Date, time is set at 00:00 + /// + /// current DateTime + DateTime CurrentDate(); +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/Mapping/SplunkInstanceMap.cs b/source/Core/Mapping/SplunkInstanceMap.cs similarity index 74% rename from source/gpconnect-analytics.DAL/Mapping/SplunkInstanceMap.cs rename to source/Core/Mapping/SplunkInstanceMap.cs index fe18d81..9ab7dfd 100644 --- a/source/gpconnect-analytics.DAL/Mapping/SplunkInstanceMap.cs +++ b/source/Core/Mapping/SplunkInstanceMap.cs @@ -1,7 +1,7 @@ -using gpconnect_analytics.DTO.Response.Configuration; +using Core.DTOs.Response.Configuration; using Dapper.FluentMap.Mapping; -namespace gpconnect_analytics.DAL.Mapping +namespace Core.Mapping { public class SplunkInstanceMap : EntityMap { @@ -11,4 +11,4 @@ public SplunkInstanceMap() Map(p => p.SourceGroup).ToColumn("SplunkInstanceGroup"); } } -} +} \ No newline at end of file diff --git a/source/Core/Repositories/DapperWrapper.cs b/source/Core/Repositories/DapperWrapper.cs new file mode 100644 index 0000000..3e31970 --- /dev/null +++ b/source/Core/Repositories/DapperWrapper.cs @@ -0,0 +1,68 @@ +using System.Data; +using Dapper; + +namespace Core.Repositories; + +public interface IDapperWrapper +{ + Task ExecuteAsync(IDbConnection connection, string sql, object param = null, + IDbTransaction? 
transaction = null); + + Task> QueryAsync(IDbConnection connection, string sql, object param = null, + IDbTransaction transaction = null); + + Task ExecuteStoredProcedureAsync(IDbConnection connection, string procedureName, + object parameters, int commandTimeout = 30); + + Task> QueryStoredProcedureAsync(IDbConnection connection, string procedureName, + object parameters, int commandTimeout = 30); +} + +public class DapperWrapper : IDapperWrapper +{ + public async Task ExecuteAsync(IDbConnection connection, string sql, object param = null, + IDbTransaction transaction = null) + { + return await connection.ExecuteAsync(sql, param, transaction); + } + + public async Task ExecuteStoredProcedureAsync(IDbConnection connection, string procedureName, + object parameters, int commandTimeout = 30) + { + try + { + return await connection.ExecuteAsync( + procedureName, + parameters, + commandType: CommandType.StoredProcedure, + commandTimeout: commandTimeout); + } + catch (Exception ex) + { + throw new Exception($"Error executing stored procedure: {procedureName}", ex); + } + } + + public async Task> QueryStoredProcedureAsync(IDbConnection connection, string procedureName, + object parameters, int commandTimeout = 30) + { + try + { + return await connection.QueryAsync( + procedureName, + parameters, + commandType: CommandType.StoredProcedure, + commandTimeout: commandTimeout); + } + catch (Exception ex) + { + throw new Exception($"Error executing stored procedure: {procedureName}", ex); + } + } + + public async Task> QueryAsync(IDbConnection connection, string sql, object param = null, + IDbTransaction transaction = null) + { + return await connection.QueryAsync(sql, param, transaction); + } +} \ No newline at end of file diff --git a/source/Core/Repositories/HierarchyProviderConsumerRepo.cs b/source/Core/Repositories/HierarchyProviderConsumerRepo.cs new file mode 100644 index 0000000..31dcd5e --- /dev/null +++ b/source/Core/Repositories/HierarchyProviderConsumerRepo.cs @@ -0,0 +1,66 @@ +using System.Data; +using Core.DTOs.Request; +using Core.Helpers; +using Core.Services.Interfaces; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging; + +namespace Core.Repositories; + +public class HierarchyProviderConsumerRepo( + ICoreConfigurationService configurationService, + IDapperWrapper dapperWrapper, + IConnectionFactory connectionFactory, + ILogger logger) + : IHierarchyProviderConsumerRepo +{ + public async Task InsertHierarchyProviderConsumers(List providers) + { + var connectionString = configurationService.GetConnectionString(ConnectionStrings.GpConnectAnalytics); + await using var connection = connectionFactory.CreateConnection(connectionString); + + // Explicitly open the connection + if (connection.State != ConnectionState.Open) + { + await connection.OpenAsync(); + } + + await using var transaction = await connection.BeginTransactionAsync(); + + const string sql = """ + INSERT INTO [Data].[HierarchyProviderConsumers] ( + OdsCode, + PracticeName, + RegisteredPatientCount, + RegionCode, + RegionName, + Icb22Name, + PcnName, + Appointments13000 + ) + VALUES ( + @OdsCode, + @PracticeName, + @RegisteredPatientCount, + @RegionCode, + @RegionName, + @Icb22Name, + @PcnName, + @Appointments13000 + ); + """; + + try + { + await dapperWrapper.ExecuteAsync(connection, sql, providers, transaction); + await transaction.CommitAsync(); + return providers.Count; + } + catch (Exception ex) + { + await transaction.RollbackAsync(); + logger.LogError(ex, "Error inserting hierarchy provider 
consumers"); + return 0; + } + } +} \ No newline at end of file diff --git a/source/Core/Repositories/IHierarchyProviderConsumerRepo.cs b/source/Core/Repositories/IHierarchyProviderConsumerRepo.cs new file mode 100644 index 0000000..5086756 --- /dev/null +++ b/source/Core/Repositories/IHierarchyProviderConsumerRepo.cs @@ -0,0 +1,8 @@ +using Core.DTOs.Request; + +namespace Core.Repositories; + +public interface IHierarchyProviderConsumerRepo +{ + Task InsertHierarchyProviderConsumers(List providers); +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/Interfaces/IConfigurationService.cs b/source/Core/Services/Interfaces/IConfigurationService.cs similarity index 66% rename from source/gpconnect-analytics.DAL/Interfaces/IConfigurationService.cs rename to source/Core/Services/Interfaces/IConfigurationService.cs index 7208521..9468dc4 100644 --- a/source/gpconnect-analytics.DAL/Interfaces/IConfigurationService.cs +++ b/source/Core/Services/Interfaces/IConfigurationService.cs @@ -1,9 +1,7 @@ -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.Helpers; -using System.Collections.Generic; -using System.Threading.Tasks; +using Core.DTOs.Response.Configuration; +using Core.Helpers; -namespace gpconnect_analytics.DAL.Interfaces +namespace Core.Services.Interfaces { public interface IConfigurationService { @@ -14,4 +12,4 @@ public interface IConfigurationService Task GetSplunkClientConfiguration(); Task GetSplunkInstance(SplunkInstances splunkInstance); } -} +} \ No newline at end of file diff --git a/source/Core/Services/Interfaces/ICoreConfiguration.cs b/source/Core/Services/Interfaces/ICoreConfiguration.cs new file mode 100644 index 0000000..52be50f --- /dev/null +++ b/source/Core/Services/Interfaces/ICoreConfiguration.cs @@ -0,0 +1,6 @@ +namespace Core.Services.Interfaces; + +public interface ICoreConfigurationService +{ + string GetConnectionString(string name); +} \ No newline at end of file diff --git a/source/Core/Services/Interfaces/IDataService.cs b/source/Core/Services/Interfaces/IDataService.cs new file mode 100644 index 0000000..4e13a13 --- /dev/null +++ b/source/Core/Services/Interfaces/IDataService.cs @@ -0,0 +1,15 @@ +using Dapper; + +namespace Core.Services.Interfaces +{ + public interface IDataService + { + Task> ExecuteQueryStoredProcedure(string procedureName, DynamicParameters parameters = null) + where T : class; + + Task ExecuteStoredProcedureWithOutputParameters(string procedureName, + DynamicParameters parameters); + + Task ExecuteStoredProcedure(string procedureName, DynamicParameters parameters = null); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/FilePathHelperTests.cs b/source/Functions.Tests/FilePathHelperTests.cs new file mode 100644 index 0000000..d875c78 --- /dev/null +++ b/source/Functions.Tests/FilePathHelperTests.cs @@ -0,0 +1,121 @@ +using Moq; +using System; +using System.Threading.Tasks; +using Xunit; +using Core; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Splunk; +using Core.Helpers; +using Core.Services.Interfaces; +using System.Text; +using FluentAssertions; +using Functions.HelperClasses; +using Functions.Tests.TestHelpers; + +namespace Functions.Tests +{ + public class FilePathHelperTests + { + private readonly Mock _mockConfigurationService; + private readonly Mock _mockTimeProvider; + private readonly Extract _mockExtract; + private readonly FilePathHelper _filePathHelper; + + public FilePathHelperTests() + { + _mockConfigurationService = new Mock(); + 
_mockTimeProvider = new Mock(); + + _mockTimeProvider.Setup(x => x.CurrentDate()).Returns(new DateTime(2025, 12, 25, 0, 0, 0)); + _mockTimeProvider.Setup(x => x.UtcDateTime()).Returns(new DateTime(2025, 12, 25, 11, 11, 0)); + + _mockExtract = new Extract + { + ExtractRequired = false, + QueryFromDate = new DateTime(2025, 1, 1, 0, 0, 0), + QueryToDate = new DateTime(2025, 1, 2, 0, 0, 0), + QueryHour = TimeSpan.Zero, + Override = false + }; + + _filePathHelper = new FilePathHelper( + _mockConfigurationService.Object, + _mockTimeProvider.Object, + _mockExtract + ); + } + + [Fact] + public async Task ConstructFilePath_WhenIsTodayTrue_MidnightIsFalse_ReturnsExpectedFilePath() + { + // Arrange + var splunkInstance = new SplunkInstance { Source = "source" }; + var fileType = new FileType { DirectoryName = "dir", FileTypeFilePrefix = "filePrefix" }; + + var filePathConstants = ConfigurationHelpers.GenerateFilePathConstants(); + + _mockConfigurationService.Setup(x => x.GetFilePathConstants()) + .ReturnsAsync(filePathConstants); + + // Act + var result = await _filePathHelper.ConstructFilePath(splunkInstance, fileType, true); + + // Assert + var expectedFilePath = + "dir/source/2025-01/proj__filePrefix_20250101T000000_20250102T000000_source_20251225T235959.csv"; + result.Should().Be(expectedFilePath); + } + + [Fact] + public async Task ConstructFilePath_WhenIsTodayFalseAndSetDateAsMidnightTrue_ReturnsExpectedFilePath() + { + // Arrange + var splunkInstance = new SplunkInstance { Source = "source" }; + var fileType = new FileType { DirectoryName = "dir", FileTypeFilePrefix = "filePrefix" }; + + var filePathConstants = ConfigurationHelpers.GenerateFilePathConstants(); + + _mockConfigurationService.Setup(x => x.GetFilePathConstants()) + .ReturnsAsync(filePathConstants); + + + // Act + var result = await _filePathHelper.ConstructFilePath(splunkInstance, fileType, false, true); //midnight true + + // Assert + var expectedFilePath = + "dir/source/2025-01/proj__filePrefix_20250101T000000_20250102T000000_source_20251225T000000.csv"; + result.Should().Be(expectedFilePath); + } + + [Fact] + public async Task ConstructFilePath_WhenIsTodayFalseAndSetDateAsMidnightFalse_ReturnsExpectedFilePath() + { + // Arrange + var splunkInstance = new SplunkInstance { Source = "source" }; + var fileType = new FileType { DirectoryName = "dir", FileTypeFilePrefix = "filePrefix" }; + + // expectations + var directory = "dir"; + var source = "source"; + var dateFolder = "2025-01"; + var filePrefix = "filePrefix"; + var queryDateFrom = "20250101T000000"; + var queryDateTo = "20250102T000000"; + var date = "20251225T111100"; + + var filePathConstants = ConfigurationHelpers.GenerateFilePathConstants(); + + _mockConfigurationService.Setup(x => x.GetFilePathConstants()) + .ReturnsAsync(filePathConstants); + + // Act + var result = await _filePathHelper.ConstructFilePath(splunkInstance, fileType, false); + + // Assert + var expectedFilePath = + $"{directory}/{source}/{dateFolder}/{filePathConstants.ProjectNameFilePrefix}_{filePrefix}_{queryDateFrom}_{queryDateTo}_{source}_{date}{filePathConstants.FileExtension}"; + result.Should().Be(expectedFilePath); + } + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Functions.Tests.csproj b/source/Functions.Tests/Functions.Tests.csproj new file mode 100644 index 0000000..64345f1 --- /dev/null +++ b/source/Functions.Tests/Functions.Tests.csproj @@ -0,0 +1,31 @@ + + + + net8.0 + enable + enable + + false + true + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/source/Functions.Tests/Functions.Tests.csproj.DotSettings b/source/Functions.Tests/Functions.Tests.csproj.DotSettings new file mode 100644 index 0000000..f4b6a50 --- /dev/null +++ b/source/Functions.Tests/Functions.Tests.csproj.DotSettings @@ -0,0 +1,2 @@ + + True \ No newline at end of file diff --git a/source/Functions.Tests/Functions/ExecuteImportByTriggerTests.cs b/source/Functions.Tests/Functions/ExecuteImportByTriggerTests.cs new file mode 100644 index 0000000..ca625c1 --- /dev/null +++ b/source/Functions.Tests/Functions/ExecuteImportByTriggerTests.cs @@ -0,0 +1,57 @@ +using Core.DTOs.Response.Queue; +using FluentAssertions; +using Functions.Services.Interfaces; +using Microsoft.Extensions.Logging; +using Moq; + +namespace Functions.Tests; + +public class ExecuteImportByTriggerTests +{ + private readonly Mock _importServiceMock; + private readonly Mock _loggerMock; + private readonly ExecuteImportByTrigger _function; + + public ExecuteImportByTriggerTests() + { + _importServiceMock = new Mock(); + _loggerMock = new Mock(); + _function = new ExecuteImportByTrigger(_importServiceMock.Object); + } + + [Fact] + public async Task Run_Should_Call_InstallData_With_Correct_QueueItem() + { + // Arrange + var queueItem = new Message + { + Override = true, + BlobName = "fakeBlob", + FileTypeId = 123 + }; + + // Act + await _function.Run(queueItem, _loggerMock.Object); + + // Assert + _importServiceMock.Verify(s => s.InstallData(queueItem), Times.Once); + } + + [Fact] + public async Task Run_Should_Not_Throw_Exception() + { + // Arrange + var queueItem = new Message + { + Override = true, + BlobName = "fakeBlob", + FileTypeId = 123 + }; + + // Act + var act = async () => await _function.Run(queueItem, _loggerMock.Object); + + // Assert + await act.Should().NotThrowAsync(); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Functions/GetDataFromApiByDateRangeTests.cs b/source/Functions.Tests/Functions/GetDataFromApiByDateRangeTests.cs new file mode 100644 index 0000000..255dbc2 --- /dev/null +++ b/source/Functions.Tests/Functions/GetDataFromApiByDateRangeTests.cs @@ -0,0 +1,164 @@ +using System.Net; +using Core.Helpers; +using FluentAssertions; +using Functions.Services.Interfaces; +using Functions.Tests.TestHelpers; +using Microsoft.Extensions.Logging; +using Moq; + +namespace Functions.Tests; + +public class GetDataFromApiByDateRangeTests +{ + private readonly Mock _batchServiceMock; + private readonly Mock _loggerMock; + private readonly GetDataFromApiByDateRange _function; + + public GetDataFromApiByDateRangeTests() + { + _batchServiceMock = new Mock(); + _loggerMock = new Mock(); + _function = new GetDataFromApiByDateRange(_batchServiceMock.Object, _loggerMock.Object); + } + + + [Fact] + public async Task GetDataFromSspTransByDateRange_Should_Return_Successful_Response() + { + // Arrange + var request = MockRequests.CreateDateRangeMockRequest("2024-01-01", "2024-01-31"); + + _batchServiceMock + .Setup(s => s.StartBatchDownloadAsync(FileTypes.ssptrans, It.IsAny(), It.IsAny())) + .ReturnsAsync(10); // Simulate 10 rows processed + + + // Act + var result = await _function.GetDataFromSspTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.OK); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Batch Download successful: 10 requests processed"); + } + + [Fact] + public async 
Task GetDataFromSSpTransByDateRange_Should_LogError_AndReturn_InternalServerError_OnError() + { + // Arrange + var request = MockRequests.CreateDateRangeMockRequest("2024-01-01", "2024-01-31"); + + _batchServiceMock + .Setup(s => s.StartBatchDownloadAsync(FileTypes.ssptrans, It.IsAny(), It.IsAny())) + .ThrowsAsync(new Exception("Error processing batch")); // Simulate an error + + + // Act + var result = await _function.GetDataFromSspTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Failed to download - see logs"); + } + + [Fact] + public async Task GetDataFromApiDateRange_MeshTrans_Should_Return_Successful_Response() + + { + // Arrange + var request = MockRequests.CreateDateRangeMockRequest("2024-01-01", "2024-01-31"); + + _batchServiceMock + .Setup(s => s.StartBatchDownloadAsync(FileTypes.meshtrans, It.IsAny(), It.IsAny())) + .ReturnsAsync(10); // Simulate 10 rows processed + + + // Act + var result = await _function.GetDataFromMeshTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.OK); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Batch Download successful: 10 requests processed"); + } + + [Fact] + public async Task GetDataFromMeshTransByDateRange_Should_LogError_AndReturn_InternalServerError_OnError() + { + // Arrange + var request = MockRequests.CreateDateRangeMockRequest("2024-01-01", "2024-01-31"); + + _batchServiceMock + .Setup(s => s.StartBatchDownloadAsync(FileTypes.meshtrans, It.IsAny(), It.IsAny())) + .ThrowsAsync(new Exception("Error processing batch")); // Simulate an error + + + // Act + var result = await _function.GetDataFromMeshTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Failed to download - see logs"); + } + + [Fact] + public async Task GetDataFromApiByDateRange_AsidLookup_Should_Return_Successful_Response() + + { + // Arrange + var request = MockRequests.CreateDateRangeMockRequest("2024-01-01", "2024-01-31"); + + _batchServiceMock + .Setup(s => s.StartBatchDownloadAsync(FileTypes.asidlookup, It.IsAny(), It.IsAny())) + .ReturnsAsync(10); // Simulate 10 rows processed + + + // Act + var result = await _function.GetDataFromAsidLookupByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.OK); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Batch Download successful: 10 requests processed"); + } + + [Fact] + public async Task GetDataFromAsidLookupByDateRange_Should_LogError_AndReturn_InternalServerError_OnError() + { + // Arrange + var request = MockRequests.CreateDateRangeMockRequest("2024-01-01", "2024-01-31"); + + _batchServiceMock + .Setup(s => s.StartBatchDownloadAsync(FileTypes.asidlookup, It.IsAny(), 
It.IsAny())) + .ThrowsAsync(new Exception("Error processing batch")); // Simulate an error + + + // Act + var result = await _function.GetDataFromAsidLookupByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Failed to download - see logs"); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Functions/GetDataFromApiManualTests.cs b/source/Functions.Tests/Functions/GetDataFromApiManualTests.cs new file mode 100644 index 0000000..6b65e49 --- /dev/null +++ b/source/Functions.Tests/Functions/GetDataFromApiManualTests.cs @@ -0,0 +1,104 @@ +using System.Net; +using FluentAssertions; +using Functions.Services.Interfaces; +using Functions.Tests.TestHelpers; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Moq; + +namespace Functions.Tests; + +public class GetDataFromApiManualTests +{ + private readonly Mock _importService; + private readonly FakeLogger _loggerMock; + private readonly GetDataFromApiManual _function; + + + public GetDataFromApiManualTests() + { + _importService = new Mock(); + _loggerMock = new FakeLogger(); + _function = new GetDataFromApiManual(_importService.Object, _loggerMock); + } + + [Fact] + public async Task GetDataFromApi_ShouldReturnSuccessfulResponse() + { + // Arrange + var request = MockRequests.MockAddDownloadRequest(); + _importService.Setup(x => x.AddDownloadedFileManually(It.IsAny())) + .Returns(Task.CompletedTask); + + + // Act + var response = await _function.AddDownloadedFile(request); + + response.StatusCode.Should().Be(HttpStatusCode.OK); + response.Body.Position = 0; // reset the position to the beginning of the stream + + using var reader = new StreamReader(response.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Successfully added files"); + + // Assert + } + + [Fact] + public async Task GetDataFromApi_ReturnBadRequest_WhenMissingFilePathQueryParam() + { + // Arrange + var request = MockRequests.MockRequestNoQuery(); + + // Act + var response = await _function.AddDownloadedFile(request); + + response.StatusCode.Should().Be(HttpStatusCode.BadRequest); + response.Body.Position = 0; // reset the position to the beginning of the stream + + // Assert + using var reader = new StreamReader(response.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Filepath missing"); + } + + [Fact] + public async Task GetDataFromApi_ReturnInternalServerError_WhenExceptionThrown() + { + // Arrange + var request = MockRequests.MockAddDownloadRequest(); + _importService.Setup(x => x.AddDownloadedFileManually(It.IsAny())) + .Throws(new Exception("An error occurred")); + + // Act + var response = await _function.AddDownloadedFile(request); + + response.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + response.Body.Position = 0; // reset the position to the beginning of the stream + + // Assert + using var reader = new StreamReader(response.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Something went wrong - see error logs for more details"); + } + + [Fact] + public async Task GetDataFromApi_LogsError_WhenExceptionThrown() + { + // Arrange + var request = MockRequests.MockAddDownloadRequest(); + _importService.Setup(x 
=> x.AddDownloadedFileManually(It.IsAny())) + .Throws(new Exception("An error occurred")); + + // Act + var response = await _function.AddDownloadedFile(request); + + response.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + response.Body.Position = 0; // reset the position to the beginning of the stream + + // Assert + _loggerMock.Collector.LatestRecord.Message.Should().Be("Error adding downloaded file: An error occurred"); + _loggerMock.Collector.LatestRecord.Exception.Should().BeOfType(); + _loggerMock.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Functions/GetDataFromApiTodayTests.cs b/source/Functions.Tests/Functions/GetDataFromApiTodayTests.cs new file mode 100644 index 0000000..9a5e6da --- /dev/null +++ b/source/Functions.Tests/Functions/GetDataFromApiTodayTests.cs @@ -0,0 +1,234 @@ +using System.Net; +using Core.Helpers; +using FluentAssertions; +using Functions.Services.Interfaces; +using Functions.Tests.TestHelpers; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Moq; + +namespace Functions.Tests; + +public class GetDataFromApiTodayTests +{ + private readonly GetDataFromApiToday _function; + private readonly FakeLogger _loggerMock; + private readonly Mock _batchService; + + public GetDataFromApiTodayTests() + { + _batchService = new Mock(); + _loggerMock = new FakeLogger(); + _function = new GetDataFromApiToday(_batchService.Object, _loggerMock); + } + + [Fact] + public async Task SspTrans_ShouldReturnSuccessful() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.ssptrans)) + .ReturnsAsync(2); + + + // Act + var result = await _function.GetDataFromSspTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.OK); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Processed 2 requests"); + } + + [Fact] + public async Task SspTrans_ShouldReturn500_WhenErrorThrown() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.ssptrans)) + .ThrowsAsync(new Exception("Error while downloading")); + + + // Act + var result = await _function.GetDataFromSspTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Be("An error occurred whilst processing batch download - see logs for more details"); + } + + [Fact] + public async Task SspTrans_ShouldLogError_WhenErrorThrown() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.ssptrans)) + .ThrowsAsync(new Exception("Error while downloading")); + + + // Act + var result = await _function.GetDataFromSspTransByDateRange(request); + + // Assert + _loggerMock.Collector.LatestRecord.Message.Should() + .Be("An error occurred during batch download when processing urls"); + + _loggerMock.Collector.LatestRecord.Exception.Should().BeOfType(); + 
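// The original exception should be surfaced on the log record at Error level. +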
_loggerMock.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + _loggerMock.Collector.LatestRecord.Exception?.Message.Should() + .Be("Error while downloading"); + } + + [Fact] + public async Task AsidLookup_ShouldReturnSuccessful() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.asidlookup)) + .ReturnsAsync(2); + + + // Act + var result = await _function.GetDataFromAsidLookupByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.OK); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Processed 2 requests"); + } + + [Fact] + public async Task AsidLookup_ShouldReturn500_WhenErrorThrown() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.asidlookup)) + .ThrowsAsync(new Exception("Error while downloading")); + + + // Act + var result = await _function.GetDataFromAsidLookupByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Be("An error occurred whilst processing batch download - see logs for more details"); + } + + [Fact] + public async Task AsidLookup_ShouldLogError_WhenErrorThrown() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.asidlookup)) + .ThrowsAsync(new Exception("Error while downloading")); + + + // Act + var result = await _function.GetDataFromAsidLookupByDateRange(request); + + // Assert + _loggerMock.Collector.LatestRecord.Message.Should() + .Be("An error occurred during batch download when processing urls"); + + _loggerMock.Collector.LatestRecord.Exception.Should().BeOfType(); + _loggerMock.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + _loggerMock.Collector.LatestRecord.Exception?.Message.Should() + .Be("Error while downloading"); + } + + [Fact] + public async Task MeshTrans_ShouldReturnSuccessful() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.meshtrans)) + .ReturnsAsync(2); + + + // Act + var result = await _function.GetDataFromMeshTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.OK); + + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Processed 2 requests"); + } + + [Fact] + public async Task MeshTrans_ShouldReturn500_WhenErrorThrown() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.meshtrans)) + .ThrowsAsync(new Exception("Error while downloading")); + + + // Act + var result = await _function.GetDataFromMeshTransByDateRange(request); + + // Assert + result.Body.Position = 0; // reset the position to the beginning of the stream + result.StatusCode.Should().Be(HttpStatusCode.InternalServerError); + + using 
var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Be("An error occurred whilst processing batch download - see logs for more details"); + } + + [Fact] + public async Task MeshTrans_ShouldLogError_WhenErrorThrown() + { + // Arrange + var request = MockRequests.CreateTodayMockRequest(); + + _batchService + .Setup(s => s.StartBatchDownloadForTodayAsync(FileTypes.meshtrans)) + .ThrowsAsync(new Exception("Error while downloading")); + + + // Act + var result = await _function.GetDataFromMeshTransByDateRange(request); + + // Assert + _loggerMock.Collector.LatestRecord.Message.Should() + .Be("An error occurred during batch download when processing urls"); + + _loggerMock.Collector.LatestRecord.Exception.Should().BeOfType(); + _loggerMock.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + _loggerMock.Collector.LatestRecord.Exception?.Message.Should() + .Be("Error while downloading"); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Functions/GetDataFromApiTodayTimerTests.cs b/source/Functions.Tests/Functions/GetDataFromApiTodayTimerTests.cs new file mode 100644 index 0000000..7910af2 --- /dev/null +++ b/source/Functions.Tests/Functions/GetDataFromApiTodayTimerTests.cs @@ -0,0 +1,67 @@ +using Core.Helpers; +using Functions.Services.Interfaces; +using Functions.Tests.TestHelpers; +using Microsoft.Extensions.Logging; +using Moq; + +namespace Functions.Tests; + +public class GetDataFromApiByTriggerTests +{ + private readonly Mock _batchServiceMock; + private readonly Mock _loggerMock; + private readonly GetDataFromApiByTrigger _function; + + public GetDataFromApiByTriggerTests() + { + _batchServiceMock = new Mock(); + _loggerMock = new Mock(); + _function = new GetDataFromApiByTrigger(_batchServiceMock.Object, _loggerMock.Object); + } + + + [Fact] + public async Task GetDataFromAsidLookup_Should_Invoke_BatchService() + { + // Arrange + var timerInfo = MockTriggers.CreateMockTimerInfo(); + + // Act + await _function.GetDataFromAsidLookup(timerInfo); + + // Assert + _batchServiceMock.Verify( + x => x.StartBatchDownloadForTodayAsync(FileTypes.asidlookup), + Times.Once); + } + + [Fact] + public async Task GetDataFromSspTrans_Should_Invoke_BatchService() + { + // Arrange + var timerInfo = MockTriggers.CreateMockTimerInfo(); + + // Act + await _function.GetDataFromSspTrans(timerInfo); + + // Assert + _batchServiceMock.Verify( + x => x.StartBatchDownloadForTodayAsync(FileTypes.ssptrans), + Times.Once); + } + + [Fact] + public async Task GetDataFromMeshTrans_Should_Invoke_BatchService() + { + // Arrange + var timerInfo = MockTriggers.CreateMockTimerInfo(); + + // Act + await _function.GetDataFromMeshTrans(timerInfo); + + // Assert + _batchServiceMock.Verify( + x => x.StartBatchDownloadForTodayAsync(FileTypes.meshtrans), + Times.Once); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Functions/PurgeErrorLogByTriggerTests.cs b/source/Functions.Tests/Functions/PurgeErrorLogByTriggerTests.cs new file mode 100644 index 0000000..e0f09a7 --- /dev/null +++ b/source/Functions.Tests/Functions/PurgeErrorLogByTriggerTests.cs @@ -0,0 +1,33 @@ +using Functions.Services.Interfaces; +using Functions.Tests.TestHelpers; +using Microsoft.Azure.Functions.Worker; +using Moq; + +namespace Functions.Tests; + +public class PurgeErrorLogByTriggerTests +{ + private readonly TimerInfo _timerInfo; + private readonly Mock _loggingServiceMock; + private PurgeErrorLogByTrigger _function; + + public 
PurgeErrorLogByTriggerTests() + { + _loggingServiceMock = new Mock(); + _timerInfo = MockTriggers.CreateMockTimerInfo(); + + + _function = new PurgeErrorLogByTrigger(_loggingServiceMock.Object); + } + + [Fact] + public async Task PurgeErrorLogByTrigger_Should_Invoke_LoggingServiceCorrectly() + { + // Arrange + // Act + await _function.PurgeErrorLog(_timerInfo); + + // Assert + _loggingServiceMock.Verify(x => x.PurgeErrorLog(), Times.Once); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Functions/StoreProviderConsumerDataTests.cs b/source/Functions.Tests/Functions/StoreProviderConsumerDataTests.cs new file mode 100644 index 0000000..7b80462 --- /dev/null +++ b/source/Functions.Tests/Functions/StoreProviderConsumerDataTests.cs @@ -0,0 +1,146 @@ +using System.Net; +using System.Text; +using System.Text.Json; +using Core.DTOs.Request; +using Core.Repositories; +using FluentAssertions; +using Functions.Tests.TestHelpers; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Moq; + +namespace Functions.Tests; + +public class StoreProviderConsumerDataTests +{ + private readonly Mock _repositoryMock; + private readonly FakeLogger _loggerMock; + private readonly StoreProviderConsumerData _function; + + public StoreProviderConsumerDataTests() + { + _repositoryMock = new Mock(); + _loggerMock = new FakeLogger(); + _function = new StoreProviderConsumerData(_repositoryMock.Object, _loggerMock); + } + + + [Fact] + public async Task Run_ShouldReturnBadRequest_WhenRequestBodyIsEmpty() + { + // Arrange + var request = MockRequests.MockRequestNoQuery(); + + // Act + var response = await _function.Run(request); + + // Assert + response.StatusCode.Should().Be(HttpStatusCode.BadRequest); + } + + [Fact] + public async Task Run_ShouldReturnBadRequest_WhenInvalidJsonIsProvided() + { + // Arrange + var request = MockRequests.MockRequestNoQuery(); + + // Act + var response = await _function.Run(request); + + // Assert + response.StatusCode.Should().Be(HttpStatusCode.BadRequest); + } + + [Fact] + public async Task Run_ShouldReturnOk_WhenValidInputIsProvided() + { + // Arrange + var records = new List + { + new() + { + OdsCode = null, + PracticeName = null, + RegisteredPatientCount = 0, + RegionCode = null, + RegionName = null, + Icb22Name = null, + PcnName = null, + Appointments13000 = 0 + } + }; + var json = JsonSerializer.Serialize(records); + var request = MockRequests.MockRequestWithBody(json); + + _repositoryMock.Setup(r => r.InsertHierarchyProviderConsumers(It.IsAny>())) + .ReturnsAsync(1); + + // Act + var response = await _function.Run(request); + + // Assert + response.StatusCode.Should().Be(HttpStatusCode.OK); + } + + [Fact] + public async Task Run_ShouldReturnBadRequest_WhenRepositoryFails() + { + // Arrange + var records = new List + { + new() + { + OdsCode = null, + PracticeName = null, + RegisteredPatientCount = 0, + RegionCode = null, + RegionName = null, + Icb22Name = null, + PcnName = null, + Appointments13000 = 0 + } + }; + var json = JsonSerializer.Serialize(records); + var request = MockRequests.MockRequestWithBody(json); + + _repositoryMock.Setup(r => r.InsertHierarchyProviderConsumers(It.IsAny>())) + .ReturnsAsync(0); + + // Act + var response = await _function.Run(request); + + // Assert + response.StatusCode.Should().Be(HttpStatusCode.BadRequest); + response.Headers.GetValues("Content-Type").Should().Contain("text/plain; charset=utf-8"); + 
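// A failed insert is expected to be logged (with the submitted record count) before the plain-text error body is returned. +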
_loggerMock.Collector.LatestRecord.Message.Should().Be("No items of 1 were saved to the database"); + + response.Body.Position = 0; + using var reader = new StreamReader(response.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Be("Failed to save to the database - see logs for more information"); + } + + [Fact] + public async Task Test_InvalidJson_ReturnsBadRequest_WithCorrectMessage() + { + // Arrange: + var invalidJsonBody = "{ message: 'Invalid JSON' "; // Invalid JSON (missing closing brace) + + var mockHttpRequest = MockRequests.MockRequestWithBody(invalidJsonBody); + + // Act + var result = await _function.Run(mockHttpRequest); + + // Assert + result.StatusCode.Should().Be(HttpStatusCode.BadRequest); + + result.Body.Position = 0; + using var reader = new StreamReader(result.Body); + var responseBody = await reader.ReadToEndAsync(); + responseBody.Should().Contain("Invalid json input"); + + _loggerMock.Collector.LatestRecord.Message.Should().Contain("Failed to deserialize request body:"); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/HttpClientExtensionsTests.cs b/source/Functions.Tests/HttpClientExtensionsTests.cs new file mode 100644 index 0000000..5a10350 --- /dev/null +++ b/source/Functions.Tests/HttpClientExtensionsTests.cs @@ -0,0 +1,39 @@ +using System.Security.Authentication; +using FluentAssertions; +using Functions.Configuration.Infrastructure.HttpClient; + +namespace Functions.Tests; + +public class HttpClientExtensionsTests +{ + [Fact] + public void ConfigureHttpClient_ShouldSetTimeoutAndAcceptHeader() + { + // Arrange + var options = new HttpClient(); + + // Act + HttpClientExtensions.ConfigureHttpClient(options); + + // Assert + options.Timeout.Should().Be(new TimeSpan(0, 0, 1, 0)); + options.DefaultRequestHeaders.Accept.Should().ContainSingle(h => h.MediaType == "text/csv"); + options.DefaultRequestHeaders.CacheControl?.NoCache.Should().BeTrue(); + } + + [Fact] + public void CreateHttpMessageHandler_ShouldReturnHandlerWithCorrectSslProtocols() + { + // Act + var handler = HttpClientExtensions.CreateHttpMessageHandler(); + + // Assert + handler.Should().BeOfType(); // Verify type + var httpClientHandler = (HttpClientHandler)handler; + + httpClientHandler.SslProtocols.Should().Be( + SslProtocols.Tls13 | SslProtocols.Tls12 | SslProtocols.Tls11 | SslProtocols.Tls, + because: "the handler should support TLS 1.0, 1.1, 1.2, and 1.3" + ); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/LoggingServiceTests.cs b/source/Functions.Tests/LoggingServiceTests.cs new file mode 100644 index 0000000..37dca06 --- /dev/null +++ b/source/Functions.Tests/LoggingServiceTests.cs @@ -0,0 +1,33 @@ +using Core.Services.Interfaces; +using Dapper; +using Functions.Services; +using Moq; + +namespace Functions.Tests; + +public class LoggingServiceTests +{ + private readonly Mock _mockDataService; + private readonly LoggingService _loggingService; + + public LoggingServiceTests() + { + _mockDataService = new Mock(); + _loggingService = new LoggingService(_mockDataService.Object); + } + + [Fact] + public async Task PurgeErrorLog_ShouldCallDataService() + { + // Arrange + _mockDataService.Setup(m => m.ExecuteStoredProcedure("Logging.PurgeErrorLog", It.IsAny())) + .ReturnsAsync(1); + + // Act + await _loggingService.PurgeErrorLog(); + + // Assert + _mockDataService.Verify(m => m.ExecuteStoredProcedure("Logging.PurgeErrorLog", It.IsAny()), + Times.Once); + } +} \ No newline at end of file diff --git 
a/source/Functions.Tests/RequestWrappers.cs b/source/Functions.Tests/RequestWrappers.cs new file mode 100644 index 0000000..16d468a --- /dev/null +++ b/source/Functions.Tests/RequestWrappers.cs @@ -0,0 +1,42 @@ +using System.Diagnostics.CodeAnalysis; +using System.Net; +using System.Security.Claims; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; + +namespace Functions.Tests; + +[ExcludeFromCodeCoverage] +public class FakeHttpRequestData : HttpRequestData +{ + public FakeHttpRequestData(FunctionContext functionContext, Uri url, Stream body = null) : base(functionContext) + { + Url = url; + Body = body ?? new MemoryStream(); + } + + public override Stream Body { get; } = new MemoryStream(); + public override HttpHeadersCollection Headers { get; } = new HttpHeadersCollection(); + public override IReadOnlyCollection Cookies { get; } + public override Uri Url { get; } + public override IEnumerable Identities { get; } + public override string Method { get; } + + public override HttpResponseData CreateResponse() + { + return new FakeHttpResponseData(FunctionContext); + } +} + +[ExcludeFromCodeCoverage] +public class FakeHttpResponseData : HttpResponseData +{ + public FakeHttpResponseData(FunctionContext functionContext) : base(functionContext) + { + } + + public override HttpStatusCode StatusCode { get; set; } + public override HttpHeadersCollection Headers { get; set; } = new HttpHeadersCollection(); + public override Stream Body { get; set; } = new MemoryStream(); + public override HttpCookies Cookies { get; } +} \ No newline at end of file diff --git a/source/Functions.Tests/Services/BatchServiceTests.cs b/source/Functions.Tests/Services/BatchServiceTests.cs new file mode 100644 index 0000000..4531f45 --- /dev/null +++ b/source/Functions.Tests/Services/BatchServiceTests.cs @@ -0,0 +1,277 @@ +using System.Net; +using Bogus; +using Core.DTOs.Request; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Splunk; +using Core.Helpers; +using Core.Services.Interfaces; +using Dapper; +using FluentAssertions; +using Functions.Services; +using Functions.Services.Interfaces; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Moq; +using Xunit.Abstractions; +using static System.DateTime; + +namespace Functions.Tests.Services; + +public class BatchServiceTests +{ + private readonly ITestOutputHelper _testOutputHelper; + private readonly Mock _mockConfigurationService; + private readonly Mock _mockSplunkService; + private readonly Mock _mockDataService; + private readonly BatchService _batchService; + private readonly FileType _fileType; + private readonly SplunkClient _splunkClient; + private readonly List _uriList; + private readonly ExtractResponse _mockExtractResponse; + + private readonly FakeLogger _fakeLogger; + + public BatchServiceTests(ITestOutputHelper testOutputHelper) + { + _testOutputHelper = testOutputHelper; + _mockConfigurationService = new Mock(); + _mockSplunkService = new Mock(); + _fakeLogger = new FakeLogger(); + _mockDataService = new Mock(); + + _batchService = new BatchService( + _mockConfigurationService.Object, + _mockSplunkService.Object, + _fakeLogger, + _mockDataService.Object + ); + + _fileType = new FileType + { + Enabled = true, + FileTypeId = 1, + FileTypeFilePrefix = "test", + SplunkQuery = "test query {latest}, {earliest}, {hour}" + }; + + _splunkClient = new SplunkClient + { + HostName = "fake.splunk.com", + HostPort = 8089, + BaseUrl = "/services/search/jobs", + QueryParameters = 
"search={0}" + }; + + _uriList = new List + { + new() { Request = new Uri("https://example.com"), EarliestDate = Now, LatestDate = Now } + }; + + _mockExtractResponse = new ExtractResponse + { + ExtractResponseMessage = new HttpResponseMessage(HttpStatusCode.OK), + ExtractResponseStream = Stream.Null, + ExtractRequestDetails = new Extract(), + FilePath = "test.csv", + UriRequest = new UriRequest() + }; + + // Common Mocks + _mockConfigurationService.Setup(x => x.GetFileType(It.IsAny())) + .ReturnsAsync(_fileType); + + _mockConfigurationService.Setup(x => x.GetSplunkClientConfiguration()) + .ReturnsAsync(_splunkClient); + + _mockSplunkService.Setup(x => x.DownloadCSVDateRangeAsync(It.IsAny(), It.IsAny(), true)) + .Callback(() => _testOutputHelper.WriteLine("DownloadCSVDateRangeAsync called")) + .ReturnsAsync(_mockExtractResponse); + + _mockSplunkService.Setup(x => + x.ExecuteBatchDownloadFromSplunk(It.IsAny(), It.IsAny(), It.IsAny())) + .Returns(Task.CompletedTask); + } + + #region StartBatchForToday + + [Fact] + public async Task StartBatchDownloadForTodayAsync_ShouldProcessUrisAndReturnCount() + { + // Act + var result = await _batchService.StartBatchDownloadForTodayAsync(FileTypes.asidlookup); + + // Assert + result.Should().Be(24); // One for each hour + _mockConfigurationService.Verify(x => x.GetFileType(It.IsAny()), Times.Once); + + _mockSplunkService.Verify( + x => x.ExecuteBatchDownloadFromSplunk(It.IsAny(), It.IsAny(), true), + Times.Exactly(24)); + } + + [Fact] + public async Task StartBatchDownLoadForToday_ShouldCallRemovePreviousDownloads() + { + //Arrange + var expectedProcedureName = "Import.RemovePreviousDownload"; + + // Act + await _batchService.StartBatchDownloadForTodayAsync(FileTypes.asidlookup); + + // Assert + // RemovePreviousDownload executes the remove stored procedure + _mockDataService.Verify(x => x.ExecuteStoredProcedure(expectedProcedureName, It.IsAny()), + Times.Once); + } + + #endregion + + #region StartBatchDownloadForDateRange + + [Fact] + public async Task StartBatchDownloadAsync_ShouldReturnReturnProcessCount() + { + // Arrange + var startDate = "2023-01-01"; + var endDate = "2023-01-03"; + + // Act + var result = await _batchService.StartBatchDownloadAsync(FileTypes.asidlookup, startDate, endDate); + + // Assert + result.Should().Be(72); // 3 days * 24 hours + } + + [Fact] + public async Task StartBatchDownloadAsync_ShouldThrowException_WhenStartDateIsAfterEndDate() + { + // Arrange + var startDate = "2023-01-03"; + var endDate = "2023-01-01"; + + // Act + var action = async () => await _batchService.StartBatchDownloadAsync(FileTypes.asidlookup, startDate, endDate); + + // Assert + await action.Should().ThrowAsync().WithMessage("Start date cannot be later than end date"); + _fakeLogger.Collector.LatestRecord.Should().NotBeNull(); + _fakeLogger.Collector.LatestRecord.Message.Should().Be("Start date cannot be later than end date"); + _fakeLogger.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + _fakeLogger.Collector.Count.Should().Be(1); + } + + [Fact] + public async Task StartBatchDownLoad_Should_CallRemovePreviousDownloads() + { + //Arrange + var expectedProcedureName = "Import.RemovePreviousDownload"; + var start = DateTime.Now.AddDays(-2).ToString(); + var end = DateTime.Now.AddDays(1).ToString(); + + // Act + await _batchService.StartBatchDownloadAsync(FileTypes.asidlookup, start, end); + + // Assert + _mockDataService.Verify(x => x.ExecuteStoredProcedure(expectedProcedureName, It.IsAny()), + Times.Once); + } + + [Fact] + public async Task 
GetBatchDownloadAsync_Should_HandleNullOrEmptyDateInputs() + { + // Act + + await _batchService.Invoking(x => x.StartBatchDownloadAsync(FileTypes.asidlookup, null, null)) + .Should() + .ThrowAsync().WithMessage("Start and end dates are required for batch download"); + + _fakeLogger.Collector.LatestRecord.Should().NotBeNull(); + _fakeLogger.Collector.LatestRecord.Message.Should().Be("Start and end dates are required for batch download"); + _fakeLogger.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + _fakeLogger.Collector.Count.Should().Be(1); + } + + [Fact] + public async Task GetBatchDownloadAsync_Should_HandleGenericExceptions_LoggingError() + { + // Act + var startDate = "2023-01-01"; + var endDate = "2023-01-03"; + + + _mockSplunkService.Setup(x => + x.ExecuteBatchDownloadFromSplunk(It.IsAny(), It.IsAny(), false)) + .Throws(new Exception("Something went wrong")); + + await _batchService.Invoking(x => + x.StartBatchDownloadAsync(FileTypes.asidlookup, startDate, endDate)) + .Should() + .ThrowAsync().WithMessage("Something went wrong"); + + _fakeLogger.Collector.LatestRecord.Should().NotBeNull(); + _fakeLogger.Collector.LatestRecord.Message.Should() + .Be("An error occurred during batch download when processing urls"); + _fakeLogger.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + } + + #endregion + + + [Fact] + public async Task GetBatchDownloadUriList_ShouldGenerateCorrectUris() + { + // Arrange + + var faker = new Faker(); + var fakeDates = faker.Make(3, () => faker.Date.Past(1)).ToList(); // 3 random past dates + + // Fake FileType with a sample SplunkQuery template + var fakeFileType = new FileType + { + SplunkQuery = "index=main | earliest={earliest} latest={latest} hour={hour}" + }; + + // Act + var result = await _batchService.GetBatchDownloadUriList(fakeFileType, fakeDates); + + // Assert + result.Should().NotBeNull(); + result.Count.Should().Be(fakeDates.Count * 24); // 24 URIs per date + + // Validate a sample URI format + var sampleUriRequest = result.First(); + sampleUriRequest.Request.Should().NotBeNull(); + sampleUriRequest.EarliestDate = fakeDates.First().AddDays(-2); + sampleUriRequest.LatestDate = fakeDates.First().AddDays(-1); + sampleUriRequest.Request.AbsoluteUri.Should().Contain("fake.splunk.com"); + sampleUriRequest.Request.Query.Should().Contain("search="); + } + + [Fact] + public async Task RemovePreviousDownloads_Calls_DataService_ExecuteStoreProcedureCorrectly() + { + // Arrange + var startDate = DateTime.Now.AddDays(-4); + var endDate = DateTime.Now.AddDays(-2); + + var expectedProcedureName = "Import.RemovePreviousDownload"; + var expectedParameters = new DynamicParameters(); + expectedParameters.Add("@StartDate", startDate); + expectedParameters.Add("@EndDate", endDate); + expectedParameters.Add("@FileTypeId", _fileType.FileTypeId); + + + // Act + await _batchService.RemovePreviousDownloads(_fileType, startDate, endDate); + + // Assert + _mockDataService.Verify(x => x.ExecuteStoredProcedure( + expectedProcedureName, + It.Is(p => + p.Get("@StartDate") == startDate.AddDays(-2) && + p.Get("@EndDate") == endDate.AddDays(-1) && + p.Get("@FileTypeId") == _fileType.FileTypeId + )), + Times.Once); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Services/BlobServiceTests.cs b/source/Functions.Tests/Services/BlobServiceTests.cs new file mode 100644 index 0000000..28b2fdb --- /dev/null +++ b/source/Functions.Tests/Services/BlobServiceTests.cs @@ -0,0 +1,326 @@ +using System.Text; +using Azure; +using Azure.Storage.Blobs; 
+using Azure.Storage.Blobs.Models; +using Azure.Storage.Queues; +using Bogus; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Splunk; +using Core.Services.Interfaces; +using FluentAssertions; +using Functions.Services; +using Microsoft.Extensions.Logging; +using Moq; + +namespace Functions.Tests.Services +{ + public class BlobServiceTests + { + private readonly Mock _mockConfigService; + private readonly Mock> _mockLogger; + private readonly Mock _mockQueueClient; + private readonly Mock _mockBlobServiceClient; + private readonly Mock _mockContainerClient; + private readonly Mock _mockBlobClient; + private readonly Faker _faker; + + public BlobServiceTests() + { + _faker = new Faker(); + _mockConfigService = new Mock(); + _mockLogger = new Mock>(); + _mockQueueClient = new Mock(); + _mockBlobServiceClient = new Mock(); + _mockContainerClient = new Mock(); + _mockBlobClient = new Mock(); + } + + private BlobService CreateBlobService( + BlobStorage blobStorageConfig = null, + QueueClient queueClient = null) + { + string connectionString = + "DefaultEndpointsProtocol=https;" + + "AccountName=mystorageaccount;" + + "AccountKey=myAccountKey;" + + "EndpointSuffix=core.windows.net"; + + blobStorageConfig ??= new BlobStorage + { + ConnectionString = connectionString, + ContainerName = _faker.Random.Word(), + QueueName = _faker.Random.Word() + }; + + _mockConfigService + .Setup(x => x.GetBlobStorageConfiguration()) + .ReturnsAsync(blobStorageConfig); + + var service = new BlobService( + _mockConfigService.Object, + _mockLogger.Object, + queueClient ?? _mockQueueClient.Object); + + // Replace private BlobServiceClient with mock + var serviceField = typeof(BlobService) + .GetField("_blobServiceClient", + System.Reflection.BindingFlags.NonPublic | System.Reflection.BindingFlags.Instance); + serviceField.SetValue(service, _mockBlobServiceClient.Object); + + return service; + } + + [Fact] + public async Task AddObjectToBlob_WhenContainerExists_ShouldUploadSuccessfully() + { + // Arrange + var extractResponse = new ExtractResponse + { + FilePath = _faker.System.FileName(), + ExtractResponseStream = new MemoryStream(Encoding.UTF8.GetBytes(_faker.Lorem.Paragraph())) + }; + + var mockBlobContentInfo = new Mock(); + var mockResponse = new Mock>(); + mockResponse.Setup(x => x.Value).Returns(mockBlobContentInfo.Object); + + _mockBlobServiceClient + .Setup(x => x.GetBlobContainerClient(It.IsAny())) + .Returns(_mockContainerClient.Object); + + _mockContainerClient + .Setup(x => x.ExistsAsync(It.IsAny())) + .ReturnsAsync(Response.FromValue(true, new Mock().Object)); + + _mockContainerClient + .Setup(x => x.GetBlobClient(It.IsAny())) + .Returns(_mockBlobClient.Object); + + _mockBlobClient + .Setup(x => x.UploadAsync( + It.IsAny(), + It.Is(o => o == true), + It.IsAny())) + .ReturnsAsync(mockResponse.Object); + + var service = CreateBlobService(); + + // Act + var result = await service.AddObjectToBlob(extractResponse); + + // Assert + result.Should().Be(mockBlobContentInfo.Object); + } + + [Fact] + public async Task AddObjectToBlob_WhenContainerDoesNotExist_ShouldReturnNull() + { + // Arrange + var extractResponse = new ExtractResponse + { + FilePath = _faker.System.FileName(), + ExtractResponseStream = new MemoryStream(Encoding.UTF8.GetBytes(_faker.Lorem.Paragraph())) + }; + + _mockBlobServiceClient + .Setup(x => x.GetBlobContainerClient(It.IsAny())) + .Returns(_mockContainerClient.Object); + + _mockContainerClient + .Setup(x => x.ExistsAsync(It.IsAny())) + 
.ReturnsAsync(Response.FromValue(false, Mock.Of())); + + var service = CreateBlobService(); + + // Act + var result = await service.AddObjectToBlob(extractResponse); + + // Assert + result.Should().BeNull(); + } + + [Fact] + public async Task AddMessageToBlobQueue_WhenQueueExists_ShouldSendMessage() + { + // Arrange + var fileTypeId = _faker.Random.Int(1, 100); + var blobName = _faker.System.FileName(); + + _mockQueueClient + .Setup(x => x.ExistsAsync(It.IsAny())) + .ReturnsAsync(Response.FromValue(true, Mock.Of())); + + var service = CreateBlobService(); + + // Act + await service.AddMessageToBlobQueue(1, fileTypeId, blobName); + + // Assert + _mockQueueClient.Verify(x => x.SendMessageAsync(It.IsAny()), Times.Once); + + _mockLogger.Verify( + x => x.Log( + LogLevel.Information, + It.IsAny(), + It.Is((o, t) => o.ToString().Contains("Adding message to blob queue")), + null, + It.IsAny>()), + Times.Once); + } + + [Fact] + public async Task AddMessageToBlobQueue_WhenFileCountNotOne_ShouldNotSendMessage() + { + // Arrange + var fileTypeId = _faker.Random.Int(1, 100); + var blobName = _faker.System.FileName(); + + _mockQueueClient + .Setup(x => x.ExistsAsync(It.IsAny())) + .ReturnsAsync(Response.FromValue(true, Mock.Of())); + + var service = CreateBlobService(); + + // Act + await service.AddMessageToBlobQueue(2, fileTypeId, blobName); + + // Assert + _mockQueueClient.Verify( + x => x.SendMessageAsync(It.IsAny(), + It.IsAny()), + Times.Never); + } + + [Fact] + public async Task AddObjectToBlob_WhenRequestFailedExceptionOccurs_ShouldThrowException() + { + // Arrange + var extractResponse = new ExtractResponse + { + FilePath = _faker.System.FileName(), + ExtractResponseStream = new MemoryStream(Encoding.UTF8.GetBytes(_faker.Lorem.Paragraph())) + }; + + var requestFailedException = new RequestFailedException("Container does not exist"); + + _mockBlobServiceClient + .Setup(x => x.GetBlobContainerClient(It.IsAny())) + .Returns(_mockContainerClient.Object); + + _mockContainerClient + .Setup(x => x.ExistsAsync(It.IsAny())) + .ThrowsAsync(requestFailedException); + + var service = CreateBlobService(); + + // Act & Assert + await Assert.ThrowsAsync(() => + service.AddObjectToBlob(extractResponse)); + + _mockLogger.Verify( + x => x.Log( + LogLevel.Error, + It.IsAny(), + It.Is((o, t) => o.ToString().Contains("The container does not exist")), + requestFailedException, + It.IsAny>()), + Times.Once); + } + + [Fact] + public async Task AddObjectToBlob_ShouldLogError_AndThrow_OnGenericException() + { + // Arrange + var extractResponse = new ExtractResponse + { + FilePath = _faker.System.FileName(), + ExtractResponseStream = new MemoryStream(Encoding.UTF8.GetBytes(_faker.Lorem.Paragraph())) + }; + + var genericException = new Exception("Something bad happened"); + + _mockBlobServiceClient + .Setup(x => x.GetBlobContainerClient(It.IsAny())) + .Returns(_mockContainerClient.Object); + + _mockContainerClient + .Setup(x => x.ExistsAsync(It.IsAny())) + .ThrowsAsync(genericException); + + var service = CreateBlobService(); + + // Act & Assert + await Assert.ThrowsAsync(() => + service.AddObjectToBlob(extractResponse)); + + _mockLogger.Verify( + x => x.Log( + LogLevel.Error, + It.IsAny(), + It.Is((o, t) => + o.ToString().Contains("An error occurred while trying to add a blob to the storage")), + genericException, + It.IsAny>()), + Times.Once); + } + + [Fact] + public async Task AddMessageToBlobQueue_WhenQueueDoesNotExist_ShouldThrowException() + { + // Arrange + var fileTypeId = _faker.Random.Int(1, 100); + var blobName 
= _faker.System.FileName(); + + var requestFailedException = new RequestFailedException("Queue does not exist"); + + _mockQueueClient + .Setup(x => x.ExistsAsync(It.IsAny())) + .ThrowsAsync(requestFailedException); + + var service = CreateBlobService(); + + // Act & Assert + await Assert.ThrowsAsync(() => + service.AddMessageToBlobQueue(1, fileTypeId, blobName)); + + _mockLogger.Verify( + x => x.Log( + LogLevel.Error, + It.IsAny(), + It.Is((o, t) => o.ToString().Contains("The queue does not exist")), + requestFailedException, + It.IsAny>()), + Times.Once); + } + + [Fact] + public async Task AddMessageToBlobQueue_ShouldThrowError_AndLogMessage_OnGenericException() + { + // Arrange + var fileTypeId = _faker.Random.Int(1, 100); + var blobName = _faker.System.FileName(); + + var genericException = new Exception("Something bad happened"); + + _mockQueueClient + .Setup(x => x.ExistsAsync(It.IsAny())) + .ThrowsAsync(genericException); + + var service = CreateBlobService(); + + // Act & Assert + await Assert.ThrowsAsync(() => + service.AddMessageToBlobQueue(1, fileTypeId, blobName)); + + _mockLogger.Verify( + x => x.Log( + LogLevel.Error, + It.IsAny(), + It.Is((o, t) => + o.ToString().Contains("An error occurred while trying to add a message to the queue")), + genericException, + It.IsAny>()), + Times.Once); + } + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Services/ConfigurationServiceTests.cs b/source/Functions.Tests/Services/ConfigurationServiceTests.cs new file mode 100644 index 0000000..1ba3191 --- /dev/null +++ b/source/Functions.Tests/Services/ConfigurationServiceTests.cs @@ -0,0 +1,255 @@ +using Bogus; +using Core.DTOs.Response.Configuration; +using Core.Helpers; +using Core.Services.Interfaces; +using Dapper; +using FluentAssertions; +using Functions.Services; +using Microsoft.Extensions.Logging.Testing; +using Moq; + +namespace Functions.Tests.Services +{ + public class ConfigurationServiceTests + { + private readonly Mock _mockDataService; + private readonly FakeLogger _fakeLogger; + private readonly ConfigurationService _configurationService; + + public ConfigurationServiceTests() + { + _mockDataService = new Mock(); + _fakeLogger = new FakeLogger(); + + _configurationService = new ConfigurationService(_mockDataService.Object, _fakeLogger); + } + + private static BlobStorage GenerateBlobStorage() => + new Faker() + .RuleFor(b => b.ConnectionString, f => f.Internet.Url()) + .RuleFor(b => b.ContainerName, f => f.Random.Word()) + .RuleFor(b => b.QueueName, f => f.Random.Word()) + .Generate(); + + private static FilePathConstants GenerateFilePathConstants() => + new Faker() + .RuleFor(f => f.PathSeparator, "/") + .RuleFor(f => f.ProjectNameFilePrefix, "proj_") + .RuleFor(f => f.ComponentSeparator, "_") + .RuleFor(f => f.FileExtension, ".csv") + .Generate(); + + private static List GenerateFileTypes(int count = 3) => + new Faker() + .RuleFor(f => f.FileTypeFilePrefix, f => f.Random.Word()) + .RuleFor(f => f.DirectoryName, f => f.System.DirectoryPath()) + .RuleFor(f => f.Enabled, true) + .Generate(count); + + private static SplunkClient GenerateSplunkClient() => + new Faker() + .RuleFor(s => s.ApiToken, f => f.Random.AlphaNumeric(32)) + .RuleFor(s => s.QueryTimeout, f => f.Random.Int(10, 60)) + .Generate(); + + private static List GenerateSplunkInstances(int count = 3) => new List() + { + new SplunkInstance() + { + Source = SplunkInstances.cloud.ToString(), + SourceGroup = "fakeGroup1", + }, + new SplunkInstance() + { + Source = SplunkInstances.spineb.ToString(), + 
SourceGroup = "fakeGroup2", + }, + new SplunkInstance() + { + Source = SplunkInstances.spinea.ToString(), + SourceGroup = "fakeGroup3", + }, + }; + + #region GetBlobStorageConfiguration Tests + + [Fact] + public async Task GetBlobStorageConfiguration_ShouldReturnBlobStorage_WhenDataExists() + { + var expected = GenerateBlobStorage(); + _mockDataService + .Setup(x => x.ExecuteQueryStoredProcedure("[Configuration].[GetBlobStorageConfiguration]", + It.IsAny())) + .ReturnsAsync(new List { expected }); + + var result = await _configurationService.GetBlobStorageConfiguration(); + + result.Should().BeEquivalentTo(expected); + } + + [Fact] + public async Task GetBlobStorageConfiguration_ShouldReturnNull_WhenNoDataExists() + { + _mockDataService + .Setup(x => x.ExecuteQueryStoredProcedure( + It.IsAny(), + It.IsAny())) + .ReturnsAsync(new List()); + + var result = await _configurationService.GetBlobStorageConfiguration(); + + result.Should().BeNull(); + } + + #endregion + + #region GetFilePathConstants Tests + + [Fact] + public async Task GetFilePathConstants_ShouldReturnFilePathConstants_WhenDataExists() + { + var expected = GenerateFilePathConstants(); + _mockDataService + .Setup(x => x.ExecuteQueryStoredProcedure( + "[Configuration].[GetFilePathConstants]", + It.IsAny())) + .ReturnsAsync(new List { expected }); + + var result = await _configurationService.GetFilePathConstants(); + + result.Should().BeEquivalentTo(expected); + } + + [Fact] + public async Task GetFilePathConstants_ShouldReturnNull_WhenNoDataExists() + { + _mockDataService + .Setup(x => x.ExecuteQueryStoredProcedure( + "[Configuration].[GetFilePathConstants]", + It.IsAny())) + .ReturnsAsync(new List()); + + var result = await _configurationService.GetFilePathConstants(); + + result.Should().BeNull(); + } + + #endregion + + #region GetFileTypes Tests + + [Fact] + public async Task GetFileTypes_ShouldReturnFileTypes_WhenDataExists() + { + var expected = GenerateFileTypes(); + _mockDataService + .Setup(x => x.ExecuteQueryStoredProcedure("[Configuration].[GetFileTypes]", + It.IsAny())) + .ReturnsAsync(expected); + + var result = await _configurationService.GetFileTypes(); + + result.Should().BeEquivalentTo(expected); + } + + + [Fact] + public async Task GetFileTypes_ShouldReturnEmptyList_WhenNoDataExists() + { + _mockDataService + .Setup(x => x.ExecuteQueryStoredProcedure("[Configuration].[GetFileTypes]", + It.IsAny())) + .ReturnsAsync([]); + + var result = await _configurationService.GetFileTypes(); + + result.Should().BeEmpty(); + } + + #endregion + + #region GetFileType + + [Fact] + public async Task GetFileType_ShouldReturnMatchingFiletTypeFromConfiguration() + { + var expectedResult = new FileType() + { + FileTypeId = 1, + DirectoryName = "fakeDir", + FileTypeFilePrefix = "asidlookup", + Enabled = true, + }; + + var fakeResults = new List() + { + new() + { + FileTypeId = 2, + DirectoryName = "fakeDir", + FileTypeFilePrefix = "ssptrans", + Enabled = true, + }, + new() + { + FileTypeId = 3, + DirectoryName = "fakeDir", + FileTypeFilePrefix = "meshtrans", + Enabled = true, + }, + expectedResult + }; + + _mockDataService.Setup(x => + x.ExecuteQueryStoredProcedure("[Configuration].[GetFileTypes]", + It.IsAny())) + .ReturnsAsync(fakeResults); + + var result = await _configurationService.GetFileType(FileTypes.asidlookup); + + result.Should().BeEquivalentTo(expectedResult); + } + + #endregion + + #region GetSplunkClientConfiguration Tests + + [Fact] + public async Task GetSplunkClientConfiguration_ShouldReturnSplunkClient_WhenDataExists() 
+ { + var expected = GenerateSplunkClient(); + _mockDataService + .Setup(x => + x.ExecuteQueryStoredProcedure("[Configuration].[GetSplunkClientConfiguration]", + It.IsAny())) + .ReturnsAsync(new List { expected }); + + var result = await _configurationService.GetSplunkClientConfiguration(); + + result.Should().BeEquivalentTo(expected); + } + + #endregion + + #region GetSplunkInstance Tests + + [Fact] + public async Task GetSplunkInstance_ShouldReturnMatchingInstance_WhenExists() + { + var instances = GenerateSplunkInstances(); + var targetInstance = instances.First(); + + _mockDataService + .Setup(x => x.ExecuteQueryStoredProcedure("[Configuration].[GetSplunkInstances]", + It.IsAny())) + .ReturnsAsync(instances); + + var result = + await _configurationService.GetSplunkInstance(Enum.Parse(targetInstance.Source)); + + result.Should().BeEquivalentTo(targetInstance); + } + + #endregion + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Services/CoreConfigurationServiceTests.cs b/source/Functions.Tests/Services/CoreConfigurationServiceTests.cs new file mode 100644 index 0000000..fbd5ed0 --- /dev/null +++ b/source/Functions.Tests/Services/CoreConfigurationServiceTests.cs @@ -0,0 +1,56 @@ +using FluentAssertions; +using Functions.Services; +using Microsoft.Extensions.Configuration; +using Moq; + +namespace Functions.Tests.Services; + +public class CoreConfigurationServiceTests +{ + private readonly Mock _mockConfiguration; + private readonly CoreConfigurationService _configurationService; + + public CoreConfigurationServiceTests() + { + var inMemorySettings = new Dictionary + { + { "ConnectionStrings:GPConnectAnalytics", "Server=myServer;Database=myDB;User Id=myUser;Password=myPass;" } + }; + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(inMemorySettings!) 
+ .Build(); + + + _configurationService = new CoreConfigurationService(configuration); + } + + [Fact] + public void GetConnectionString_ShouldReturnConnectionString_WhenValidNameIsProvided() + { + // Arrange + const string connectionName = "GPConnectAnalytics"; + const string expectedConnectionString = "Server=myServer;Database=myDB;User Id=myUser;Password=myPass;"; + + // Act + var result = _configurationService.GetConnectionString(connectionName); + + // Assert + result.Should().Be(expectedConnectionString); + } + + + [Fact] + public void GetConnectionString_ShouldThrowArgumentException_WhenConnectionStringIsNull() + { + // Arrange + const string connectionName = "InvalidConnection"; + + // Act + Action act = () => _configurationService.GetConnectionString(connectionName); + + // Assert + act.Should().Throw() + .WithMessage("No connection string with given name"); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Services/FileServiceTests.cs b/source/Functions.Tests/Services/FileServiceTests.cs new file mode 100644 index 0000000..dcae812 --- /dev/null +++ b/source/Functions.Tests/Services/FileServiceTests.cs @@ -0,0 +1,29 @@ +using Core.Services.Interfaces; +using Dapper; +using Functions.Services; +using Moq; + +namespace Functions.Tests.Services; + +public class FileServiceTests +{ + [Fact] + public void ApiReaderAddFile_ShouldCallStoredProcedure() + { + // Arrange + var _mockDataService = new Mock(); + var _fileService = new FileService(_mockDataService.Object); + + // Act + _fileService.ApiReaderAddFile(1, "path/to/file", true); + + // Assert + _mockDataService.Verify(x => x.ExecuteStoredProcedure("ApiReader.AddFile", + It.Is(p => + p.Get("FileTypeId") == 1 && + p.Get("FilePath") == "path/to/file" && + p.Get("Override") == true + )), + Times.Once); + } +} \ No newline at end of file diff --git a/source/Functions.Tests/Services/ImportServiceTests.cs b/source/Functions.Tests/Services/ImportServiceTests.cs new file mode 100644 index 0000000..54de01f --- /dev/null +++ b/source/Functions.Tests/Services/ImportServiceTests.cs @@ -0,0 +1,315 @@ +using System.Data; +using Azure.Storage.Blobs.Models; +using Bogus; +using Core.DTOs.Request; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Queue; +using Core.DTOs.Response.Splunk; +using Core.Helpers; +using Core.Services.Interfaces; +using Dapper; +using FluentAssertions; +using Functions.Services; +using Functions.Services.Interfaces; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Moq; + +namespace Functions.Tests.Services; + +public class ImportServiceTests +{ + private readonly Mock _mockConfigService; + private readonly Mock _mockDataService; + private readonly Mock _mockBlobService; + private readonly FakeLogger _fakeLogger; + private readonly ImportService _importService; + private readonly Mock _mockFileService; + + public ImportServiceTests() + { + _fakeLogger = new FakeLogger(); + _mockConfigService = new Mock(); + _mockDataService = new Mock(); + _mockBlobService = new Mock(); + _mockFileService = new Mock(); + + + _importService = new ImportService( + _mockConfigService.Object, + _mockDataService.Object, + _mockBlobService.Object, + _fakeLogger, + _mockFileService.Object + ); + } + + #region AddDownloadFileManually Tests + + [Fact] + public void AddDownloadFileManually_ShouldAddFileToDb_And_AddToBlobQueue() + { + // Arrange + var fakeFileType = GenerateFileType(); + var filePath = "asid-lookup-data/testfile.csv"; + _mockConfigService.Setup(x => 
x.GetFileType(FileTypes.asidlookup)).ReturnsAsync(fakeFileType); + + _mockBlobService + .Setup(x => x.AddMessageToBlobQueue(It.IsAny(), It.IsAny(), It.IsAny(), + It.IsAny())); + + + _mockFileService.Setup(x => x.ApiReaderAddFile(fakeFileType.FileTypeId, filePath, true)) + .ReturnsAsync(1); + + // Act + var result = _importService.AddDownloadedFileManually(filePath); + + // Assert + _mockBlobService.Verify(x => + x.AddMessageToBlobQueue( + 1, // mocked file count added + fakeFileType.FileTypeId, + filePath, + true), Times.Once + ); + + // Verify it calls internal AddFileMessage - which in turn calls the stored procedure + _mockFileService.Verify(x => x.ApiReaderAddFile(fakeFileType.FileTypeId, filePath, true), Times.Once); + } + + [Fact] + public async Task AddDownloadFileManually_ShouldThrowArgumentException_WhenFileTypeFromPathNull() + { + // Arrange + var filePath = "non-existant-fileType/testfile.csv"; + + // Act + var ex = await Assert.ThrowsAsync(() => + _importService.AddDownloadedFileManually(filePath) + ); + + // Assert + ex.Message.Should().Be("Filepath does not contain vailid file type suffix"); + } + + #endregion + + + #region AddObjectFileMessage Tests + + [Fact] + public async Task AddObjectFileMessage_ShouldAddObjectToBlob_WhenSuccessfulStatusCode() + { + //Arrange + var fileType = GenerateFileType(); + var extractResponse = new ExtractResponse() + { + ExtractResponseMessage = new HttpResponseMessage() + { + StatusCode = System.Net.HttpStatusCode.OK, + }, + FilePath = "asid-lookup-data/testfile.csv", + }; + + // Act + await _importService.AddObjectFileMessage(fileType, extractResponse); + + // Assert + _mockBlobService.Verify(x => x.AddObjectToBlob(extractResponse), Times.Once); + } + + [Fact] + public async Task AddObjectFileMessage_LogsWarning_WhenStatusCodeNotOk() + { + //Arrange + var fileType = GenerateFileType(); + var extractResponse = new ExtractResponse() + { + ExtractResponseMessage = new HttpResponseMessage() + { + StatusCode = System.Net.HttpStatusCode.BadRequest, + }, + FilePath = "asid-lookup-data/testfile.csv", + }; + + // Act + await _importService.AddObjectFileMessage(fileType, extractResponse); + + // Assert + _fakeLogger.LatestRecord.Message.Should().Be(extractResponse.ExtractResponseMessage.ToString()); + _fakeLogger.LatestRecord.Level.Should().Be(LogLevel.Warning); + } + + [Fact] + public async Task AddObjectFileMessage_DoesNotCallAddObjectToBlob_WhenStatusCodeNotOk() + { + //Arrange + var fileType = GenerateFileType(); + var extractResponse = new ExtractResponse() + { + ExtractResponseMessage = new HttpResponseMessage() + { + StatusCode = System.Net.HttpStatusCode.BadRequest, + }, + FilePath = "asid-lookup-data/testfile.csv", + }; + + // Act + await _importService.AddObjectFileMessage(fileType, extractResponse); + + // Assert + _mockBlobService.Verify(x => x.AddObjectToBlob(extractResponse), Times.Never); + } + + [Fact] + public async Task AddObjectFileMessage_ShouldAddFileToDb_And_AddMessageToBlobQueue() + { + // Arrange + var fileType = GenerateFileType(); + var extractResponse = new ExtractResponse() + { + ExtractResponseMessage = new HttpResponseMessage() + { + StatusCode = System.Net.HttpStatusCode.OK, + }, + FilePath = "asid-lookup-data/testfile.csv", + }; + + _mockBlobService + .Setup(x => x.AddObjectToBlob(extractResponse)) + .ReturnsAsync(Mock.Of()); + + _mockFileService.Setup(x => x.ApiReaderAddFile(fileType.FileTypeId, extractResponse.FilePath, true)) + .ReturnsAsync(1); + + // Act + await _importService.AddObjectFileMessage(fileType, 
extractResponse); + + // Assert + _mockBlobService.Verify(x => + x.AddMessageToBlobQueue( + 1, // mocked file count added + fileType.FileTypeId, + extractResponse.FilePath, + true), Times.Once + ); + + _mockFileService.Verify(x => x.ApiReaderAddFile(fileType.FileTypeId, extractResponse.FilePath, true), + Times.Once); + } + + [Fact] + public async Task AddObjectFileMessage_ShouldNotAddFileToDb_And_AddMessageToBlobQueue_WhenStatusNotOk() + { + // Arrange + var fileType = GenerateFileType(); + var extractResponse = new ExtractResponse() + { + ExtractResponseMessage = new HttpResponseMessage() + { + StatusCode = System.Net.HttpStatusCode.BadRequest, + }, + FilePath = "asid-lookup-data/testfile.csv", + }; + + _mockBlobService + .Setup(x => x.AddObjectToBlob(extractResponse)) + .ReturnsAsync(Mock.Of()); + + _mockFileService.Setup(x => x.ApiReaderAddFile(fileType.FileTypeId, extractResponse.FilePath, true)) + .ReturnsAsync(1); + + // Act + await _importService.AddObjectFileMessage(fileType, extractResponse); + + // Assert + _mockBlobService.Verify(x => + x.AddMessageToBlobQueue( + 1, // mocked file count added + fileType.FileTypeId, + extractResponse.FilePath, + true), Times.Never + ); + + _mockFileService.Verify(x => x.ApiReaderAddFile(fileType.FileTypeId, extractResponse.FilePath, true), + Times.Never); + } + + #endregion + + #region InstallData Tests + + [Fact] + public async Task InstallData_ShouldExecuteStoredProcedure_InstallNextFile_WithCorrectParameters() + { + // Arrange + var fileType = GenerateFileType(); + var mockQueueItem = new Message + { + FileTypeId = fileType.FileTypeId, + BlobName = "RandomBlobName", + Override = true, + }; + + var procedureName = "Import.InstallNextFile"; + var callCount = 0; + var capturedParams = new List(); + + _mockDataService + .Setup(x => x.ExecuteStoredProcedureWithOutputParameters(procedureName, It.IsAny())) + .Callback((_, parameters) => + { + // Capture a copy of the parameters + var copiedParams = new DynamicParameters(parameters); + capturedParams.Add(copiedParams); + + // Simulate stored procedure behavior: first call sets MoreFilesToInstall = true, second = false + parameters.Add("@MoreFilesToInstall", callCount == 0, DbType.Boolean, ParameterDirection.Output); + callCount++; + }) + .ReturnsAsync((string proc, DynamicParameters parameters) => parameters); + + + // Act + await _importService.InstallData(mockQueueItem); + + // Assert + _mockDataService.Verify( + x => x.ExecuteStoredProcedureWithOutputParameters(procedureName, It.IsAny()), + Times.Exactly(2)); + + // Validate first call parameters + Assert.Equal(mockQueueItem.FileTypeId, capturedParams[0].Get("@FileTypeId")); + Assert.Equal(mockQueueItem.Override, capturedParams[0].Get("@Override")); + + // Validate second call parameters + Assert.Equal(mockQueueItem.FileTypeId, capturedParams[1].Get("@FileTypeId")); + Assert.Equal(mockQueueItem.Override, capturedParams[1].Get("@Override")); + + // Validate log messages are recorded: + _fakeLogger.Collector.GetSnapshot()[0].Message.Should().Be("Installing file into database"); + var messages = _fakeLogger.Collector.GetSnapshot(); + + var moreFilesMessageTrue = messages + .Any(x => x.Message.Contains("More files to install? True")); + moreFilesMessageTrue.Should().BeTrue(); + + var moreFilesMessageFalse = messages + .Any(x => x.Message.Contains("More files to install? 
False")); + + moreFilesMessageFalse.Should().BeTrue(); + } + + #endregion + + #region Private Methods + + private static FileType GenerateFileType() => new Faker() + .RuleFor(f => f.FileTypeFilePrefix, f => f.Random.Word()) + .RuleFor(f => f.DirectoryName, f => f.System.DirectoryPath()) + .RuleFor(f => f.Enabled, true) + .Generate(); + + #endregion +} \ No newline at end of file diff --git a/source/Functions.Tests/Services/SplunkServicesTests.cs b/source/Functions.Tests/Services/SplunkServicesTests.cs new file mode 100644 index 0000000..d56ea49 --- /dev/null +++ b/source/Functions.Tests/Services/SplunkServicesTests.cs @@ -0,0 +1,294 @@ +using System.IdentityModel.Tokens.Jwt; +using System.Net; +using System.Security.Claims; +using System.Text; +using Bogus; +using Core; +using Core.DTOs.Request; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Splunk; +using Core.Helpers; +using Core.Services.Interfaces; +using FluentAssertions; +using Functions.Services; +using Functions.Services.Interfaces; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Microsoft.IdentityModel.Tokens; +using Moq; +using Moq.Protected; +using Newtonsoft.Json; + +namespace Functions.Tests.Services; + +public class SplunkServiceTests +{ + private readonly Mock _mockConfigService; + private readonly Mock _mockHttpClientFactory; + private readonly Mock _mockImportService; + private readonly FakeLogger _fakeLogger; + private readonly SplunkService _splunkService; + private readonly Mock _mockTimeProvider; + + public SplunkServiceTests() + { + _mockTimeProvider = new Mock(); + _mockConfigService = new Mock(); + _mockHttpClientFactory = new Mock(); + _mockImportService = new Mock(); + _fakeLogger = new FakeLogger(); + + _mockConfigService.Setup(x => x.GetFilePathConstants()) + .ReturnsAsync(new FilePathConstants + { + PathSeparator = "/", ProjectNameFilePrefix = "proj_", ComponentSeparator = "_", FileExtension = ".csv" + }); + + var splunkInstance = new SplunkInstance { Source = "splunk-source" }; + _mockConfigService.Setup(x => x.GetSplunkInstance(It.IsAny())).ReturnsAsync(splunkInstance); + + _splunkService = new SplunkService( + _mockConfigService.Object, + _mockHttpClientFactory.Object, + _mockImportService.Object, + _fakeLogger, + _mockTimeProvider.Object + ); + } + + private static FileType GenerateFileType() => new Faker() + .RuleFor(f => f.FileTypeFilePrefix, f => f.Random.Word()) + .RuleFor(f => f.DirectoryName, f => f.System.DirectoryPath()) + .RuleFor(f => f.Enabled, true) + .Generate(); + + private static UriRequest GenerateUriRequest(DateTime? fixedFromDate = null, DateTime? fixedToDate = null, + TimeSpan? hour = null, bool isFake = true) + { + if (!isFake) + { + return new UriRequest + { + Request = new Uri("https://splunk.com/api/fixed-test"), + EarliestDate = fixedFromDate ?? DateTime.Now.AddDays(-1), + LatestDate = fixedToDate ?? DateTime.Now, + Hour = hour ?? 
new TimeSpan(0, 0, 0) + }; + } + + var faker = new Faker(); + return new UriRequest() + { + Request = new Uri("https://splunk.com/api/fake-test"), + EarliestDate = faker.Date.Past(), + LatestDate = faker.Date.Recent(1), + Hour = faker.Date.Timespan() + }; + } + + + [Fact] + public async Task DownloadCSVDateRangeAsync_ShouldReturnExtractResponse_WhenValidInput() + { + var fileType = GenerateFileType(); + var uriRequest = GenerateUriRequest(); + + var httpClient = new HttpClient(new Mock().Object); + _mockHttpClientFactory.Setup(x => x.CreateClient(It.IsAny())).Returns(httpClient); + + var result = await _splunkService.DownloadCSVDateRangeAsync(fileType, uriRequest, true); + + result.Should().NotBeNull(); + result.FilePath.Should().Contain(fileType.FileTypeFilePrefix); + } + + [Fact] + public async Task DownloadCSVDateRangeAsync_ShouldThrowTimeoutException() + { + var fileType = GenerateFileType(); + var uriRequest = GenerateUriRequest(); + _mockConfigService.Setup(x => x.GetFilePathConstants()).ThrowsAsync(new TimeoutException()); + + Func act = async () => await _splunkService.DownloadCSVDateRangeAsync(fileType, uriRequest, true); + + await act.Should().ThrowAsync(); + _fakeLogger.Collector.LatestRecord.Message.Should().Be("A timeout error has occurred"); + } + + [Fact] + public async Task DownloadCSVDateRangeAsync_ShouldThrowException() + { + var fileType = GenerateFileType(); + var uriRequest = GenerateUriRequest(); + _mockConfigService.Setup(x => x.GetFilePathConstants()).ThrowsAsync(new Exception()); + + Func act = async () => await _splunkService.DownloadCSVDateRangeAsync(fileType, uriRequest, true); + await act.Should().ThrowAsync(); + + _fakeLogger.Collector.LatestRecord.Message.Should().Be("An error occurred in trying to execute a GET request"); + } + + [Fact] + public async Task ExecuteBatchDownloadFromSplunk_ShouldCallDownloadAndImport_WhenFileTypeIsEnabled() + { + var fileType = GenerateFileType(); + var uriRequest = GenerateUriRequest(); + var extractResponse = new ExtractResponse { FilePath = "mock-path" }; + + _mockConfigService.Setup(x => x.GetFilePathConstants()).ReturnsAsync(new FilePathConstants()); + _mockConfigService.Setup(x => x.GetSplunkInstance(It.IsAny())) + .ReturnsAsync(new SplunkInstance()); + + _mockImportService.Setup(x => x.AddObjectFileMessage(fileType, extractResponse)).Returns(Task.CompletedTask); + + await _splunkService.ExecuteBatchDownloadFromSplunk(fileType, uriRequest, true); + + _mockImportService.Verify(x => x.AddObjectFileMessage(fileType, It.IsAny()), Times.Once); + } + + [Fact] + public void ExecuteBatchDownloadFromSplunk_ShouldLogWarning_WhenFileTypeIsDisabled() + { + var fileType = GenerateFileType(); + fileType.Enabled = false; + + var uriRequest = GenerateUriRequest(); + + + _splunkService.ExecuteBatchDownloadFromSplunk(fileType, uriRequest, true); + + _fakeLogger.Collector.LatestRecord.Message.Should() + .Be($"Filetype {fileType.FileTypeFilePrefix} is not enabled. 
Please check if this is correct"); + + _fakeLogger.Collector.LatestRecord.Level.Should().Be(LogLevel.Warning); + } + + [Fact] + public void ExecuteBatchDownloadFromSplunk_ShouldLogError_WhenExceptionThrown() + { + var fileType = GenerateFileType(); + + var uriRequest = GenerateUriRequest(); + _mockConfigService.Setup(x => x.GetFilePathConstants()).ThrowsAsync(new Exception()); + + _ = _splunkService.ExecuteBatchDownloadFromSplunk(fileType, uriRequest, true); + + _fakeLogger.Collector.LatestRecord.Message.Should() + .Be("An error has occurred while attempting to execute an Azure function"); + + _fakeLogger.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + } + + [Fact] + public async Task GetSearchResultsFromRequestUri_ShouldReturnExtractResponse_ApiTokenValid() + { + var fakeToken = GenerateFakeJwt(); + + var uriRequest = GenerateUriRequest(isFake: false); + var mockHandler = new Mock(); + + mockHandler + .Protected() + .Setup>( + "SendAsync", + ItExpr.IsAny(), + ItExpr.IsAny() + ) + .ReturnsAsync(new HttpResponseMessage + { + StatusCode = HttpStatusCode.OK, + Content = new StringContent("{ \"message\": \"Success\" }", Encoding.UTF8, "application/json") + }); + + var httpClient = new HttpClient(mockHandler.Object); + _mockHttpClientFactory.Setup(x => x.CreateClient("SplunkApiClient")).Returns(httpClient); + _mockConfigService.Setup(x => x.GetSplunkClientConfiguration()).ReturnsAsync(new SplunkClient + { + QueryTimeout = 30, + ApiToken = fakeToken, + }); + + // Act + var resultMessage = await _splunkService.GetSearchResultFromRequestUri(uriRequest); + + // Assert: Ensure the response is not null + resultMessage.Should().NotBeNull(); + resultMessage.ExtractResponseMessage.Should().NotBeNull(); + resultMessage.ExtractResponseStream.Should().NotBeNull(); + resultMessage.UriRequest.Should().Be(uriRequest); + + resultMessage.ExtractResponseMessage.StatusCode.Should().Be(HttpStatusCode.OK); + + // Assert: Read and verify response content + var contentString = await new StreamReader(resultMessage.ExtractResponseStream).ReadToEndAsync(); + contentString.Should().Contain("Success"); // Verify the JSON content + } + + [Fact] + public async Task GetSearchResultsFromRequestUri_ShouldReturnExtractResponse_ApiToken_NotValid() + { + var fakeToken = GenerateFakeJwt(isValid: false); + var uriRequest = GenerateUriRequest(isFake: false); + _mockConfigService.Setup(x => x.GetSplunkClientConfiguration()).ReturnsAsync(new SplunkClient + { + QueryTimeout = 30, + ApiToken = fakeToken, + }); + + // Act + var resultMessage = await _splunkService.GetSearchResultFromRequestUri(uriRequest); + + // Assert: Ensure the response is not null + resultMessage.ExtractResponseMessage.StatusCode.Should().Be(HttpStatusCode.Unauthorized); + resultMessage.ExtractResponseMessage.ReasonPhrase.Should().Be("The authentication token has expired"); + } + + [Fact] + public async Task GetSearchResultsFromRequestUri_ShouldReturnRequestTimeout_OnOperationCancelledException() + { + var fakeToken = GenerateFakeJwt(isValid: false); + var uriRequest = GenerateUriRequest(isFake: false); + _mockConfigService.Setup(x => x.GetSplunkClientConfiguration()) + .ThrowsAsync(new OperationCanceledException("Operation timed out")); + + // Act + var resultMessage = await _splunkService.GetSearchResultFromRequestUri(uriRequest); + + // Assert: Ensure the response is not null + resultMessage.ExtractResponseMessage.StatusCode.Should().Be(HttpStatusCode.RequestTimeout); + resultMessage.ExtractResponseMessage.ReasonPhrase.Should().Be("Operation timed 
out"); + } + + + #region Helpers + + private string GenerateFakeJwt(bool isValid = true) + { + var securityKey = + new SymmetricSecurityKey("your-very-secure-and-long-secret-key-123456"u8.ToArray()); + var credentials = new SigningCredentials(securityKey, SecurityAlgorithms.HmacSha256); + + var claims = new[] + { + new Claim(JwtRegisteredClaimNames.Sub, "test-user"), + new Claim(JwtRegisteredClaimNames.Iss, "fake-issuer"), + new Claim(JwtRegisteredClaimNames.Aud, "expected-audience"), + new Claim(JwtRegisteredClaimNames.Exp, + isValid + ? (DateTimeOffset.UtcNow.AddYears(10)).ToUnixTimeSeconds().ToString() + : (DateTimeOffset.UtcNow.AddYears(-10)).ToUnixTimeSeconds().ToString()) + }; + + var token = new JwtSecurityToken( + issuer: "fake-issuer", + audience: "expected-audience", + claims: claims, + expires: isValid ? DateTime.UtcNow.AddYears(10) : DateTime.UtcNow.AddYears(-10), + signingCredentials: credentials + ); + + return new JwtSecurityTokenHandler().WriteToken(token); + } + + #endregion +} \ No newline at end of file diff --git a/source/Functions.Tests/TestHelpers/ConfigurationHelpers.cs b/source/Functions.Tests/TestHelpers/ConfigurationHelpers.cs new file mode 100644 index 0000000..2a474b0 --- /dev/null +++ b/source/Functions.Tests/TestHelpers/ConfigurationHelpers.cs @@ -0,0 +1,30 @@ +using Bogus; +using Core.DTOs.Response.Configuration; +using Microsoft.Extensions.Configuration; + +namespace Functions.Tests.TestHelpers; + +public class ConfigurationHelpers +{ + public static IConfiguration CreateDefaultConfiguration(string connectionString) + { + var inMemorySettings = new Dictionary + { + { "ConnectionStrings:GPConnectAnalytics", connectionString } + }; + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(inMemorySettings!) 
+ .Build(); + + return configuration; + } + + public static FilePathConstants GenerateFilePathConstants() => + new Faker() + .RuleFor(f => f.PathSeparator, "/") + .RuleFor(f => f.ProjectNameFilePrefix, "proj_") + .RuleFor(f => f.ComponentSeparator, "_") + .RuleFor(f => f.FileExtension, ".csv") + .Generate(); +} \ No newline at end of file diff --git a/source/Functions.Tests/TestHelpers/MockRequests.cs b/source/Functions.Tests/TestHelpers/MockRequests.cs new file mode 100644 index 0000000..f254701 --- /dev/null +++ b/source/Functions.Tests/TestHelpers/MockRequests.cs @@ -0,0 +1,56 @@ +using System.Text; +using Microsoft.Azure.Functions.Worker; +using Moq; + +namespace Functions.Tests.TestHelpers; + +public class MockRequests +{ + public static FakeHttpRequestData CreateDateRangeMockRequest(string startDate, string endDate) + { + var context = new Mock(); + var requestMock = new FakeHttpRequestData(context.Object, + new Uri($"https://localhost/api?StartDate={startDate}&EndDate={endDate}")); + + return requestMock; + } + + public static FakeHttpRequestData CreateTodayMockRequest() + { + var context = new Mock(); + var requestMock = new FakeHttpRequestData(context.Object, + new Uri($"https://localhost/api/getDataFromToday")); + + return requestMock; + } + + public static FakeHttpRequestData MockAddDownloadRequest() + { + var context = new Mock(); + var requestMock = new FakeHttpRequestData(context.Object, + new Uri($"https://localhost/api?FilePath=asid-lookup-data/test.csv")); + + return requestMock; + } + + public static FakeHttpRequestData MockRequestNoQuery() + { + var context = new Mock(); + var requestMock = new FakeHttpRequestData(context.Object, + new Uri($"https://localhost/api")); + + return requestMock; + } + + public static FakeHttpRequestData MockRequestWithBody(string jsonBody) + { + var context = new Mock(); + var requestMock = new FakeHttpRequestData( + context.Object, + new Uri($"https://localhost/api"), + new MemoryStream(Encoding.UTF8.GetBytes(jsonBody))); + + + return requestMock; + } +} \ No newline at end of file diff --git a/source/Functions.Tests/TestHelpers/MockTriggers.cs b/source/Functions.Tests/TestHelpers/MockTriggers.cs new file mode 100644 index 0000000..7939c30 --- /dev/null +++ b/source/Functions.Tests/TestHelpers/MockTriggers.cs @@ -0,0 +1,14 @@ +using Microsoft.Azure.Functions.Worker; + +namespace Functions.Tests.TestHelpers; + +public class MockTriggers +{ + public static TimerInfo CreateMockTimerInfo() + { + return new TimerInfo + { + ScheduleStatus = new ScheduleStatus() + }; + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.Functions/.gitignore b/source/Functions/.gitignore similarity index 100% rename from source/gpconnect-analytics.Functions/.gitignore rename to source/Functions/.gitignore diff --git a/source/Functions/Configuration/EmailConfigurationProvider.cs b/source/Functions/Configuration/EmailConfigurationProvider.cs new file mode 100644 index 0000000..c259c06 --- /dev/null +++ b/source/Functions/Configuration/EmailConfigurationProvider.cs @@ -0,0 +1,29 @@ +using System.Data; +using Core; +using Core.DTOs.Response.Configuration; +using Core.Helpers; +using Dapper; +using Microsoft.Extensions.Configuration; + +namespace Functions.Configuration; + +public interface IEmailConfigurationProvider +{ + Email? GetEmailConfiguration(IConfiguration configuration); +} + +public class EmailConfigurationProvider(IConnectionFactory connectionFactory) : IEmailConfigurationProvider +{ + public Email? 
GetEmailConfiguration(IConfiguration configuration) + { + var connectionString = configuration.GetConnectionString(ConnectionStrings.GpConnectAnalytics) ?? + throw new InvalidOperationException("connection string cannot be null at this point."); + + using var sqlConnection = connectionFactory.CreateConnection(connectionString); + + IEnumerable result = sqlConnection.Query("[Configuration].[GetEmailConfiguration]", + commandType: CommandType.StoredProcedure); + + return result.FirstOrDefault(); + } +} \ No newline at end of file diff --git a/source/Functions/Configuration/Infrastructure/HttpClient/HttpClientExtensions.cs b/source/Functions/Configuration/Infrastructure/HttpClient/HttpClientExtensions.cs new file mode 100644 index 0000000..87fbeee --- /dev/null +++ b/source/Functions/Configuration/Infrastructure/HttpClient/HttpClientExtensions.cs @@ -0,0 +1,23 @@ +using System.Net.Http.Headers; +using System.Security.Authentication; + +namespace Functions.Configuration.Infrastructure.HttpClient; + +public static class HttpClientExtensions +{ + public static void ConfigureHttpClient(System.Net.Http.HttpClient options) + { + options.Timeout = new TimeSpan(0, 0, 1, 0); + options.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/csv")); + options.DefaultRequestHeaders.CacheControl = new CacheControlHeaderValue { NoCache = true }; + } + + public static HttpMessageHandler CreateHttpMessageHandler() + { + var httpClientHandler = new HttpClientHandler + { + SslProtocols = SslProtocols.Tls13 | SslProtocols.Tls12 | SslProtocols.Tls11 | SslProtocols.Tls + }; + return httpClientHandler; + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.Functions/Configuration/Infrastructure/Logging/LoggingExtensions.cs b/source/Functions/Configuration/Infrastructure/Logging/LoggingExtensions.cs similarity index 69% rename from source/gpconnect-analytics.Functions/Configuration/Infrastructure/Logging/LoggingExtensions.cs rename to source/Functions/Configuration/Infrastructure/Logging/LoggingExtensions.cs index 5c9b259..5d96cc0 100644 --- a/source/gpconnect-analytics.Functions/Configuration/Infrastructure/Logging/LoggingExtensions.cs +++ b/source/Functions/Configuration/Infrastructure/Logging/LoggingExtensions.cs @@ -1,29 +1,39 @@ -using Dapper; -using gpconnect_analytics.DAL; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.Helpers; +using System.Data; +using Core.DTOs.Response.Configuration; +using Core.Helpers; +using Dapper; +using Microsoft.Data.SqlClient; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.Logging; +using NLog; +using NLog.Extensions.Logging; using NLog.Layouts; using NLog.Targets; -using NLog.Web; -using System.Data; -using System.Data.SqlClient; -using System.Linq; -namespace gpconnect_analytics.Configuration.Infrastructure.Logging +namespace Functions.Configuration.Infrastructure.Logging { public static class LoggingExtensions { - public static ILoggingBuilder ConfigureLoggingServices(ILoggingBuilder loggingBuilder, IConfiguration configuration) + public static ILoggingBuilder ConfigureLoggingServices( + ILoggingBuilder loggingBuilder, + IConfiguration configuration, + IEmailConfigurationProvider emailConfigurationProvider) { + // Set up NLog + LogManager.Setup().LoadConfigurationFromFile("nlog.config"); + + // Add NLog to the logging pipeline + loggingBuilder.AddNLog(); + + // Add custom targets manually (optional) var nLogConfiguration = new NLog.Config.LoggingConfiguration(); var consoleTarget = 
AddConsoleTarget(); var databaseTarget = AddDatabaseTarget(configuration); - var mailTarget = AddMailTarget(configuration); + var mailTarget = AddMailTarget(configuration, emailConfigurationProvider); - nLogConfiguration.Variables.Add("applicationVersion", ApplicationHelper.ApplicationVersion.GetAssemblyVersion()); + nLogConfiguration.Variables.Add("applicationVersion", + ApplicationHelper.ApplicationVersion.GetAssemblyVersion()); nLogConfiguration.AddRule(NLog.LogLevel.Trace, NLog.LogLevel.Fatal, consoleTarget); nLogConfiguration.AddRule(NLog.LogLevel.Trace, NLog.LogLevel.Fatal, databaseTarget); @@ -33,32 +43,26 @@ public static ILoggingBuilder ConfigureLoggingServices(ILoggingBuilder loggingBu nLogConfiguration.AddTarget(databaseTarget); nLogConfiguration.AddTarget(mailTarget); - var nLogOptions = new NLogAspNetCoreOptions - { - RegisterHttpContextAccessor = true, - IgnoreEmptyEventId = true, - IncludeScopes = true, - ShutdownOnDispose = true - }; - - var logFactory = NLogBuilder.ConfigureNLog(nLogConfiguration); - logFactory.AutoShutdown = false; - - var nLogConfig = logFactory.Configuration; - loggingBuilder.AddNLog(nLogConfig, nLogOptions); - return loggingBuilder; } - private static MailTarget AddMailTarget(IConfiguration configuration) + private static MailTarget AddMailTarget(IConfiguration configuration, + IEmailConfigurationProvider emailConfigurationProvider) { - var emailConfiguration = GetEmailConfiguration(configuration); + var emailConfiguration = emailConfigurationProvider.GetEmailConfiguration(configuration); + if (emailConfiguration == null) + { + throw new InvalidOperationException("EmailConfiguration cannot be null"); + } + var mailTarget = new MailTarget { Name = "Mail", Html = false, SmtpServer = emailConfiguration.Hostname, - SmtpAuthentication = emailConfiguration.AuthenticationRequired ? SmtpAuthenticationMode.Basic : SmtpAuthenticationMode.None, + SmtpAuthentication = emailConfiguration.AuthenticationRequired + ? SmtpAuthenticationMode.Basic + : SmtpAuthenticationMode.None, SmtpUserName = emailConfiguration.Username, SmtpPort = emailConfiguration.Port, SmtpPassword = emailConfiguration.Password, @@ -82,22 +86,27 @@ private static JsonLayout GetExceptionLayout() { Attributes = { - new JsonAttribute("type", "${exception:format=:innerFormat=Type:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"), - new JsonAttribute("message", "${exception:format=:innerFormat=Message:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"), - new JsonAttribute("stacktrace", "${exception:format=:innerFormat=StackTrace:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}") + new JsonAttribute("type", + "${exception:format=:innerFormat=Type:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"), + new JsonAttribute("message", + "${exception:format=:innerFormat=Message:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}"), + new JsonAttribute("stacktrace", + "${exception:format=:innerFormat=StackTrace:MaxInnerExceptionLevel=1:InnerExceptionSeparator=}") }, RenderEmptyObject = false }, false)); return exceptionLayout; } - private static Email GetEmailConfiguration(IConfiguration configuration) + private static Email? 
GetEmailConfiguration(IConfiguration configuration) { - using (var sqlConnection = new SqlConnection(configuration.GetConnectionString(ConnectionStrings.GpConnectAnalytics))) - { - var result = sqlConnection.Query("[Configuration].[GetEmailConfiguration]", commandType: CommandType.StoredProcedure); - return result.FirstOrDefault(); - } + using var sqlConnection = + new SqlConnection(configuration.GetConnectionString(ConnectionStrings.GpConnectAnalytics)); + + IEnumerable result = sqlConnection.Query("[Configuration].[GetEmailConfiguration]", + commandType: CommandType.StoredProcedure); + + return result.FirstOrDefault(); } private static DatabaseTarget AddDatabaseTarget(IConfiguration configuration) @@ -173,4 +182,4 @@ private static ConsoleTarget AddConsoleTarget() return consoleTarget; } } -} +} \ No newline at end of file diff --git a/source/Functions/Configuration/Infrastructure/Mapping/MappingExtensions.cs b/source/Functions/Configuration/Infrastructure/Mapping/MappingExtensions.cs new file mode 100644 index 0000000..b243854 --- /dev/null +++ b/source/Functions/Configuration/Infrastructure/Mapping/MappingExtensions.cs @@ -0,0 +1,13 @@ +using Core.Mapping; +using Dapper.FluentMap; + +namespace Functions.Configuration.Infrastructure.Mapping +{ + public static class MappingExtensions + { + public static void ConfigureMappingServices() + { + FluentMapper.Initialize(config => { config.AddMap(new SplunkInstanceMap()); }); + } + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.Functions/Dockerfile b/source/Functions/Dockerfile similarity index 100% rename from source/gpconnect-analytics.Functions/Dockerfile rename to source/Functions/Dockerfile diff --git a/source/Functions/ExecuteImportByTrigger.cs b/source/Functions/ExecuteImportByTrigger.cs new file mode 100644 index 0000000..3eb66a3 --- /dev/null +++ b/source/Functions/ExecuteImportByTrigger.cs @@ -0,0 +1,16 @@ +using Core.DTOs.Response.Queue; +using Functions.Services.Interfaces; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; + +namespace Functions +{ + public class ExecuteImportByTrigger(IImportService importService) + { + [Function("ExecuteImportByTrigger")] + public async Task Run([QueueTrigger("%QueueName%")] Message queueItem, ILogger log) + { + await importService.InstallData(queueItem); + } + } +} \ No newline at end of file diff --git a/source/Functions/Functions.csproj b/source/Functions/Functions.csproj new file mode 100644 index 0000000..f476f14 --- /dev/null +++ b/source/Functions/Functions.csproj @@ -0,0 +1,39 @@ + + + net8.0 + V4 + Exe + enable + enable + + + + + + + + + + + + + + + + PreserveNewest + + + PreserveNewest + Never + + + PreserveNewest + + + + + + + + + \ No newline at end of file diff --git a/source/Functions/GetDataFromApiByDateRange.cs b/source/Functions/GetDataFromApiByDateRange.cs new file mode 100644 index 0000000..4699464 --- /dev/null +++ b/source/Functions/GetDataFromApiByDateRange.cs @@ -0,0 +1,85 @@ +using System.Net; +using Core.Helpers; +using Functions.Services.Interfaces; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.Extensions.Logging; + +namespace Functions +{ + public class GetDataFromApiByDateRange(IBatchService batchService, ILogger log) + { + [Function("GetDataFromApiByDateRangeSspTrans")] + public async Task GetDataFromSspTransByDateRange( + [HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] + HttpRequestData req) + { + try + { + var startDate = 
req.Query["StartDate"]; + var endDate = req.Query["EndDate"]; + var rows = await batchService.StartBatchDownloadAsync(FileTypes.ssptrans, startDate, endDate); + + + var response = req.CreateResponse(HttpStatusCode.OK); + await response.WriteStringAsync($"Batch Download successful: {rows} requests processed"); + return response; + } + catch (Exception ex) + { + log.LogError(ex, "Error starting batch download for SSP Trans"); + var response = req.CreateResponse(HttpStatusCode.InternalServerError); + await response.WriteStringAsync("Failed to download - see logs"); + return response; + } + } + + [Function("GetDataFromApiByDateRangeMeshTrans")] + public async Task GetDataFromMeshTransByDateRange( + [HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] + HttpRequestData req) + { + try + { + var startDate = req.Query["StartDate"]; + var endDate = req.Query["EndDate"]; + var processed = await batchService.StartBatchDownloadAsync(FileTypes.meshtrans, startDate, endDate); + + var response = req.CreateResponse(HttpStatusCode.OK); + await response.WriteStringAsync($"Batch Download successful: {processed} requests processed"); + return response; + } + catch (Exception ex) + { + log.LogError(ex, "Error starting batch download for Mesh Trans"); + var response = req.CreateResponse(HttpStatusCode.InternalServerError); + await response.WriteStringAsync("Failed to download - see logs"); + return response; + } + } + + [Function("GetDataFromApiByDateRangeAsidLookup")] + public async Task GetDataFromAsidLookupByDateRange( + [HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] + HttpRequestData req) + { + try + { + var startDate = req.Query["StartDate"]; + var endDate = req.Query["EndDate"]; + var processed = await batchService.StartBatchDownloadAsync(FileTypes.asidlookup, startDate, endDate); + + var response = req.CreateResponse(HttpStatusCode.OK); + await response.WriteStringAsync($"Batch Download successful: {processed} requests processed"); + return response; + } + catch (Exception ex) + { + log.LogError(ex, "Error starting batch download for Asid Lookup"); + var response = req.CreateResponse(HttpStatusCode.InternalServerError); + await response.WriteStringAsync("Failed to download - see logs"); + return response; + } + } + } +} \ No newline at end of file diff --git a/source/Functions/GetDataFromApiByTrigger.cs b/source/Functions/GetDataFromApiByTrigger.cs new file mode 100644 index 0000000..c4b214d --- /dev/null +++ b/source/Functions/GetDataFromApiByTrigger.cs @@ -0,0 +1,34 @@ +using Core.Helpers; +using Functions.Services.Interfaces; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Extensions.Logging; + +namespace Functions +{ + public class GetDataFromApiByTrigger(IBatchService batchService, ILogger log) + { + [Function("GetDataFromApiByTriggerAsidLookup")] + public async Task GetDataFromAsidLookup( + [TimerTrigger("%GetDataFromApiByTriggerAsidLookupSchedule%", RunOnStartup = false)] + TimerInfo myTimer) + { + await batchService.StartBatchDownloadForTodayAsync(FileTypes.asidlookup); + } + + [Function("GetDataFromApiByTriggerSspTrans")] + public async Task GetDataFromSspTrans( + [TimerTrigger("%GetDataFromApiByTriggerSspTransSchedule%", RunOnStartup = false)] + TimerInfo myTimer) + { + await batchService.StartBatchDownloadForTodayAsync(FileTypes.ssptrans); + } + + [Function("GetDataFromApiByTriggerMeshTrans")] + public async Task GetDataFromMeshTrans( + [TimerTrigger("%GetDataFromApiByTriggerMeshTransSchedule%", RunOnStartup = false)] + TimerInfo myTimer) + { + await 
batchService.StartBatchDownloadForTodayAsync(FileTypes.meshtrans); + } + } +} \ No newline at end of file diff --git a/source/Functions/GetDataFromApiManual.cs b/source/Functions/GetDataFromApiManual.cs new file mode 100644 index 0000000..c8c79a0 --- /dev/null +++ b/source/Functions/GetDataFromApiManual.cs @@ -0,0 +1,42 @@ +using System.Net; +using Functions.Services.Interfaces; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.Extensions.Logging; + +namespace Functions +{ + public class GetDataFromApiManual(IImportService importService, ILogger log) + { + [Function("GetDataFromApiManual")] + public async Task AddDownloadedFile( + [HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] + HttpRequestData req) + { + var response = req.CreateResponse(); + response.Headers.Add("Content-Type", "application/text"); + try + { + var filePath = req.Query["FilePath"]; + if (string.IsNullOrEmpty(filePath)) + { + response.StatusCode = HttpStatusCode.BadRequest; + await response.WriteStringAsync("Filepath missing"); + return response; + } + + await importService.AddDownloadedFileManually(filePath); + response.StatusCode = HttpStatusCode.OK; + await response.WriteStringAsync("Successfully added files"); + return response; + } + catch (Exception ex) + { + log.LogError(ex, $"Error adding downloaded file: {ex.Message}"); + response.StatusCode = HttpStatusCode.InternalServerError; + await response.WriteStringAsync("Something went wrong - see error logs for more details"); + return response; + } + } + } +} \ No newline at end of file diff --git a/source/Functions/GetDataFromApiToday.cs b/source/Functions/GetDataFromApiToday.cs new file mode 100644 index 0000000..f4b69a4 --- /dev/null +++ b/source/Functions/GetDataFromApiToday.cs @@ -0,0 +1,98 @@ +using System.Net; +using Core.Helpers; +using Functions.Services.Interfaces; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.Extensions.Logging; + +namespace Functions +{ + public class GetDataFromApiToday(IBatchService batchService, ILogger log) + { + [Function("GetDataFromApiTodaySspTrans")] + public async Task GetDataFromSspTransByDateRange( + [HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] + HttpRequestData req) + { + var response = req.CreateResponse(); + response.Headers.Add("Content-Type", "application/text"); + + + var affectedCount = 0; + try + { + affectedCount = await batchService.StartBatchDownloadForTodayAsync(FileTypes.ssptrans); + } + catch (Exception ex) + { + log.LogError(ex, "An error occurred during batch download when processing urls"); + response.StatusCode = HttpStatusCode.InternalServerError; + await response.WriteStringAsync( + $"An error occurred whilst processing batch download - see logs for more details"); + + return response; + } + + response.StatusCode = HttpStatusCode.OK; + await response.WriteStringAsync($"Processed {affectedCount} requests"); + return response; + } + + [Function("GetDataFromApiTodayMeshTrans")] + public async Task GetDataFromMeshTransByDateRange( + [HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] + HttpRequestData req) + { + var response = req.CreateResponse(); + response.Headers.Add("Content-Type", "application/text"); + + + var affectedCount = 0; + try + { + affectedCount = await batchService.StartBatchDownloadForTodayAsync(FileTypes.meshtrans); + } + catch (Exception ex) + { + log.LogError(ex, "An error occurred during batch download when processing urls"); + 
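+                // Surfaces a generic 500 to the caller; the exception detail is only written to the logs above.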
response.StatusCode = HttpStatusCode.InternalServerError; + await response.WriteStringAsync( + $"An error occurred whilst processing batch download - see logs for more details"); + + return response; + } + + response.StatusCode = HttpStatusCode.OK; + await response.WriteStringAsync($"Processed {affectedCount} requests"); + return response; + } + + [Function("GetDataFromApiTodayAsidLookup")] + public async Task GetDataFromAsidLookupByDateRange( + [HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] + HttpRequestData req) + { + var response = req.CreateResponse(); + response.Headers.Add("Content-Type", "application/text"); + var affectedCount = 0; + try + { + affectedCount = await batchService.StartBatchDownloadForTodayAsync(FileTypes.asidlookup); + } + catch (Exception ex) + { + log.LogError(ex, "An error occurred during batch download when processing urls"); + response.StatusCode = HttpStatusCode.InternalServerError; + await response.WriteStringAsync( + $"An error occurred whilst processing batch download - see logs for more details"); + + + return response; + } + + response.StatusCode = HttpStatusCode.OK; + await response.WriteStringAsync($"Processed {affectedCount} requests"); + return response; + } + } +} \ No newline at end of file diff --git a/source/Functions/HelperClasses/FilePathHelper.cs b/source/Functions/HelperClasses/FilePathHelper.cs new file mode 100644 index 0000000..810d72a --- /dev/null +++ b/source/Functions/HelperClasses/FilePathHelper.cs @@ -0,0 +1,54 @@ +using System.Text; +using Core; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Splunk; +using Core.Helpers; +using Core.Services.Interfaces; + +namespace Functions.HelperClasses; + +public class FilePathHelper(IConfigurationService configurationService, ITimeProvider timeProvider, Extract extract) +{ + public async Task ConstructFilePath(SplunkInstance splunkInstance, FileType fileType, bool isToday, + bool setDateAsMidnight = false) + { + //TODO: fix this - replace with less convoluted approach to filePath building + + var filePathConstants = await configurationService.GetFilePathConstants(); + var filePathString = new StringBuilder(); + filePathString.Append(fileType.DirectoryName); + filePathString.Append(filePathConstants.PathSeparator); + filePathString.Append(splunkInstance.Source); + filePathString.Append(filePathConstants.PathSeparator); + filePathString.Append(extract.QueryFromDate.ToString(DateFormatConstants.FilePathQueryDateYearMonth)); + filePathString.Append(filePathConstants.PathSeparator); + filePathString.Append(filePathConstants.ProjectNameFilePrefix); + filePathString.Append(filePathConstants.ComponentSeparator); + filePathString.Append(fileType.FileTypeFilePrefix); + filePathString.Append(filePathConstants.ComponentSeparator); + filePathString.Append( + $"{extract.QueryFromDate.ToString(DateFormatConstants.FilePathQueryDate)}T{extract.QueryHour.ToString(DateFormatConstants.FilePathQueryHour)}"); + filePathString.Append(filePathConstants.ComponentSeparator); + filePathString.Append( + $"{extract.QueryToDate.ToString(DateFormatConstants.FilePathQueryDate)}T{extract.QueryHour.ToString(DateFormatConstants.FilePathQueryHour)}"); + filePathString.Append(filePathConstants.ComponentSeparator); + filePathString.Append(splunkInstance.Source); + filePathString.Append(filePathConstants.ComponentSeparator); + + //TODO: is the correct ? seems should be the other way round , or a simpler logic for Today Midnight, Today End of Day, or Right Now. 
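+        // Current behaviour: when isToday is false the extract-date component is either CurrentDate()
+        // (assumed here to be midnight today) or the exact UTC timestamp, depending on setDateAsMidnight;
+        // when isToday is true it is stamped as one second before midnight tonight
+        // (CurrentDate().AddDays(1).AddSeconds(-1)). Whether the isToday check is intentionally inverted
+        // is still an open question (see the TODO above).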
+ if (!isToday) + { + filePathString.Append(setDateAsMidnight + ? timeProvider.CurrentDate().ToString(DateFormatConstants.FilePathNowDate) + : timeProvider.UtcDateTime().ToString(DateFormatConstants.FilePathNowDate)); + } + else + { + filePathString.Append(timeProvider.CurrentDate().AddDays(1).AddSeconds(-1) + .ToString(DateFormatConstants.FilePathNowDate)); + } + + filePathString.Append(filePathConstants.FileExtension); + return filePathString.ToString(); + } +} \ No newline at end of file diff --git a/source/Functions/Program.cs b/source/Functions/Program.cs new file mode 100644 index 0000000..fc6366c --- /dev/null +++ b/source/Functions/Program.cs @@ -0,0 +1,54 @@ +using Core; +using Core.Repositories; +using Core.Services.Interfaces; +using Functions; +using Functions.Configuration; +using Functions.Configuration.Infrastructure.HttpClient; +using Functions.Configuration.Infrastructure.Logging; +using Functions.Configuration.Infrastructure.Mapping; +using Functions.Services; +using Functions.Services.Interfaces; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + + +var builder = Host.CreateDefaultBuilder(args) + .ConfigureFunctionsWorkerDefaults() + .ConfigureServices((context, services) => + { + // Configure your services here + MappingExtensions.ConfigureMappingServices(); + services.AddSingleton(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddScoped(); + services.AddSingleton(); + services.AddSingleton(); + services.AddScoped(); + + // Configure logging with email configuration provider + services.AddLogging(loggingBuilder => + { + var emailProvider = services.BuildServiceProvider().GetRequiredService(); + LoggingExtensions.ConfigureLoggingServices(loggingBuilder, context.Configuration, emailProvider); + }); + + // Configure HttpClient + services.AddHttpClient("SplunkApiClient", options => + HttpClientExtensions + .ConfigureHttpClient(options)) + .ConfigurePrimaryHttpMessageHandler(() => + HttpClientExtensions + .CreateHttpMessageHandler()); + }); + + +var host = builder.Build(); +host.Run(); \ No newline at end of file diff --git a/source/gpconnect-analytics.Functions/Properties/launchSettings.json b/source/Functions/Properties/launchSettings.json similarity index 100% rename from source/gpconnect-analytics.Functions/Properties/launchSettings.json rename to source/Functions/Properties/launchSettings.json diff --git a/source/Functions/PurgeErrorLogByTrigger.cs b/source/Functions/PurgeErrorLogByTrigger.cs new file mode 100644 index 0000000..51e401e --- /dev/null +++ b/source/Functions/PurgeErrorLogByTrigger.cs @@ -0,0 +1,16 @@ +using Functions.Services.Interfaces; +using Microsoft.Azure.Functions.Worker; + +namespace Functions +{ + public class PurgeErrorLogByTrigger(ILoggingService loggingService) + { + [Function("PurgeErrorLogByTrigger")] + public async Task PurgeErrorLog( + [TimerTrigger("%PurgeErrorLogByTriggerSchedule%", RunOnStartup = false)] + TimerInfo myTimer) + { + await loggingService.PurgeErrorLog(); + } + } +} \ No newline at end of file diff --git a/source/Functions/Services/BatchService.cs b/source/Functions/Services/BatchService.cs new file mode 100644 index 0000000..3b2d526 --- /dev/null +++ b/source/Functions/Services/BatchService.cs @@ -0,0 +1,146 @@ +using System.Web; +using Core.DTOs.Request; +using Core.DTOs.Response.Configuration; 
+using Core.Helpers; +using Core.Services.Interfaces; +using Dapper; +using Functions.Services.Interfaces; +using Microsoft.Extensions.Logging; + +namespace Functions.Services +{ + public class BatchService( + IConfigurationService configurationService, + ISplunkService splunkService, + ILogger logger, + IDataService dataService) + : IBatchService + { + private SplunkClient _splunkClient; + + public async Task StartBatchDownloadForTodayAsync(FileTypes fileTypes) + { + var dateInScope = DateTime.Today.AddDays(1); + var fileType = await configurationService.GetFileType(fileTypes); + var uriList = + await GetBatchDownloadUriList(fileType, DateTimeHelper.EachDay(dateInScope, dateInScope).ToList()); + + await RemovePreviousDownloads(fileType, dateInScope, dateInScope); + + await ProcessUrls(fileType, uriList, true); + return uriList.Count; + } + + public async Task StartBatchDownloadAsync(FileTypes fileTypes, string? startDate, string? endDate) + { + if (string.IsNullOrWhiteSpace(startDate) || string.IsNullOrWhiteSpace(endDate)) + { + logger.LogError("Start and end dates are required for batch download"); + throw new ArgumentException("Start and end dates are required for batch download"); + } + + var start = DateTime.TryParse(startDate, out DateTime parsedStart) + ? parsedStart + : DateTime.Today; + var end = DateTime.TryParse(endDate, out DateTime parsedEnd) + ? parsedEnd + : DateTime.Today; + + if (parsedEnd >= parsedStart) + { + var fileType = await configurationService.GetFileType(fileTypes); + var uriList = + await GetBatchDownloadUriList(fileType, DateTimeHelper.EachDay(start, end).ToList()); + + await RemovePreviousDownloads(fileType, start, end); + + try + { + await ProcessUrls(fileType, uriList, false); + return uriList.Count; + } + catch (Exception ex) + { + logger.LogError(ex, "An error occurred during batch download when processing urls"); + throw; + } + } + + logger.LogError("Start date cannot be later than end date"); + throw new ArgumentException("Start date cannot be later than end date"); + } + + private async Task ProcessUrls(FileType fileType, List uriList, bool isToday) + { + var downloadTasks = new List(); + + // Create and start all download tasks + for (var i = 0; i < uriList.Count; i++) + { + var requestUri = uriList[i]; + downloadTasks.Add(splunkService.ExecuteBatchDownloadFromSplunk(fileType, requestUri, isToday)); + } + + // Wait for tasks to complete + while (downloadTasks.Count > 0) + { + var finishedTask = await Task.WhenAny(downloadTasks); + downloadTasks.Remove(finishedTask); + } + } + + public async Task> GetBatchDownloadUriList(FileType fileType, List dateTimeList) + { + var uriList = new List(); + _splunkClient = await configurationService.GetSplunkClientConfiguration(); + + foreach (var dateTime in dateTimeList) + { + var earliestDate = dateTime.AddDays(-2); + var latestDate = dateTime.AddDays(-1); + + for (var i = 0; i < 24; i++) + { + var splunkQuery = fileType.SplunkQuery; + var hour = TimeSpan.Zero.Add(TimeSpan.FromHours(i)); + + splunkQuery = splunkQuery.Replace("{earliest}", + earliestDate.ToString(DateFormatConstants.SplunkQueryDate)); + splunkQuery = splunkQuery.Replace("{latest}", + latestDate.ToString(DateFormatConstants.SplunkQueryDate)); + splunkQuery = splunkQuery.Replace("{hour}", + hour.ToString(DateFormatConstants.SplunkQueryHour)); + + var uriBuilder = new UriBuilder + { + Scheme = Uri.UriSchemeHttps, + Host = _splunkClient.HostName, + Port = _splunkClient.HostPort, + Path = _splunkClient.BaseUrl, + Query = 
string.Format(_splunkClient.QueryParameters, HttpUtility.UrlEncode(splunkQuery)) + }; + + uriList.Add(new UriRequest() + { + Request = uriBuilder.Uri, + EarliestDate = earliestDate, + LatestDate = latestDate, + Hour = hour + }); + } + } + + return uriList; + } + + public async Task RemovePreviousDownloads(FileType fileType, DateTime startDate, DateTime endDate) + { + var procedureName = "Import.RemovePreviousDownload"; + var parameters = new DynamicParameters(); + parameters.Add("@FileTypeId", fileType.FileTypeId); + parameters.Add("@StartDate", startDate.AddDays(-2)); + parameters.Add("@EndDate", endDate.AddDays(-1)); + await dataService.ExecuteStoredProcedure(procedureName, parameters); + } + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/BlobService.cs b/source/Functions/Services/BlobService.cs similarity index 56% rename from source/gpconnect-analytics.DAL/BlobService.cs rename to source/Functions/Services/BlobService.cs index 9ffb709..cec51fe 100644 --- a/source/gpconnect-analytics.DAL/BlobService.cs +++ b/source/Functions/Services/BlobService.cs @@ -1,49 +1,51 @@ -using Azure; +using System.Text; +using System.Text.Json; +using Azure; using Azure.Storage.Blobs; using Azure.Storage.Blobs.Models; using Azure.Storage.Queues; -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.DTO.Response.Splunk; -using gpconnect_analytics.Helpers; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Queue; +using Core.DTOs.Response.Splunk; +using Core.Services.Interfaces; +using Functions.Services.Interfaces; using Microsoft.Extensions.Logging; -using Newtonsoft.Json; -using System; -using System.Threading.Tasks; -namespace gpconnect_analytics.DAL +namespace Functions.Services { public class BlobService : IBlobService { private readonly ILogger _logger; - private readonly IConfigurationService _configurationService; private readonly BlobStorage _blobStorageConfiguration; private readonly QueueClient _queueClient; private readonly BlobServiceClient _blobServiceClient; - public BlobService(IConfigurationService configurationService, ILogger logger) + public BlobService(IConfigurationService configurationService, ILogger logger, + QueueClient? queueClient) { _logger = logger; - _configurationService = configurationService; - _blobStorageConfiguration = _configurationService.GetBlobStorageConfiguration().Result; + _blobStorageConfiguration = configurationService.GetBlobStorageConfiguration().Result; _blobServiceClient = new BlobServiceClient(_blobStorageConfiguration.ConnectionString); - _queueClient = new QueueClient(_blobStorageConfiguration.ConnectionString, _blobStorageConfiguration.QueueName); + _queueClient = queueClient ?? 
new QueueClient(_blobStorageConfiguration.ConnectionString, + _blobStorageConfiguration.QueueName); } - public async Task AddObjectToBlob(ExtractResponse extractResponse) + public async Task AddObjectToBlob(ExtractResponse extractResponse) { _logger.LogInformation($"Adding object to blob storage", extractResponse); try { - var containerClient = _blobServiceClient.GetBlobContainerClient(_blobStorageConfiguration.ContainerName); - if (await containerClient.ExistsAsync()) - { - var blobClient = containerClient.GetBlobClient(extractResponse.FilePath); - var uploadedBlob = await blobClient.UploadAsync(extractResponse.ExtractResponseStream, overwrite: true); - return uploadedBlob; - } - return null; + var containerClient = + _blobServiceClient.GetBlobContainerClient(_blobStorageConfiguration.ContainerName); + + if (!await containerClient.ExistsAsync()) return null; + + var blobClient = containerClient.GetBlobClient(extractResponse.FilePath); + var response = + await blobClient.UploadAsync(extractResponse.ExtractResponseStream, overwrite: true); + + return response; } catch (RequestFailedException requestFailedException) { @@ -57,22 +59,23 @@ public async Task AddObjectToBlob(ExtractResponse extractRespon } } - public async Task AddMessageToBlobQueue(int fileAddedCount, int fileTypeId, string blobName, bool overrideEntry = false) + public async Task AddMessageToBlobQueue(int fileAddedCount, int fileTypeId, string blobName, + bool overrideEntry = false) { try { if ((await _queueClient.ExistsAsync()) && fileAddedCount == 1) { - var message = new DTO.Response.Queue.Message + var message = new Message { FileTypeId = fileTypeId, BlobName = blobName, Override = overrideEntry }; - var messageText = JsonConvert.SerializeObject(message); + var messageText = JsonSerializer.Serialize(message); _logger.LogInformation($"Adding message to blob queue", message); - await _queueClient.SendMessageAsync(messageText.StringToBase64()); + await _queueClient.SendMessageAsync(Convert.ToBase64String(Encoding.UTF8.GetBytes(messageText))); } } catch (RequestFailedException requestFailedException) @@ -87,4 +90,4 @@ public async Task AddMessageToBlobQueue(int fileAddedCount, int fileTypeId, stri } } } -} +} \ No newline at end of file diff --git a/source/Functions/Services/ConfigurationService.cs b/source/Functions/Services/ConfigurationService.cs new file mode 100644 index 0000000..76dfa60 --- /dev/null +++ b/source/Functions/Services/ConfigurationService.cs @@ -0,0 +1,61 @@ +using Core.DTOs.Response.Configuration; +using Core.Helpers; +using Core.Services.Interfaces; +using Microsoft.Extensions.Logging; + +namespace Functions.Services +{ + public class ConfigurationService( + IDataService dataService, + ILogger logger) + : IConfigurationService + { + public async Task GetBlobStorageConfiguration() + { + var result = + await dataService.ExecuteQueryStoredProcedure( + "[Configuration].[GetBlobStorageConfiguration]"); + logger.LogInformation("Loading blob storage configuration", result.FirstOrDefault()); + return result.FirstOrDefault(); + } + + public async Task GetFilePathConstants() + { + var result = + await dataService.ExecuteQueryStoredProcedure( + "[Configuration].[GetFilePathConstants]"); + logger.LogInformation("Loading file path constants", result.FirstOrDefault()); + return result.FirstOrDefault(); + } + + public async Task> GetFileTypes() + { + var result = await dataService.ExecuteQueryStoredProcedure("[Configuration].[GetFileTypes]"); + logger.LogInformation("Loading file types", result); + return result; + } + 
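+        // Resolves a single FileType by matching the configured FileTypeFilePrefix against the enum name,
+        // e.g. FileTypes.asidlookup maps to the configuration row whose prefix is "asidlookup".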
+ public async Task GetFileType(FileTypes fileTypes) + { + var result = await dataService.ExecuteQueryStoredProcedure("[Configuration].[GetFileTypes]"); + return result.FirstOrDefault(ft => ft.FileTypeFilePrefix == fileTypes.ToString()); + } + + public async Task GetSplunkClientConfiguration() + { + var result = + await dataService.ExecuteQueryStoredProcedure( + "[Configuration].[GetSplunkClientConfiguration]"); + logger.LogInformation("Loading splunk client configuration", result.FirstOrDefault()); + return result.FirstOrDefault(); + } + + public async Task GetSplunkInstance(SplunkInstances splunkInstance) + { + var result = + await dataService.ExecuteQueryStoredProcedure("[Configuration].[GetSplunkInstances]"); + logger.LogInformation("Loading splunk instance", result); + return result.FirstOrDefault(x => x.Source == splunkInstance.ToString()); + } + } +} \ No newline at end of file diff --git a/source/Functions/Services/CoreConfigurationService.cs b/source/Functions/Services/CoreConfigurationService.cs new file mode 100644 index 0000000..9790614 --- /dev/null +++ b/source/Functions/Services/CoreConfigurationService.cs @@ -0,0 +1,14 @@ +using Core.Services.Interfaces; +using Microsoft.Extensions.Configuration; + +namespace Functions.Services +{ + public class CoreConfigurationService(IConfiguration configuration) : ICoreConfigurationService + { + public string GetConnectionString(string name) + { + var connectionString = configuration.GetConnectionString(name); + return connectionString ?? throw new ArgumentException("No connection string with given name"); + } + } +} \ No newline at end of file diff --git a/source/Functions/Services/DataService.cs b/source/Functions/Services/DataService.cs new file mode 100644 index 0000000..463b4da --- /dev/null +++ b/source/Functions/Services/DataService.cs @@ -0,0 +1,113 @@ +using Core; +using Core.Helpers; +using Core.Repositories; +using Core.Services.Interfaces; +using Dapper; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging; + +namespace Functions.Services +{ + public class DataService( + ILogger logger, + ICoreConfigurationService coreConfigurationService, + IDapperWrapper dapper, + IConnectionFactory connectionFactory) + : IDataService + { + private readonly string _connectionString = + coreConfigurationService.GetConnectionString(ConnectionStrings.GpConnectAnalytics); + + public async Task ExecuteRawUpsertSqlAsync(string sqlCommand, object parameters) + { + try + { + await using var sqlConnection = connectionFactory.CreateConnection(_connectionString); + await sqlConnection.OpenAsync(); + logger.LogInformation($"Executing raw SQL command"); + var rowsAffected = await dapper.ExecuteAsync(sqlConnection, sqlCommand, parameters); + return rowsAffected; + } + catch (Exception ex) + { + logger.LogError(ex, $"An error has occurred while executing the raw SQL command: {sqlCommand}"); + throw; + } + } + + public async Task> ExecuteQueryStoredProcedure(string procedureName, DynamicParameters parameters) + where T : class + { + await using var sqlConnection = connectionFactory.CreateConnection(_connectionString); + try + { + if (sqlConnection is SqlConnection connection) + { + connection.InfoMessage += SqlConnection_InfoMessage; + } + + logger.LogInformation($"Executing stored procedure {procedureName}", parameters); + + var results = await dapper.QueryStoredProcedureAsync(sqlConnection, procedureName, parameters, 0); + return results.AsList(); + } + catch (Exception exc) + { + logger.LogError(exc, $"An error has occurred while 
attempting to execute the stored procedure {procedureName}"); + throw; + } + } + + + public async Task ExecuteStoredProcedureWithOutputParameters(string procedureName, + DynamicParameters parameters) + { + await using var sqlConnection = connectionFactory.CreateConnection(_connectionString); + try + { + if (sqlConnection is SqlConnection connection) + { + connection.InfoMessage += SqlConnection_InfoMessage; + } + + logger.LogInformation($"Executing stored procedure {procedureName}", parameters); + await dapper.ExecuteStoredProcedureAsync(sqlConnection, procedureName, parameters, + 0); + return parameters; + } + catch (Exception exc) + { + logger?.LogError(exc, + $"An error has occurred while attempting to execute the stored procedure {procedureName}"); + throw; + } + } + + public async Task ExecuteStoredProcedure(string procedureName, DynamicParameters parameters) + { + await using var sqlConnection = connectionFactory.CreateConnection(_connectionString); + try + { + if (sqlConnection is SqlConnection connection) + { + connection.InfoMessage += SqlConnection_InfoMessage; + } + + logger.LogInformation($"Executing stored procedure {procedureName}", parameters); + var result = await dapper.ExecuteAsync(sqlConnection, procedureName, parameters); + return result; + } + catch (Exception exc) + { + logger?.LogError(exc, + $"An error has occurred while attempting to execute the stored procedure {procedureName}"); + throw; + } + } + + private void SqlConnection_InfoMessage(object sender, SqlInfoMessageEventArgs e) + { + logger?.LogInformation(e.Message); + } + } +} \ No newline at end of file diff --git a/source/Functions/Services/FileService.cs b/source/Functions/Services/FileService.cs new file mode 100644 index 0000000..f764463 --- /dev/null +++ b/source/Functions/Services/FileService.cs @@ -0,0 +1,19 @@ +using Core.Services.Interfaces; +using Dapper; +using Functions.Services.Interfaces; + +namespace Functions.Services; + +public class FileService(IDataService dataService) : IFileService +{ + public async Task ApiReaderAddFile(int fileTypeId, string filePath, bool overrideFile) + { + const string procedureName = "ApiReader.AddFile"; + var parameters = new DynamicParameters(); + parameters.Add("@FileTypeId", fileTypeId); + parameters.Add("@FilePath", filePath); + parameters.Add("@Override", overrideFile); + var result = await dataService.ExecuteStoredProcedure(procedureName, parameters); + return result; + } +} \ No newline at end of file diff --git a/source/Functions/Services/ImportService.cs b/source/Functions/Services/ImportService.cs new file mode 100644 index 0000000..36fa134 --- /dev/null +++ b/source/Functions/Services/ImportService.cs @@ -0,0 +1,84 @@ +using System.Data; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Queue; +using Core.DTOs.Response.Splunk; +using Core.Helpers; +using Core.Services.Interfaces; +using Dapper; +using Functions.Services.Interfaces; +using Microsoft.Extensions.Logging; + +namespace Functions.Services +{ + public class ImportService( + IConfigurationService configurationService, + IDataService dataService, + IBlobService blobService, + ILogger logger, + IFileService fileService) + : IImportService + { + public async Task AddDownloadedFileManually(string filePath) + { + var fileTypeFromPath = filePath.GetFileType(); + + if (fileTypeFromPath == null) + { + throw new ArgumentException("Filepath does not contain a valid file type suffix"); + } + + var fileType = await configurationService.GetFileType((FileTypes)fileTypeFromPath); + var fileAddedCount = + await
fileService.ApiReaderAddFile(fileType.FileTypeId, filePath, true); + + await blobService.AddMessageToBlobQueue(fileAddedCount, fileType.FileTypeId, filePath, + true); + } + + public async Task AddObjectFileMessage(FileType fileType, ExtractResponse extractResponse) + { + if (extractResponse.ExtractResponseMessage.StatusCode == System.Net.HttpStatusCode.OK) + { + var uploadedBlob = await blobService.AddObjectToBlob(extractResponse); + if (uploadedBlob != null) + { + var fileAddedCount = + await fileService.ApiReaderAddFile(fileType.FileTypeId, extractResponse.FilePath, true); + + await blobService.AddMessageToBlobQueue(fileAddedCount, fileType.FileTypeId, + extractResponse.FilePath, + true); + } + } + else + { + logger?.LogWarning(extractResponse?.ExtractResponseMessage.ToString()); + } + } + + + public async Task InstallData(Message queueItem) + { + var moreFilesToInstall = true; + const string procedureName = "Import.InstallNextFile"; + var parameters = new DynamicParameters(); + parameters.Add("@FileTypeId", queueItem.FileTypeId); + if (queueItem.Override) + { + parameters.Add("@Override", queueItem.Override, dbType: DbType.Boolean, + direction: ParameterDirection.Input); + } + + parameters.Add("@MoreFilesToInstall", dbType: DbType.Boolean, direction: ParameterDirection.Output); + + while (moreFilesToInstall) + { + logger.LogInformation($"Installing file into database", parameters); + var result = await dataService.ExecuteStoredProcedureWithOutputParameters(procedureName, parameters); + + moreFilesToInstall = result.Get("@MoreFilesToInstall"); + logger.LogInformation($"More files to install? {moreFilesToInstall}"); + } + } + } +} \ No newline at end of file diff --git a/source/Functions/Services/Interfaces/IBatchService.cs b/source/Functions/Services/Interfaces/IBatchService.cs new file mode 100644 index 0000000..c65b60f --- /dev/null +++ b/source/Functions/Services/Interfaces/IBatchService.cs @@ -0,0 +1,14 @@ +using Core.DTOs.Request; +using Core.DTOs.Response.Configuration; +using Core.Helpers; + +namespace Functions.Services.Interfaces +{ + public interface IBatchService + { + Task> GetBatchDownloadUriList(FileType fileType, List dateTimeList); + Task RemovePreviousDownloads(FileType fileType, DateTime startDate, DateTime endDate); + Task StartBatchDownloadForTodayAsync(FileTypes fileTypes); + Task StartBatchDownloadAsync(FileTypes fileTypes, string? startDate, string? 
endDate); + } +} \ No newline at end of file diff --git a/source/Functions/Services/Interfaces/IBlobService.cs b/source/Functions/Services/Interfaces/IBlobService.cs new file mode 100644 index 0000000..7b6817c --- /dev/null +++ b/source/Functions/Services/Interfaces/IBlobService.cs @@ -0,0 +1,11 @@ +using Azure.Storage.Blobs.Models; +using Core.DTOs.Response.Splunk; + +namespace Functions.Services.Interfaces +{ + public interface IBlobService + { + Task AddMessageToBlobQueue(int fileAddedCount, int fileTypeId, string blobName, bool overrideEntry = false); + Task AddObjectToBlob(ExtractResponse extractResponse); + } +} \ No newline at end of file diff --git a/source/Functions/Services/Interfaces/IFileService.cs b/source/Functions/Services/Interfaces/IFileService.cs new file mode 100644 index 0000000..c569467 --- /dev/null +++ b/source/Functions/Services/Interfaces/IFileService.cs @@ -0,0 +1,6 @@ +namespace Functions.Services.Interfaces; + +public interface IFileService +{ + Task ApiReaderAddFile(int fileTypeID, string filePath, bool overrideFile); +} \ No newline at end of file diff --git a/source/Functions/Services/Interfaces/IImportService.cs b/source/Functions/Services/Interfaces/IImportService.cs new file mode 100644 index 0000000..3594bef --- /dev/null +++ b/source/Functions/Services/Interfaces/IImportService.cs @@ -0,0 +1,13 @@ +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Queue; +using Core.DTOs.Response.Splunk; + +namespace Functions.Services.Interfaces +{ + public interface IImportService + { + Task InstallData(Message message); + Task AddDownloadedFileManually(string filePath); + Task AddObjectFileMessage(FileType fileType, ExtractResponse extractResponse); + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/Interfaces/ILoggingService.cs b/source/Functions/Services/Interfaces/ILoggingService.cs similarity index 50% rename from source/gpconnect-analytics.DAL/Interfaces/ILoggingService.cs rename to source/Functions/Services/Interfaces/ILoggingService.cs index 545973d..728dac2 100644 --- a/source/gpconnect-analytics.DAL/Interfaces/ILoggingService.cs +++ b/source/Functions/Services/Interfaces/ILoggingService.cs @@ -1,9 +1,7 @@ -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL.Interfaces +namespace Functions.Services.Interfaces { public interface ILoggingService { Task PurgeErrorLog(); } -} +} \ No newline at end of file diff --git a/source/Functions/Services/Interfaces/ISplunkService.cs b/source/Functions/Services/Interfaces/ISplunkService.cs new file mode 100644 index 0000000..550ba11 --- /dev/null +++ b/source/Functions/Services/Interfaces/ISplunkService.cs @@ -0,0 +1,12 @@ +using Core.DTOs.Request; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Splunk; + +namespace Functions.Services.Interfaces +{ + public interface ISplunkService + { + Task DownloadCSVDateRangeAsync(FileType fileType, UriRequest uriRequest, bool isToday); + Task ExecuteBatchDownloadFromSplunk(FileType fileType, UriRequest uriRequest, bool isToday); + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/LoggingService.cs b/source/Functions/Services/LoggingService.cs similarity index 79% rename from source/gpconnect-analytics.DAL/LoggingService.cs rename to source/Functions/Services/LoggingService.cs index 52c7d5b..a6c54c8 100644 --- a/source/gpconnect-analytics.DAL/LoggingService.cs +++ b/source/Functions/Services/LoggingService.cs @@ -1,7 +1,7 @@ -using gpconnect_analytics.DAL.Interfaces; -using 
System.Threading.Tasks; +using Core.Services.Interfaces; +using Functions.Services.Interfaces; -namespace gpconnect_analytics.DAL +namespace Functions.Services { public class LoggingService : ILoggingService { @@ -18,4 +18,4 @@ public async Task PurgeErrorLog() await _dataService.ExecuteStoredProcedure(procedureName); } } -} +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/SplunkService.cs b/source/Functions/Services/SplunkService.cs similarity index 55% rename from source/gpconnect-analytics.DAL/SplunkService.cs rename to source/Functions/Services/SplunkService.cs index 14fd682..82aa31e 100644 --- a/source/gpconnect-analytics.DAL/SplunkService.cs +++ b/source/Functions/Services/SplunkService.cs @@ -1,47 +1,57 @@ -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.DTO.Request; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.DTO.Response.Splunk; -using Microsoft.Extensions.Logging; -using System; -using System.IdentityModel.Tokens.Jwt; -using System.Net.Http; +using System.IdentityModel.Tokens.Jwt; using System.Net.Http.Headers; using System.Text; -using System.Threading.Tasks; +using Core; +using Core.DTOs.Request; +using Core.DTOs.Response.Configuration; +using Core.DTOs.Response.Splunk; +using Core.Helpers; +using Core.Services.Interfaces; +using Functions.HelperClasses; +using Functions.Services.Interfaces; +using Microsoft.Extensions.Logging; -namespace gpconnect_analytics.DAL +namespace Functions.Services { public class SplunkService : ISplunkService { - private readonly IConfigurationService _configurationService; - private readonly IHttpClientFactory _httpClientFactory; - private readonly ILogger _logger; private SplunkClient _splunkClient; private FilePathConstants _filePathConstants; private Extract _extract; - public SplunkService(IConfigurationService configurationService, IHttpClientFactory httpClientFactory, ILogger logger) + private FilePathHelper filePathHelper; + private readonly IConfigurationService _configurationService; + private readonly IHttpClientFactory _httpClientFactory; + private readonly IImportService _importService; + private readonly ILogger _logger; + + public SplunkService(IConfigurationService configurationService, + IHttpClientFactory httpClientFactory, + IImportService importService, + ILogger logger, + ITimeProvider timeProvider) { + _extract = new(); _configurationService = configurationService; - _logger = logger; _httpClientFactory = httpClientFactory; - _extract = new Extract(); + _importService = importService; + _logger = logger; + filePathHelper = new FilePathHelper(configurationService, timeProvider, _extract); } - public async Task DownloadCSVDateRangeAsync(FileType fileType, UriRequest uriRequest, bool isToday) + public async Task DownloadCSVDateRangeAsync(FileType fileType, UriRequest uriRequest, + bool isToday) { try { - _filePathConstants = await _configurationService.GetFilePathConstants(); - var splunkInstance = await _configurationService.GetSplunkInstance(Helpers.SplunkInstances.cloud); + var splunkInstance = await _configurationService.GetSplunkInstance(SplunkInstances.cloud); _extract.Override = true; _extract.QueryFromDate = uriRequest.EarliestDate; _extract.QueryToDate = uriRequest.LatestDate; _extract.QueryHour = uriRequest.Hour; - var filePath = ConstructFilePath(splunkInstance, fileType, isToday, true); + var filePath = await filePathHelper.ConstructFilePath(splunkInstance, fileType, isToday, true); var extractResponse = await 
GetSearchResultFromRequestUri(uriRequest); extractResponse.FilePath = filePath; @@ -59,9 +69,9 @@ public async Task DownloadCSVDateRangeAsync(FileType fileType, _logger.LogError(exc, "An error occurred in trying to execute a GET request"); throw; } - } + } - private async Task GetSearchResultFromRequestUri(UriRequest uriRequest) + public async Task GetSearchResultFromRequestUri(UriRequest uriRequest) { var extractResponseMessage = new ExtractResponse { @@ -72,10 +82,11 @@ private async Task GetSearchResultFromRequestUri(UriRequest uri _splunkClient = await _configurationService.GetSplunkClientConfiguration(); var apiTokenExpiry = HasApiTokenExpired(_splunkClient.ApiToken); - if (!apiTokenExpiry.Item1) + if (!apiTokenExpiry.Expired) { var client = _httpClientFactory.CreateClient("SplunkApiClient"); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", _splunkClient.ApiToken); + client.DefaultRequestHeaders.Authorization = + new AuthenticationHeaderValue("Bearer", _splunkClient.ApiToken); client.Timeout = new TimeSpan(0, 0, _splunkClient.QueryTimeout); var httpRequestMessage = new HttpRequestMessage(HttpMethod.Get, uriRequest.Request); @@ -89,7 +100,8 @@ private async Task GetSearchResultFromRequestUri(UriRequest uri } else { - extractResponseMessage.ExtractResponseMessage.ReasonPhrase = $"The authentication token has expired because it is valid up to {apiTokenExpiry.Item2}"; + extractResponseMessage.ExtractResponseMessage.ReasonPhrase = + "The authentication token has expired"; extractResponseMessage.ExtractResponseMessage.StatusCode = System.Net.HttpStatusCode.Unauthorized; } } @@ -101,46 +113,45 @@ private async Task GetSearchResultFromRequestUri(UriRequest uri catch (Exception exc) { extractResponseMessage.ExtractResponseMessage.ReasonPhrase = exc.Message; - extractResponseMessage.ExtractResponseMessage.StatusCode = System.Net.HttpStatusCode.InternalServerError; + extractResponseMessage.ExtractResponseMessage.StatusCode = + System.Net.HttpStatusCode.InternalServerError; } + return extractResponseMessage; } - private string ConstructFilePath(SplunkInstance splunkInstance, FileType fileType, bool isToday, bool setDateAsMidnight = false) + + public async Task ExecuteBatchDownloadFromSplunk(FileType fileType, UriRequest uriRequest, bool isToday) { - var filePathString = new StringBuilder(); - filePathString.Append(fileType.DirectoryName); - filePathString.Append(_filePathConstants.PathSeparator); - filePathString.Append(splunkInstance.Source); - filePathString.Append(_filePathConstants.PathSeparator); - filePathString.Append(_extract.QueryFromDate.ToString(Helpers.DateFormatConstants.FilePathQueryDateYearMonth)); - filePathString.Append(_filePathConstants.PathSeparator); - filePathString.Append(_filePathConstants.ProjectNameFilePrefix); - filePathString.Append(_filePathConstants.ComponentSeparator); - filePathString.Append(fileType.FileTypeFilePrefix); - filePathString.Append(_filePathConstants.ComponentSeparator); - filePathString.Append($"{_extract.QueryFromDate.ToString(Helpers.DateFormatConstants.FilePathQueryDate)}T{_extract.QueryHour.ToString(Helpers.DateFormatConstants.FilePathQueryHour)}"); - filePathString.Append(_filePathConstants.ComponentSeparator); - filePathString.Append($"{_extract.QueryToDate.ToString(Helpers.DateFormatConstants.FilePathQueryDate)}T{_extract.QueryHour.ToString(Helpers.DateFormatConstants.FilePathQueryHour)}"); - filePathString.Append(_filePathConstants.ComponentSeparator); - filePathString.Append(splunkInstance.Source); - 
filePathString.Append(_filePathConstants.ComponentSeparator); - if (!isToday) + try { - filePathString.Append(setDateAsMidnight ? DateTime.Today.ToString(Helpers.DateFormatConstants.FilePathNowDate) : DateTime.UtcNow.ToString(Helpers.DateFormatConstants.FilePathNowDate)); + if (FileTypeEnabled(fileType)) + { + var extractResponse = await DownloadCSVDateRangeAsync(fileType, uriRequest, isToday); + await _importService.AddObjectFileMessage(fileType, extractResponse); + } + else + { + _logger?.LogWarning( + $"Filetype {fileType.FileTypeFilePrefix} is not enabled. Please check if this is correct"); + } } - else + catch (Exception exc) { - filePathString.Append(DateTime.Today.AddDays(1).AddSeconds(-1).ToString(Helpers.DateFormatConstants.FilePathNowDate)); + _logger?.LogError(exc, $"An error has occurred while attempting to execute an Azure function"); + throw; } - filePathString.Append(_filePathConstants.FileExtension); - return filePathString.ToString(); } - private (bool, DateTime) HasApiTokenExpired(string apiToken) + private (bool Expired, DateTime ValidTo) HasApiTokenExpired(string apiToken) { var jwtToken = new JwtSecurityToken(apiToken); return (DateTime.UtcNow > jwtToken.ValidTo, jwtToken.ValidTo); } + + internal static bool FileTypeEnabled(FileType fileType) + { + return fileType is { Enabled: true }; + } } -} +} \ No newline at end of file diff --git a/source/Functions/SqlConnectionFactory.cs b/source/Functions/SqlConnectionFactory.cs new file mode 100644 index 0000000..a83f328 --- /dev/null +++ b/source/Functions/SqlConnectionFactory.cs @@ -0,0 +1,13 @@ +using System.Data.Common; +using Core; +using Microsoft.Data.SqlClient; + +namespace Functions; + +public class SqlConnectionFactory : IConnectionFactory +{ + public DbConnection CreateConnection(string connectionString) + { + return new SqlConnection(connectionString); + } +} \ No newline at end of file diff --git a/source/Functions/StoreProviderConsumerData.cs b/source/Functions/StoreProviderConsumerData.cs new file mode 100644 index 0000000..98a6839 --- /dev/null +++ b/source/Functions/StoreProviderConsumerData.cs @@ -0,0 +1,68 @@ +using System.Net; +using System.Text.Json; +using Core.DTOs.Request; +using Core.Repositories; +using Microsoft.Azure.Functions.Worker; +using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.Extensions.Logging; + +namespace Functions +{ + public class StoreProviderConsumerData( + IHierarchyProviderConsumerRepo repository, + ILogger logger) + { + [Function("StoreProviderConsumerData")] + public async Task Run( + [HttpTrigger(AuthorizationLevel.Function, "post", Route = "StoreProviderConsumerData")] + HttpRequestData req) + { + logger.LogInformation("Processing HTTP request."); + + var requestBody = await new StreamReader(req.Body).ReadToEndAsync(); + + if (requestBody.Length == 0) + { + var errorLengthResponse = req.CreateResponse(HttpStatusCode.BadRequest); + errorLengthResponse.Headers.Add("Content-Type", "text/plain; charset=utf-8"); + await errorLengthResponse.WriteStringAsync("Request body length is 0"); + return errorLengthResponse; + } + + List records = null; + try + { + records = JsonSerializer.Deserialize>(requestBody) ?? 
throw new + InvalidOperationException("Unable to deserialize input"); + } + catch (JsonException ex) + { + logger.LogError("Failed to deserialize request body: {exceptionMessage}", ex.Message); + var serializeErrorResponse = req.CreateResponse(HttpStatusCode.BadRequest); + serializeErrorResponse.Headers.Add("Content-Type", "text/plain; charset=utf-8"); + await serializeErrorResponse.WriteStringAsync("Invalid JSON input"); + return serializeErrorResponse; + } + + + logger.LogInformation($"Attempting to save {records.Count} items into the database"); + var count = await repository.InsertHierarchyProviderConsumers(records); + + if (count > 0) + { + var successResponse = req.CreateResponse(HttpStatusCode.OK); + successResponse.Headers.Add("Content-Type", "text/plain; charset=utf-8"); + await successResponse.WriteStringAsync("Storing of items successful"); + return successResponse; + } + + var response = req.CreateResponse(HttpStatusCode.BadRequest); + response.Headers.Add("Content-Type", "text/plain; charset=utf-8"); + + logger.LogInformation($"None of the {records.Count} items were saved to the database"); + + await response.WriteStringAsync("Failed to save to the database - see logs for more information"); + return response; + } + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.Functions/host.json b/source/Functions/host.json similarity index 61% rename from source/gpconnect-analytics.Functions/host.json rename to source/Functions/host.json index c07c3c7..6e338e5 100644 --- a/source/gpconnect-analytics.Functions/host.json +++ b/source/Functions/host.json @@ -1,11 +1,6 @@ { "version": "2.0", "functionTimeout": "12:00:00", - "Values": { - "GetDataFromApiByTriggerAsidLookupSchedule": "0 0 2 1-7 * MON", - "GetDataFromApiByTriggerSspTransSchedule": "0 0 3 * * *", - "GetDataFromApiByTriggerMeshTransSchedule": "0 0 4 * * *" - }, "logging": { "applicationInsights": { "samplingSettings": { diff --git a/source/Functions/nlog.config.xml b/source/Functions/nlog.config.xml new file mode 100644 index 0000000..dcabb8f --- /dev/null +++ b/source/Functions/nlog.config.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/source/IntegrationTests/DapperTestSetupFixture.cs b/source/IntegrationTests/DapperTestSetupFixture.cs new file mode 100644 index 0000000..089c16e --- /dev/null +++ b/source/IntegrationTests/DapperTestSetupFixture.cs @@ -0,0 +1,74 @@ +using Core.Repositories; +using Core.Services.Interfaces; +using Dapper; +using DotNet.Testcontainers.Builders; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging.Testing; +using Moq; +using Testcontainers.MsSql; + +namespace IntegrationTests; + +public class DapperTestSetupFixture : IAsyncLifetime +{ + public MsSqlContainer Container; + + + public async Task InitializeAsync() + { + Container = new MsSqlBuilder() + .WithImage("mcr.microsoft.com/mssql/server:2022-latest") + .WithPortBinding(1433, true) + .WithPassword("P@ssw0rd123") + .WithWaitStrategy(Wait.ForUnixContainer().UntilPortIsAvailable(1433)) + .Build(); + + // Create docker container for testing + await Container.StartAsync(); + + var mockCoreConfigurationService = new Mock(); + + // Set up the GetConnectionString method to return container's connection string + mockCoreConfigurationService.Setup(x => x.GetConnectionString(It.IsAny())) + .Returns(Container.GetConnectionString()); + + // Step 1: Create the table + var createTableSql = @" + CREATE TABLE Users ( + Id INT IDENTITY(1,1) PRIMARY KEY, + Name NVARCHAR(100) + )"; + + using var
connection = new SqlConnection(Container.GetConnectionString()); + await connection.ExecuteAsync(createTableSql); + + // Step 2: Create the stored procedure (must be executed separately) + var createProcedureSql = @" + CREATE PROCEDURE sp_TestAddUser + @Name NVARCHAR(100) + AS + BEGIN + INSERT INTO Users (Name) VALUES (@Name); + SELECT SCOPE_IDENTITY() AS Id; -- Correct way to return the inserted ID + END; + "; + + await connection.ExecuteAsync(createProcedureSql); + + // Step 3: Add Query SP + var createQuerySql = @" + CREATE PROCEDURE sp_TestGetAllUsers + AS + BEGIN + SELECT * FROM Users; + END; + "; + + await connection.ExecuteAsync(createQuerySql); + } + + public async Task DisposeAsync() + { + await Container.DisposeAsync(); + } +} \ No newline at end of file diff --git a/source/IntegrationTests/DapperWrapperIntegrationTests.cs b/source/IntegrationTests/DapperWrapperIntegrationTests.cs new file mode 100644 index 0000000..9f06828 --- /dev/null +++ b/source/IntegrationTests/DapperWrapperIntegrationTests.cs @@ -0,0 +1,147 @@ +using Bogus; +using Core.Repositories; +using Dapper; +using DotNet.Testcontainers.Builders; +using FluentAssertions; +using Microsoft.Data.SqlClient; +using Testcontainers.MsSql; + +namespace IntegrationTests; + +public class DapperWrapperIntegrationTests(DapperTestSetupFixture fixture) : IClassFixture +{ + private readonly string _connectionString = fixture.Container.GetConnectionString(); + private readonly DapperWrapper _dapperWrapper = new(); + private readonly Faker _faker = new(); + + [Fact] + public async Task ExecuteStoredProcedureAsync_Should_Insert_User() + { + // Arrange + await using var connection = new SqlConnection(_connectionString); + await connection.OpenAsync(); + + var name = _faker.Name.FullName(); + var parameters = new { Name = name }; + + // Act + var result = await _dapperWrapper.ExecuteStoredProcedureAsync(connection, "sp_TestAddUser", parameters); + + // Assert + result.Should().BeGreaterThan(0); + + var users = await connection.QueryAsync("SELECT Name FROM Users WHERE NAME = @name", new { name }); + users.Should().Contain(name); + } + + [Fact] + public async Task ExecuteSqlAsync_Should_Insert_User() + { + // Arrange + await using var connection = new SqlConnection(_connectionString); + await connection.OpenAsync(); + + var name = _faker.Name.FullName(); + var sql = "INSERT INTO Users (Name) VALUES(@name)"; + + // Act + await _dapperWrapper.ExecuteAsync(connection, sql, new { name }); + + // Assert + var users = await connection.QueryAsync("SELECT Name FROM Users WHERE NAME = @name", new { name }); + users.Should().Contain(name); + } + + [Fact] + public async Task QueryStoredProcedureAsync_ShouldReturn_ExpectedQueryResult() + { + // Arrange + await using var connection = new SqlConnection(_connectionString); + await connection.OpenAsync(); + var name = _faker.Name.FullName(); + var parameters = new { name }; + + await InsertUser(name, connection); + + // Act + var result = + await _dapperWrapper.QueryStoredProcedureAsync(connection, "sp_TestGetAllUsers", new { }); + + // Assert + result.Should().Contain(x => x.Name == name); + } + + [Fact] + public async Task QueryAsync_Should_QueryDb() + { + // Arrange + await using var connection = new SqlConnection(_connectionString); + await connection.OpenAsync(); + + var parameters = new { name = _faker.Name.FullName() }; + await InsertUser(parameters.name, connection); + + // Act + var result = + await _dapperWrapper.QueryAsync(connection, "SELECT name FROM USERS WHERE name = @name", + parameters); 
+ + // Assert + result.Should().NotBeNullOrEmpty(); + result.Should().HaveCount(1); + result.First().Should().BeEquivalentTo(new TestUser + { + Name = parameters.name + }); + } + + // ------- ERRORS + + [Fact] + public async Task ExecuteStoredProcedureAsync_Should_Throw_Exception_On_Error() + { + // Arrange + await using var connection = new SqlConnection(_connectionString); + await connection.OpenAsync(); + + var parameters = new { Name = _faker.Name.FullName() }; + + // Act & Assert + _dapperWrapper.Invoking(x => + x.ExecuteStoredProcedureAsync(connection, "sp_NonExistentProcedure", parameters)) + .Should().ThrowAsync() + .WithMessage("Error executing stored procedure: sp_NonExistentProcedure"); + } + + [Fact] + public async Task QueryStoredProcedureAsync_Should_Throw_Exception_On_Error() + { + // Arrange + await using var connection = new SqlConnection(_connectionString); + await connection.OpenAsync(); + + var parameters = new { Name = _faker.Name.FullName() }; + + // Act & Assert + _dapperWrapper.Invoking(x => + x.QueryStoredProcedureAsync(connection, "sp_NonExistentProcedure", parameters)) + .Should().ThrowAsync() + .WithMessage("Error executing stored procedure: sp_NonExistentProcedure"); + } + + #region Helper Methods + + private async Task InsertUser(string name, SqlConnection connection) + { + var sql = "INSERT INTO USERS (NAME) VALUES(@name)"; + await connection.ExecuteAsync(sql, new { name }); + } + + private class TestUser + { + public string Name { get; set; } + public int Id { get; set; } + } + + #endregion +} \ No newline at end of file diff --git a/source/IntegrationTests/EmailConfigurationProviderTests.cs b/source/IntegrationTests/EmailConfigurationProviderTests.cs new file mode 100644 index 0000000..467dcea --- /dev/null +++ b/source/IntegrationTests/EmailConfigurationProviderTests.cs @@ -0,0 +1,131 @@ +using Core.Services.Interfaces; +using Dapper; +using DotNet.Testcontainers.Builders; +using FluentAssertions; +using Functions; +using Functions.Configuration; +using IntegrationTests.TestHelpers; +using Microsoft.Data.SqlClient; +using Moq; +using Testcontainers.MsSql; + +namespace IntegrationTests; + +public class EmailConfigurationProviderTests : IAsyncLifetime +{ + private readonly MsSqlContainer _container = new MsSqlBuilder() + .WithImage("mcr.microsoft.com/mssql/server:2022-latest") + .WithPassword("P@ssw0rd123") + .WithPortBinding(1433, true) + .WithWaitStrategy(Wait.ForUnixContainer().UntilPortIsAvailable(1433)) + .Build(); + + + [Fact] + public void GetEmailConfiguration_ShouldReturnEmailConfiguration() + { + // Arrange + var configuration = ConfigurationHelpers.CreateDefaultConfiguration(_container.GetConnectionString()); + var connectionFactory = new SqlConnectionFactory(); + var emailConfiguration = new EmailConfigurationProvider(connectionFactory); + + // Act + var result = emailConfiguration.GetEmailConfiguration(configuration); + + // Assert + result.Should().NotBeNull(); + result!.SenderAddress.Should().Be("gpconnectappointmentchecker.test@test.net"); + result!.Hostname.Should().Be("fakeHost"); + result!.Port.Should().Be(587); + result!.Encryption.Should().Be("Tls12"); + result!.AuthenticationRequired.Should().Be(true); + result.DefaultSubject.Should().Be("GP Connect Analytics - Error"); + result.RecipientAddress.Should().Be("gpconnectappointmentchecker.test@test.net"); + result.Username.Should().Be("gpconnectappointmentchecker.test@test.net"); + result.Password.Should().Be("fakePassword123!"); + } + + public async Task InitializeAsync() + { + // Create 
docker container for testing + await _container.StartAsync(); + + var mockCoreConfigurationService = new Mock(); + + // Set up the GetConnectionString method to return container's connection string + mockCoreConfigurationService.Setup(x => x.GetConnectionString(It.IsAny())) + .Returns(_container.GetConnectionString()); + + await using var sqlConnection = new SqlConnection(_container.GetConnectionString()); + await sqlConnection.OpenAsync(); + await sqlConnection.ExecuteAsync("CREATE SCHEMA Configuration"); + + // CREATE EMAIL PROCEDURE + using (var command = new SqlCommand(@" + create procedure Configuration.GetEmailConfiguration as + select + SenderAddress, + Hostname, + Port, + Encryption, + AuthenticationRequired, + Username, + Password, + DefaultSubject, + RecipientAddress + from Configuration.Email;", sqlConnection)) + { + await command.ExecuteNonQueryAsync(); + } + + // CREATE EMAIL TABLE + await sqlConnection.ExecuteAsync(""" + BEGIN + SET ANSI_NULLS ON + SET QUOTED_IDENTIFIER ON + + CREATE TABLE [Configuration].[Email]( + [SingleRowLock] [bit] NOT NULL, + [SenderAddress] [varchar](100) NOT NULL, + [Hostname] [varchar](100) NOT NULL, + [Port] [smallint] NOT NULL, + [Encryption] [varchar](10) NOT NULL, + [AuthenticationRequired] [bit] NOT NULL, + [Username] [varchar](100) NOT NULL, + [Password] [varchar](100) NOT NULL, + [DefaultSubject] [varchar](100) NOT NULL, + [RecipientAddress] [varchar](100) NOT NULL + ) ON [PRIMARY] + END + """); + + // SEED TABLE + await sqlConnection.ExecuteAsync(""" + INSERT INTO [Configuration].[Email]( + SingleRowLock, + SenderAddress, + Hostname, + Port, + Encryption, + AuthenticationRequired, + Username, + Password, + DefaultSubject, + RecipientAddress) + VALUES (1, + 'gpconnectappointmentchecker.test@test.net', + 'fakeHost', + 587, + 'Tls12', + 1, + 'gpconnectappointmentchecker.test@test.net', + 'fakePassword123!', + 'GP Connect Analytics - Error', + 'gpconnectappointmentchecker.test@test.net') + """); + + await sqlConnection.CloseAsync(); + } + + public async Task DisposeAsync() => await _container.DisposeAsync(); +} \ No newline at end of file diff --git a/source/IntegrationTests/HierarchyProviderConsumerRepoTests.cs b/source/IntegrationTests/HierarchyProviderConsumerRepoTests.cs new file mode 100644 index 0000000..0ea8e4f --- /dev/null +++ b/source/IntegrationTests/HierarchyProviderConsumerRepoTests.cs @@ -0,0 +1,119 @@ +using Core; +using Core.DTOs.Request; +using Core.Repositories; +using Core.Services.Interfaces; +using Dapper; +using DotNet.Testcontainers.Builders; +using FluentAssertions; +using Functions; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Testing; +using Moq; +using Testcontainers.MsSql; + + +namespace IntegrationTests +{ + public class HierarchyProviderConsumerRepoTests : IClassFixture + { + private readonly string _connectionString; + private readonly HierarchyProviderConsumerRepo _repo; + private readonly FakeLogger _logger = new(); + + + public HierarchyProviderConsumerRepoTests(MsSqlContainerFixture fixture) + { + _connectionString = fixture.Container.GetConnectionString(); + var mockCoreConfigurationService = new Mock(); + + mockCoreConfigurationService.Setup(x => x.GetConnectionString(It.IsAny())) + .Returns(_connectionString); + + _repo = new HierarchyProviderConsumerRepo(mockCoreConfigurationService.Object, new DapperWrapper(), + new SqlConnectionFactory(), _logger); + } + + [Fact] + public async Task 
InsertHierarchyProviderConsumers_ShouldInsertMultipleRecordsAndVerifyFirstRecord() + { + // Arrange + var providers = new List + { + new OrganisationHierarchyProvider + { + OdsCode = "ABC123", + PracticeName = "Test Practice 1", + RegisteredPatientCount = 1500, + RegionCode = "Region1", + RegionName = "Test Region 1", + Icb22Name = "ICB Name 1", + PcnName = "PCN Name 1", + Appointments13000 = 120 + }, + new OrganisationHierarchyProvider + { + OdsCode = "XYZ456", + PracticeName = "Test Practice 2", + RegisteredPatientCount = 2000, + RegionCode = "Region2", + RegionName = "Test Region 2", + Icb22Name = "ICB Name 2", + PcnName = "PCN Name 2", + Appointments13000 = 220 + } + }; + + // Act + var insertResult = await _repo.InsertHierarchyProviderConsumers(providers); + + // Assert + + insertResult.Should().Be(2); + + await using var connection = new SqlConnection(_connectionString); + await connection.OpenAsync(); + + // Verify record count + const string countQuery = "SELECT COUNT(*) FROM [Data].[HierarchyProviderConsumers]"; + var count = await connection.QuerySingleAsync(countQuery); + count.Should().Be(2); + + // Verify specific record + const string selectQuery = "SELECT * FROM [Data].[HierarchyProviderConsumers] WHERE OdsCode = @OdsCode"; + var result = await connection.QuerySingleAsync(selectQuery, + new { OdsCode = "ABC123" }); + + result.OdsCode.Should().Be("ABC123"); + result.PracticeName.Should().Be("Test Practice 1"); + result.RegisteredPatientCount.Should().Be(1500); + result.RegionCode.Should().Be("Region1"); + result.RegionName.Should().Be("Test Region 1"); + result.Icb22Name.Should().Be("ICB Name 1"); + result.PcnName.Should().Be("PCN Name 1"); + result.Appointments13000.Should().Be(120); + } + + [Fact] + public async Task InsertHierarchyProviderConsumers_ShouldLogError_WhenConnectionFails() + { + // Arrange + var providers = new List + { + new() { OdsCode = string.Empty, PracticeName = "Test1", RegisteredPatientCount = 100 }, + new() { OdsCode = string.Empty, PracticeName = "Test2", RegisteredPatientCount = 200 } // Duplicate PK + }; + + + // Act + var result = await _repo.InsertHierarchyProviderConsumers(providers); + + // Assert + result.Should().Be(0); // Should return 0 due to exception + _logger.Collector.LatestRecord.Message.Should().Be("Error inserting hierarchy provider consumers"); + _logger.Collector.LatestRecord.Level.Should().Be(LogLevel.Error); + _logger.Collector.LatestRecord.Exception?.Message.Should() + .Contain("Violation of PRIMARY KEY constraint 'PK_Hierarchy"); + } + } +} \ No newline at end of file diff --git a/source/IntegrationTests/IntegrationTests.csproj b/source/IntegrationTests/IntegrationTests.csproj new file mode 100644 index 0000000..a67a3e5 --- /dev/null +++ b/source/IntegrationTests/IntegrationTests.csproj @@ -0,0 +1,51 @@ + + + + net8.0 + enable + enable + + false + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Always + + + + diff --git a/source/IntegrationTests/MsSqlContainerFixture.cs b/source/IntegrationTests/MsSqlContainerFixture.cs new file mode 100644 index 0000000..ddf1849 --- /dev/null +++ b/source/IntegrationTests/MsSqlContainerFixture.cs @@ -0,0 +1,77 @@ +using Core.Repositories; +using Core.Services.Interfaces; +using Dapper; +using DotNet.Testcontainers.Builders; +using Functions; +using Microsoft.Data.SqlClient; +using Microsoft.Extensions.Logging.Testing; +using Moq; +using Testcontainers.MsSql; + +namespace IntegrationTests; + +public class MsSqlContainerFixture : IAsyncLifetime +{ + public 
MsSqlContainer Container; + private HierarchyProviderConsumerRepo _repo; + private FakeLogger _logger; + + + public async Task InitializeAsync() + { + _logger = new FakeLogger(); + + Container = new MsSqlBuilder() + .WithImage("mcr.microsoft.com/mssql/server:2022-latest") + .WithPortBinding(1443, true) + .WithPassword("P@ssw0rd123") + .WithWaitStrategy(Wait.ForUnixContainer().UntilPortIsAvailable(1433)) + .Build(); + + // Create docker container for testing + await Container.StartAsync(); + + var mockCoreConfigurationService = new Mock(); + + // Set up the GetConnectionString method to return container's connection string + mockCoreConfigurationService.Setup(x => x.GetConnectionString(It.IsAny())) + .Returns(Container.GetConnectionString()); + + + _repo = new HierarchyProviderConsumerRepo(mockCoreConfigurationService.Object, new DapperWrapper(), + new SqlConnectionFactory(), _logger); + + await using var sqlConnection = new SqlConnection(Container.GetConnectionString()); + await sqlConnection.OpenAsync(); + await sqlConnection.ExecuteAsync("CREATE SCHEMA DATA"); + await sqlConnection.ExecuteAsync(@"SET ANSI_NULLS ON + BEGIN + SET QUOTED_IDENTIFIER ON + END + BEGIN + CREATE TABLE [Data].[HierarchyProviderConsumers]( + [OdsCode] [nvarchar](450) NOT NULL, + [PracticeName] [nvarchar](max) NULL, + [RegisteredPatientCount] [int] NOT NULL, + [RegionCode] [nvarchar](max) NULL, + [RegionName] [nvarchar](max) NULL, + [Icb22Name] [nvarchar](max) NULL, + [PcnName] [nvarchar](max) NULL, + [Appointments13000] [int] NOT NULL + ) ON [PRIMARY] TEXTIMAGE_ON [PRIMARY] + END + BEGIN + SET ANSI_PADDING ON + END + BEGIN + ALTER TABLE [Data].[HierarchyProviderConsumers] ADD CONSTRAINT [PK_HierarchyProviderConsumers] PRIMARY KEY CLUSTERED + ( + [OdsCode] ASC + )WITH (STATISTICS_NORECOMPUTE = OFF, IGNORE_DUP_KEY = OFF, ONLINE = OFF, OPTIMIZE_FOR_SEQUENTIAL_KEY = OFF) ON [PRIMARY] + END"); + + await sqlConnection.CloseAsync(); + } + + public async Task DisposeAsync() => await Container.DisposeAsync(); +} \ No newline at end of file diff --git a/source/IntegrationTests/TestHelpers/ConfigurationHelpers.cs b/source/IntegrationTests/TestHelpers/ConfigurationHelpers.cs new file mode 100644 index 0000000..9ef0cee --- /dev/null +++ b/source/IntegrationTests/TestHelpers/ConfigurationHelpers.cs @@ -0,0 +1,25 @@ +using Microsoft.Extensions.Configuration; + +namespace IntegrationTests.TestHelpers; + +public class ConfigurationHelpers +{ + public static IConfiguration CreateDefaultConfiguration(string connectionString = "") + { + var connection = + string.IsNullOrEmpty(connectionString) + ? "Server=myServer;Database=myDB;User Id=myUser;Password=myPass;" + : connectionString; + + var inMemorySettings = new Dictionary + { + { "ConnectionStrings:GPConnectAnalytics", connection } + }; + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(inMemorySettings!) 
+ .Build(); + + return configuration; + } +} \ No newline at end of file diff --git a/source/global.json b/source/global.json new file mode 100644 index 0000000..30823ad --- /dev/null +++ b/source/global.json @@ -0,0 +1,5 @@ +{ + "sdk": { + "version": "8.0.203" + } +} \ No newline at end of file diff --git a/source/gpconnect-analytics.DAL/BatchService.cs b/source/gpconnect-analytics.DAL/BatchService.cs deleted file mode 100644 index d6787f5..0000000 --- a/source/gpconnect-analytics.DAL/BatchService.cs +++ /dev/null @@ -1,161 +0,0 @@ -using Dapper; -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.DTO.Request; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.Helpers; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Logging; -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; -using System.Web; - -namespace gpconnect_analytics.DAL -{ - public class BatchService : IBatchService - { - private readonly IConfigurationService _configurationService; - private readonly ILogger _logger; - private SplunkClient _splunkClient; - private readonly IDataService _dataService; - private readonly ISplunkService _splunkService; - private readonly IImportService _importService; - - public BatchService(IConfigurationService configurationService, IImportService importService, ISplunkService splunkService, ILogger logger, IDataService dataService) - { - _configurationService = configurationService; - _logger = logger; - _dataService = dataService; - _splunkService = splunkService; - _importService = importService; - } - - public async Task StartBatchDownloadForTodayAsync(FileTypes fileTypes) - { - var dateInScope = DateTime.Today.AddDays(1); - var fileType = await _configurationService.GetFileType(fileTypes); - var uriList = await GetBatchDownloadUriList(fileType, DateTimeHelper.EachDay(dateInScope, dateInScope).ToList()); - - await RemovePreviousDownloads(fileType, dateInScope, dateInScope); - return await ProcessUrls(fileType, uriList, true); - } - - public async Task StartBatchDownloadAsync(HttpRequest req, FileTypes fileTypes) - { - if (req != null) - { - var startDate = DateTime.TryParse(req.Query["StartDate"].ToString(), out DateTime start) ? start : DateTime.Today; - var endDate = DateTime.TryParse(req.Query["EndDate"].ToString(), out DateTime end) ? 
end : DateTime.Today; - - if (endDate >= startDate) - { - var fileType = await _configurationService.GetFileType(fileTypes); - var uriList = await GetBatchDownloadUriList(fileType, DateTimeHelper.EachDay(startDate, endDate).ToList()); - - await RemovePreviousDownloads(fileType, startDate, endDate); - - return await ProcessUrls(fileType, uriList, false); - } - } - return new BadRequestObjectResult("Bad request"); - } - - private async Task ProcessUrls(FileType fileType, List uriList, bool isToday) - { - for (var i = 0; i < uriList.Count; i++) - { - var downloadTasksQuery = - from requestUri in uriList.Skip(i).Take(1) - select ExecuteBatchDownloadFromSplunk(fileType, requestUri, isToday); - - var downloadTasks = downloadTasksQuery.ToList(); - - while (downloadTasks.Any()) - { - Task finishedTask = await Task.WhenAny(downloadTasks); - downloadTasks.Remove(finishedTask); - } - await Task.Delay(TimeSpan.FromSeconds(10)); - } - return new OkObjectResult($"Batch download complete: {uriList.Count} requests processed"); - } - - private async Task ExecuteBatchDownloadFromSplunk(FileType fileType, UriRequest uriRequest, bool isToday) - { - try - { - if (FileTypeEnabled(fileType)) - { - var extractResponse = await _splunkService.DownloadCSVDateRangeAsync(fileType, uriRequest, isToday); - await _importService.AddObjectFileMessage(fileType, extractResponse); - } - else - { - _logger?.LogWarning($"Filetype {fileType.FileTypeFilePrefix} is not enabled. Please check if this is correct"); - } - } - catch (Exception exc) - { - _logger?.LogError(exc, $"An error has occurred while attempting to execute an Azure function"); - throw; - } - } - - public async Task> GetBatchDownloadUriList(FileType fileType, List dateTimeList) - { - var uriList = new List(); - _splunkClient = await _configurationService.GetSplunkClientConfiguration(); - - foreach (var dateTime in dateTimeList) - { - var earliestDate = dateTime.AddDays(-2); - var latestDate = dateTime.AddDays(-1); - - for (var i = 0; i < 24; i++) - { - var splunkQuery = fileType.SplunkQuery; - var hour = TimeSpan.Zero.Add(TimeSpan.FromHours(i)); - - splunkQuery = splunkQuery.Replace("{earliest}", earliestDate.ToString(Helpers.DateFormatConstants.SplunkQueryDate)); - splunkQuery = splunkQuery.Replace("{latest}", latestDate.ToString(Helpers.DateFormatConstants.SplunkQueryDate)); - splunkQuery = splunkQuery.Replace("{hour}", hour.ToString(Helpers.DateFormatConstants.SplunkQueryHour)); - - var uriBuilder = new UriBuilder - { - Scheme = Uri.UriSchemeHttps, - Host = _splunkClient.HostName, - Port = _splunkClient.HostPort, - Path = _splunkClient.BaseUrl, - Query = string.Format(_splunkClient.QueryParameters, HttpUtility.UrlEncode(splunkQuery)) - }; - - uriList.Add(new UriRequest() - { - Request = uriBuilder.Uri, - EarliestDate = earliestDate, - LatestDate = latestDate, - Hour = hour - }); - } - } - return uriList; - } - - public async Task RemovePreviousDownloads(FileType fileType, DateTime startDate, DateTime endDate) - { - var procedureName = "Import.RemovePreviousDownload"; - var parameters = new DynamicParameters(); - parameters.Add("@FileTypeId", fileType.FileTypeId); - parameters.Add("@StartDate", startDate.AddDays(-2)); - parameters.Add("@EndDate", endDate.AddDays(-1)); - await _dataService.ExecuteStoredProcedure(procedureName, parameters); - } - - private bool FileTypeEnabled(FileType fileType) - { - return (fileType != null && fileType.Enabled); - } - } -} diff --git a/source/gpconnect-analytics.DAL/ConfigurationService.cs 
b/source/gpconnect-analytics.DAL/ConfigurationService.cs deleted file mode 100644 index b4cc928..0000000 --- a/source/gpconnect-analytics.DAL/ConfigurationService.cs +++ /dev/null @@ -1,63 +0,0 @@ -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.Helpers; -using Microsoft.Extensions.Logging; -using System.Collections.Generic; -using System.Linq; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL -{ - public class ConfigurationService : IConfigurationService - { - private readonly ILogger _logger; - private readonly IDataService _dataService; - - public ConfigurationService(IDataService dataService, ILogger logger) - { - _logger = logger; - _dataService = dataService; - } - - public async Task GetBlobStorageConfiguration() - { - var result = await _dataService.ExecuteStoredProcedure("[Configuration].[GetBlobStorageConfiguration]"); - _logger.LogInformation($"Loading blob storage configuration", result.FirstOrDefault()); - return result.FirstOrDefault(); - } - - public async Task GetFilePathConstants() - { - var result = await _dataService.ExecuteStoredProcedure("[Configuration].[GetFilePathConstants]"); - _logger.LogInformation($"Loading file path constants", result.FirstOrDefault()); - return result.FirstOrDefault(); - } - - public async Task> GetFileTypes() - { - var result = await _dataService.ExecuteStoredProcedure("[Configuration].[GetFileTypes]"); - _logger.LogInformation($"Loading file types", result); - return result; - } - - public async Task GetFileType(FileTypes fileTypes) - { - var result = await _dataService.ExecuteStoredProcedure("[Configuration].[GetFileTypes]"); - return result.FirstOrDefault(ft => ft.FileTypeFilePrefix == fileTypes.ToString()); - } - - public async Task GetSplunkClientConfiguration() - { - var result = await _dataService.ExecuteStoredProcedure("[Configuration].[GetSplunkClientConfiguration]"); - _logger.LogInformation($"Loading splunk client configuration", result.FirstOrDefault()); - return result.FirstOrDefault(); - } - - public async Task GetSplunkInstance(Helpers.SplunkInstances splunkInstance) - { - var result = await _dataService.ExecuteStoredProcedure("[Configuration].[GetSplunkInstances]"); - _logger.LogInformation($"Loading splunk instance", result); - return result.FirstOrDefault(x => x.Source == splunkInstance.ToString()); - } - } -} diff --git a/source/gpconnect-analytics.DAL/DataService.cs b/source/gpconnect-analytics.DAL/DataService.cs deleted file mode 100644 index c01af79..0000000 --- a/source/gpconnect-analytics.DAL/DataService.cs +++ /dev/null @@ -1,88 +0,0 @@ -using Dapper; -using gpconnect_analytics.DAL.Interfaces; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; -using System; -using System.Collections.Generic; -using System.Data.SqlClient; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL -{ - public class DataService : IDataService - { - private readonly ILogger _logger; - private readonly IConfiguration _configuration; - private readonly string _connectionString; - - public DataService(ILogger logger, IConfiguration configuration) - { - _logger = logger; - _configuration = configuration; - _connectionString = _configuration.GetConnectionString(ConnectionStrings.GpConnectAnalytics); - } - - public async Task> ExecuteStoredProcedure(string procedureName, DynamicParameters parameters) where T : class - { - using (var sqlConnection = new SqlConnection(_connectionString)) - { - try - { - 
sqlConnection.InfoMessage += SqlConnection_InfoMessage; - _logger.LogInformation($"Executing stored procedure {procedureName}", parameters); - var results = await sqlConnection.QueryAsync(procedureName, parameters, commandType: System.Data.CommandType.StoredProcedure, commandTimeout: 0); - return results.AsList(); - } - catch (Exception exc) - { - _logger?.LogError(exc, $"An error has occurred while attempting to execute the function {procedureName}"); - throw; - } - } - } - - public async Task ExecuteStoredProcedureWithOutputParameters(string procedureName, DynamicParameters parameters) - { - using (var sqlConnection = new SqlConnection(_connectionString)) - { - try - { - sqlConnection.InfoMessage += SqlConnection_InfoMessage; - _logger.LogInformation($"Executing stored procedure {procedureName}", parameters); - await SqlMapper.ExecuteAsync(sqlConnection, procedureName, parameters, commandType: System.Data.CommandType.StoredProcedure, commandTimeout: 0); - return parameters; - } - catch (Exception exc) - { - _logger?.LogError(exc, $"An error has occurred while attempting to execute the function {procedureName}"); - throw; - } - } - } - - public async Task ExecuteStoredProcedure(string procedureName, DynamicParameters parameters) - { - - using (var sqlConnection = new SqlConnection(_connectionString)) - { - try - { - sqlConnection.InfoMessage += SqlConnection_InfoMessage; - _logger.LogInformation($"Executing stored procedure {procedureName}", parameters); - var result = await sqlConnection.ExecuteAsync(procedureName, parameters, commandType: System.Data.CommandType.StoredProcedure, commandTimeout: 0); - return result; - } - catch (Exception exc) - { - _logger?.LogError(exc, $"An error has occurred while attempting to execute the function {procedureName}"); - throw; - } - } - } - - private void SqlConnection_InfoMessage(object sender, SqlInfoMessageEventArgs e) - { - _logger?.LogInformation(e.Message); - } - } -} diff --git a/source/gpconnect-analytics.DAL/ImportService.cs b/source/gpconnect-analytics.DAL/ImportService.cs deleted file mode 100644 index c0c3ad7..0000000 --- a/source/gpconnect-analytics.DAL/ImportService.cs +++ /dev/null @@ -1,101 +0,0 @@ -using Dapper; -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.DTO.Response.Queue; -using gpconnect_analytics.DTO.Response.Splunk; -using gpconnect_analytics.Helpers; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Logging; -using System; -using System.Data; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL -{ - public class ImportService : IImportService - { - private readonly ILogger _logger; - private readonly IDataService _dataService; - private readonly IBlobService _blobService; - private readonly IConfigurationService _configurationService; - - public ImportService(IConfigurationService configurationService, IDataService dataService, IBlobService blobService, ILogger logger) - { - _logger = logger; - _configurationService = configurationService; - _dataService = dataService; - _blobService = blobService; - } - - public async Task AddDownloadedFileManually(HttpRequest req) - { - var filePath = req.Query["FilePath"].ToString(); - var fileTypeFromPath = filePath.GetFileType(); - - if (fileTypeFromPath != null) - { - var fileType = await _configurationService.GetFileType((FileTypes)fileTypeFromPath); - await AddFileMessage(fileType, new ExtractResponse() { FilePath = filePath }); - return new 
OkObjectResult($"Import of {filePath} complete"); - } - return new BadRequestObjectResult("Bad request"); - } - - public async Task AddObjectFileMessage(FileType fileType, ExtractResponse extractResponse) - { - switch (extractResponse?.ExtractResponseMessage.StatusCode) - { - case System.Net.HttpStatusCode.OK: - var uploadedBlob = await _blobService.AddObjectToBlob(extractResponse); - if (uploadedBlob != null) - { - await AddFileMessage(fileType, extractResponse); - } - break; - default: - _logger?.LogWarning(extractResponse?.ExtractResponseMessage.ToString()); - break; - throw new Exception($"Splunk has returned the following HTTP status code {extractResponse?.ExtractResponseMessage.StatusCode}"); - } - } - - public async Task AddFileMessage(FileType fileType, ExtractResponse extractResponse) - { - var fileAddedCount = await AddFile(fileType.FileTypeId, extractResponse.FilePath, true); - await _blobService.AddMessageToBlobQueue(fileAddedCount, fileType.FileTypeId, extractResponse.FilePath, true); - } - - public async Task AddFile(int fileTypeId, string filePath, bool overrideFile) - { - var procedureName = "ApiReader.AddFile"; - var parameters = new DynamicParameters(); - parameters.Add("@FileTypeId", fileTypeId); - parameters.Add("@FilePath", filePath); - parameters.Add("@Override", overrideFile); - var result = await _dataService.ExecuteStoredProcedure(procedureName, parameters); - return result; - } - - public async Task InstallData(Message queueItem) - { - bool moreFilesToInstall = true; - var procedureName = "Import.InstallNextFile"; - var parameters = new DynamicParameters(); - parameters.Add("@FileTypeId", queueItem.FileTypeId); - if(queueItem.Override) - { - parameters.Add("@Override", queueItem.Override, dbType: DbType.Boolean, direction: ParameterDirection.Input); - } - parameters.Add("@MoreFilesToInstall", dbType: DbType.Boolean, direction: ParameterDirection.Output); - - while (moreFilesToInstall) - { - _logger.LogInformation($"Installing file into database", parameters); - var result = await _dataService.ExecuteStoredProcedureWithOutputParameters(procedureName, parameters); - moreFilesToInstall = result.Get("@MoreFilesToInstall"); - _logger.LogInformation($"More files to install? 
{moreFilesToInstall}"); - }; - } - } -} diff --git a/source/gpconnect-analytics.DAL/Interfaces/IBatchService.cs b/source/gpconnect-analytics.DAL/Interfaces/IBatchService.cs deleted file mode 100644 index 2e279ce..0000000 --- a/source/gpconnect-analytics.DAL/Interfaces/IBatchService.cs +++ /dev/null @@ -1,19 +0,0 @@ -using gpconnect_analytics.DTO.Request; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.Helpers; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using System; -using System.Collections.Generic; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL.Interfaces -{ - public interface IBatchService - { - Task> GetBatchDownloadUriList(FileType fileType, List dateTimeList); - Task RemovePreviousDownloads(FileType fileType, DateTime startDate, DateTime endDate); - Task StartBatchDownloadForTodayAsync(FileTypes fileTypes); - Task StartBatchDownloadAsync(HttpRequest req, FileTypes fileTypes); - } -} diff --git a/source/gpconnect-analytics.DAL/Interfaces/IBlobService.cs b/source/gpconnect-analytics.DAL/Interfaces/IBlobService.cs deleted file mode 100644 index 8264c8b..0000000 --- a/source/gpconnect-analytics.DAL/Interfaces/IBlobService.cs +++ /dev/null @@ -1,12 +0,0 @@ -using Azure.Storage.Blobs.Models; -using gpconnect_analytics.DTO.Response.Splunk; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL.Interfaces -{ - public interface IBlobService - { - Task AddMessageToBlobQueue(int fileAddedCount, int fileTypeId, string blobName, bool overrideEntry = false); - Task AddObjectToBlob(ExtractResponse extractResponse); - } -} diff --git a/source/gpconnect-analytics.DAL/Interfaces/IDataService.cs b/source/gpconnect-analytics.DAL/Interfaces/IDataService.cs deleted file mode 100644 index a5068b7..0000000 --- a/source/gpconnect-analytics.DAL/Interfaces/IDataService.cs +++ /dev/null @@ -1,13 +0,0 @@ -using Dapper; -using System.Collections.Generic; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL.Interfaces -{ - public interface IDataService - { - Task> ExecuteStoredProcedure(string procedureName, DynamicParameters parameters = null) where T : class; - Task ExecuteStoredProcedureWithOutputParameters(string procedureName, DynamicParameters parameters); - Task ExecuteStoredProcedure(string procedureName, DynamicParameters parameters = null); - } -} diff --git a/source/gpconnect-analytics.DAL/Interfaces/IImportService.cs b/source/gpconnect-analytics.DAL/Interfaces/IImportService.cs deleted file mode 100644 index 3b0c163..0000000 --- a/source/gpconnect-analytics.DAL/Interfaces/IImportService.cs +++ /dev/null @@ -1,17 +0,0 @@ -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.DTO.Response.Queue; -using gpconnect_analytics.DTO.Response.Splunk; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL.Interfaces -{ - public interface IImportService - { - Task InstallData(Message message); - Task AddFile(int fileTypeId, string filePath, bool overrideFile); - Task AddDownloadedFileManually(HttpRequest req); - Task AddObjectFileMessage(FileType fileType, ExtractResponse extractResponse); - } -} diff --git a/source/gpconnect-analytics.DAL/Interfaces/ISplunkService.cs b/source/gpconnect-analytics.DAL/Interfaces/ISplunkService.cs deleted file mode 100644 index dd25a4e..0000000 --- a/source/gpconnect-analytics.DAL/Interfaces/ISplunkService.cs +++ /dev/null @@ -1,12 +0,0 @@ -using 
gpconnect_analytics.DTO.Request; -using gpconnect_analytics.DTO.Response.Configuration; -using gpconnect_analytics.DTO.Response.Splunk; -using System.Threading.Tasks; - -namespace gpconnect_analytics.DAL.Interfaces -{ - public interface ISplunkService - { - Task DownloadCSVDateRangeAsync(FileType fileType, UriRequest uriRequest, bool isToday); - } -} diff --git a/source/gpconnect-analytics.DAL/gpconnect-analytics.DAL.csproj b/source/gpconnect-analytics.DAL/gpconnect-analytics.DAL.csproj deleted file mode 100644 index eaf6218..0000000 --- a/source/gpconnect-analytics.DAL/gpconnect-analytics.DAL.csproj +++ /dev/null @@ -1,29 +0,0 @@ - - - - netcoreapp3.1 - gpconnect_analytics.DAL - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/source/gpconnect-analytics.DTO/Request/AsidLookupRun.cs b/source/gpconnect-analytics.DTO/Request/AsidLookupRun.cs deleted file mode 100644 index 8f4875b..0000000 --- a/source/gpconnect-analytics.DTO/Request/AsidLookupRun.cs +++ /dev/null @@ -1,6 +0,0 @@ -namespace gpconnect_analytics.DTO.Request -{ - public class AsidLookupRun : BaseRun - { - } -} diff --git a/source/gpconnect-analytics.DTO/Response/Import/NextFile.cs b/source/gpconnect-analytics.DTO/Response/Import/NextFile.cs deleted file mode 100644 index 5d5b209..0000000 --- a/source/gpconnect-analytics.DTO/Response/Import/NextFile.cs +++ /dev/null @@ -1,7 +0,0 @@ -namespace gpconnect_analytics.DTO.Response.Import -{ - public class NextFile - { - public bool MoreFilesToInstall { get; set; } - } -} diff --git a/source/gpconnect-analytics.DTO/gpconnect-analytics.DTO.csproj b/source/gpconnect-analytics.DTO/gpconnect-analytics.DTO.csproj deleted file mode 100644 index 0edb15e..0000000 --- a/source/gpconnect-analytics.DTO/gpconnect-analytics.DTO.csproj +++ /dev/null @@ -1,13 +0,0 @@ - - - - netcoreapp3.1 - gpconnect_analytics.DTO - - - - - - - - diff --git a/source/gpconnect-analytics.Functions/Configuration/Infrastructure/HttpClient/HttpClientExtensions.cs b/source/gpconnect-analytics.Functions/Configuration/Infrastructure/HttpClient/HttpClientExtensions.cs deleted file mode 100644 index 8c001ea..0000000 --- a/source/gpconnect-analytics.Functions/Configuration/Infrastructure/HttpClient/HttpClientExtensions.cs +++ /dev/null @@ -1,27 +0,0 @@ -using System; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Security.Authentication; - -namespace gpconnect_analytics.Configuration.Infrastructure.HttpClient -{ - public static class HttpClientExtensions - { - public static System.Net.Http.HttpClient ConfigureHttpClient(System.Net.Http.HttpClient options) - { - options.Timeout = new TimeSpan(0, 0, 1, 0); - options.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("text/csv")); - options.DefaultRequestHeaders.CacheControl = new CacheControlHeaderValue { NoCache = true }; - return options; - } - - public static HttpMessageHandler CreateHttpMessageHandler() - { - var httpClientHandler = new HttpClientHandler - { - SslProtocols = SslProtocols.Tls13 | SslProtocols.Tls12 | SslProtocols.Tls11 | SslProtocols.Tls - }; - return httpClientHandler; - } - } -} diff --git a/source/gpconnect-analytics.Functions/Configuration/Infrastructure/Mapping/MappingExtensions.cs b/source/gpconnect-analytics.Functions/Configuration/Infrastructure/Mapping/MappingExtensions.cs deleted file mode 100644 index baa1fea..0000000 --- a/source/gpconnect-analytics.Functions/Configuration/Infrastructure/Mapping/MappingExtensions.cs +++ /dev/null @@ -1,16 +0,0 @@ -using Dapper.FluentMap; -using 
gpconnect_analytics.DAL.Mapping; - -namespace gpconnect_analytics.Configuration.Infrastructure -{ - public static class MappingExtensions - { - public static void ConfigureMappingServices() - { - FluentMapper.Initialize(config => - { - config.AddMap(new SplunkInstanceMap()); - }); - } - } -} diff --git a/source/gpconnect-analytics.Functions/ExecuteImportByTrigger.cs b/source/gpconnect-analytics.Functions/ExecuteImportByTrigger.cs deleted file mode 100644 index 99675cd..0000000 --- a/source/gpconnect-analytics.Functions/ExecuteImportByTrigger.cs +++ /dev/null @@ -1,24 +0,0 @@ -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.DTO.Response.Queue; -using Microsoft.Azure.WebJobs; -using Microsoft.Extensions.Logging; -using System.Threading.Tasks; - -namespace gpconnect_analytics.Functions -{ - public class ExecuteImportByTrigger - { - private readonly IImportService _importService; - - public ExecuteImportByTrigger(IImportService importService) - { - _importService = importService; - } - - [FunctionName("ExecuteImportByTrigger")] - public async Task Run([QueueTrigger("%QueueName%")] Message queueItem, ILogger log) - { - await _importService.InstallData(queueItem); - } - } -} diff --git a/source/gpconnect-analytics.Functions/GetDataFromApiByDateRange.cs b/source/gpconnect-analytics.Functions/GetDataFromApiByDateRange.cs deleted file mode 100644 index f7f91ce..0000000 --- a/source/gpconnect-analytics.Functions/GetDataFromApiByDateRange.cs +++ /dev/null @@ -1,39 +0,0 @@ -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.Helpers; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Azure.WebJobs; -using Microsoft.Azure.WebJobs.Extensions.Http; -using Microsoft.Extensions.Logging; -using System.Threading.Tasks; - -namespace gpconnect_analytics.Functions -{ - public class GetDataFromApiByDateRange - { - private readonly IBatchService _batchService; - - public GetDataFromApiByDateRange(IBatchService batchService) - { - _batchService = batchService; - } - - [FunctionName("GetDataFromApiByDateRangeSspTrans")] - public async Task GetDataFromSspTransByDateRange([HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] HttpRequest req, ILogger log) - { - return await _batchService.StartBatchDownloadAsync(req, FileTypes.ssptrans); - } - - [FunctionName("GetDataFromApiByDateRangeMeshTrans")] - public async Task GetDataFromMeshTransByDateRange([HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] HttpRequest req, ILogger log) - { - return await _batchService.StartBatchDownloadAsync(req, FileTypes.meshtrans); - } - - [FunctionName("GetDataFromApiByDateRangeAsidLookup")] - public async Task GetDataFromAsidLookupByDateRange([HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] HttpRequest req, ILogger log) - { - return await _batchService.StartBatchDownloadAsync(req, FileTypes.asidlookup); - } - } -} diff --git a/source/gpconnect-analytics.Functions/GetDataFromApiByTrigger.cs b/source/gpconnect-analytics.Functions/GetDataFromApiByTrigger.cs deleted file mode 100644 index 5741505..0000000 --- a/source/gpconnect-analytics.Functions/GetDataFromApiByTrigger.cs +++ /dev/null @@ -1,36 +0,0 @@ -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.Helpers; -using Microsoft.Azure.WebJobs; -using Microsoft.Extensions.Logging; -using System.Threading.Tasks; - -namespace gpconnect_analytics.Functions -{ - public class GetDataFromApiByTrigger - { - private readonly IBatchService _batchService; - - public 
GetDataFromApiByTrigger(IBatchService batchService) - { - _batchService = batchService; - } - - [FunctionName("GetDataFromApiByTriggerAsidLookup")] - public async Task GetDataFromAsidLookup([TimerTrigger("%GetDataFromApiByTriggerAsidLookupSchedule%", RunOnStartup = false)] TimerInfo myTimer, ILogger log) - { - await _batchService.StartBatchDownloadForTodayAsync(FileTypes.asidlookup); - } - - [FunctionName("GetDataFromApiByTriggerSspTrans")] - public async Task GetDataFromSspTrans([TimerTrigger("%GetDataFromApiByTriggerSspTransSchedule%", RunOnStartup = false)] TimerInfo myTimer, ILogger log) - { - await _batchService.StartBatchDownloadForTodayAsync(FileTypes.ssptrans); - } - - [FunctionName("GetDataFromApiByTriggerMeshTrans")] - public async Task GetDataFromMeshTrans([TimerTrigger("%GetDataFromApiByTriggerMeshTransSchedule%", RunOnStartup = false)] TimerInfo myTimer, ILogger log) - { - await _batchService.StartBatchDownloadForTodayAsync(FileTypes.meshtrans); - } - } -} diff --git a/source/gpconnect-analytics.Functions/GetDataFromApiManual.cs b/source/gpconnect-analytics.Functions/GetDataFromApiManual.cs deleted file mode 100644 index a474b5f..0000000 --- a/source/gpconnect-analytics.Functions/GetDataFromApiManual.cs +++ /dev/null @@ -1,26 +0,0 @@ -using gpconnect_analytics.DAL.Interfaces; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Azure.WebJobs; -using Microsoft.Azure.WebJobs.Extensions.Http; -using Microsoft.Extensions.Logging; -using System.Threading.Tasks; - -namespace gpconnect_analytics.Functions -{ - public class GetDataFromApiManual - { - private readonly IImportService _importService; - - public GetDataFromApiManual(IImportService importService) - { - _importService = importService; - } - - [FunctionName("GetDataFromApiManual")] - public async Task AddDownloadedFile([HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] HttpRequest req, ILogger log) - { - return await _importService.AddDownloadedFileManually(req); - } - } -} diff --git a/source/gpconnect-analytics.Functions/GetDataFromApiToday.cs b/source/gpconnect-analytics.Functions/GetDataFromApiToday.cs deleted file mode 100644 index 03fe9be..0000000 --- a/source/gpconnect-analytics.Functions/GetDataFromApiToday.cs +++ /dev/null @@ -1,39 +0,0 @@ -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.Helpers; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Azure.WebJobs; -using Microsoft.Azure.WebJobs.Extensions.Http; -using Microsoft.Extensions.Logging; -using System.Threading.Tasks; - -namespace gpconnect_analytics.Functions -{ - public class GetDataFromApiToday - { - private readonly IBatchService _batchService; - - public GetDataFromApiToday(IBatchService batchService) - { - _batchService = batchService; - } - - [FunctionName("GetDataFromApiTodaySspTrans")] - public async Task GetDataFromSspTransByDateRange([HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] HttpRequest req, ILogger log) - { - return await _batchService.StartBatchDownloadForTodayAsync(FileTypes.ssptrans); - } - - [FunctionName("GetDataFromApiTodayMeshTrans")] - public async Task GetDataFromMeshTransByDateRange([HttpTrigger(AuthorizationLevel.Function, "GET", Route = null)] HttpRequest req, ILogger log) - { - return await _batchService.StartBatchDownloadForTodayAsync(FileTypes.meshtrans); - } - - [FunctionName("GetDataFromApiTodayAsidLookup")] - public async Task GetDataFromAsidLookupByDateRange([HttpTrigger(AuthorizationLevel.Function, "GET", 
Route = null)] HttpRequest req, ILogger log) - { - return await _batchService.StartBatchDownloadForTodayAsync(FileTypes.asidlookup); - } - } -} diff --git a/source/gpconnect-analytics.Functions/PurgeErrorLogByTrigger.cs b/source/gpconnect-analytics.Functions/PurgeErrorLogByTrigger.cs deleted file mode 100644 index 8761068..0000000 --- a/source/gpconnect-analytics.Functions/PurgeErrorLogByTrigger.cs +++ /dev/null @@ -1,23 +0,0 @@ -using gpconnect_analytics.DAL.Interfaces; -using Microsoft.Azure.WebJobs; -using Microsoft.Extensions.Logging; -using System.Threading.Tasks; - -namespace gpconnect_analytics.Functions -{ - public class PurgeErrorLogByTrigger - { - private readonly ILoggingService _loggingService; - - public PurgeErrorLogByTrigger(ILoggingService loggingService) - { - _loggingService = loggingService; - } - - [FunctionName("PurgeErrorLogByTrigger")] - public async Task PurgeErrorLog([TimerTrigger("%PurgeErrorLogByTriggerSchedule%", RunOnStartup = false)] TimerInfo myTimer, ILogger log) - { - await _loggingService.PurgeErrorLog(); - } - } -} diff --git a/source/gpconnect-analytics.Functions/Startup.cs b/source/gpconnect-analytics.Functions/Startup.cs deleted file mode 100644 index f3fa4dd..0000000 --- a/source/gpconnect-analytics.Functions/Startup.cs +++ /dev/null @@ -1,31 +0,0 @@ -using gpconnect_analytics.Configuration.Infrastructure; -using gpconnect_analytics.Configuration.Infrastructure.Logging; -using gpconnect_analytics.DAL; -using gpconnect_analytics.DAL.Interfaces; -using gpconnect_analytics.Functions; -using Microsoft.Azure.Functions.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection; - -[assembly: FunctionsStartup(typeof(Startup))] -namespace gpconnect_analytics.Functions -{ - public class Startup : FunctionsStartup - { - public override void Configure(IFunctionsHostBuilder builder) - { - MappingExtensions.ConfigureMappingServices(); - builder.Services.AddScoped(); - builder.Services.AddScoped(); - builder.Services.AddScoped(); - builder.Services.AddScoped(); - builder.Services.AddScoped(); - builder.Services.AddScoped(); - builder.Services.AddScoped(); - - var configuration = builder.GetContext().Configuration; - builder.Services.AddLogging(loggingBuilder => LoggingExtensions.ConfigureLoggingServices(loggingBuilder, configuration)); - builder.Services.AddHttpClient("SplunkApiClient", options => Configuration.Infrastructure.HttpClient.HttpClientExtensions.ConfigureHttpClient(options)) - .ConfigurePrimaryHttpMessageHandler(() => Configuration.Infrastructure.HttpClient.HttpClientExtensions.CreateHttpMessageHandler()); - } - } -} diff --git a/source/gpconnect-analytics.Functions/gpconnect-analytics.Functions.csproj b/source/gpconnect-analytics.Functions/gpconnect-analytics.Functions.csproj deleted file mode 100644 index d2c95d7..0000000 --- a/source/gpconnect-analytics.Functions/gpconnect-analytics.Functions.csproj +++ /dev/null @@ -1,32 +0,0 @@ - - - netcoreapp3.1 - v4 - gpconnect_analytics.Functions - /home/site/wwwroot - Linux - - - - - - - - - - - - - - - - - - PreserveNewest - - - PreserveNewest - Never - - - diff --git a/source/gpconnect-analytics.Helpers/ApplicationHelper.cs b/source/gpconnect-analytics.Helpers/ApplicationHelper.cs deleted file mode 100644 index c214dca..0000000 --- a/source/gpconnect-analytics.Helpers/ApplicationHelper.cs +++ /dev/null @@ -1,20 +0,0 @@ -using System.Reflection; - -namespace gpconnect_analytics.Helpers -{ - public class ApplicationHelper - { - public static class ApplicationVersion - { - public static 
string GetAssemblyVersion() - { - string buildTag = System.Environment.GetEnvironmentVariable("BUILD_TAG"); - - if (string.IsNullOrWhiteSpace(buildTag)) - return Assembly.GetCallingAssembly()?.GetName().FullName; - - return buildTag; - } - } - } -} diff --git a/source/gpconnect-analytics.Helpers/AttributeExtensions.cs b/source/gpconnect-analytics.Helpers/AttributeExtensions.cs deleted file mode 100644 index a3412c4..0000000 --- a/source/gpconnect-analytics.Helpers/AttributeExtensions.cs +++ /dev/null @@ -1,21 +0,0 @@ -using System; -using System.Linq; - -namespace gpconnect_analytics.Helpers -{ - public static class AttributeExtensions - { - public static FileTypes? GetFileType(this string filePath) - { - return GetValueFromPath(filePath); - } - - public static FileTypes? GetValueFromPath(string filePath) - { - var fileType = typeof(FilePath).GetFields() - .Where(x => Attribute.GetCustomAttribute(x, typeof(FilePathAttribute)) is FilePathAttribute filePathAttribute && filePath.Contains(filePathAttribute?.FilePath)) - .FirstOrDefault(); - return fileType != null ? (FileTypes)fileType.GetValue(filePath) : (FileTypes?)null; - } - } -} \ No newline at end of file diff --git a/source/gpconnect-analytics.Helpers/ConfigurationHelper.cs b/source/gpconnect-analytics.Helpers/ConfigurationHelper.cs deleted file mode 100644 index e23b2f6..0000000 --- a/source/gpconnect-analytics.Helpers/ConfigurationHelper.cs +++ /dev/null @@ -1,15 +0,0 @@ -using Microsoft.Extensions.Configuration; -using System; - -namespace gpconnect_analytics.Helpers -{ - public static class ConfigurationHelper - { - public static string GetConfigurationString(this IConfigurationSection configurationSetting, string defaultValue = "", bool throwExceptionIfEmpty = false) - { - var keyValueExists = configurationSetting.Exists() && !string.IsNullOrEmpty(configurationSetting.Value); - if (!keyValueExists && throwExceptionIfEmpty) throw new ArgumentNullException(configurationSetting.Key); - return keyValueExists ? configurationSetting.Value : defaultValue; - } - } -} diff --git a/source/gpconnect-analytics.Helpers/FileTypes.cs b/source/gpconnect-analytics.Helpers/FileTypes.cs deleted file mode 100644 index 9ca38ea..0000000 --- a/source/gpconnect-analytics.Helpers/FileTypes.cs +++ /dev/null @@ -1,12 +0,0 @@ -namespace gpconnect_analytics.Helpers -{ - public enum FileTypes - { - [FilePath("asid-lookup-data")] - asidlookup, - [FilePath("ssp-transactions")] - ssptrans, - [FilePath("mesh-transactions")] - meshtrans - } -} \ No newline at end of file diff --git a/source/gpconnect-analytics.Helpers/NumberExtensions.cs b/source/gpconnect-analytics.Helpers/NumberExtensions.cs deleted file mode 100644 index de32983..0000000 --- a/source/gpconnect-analytics.Helpers/NumberExtensions.cs +++ /dev/null @@ -1,23 +0,0 @@ -using System; - -namespace gpconnect_analytics.Helpers -{ - public static class NumberExtensions - { - public static int StringToInteger(this string valueIn, int defaultValue) - { - return int.TryParse(valueIn, out _) ? 
Convert.ToInt32(valueIn) : defaultValue; - } - - public static int StringToInteger(this string valueIn) - { - return int.Parse(valueIn); - } - - public static string UnitsFormatter(this double valueIn, string units) - { - return $"{valueIn} {units}"; - } - } - -} diff --git a/source/gpconnect-analytics.Helpers/StringExtensions.cs b/source/gpconnect-analytics.Helpers/StringExtensions.cs deleted file mode 100644 index ce4e5fb..0000000 --- a/source/gpconnect-analytics.Helpers/StringExtensions.cs +++ /dev/null @@ -1,13 +0,0 @@ -using System; -using System.Text; - -namespace gpconnect_analytics.Helpers -{ - public static class StringExtensions - { - public static string StringToBase64(this string valueIn) - { - return Convert.ToBase64String(Encoding.UTF8.GetBytes(valueIn)); - } - } -} diff --git a/source/gpconnect-analytics.Helpers/gpconnect-analytics.Helpers.csproj b/source/gpconnect-analytics.Helpers/gpconnect-analytics.Helpers.csproj deleted file mode 100644 index 8627406..0000000 --- a/source/gpconnect-analytics.Helpers/gpconnect-analytics.Helpers.csproj +++ /dev/null @@ -1,12 +0,0 @@ - - - - netcoreapp3.1 - - - - - - - - diff --git a/source/gpconnect-analytics.sln b/source/gpconnect-analytics.sln index cf3937b..cf75d48 100644 --- a/source/gpconnect-analytics.sln +++ b/source/gpconnect-analytics.sln @@ -3,13 +3,15 @@ Microsoft Visual Studio Solution File, Format Version 12.00 # Visual Studio Version 16 VisualStudioVersion = 16.0.30804.86 MinimumVisualStudioVersion = 10.0.40219.1 -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "gpconnect-analytics.Functions", "gpconnect-analytics.Functions\gpconnect-analytics.Functions.csproj", "{7EBDF22C-0A85-4151-A8D7-8D81E456276E}" +Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "Functions", "Functions\Functions.csproj", "{7EBDF22C-0A85-4151-A8D7-8D81E456276E}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "gpconnect-analytics.DAL", "gpconnect-analytics.DAL\gpconnect-analytics.DAL.csproj", "{3B1FB921-0C56-47D6-A716-A4DE679F6EBF}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Core", "Core\Core.csproj", "{66EFAC7B-6BD6-421A-B70D-126FC4D0EE3F}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "gpconnect-analytics.DTO", "gpconnect-analytics.DTO\gpconnect-analytics.DTO.csproj", "{18DE5071-433E-49D2-B168-136730EDB158}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Core.Tests", "Core.Tests\Core.Tests.csproj", "{5841302E-4C82-473D-9579-308FCBA85E50}" EndProject -Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "gpconnect-analytics.Helpers", "gpconnect-analytics.Helpers\gpconnect-analytics.Helpers.csproj", "{D8523638-EE9F-4551-8F22-7FD91BD327FC}" +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "IntegrationTests", "IntegrationTests\IntegrationTests.csproj", "{00CE1CEE-7D58-4689-8EB6-0F86E6DF8729}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Functions.Tests", "Functions.Tests\Functions.Tests.csproj", "{3A964393-AD74-49C0-891B-140B5C197661}" EndProject Global GlobalSection(SolutionConfigurationPlatforms) = preSolution @@ -21,18 +23,22 @@ Global {7EBDF22C-0A85-4151-A8D7-8D81E456276E}.Debug|Any CPU.Build.0 = Debug|Any CPU {7EBDF22C-0A85-4151-A8D7-8D81E456276E}.Release|Any CPU.ActiveCfg = Release|Any CPU {7EBDF22C-0A85-4151-A8D7-8D81E456276E}.Release|Any CPU.Build.0 = Release|Any CPU - {3B1FB921-0C56-47D6-A716-A4DE679F6EBF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3B1FB921-0C56-47D6-A716-A4DE679F6EBF}.Debug|Any CPU.Build.0 = Debug|Any CPU - 
{3B1FB921-0C56-47D6-A716-A4DE679F6EBF}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3B1FB921-0C56-47D6-A716-A4DE679F6EBF}.Release|Any CPU.Build.0 = Release|Any CPU - {18DE5071-433E-49D2-B168-136730EDB158}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {18DE5071-433E-49D2-B168-136730EDB158}.Debug|Any CPU.Build.0 = Debug|Any CPU - {18DE5071-433E-49D2-B168-136730EDB158}.Release|Any CPU.ActiveCfg = Release|Any CPU - {18DE5071-433E-49D2-B168-136730EDB158}.Release|Any CPU.Build.0 = Release|Any CPU - {D8523638-EE9F-4551-8F22-7FD91BD327FC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D8523638-EE9F-4551-8F22-7FD91BD327FC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D8523638-EE9F-4551-8F22-7FD91BD327FC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D8523638-EE9F-4551-8F22-7FD91BD327FC}.Release|Any CPU.Build.0 = Release|Any CPU + {66EFAC7B-6BD6-421A-B70D-126FC4D0EE3F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {66EFAC7B-6BD6-421A-B70D-126FC4D0EE3F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {66EFAC7B-6BD6-421A-B70D-126FC4D0EE3F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {66EFAC7B-6BD6-421A-B70D-126FC4D0EE3F}.Release|Any CPU.Build.0 = Release|Any CPU + {5841302E-4C82-473D-9579-308FCBA85E50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5841302E-4C82-473D-9579-308FCBA85E50}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5841302E-4C82-473D-9579-308FCBA85E50}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5841302E-4C82-473D-9579-308FCBA85E50}.Release|Any CPU.Build.0 = Release|Any CPU + {00CE1CEE-7D58-4689-8EB6-0F86E6DF8729}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {00CE1CEE-7D58-4689-8EB6-0F86E6DF8729}.Debug|Any CPU.Build.0 = Debug|Any CPU + {00CE1CEE-7D58-4689-8EB6-0F86E6DF8729}.Release|Any CPU.ActiveCfg = Release|Any CPU + {00CE1CEE-7D58-4689-8EB6-0F86E6DF8729}.Release|Any CPU.Build.0 = Release|Any CPU + {3A964393-AD74-49C0-891B-140B5C197661}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3A964393-AD74-49C0-891B-140B5C197661}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3A964393-AD74-49C0-891B-140B5C197661}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3A964393-AD74-49C0-891B-140B5C197661}.Release|Any CPU.Build.0 = Release|Any CPU EndGlobalSection GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE diff --git a/source/gpconnect-analytics.sln.DotSettings b/source/gpconnect-analytics.sln.DotSettings new file mode 100644 index 0000000..3ba9dd0 --- /dev/null +++ b/source/gpconnect-analytics.sln.DotSettings @@ -0,0 +1,16 @@ + + True + True + True + True + True + True + True + True + True + True + True + True + True + True + True \ No newline at end of file
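
The Functions sources removed above are all written against the in-process hosting model (`FunctionsStartup`, `[FunctionName]`, `Microsoft.Azure.WebJobs` timer/queue/HTTP triggers), which is retired along with the old `netcoreapp3.1` projects; the updated solution now references a separate `Functions` project whose code is not part of this diff. For orientation only, here is a minimal sketch of how the same dependency registration could look under the isolated worker model, assuming the standard `HostBuilder` bootstrap and the `Microsoft.Azure.Functions.Worker` packages; the service names are carried over from the deleted registrations and may differ in the new `Core` project.

```csharp
// Illustrative sketch only - not code from this repository.
// Assumes the replacement Functions project uses the standard isolated worker bootstrap
// provided by the Microsoft.Azure.Functions.Worker packages.
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;

var host = new HostBuilder()
    .ConfigureFunctionsWorkerDefaults()
    .ConfigureServices(services =>
    {
        // Mirror the old FunctionsStartup registrations here, for example:
        // services.AddScoped<IImportService, ImportService>();
        // services.AddScoped<IDataService, DataService>();
    })
    .Build();

host.Run();
```

Individual functions would then swap `[FunctionName]` for `[Function("...")]` from `Microsoft.Azure.Functions.Worker`, with the queue and timer bindings supplied by the corresponding `Microsoft.Azure.Functions.Worker.Extensions.*` packages.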
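
If the named Splunk HTTP client is recreated in the new project, one detail from the deleted `HttpClientExtensions.CreateHttpMessageHandler` is worth revisiting: it pinned `SslProtocols` to `Tls13 | Tls12 | Tls11 | Tls`, and TLS 1.0/1.1 are deprecated. A possible replacement (a sketch under that assumption; the class name is illustrative) simply defers protocol selection to the operating system:

```csharp
// Sketch only: SslProtocols.None defers negotiation to the OS default,
// rather than explicitly allowing the deprecated TLS 1.0/1.1 protocols.
using System.Net.Http;
using System.Security.Authentication;

public static class SplunkHttpDefaults
{
    public static HttpMessageHandler CreateHttpMessageHandler() =>
        new HttpClientHandler
        {
            SslProtocols = SslProtocols.None
        };
}
```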
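
One behavioural quirk in the deleted `ImportService.AddObjectFileMessage` is also worth noting in case that logic was ported rather than rewritten: the `throw` in the `default` branch sat after `break`, so it was unreachable and a non-OK Splunk response was only ever logged as a warning. If the ported version is meant to fail loudly, the corrected ordering of that switch (shown here as a fragment of the original method body, not new code) would be:

```csharp
// Fragment of the deleted method body with the unreachable throw fixed:
// the default branch now logs the response and then throws.
switch (extractResponse?.ExtractResponseMessage.StatusCode)
{
    case System.Net.HttpStatusCode.OK:
        var uploadedBlob = await _blobService.AddObjectToBlob(extractResponse);
        if (uploadedBlob != null)
        {
            await AddFileMessage(fileType, extractResponse);
        }
        break;
    default:
        _logger?.LogWarning(extractResponse?.ExtractResponseMessage.ToString());
        throw new Exception($"Splunk has returned the following HTTP status code {extractResponse?.ExtractResponseMessage.StatusCode}");
}
```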