41 changes: 37 additions & 4 deletions libs/net/template/ReportEngine.cs
@@ -1,8 +1,11 @@

using System.Globalization;
using System.IO.Compression;
using System.Text;
using System.Text.Json;
using System.Text.Json.Nodes;
using System.Xml;
using CsvHelper.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using TNO.Core.Exceptions;
@@ -640,9 +643,36 @@ await report.Sections
{
if (response.Content.Headers.ContentType?.MediaType?.Contains("text/csv") == true)
{
var data = await response.Content.ReadAsStreamAsync();
using var reader = new StreamReader(data);
using var csv = new CsvHelper.CsvReader(reader, System.Globalization.CultureInfo.InvariantCulture);
var contentType = response.Content.Headers.ContentType?.ToString() ?? "unknown";
var contentEncoding = response.Content.Headers.ContentEncoding.FirstOrDefault() ?? "none";
var contentLength = response.Content.Headers.ContentLength ?? -1;
this.Logger.LogInformation("CSV response - Type: {Type}, Encoding: {Encoding}, Length: {Length}",
contentType, contentEncoding, contentLength);

var readStream = await response.Content.ReadAsStreamAsync();

// If the server sent gzip but the HttpClient isn't configured for automatic decompression → handle it manually
if (response.Content.Headers.ContentEncoding.Contains("gzip"))
Review comment (Collaborator, PR author):
This is the main fix. The other changes are for logging and debugging.
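An alternative worth noting (not part of this PR, sketch only): if the HttpClient used for these report requests is built somewhere we control, automatic decompression can be enabled on its handler so gzip/deflate bodies are decoded before they ever reach this parsing code. The handler/client construction below is illustrative, not the project's actual wiring.

```csharp
using System.Net;
using System.Net.Http;

// Sketch: opt in to automatic decompression where the report HttpClient is built (assumed location).
var handler = new HttpClientHandler
{
    AutomaticDecompression = DecompressionMethods.GZip | DecompressionMethods.Deflate
};
using var client = new HttpClient(handler);
using var response = await client.GetAsync(url); // body arrives already decompressed

The manual GZipStream path in this change still has value as a defensive fallback when a response advertises Content-Encoding: gzip to a client that was not configured this way.
```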

{
this.Logger.LogInformation("Detected gzip encoding - decompressing");
readStream = new GZipStream(readStream, CompressionMode.Decompress, leaveOpen: true);
}

using var reader = new StreamReader(readStream, detectEncodingFromByteOrderMarks: true);

var config = new CsvConfiguration(CultureInfo.InvariantCulture)
{
// Fields may contain commas/embedded JSON; CsvHelper parses quoted fields automatically on read
// (note: ShouldQuote only applies when writing CSV)
ShouldQuote = (args) => true,
BadDataFound = args =>
{
this.Logger.LogWarning("Bad CSV row: {Raw}", args.RawRecord);
// to skip bad rows silently, set BadDataFound = null in this configuration instead
},
// If dates are DD/MM/YY inconsistent → add custom converter later (see sketch after the diff)
};

using var csv = new CsvHelper.CsvReader(reader, config);
var records = csv.GetRecords<dynamic>().ToList();
var dataEngineModel = new ReportEngineDataModel<dynamic>(records);
if (!String.IsNullOrWhiteSpace(settings.DataTemplate))
@@ -665,11 +695,14 @@ await report.Sections
{
var data = await response.Content.ReadAsStringAsync();
sectionData.Data = data;
this.Logger.LogWarning("Non-CSV content type received: {Type}", response.Content.Headers.ContentType?.MediaType);
}
}
catch (Exception ex)
{
this.Logger.LogError(ex, "Failed to parse CSV data from {url}", url);
this.Logger.LogError(ex, "Failed to parse CSV from {Url}. Headers: {Headers}",
url,
string.Join(", ", response?.Content?.Headers?.Select(h => $"{h.Key}:{string.Join(",", h.Value)}") ?? []));
sectionData.Data = ex.GetAllMessages();
}
}
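Following up on the "add custom converter later" comment above: a minimal sketch (not in this PR) of a tolerant date converter for CsvHelper, assuming the rows are eventually read into a typed model rather than dynamic records (type converters are not applied when reading dynamic). `ReportRow` and the format list are assumptions for illustration.

```csharp
using System;
using System.Globalization;
using CsvHelper;
using CsvHelper.Configuration;
using CsvHelper.TypeConversion;

// Sketch: try several date layouts before falling back to the default converter behaviour.
public class FlexibleDateConverter : DefaultTypeConverter
{
    private static readonly string[] Formats = { "dd/MM/yyyy", "MM/dd/yyyy", "dd/MM/yy", "yyyy-MM-dd" };

    public override object? ConvertFromString(string? text, IReaderRow row, MemberMapData memberMapData)
    {
        if (DateTime.TryParseExact(text, Formats, CultureInfo.InvariantCulture, DateTimeStyles.None, out var date))
            return date;
        return base.ConvertFromString(text, row, memberMapData);
    }
}

// Usage (illustrative): register the converter, then read into a typed model.
// csv.Context.TypeConverterCache.AddConverter<DateTime>(new FlexibleDateConverter());
// var rows = csv.GetRecords<ReportRow>().ToList();
```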