diff --git a/.claude/settings.local.json b/.claude/settings.local.json index 580a43ba2..dfd9f1beb 100644 --- a/.claude/settings.local.json +++ b/.claude/settings.local.json @@ -44,7 +44,8 @@ "Bash(python:*)", "Bash(python3:*)", "Bash(test:*)", - "Bash(Select-Object -Last 20)" + "Bash(Select-Object -Last 20)", + "Bash(git mv:*)" ], "deny": [] } diff --git a/Directory.Packages.props b/Directory.Packages.props index f0b889ee9..2ab4e7b80 100644 --- a/Directory.Packages.props +++ b/Directory.Packages.props @@ -36,6 +36,7 @@ + @@ -142,6 +143,7 @@ + diff --git a/modules/Insurance/Files/Microsoft/2026/Microsoft.xlsx b/modules/Insurance/Files/Microsoft/2026/Microsoft.xlsx index 33fd6cc1e..ab1b522fe 100644 Binary files a/modules/Insurance/Files/Microsoft/2026/Microsoft.xlsx and b/modules/Insurance/Files/Microsoft/2026/Microsoft.xlsx differ diff --git a/modules/Insurance/Files/Microsoft/2026/Slip.md b/modules/Insurance/Files/Microsoft/2026/Slip.md index fdb61dab4..dc26a1d3d 100644 --- a/modules/Insurance/Files/Microsoft/2026/Slip.md +++ b/modules/Insurance/Files/Microsoft/2026/Slip.md @@ -14,69 +14,78 @@ **Address:** One Microsoft Way, Redmond, WA 98052, United States ## Period of Insurance -**From:** 1 January 2026 -**To:** 31 December 2026 +**From:** 1 January 2026 +**To:** 31 December 2026 **Local Standard Time at the address of the Insured** --- +## Reinsurance Terms + +**Estimated Premium Income (EPI):** USD 200,000,000 +**Brokerage:** 10% + +--- + ## Coverage Sections ### 1. Fire Damage - **Layer 1:** - - Attachment Point: USD 0 - - Limit per Occurrence: USD 100,000,000 - - **Annual Aggregate Limit:** USD 300,000,000 - - **Annual Aggregate Deductible:** USD 25,000,000 (applies to Fire only) + - Deductible per Occurrence: USD 5,000,000 + - Limit per Occurrence: USD 100,000,000 + - **Annual Aggregate Deductible:** USD 25,000,000 + - **Annual Aggregate Limit:** USD 300,000,000 - **Layer 2:** - - Attachment Point: USD 100,000,000 - - Limit per Occurrence: USD 150,000,000 - - **Annual Aggregate Limit:** USD 450,000,000 + - Attachment Point: USD 105,000,000 + - Limit per Occurrence: USD 145,000,000 + - **Annual Aggregate Limit:** USD 435,000,000 - **Layer 3:** - - Attachment Point: USD 250,000,000 - - Limit per Occurrence: USD 250,000,000 - - **Annual Aggregate Limit:** USD 600,000,000 + - Attachment Point: USD 250,000,000 + - Limit per Occurrence: USD 250,000,000 + - **Annual Aggregate Limit:** USD 750,000,000 --- -### 2. Natural Catastrophe (Earthquake, Flood, Storm) +### 2. Natural Catastrophe (Windstorm, Earthquake) - **Layer 1:** - - Attachment Point: USD 0 - - Limit per Occurrence: USD 75,000,000 - - **Annual Aggregate Limit:** USD 225,000,000 + - Deductible per Occurrence: USD 5,000,000 + - Limit per Occurrence: USD 100,000,000 + - **Annual Aggregate Deductible:** USD 25,000,000 + - **Annual Aggregate Limit:** USD 300,000,000 - **Layer 2:** - - Attachment Point: USD 75,000,000 - - Limit per Occurrence: USD 125,000,000 - - **Annual Aggregate Limit:** USD 375,000,000 + - Attachment Point: USD 105,000,000 + - Limit per Occurrence: USD 145,000,000 + - **Annual Aggregate Limit:** USD 435,000,000 - **Layer 3:** - - Attachment Point: USD 200,000,000 - - Limit per Occurrence: USD 200,000,000 - - **Annual Aggregate Limit:** USD 500,000,000 + - Attachment Point: USD 250,000,000 + - Limit per Occurrence: USD 250,000,000 + - **Annual Aggregate Limit:** USD 750,000,000 --- ### 3. 
Business Interruption - **Layer 1:** - - Attachment Point: USD 0 - - Limit per Occurrence: USD 50,000,000 - - **Annual Aggregate Limit:** USD 150,000,000 + - Deductible per Occurrence: USD 5,000,000 + - Limit per Occurrence: USD 100,000,000 + - **Annual Aggregate Deductible:** USD 25,000,000 + - **Annual Aggregate Limit:** USD 300,000,000 - **Layer 2:** - - Attachment Point: USD 50,000,000 - - Limit per Occurrence: USD 100,000,000 - - **Annual Aggregate Limit:** USD 300,000,000 + - Attachment Point: USD 105,000,000 + - Limit per Occurrence: USD 145,000,000 + - **Annual Aggregate Limit:** USD 435,000,000 - **Layer 3:** - - Attachment Point: USD 150,000,000 - - Limit per Occurrence: USD 150,000,000 - - **Annual Aggregate Limit:** USD 450,000,000 + - Attachment Point: USD 250,000,000 + - Limit per Occurrence: USD 250,000,000 + - **Annual Aggregate Limit:** USD 750,000,000 --- diff --git a/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAIExtensions.cs b/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAIExtensions.cs index 7a562ce36..9a42b4f16 100644 --- a/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAIExtensions.cs +++ b/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAIExtensions.cs @@ -14,6 +14,8 @@ public static class InsuranceAIExtensions public static IServiceCollection AddInsuranceAI(this IServiceCollection services) { services.AddSingleton(); + services.AddSingleton(); + services.AddSingleton(); return services; } diff --git a/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAgent.cs b/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAgent.cs index e6b406b3f..4d9d5feaa 100644 --- a/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAgent.cs +++ b/modules/Insurance/MeshWeaver.Insurance.AI/InsuranceAgent.cs @@ -12,8 +12,8 @@ namespace MeshWeaver.Insurance.AI; /// /// Main Insurance agent that provides access to insurance pricing data and collections. /// -[DefaultAgent] -public class InsuranceAgent(IMessageHub hub) : IInitializableAgent, IAgentWithPlugins, IAgentWithContext +[ExposedInNavigator] +public class InsuranceAgent(IMessageHub hub) : IInitializableAgent, IAgentWithPlugins, IAgentWithContext, IAgentWithDelegations { private Dictionary? typeDefinitionMap; private Dictionary? layoutAreaMap; @@ -23,34 +23,65 @@ public class InsuranceAgent(IMessageHub hub) : IInitializableAgent, IAgentWithPl public string Description => "Handles all questions and actions related to insurance pricings, property risks, and dimensions. " + "Provides access to pricing data, allows creation and management of pricings and property risks. " + - "Also manages submission documents and files for each pricing."; + "Also manages submission documents and files for each pricing. " + + "Can delegate to specialized import agents for processing risk data files and slip documents."; + + public IEnumerable Delegations + { + get + { + yield return new DelegationDescription( + nameof(RiskImportAgent), + "Delegate to RiskImportAgent when the user wants to import property risks from Excel files, " + + "or when working with risk data files (.xlsx, .xls) that contain property information like " + + "location, TSI (Total Sum Insured), address, country, currency, building values, etc. " + + "Common file names include: risks.xlsx, exposure.xlsx, property schedule, location schedule, etc." 
+ ); + + yield return new DelegationDescription( + nameof(SlipImportAgent), + "Delegate to SlipImportAgent when the user wants to import insurance slips from PDF documents, " + + "or when working with slip files (.pdf) that contain insurance submission information like " + + "insured details, coverage terms, premium information, reinsurance structure layers, limits, rates, etc. " + + "Common file names include: slip.pdf, submission.pdf, placement.pdf, quote.pdf, etc." + ); + } + } public string Instructions => $$$""" The agent is the InsuranceAgent, specialized in managing insurance pricings: + ## Content Collection Context + + IMPORTANT: The current context is set to pricing/{pricingId} where pricingId follows the format {company}-{uwy}. + - The submission files collection is named "Submissions-{pricingId}" + - All file paths are relative to the root (/) of this collection + - Example: For pricing "AXA-2024", the collection is "Submissions-AXA-2024" and files are at paths like "/slip.pdf", "/risks.xlsx" + ## Working with Submission Documents and Files CRITICAL: When users ask about submission files, documents, or content: - DO NOT call {{{nameof(DataPlugin.GetData)}}} for Pricing or any other data first - DO NOT try to verify the pricing exists before accessing files - - The SubmissionPlugin is already configured for the current pricing context - - Simply call the SubmissionPlugin functions directly - - Available SubmissionPlugin functions (all collectionName parameters are optional): - - {{{nameof(ContentCollectionPlugin.ListFiles)}}}() - List all files in the current pricing's submissions - - {{{nameof(ContentCollectionPlugin.ListFolders)}}}() - List all folders - - {{{nameof(ContentCollectionPlugin.ListCollectionItems)}}}() - List both files and folders - - {{{nameof(ContentCollectionPlugin.GetDocument)}}}(documentPath) - Get document content - - {{{nameof(ContentCollectionPlugin.SaveDocument)}}}(documentPath, content) - Save a document - - {{{nameof(ContentCollectionPlugin.DeleteFile)}}}(filePath) - Delete a file - - {{{nameof(ContentCollectionPlugin.CreateFolder)}}}(folderPath) - Create a folder - - {{{nameof(ContentCollectionPlugin.DeleteFolder)}}}(folderPath) - Delete a folder + - The ContentPlugin is already configured for the current pricing context + - Simply call the ContentPlugin functions directly + - All file paths should start with "/" (e.g., "/slip.pdf", "/risks.xlsx") + + Available ContentPlugin functions (all collectionName parameters are optional): + - {{{nameof(ContentPlugin.ListFiles)}}}() - List all files in the current pricing's submissions + - {{{nameof(ContentPlugin.ListFolders)}}}() - List all folders + - {{{nameof(ContentPlugin.ListCollectionItems)}}}() - List both files and folders + - {{{nameof(ContentPlugin.GetDocument)}}}(documentPath) - Get document content (use path like "/Slip.md") + - {{{nameof(ContentPlugin.SaveFile)}}}(documentPath, content) - Save a document + - {{{nameof(ContentPlugin.DeleteFile)}}}(filePath) - Delete a file + - {{{nameof(ContentPlugin.CreateFolder)}}}(folderPath) - Create a folder + - {{{nameof(ContentPlugin.DeleteFolder)}}}(folderPath) - Delete a folder Examples: - - User: "Show me the submission files" → You: Call {{{nameof(ContentCollectionPlugin.ListFiles)}}}() - - User: "What files are in the submissions?" 
→ You: Call {{{nameof(ContentCollectionPlugin.ListFiles)}}}() - - User: "Read the slip document" → You: Call {{{nameof(ContentCollectionPlugin.GetDocument)}}}("Slip.md") + - User: "Show me the submission files" → You: Call {{{nameof(ContentPlugin.ListFiles)}}}() + - User: "What files are in the submissions?" → You: Call {{{nameof(ContentPlugin.ListFiles)}}}() + - User: "Read the slip document" → You: Call {{{nameof(ContentPlugin.GetDocument)}}}("/Slip.md") ## Working with Pricing Data @@ -71,14 +102,14 @@ IEnumerable IAgentWithPlugins.GetPlugins(IAgentChat chat) yield return new DataPlugin(hub, chat, typeDefinitionMap).CreateKernelPlugin(); yield return new LayoutAreaPlugin(hub, chat, layoutAreaMap).CreateKernelPlugin(); - // Always provide ContentCollectionPlugin - it will use ContextToConfigMap to determine the collection - var submissionPluginConfig = CreateSubmissionPluginConfig(chat); - yield return new ContentCollectionPlugin(hub, submissionPluginConfig, chat).CreateKernelPlugin(); + // Always provide ContentPlugin - it will use ContextToConfigMap to determine the collection + var submissionPluginConfig = CreateSubmissionPluginConfig(); + yield return new ContentPlugin(hub, submissionPluginConfig, chat).CreateKernelPlugin(); } - private static ContentCollectionPluginConfig CreateSubmissionPluginConfig(IAgentChat chat) + private static ContentPluginConfig CreateSubmissionPluginConfig() { - return new ContentCollectionPluginConfig + return new ContentPluginConfig { Collections = [], ContextToConfigMap = context => @@ -118,7 +149,7 @@ async Task IInitializableAgent.InitializeAsync() var typesResponse = await hub.AwaitResponse( new GetDomainTypesRequest(), o => o.WithTarget(new PricingAddress("default"))); - typeDefinitionMap = typesResponse.Message.Types.Select(t => t with { Address = null }).ToDictionary(x => x.Name); + typeDefinitionMap = typesResponse?.Message?.Types?.Select(t => t with { Address = null }).ToDictionary(x => x.Name!); } catch { @@ -130,7 +161,7 @@ async Task IInitializableAgent.InitializeAsync() var layoutAreaResponse = await hub.AwaitResponse( new GetLayoutAreasRequest(), o => o.WithTarget(new PricingAddress("default"))); - layoutAreaMap = layoutAreaResponse.Message.Areas.ToDictionary(x => x.Area); + layoutAreaMap = layoutAreaResponse?.Message?.Areas?.ToDictionary(x => x.Area); } catch { diff --git a/modules/Insurance/MeshWeaver.Insurance.AI/MeshWeaver.Insurance.AI.csproj b/modules/Insurance/MeshWeaver.Insurance.AI/MeshWeaver.Insurance.AI.csproj index f5e01a264..292eb0d9d 100644 --- a/modules/Insurance/MeshWeaver.Insurance.AI/MeshWeaver.Insurance.AI.csproj +++ b/modules/Insurance/MeshWeaver.Insurance.AI/MeshWeaver.Insurance.AI.csproj @@ -14,4 +14,10 @@ + + + + + + diff --git a/modules/Insurance/MeshWeaver.Insurance.AI/RiskImportAgent.cs b/modules/Insurance/MeshWeaver.Insurance.AI/RiskImportAgent.cs new file mode 100644 index 000000000..fbd697eec --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.AI/RiskImportAgent.cs @@ -0,0 +1,198 @@ +using System.Text.Json.Nodes; +using MeshWeaver.AI; +using MeshWeaver.AI.Plugins; +using MeshWeaver.ContentCollections; +using MeshWeaver.Data; +using MeshWeaver.Insurance.Domain; +using MeshWeaver.Messaging; +using Microsoft.SemanticKernel; + +namespace MeshWeaver.Insurance.AI; + +public class RiskImportAgent(IMessageHub hub) : IInitializableAgent, IAgentWithPlugins, IAgentWithContext +{ + private Dictionary? typeDefinitionMap; + private string? propertyRiskSchema; + private string? 
excelImportConfigSchema; + + public string Name => nameof(RiskImportAgent); + + public string Description => "Runs risk imports for a pricing. Creates mappings and imports property risk data from Excel files."; + + public string Instructions + { + get + { + var baseText = + $$$""" + You control risk imports for a specific pricing. Use the provided tool: + + ## Content Collection Context + + IMPORTANT: The current context is set to pricing/{pricingId} where pricingId follows the format {company}-{uwy}. + - The submission files collection is named "Submissions-{pricingId}" + - All file paths are relative to the root (/) of this collection + - When listing files, you'll see paths like "/risks.xlsx", "/exposure.xlsx" + - When accessing files, use paths starting with "/" (e.g., "/risks.xlsx") + + # Importing Risks + When the user asks you to import risks, you should: + 1) Get the existing risk mapping configuration for the specified file using DataPlugin's GetData function with type="ExcelImportConfiguration" and entityId=filename. + 2) If no import configuration was returned in 1, get a sample of the worksheet using ContentPlugin's GetContent function with the collection name "Submissions-{pricingId}", the filename, and numberOfRows=20. Extract the table start row as well as the mapping as in the schema provided below. + Consider any input from the user to modify the configuration. Ensure the JSON includes "name" field set to the filename. Use DataPlugin's UpdateData function with type="ExcelImportConfiguration" to save the configuration. + 3) Call ContentPlugin's Import function with path=filename, collection="Submissions-{pricingId}", address=PricingAddress, and configuration=the JSON configuration you created or retrieved. + + # Updating Risk Import Configuration + When the user asks you to update the risk import configuration, you should: + 1) Get the existing risk mapping configuration for the specified file using DataPlugin's GetData function with type="ExcelImportConfiguration" and entityId=filename. + 2) Modify it according to the user's input, ensuring it follows the schema provided below. + 3) Upload the new configuration using DataPlugin's UpdateData function with type="ExcelImportConfiguration" and the updated JSON (ensure "name" field is set to filename). + + # Automatic Risk Import Configuration + - Use ContentPlugin's GetContent with numberOfRows=20 to get a sample of the file. It returns a markdown table with: + - First column: Row numbers (1-based) + - Remaining columns: Labeled A, B, C, D, etc. (Excel column letters) + - Empty cells appear as empty values in the table (not "null") + - Column letters start with A (first data column after Row number). Empty columns are still shown with their letters. + - Row numbers are 1-based. When specifying tableStartRow, use the row number from the Row column (e.g., if headers are on row 1 and data starts on row 2, set tableStartRow=2). + - Look for the header row in the markdown table and map column letters (A, B, C, etc.) to PropertyRisk properties. + - Map to the properties of the PropertyRisk type (see schema below). Only these names are allowed for mappings. Read the descriptions contained in the schema to get guidance on which field to map where. + - IMPORTANT: Each TargetProperty should appear ONLY ONCE in the configuration. If a property maps to multiple columns, use the SourceColumns list (e.g., "sourceColumns": ["A", "B"]) instead of creating multiple entries with the same TargetProperty. 
+ - IMPORTANT: Each column (A, B, C, etc.) should be mapped ONLY ONCE across all mappings. Do not include the same column in multiple targetProperty mappings or sourceColumns lists. + - Columns you cannot map ==> ignore (don't include them in the configuration). + - Empty columns at the beginning still get column letters (A, B, C...). You can see which columns are empty by looking at the markdown table. + + # TsiContent Mapping + - MOST COLUMNS will be mapped to the 'tsiContent' property (Total Sum Insured content breakdown). + - Common column headers for tsiContent include: Stock, Fixtures, Fittings, IT Equipment, Land, Leasehold Improv., Leasehold Improvements, Plant & Equipment, Tooling, Workshop Equipment, Rent Forecast. + - These columns typically represent different categories of insured content and should be mapped to tsiContent using the SourceColumns list. + - Example: If you see columns for "Stock", "Fixtures", "IT Equipment", map them as: "targetProperty": "tsiContent", "sourceColumns": ["E", "F", "G"] + + Notes: + - The agent defaults to ignoring rows where Id or Address is missing (adds "Id == null" and "Address == null" to ignoreRowExpressions). + - Provide only the file name (e.g., "risks.xlsx"); it is resolved relative to the pricing's content collection. + + IMPORTANT OUTPUT RULES: + - do not output JSON to the user. + - When the user asks you to import, your job is not finished by creating the risk import configuration. You will actually have to call ContentPlugin's Import function. + """; + + if (excelImportConfigSchema is not null) + baseText += $"\n\n# Schema for ExcelImportConfiguration\n{excelImportConfigSchema}"; + if (propertyRiskSchema is not null) + baseText += $"\n\n# Schema for PropertyRisk (Target for Mapping)\n{propertyRiskSchema}"; + + return baseText; + } + } + + public bool Matches(AgentContext? 
context) + { + return context?.Address?.Type == PricingAddress.TypeName; + } + + IEnumerable IAgentWithPlugins.GetPlugins(IAgentChat chat) + { + yield return new DataPlugin(hub, chat, typeDefinitionMap).CreateKernelPlugin(); + + // Add ContentPlugin for submissions and import functionality + var submissionPluginConfig = CreateSubmissionPluginConfig(); + yield return new ContentPlugin(hub, submissionPluginConfig, chat).CreateKernelPlugin(); + } + + private static ContentPluginConfig CreateSubmissionPluginConfig() + { + return new ContentPluginConfig + { + Collections = [], + ContextToConfigMap = context => + { + // Only handle pricing contexts + if (context?.Address?.Type != PricingAddress.TypeName) + return null!; + + var pricingId = context.Address.Id; + + // Parse pricingId in format {company}-{uwy} + var parts = pricingId.Split('-'); + if (parts.Length != 2) + return null!; + + // Use Hub-based collection config pointing to the pricing address + // This allows the ContentPlugin to query the pricing hub for the actual collection configuration + return new ContentCollectionConfig + { + SourceType = HubStreamProviderFactory.SourceType, + Name = $"Submissions-{pricingId}", + Address = context.Address + }; + } + }; + } + + async Task IInitializableAgent.InitializeAsync() + { + try + { + var typesResponse = await hub.AwaitResponse( + new GetDomainTypesRequest(), + o => o.WithTarget(new PricingAddress("default"))); + var types = typesResponse?.Message?.Types; + typeDefinitionMap = types?.Select(t => t with { Address = null }).ToDictionary(x => x.Name!); + } + catch + { + typeDefinitionMap = null; + } + + try + { + var resp = await hub.AwaitResponse( + new GetSchemaRequest("ExcelImportConfiguration"), + o => o.WithTarget(new PricingAddress("default"))); + + // Hard-code TypeName to "PropertyRisk" in the schema + var schema = resp?.Message?.Schema; + if (!string.IsNullOrEmpty(schema)) + { + // Parse the schema as JSON to modify it + try + { + var schemaJson = JsonNode.Parse(schema) as JsonObject; + if (schemaJson?["anyOf"] is JsonArray array && array.First() is JsonObject obj && obj["properties"] is JsonObject properties) + { + // Set TypeName property to have a constant value of "PropertyRisk" + properties["typeName"] = new JsonObject + { + ["type"] = "string", + ["const"] = "PropertyRisk", + ["description"] = "The fully qualified type name of the entity to import. This is hard-coded to 'PropertyRisk' for risk imports." 
+ }; + schema = schemaJson.ToJsonString(); + } + } + catch + { + // If parsing fails, use original schema + } + } + excelImportConfigSchema = schema; + } + catch + { + excelImportConfigSchema = null; + } + + try + { + var resp = await hub.AwaitResponse( + new GetSchemaRequest(nameof(PropertyRisk)), + o => o.WithTarget(new PricingAddress("default"))); + propertyRiskSchema = resp?.Message?.Schema; + } + catch + { + propertyRiskSchema = null; + } + } +} diff --git a/modules/Insurance/MeshWeaver.Insurance.AI/SlipImportAgent.cs b/modules/Insurance/MeshWeaver.Insurance.AI/SlipImportAgent.cs new file mode 100644 index 000000000..e448a6371 --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.AI/SlipImportAgent.cs @@ -0,0 +1,232 @@ +using MeshWeaver.AI; +using MeshWeaver.AI.Plugins; +using MeshWeaver.ContentCollections; +using MeshWeaver.Data; +using MeshWeaver.Insurance.Domain; +using MeshWeaver.Messaging; +using Microsoft.SemanticKernel; + +namespace MeshWeaver.Insurance.AI; + +public class SlipImportAgent(IMessageHub hub) : IInitializableAgent, IAgentWithPlugins, IAgentWithContext +{ + private Dictionary? typeDefinitionMap; + private string? pricingSchema; + private string? acceptanceSchema; + private string? sectionSchema; + + public string Name => nameof(SlipImportAgent); + + public string Description => "Imports insurance slip documents from PDF or Markdown files and structures them into Pricing and ReinsuranceAcceptance data models using LLM-based extraction."; + + public string Instructions + { + get + { + var baseText = + $$$""" + You are a slip import agent that processes insurance submission slip documents in PDF or Markdown format. + Your task is to extract structured data and map it to the insurance domain models using the provided schemas. + + ## Content Collection Context + + IMPORTANT: The current context is set to pricing/{pricingId} where pricingId follows the format {company}-{uwy}. + - The submission files collection is automatically named "Submissions-{pricingId}" + - Files are stored at the root level of this collection + - When listing files, you'll see filenames like "Slip.pdf", "Slip.md", etc. 
+ - When accessing files with ExtractCompleteText, use just the filename (e.g., "Slip.pdf" or "Slip.md") + + # Importing Slips + When the user asks you to import a slip: + 1) First, use ContentCollectionPlugin's ListFiles() to see available files in the submissions collection + 2) Use ContentPlugin's GetContent function to extract the document content from PDF or Markdown files + - Pass collectionName="Submissions-{pricingId}" and filePath=filename (e.g., "Slip.pdf" or "Slip.md") + - For PDFs, this will extract all pages of text + 3) Review the extracted text and identify data that matches the domain schemas + 4) Create JSON objects for each entity type following the schemas below + 5) Import the data using DataPlugin's UpdateData function: + - First, retrieve existing Pricing data using DataPlugin's GetData with type="Pricing" and entityId=pricingId + - Merge new pricing fields with existing data and call DataPlugin's UpdateData with type="Pricing" + - For each ReinsuranceAcceptance (layer), create JSON and call DataPlugin's UpdateData with type="ReinsuranceAcceptance" + - For each ReinsuranceSection (coverage within layer), create JSON and call DataPlugin's UpdateData with type="ReinsuranceSection" + 6) Provide feedback on what data was successfully imported or if any issues were encountered + + # Data Mapping Guidelines + Based on the extracted document text, create JSON objects that match the schemas provided below: + - **Pricing**: Basic pricing information including: + - Insured name (e.g., "Microsoft Corporation") + - Primary insurance company (labeled as "Primary Insurer" or similar in slip header) - populate the PrimaryInsurance field + - Broker name (labeled as "Broker" in slip header) - populate the BrokerName field + - Dates (inception, expiration), premium, country, legal entity + - **ReinsuranceAcceptance**: Represents a reinsurance layer (Layer 1, Layer 2, Layer 3) with financial terms + - **ReinsuranceSection**: Represents a coverage type within a layer (Fire Damage, Natural Catastrophe, Business Interruption) + + # Structure Hierarchy + The data structure follows this hierarchy: + 1. **Pricing** (the main insurance program) + 2. **ReinsuranceAcceptance** (the layers: Layer 1, Layer 2, Layer 3, etc.) + 3. **ReinsuranceSection** (the coverage types within each layer: Fire Damage, Natural Catastrophe, Business Interruption, etc.) 
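To make the hierarchy concrete, here is a minimal editor's sketch (illustration only, not part of this patch): one layer and one of its coverage sections linked through `AcceptanceId`, using the Fire Damage / Layer 1 figures from the Microsoft slip earlier in this diff. The stub records only mirror the property names referenced in these instructions; the actual `ReinsuranceAcceptance` and `ReinsuranceSection` definitions live in MeshWeaver.Insurance.Domain and may differ.

```csharp
using System;

// Illustration only: stand-in records mirroring the property names used in the
// instructions above; the real domain records in MeshWeaver.Insurance.Domain may differ.
var layer1 = new AcceptanceStub(Id: "Layer1", Name: "Layer 1", Epi: 200_000_000m, Brokerage: 0.10m);

var layer1Fire = new SectionStub(
    Id: "Layer1-Fire",
    AcceptanceId: layer1.Id,        // links the section to its parent layer
    LineOfBusiness: "Fire Damage",
    Name: "Fire Damage - Layer 1",
    Attach: 5_000_000m,             // per-occurrence deductible / attachment point
    Limit: 100_000_000m,            // per-occurrence limit
    AggAttach: 25_000_000m,         // annual aggregate deductible (AAD)
    AggLimit: 300_000_000m);        // annual aggregate limit (AAL)

Console.WriteLine($"{layer1.Name}: {layer1Fire.Name} attaches at {layer1Fire.Attach:N0}");

record AcceptanceStub(string Id, string Name, decimal Epi, decimal Brokerage);

record SectionStub(
    string Id,
    string AcceptanceId,
    string LineOfBusiness,
    string Name,
    decimal Attach,
    decimal Limit,
    decimal AggAttach,
    decimal AggLimit);
```

The SlipImportAgent itself would submit equivalent JSON through DataPlugin's UpdateData rather than constructing C# objects; the sketch only shows how the IDs tie the three levels together.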
+ + # Important Rules + - Only extract data that is explicitly present in the document text + - Use null or default values for missing data points + - Ensure all monetary values are properly formatted as numbers + - Convert percentages to decimal format (e.g., 25% → 0.25) + - Provide clear feedback on what data was successfully extracted + - If data is ambiguous or unclear, note it in your response + + # Creating ReinsuranceAcceptance Records (Layers) + - First, create ReinsuranceAcceptance records for each layer (Layer 1, Layer 2, Layer 3) + - Use IDs like "Layer1", "Layer2", "Layer3" + - Set the Name property to "Layer 1", "Layer 2", "Layer 3" + - Include financial terms like share, cession, rate, commission on the acceptance + - If there is a "Reinsurance Terms" section in the header with properties like EPI and Brokerage, apply these values to ALL ReinsuranceAcceptance records (all layers get the same EPI and Brokerage) + - Convert percentage values to decimals (e.g., 10% → 0.10, 100% → 1.0) + + # Creating ReinsuranceSection Records (Coverage Types) + - Then, create ReinsuranceSection records for each coverage type within each layer + - Use IDs like "Layer1-Fire", "Layer1-NatCat", "Layer1-BI", "Layer2-Fire", etc. + - Set the AcceptanceId to link the section to its parent layer (e.g., "Layer1") + - Set the LineOfBusiness to the coverage type (e.g., "Fire Damage", "Natural Catastrophe", "Business Interruption") + - Set the Name to a descriptive name (e.g., "Fire Damage - Layer 1") + - Include the attachment point (Attach), limit, aggregate deductible (AggAttach), and aggregate limit (AggLimit) + + # Example from a Slip + If the slip shows: + - Fire Damage → Layer 1: Attach 5M, Limit 100M, AAD 25M, AAL 300M + - Fire Damage → Layer 2: Attach 100M, Limit 150M, AAL 450M + - Natural Catastrophe → Layer 1: Attach 10M, Limit 75M, AAD 30M, AAL 225M + + Create: + 1. ReinsuranceAcceptance: Id="Layer1", Name="Layer 1" + 2. ReinsuranceAcceptance: Id="Layer2", Name="Layer 2" + 3. ReinsuranceSection: Id="Layer1-Fire", AcceptanceId="Layer1", LineOfBusiness="Fire Damage", Attach=5000000, Limit=100000000, AggAttach=25000000, AggLimit=300000000 + 4. ReinsuranceSection: Id="Layer2-Fire", AcceptanceId="Layer2", LineOfBusiness="Fire Damage", Attach=100000000, Limit=150000000, AggLimit=450000000 + 5. ReinsuranceSection: Id="Layer1-NatCat", AcceptanceId="Layer1", LineOfBusiness="Natural Catastrophe", Attach=10000000, Limit=75000000, AggAttach=30000000, AggLimit=225000000 + + # Document Section Processing + Look for common sections in insurance slips: + - **Header section**: Insured name, Primary Insurer, Broker, dates + - **Insured information**: Name, location, industry + - **Coverage details**: Inception/expiration dates, policy terms + - **Premium and financial information**: Premium amounts, currency + - **Reinsurance terms section**: EPI (Estimated Premium Income), Brokerage percentage, Commission, Taxes + - **Layer structures**: Layer 1, Layer 2, Layer 3 with limits, attachments, rates + - **Coverage types within layers**: Fire Damage, Natural Catastrophe, Business Interruption, etc. + + Notes: + - When listing files, you may see paths with "/" prefix (e.g., "/Slip.pdf", "/Slip.md") + - When calling ContentPlugin's GetContent, use collectionName="Submissions-{pricingId}" and provide the filename + - Both PDF and Markdown (.md) files are supported + - When updating data, ensure each JSON object has the correct $type field and required ID fields (id, pricingId, acceptanceId, etc.) 
+ - Remove null-valued properties from JSON before calling UpdateData + """; + + if (pricingSchema is not null) + baseText += $"\n\n# Pricing Schema\n```json\n{pricingSchema}\n```"; + if (acceptanceSchema is not null) + baseText += $"\n\n# ReinsuranceAcceptance Schema\n```json\n{acceptanceSchema}\n```"; + if (sectionSchema is not null) + baseText += $"\n\n# ReinsuranceSection Schema\n```json\n{sectionSchema}\n```"; + + return baseText; + } + } + + IEnumerable IAgentWithPlugins.GetPlugins(IAgentChat chat) + { + yield return new DataPlugin(hub, chat, typeDefinitionMap).CreateKernelPlugin(); + + // Add ContentPlugin for submissions and file reading functionality + var submissionPluginConfig = CreateSubmissionPluginConfig(); + yield return new ContentPlugin(hub, submissionPluginConfig, chat).CreateKernelPlugin(); + } + + private static ContentPluginConfig CreateSubmissionPluginConfig() + { + return new ContentPluginConfig + { + Collections = [], + ContextToConfigMap = context => + { + // Only handle pricing contexts + if (context?.Address?.Type != PricingAddress.TypeName) + return null!; + + var pricingId = context.Address.Id; + + // Parse pricingId in format {company}-{uwy} + var parts = pricingId.Split('-'); + if (parts.Length != 2) + return null!; + + // Use Hub-based collection config pointing to the pricing address + // This allows the ContentPlugin to query the pricing hub for the actual collection configuration + return new ContentCollectionConfig + { + SourceType = HubStreamProviderFactory.SourceType, + Name = $"Submissions-{pricingId}", + Address = context.Address + }; + } + }; + } + + async Task IInitializableAgent.InitializeAsync() + { + var pricingAddress = new PricingAddress("default"); + + try + { + var typesResponse = await hub.AwaitResponse( + new GetDomainTypesRequest(), + o => o.WithTarget(pricingAddress)); + typeDefinitionMap = typesResponse?.Message?.Types?.Select(t => t with { Address = null }).ToDictionary(x => x.Name!); + } + catch + { + typeDefinitionMap = null; + } + + try + { + var resp = await hub.AwaitResponse( + new GetSchemaRequest(nameof(Pricing)), + o => o.WithTarget(pricingAddress)); + pricingSchema = resp?.Message?.Schema; + } + catch + { + pricingSchema = null; + } + + try + { + var resp = await hub.AwaitResponse( + new GetSchemaRequest(nameof(ReinsuranceAcceptance)), + o => o.WithTarget(pricingAddress)); + acceptanceSchema = resp?.Message?.Schema; + } + catch + { + acceptanceSchema = null; + } + + try + { + var resp = await hub.AwaitResponse( + new GetSchemaRequest(nameof(ReinsuranceSection)), + o => o.WithTarget(pricingAddress)); + sectionSchema = resp?.Message?.Schema; + } + catch + { + sectionSchema = null; + } + } + + public bool Matches(AgentContext? context) + { + return context?.Address?.Type == PricingAddress.TypeName; + } +} diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/Dimensions.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/Dimensions.cs index 9aaa5bd69..55adaea99 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/Dimensions.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/Dimensions.cs @@ -5,6 +5,7 @@ namespace MeshWeaver.Insurance.Domain; /// /// Line of business dimension for insurance classification. /// +[Display(GroupName = "Reference Data")] public record LineOfBusiness { /// @@ -27,6 +28,7 @@ public record LineOfBusiness /// /// Country dimension for geographic classification. 
/// +[Display(GroupName = "Reference Data")] public record Country { /// @@ -54,6 +56,7 @@ public record Country /// /// Legal entity dimension for organizational structure. /// +[Display(GroupName = "Reference Data")] public record LegalEntity { /// @@ -81,6 +84,7 @@ public record LegalEntity /// /// Currency dimension for monetary values. /// +[Display(GroupName = "Reference Data")] public record Currency { /// diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/GeocodingRequest.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/GeocodingRequest.cs new file mode 100644 index 000000000..f12dcd3c0 --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/GeocodingRequest.cs @@ -0,0 +1,34 @@ +using MeshWeaver.Messaging; + +namespace MeshWeaver.Insurance.Domain; + +/// +/// Request to geocode property risks. +/// +public record GeocodingRequest : IRequest; + +/// +/// Response from geocoding operation. +/// +public record GeocodingResponse +{ + /// + /// Whether the geocoding operation was successful. + /// + public required bool Success { get; init; } + + /// + /// Number of risks successfully geocoded. + /// + public int GeocodedCount { get; init; } + + /// + /// Error message if geocoding failed. + /// + public string? Error { get; init; } + + /// + /// List of updated risks with geocoded locations. + /// + public IReadOnlyList? UpdatedRisks { get; init; } +} diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/InsuranceApplicationExtensions.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/InsuranceApplicationExtensions.cs index f809ed66b..32c75887a 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/InsuranceApplicationExtensions.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/InsuranceApplicationExtensions.cs @@ -1,8 +1,11 @@ +using System.Reactive.Linq; using MeshWeaver.ContentCollections; using MeshWeaver.Data; +using MeshWeaver.Import; using MeshWeaver.Import.Configuration; using MeshWeaver.Insurance.Domain.Services; using MeshWeaver.Layout; +using MeshWeaver.Layout.Domain; using MeshWeaver.Messaging; using Microsoft.Extensions.Configuration; using Microsoft.Extensions.DependencyInjection; @@ -15,12 +18,23 @@ namespace MeshWeaver.Insurance.Domain; /// public static class InsuranceApplicationExtensions { + /// + /// Adds Insurance domain services to the service collection. + /// + private static IServiceCollection AddInsuranceDomainServices(this IServiceCollection services) + { + // Register pricing service + services.AddSingleton(); + + return services; + } + /// /// Configures the root Insurance application hub with dimension data and pricing catalog. 
/// public static MessageHubConfiguration ConfigureInsuranceApplication(this MessageHubConfiguration configuration) => configuration - .WithTypes(typeof(PricingAddress)) + .WithTypes(typeof(PricingAddress), typeof(ImportConfiguration), typeof(ExcelImportConfiguration), typeof(ReinsuranceAcceptance), typeof(ReinsuranceSection), typeof(ImportRequest), typeof(CollectionSource), typeof(GeocodingRequest), typeof(GeocodingResponse)) .AddData(data => { var svc = data.Hub.ServiceProvider.GetRequiredService(); @@ -43,17 +57,12 @@ public static MessageHubConfiguration ConfigureInsuranceApplication(this Message public static MessageHubConfiguration ConfigureSinglePricingApplication(this MessageHubConfiguration configuration) { return configuration - .WithTypes(typeof(InsuranceApplicationExtensions)) + .WithServices(AddInsuranceDomainServices) .AddContentCollection(sp => { var hub = sp.GetRequiredService(); var addressId = hub.Address.Id; - var configuration = sp.GetRequiredService(); - - // Get the global Submissions configuration from appsettings - var globalConfig = configuration.GetSection("Submissions").Get(); - if (globalConfig == null) - throw new InvalidOperationException("Submissions collection not found in configuration"); + var conf = sp.GetRequiredService(); // Parse addressId in format {company}-{uwy} var parts = addressId.Split('-'); @@ -64,11 +73,28 @@ public static MessageHubConfiguration ConfigureSinglePricingApplication(this Mes var uwy = parts[1]; var subPath = $"{company}/{uwy}"; + // Get the global Submissions configuration from appsettings, or create a default one + var globalConfig = conf.GetSection("Submissions").Get(); + + // If no configuration exists, create a default FileSystem-based collection + if (globalConfig == null) + { + // Default to a "Submissions" folder in the current directory + var defaultBasePath = Path.Combine(Directory.GetCurrentDirectory(), "Submissions"); + globalConfig = new ContentCollectionConfig + { + SourceType = FileSystemStreamProvider.SourceType, + Name = "Submissions", + BasePath = defaultBasePath, + DisplayName = "Submission Files" + }; + } + // Create localized config with modified name and basepath var localizedName = GetLocalizedCollectionName("Submissions", addressId); var fullPath = string.IsNullOrEmpty(subPath) ? globalConfig.BasePath ?? "" - : System.IO.Path.Combine(globalConfig.BasePath ?? "", subPath); + : Path.Combine(globalConfig.BasePath ?? 
"", subPath); return globalConfig with { @@ -89,6 +115,8 @@ public static MessageHubConfiguration ConfigureSinglePricingApplication(this Mes })) .WithType(t => t.WithInitialData(async ct => (IEnumerable)await svc.GetRisksAsync(pricingId, ct))) + .WithType(t => t.WithInitialData(_ => Task.FromResult(Enumerable.Empty()))) + .WithType(t => t.WithInitialData(_ => Task.FromResult(Enumerable.Empty()))) .WithType(t => t.WithInitialData(async ct => await svc.GetImportConfigurationsAsync(pricingId).ToArrayAsync(ct))) ); @@ -102,8 +130,85 @@ await svc.GetImportConfigurationsAsync(pricingId).ToArrayAsync(ct))) LayoutAreas.PropertyRisksLayoutArea.PropertyRisks) .WithView(nameof(LayoutAreas.RiskMapLayoutArea.RiskMap), LayoutAreas.RiskMapLayoutArea.RiskMap) + .WithView(nameof(LayoutAreas.ReinsuranceAcceptanceLayoutArea.Structure), + LayoutAreas.ReinsuranceAcceptanceLayoutArea.Structure) .WithView(nameof(LayoutAreas.ImportConfigsLayoutArea.ImportConfigs), LayoutAreas.ImportConfigsLayoutArea.ImportConfigs) - ); + .AddDomainViews() + ) + .AddImport() + .WithHandler(HandleGeocodingRequest); + } + + private static async Task HandleGeocodingRequest( + IMessageHub hub, + IMessageDelivery request, + CancellationToken ct) + { + try + { + // Get the geocoding service + var geocodingService = hub.ServiceProvider.GetRequiredService(); + + // Get the current property risks from the workspace + var workspace = hub.GetWorkspace(); + var riskStream = workspace.GetStream(); + if (riskStream == null) + { + var errorResponse = new GeocodingResponse + { + Success = false, + GeocodedCount = 0, + Error = "No property risks found in workspace" + }; + hub.Post(errorResponse, o => o.ResponseFor(request)); + return request.Processed(); + } + + var risks = await riskStream.FirstAsync(); + var riskList = risks?.ToList() ?? 
new List(); + + if (!riskList.Any()) + { + var errorResponse = new GeocodingResponse + { + Success = false, + GeocodedCount = 0, + Error = "No property risks available to geocode" + }; + hub.Post(errorResponse, o => o.ResponseFor(request)); + return request.Processed(); + } + + // Geocode the risks + var geocodingResponse = await geocodingService.GeocodeRisksAsync(riskList, ct); + + // If successful and we have updated risks, update the workspace + if (geocodingResponse.Success && geocodingResponse.UpdatedRisks != null && geocodingResponse.UpdatedRisks.Any()) + { + // Update the workspace with the geocoded risks + var dataChangeRequest = new DataChangeRequest + { + Updates = geocodingResponse.UpdatedRisks.ToList() + }; + + hub.Post(dataChangeRequest, o => o.WithTarget(hub.Address)); + } + + // Post the response + hub.Post(geocodingResponse, o => o.ResponseFor(request)); + } + catch (Exception ex) + { + var errorResponse = new GeocodingResponse + { + Success = false, + GeocodedCount = 0, + Error = $"Geocoding failed: {ex.Message}" + }; + hub.Post(errorResponse, o => o.ResponseFor(request)); + } + + return request.Processed(); } } diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/ImportConfigsLayoutArea.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/ImportConfigsLayoutArea.cs index cecf8a2a9..4f0240afa 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/ImportConfigsLayoutArea.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/ImportConfigsLayoutArea.cs @@ -1,4 +1,4 @@ -using System.Reactive.Linq; +using System.Reactive.Linq; using System.Text.Json; using MeshWeaver.Import.Configuration; using MeshWeaver.Insurance.Domain.LayoutAreas.Shared; @@ -42,51 +42,9 @@ public static IObservable ImportConfigs(LayoutAreaHost host, Renderin foreach (var cfg in list.OrderBy(x => x.Name)) { parts.Add($"\n## {cfg.Name}"); - parts.Add($"\n**Worksheet:** {cfg.WorksheetName}"); - parts.Add($"**Data Start Row:** {cfg.DataStartRow}"); - - if (cfg.Mappings.Any()) - { - parts.Add("\n### Column Mappings"); - parts.Add("\n| Target Property | Mapping Kind | Source Columns | Constant Value |"); - parts.Add("|----------------|--------------|----------------|----------------|"); - foreach (var mapping in cfg.Mappings) - { - var sourceColumns = string.Join(", ", mapping.SourceColumns); - var constantValue = mapping.ConstantValue?.ToString() ?? ""; - parts.Add($"| {mapping.TargetProperty} | {mapping.Kind} | {sourceColumns} | {constantValue} |"); - } - } - - if (cfg.Allocations.Any()) - { - parts.Add("\n### Allocations"); - parts.Add("\n| Target Property | Total Cell | Weight Columns | Currency Property |"); - parts.Add("|----------------|------------|----------------|-------------------|"); - foreach (var alloc in cfg.Allocations) - { - var weightColumns = string.Join(", ", alloc.WeightColumns); - parts.Add($"| {alloc.TargetProperty} | {alloc.TotalCell} | {weightColumns} | {alloc.CurrencyProperty ?? ""} |"); - } - } - - if (cfg.TotalRowMarkers.Any()) - { - parts.Add($"\n**Total Row Markers:** {string.Join(", ", cfg.TotalRowMarkers)}"); - } - - if (cfg.IgnoreRowExpressions.Any()) - { - parts.Add("\n**Ignore Row Expressions:**"); - foreach (var expr in cfg.IgnoreRowExpressions) - { - parts.Add($"- `{expr}`"); - } - } - // Add full JSON configuration in a collapsible section var json = JsonSerializer.Serialize(cfg, options); - parts.Add($"\n
\nView Full JSON Configuration\n\n```json\n{json}\n```\n
\n"); + parts.Add($"```json\n{json}\n```"); } var md = string.Join("\n", parts); diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingCatalogLayoutArea.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingCatalogLayoutArea.cs index e6cb6221b..7223810f7 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingCatalogLayoutArea.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingCatalogLayoutArea.cs @@ -1,4 +1,4 @@ -using System.Reactive.Linq; +using System.Reactive.Linq; using MeshWeaver.Layout; using MeshWeaver.Layout.Composition; @@ -28,23 +28,20 @@ private static string RenderPricingTable(IReadOnlyCollection pricings) var lines = new List { - "# Insurance Pricing Catalog", + "# Insurance Pricing Catalog | [Data Model](/pricing/default/DataModel)", "", - "| Insured | Line of Business | Country | Legal Entity | Inception | Expiration | Premium | Status |", - "|---------|------------------|---------|--------------|-----------|------------|---------|--------|" + "| Insured | Line of Business | Country | Legal Entity | Inception | Expiration | Status |", + "|---------|------------------|---------|--------------|-----------|------------|--------|" }; lines.AddRange(pricings - .OrderByDescending(p => p.InceptionDate) + .OrderByDescending(p => p.InceptionDate ?? DateTime.MaxValue) .Select(p => { var link = $"[{p.InsuredName}](/pricing/{p.Id}/Overview)"; var inception = p.InceptionDate?.ToString("yyyy-MM-dd") ?? "-"; var expiration = p.ExpirationDate?.ToString("yyyy-MM-dd") ?? "-"; - var premium = p.Premium.HasValue && p.Currency != null - ? $"{p.Currency} {p.Premium:N0}" - : "-"; - return $"| {link} | {p.LineOfBusiness ?? "-"} | {p.Country ?? "-"} | {p.LegalEntity ?? "-"} | {inception} | {expiration} | {premium} | {p.Status ?? "-"} |"; + return $"| {link} | {p.LineOfBusiness ?? "-"} | {p.Country ?? "-"} | {p.LegalEntity ?? "-"} | {inception} | {expiration} | {p.Status ?? "-"} |"; })); return string.Join("\n", lines); diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingOverviewLayoutArea.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingOverviewLayoutArea.cs index 5987db795..6925fa2fb 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingOverviewLayoutArea.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PricingOverviewLayoutArea.cs @@ -1,4 +1,4 @@ -using System.Reactive.Linq; +using System.Reactive.Linq; using MeshWeaver.Insurance.Domain.LayoutAreas.Shared; using MeshWeaver.Layout; using MeshWeaver.Layout.Composition; @@ -56,12 +56,9 @@ private static string RenderPricingOverview(Pricing pricing) $"- **Country:** {pricing.Country ?? "N/A"}", $"- **Legal Entity:** {pricing.LegalEntity ?? "N/A"}", "", - "### Financial", - $"- **Premium:** {(pricing.Premium.HasValue && pricing.Currency != null ? $"{pricing.Currency} {pricing.Premium:N2}" : "N/A")}", - $"- **Currency:** {pricing.Currency ?? "N/A"}", - "", "### Parties", - $"- **Broker:** {pricing.BrokerName ?? "N/A"}" + $"- **Broker:** {pricing.BrokerName ?? "N/A"}", + $"- **Primary Insurance:** {pricing.PrimaryInsurance ?? 
"N/A"}" }; return string.Join("\n", lines); diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PropertyRisksLayoutArea.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PropertyRisksLayoutArea.cs index 76f11187f..3db9474af 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PropertyRisksLayoutArea.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/PropertyRisksLayoutArea.cs @@ -1,10 +1,12 @@ using System.Reactive.Linq; using MeshWeaver.Data; using MeshWeaver.Insurance.Domain.LayoutAreas.Shared; +using MeshWeaver.Insurance.Domain.Services; using MeshWeaver.Layout; using MeshWeaver.Layout.Composition; using MeshWeaver.Layout.DataGrid; using MeshWeaver.Utils; +using Microsoft.Extensions.DependencyInjection; namespace MeshWeaver.Insurance.Domain.LayoutAreas; @@ -37,7 +39,8 @@ public static IObservable PropertyRisks(LayoutAreaHost host, Renderin return Controls.Stack .WithView(PricingLayoutShared.BuildToolbar(pricingId, "PropertyRisks")) - .WithView(dataGrid); + .WithView(dataGrid) + .WithView(GeocodingArea); }) .StartWith(Controls.Stack .WithView(PricingLayoutShared.BuildToolbar(pricingId, "PropertyRisks")) @@ -94,6 +97,40 @@ private static UiControl RenderRisksDataGrid(LayoutAreaHost host, IReadOnlyColle .WithView(Controls.Title("Property Risks", 1)) .WithView(dataGrid); } + + private static IObservable GeocodingArea(LayoutAreaHost host, RenderingContext ctx) + { + var svc = host.Hub.ServiceProvider.GetRequiredService(); + return svc.Progress.Select(p => p is null + ? (UiControl)Controls.Button("Geocode").WithClickAction(ClickGeocoding) + : Controls.Progress($"Processing {p.CurrentRiskName}: {p.ProcessedRisks} of {p.TotalRisks}", + p.TotalRisks == 0 ? 0 : (int)(100.0 * p.ProcessedRisks / p.TotalRisks))); + } + + private static async Task ClickGeocoding(UiActionContext obj) + { + // Show initial progress + obj.Host.UpdateArea(obj.Area, Controls.Progress("Starting geocoding...", 0)); + + try + { + // Start the geocoding process + var response = await obj.Host.Hub.AwaitResponse( + new GeocodingRequest(), + o => o.WithTarget(obj.Hub.Address)); + + // Show completion message + var resultMessage = response?.Message?.Success == true + ? $"✅ Geocoding Complete: {response.Message.GeocodedCount} locations geocoded successfully." + : $"❌ Geocoding Failed: {response?.Message?.Error}"; + + obj.Host.UpdateArea(obj.Area, Controls.Markdown($"**{resultMessage}**")); + } + catch (Exception ex) + { + obj.Host.UpdateArea(obj.Area, Controls.Markdown($"**Geocoding Failed**: {ex.Message}")); + } + } } /// diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/ReinsuranceAcceptanceLayoutArea.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/ReinsuranceAcceptanceLayoutArea.cs new file mode 100644 index 000000000..7631b9dc3 --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/ReinsuranceAcceptanceLayoutArea.cs @@ -0,0 +1,189 @@ +using System.Reactive.Linq; +using System.Text; +using MeshWeaver.Insurance.Domain.LayoutAreas.Shared; +using MeshWeaver.Layout; +using MeshWeaver.Layout.Composition; + +namespace MeshWeaver.Insurance.Domain.LayoutAreas; + +/// +/// Layout area for displaying reinsurance acceptances associated with a pricing. 
+/// +public static class ReinsuranceAcceptanceLayoutArea +{ + // Color definitions for diagram elements + private static readonly string PricingColor = "#2c7bb6"; // Blue - dark background, white text + private static readonly string PricingTextColor = "#ffffff"; + private static readonly string AcceptanceColor = "#fdae61"; // Light Orange - light background, dark text + private static readonly string AcceptanceTextColor = "#000000"; + private static readonly string SectionColor = "#abd9e9"; // Light Blue - light background, dark text + private static readonly string SectionTextColor = "#000000"; + + /// + /// Renders the reinsurance acceptances structure for a specific pricing. + /// + public static IObservable Structure(LayoutAreaHost host, RenderingContext ctx) + { + _ = ctx; + var pricingId = host.Hub.Address.Id; + var pricingStream = host.Workspace.GetStream()!; + var acceptanceStream = host.Workspace.GetStream()!; + var sectionStream = host.Workspace.GetStream()!; + + return Observable.CombineLatest( + pricingStream, + acceptanceStream, + sectionStream, + (pricings, acceptances, sections) => (pricings, acceptances, sections)) + .Select(data => + { + var pricing = data.pricings?.FirstOrDefault(); + var acceptanceList = data.acceptances?.ToList() ?? new List(); + var sectionList = data.sections?.ToList() ?? new List(); + + if (!acceptanceList.Any() && !sectionList.Any()) + { + return Controls.Stack + .WithView(PricingLayoutShared.BuildToolbar(pricingId, "Structure")) + .WithView(Controls.Markdown("# Reinsurance Structure\n\n*No reinsurance acceptances loaded. Import or add acceptances to begin.*")); + } + + var diagram = BuildMermaidDiagram(pricingId, pricing, acceptanceList, sectionList); + var mermaidControl = new MarkdownControl($"```mermaid\n{diagram}\n```") + .WithStyle(style => style.WithWidth("100%").WithHeight("600px")); + + return Controls.Stack + .WithView(PricingLayoutShared.BuildToolbar(pricingId, "Structure")) + .WithView(Controls.Title("Reinsurance Structure", 1)) + .WithView(mermaidControl); + }) + .StartWith(Controls.Stack + .WithView(PricingLayoutShared.BuildToolbar(pricingId, "Structure")) + .WithView(Controls.Markdown("# Reinsurance Structure\n\n*Loading...*"))); + } + + private static string BuildMermaidDiagram(string pricingId, Pricing? pricing, List acceptances, List sections) + { + var sb = new StringBuilder(); + + // Start with flowchart definition for cards + sb.AppendLine("flowchart TD"); + sb.AppendLine(" classDef leftAlign text-align:left"); + + // Add the pricing node (main node) + var pricingContent = new StringBuilder(); + pricingContent.Append($"Pricing: {pricingId}"); + + if (pricing?.PrimaryInsurance != null) + { + pricingContent.Append($"
<br/>Primary: {pricing.PrimaryInsurance}"); +        } + +        if (pricing?.BrokerName != null) +        { +            pricingContent.Append($"
Broker: {pricing.BrokerName}"); + } + + sb.AppendLine($" pricing[\"{pricingContent}\"]"); + sb.AppendLine($" style pricing fill:{PricingColor},color:{PricingTextColor},stroke:#333,stroke-width:1px"); + sb.AppendLine($" class pricing leftAlign"); + + // Group sections by acceptanceId + var sectionsByAcceptance = sections + .Where(s => s.AcceptanceId != null) + .GroupBy(s => s.AcceptanceId) + .ToDictionary(g => g.Key!, g => g.ToList()); + + // Add acceptances and their sections + // Sort acceptances by name to ensure Layer 1, Layer 2, Layer 3 order + foreach (var acceptance in acceptances.OrderBy(a => a.Name ?? a.Id)) + { + RenderAcceptance(sb, acceptance, pricingId); + + // Add sections for this acceptance + // Sort sections by LineOfBusiness first, then by Attach to group coverage types together + if (sectionsByAcceptance.TryGetValue(acceptance.Id, out var acceptanceSections)) + { + foreach (var section in acceptanceSections.OrderBy(s => s.LineOfBusiness).ThenBy(s => s.Attach)) + { + RenderSection(sb, section, acceptance.Id); + } + } + } + + return sb.ToString(); + } + + private static void RenderAcceptance(StringBuilder sb, ReinsuranceAcceptance acceptance, string pricingId) + { + string acceptanceId = SanitizeId(acceptance.Id); + var acceptanceName = acceptance.Name ?? acceptance.Id; + + // Build acceptance content + var acceptanceContent = new StringBuilder(); + acceptanceContent.Append($"{acceptanceName}"); + + if (acceptance.EPI > 0) + acceptanceContent.Append($"
<br/>EPI: {acceptance.EPI:N0}"); + +        if (acceptance.Rate > 0) +            acceptanceContent.Append($"
<br/>Rate: {acceptance.Rate:P2}"); + +        if (acceptance.Share > 0) +            acceptanceContent.Append($"
<br/>Share: {acceptance.Share:P2}"); + +        if (acceptance.Cession > 0) +            acceptanceContent.Append($"
<br/>Cession: {acceptance.Cession:P2}"); + +        if (acceptance.Brokerage > 0) +            acceptanceContent.Append($"
<br/>Brokerage: {acceptance.Brokerage:P2}"); + +        if (acceptance.Commission > 0) +            acceptanceContent.Append($"
Commission: {acceptance.Commission:P2}"); + + sb.AppendLine($" acc_{acceptanceId}[\"{acceptanceContent}\"]"); + sb.AppendLine($" style acc_{acceptanceId} fill:{AcceptanceColor},color:{AcceptanceTextColor},stroke:#333,stroke-width:1px"); + sb.AppendLine($" class acc_{acceptanceId} leftAlign"); + sb.AppendLine($" pricing --> acc_{acceptanceId}"); + } + + private static void RenderSection(StringBuilder sb, ReinsuranceSection section, string acceptanceId) + { + string sectionId = SanitizeId(section.Id); + string sanitizedAcceptanceId = SanitizeId(acceptanceId); + + // Build section content + var sectionContent = new StringBuilder(); + sectionContent.Append($"{section.Name ?? section.LineOfBusiness ?? section.Id}
"); + + if (!string.IsNullOrEmpty(section.LineOfBusiness) && section.LineOfBusiness != section.Name) + { + sectionContent.Append($"LoB: {section.LineOfBusiness}
"); + } + + sectionContent.Append($"Attach: {section.Attach:N0}
"); + sectionContent.Append($"Limit: {section.Limit:N0}
"); + + if (section.AggAttach.HasValue && section.AggAttach.Value > 0) + { + sectionContent.Append($"AAD: {section.AggAttach.Value:N0}
"); + } + + if (section.AggLimit.HasValue && section.AggLimit.Value > 0) + { + sectionContent.Append($"AAL: {section.AggLimit.Value:N0}"); + } + + // Create node + sb.AppendLine($" sec_{sectionId}[\"{sectionContent}\"]"); + sb.AppendLine($" style sec_{sectionId} fill:{SectionColor},color:{SectionTextColor},stroke:#333,stroke-width:1px"); + sb.AppendLine($" class sec_{sectionId} leftAlign"); + sb.AppendLine($" acc_{sanitizedAcceptanceId} --> sec_{sectionId}"); + } + + private static string SanitizeId(string id) + { + // Replace characters that might cause issues in Mermaid IDs + return id.Replace("-", "_").Replace(" ", "_").Replace(".", "_"); + } +} diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/RiskMapLayoutArea.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/RiskMapLayoutArea.cs index be3819ff1..f35463e38 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/RiskMapLayoutArea.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/RiskMapLayoutArea.cs @@ -1,7 +1,13 @@ using System.Reactive.Linq; +using System.Reactive.Subjects; +using MeshWeaver.Data; +using MeshWeaver.GoogleMaps; using MeshWeaver.Insurance.Domain.LayoutAreas.Shared; +using MeshWeaver.Insurance.Domain.Services; using MeshWeaver.Layout; using MeshWeaver.Layout.Composition; +using MeshWeaver.Messaging; +using Microsoft.Extensions.DependencyInjection; namespace MeshWeaver.Insurance.Domain.LayoutAreas; @@ -13,72 +19,164 @@ public static class RiskMapLayoutArea /// /// Renders a map view of property risks for a specific pricing. /// - public static IObservable RiskMap(LayoutAreaHost host, RenderingContext ctx) + public static IObservable RiskMap(LayoutAreaHost host, RenderingContext _) { - _ = ctx; var pricingId = host.Hub.Address.Id; - var riskStream = host.Workspace.GetStream()!; - return riskStream.Select(risks => - { - var riskList = risks?.ToList() ?? new List(); - var geocodedRisks = riskList.Where(r => r.GeocodedLocation?.Latitude != null && r.GeocodedLocation?.Longitude != null).ToList(); - - if (!riskList.Any()) + return host.Workspace.GetStream()! + .SelectMany(risks => { - return Controls.Stack - .WithView(PricingLayoutShared.BuildToolbar(pricingId, "RiskMap")) - .WithView(Controls.Markdown("# Risk Map\n\n*No risks loaded. Import or add risks to begin.*")); - } + var riskList = risks?.ToList() ?? new List(); + var geocodedRisks = riskList.Where(r => r.GeocodedLocation?.Latitude != null && r.GeocodedLocation?.Longitude != null).ToList(); - if (!geocodedRisks.Any()) - { - return Controls.Stack - .WithView(PricingLayoutShared.BuildToolbar(pricingId, "RiskMap")) - .WithView(Controls.Markdown($"# Risk Map\n\n*No geocoded risks found. {riskList.Count} risk(s) available but none have valid coordinates.*")); - } + if (!riskList.Any()) + { + return Observable.Return(Controls.Stack + .WithView(PricingLayoutShared.BuildToolbar(pricingId, "RiskMap")) + .WithView(Controls.Markdown("# Risk Map\n\n*No risks loaded. Import or add risks to begin.*"))); + } + + if (!geocodedRisks.Any()) + { + return Observable.Return(Controls.Stack + .WithView(PricingLayoutShared.BuildToolbar(pricingId, "RiskMap")) + .WithView(Controls.Markdown($"# Risk Map\n\n*No geocoded risks found. 
{riskList.Count} risk(s) available but none have valid coordinates.*")) + .WithView(GeocodingArea)); + } - var mapContent = RenderMapContent(geocodedRisks); + var mapControl = BuildGoogleMapControl(geocodedRisks); - return Controls.Stack + return Observable.Using( + () => new ReplaySubject(1), + riskDetailsSubject => + { + riskDetailsSubject.OnNext(null); + var mapControlWithClick = mapControl.WithClickAction(ctx => riskDetailsSubject.OnNext(ctx.Payload?.ToString())); + + return Observable.Return( + Controls.Stack + .WithView(PricingLayoutShared.BuildToolbar(pricingId, "RiskMap")) + .WithView(Controls.Title("Risk Map", 2)) + .WithView(mapControlWithClick) + .WithView(GeocodingArea) + .WithView(Controls.Title("Risk Details", 3)) + .WithView((h, c) => riskDetailsSubject + .SelectMany(id => id == null ? + Observable.Return(Controls.Html("Click marker to see details.")) : RenderRiskDetails(host.Hub, id)) + ) + ); + } + ); + }) + .StartWith(Controls.Stack .WithView(PricingLayoutShared.BuildToolbar(pricingId, "RiskMap")) - .WithView(Controls.Markdown(mapContent)); - }) - .StartWith(Controls.Stack - .WithView(PricingLayoutShared.BuildToolbar(pricingId, "RiskMap")) - .WithView(Controls.Markdown("# Risk Map\n\n*Loading...*"))); + .WithView(Controls.Markdown("# Risk Map\n\n*Loading...*"))); + } + + private static IObservable GeocodingArea(LayoutAreaHost host, RenderingContext ctx) + { + var svc = host.Hub.ServiceProvider.GetRequiredService(); + return svc.Progress.Select(p => p is null + ? (UiControl)Controls.Button("Geocode").WithClickAction(ClickGeocoding) + : Controls.Progress($"Processing {p.CurrentRiskName}: {p.ProcessedRisks} of {p.TotalRisks}", + p.TotalRisks == 0 ? 0 : (int)(100.0 * p.ProcessedRisks / p.TotalRisks))); } - private static string RenderMapContent(List geocodedRisks) + private static async Task ClickGeocoding(UiActionContext obj) { - var lines = new List + // Show initial progress + obj.Host.UpdateArea(obj.Area, Controls.Progress("Starting geocoding...", 0)); + + try { - "# Risk Map", - "", - $"**Total Geocoded Risks:** {geocodedRisks.Count}", - "", - "## Risk Locations", - "" - }; + // Start the geocoding process + var response = await obj.Host.Hub.AwaitResponse( + new GeocodingRequest(), + o => o.WithTarget(obj.Hub.Address)); - foreach (var risk in geocodedRisks.Take(10)) + // Show completion message + var resultMessage = response?.Message?.Success == true + ? $"✅ Geocoding Complete: {response.Message.GeocodedCount} locations geocoded successfully." + : $"❌ Geocoding Failed: {response?.Message?.Error}"; + + obj.Host.UpdateArea(obj.Area, Controls.Markdown($"**{resultMessage}**")); + } + catch (Exception ex) { - lines.Add($"- **{risk.LocationName ?? "Unknown"}**: {risk.City}, {risk.State}, {risk.Country}"); - lines.Add($" - Coordinates: {risk.GeocodedLocation!.Latitude:F6}, {risk.GeocodedLocation.Longitude:F6}"); - lines.Add($" - TSI Building: {risk.Currency} {risk.TsiBuilding:N0}"); - lines.Add(""); + obj.Host.UpdateArea(obj.Area, Controls.Markdown($"**Geocoding Failed**: {ex.Message}")); } + } - if (geocodedRisks.Count > 10) + private static IObservable RenderRiskDetails(IMessageHub hub, string id) + { + return hub.GetWorkspace() + .GetStream(new EntityReference(nameof(PropertyRisk), id))! + .Select(r => BuildRiskDetailsMarkdown(r.Value as PropertyRisk)); + } + + private static UiControl BuildRiskDetailsMarkdown(PropertyRisk? 
risk) + { + if (risk is null) + return Controls.Html("Risk not found"); + + return Controls.Stack + .WithView(Controls.Markdown("## Risk Details")) + .WithView(Controls.Markdown($"**ID:** {risk.Id}")) + .WithView(Controls.Markdown($"**Location:** {risk.LocationName ?? "N/A"}")) + .WithView(Controls.Markdown($"**Address:** {risk.GeocodedLocation?.FormattedAddress ?? risk.Address ?? "N/A"}")) + .WithView(Controls.Markdown($"**City:** {risk.City ?? "N/A"}")) + .WithView(Controls.Markdown($"**State:** {risk.State ?? "N/A"}")) + .WithView(Controls.Markdown($"**Country:** {risk.Country ?? "N/A"}")) + .WithView(Controls.Markdown($"**Currency:** {risk.Currency ?? "N/A"}")) + .WithView(Controls.Markdown($"**TSI Building:** {risk.TsiBuilding:N0}")) + .WithView(Controls.Markdown($"**TSI Content:** {risk.TsiContent:N0}")) + .WithView(Controls.Markdown($"**TSI BI:** {risk.TsiBi:N0}")) + .WithView(Controls.Markdown($"**Latitude:** {risk.GeocodedLocation?.Latitude:F6}")) + .WithView(Controls.Markdown($"**Longitude:** {risk.GeocodedLocation?.Longitude:F6}")); + } + + private static GoogleMapControl BuildGoogleMapControl(IReadOnlyCollection risks) + { + var riskList = risks.Where(r => r.GeocodedLocation?.Latitude is not null && r.GeocodedLocation?.Longitude is not null).ToList(); + + // Find center point + LatLng center; + if (riskList.Any()) { - lines.Add($"*... and {geocodedRisks.Count - 10} more risk(s)*"); - lines.Add(""); + var avgLat = riskList.Average(r => r.GeocodedLocation!.Latitude!.Value); + var avgLng = riskList.Average(r => r.GeocodedLocation!.Longitude!.Value); + center = new LatLng(avgLat, avgLng); + } + else + { + center = new LatLng(0, 0); } - lines.Add("---"); - lines.Add(""); - lines.Add("*Interactive map visualization coming soon...*"); + // Create markers + var markers = riskList.Select(r => new MapMarker + { + Position = new LatLng(r.GeocodedLocation!.Latitude!.Value, r.GeocodedLocation.Longitude!.Value), + Title = ((r.LocationName ?? r.Address) + " " + (r.City ?? "")).Trim(), + Id = r.Id, + Data = r + }).ToList(); - return string.Join("\n", lines); + var mapOptions = new MapOptions + { + Center = center, + Zoom = riskList.Any() ? 
6 : 2, + MapTypeId = "roadmap", + ZoomControl = true, + MapTypeControl = true, + StreetViewControl = false, + FullscreenControl = true + }; + + return new GoogleMapControl() + { + Options = mapOptions, + Markers = markers, + Id = "risk-map" + }.WithStyle(style => style.WithHeight("500px").WithWidth("80%")); } } diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/Shared/PricingLayoutShared.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/Shared/PricingLayoutShared.cs index 05fc4b5a1..f00adbec9 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/Shared/PricingLayoutShared.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/LayoutAreas/Shared/PricingLayoutShared.cs @@ -18,6 +18,7 @@ string Item(string key, string icon, string text) {Item("Submission", "📎", "Submission")} {Item("PropertyRisks", "📄", "Risks")} {Item("RiskMap", "🗺️", "Map")} +{Item("Structure", "🏦", "Reinsurance")} {Item("ImportConfigs", "⚙️", "Import")} "; diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/MeshWeaver.Insurance.Domain.csproj b/modules/Insurance/MeshWeaver.Insurance.Domain/MeshWeaver.Insurance.Domain.csproj index e1e43d7f3..1bed21874 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/MeshWeaver.Insurance.Domain.csproj +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/MeshWeaver.Insurance.Domain.csproj @@ -10,6 +10,7 @@ + diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/Pricing.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/Pricing.cs index d724f0c20..bb126a234 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/Pricing.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/Pricing.cs @@ -1,4 +1,4 @@ -using System.ComponentModel.DataAnnotations; +using System.ComponentModel.DataAnnotations; using MeshWeaver.Domain; namespace MeshWeaver.Insurance.Domain; @@ -6,6 +6,7 @@ namespace MeshWeaver.Insurance.Domain; /// /// Represents an insurance pricing entity with dimension-based classification. /// +[Display(GroupName = "Structure")] public record Pricing { /// @@ -58,9 +59,9 @@ public record Pricing public string? BrokerName { get; init; } /// - /// Premium amount in the pricing currency. + /// Name of the primary insurance company. /// - public decimal? Premium { get; init; } + public string? PrimaryInsurance { get; init; } /// /// Currency code for the premium. diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/PropertyRisk.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/PropertyRisk.cs index 7a27187e4..d47e9884d 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/PropertyRisk.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/PropertyRisk.cs @@ -8,11 +8,12 @@ namespace MeshWeaver.Insurance.Domain; /// Represents a property risk within an insurance pricing. /// Contains location details, values, and dimensions for property insurance underwriting. /// +[Display(GroupName = "Risk")] public record PropertyRisk { /// /// Unique identifier for the property risk record. - /// Synonyms: "Plant code", "Plant ID", "Site Code", "Asset ID", "Code". + /// Synonyms: Plant code, Plant ID, Site Code, Asset ID, Code. /// [Key] public required string Id { get; init; } @@ -41,38 +42,38 @@ public record PropertyRisk /// /// Human-friendly site or facility name. - /// Synonyms: "Plant Description", "Site Name", "Location Name". + /// Synonyms: Plant Description, Site Name, Location Name. /// public string? LocationName { get; init; } /// /// Country; typically ISO code or name (dimension). 
- /// Synonyms: "Country Code", "Country". + /// Synonyms: Country Code, Country. /// [Dimension] public string? Country { get; init; } /// /// Street address. - /// Synonyms: "Property Address", "Address". + /// Synonyms: Property Address, Address. /// public string? Address { get; init; } /// /// State/region/province. - /// Synonyms: "State/Province", "Region". + /// Synonyms: State/Province, Region. /// public string? State { get; init; } /// /// County/district. - /// Synonyms: "District", "County". + /// Synonyms: District, County. /// public string? County { get; init; } /// /// Postal/ZIP code. - /// Synonyms: "ZIP", "Postcode". + /// Synonyms: ZIP, Postcode. /// public string? ZipCode { get; init; } @@ -83,14 +84,14 @@ public record PropertyRisk /// /// Base currency for the risk. - /// Synonyms: "Currency", "Curr.", "Curr", "CCY". + /// Synonyms: Currency, Curr., Curr, CCY. /// [Dimension] public string? Currency { get; init; } /// /// Sum insured for buildings. - /// Synonyms: "Buildings", "Building Value", "TSI Building(s)". + /// Synonyms: Buildings, Building Value, TSI Building(s). /// public double TsiBuilding { get; init; } @@ -101,7 +102,7 @@ public record PropertyRisk /// /// Sum insured for contents. - /// Synonyms: "Stock", "Fixtures & Fittings", "IT Equipment", "Equipment". + /// Synonyms: Stock, Fixtures, Fittings, IT Equipment, Equipment. /// public double TsiContent { get; init; } @@ -112,7 +113,7 @@ public record PropertyRisk /// /// Business Interruption TSI. - /// Synonyms: "BI", "Business Interruption", "Gross Profit". + /// Synonyms: BI, Business Interruption, Gross Profit. /// public double TsiBi { get; init; } @@ -123,7 +124,7 @@ public record PropertyRisk /// /// Account identifier. - /// Synonyms: "Account #", "Account No". + /// Synonyms: Account #, Account No. /// public string? AccountNumber { get; init; } diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/ReinsuranceAcceptance.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/ReinsuranceAcceptance.cs new file mode 100644 index 000000000..7300dabf2 --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/ReinsuranceAcceptance.cs @@ -0,0 +1,87 @@ +using System.ComponentModel.DataAnnotations; + +namespace MeshWeaver.Insurance.Domain; + +/// +/// Represents the reinsurance acceptance with financial terms and coverage sections. +/// +[Display(GroupName = "Structure")] +public record ReinsuranceAcceptance +{ + /// + /// Gets or initializes the unique acceptance identifier. + /// + [Key] + public required string Id { get; init; } + + /// + /// Gets or initializes the pricingId this acceptance belongs to. + /// + public string? PricingId { get; init; } + + /// + /// Gets or initializes the acceptance name or description. + /// + public string? Name { get; init; } + + /// + /// Gets or initializes the cession percentage. + /// + public double Cession { get; init; } + + /// + /// Gets or initializes the share percentage. + /// + public double Share { get; init; } + + /// + /// Gets or initializes the collection of reinsurance sections (layers). + /// + public IReadOnlyCollection? Sections { get; init; } + + /// + /// Gets or initializes the Estimated Premium Income (EPI). + /// + public double EPI { get; init; } + + /// + /// Gets or initializes the rate. + /// + public double Rate { get; init; } + + /// + /// Gets or initializes the commission percentage. + /// + public double Commission { get; init; } + + /// + /// Gets or initializes the brokerage percentage. 
+ /// + public double Brokerage { get; init; } + + /// + /// Gets or initializes the tax percentage. + /// + public double Tax { get; init; } + + /// + /// Gets or initializes the reinstatement premium. + /// + public double ReinstPrem { get; init; } + + /// + /// Gets or initializes the no claims bonus percentage. + /// + public double NoClaimsBonus { get; init; } + + /// + /// Gets or initializes the profit commission percentage. + /// + public double ProfitComm { get; init; } + + + /// + /// Gets or initializes the minimum and deposit premium. + /// + public double MDPrem { get; init; } +} diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/ReinsuranceSection.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/ReinsuranceSection.cs new file mode 100644 index 000000000..42c21572c --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/ReinsuranceSection.cs @@ -0,0 +1,51 @@ +using System.ComponentModel.DataAnnotations; + +namespace MeshWeaver.Insurance.Domain; + +/// +/// Represents a reinsurance coverage section with layer structure and financial terms. +/// +[Display(GroupName = "Structure")] +public record ReinsuranceSection +{ + /// + /// Gets or initializes the unique section identifier. + /// + [Key] + public required string Id { get; init; } + + /// + /// Gets or initializes the acceptanceId this section belongs to. + /// + public string? AcceptanceId { get; init; } + + /// + /// Gets or initializes the section name or description. + /// + public string? Name { get; init; } + + /// + /// Gets or initializes the section type (e.g., "Fire Damage", "Natural Catastrophe", "Business Interruption"). + /// + public string? LineOfBusiness { get; init; } + + /// + /// Gets or initializes the attachment point. + /// + public decimal Attach { get; init; } + + /// + /// Gets or initializes the layer limit. + /// + public decimal Limit { get; init; } + + /// + /// Gets or initializes the aggregate attachment point (annual aggregate deductible). + /// + public decimal? AggAttach { get; init; } + + /// + /// Gets or initializes the aggregate limit (annual aggregate limit). + /// + public decimal? AggLimit { get; init; } +} diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/SampleDataProvider.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/SampleDataProvider.cs index 23f9f1f96..a4970841a 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Domain/SampleDataProvider.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/SampleDataProvider.cs @@ -1,4 +1,4 @@ -namespace MeshWeaver.Insurance.Domain; +namespace MeshWeaver.Insurance.Domain; /// /// Provides sample data for insurance dimensions and entities. 
@@ -233,14 +233,12 @@ public static IEnumerable GetSamplePricings() { Id = "PRC-2024-001", InsuredName = "Global Manufacturing Corp", - BrokerName = "Marsh McLennan", InceptionDate = new DateTime(2024, 1, 1), ExpirationDate = new DateTime(2024, 12, 31), UnderwritingYear = 2024, LineOfBusiness = "PROP", Country = "US", LegalEntity = "MW-US", - Premium = 125000m, Currency = "USD", Status = "Bound" }, @@ -248,14 +246,12 @@ public static IEnumerable GetSamplePricings() { Id = "PRC-2024-002", InsuredName = "European Logistics Ltd", - BrokerName = "Aon", InceptionDate = new DateTime(2024, 3, 1), ExpirationDate = new DateTime(2025, 2, 28), UnderwritingYear = 2024, LineOfBusiness = "PROP", Country = "GB", LegalEntity = "MW-UK", - Premium = 85000m, Currency = "GBP", Status = "Quoted" }, @@ -263,14 +259,12 @@ public static IEnumerable GetSamplePricings() { Id = "PRC-2024-003", InsuredName = "Tech Industries GmbH", - BrokerName = "Willis Towers Watson", InceptionDate = new DateTime(2024, 6, 1), ExpirationDate = new DateTime(2025, 5, 31), UnderwritingYear = 2024, LineOfBusiness = "PROP", Country = "DE", LegalEntity = "MW-EU", - Premium = 95000m, Currency = "EUR", Status = "Draft" }, @@ -278,14 +272,10 @@ public static IEnumerable GetSamplePricings() { Id = "Microsoft-2026", InsuredName = "Microsoft", - BrokerName = "Marsh McLennan", - InceptionDate = new DateTime(2026, 1, 1), - ExpirationDate = new DateTime(2026, 12, 31), UnderwritingYear = 2026, LineOfBusiness = "PROP", Country = "US", LegalEntity = "MW-US", - Premium = 2500000m, Currency = "USD", Status = "Bound" } diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/Services/GoogleGeocodingService.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/Services/GoogleGeocodingService.cs new file mode 100644 index 000000000..4a0cb3758 --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/Services/GoogleGeocodingService.cs @@ -0,0 +1,237 @@ +using System.Collections.Concurrent; +using System.Net.Http.Json; +using System.Reactive.Subjects; +using MeshWeaver.GoogleMaps; +using Microsoft.Extensions.Options; + +namespace MeshWeaver.Insurance.Domain.Services; + +/// +/// Google Maps-based geocoding service for property risks. 
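+/// Geocodes risks in parallel against the Google Geocoding API and reports progress through the Progress observable.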
+/// +public class GoogleGeocodingService(IOptions googleConfig) : IGeocodingService +{ + private readonly ReplaySubject progressSubject = InitializeProgress(); + private readonly object progressLock = new(); + private readonly HttpClient http = new(); + + private static ReplaySubject InitializeProgress() + { + var ret = new ReplaySubject(1); + ret.OnNext(null); + return ret; + } + + public IObservable Progress => progressSubject; + + public async Task GeocodeRisksAsync(IReadOnlyCollection risks, CancellationToken cancellationToken = default) + { + try + { + lock (progressLock) + { + progressSubject.OnNext(new GeocodingProgress(0, 0, null, "Starting geocoding...")); + } + + if (!risks.Any()) + { + lock (progressLock) + { + progressSubject.OnNext(new GeocodingProgress(0, 0, null, "No risks to geocode", true)); + } + return new GeocodingResponse + { + Success = true, + GeocodedCount = 0, + Error = "No risks found to geocode" + }; + } + + // Check Google Maps API key + if (string.IsNullOrEmpty(googleConfig.Value.ApiKey)) + { + var error = "Google Maps API key not configured"; + lock (progressLock) + { + progressSubject.OnNext(new GeocodingProgress(0, 0, null, "Configuration error", true)); + } + return new GeocodingResponse + { + Success = false, + GeocodedCount = 0, + Error = error + }; + } + + // Filter risks that need geocoding + var risksToGeocode = risks + .Where(r => r.GeocodedLocation?.Latitude == null || r.GeocodedLocation?.Longitude == null) + .ToList(); + + if (!risksToGeocode.Any()) + { + lock (progressLock) + { + progressSubject.OnNext(new GeocodingProgress(risks.Count, risks.Count, null, + "All risks already geocoded", true)); + } + return new GeocodingResponse + { + Success = true, + GeocodedCount = 0, + Error = null + }; + } + + lock (progressLock) + { + progressSubject.OnNext(new GeocodingProgress(risksToGeocode.Count, 0, null, "Initializing geocoding...")); + } + + var geocodedCount = 0; + var updatedRisks = new ConcurrentBag(); + var processedCount = 0; + + var parallelOptions = new ParallelOptions + { + MaxDegreeOfParallelism = Math.Min(Environment.ProcessorCount, 10), + CancellationToken = cancellationToken + }; + + await Parallel.ForEachAsync(risksToGeocode, parallelOptions, async (risk, ct) => + { + var riskName = risk.LocationName ?? risk.Address ?? $"Risk {risk.Id}"; + + try + { + var geocodedLocation = await GeocodeAsync(risk, ct); + + if (geocodedLocation.Latitude.HasValue && geocodedLocation.Longitude.HasValue) + { + // Update the risk with geocoded data + var updatedRisk = risk with { GeocodedLocation = geocodedLocation }; + updatedRisks.Add(updatedRisk); + Interlocked.Increment(ref geocodedCount); + } + else + { + // Still add the risk with the geocoding attempt result + updatedRisks.Add(risk with { GeocodedLocation = geocodedLocation }); + } + } + catch (Exception) + { + // Add the original risk unchanged + updatedRisks.Add(risk); + } + + // Update progress after processing each risk + var currentProcessed = Interlocked.Increment(ref processedCount); + lock (progressLock) + { + progressSubject.OnNext(new GeocodingProgress( + risksToGeocode.Count, + currentProcessed, + risk.Id, + $"Processing {currentProcessed}/{risksToGeocode.Count} risks..." 
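+                        // IsComplete is left at its default (false) for per-item updates; the final update after the loop reports completion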
+ )); + } + }); + + // Final progress update + lock (progressLock) + { + progressSubject.OnNext(new GeocodingProgress( + risksToGeocode.Count, + risksToGeocode.Count, + null, + $"Completed processing {risksToGeocode.Count} risks", + true + )); + } + + return new GeocodingResponse + { + Success = true, + GeocodedCount = geocodedCount, + Error = null, + UpdatedRisks = updatedRisks.ToList() + }; + } + catch (Exception ex) + { + var error = $"Geocoding failed: {ex.Message}"; + return new GeocodingResponse + { + Success = false, + GeocodedCount = 0, + Error = error + }; + } + finally + { + lock (progressLock) + { + progressSubject.OnNext(null); + } + } + } + + private async Task GeocodeAsync(PropertyRisk risk, CancellationToken ct = default) + { + var query = BuildQuery(risk); + var url = $"https://maps.googleapis.com/maps/api/geocode/json?address={Uri.EscapeDataString(query)}&key={googleConfig.Value.ApiKey}"; + + var response = await http.GetFromJsonAsync(url, cancellationToken: ct); + if (response == null) + { + return new GeocodedLocation { Status = "NoResponse" }; + } + + if (response.status != "OK" || response.results == null || response.results.Length == 0) + { + return new GeocodedLocation { Status = response.status }; + } + + var r = response.results[0]; + return new GeocodedLocation + { + Latitude = r.geometry.location.lat, + Longitude = r.geometry.location.lng, + FormattedAddress = r.formatted_address, + PlaceId = r.place_id, + Status = response.status + }; + } + + private static string BuildQuery(PropertyRisk risk) + { + var parts = new[] { risk.LocationName, risk.Address, risk.City, risk.State, risk.ZipCode, risk.Country } + .Where(s => !string.IsNullOrWhiteSpace(s)); + return string.Join(", ", parts); + } + + private sealed class GoogleGeocodeResponse + { + public string status { get; set; } = string.Empty; + public GoogleGeocodeResult[]? results { get; set; } + } + + private sealed class GoogleGeocodeResult + { + public string formatted_address { get; set; } = string.Empty; + public string place_id { get; set; } = string.Empty; + public GoogleGeometry geometry { get; set; } = new(); + } + + private sealed class GoogleGeometry + { + public GoogleLocation location { get; set; } = new(); + } + + private sealed class GoogleLocation + { + public double lat { get; set; } + public double lng { get; set; } + } +} diff --git a/modules/Insurance/MeshWeaver.Insurance.Domain/Services/IGeocodingService.cs b/modules/Insurance/MeshWeaver.Insurance.Domain/Services/IGeocodingService.cs new file mode 100644 index 000000000..bfbd5b3c3 --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.Domain/Services/IGeocodingService.cs @@ -0,0 +1,28 @@ +namespace MeshWeaver.Insurance.Domain.Services; + +/// +/// Service for geocoding property risks. +/// +public interface IGeocodingService +{ + /// + /// Observable stream of geocoding progress. + /// + IObservable Progress { get; } + + /// + /// Geocodes a collection of property risks. + /// + Task GeocodeRisksAsync(IReadOnlyCollection risks, CancellationToken cancellationToken = default); +} + +/// +/// Progress information for geocoding operations. +/// +public record GeocodingProgress( + int TotalRisks, + int ProcessedRisks, + string? 
CurrentRiskId, + string CurrentRiskName, + bool IsComplete = false +); diff --git a/modules/Insurance/MeshWeaver.Insurance.Test/InsuranceTestBase.cs b/modules/Insurance/MeshWeaver.Insurance.Test/InsuranceTestBase.cs index aa7543a80..e60de47a4 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Test/InsuranceTestBase.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Test/InsuranceTestBase.cs @@ -1,9 +1,10 @@ +using System.Text.Json; +using System.Text.Json.Nodes; +using FluentAssertions.Extensions; using MeshWeaver.Data; using MeshWeaver.Hosting.Monolith.TestBase; using MeshWeaver.Insurance.Domain; using MeshWeaver.Mesh; -using System.Text.Json; -using System.Text.Json.Nodes; using Xunit; namespace MeshWeaver.Insurance.Test; @@ -15,8 +16,8 @@ protected override MeshBuilder ConfigureMesh(MeshBuilder builder) return base.ConfigureMesh(builder) .ConfigureHub(c => c .AddData() - .ConfigureInsuranceApplication() - ); + ) + .InstallAssemblies(typeof(InsuranceApplicationAttribute).Assembly.Location); } protected async Task> GetPropertyRisksAsync(PricingAddress address) @@ -27,7 +28,7 @@ protected async Task> GetPropertyRisksAsync(Pr o => o.WithTarget(address), TestContext.Current.CancellationToken); - return (risksResp.Message.Data as IEnumerable)? + return (risksResp?.Message?.Data as IEnumerable)? .Select(x => x as PropertyRisk ?? (x as JsonObject)?.Deserialize(hub.JsonSerializerOptions)) .Where(x => x != null) .Cast() @@ -41,10 +42,11 @@ protected async Task> GetPricingsAsync() var pricingsResp = await hub.AwaitResponse( new GetDataRequest(new CollectionReference(nameof(Pricing))), o => o.WithTarget(InsuranceApplicationAttribute.Address), - TestContext.Current.CancellationToken); + new CancellationTokenSource(10.Seconds()).Token); - return (pricingsResp.Message.Data as IEnumerable)? - .Select(x => x as Pricing ?? (x as JsonObject)?.Deserialize(hub.JsonSerializerOptions)) + return (pricingsResp.Message.Data as InstanceCollection)? + .Instances.Values + .Select(x => x as Pricing ?? 
(x as JsonObject)?.Deserialize(hub.JsonSerializerOptions)) .Where(x => x != null) .Cast() .ToList() diff --git a/modules/Insurance/MeshWeaver.Insurance.Test/MicrosoftImportTests.cs b/modules/Insurance/MeshWeaver.Insurance.Test/MicrosoftImportTests.cs index 263130574..2d75ab7e0 100644 --- a/modules/Insurance/MeshWeaver.Insurance.Test/MicrosoftImportTests.cs +++ b/modules/Insurance/MeshWeaver.Insurance.Test/MicrosoftImportTests.cs @@ -1,70 +1,124 @@ +using System.Reactive.Linq; using FluentAssertions; +using FluentAssertions.Extensions; +using MeshWeaver.ContentCollections; +using MeshWeaver.Data; using MeshWeaver.Import; using MeshWeaver.Import.Configuration; using MeshWeaver.Insurance.Domain; +using MeshWeaver.Insurance.Domain.Services; +using MeshWeaver.Mesh; +using Microsoft.Extensions.DependencyInjection; using Xunit; namespace MeshWeaver.Insurance.Test; public class MicrosoftImportTests(ITestOutputHelper output) : InsuranceTestBase(output) { - [Fact] - public void Import_Microsoft_File() + private readonly string _testFilesPath = Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "..", "Files", "Microsoft", "2026"); + private const string MicrosoftPricingId = "Microsoft-2026"; + + protected override MeshBuilder ConfigureMesh(MeshBuilder builder) { - // Arrange - Create import configuration - var config = new ExcelImportConfiguration - { - Name = "Microsoft.xlsx", - EntityId = "Microsoft", - WorksheetName = "Locations", // Adjust based on actual worksheet name - DataStartRow = 2, // Assuming row 1 is headers - TotalRowMarkers = new HashSet { "Total", "Grand Total" }, - TotalRowScanAllCells = true, - TotalRowMatchExact = false, - Mappings = new List - { - // Basic identification - new() { TargetProperty = "Id", Kind = MappingKind.Direct, SourceColumns = new List { "A" } }, - new() { TargetProperty = "LocationName", Kind = MappingKind.Direct, SourceColumns = new List { "B" } }, - new() { TargetProperty = "PricingId", Kind = MappingKind.Constant, ConstantValue = "Microsoft" }, + // Ensure test directory exists + Directory.CreateDirectory(_testFilesPath); + + return base.ConfigureMesh(builder) + .ConfigureServices(services => services + .AddSingleton() + ) + .ConfigureHub(c => c + .AddContentCollections() + .AddFileSystemContentCollection($"Submissions-{MicrosoftPricingId}", _ => _testFilesPath) + .AddImport() + .AddData(data => data.AddSource(source => source.WithType())) + ); + } + private static readonly ExcelImportConfiguration Config = new() + { + Name = "Microsoft.xlsx", + EntityId = MicrosoftPricingId, + TypeName = nameof(PropertyRisk), // Auto-generate entity builder for PropertyRisk + //WorksheetName = "Locations", // Adjust based on actual worksheet name + DataStartRow = 7, // Assuming row 1 is headers + TotalRowMarkers = ["Total", "Grand Total"], + TotalRowScanAllCells = true, + TotalRowMatchExact = false, + Mappings = + [ + new () { TargetProperty = "Id", Kind = MappingKind.Direct, SourceColumns = new List { "C" } }, + new() + { + TargetProperty = "LocationName", + Kind = MappingKind.Direct, + SourceColumns = ["D"] + }, + new() { TargetProperty = "PricingId", Kind = MappingKind.Constant, ConstantValue = MicrosoftPricingId }, // Address fields - new() { TargetProperty = "Address", Kind = MappingKind.Direct, SourceColumns = new List { "C" } }, - new() { TargetProperty = "City", Kind = MappingKind.Direct, SourceColumns = new List { "D" } }, - new() { TargetProperty = "State", Kind = MappingKind.Direct, SourceColumns = new List { "E" } }, - new() { TargetProperty = "Country", 
Kind = MappingKind.Direct, SourceColumns = new List { "F" } }, - new() { TargetProperty = "ZipCode", Kind = MappingKind.Direct, SourceColumns = new List { "G" } }, - - // Values - adjust column letters based on actual Excel file - new() { TargetProperty = "Currency", Kind = MappingKind.Direct, SourceColumns = new List { "H" } }, - new() { TargetProperty = "TsiBuilding", Kind = MappingKind.Direct, SourceColumns = new List { "I" } }, - new() { TargetProperty = "TsiContent", Kind = MappingKind.Direct, SourceColumns = new List { "J" } }, - new() { TargetProperty = "TsiBi", Kind = MappingKind.Direct, SourceColumns = new List { "K" } }, - }, - IgnoreRowExpressions = new List - { - "Id == null", // Skip rows without an ID - "Address == null" // Skip rows without an address - } - }; - - var filePath = "../../Files/Microsoft/2026/Microsoft.xlsx"; - var fullPath = Path.Combine(Directory.GetCurrentDirectory(), filePath); + new() + { + TargetProperty = "Address", + Kind = MappingKind.Direct, + SourceColumns = ["E"] + }, + new() { TargetProperty = "Country", Kind = MappingKind.Direct, SourceColumns = new List { "B" } }, + new() + { + TargetProperty = "TsiBuilding", + Kind = MappingKind.Direct, + SourceColumns = ["H"] + }, + new() + { + TargetProperty = "TsiContent", + Kind = MappingKind.Direct, + SourceColumns = ["G", "I", "J", "K", "L", "M", "N", "O", "P"] + }, + ], + Allocations = [new() { TargetProperty = "TsiBi", WeightColumns = ["Q"] }], + IgnoreRowExpressions = + [ + "Id == null", // Skip rows without an ID + "Address == null" + ] + }; + [Fact] + public async Task Import_Microsoft_File_WithConfiguration() + { // Skip test if file doesn't exist + var fullPath = Path.Combine(_testFilesPath, "Microsoft.xlsx"); if (!File.Exists(fullPath)) + throw new FileNotFoundException(fullPath); + + // Arrange - Create import configuration with TypeName + + // Act - Import using ImportRequest with Configuration + var importRequest = new ImportRequest(new CollectionSource($"Submissions-{MicrosoftPricingId}", "Microsoft.xlsx")) { - Output.WriteLine($"Test file not found: {fullPath}"); - return; - } + Configuration = Config + }; - // Act - Import using the generic importer - var importer = new ConfiguredExcelImporter(BuildPropertyRisk); - var risks = importer.Import(fullPath, config).ToList(); + var importResponse = await Mesh.AwaitResponse( + importRequest, + o => o.WithTarget(Mesh.Address), + TestContext.Current.CancellationToken + ); // Assert + importResponse.Should().NotBeNull(); + importResponse.Message.Log.Status.Should().Be(ActivityStatus.Succeeded, + $"import should succeed. 
Errors: {string.Join(", ", importResponse.Message.Log.Errors().Select(e => e.Message))}"); + + // Verify data was imported by querying the workspace + var workspace = Mesh.ServiceProvider.GetRequiredService(); + var risks = await workspace.GetObservable() + .Timeout(10.Seconds()) + .FirstAsync(x => x.Count > 0); + risks.Should().NotBeEmpty("import should return at least one risk"); - risks.All(r => r.PricingId == "Microsoft").Should().BeTrue("all risks should have PricingId set to Microsoft"); + risks.All(r => r.PricingId == MicrosoftPricingId).Should().BeTrue("all risks should have PricingId set to Microsoft-2026"); risks.All(r => !string.IsNullOrWhiteSpace(r.Id)).Should().BeTrue("all risks should have an Id"); // Verify source tracking @@ -73,52 +127,42 @@ public void Import_Microsoft_File() // Output summary Output.WriteLine($"Successfully imported {risks.Count} property risks"); - Output.WriteLine($"Sample risk: Id={risks.First().Id}, Location={risks.First().LocationName}, Country={risks.First().Country}"); + if (risks.Any()) + { + var first = risks.First(); + Output.WriteLine($"Sample risk: Id={first.Id}, Location={first.LocationName}, Country={first.Country}"); + } } [Fact] - public void Import_Microsoft_WithAllocation() + public async Task Import_Microsoft_WithAllocation() { - // This test demonstrates allocation mapping - distributing a total value proportionally - var config = new ExcelImportConfiguration - { - Name = "Microsoft.xlsx", - EntityId = "Microsoft", - WorksheetName = "Locations", - DataStartRow = 2, - Mappings = new List - { - new() { TargetProperty = "Id", Kind = MappingKind.Direct, SourceColumns = new List { "A" } }, - new() { TargetProperty = "LocationName", Kind = MappingKind.Direct, SourceColumns = new List { "B" } }, - new() { TargetProperty = "PricingId", Kind = MappingKind.Constant, ConstantValue = "Microsoft" }, - new() { TargetProperty = "TsiBuilding", Kind = MappingKind.Direct, SourceColumns = new List { "I" } }, - new() { TargetProperty = "TsiContent", Kind = MappingKind.Direct, SourceColumns = new List { "J" } }, - }, - Allocations = new List - { - // Example: Allocate total BI from cell C3 proportionally based on TsiBuilding + TsiContent weights - new() - { - TargetProperty = "TsiBi", - TotalCell = "C3", // Adjust to actual total cell in Excel - WeightColumns = new List { "I", "J" }, // Weight by TsiBuilding + TsiContent - CurrencyProperty = "Currency" - } - } - }; - - var filePath = "../../Files/Microsoft/2026/Microsoft.xlsx"; - var fullPath = Path.Combine(Directory.GetCurrentDirectory(), filePath); - + // Skip test if file doesn't exist + var fullPath = Path.Combine(_testFilesPath, "Microsoft.xlsx"); if (!File.Exists(fullPath)) + throw new FileNotFoundException(fullPath); + + // Act - Import using ImportRequest with Configuration + var importRequest = new ImportRequest(new CollectionSource($"Submissions-{MicrosoftPricingId}", "Microsoft.xlsx")) { - Output.WriteLine($"Test file not found: {fullPath}"); - return; - } + Configuration = Config + }; - var importer = new ConfiguredExcelImporter(BuildPropertyRisk); - var risks = importer.Import(fullPath, config).ToList(); + var importResponse = await Mesh.AwaitResponse( + importRequest, + o => o.WithTarget(Mesh.Address), + TestContext.Current.CancellationToken + ); + // Assert + importResponse.Should().NotBeNull(); + importResponse.Message.Log.Status.Should().Be(ActivityStatus.Succeeded); + + // Verify data was imported + var workspace = Mesh.ServiceProvider.GetRequiredService(); + var risks = await 
workspace.GetObservable() + .Timeout(10.Seconds()) + .FirstAsync(x => x.Count > 0); risks.Should().NotBeEmpty(); // Verify allocation worked - sum of allocated TsiBi should equal proportional distribution @@ -144,143 +188,37 @@ public void Import_Microsoft_WithAllocation() } [Fact] - public void Import_Microsoft_UsingSumMapping() + public async Task Import_Microsoft_UsingSumMapping() { - // Demonstrate Sum mapping - combining multiple columns - var config = new ExcelImportConfiguration - { - Name = "Microsoft.xlsx", - EntityId = "Microsoft", - WorksheetName = "Locations", - DataStartRow = 2, - Mappings = new List - { - new() { TargetProperty = "Id", Kind = MappingKind.Direct, SourceColumns = new List { "A" } }, - new() { TargetProperty = "PricingId", Kind = MappingKind.Constant, ConstantValue = "Microsoft" }, - - // Sum example: Total TSI = Building + Content + BI - new() { TargetProperty = "TsiBuilding", Kind = MappingKind.Direct, SourceColumns = new List { "I" } }, - new() { TargetProperty = "TsiContent", Kind = MappingKind.Direct, SourceColumns = new List { "J" } }, - new() { TargetProperty = "TsiBi", Kind = MappingKind.Direct, SourceColumns = new List { "K" } }, - } - }; - - var filePath = "../../Files/Microsoft/2026/Microsoft.xlsx"; - var fullPath = Path.Combine(Directory.GetCurrentDirectory(), filePath); - + // Skip test if file doesn't exist + var fullPath = Path.Combine(_testFilesPath, "Microsoft.xlsx"); if (!File.Exists(fullPath)) - { - Output.WriteLine($"Test file not found: {fullPath}"); - return; - } - - var importer = new ConfiguredExcelImporter(BuildPropertyRisk); - var risks = importer.Import(fullPath, config).ToList(); + throw new FileNotFoundException(fullPath); - risks.Should().NotBeEmpty(); - Output.WriteLine($"Imported {risks.Count} risks using sum mapping"); - } + // Demonstrate Sum mapping - combining multiple columns - /// - /// Builder function to construct PropertyRisk from dictionary of properties. - /// This handles type conversion and provides defaults. - /// - private static PropertyRisk BuildPropertyRisk(Dictionary values) - { - return new PropertyRisk + // Act - Import using ImportRequest with Configuration + var importRequest = new ImportRequest(new CollectionSource($"Submissions-{MicrosoftPricingId}", "Microsoft.xlsx")) { - Id = Get(values, nameof(PropertyRisk.Id)) ?? Guid.NewGuid().ToString(), - PricingId = Get(values, nameof(PropertyRisk.PricingId)), - SourceRow = Get(values, nameof(PropertyRisk.SourceRow)), - SourceFile = Get(values, nameof(PropertyRisk.SourceFile)), - LocationName = Get(values, nameof(PropertyRisk.LocationName)), - Country = Get(values, nameof(PropertyRisk.Country)), - State = Get(values, nameof(PropertyRisk.State)), - County = Get(values, nameof(PropertyRisk.County)), - ZipCode = Get(values, nameof(PropertyRisk.ZipCode)), - City = Get(values, nameof(PropertyRisk.City)), - Address = Get(values, nameof(PropertyRisk.Address)), - Currency = Get(values, nameof(PropertyRisk.Currency)), - TsiBuilding = Get(values, nameof(PropertyRisk.TsiBuilding)), - TsiBuildingCurrency = Get(values, nameof(PropertyRisk.TsiBuildingCurrency)) ?? Get(values, nameof(PropertyRisk.Currency)), - TsiContent = Get(values, nameof(PropertyRisk.TsiContent)), - TsiContentCurrency = Get(values, nameof(PropertyRisk.TsiContentCurrency)) ?? Get(values, nameof(PropertyRisk.Currency)), - TsiBi = Get(values, nameof(PropertyRisk.TsiBi)), - TsiBiCurrency = Get(values, nameof(PropertyRisk.TsiBiCurrency)) ?? 
Get(values, nameof(PropertyRisk.Currency)), - AccountNumber = Get(values, nameof(PropertyRisk.AccountNumber)), - OccupancyScheme = Get(values, nameof(PropertyRisk.OccupancyScheme)), - OccupancyCode = Get(values, nameof(PropertyRisk.OccupancyCode)), - ConstructionScheme = Get(values, nameof(PropertyRisk.ConstructionScheme)), - ConstructionCode = Get(values, nameof(PropertyRisk.ConstructionCode)), - BuildYear = Get(values, nameof(PropertyRisk.BuildYear)), - UpgradeYear = Get(values, nameof(PropertyRisk.UpgradeYear)), - NumberOfStories = Get(values, nameof(PropertyRisk.NumberOfStories)), - Sprinklers = Get(values, nameof(PropertyRisk.Sprinklers)), - GeocodedLocation = null + Configuration = Config }; - } - - /// - /// Generic value getter with type conversion support. - /// - private static T? Get(IDictionary dict, string key) - { - if (dict.TryGetValue(key, out var val) && val is not null) - { - if (val is T t) return t; - - var targetType = typeof(T); - var underlying = Nullable.GetUnderlyingType(targetType) ?? targetType; - // Empty strings should be treated as null/default - if (val is string s && string.IsNullOrWhiteSpace(s)) return default; + var importResponse = await Mesh.AwaitResponse( + importRequest, + o => o.WithTarget(Mesh.Address), + TestContext.Current.CancellationToken + ); - try - { - // Handle common type conversions - if (underlying == typeof(string)) - return (T)(object)val.ToString()!; - - if (underlying == typeof(int)) - { - if (val is int i) return (T)(object)i; - if (val is decimal dm) return (T)(object)(int)dm; - if (val is double d) return (T)(object)(int)d; - if (int.TryParse(val.ToString(), out var parsed)) return (T)(object)parsed; - } - - if (underlying == typeof(double)) - { - if (val is double d) return (T)(object)d; - if (val is decimal dm) return (T)(object)(double)dm; - if (val is int i) return (T)(object)(double)i; - if (double.TryParse(val.ToString(), out var parsed)) return (T)(object)parsed; - } - - if (underlying == typeof(decimal)) - { - if (val is decimal dm) return (T)(object)dm; - if (val is double d) return (T)(object)(decimal)d; - if (decimal.TryParse(val.ToString(), out var parsed)) return (T)(object)parsed; - } - - if (underlying == typeof(bool)) - { - if (val is bool b) return (T)(object)b; - var str = val.ToString()?.Trim().ToLowerInvariant(); - if (str == "true" || str == "yes" || str == "1") return (T)(object)true; - if (str == "false" || str == "no" || str == "0") return (T)(object)false; - } - - // Fallback to Convert.ChangeType - if (val is IConvertible) - return (T)Convert.ChangeType(val, underlying); - } - catch - { - // Swallow conversion errors and return default - } - } - return default; + // Assert + importResponse.Should().NotBeNull(); + importResponse.Message.Log.Status.Should().Be(ActivityStatus.Succeeded); + + // Verify data was imported + var workspace = Mesh.ServiceProvider.GetRequiredService(); + var risks = await workspace.GetObservable() + .Timeout(10.Seconds()) + .FirstAsync(x => x.Count > 0); + risks.Should().NotBeEmpty(); + Output.WriteLine($"Imported {risks.Count} risks using direct mapping"); } } diff --git a/modules/Insurance/MeshWeaver.Insurance.Test/PricingCatalogTests.cs b/modules/Insurance/MeshWeaver.Insurance.Test/PricingCatalogTests.cs new file mode 100644 index 000000000..cfb8c39fa --- /dev/null +++ b/modules/Insurance/MeshWeaver.Insurance.Test/PricingCatalogTests.cs @@ -0,0 +1,147 @@ +using System.Reactive.Linq; +using System.Text.Json; +using FluentAssertions; +using FluentAssertions.Extensions; +using 
MeshWeaver.Data; +using MeshWeaver.Insurance.Domain; +using MeshWeaver.Insurance.Domain.Services; +using MeshWeaver.Layout; +using MeshWeaver.Mesh; +using Microsoft.Extensions.DependencyInjection; +using Xunit; + +namespace MeshWeaver.Insurance.Test; + +public class PricingCatalogTests(ITestOutputHelper output) : InsuranceTestBase(output) +{ + protected override MeshBuilder ConfigureMesh(MeshBuilder builder) + { + return base.ConfigureMesh(builder) + .ConfigureServices(services => services + .AddSingleton() + ); + } + + [Fact] + public async Task GetPricingCatalog_ShouldReturnPricings() + { + // Act - Get the pricing catalog from the Insurance hub + var pricings = await GetPricingsAsync(); + + // Assert - Verify that the catalog contains pricings + pricings.Should().NotBeNull("catalog should not be null"); + pricings.Should().NotBeEmpty("catalog should contain sample pricings"); + + // Verify that pricings have required fields + pricings.All(p => !string.IsNullOrWhiteSpace(p.Id)).Should().BeTrue("all pricings should have an Id"); + pricings.All(p => !string.IsNullOrWhiteSpace(p.InsuredName)).Should().BeTrue("all pricings should have an InsuredName"); + pricings.All(p => !string.IsNullOrWhiteSpace(p.Status)).Should().BeTrue("all pricings should have a Status"); + + // Output summary + Output.WriteLine($"Successfully retrieved {pricings.Count} pricings from catalog"); + foreach (var pricing in pricings) + { + Output.WriteLine($" - {pricing.Id}: {pricing.InsuredName} ({pricing.Status}) - {pricing.LineOfBusiness}/{pricing.Country}"); + } + } + + [Fact] + public async Task GetPricingCatalog_ShouldHaveValidDimensions() + { + // Act + var pricings = await GetPricingsAsync(); + + // Assert - Verify dimension fields are populated + pricings.Should().NotBeEmpty(); + + pricings.All(p => !string.IsNullOrWhiteSpace(p.LineOfBusiness)).Should().BeTrue("all pricings should have a LineOfBusiness"); + pricings.All(p => !string.IsNullOrWhiteSpace(p.Country)).Should().BeTrue("all pricings should have a Country"); + pricings.All(p => !string.IsNullOrWhiteSpace(p.LegalEntity)).Should().BeTrue("all pricings should have a LegalEntity"); + pricings.All(p => !string.IsNullOrWhiteSpace(p.Currency)).Should().BeTrue("all pricings should have a Currency"); + + // Output dimension information + Output.WriteLine("Pricing dimensions:"); + Output.WriteLine($" Lines of Business: {string.Join(", ", pricings.Select(p => p.LineOfBusiness).Distinct())}"); + Output.WriteLine($" Countries: {string.Join(", ", pricings.Select(p => p.Country).Distinct())}"); + Output.WriteLine($" Legal Entities: {string.Join(", ", pricings.Select(p => p.LegalEntity).Distinct())}"); + Output.WriteLine($" Currencies: {string.Join(", ", pricings.Select(p => p.Currency).Distinct())}"); + } + + [Fact] + public async Task GetPricingCatalog_ShouldHaveValidDates() + { + // Act + var pricings = await GetPricingsAsync(); + + // Assert + pricings.Should().NotBeEmpty(); + + foreach (var pricing in pricings) + { + pricing.InceptionDate.Should().NotBeNull( + $"pricing {pricing.Id} should have an inception date"); + pricing.ExpirationDate.Should().NotBeNull( + $"pricing {pricing.Id} should have an expiration date"); + + if (pricing.InceptionDate.HasValue && pricing.ExpirationDate.HasValue) + { + pricing.ExpirationDate.Value.Should().BeAfter(pricing.InceptionDate.Value, + $"pricing {pricing.Id} expiration date should be after inception date"); + } + + pricing.UnderwritingYear.Should().NotBeNull( + $"pricing {pricing.Id} should have an underwriting year"); + 
pricing.UnderwritingYear.Should().BeGreaterThan(2000, + $"pricing {pricing.Id} should have a valid underwriting year"); + } + + Output.WriteLine($"All {pricings.Count} pricings have valid dates"); + } + + [Fact] + public async Task PricingHub_ShouldStartSuccessfully() + { + // This test verifies that the pricing hub initializes correctly + // by successfully retrieving the catalog without errors + + // Act + var pricings = await GetPricingsAsync(); + + // Assert - Hub started if we can get data + pricings.Should().NotBeNull("hub should start and return catalog"); + + // Verify the hub is accessible + Mesh.Should().NotBeNull("mesh should be initialized"); + Mesh.Address.Should().NotBeNull("mesh should have an address"); + + Output.WriteLine($"Pricing hub started successfully"); + Output.WriteLine($"Hub Address: {Mesh.Address}"); + Output.WriteLine($"Retrieved {pricings.Count} pricings from catalog"); + } + + [Fact] + public async Task GetPricingCatalog_UsingLayoutAreaReference_ShouldReturnPricingsControl() + { + // Arrange + var reference = new LayoutAreaReference("Pricings"); + var workspace = Mesh.ServiceProvider.GetRequiredService(); + + // Act - Get the remote stream using LayoutAreaReference + var stream = workspace.GetRemoteStream( + InsuranceApplicationAttribute.Address, + reference + ); + + // Get the control from the stream + var control = await stream.GetControlStream(reference.Area) + .Timeout(10.Seconds()) + .FirstAsync(x => x != null); + + // Assert + control.Should().NotBeNull("layout area should return a control"); + + // Output control information + Output.WriteLine($"Received control type: {control.GetType().Name}"); + Output.WriteLine($"Successfully retrieved Pricings layout area using GetRemoteStream"); + } +} diff --git a/modules/Northwind/MeshWeaver.Northwind.AI/NorthwindAgent.cs b/modules/Northwind/MeshWeaver.Northwind.AI/NorthwindAgent.cs index 68da2f9a8..9889b1b76 100644 --- a/modules/Northwind/MeshWeaver.Northwind.AI/NorthwindAgent.cs +++ b/modules/Northwind/MeshWeaver.Northwind.AI/NorthwindAgent.cs @@ -26,20 +26,20 @@ public class NorthwindAgent(IMessageHub hub) : IInitializableAgent, IAgentWithPl public string Instructions => """ You are the NorthwindAgent, specialized in working with Northwind business data. You have access to: - + - Customer data: information about companies, contacts, and addresses - Order data: sales orders, order details, and order history - Product data: product catalog, categories, suppliers, and inventory - Employee data: staff information and territories - Geographic data: regions, territories, and shipping information - + You can help users: - Query and analyze business data - Generate reports and insights - Answer questions about customers, orders, products, and sales - Provide data-driven recommendations - Layout areas (reports, views, charts, dashboards) related to Northwind data - + Use the DataPlugin to access structured domain data and the LayoutAreaPlugin to display visual components. Always provide accurate, data-driven responses based on the available Northwind data. 
"""; @@ -54,13 +54,13 @@ IEnumerable IAgentWithPlugins.GetPlugins(IAgentChat chat) } private static readonly Address NorthwindAddress = new ApplicationAddress("Northwind"); - + async Task IInitializableAgent.InitializeAsync() { var typeResponse = await hub.AwaitResponse(new GetDomainTypesRequest(), o => o.WithTarget(NorthwindAddress)); - typeDefinitionMap = typeResponse.Message.Types.ToDictionary(x => x.Name); + typeDefinitionMap = typeResponse?.Message?.Types?.ToDictionary(x => x.Name!); var layoutResponse = await hub.AwaitResponse(new GetLayoutAreasRequest(), o => o.WithTarget(NorthwindAddress)); - layoutDefinitionMap = layoutResponse.Message.Areas.ToDictionary(x => x.Area); + layoutDefinitionMap = layoutResponse?.Message?.Areas?.ToDictionary(x => x.Area); } /// diff --git a/portal/MeshWeaver.Portal.Shared.Web/SharedPortalConfiguration.cs b/portal/MeshWeaver.Portal.Shared.Web/SharedPortalConfiguration.cs index 00054952f..0cbf5fe29 100644 --- a/portal/MeshWeaver.Portal.Shared.Web/SharedPortalConfiguration.cs +++ b/portal/MeshWeaver.Portal.Shared.Web/SharedPortalConfiguration.cs @@ -43,6 +43,7 @@ public static void ConfigureWebPortalServices(this WebApplicationBuilder builder .AddEnvironmentVariables(); var services = builder.Services; + services.AddRazorPages() .AddMicrosoftIdentityUI(); diff --git a/portal/MeshWeaver.Portal/appsettings.Development.json b/portal/MeshWeaver.Portal/appsettings.Development.json index 0ea4c9e68..fda758d7c 100644 --- a/portal/MeshWeaver.Portal/appsettings.Development.json +++ b/portal/MeshWeaver.Portal/appsettings.Development.json @@ -10,9 +10,6 @@ "SourceType": "FileSystem", "BasePath": "../../modules/Insurance/Files" }, - "GoogleMaps": { - "ApiKey": "" - }, "EntraId": { "Instance": "https://login.microsoftonline.com/", "Domain": "meshweaverportal.onmicrosoft.com", // Your Entra ID tenant domain diff --git a/portal/MeshWeaver.Portal/appsettings.json b/portal/MeshWeaver.Portal/appsettings.json index 6a12f33d3..0bc0edbe3 100644 --- a/portal/MeshWeaver.Portal/appsettings.json +++ b/portal/MeshWeaver.Portal/appsettings.json @@ -4,7 +4,7 @@ "Default": "Warning", "Microsoft.AspNetCore": "Warning", "MeshWeaver": "Warning", - "MeshWeaver.Messaging.MessageService": "Warning" + "MeshWeaver.Messaging.MessageService": "Information" }, "Console": { "IncludeScopes": true, diff --git a/portal/aspire/MeshWeaver.Portal.ServiceDefaults/SerilogExtensions.cs b/portal/aspire/MeshWeaver.Portal.ServiceDefaults/SerilogExtensions.cs index b7f932ffe..fd350cc73 100644 --- a/portal/aspire/MeshWeaver.Portal.ServiceDefaults/SerilogExtensions.cs +++ b/portal/aspire/MeshWeaver.Portal.ServiceDefaults/SerilogExtensions.cs @@ -21,7 +21,7 @@ public static MeshHostApplicationBuilder AddEfCoreSerilog(this MeshHostApplicati builder.ConfigureHub(h => h.WithInitialization(hub => { - messageDeliveryPolicy.JsonOptions = hub.JsonSerializerOptions; + messageDeliveryPolicy.JsonOptions = hub.CreateLoggingSerializerOptions(); sink.Initialize(hub.ServiceProvider); })); @@ -55,7 +55,7 @@ public static MeshHostApplicationBuilder AddEfCoreMessageLog(this MeshHostApplic builder.ConfigureHub(h => h.WithInitialization(hub => { - messageDeliveryPolicy.JsonOptions = hub.JsonSerializerOptions; + messageDeliveryPolicy.JsonOptions = hub.CreateLoggingSerializerOptions(); sink.Initialize(hub.ServiceProvider); })); diff --git a/src/MeshWeaver.AI/MeshWeaver.AI.csproj b/src/MeshWeaver.AI/MeshWeaver.AI.csproj index 56ee53906..546a6ca40 100644 --- a/src/MeshWeaver.AI/MeshWeaver.AI.csproj +++ 
b/src/MeshWeaver.AI/MeshWeaver.AI.csproj @@ -6,10 +6,13 @@ + + + diff --git a/src/MeshWeaver.AI/Plugins/CollectionPlugin.cs b/src/MeshWeaver.AI/Plugins/CollectionPlugin.cs deleted file mode 100644 index 614b343bf..000000000 --- a/src/MeshWeaver.AI/Plugins/CollectionPlugin.cs +++ /dev/null @@ -1,184 +0,0 @@ -using System.ComponentModel; -using MeshWeaver.ContentCollections; -using MeshWeaver.Messaging; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.SemanticKernel; - -namespace MeshWeaver.AI.Plugins; - -/// -/// Generalized plugin for reading and writing files to configured collections -/// -public class CollectionPlugin(IMessageHub hub) -{ - private readonly IContentService contentService = hub.ServiceProvider.GetRequiredService(); - - [KernelFunction] - [Description("Gets the content of a file from a specified collection.")] - public async Task GetFile( - [Description("The name of the collection to read from")] string collectionName, - [Description("The path to the file within the collection")] string filePath, - CancellationToken cancellationToken = default) - { - try - { - var collection = await contentService.GetCollectionAsync(collectionName, cancellationToken); - if (collection == null) - return $"Collection '{collectionName}' not found."; - - await using var stream = await collection.GetContentAsync(filePath, cancellationToken); - if (stream == null) - return $"File '{filePath}' not found in collection '{collectionName}'."; - - using var reader = new StreamReader(stream); - var content = await reader.ReadToEndAsync(); - - return content; - } - catch (FileNotFoundException) - { - return $"File '{filePath}' not found in collection '{collectionName}'."; - } - catch (Exception ex) - { - return $"Error reading file '{filePath}' from collection '{collectionName}': {ex.Message}"; - } - } - [KernelFunction] - [Description("Saves content as a file to a specified collection.")] - public async Task SaveFile( - [Description("The name of the collection to save to")] string collectionName, - [Description("The path where the file should be saved within the collection")] string filePath, - [Description("The content to save to the file")] string content, - CancellationToken cancellationToken = default) - { - try - { - var collection = await contentService.GetCollectionAsync(collectionName, cancellationToken); - if (collection == null) - return $"Collection '{collectionName}' not found."; // Ensure directory structure exists if the collection has a base path - EnsureDirectoryExists(collection, filePath); - - // Extract directory and filename components - var directoryPath = Path.GetDirectoryName(filePath) ?? ""; - var fileName = Path.GetFileName(filePath); - - await using var stream = new MemoryStream(System.Text.Encoding.UTF8.GetBytes(content)); - await collection.SaveFileAsync(directoryPath, fileName, stream); - - return $"File '{filePath}' successfully saved to collection '{collectionName}'. 
Full path: {directoryPath}/{fileName}"; - } - catch (Exception ex) - { - return $"Error saving file '{filePath}' to collection '{collectionName}': {ex.Message}"; - } - } - - [KernelFunction] - [Description("Lists all files in a specified collection.")] - public async Task ListFiles( - [Description("The name of the collection to list files from")] string collectionName, - [Description("The path for which to load files.")] string path = "/", - CancellationToken cancellationToken = default) - { - try - { - var collection = await contentService.GetCollectionAsync(collectionName, cancellationToken); - if (collection == null) - return $"Collection '{collectionName}' not found."; - - var files = await collection.GetFilesAsync(path); - var fileList = files.Select(f => new { f.Name, f.Path }).ToList(); - - if (!fileList.Any()) - return $"No files found in collection '{collectionName}'."; - - return string.Join("\n", fileList.Select(f => $"- {f.Name} ({f.Path})")); - } - catch (Exception ex) - { - return $"Error listing files in collection '{collectionName}': {ex.Message}"; - } - } - - [KernelFunction] - [Description("Checks if a specific file exists in a collection.")] - public async Task FileExists( - [Description("The name of the collection to check")] string collectionName, - [Description("The path to the file within the collection")] string filePath, - CancellationToken cancellationToken = default) - { - try - { - var collection = await contentService.GetCollectionAsync(collectionName, cancellationToken); - if (collection == null) - return $"Collection '{collectionName}' not found."; - - await using var stream = await collection.GetContentAsync(filePath, cancellationToken); - if (stream == null) - return $"File '{filePath}' does not exist in collection '{collectionName}'."; - - return $"File '{filePath}' exists in collection '{collectionName}'."; - } - catch (FileNotFoundException) - { - return $"File '{filePath}' does not exist in collection '{collectionName}'."; - } - catch (Exception ex) - { - return $"Error checking file '{filePath}' in collection '{collectionName}': {ex.Message}"; - } - } - - [KernelFunction] - [Description("Generates a unique filename with timestamp for saving temporary files.")] - public string GenerateUniqueFileName( - [Description("The base name for the file (without extension)")] string baseName, - [Description("The file extension (e.g., 'json', 'txt')")] string extension) - { - var timestamp = DateTime.UtcNow.ToString("yyyyMMdd_HHmmss_fff"); - return $"{baseName}_{timestamp}.{extension.TrimStart('.')}"; - } - - /// - /// Ensures that the directory structure exists for the given file path within the collection. - /// - /// The collection to check - /// The file path that may contain directories - private void EnsureDirectoryExists(object collection, string filePath) - { - try - { - // Normalize path separators and get the directory path from the file path - var normalizedPath = filePath.Replace('/', Path.DirectorySeparatorChar); - var directoryPath = Path.GetDirectoryName(normalizedPath); - - if (string.IsNullOrEmpty(directoryPath) || directoryPath == "." || directoryPath == Path.DirectorySeparatorChar.ToString()) - { - // No directory structure needed, file is in root - return; - } - - // Try to get the collection's base path using reflection if available - var collectionType = collection.GetType(); - var basePathProperty = collectionType.GetProperty("BasePath") ?? - collectionType.GetProperty("Path") ?? 
- collectionType.GetProperty("RootPath"); - - if (basePathProperty != null) - { - var basePath = basePathProperty.GetValue(collection) as string; - if (!string.IsNullOrEmpty(basePath)) - { - var fullDirectoryPath = Path.Combine(basePath, directoryPath); - Directory.CreateDirectory(fullDirectoryPath); - } - } - } - catch (Exception) - { - // If we can't create directories through reflection, - // let the SaveFileAsync method handle any directory creation or fail gracefully - } - } -} diff --git a/src/MeshWeaver.AI/Plugins/ContentCollectionPlugin.cs b/src/MeshWeaver.AI/Plugins/ContentPlugin.cs similarity index 54% rename from src/MeshWeaver.AI/Plugins/ContentCollectionPlugin.cs rename to src/MeshWeaver.AI/Plugins/ContentPlugin.cs index e47fb7f4f..c077df234 100644 --- a/src/MeshWeaver.AI/Plugins/ContentCollectionPlugin.cs +++ b/src/MeshWeaver.AI/Plugins/ContentPlugin.cs @@ -2,16 +2,22 @@ using System.Reactive.Linq; using System.Reflection; using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using ClosedXML.Excel; +using DocumentFormat.OpenXml.Packaging; +using DocumentFormat.OpenXml.Wordprocessing; using MeshWeaver.ContentCollections; using MeshWeaver.Messaging; using Microsoft.Extensions.DependencyInjection; using Microsoft.SemanticKernel; +using UglyToad.PdfPig; namespace MeshWeaver.AI.Plugins; /// -/// Plugin for managing documents and files in content collections. -/// Provides functions for listing, loading, saving, and deleting documents. +/// Generalized plugin for reading and writing files to configured collections. +/// Supports context resolution via LayoutAreaReference and dynamic collection configuration. /// /// Context Resolution: /// - When LayoutAreaReference.Area is "Content" or "Collection" @@ -31,14 +37,27 @@ namespace MeshWeaver.AI.Plugins; /// - Parsed collection: "Documents" /// - Parsed path: "/" (root) /// -public class ContentCollectionPlugin +public class ContentPlugin { + private readonly IMessageHub hub; private readonly IContentService contentService; - private readonly ContentCollectionPluginConfig config; - private readonly IAgentChat chat; + private readonly ContentPluginConfig config; + private readonly IAgentChat? chat; - public ContentCollectionPlugin(IMessageHub hub, ContentCollectionPluginConfig config, IAgentChat chat) + /// + /// Creates a ContentPlugin with basic functionality (no context resolution). + /// + public ContentPlugin(IMessageHub hub) + : this(hub, new ContentPluginConfig { Collections = [] }, null!) + { + } + + /// + /// Creates a ContentPlugin with context resolution and dynamic collection configuration. 
+ /// + public ContentPlugin(IMessageHub hub, ContentPluginConfig config, IAgentChat chat) { + this.hub = hub; this.config = config; this.chat = chat; contentService = hub.ServiceProvider.GetRequiredService(); @@ -62,10 +81,13 @@ public ContentCollectionPlugin(IMessageHub hub, ContentCollectionPluginConfig co if (!string.IsNullOrEmpty(collectionName)) return collectionName; + if (chat == null) + return null; + // Only parse from LayoutAreaReference.Id when area is "Content" or "Collection" if (chat.Context?.LayoutArea != null) { - var area = chat.Context.LayoutArea.Area?.ToString(); + var area = chat.Context.LayoutArea.Area; if (area == "Content" || area == "Collection") { var id = chat.Context.LayoutArea.Id?.ToString(); @@ -84,12 +106,9 @@ public ContentCollectionPlugin(IMessageHub hub, ContentCollectionPluginConfig co if (config.ContextToConfigMap != null && chat.Context != null) { var contextConfig = config.ContextToConfigMap(chat.Context); - if (contextConfig != null) - { - // Add the dynamically created config to IContentService - contentService.AddConfiguration(contextConfig); - return contextConfig.Name; - } + // Add the dynamically created config to IContentService + contentService.AddConfiguration(contextConfig); + return contextConfig.Name; } // Fall back to the first collection from config as default @@ -105,10 +124,10 @@ public ContentCollectionPlugin(IMessageHub hub, ContentCollectionPluginConfig co /// private string? GetPathFromContext() { - if (chat.Context?.LayoutArea == null) + if (chat?.Context?.LayoutArea == null) return null; - var area = chat.Context.LayoutArea.Area?.ToString(); + var area = chat.Context.LayoutArea.Area; if (area != "Content" && area != "Collection") return null; @@ -127,31 +146,295 @@ public ContentCollectionPlugin(IMessageHub hub, ContentCollectionPluginConfig co } [KernelFunction] - [Description("Lists all available collections with their configurations.")] - public Task GetCollections() + [Description("Gets the content of a file from a specified collection. Supports Excel, Word, PDF, and text files. If collection/path not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] + public async Task GetContent( + [Description("The path to the file within the collection. If omitted: when Area='Content'/'Collection', extracts from Id (after first '/'); else null.")] + string? filePath = null, + [Description("The name of the collection to read from. If omitted: when Area='Content'/'Collection', extracts from Id (before '/'); else uses ContextToConfigMap/config.")] + string? collectionName = null, + [Description("Optional: number of rows to read. If null, reads entire file. For Excel files, reads first N rows from each worksheet.")] + int? numberOfRows = null, + CancellationToken cancellationToken = default) { + var resolvedCollectionName = GetCollectionName(collectionName); + if (string.IsNullOrEmpty(resolvedCollectionName)) + return "No collection specified and no default collection configured."; + + var resolvedFilePath = filePath ?? 
GetPathFromContext(); + if (string.IsNullOrEmpty(resolvedFilePath)) + return "No file path specified and no path found in context."; + try { - if (config.Collections.Count == 0) - return Task.FromResult("No collections configured."); + var collection = await contentService.GetCollectionAsync(resolvedCollectionName, cancellationToken); + if (collection == null) + return $"Collection '{resolvedCollectionName}' not found."; - var collectionList = config.Collections.Select(c => new + await using var stream = await collection.GetContentAsync(resolvedFilePath, cancellationToken); + if (stream == null) + return $"File '{resolvedFilePath}' not found in collection '{resolvedCollectionName}'."; + + // Check file type and read accordingly + var extension = Path.GetExtension(resolvedFilePath).ToLowerInvariant(); + if (extension == ".xlsx" || extension == ".xls") { - c.Name, - DisplayName = c.DisplayName ?? c.Name, - Address = c.Address?.ToString() ?? "No address", - c.SourceType, - BasePath = c.BasePath ?? "Not specified" - }).ToList(); + return ReadExcelFile(stream, resolvedFilePath, numberOfRows); + } + else if (extension == ".docx") + { + return ReadWordFile(stream, resolvedFilePath, numberOfRows); + } + else if (extension == ".pdf") + { + return ReadPdfFile(stream, resolvedFilePath, numberOfRows); + } - var result = string.Join("\n", collectionList.Select(c => - $"- {c.DisplayName} (Name: {c.Name}, Type: {c.SourceType}, Path: {c.BasePath}, Address: {c.Address})")); + // For other files, read as text + using var reader = new StreamReader(stream); + if (numberOfRows.HasValue) + { + var sb = new StringBuilder(); + var linesRead = 0; + while (!reader.EndOfStream && linesRead < numberOfRows.Value) + { + var line = await reader.ReadLineAsync(cancellationToken); + sb.AppendLine(line); + linesRead++; + } + return sb.ToString(); + } + else + { + var content = await reader.ReadToEndAsync(cancellationToken); + return content; + } + } + catch (FileNotFoundException) + { + return $"File '{resolvedFilePath}' not found in collection '{resolvedCollectionName}'."; + } + catch (Exception ex) + { + return $"Error reading file '{resolvedFilePath}' from collection '{resolvedCollectionName}': {ex.Message}"; + } + } - return Task.FromResult(result); + private string ReadExcelFile(Stream stream, string filePath, int? numberOfRows) + { + try + { + using var wb = new XLWorkbook(stream); + var sb = new StringBuilder(); + + foreach (var ws in wb.Worksheets) + { + var used = ws.RangeUsed(); + sb.AppendLine($"## Sheet: {ws.Name}"); + sb.AppendLine(); + if (used is null) + { + sb.AppendLine("(No data)"); + sb.AppendLine(); + continue; + } + + var firstRow = used.FirstRow().RowNumber(); + var lastRow = numberOfRows.HasValue + ? Math.Min(used.FirstRow().RowNumber() + numberOfRows.Value - 1, used.LastRow().RowNumber()) + : used.LastRow().RowNumber(); + var firstCol = 1; + var lastCol = used.LastColumn().ColumnNumber(); + + // Build markdown table with column letters as headers + var columnHeaders = new List { "Row" }; + for (var c = firstCol; c <= lastCol; c++) + { + // Convert column number to Excel letter (1=A, 2=B, ..., 27=AA, etc.) 
+ columnHeaders.Add(GetExcelColumnLetter(c)); + } + + // Header row + sb.AppendLine("| " + string.Join(" | ", columnHeaders) + " |"); + // Separator row + sb.AppendLine("|" + string.Join("", columnHeaders.Select(_ => "---:|"))); + + // Data rows + for (var r = firstRow; r <= lastRow; r++) + { + var rowVals = new List { r.ToString() }; + for (var c = firstCol; c <= lastCol; c++) + { + var cell = ws.Cell(r, c); + var raw = cell.GetValue(); + var val = raw?.Replace('\n', ' ').Replace('\r', ' ').Replace("|", "\\|").Trim(); + // Empty cells show as empty in table + rowVals.Add(string.IsNullOrEmpty(val) ? "" : val); + } + + sb.AppendLine("| " + string.Join(" | ", rowVals) + " |"); + } + + sb.AppendLine(); + } + + return sb.ToString(); } catch (Exception ex) { - return Task.FromResult($"Error retrieving collections: {ex.Message}"); + return $"Error reading Excel file '{filePath}': {ex.Message}"; + } + } + + private static string GetExcelColumnLetter(int columnNumber) + { + var columnLetter = ""; + while (columnNumber > 0) + { + var modulo = (columnNumber - 1) % 26; + columnLetter = Convert.ToChar('A' + modulo) + columnLetter; + columnNumber = (columnNumber - 1) / 26; + } + return columnLetter; + } + + private string ReadWordFile(Stream stream, string filePath, int? numberOfRows) + { + try + { + using var wordDoc = WordprocessingDocument.Open(stream, false); + var body = wordDoc.MainDocumentPart?.Document.Body; + + if (body == null) + return $"Word document '{filePath}' has no readable content."; + + var sb = new StringBuilder(); + sb.AppendLine($"# Document: {Path.GetFileName(filePath)}"); + sb.AppendLine(); + + var paragraphs = body.Elements().ToList(); + var paragraphsToRead = numberOfRows.HasValue + ? paragraphs.Take(numberOfRows.Value).ToList() + : paragraphs; + + foreach (var paragraph in paragraphsToRead) + { + var text = paragraph.InnerText; + if (!string.IsNullOrWhiteSpace(text)) + { + sb.AppendLine(text); + sb.AppendLine(); + } + } + + // Also handle tables + var tables = body.Elements().ToList(); + foreach (var table in tables) + { + sb.AppendLine("## Table"); + sb.AppendLine(); + + var rows = table.Elements().ToList(); + var rowsToRead = numberOfRows.HasValue + ? rows.Take(numberOfRows.Value).ToList() + : rows; + + foreach (var row in rowsToRead) + { + var cells = row.Elements().ToList(); + var cellTexts = cells.Select(c => c.InnerText.Replace('|', '\\').Trim()).ToList(); + sb.AppendLine("| " + string.Join(" | ", cellTexts) + " |"); + } + + sb.AppendLine(); + } + + return sb.ToString(); + } + catch (Exception ex) + { + return $"Error reading Word document '{filePath}': {ex.Message}"; + } + } + + private string ReadPdfFile(Stream stream, string filePath, int? numberOfRows) + { + try + { + using var pdfDocument = PdfDocument.Open(stream); + var sb = new StringBuilder(); + sb.AppendLine($"# PDF Document: {Path.GetFileName(filePath)}"); + sb.AppendLine($"Total pages: {pdfDocument.NumberOfPages}"); + sb.AppendLine(); + + var pagesToRead = numberOfRows.HasValue + ? 
Math.Min(numberOfRows.Value, pdfDocument.NumberOfPages) + : pdfDocument.NumberOfPages; + + for (int pageNum = 1; pageNum <= pagesToRead; pageNum++) + { + var page = pdfDocument.GetPage(pageNum); + sb.AppendLine($"## Page {pageNum}"); + sb.AppendLine(); + + var text = page.Text; + if (!string.IsNullOrWhiteSpace(text)) + { + sb.AppendLine(text); + } + else + { + sb.AppendLine("(No text content)"); + } + sb.AppendLine(); + } + + return sb.ToString(); + } + catch (Exception ex) + { + return $"Error reading PDF document '{filePath}': {ex.Message}"; + } + } + + [KernelFunction] + [Description("Saves content as a file to a specified collection. If collection not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] + public async Task SaveFile( + [Description("The path where the file should be saved within the collection")] string filePath, + [Description("The content to save to the file")] string content, + [Description("The name of the collection to save to. If omitted: when Area='Content'/'Collection', extracts from Id (before '/'); else uses ContextToConfigMap/config.")] string? collectionName = null, + CancellationToken cancellationToken = default) + { + var resolvedCollectionName = GetCollectionName(collectionName); + if (string.IsNullOrEmpty(resolvedCollectionName)) + return "No collection specified and no default collection configured."; + + if (string.IsNullOrEmpty(filePath)) + return "File path is required."; + + try + { + var collection = await contentService.GetCollectionAsync(resolvedCollectionName, cancellationToken); + if (collection == null) + return $"Collection '{resolvedCollectionName}' not found."; + + // Ensure directory structure exists if the collection has a base path + EnsureDirectoryExists(collection, filePath); + + // Extract directory and filename components + var directoryPath = Path.GetDirectoryName(filePath) ?? ""; + var fileName = Path.GetFileName(filePath); + + if (string.IsNullOrEmpty(fileName)) + return $"Invalid file path: '{filePath}'. Must include a filename."; + + await using var stream = new MemoryStream(Encoding.UTF8.GetBytes(content)); + await collection.SaveFileAsync(directoryPath, fileName, stream); + + return $"File '{filePath}' successfully saved to collection '{resolvedCollectionName}'."; + } + catch (Exception ex) + { + return $"Error saving file '{filePath}' to collection '{resolvedCollectionName}': {ex.Message}"; } } @@ -186,6 +469,35 @@ public async Task ListFiles( } } + [KernelFunction] + [Description("Lists all available collections with their configurations.")] + public Task GetCollections() + { + try + { + if (config.Collections.Count == 0) + return Task.FromResult("No collections configured."); + + var collectionList = config.Collections.Select(c => new + { + c.Name, + DisplayName = c.DisplayName ?? c.Name, + Address = c.Address?.ToString() ?? "No address", + c.SourceType, + BasePath = c.BasePath ?? "Not specified" + }).ToList(); + + var result = string.Join("\n", collectionList.Select(c => + $"- {c.DisplayName} (Name: {c.Name}, Type: {c.SourceType}, Path: {c.BasePath}, Address: {c.Address})")); + + return Task.FromResult(result); + } + catch (Exception ex) + { + return Task.FromResult($"Error retrieving collections: {ex.Message}"); + } + } + [KernelFunction] [Description("Lists all folders in a specified collection at a given path. 
If collection/path not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] public async Task ListFolders( @@ -253,7 +565,7 @@ public async Task ListCollectionItems( } [KernelFunction] - [Description("Gets the content of a specific document from a collection. If collection/path not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] + [Description("Gets the content of a specific document from a collection (simple text reading). If collection/path not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] public async Task GetDocument( [Description("Document path in collection. If omitted: when Area='Content'/'Collection', extracts from Id (after first '/', e.g., 'Slip.md' from 'Submissions-Microsoft-2026/Slip.md'); else null.")] string? documentPath = null, [Description("Collection name. If omitted: when Area='Content'/'Collection', extracts from Id (before '/'); else uses ContextToConfigMap/config.")] string? collectionName = null, @@ -290,41 +602,6 @@ public async Task GetDocument( } } - [KernelFunction] - [Description("Saves content as a document to a specified collection. If collection not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] - public async Task SaveDocument( - [Description("The path where the document should be saved within the collection")] string documentPath, - [Description("The content to save to the document")] string content, - [Description("Collection name. If omitted: when Area='Content'/'Collection', extracts from Id (before '/'); else uses ContextToConfigMap/config.")] string? collectionName = null, - CancellationToken cancellationToken = default) - { - var resolvedCollectionName = GetCollectionName(collectionName); - if (string.IsNullOrEmpty(resolvedCollectionName)) - return "No collection specified and no default collection configured."; - - try - { - var collection = await contentService.GetCollectionAsync(resolvedCollectionName, cancellationToken); - if (collection == null) - return $"Collection '{resolvedCollectionName}' not found."; - - var directoryPath = Path.GetDirectoryName(documentPath) ?? ""; - var fileName = Path.GetFileName(documentPath); - - if (string.IsNullOrEmpty(fileName)) - return $"Invalid document path: '{documentPath}'. Must include a filename."; - - await using var stream = new MemoryStream(Encoding.UTF8.GetBytes(content)); - await collection.SaveFileAsync(directoryPath, fileName, stream); - - return $"Document '{documentPath}' successfully saved to collection '{resolvedCollectionName}'."; - } - catch (Exception ex) - { - return $"Error saving document '{documentPath}' to collection: {ex.Message}"; - } - } - [KernelFunction] [Description("Deletes a file from a specified collection. If collection/path not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] public async Task DeleteFile( @@ -532,14 +809,204 @@ public async Task GetContentType( } } + [KernelFunction] + [Description("Checks if a specific file exists in a collection. 
If collection/path not provided: when Area='Content' or 'Collection', parses from LayoutAreaReference.Id ('{collection}/{path}'); otherwise uses ContextToConfigMap or plugin config.")] + public async Task FileExists( + [Description("The path to the file within the collection. If omitted: when Area='Content'/'Collection', extracts from Id (after first '/'); else null.")] string? filePath = null, + [Description("The name of the collection to check. If omitted: when Area='Content'/'Collection', extracts from Id (before '/'); else uses ContextToConfigMap/config.")] string? collectionName = null, + CancellationToken cancellationToken = default) + { + var resolvedCollectionName = GetCollectionName(collectionName); + if (string.IsNullOrEmpty(resolvedCollectionName)) + return "No collection specified and no default collection configured."; + + var resolvedFilePath = filePath ?? GetPathFromContext(); + if (string.IsNullOrEmpty(resolvedFilePath)) + return "No file path specified and no path found in context."; + + try + { + var collection = await contentService.GetCollectionAsync(resolvedCollectionName, cancellationToken); + if (collection == null) + return $"Collection '{resolvedCollectionName}' not found."; + + await using var stream = await collection.GetContentAsync(resolvedFilePath, cancellationToken); + if (stream == null) + return $"File '{resolvedFilePath}' does not exist in collection '{resolvedCollectionName}'."; + + return $"File '{resolvedFilePath}' exists in collection '{resolvedCollectionName}'."; + } + catch (FileNotFoundException) + { + return $"File '{resolvedFilePath}' does not exist in collection '{resolvedCollectionName}'."; + } + catch (Exception ex) + { + return $"Error checking file '{resolvedFilePath}' in collection '{resolvedCollectionName}': {ex.Message}"; + } + } + + [KernelFunction] + [Description("Generates a unique filename with timestamp for saving temporary files.")] + public string GenerateUniqueFileName( + [Description("The base name for the file (without extension)")] string baseName, + [Description("The file extension (e.g., 'json', 'txt')")] string extension) + { + var timestamp = DateTime.UtcNow.ToString("yyyyMMdd_HHmmss_fff"); + return $"{baseName}_{timestamp}.{extension.TrimStart('.')}"; + } + + [KernelFunction] + [Description("Imports data from a file in a collection to a specified address.")] + public async Task Import( + [Description("The path to the file to import")] string path, + [Description("The name of the collection containing the file (optional if default collection is configured)")] string? collection = null, + [Description("The target address for the import (optional if default address is configured), can be a string like 'AddressType/id' or an Address object")] object? address = null, + [Description("The import format to use (optional, defaults to 'Default')")] string? format = null, + [Description("Optional import configuration as JSON string. When provided, this will be used instead of the format parameter.")] string? 
configuration = null, + CancellationToken cancellationToken = default) + { + try + { + if (string.IsNullOrWhiteSpace(collection)) + return "Collection name is required."; + + if (address == null) + return "Target address is required."; + + // Parse the address - handle both string and Address types + Address targetAddress; + if (address is string addressString) + { + targetAddress = hub.GetAddress(addressString); + } + else if (address is Address addr) + { + targetAddress = addr; + } + else + { + return $"Invalid address type: {address.GetType().Name}. Expected string or Address."; + } + + // Build ImportRequest JSON structure + var importRequestJson = new JsonObject + { + ["$type"] = "MeshWeaver.Import.ImportRequest", + ["source"] = new JsonObject + { + ["$type"] = "MeshWeaver.Import.CollectionSource", + ["collection"] = collection, + ["path"] = path + }, + ["format"] = format ?? "Default" + }; + + // Add configuration if provided + if (!string.IsNullOrWhiteSpace(configuration)) + { + var configNode = JsonNode.Parse(configuration); + if (configNode != null) + { + importRequestJson["configuration"] = configNode; + } + } + + // Serialize and deserialize through hub's serializer to get proper type + var jsonString = importRequestJson.ToJsonString(); + var importRequestObj = JsonSerializer.Deserialize(jsonString, hub.JsonSerializerOptions)!; + + // Post the request to the hub + var responseMessage = await hub.AwaitResponse( + importRequestObj, + o => o.WithTarget(targetAddress), + cancellationToken + ); + + // Serialize the response back to JSON for processing + var responseJson = JsonSerializer.Serialize(responseMessage, hub.JsonSerializerOptions); + var responseObj = JsonNode.Parse(responseJson)!; + + var log = responseObj["log"] as JsonObject; + var status = log?["status"]?.ToString() ?? "Unknown"; + var messages = log?["messages"] as JsonArray ?? new JsonArray(); + + var result = $"Import {status.ToLower()}.\n"; + if (messages.Count > 0) + { + result += "Log messages:\n"; + foreach (var msg in messages) + { + if (msg is JsonObject msgObj) + { + var level = msgObj["logLevel"]?.ToString() ?? "Info"; + var message = msgObj["message"]?.ToString() ?? ""; + result += $" [{level}] {message}\n"; + } + } + } + + return result; + } + catch (Exception ex) + { + return $"Error importing file '{path}' from collection '{collection}' to address '{address}': {ex.Message}"; + } + } + + /// + /// Creates a KernelPlugin from this instance using reflection. + /// public KernelPlugin CreateKernelPlugin() { var plugin = KernelPluginFactory.CreateFromFunctions( - nameof(ContentCollectionPlugin), + nameof(ContentPlugin), GetType().GetMethods(BindingFlags.Instance | BindingFlags.Public) .Where(m => m.GetCustomAttribute() != null) .Select(m => KernelFunctionFactory.CreateFromMethod(m, this)) ); return plugin; } + + /// + /// Ensures that the directory structure exists for the given file path within the collection. + /// + /// The collection to check + /// The file path that may contain directories + private void EnsureDirectoryExists(object collection, string filePath) + { + try + { + // Normalize path separators and get the directory path from the file path + var normalizedPath = filePath.Replace('/', Path.DirectorySeparatorChar); + var directoryPath = Path.GetDirectoryName(normalizedPath); + + if (string.IsNullOrEmpty(directoryPath) || directoryPath == "." 
|| directoryPath == Path.DirectorySeparatorChar.ToString()) + { + // No directory structure needed, file is in root + return; + } + + // Try to get the collection's base path using reflection if available + var collectionType = collection.GetType(); + var basePathProperty = collectionType.GetProperty("BasePath") ?? + collectionType.GetProperty("Path") ?? + collectionType.GetProperty("RootPath"); + + if (basePathProperty != null) + { + var basePath = basePathProperty.GetValue(collection) as string; + if (!string.IsNullOrEmpty(basePath)) + { + var fullDirectoryPath = Path.Combine(basePath, directoryPath); + Directory.CreateDirectory(fullDirectoryPath); + } + } + } + catch (Exception) + { + // If we can't create directories through reflection, + // let the SaveFileAsync method handle any directory creation or fail gracefully + } + } } diff --git a/src/MeshWeaver.AI/Plugins/ContentCollectionPluginConfig.cs b/src/MeshWeaver.AI/Plugins/ContentPluginConfig.cs similarity index 93% rename from src/MeshWeaver.AI/Plugins/ContentCollectionPluginConfig.cs rename to src/MeshWeaver.AI/Plugins/ContentPluginConfig.cs index d42fe7d2b..09416906b 100644 --- a/src/MeshWeaver.AI/Plugins/ContentCollectionPluginConfig.cs +++ b/src/MeshWeaver.AI/Plugins/ContentPluginConfig.cs @@ -6,7 +6,7 @@ namespace MeshWeaver.AI.Plugins; /// /// Configuration for the SubmissionPlugin. /// -public class ContentCollectionPluginConfig +public class ContentPluginConfig { /// /// Collection of content collection configurations. diff --git a/src/MeshWeaver.ContentCollections/CollectionLayoutArea.cs b/src/MeshWeaver.ContentCollections/CollectionLayoutArea.cs index 50184bf23..702565516 100644 --- a/src/MeshWeaver.ContentCollections/CollectionLayoutArea.cs +++ b/src/MeshWeaver.ContentCollections/CollectionLayoutArea.cs @@ -1,4 +1,5 @@ -using MeshWeaver.Layout; +using System.ComponentModel; +using MeshWeaver.Layout; using MeshWeaver.Layout.Composition; namespace MeshWeaver.ContentCollections; @@ -13,6 +14,7 @@ public static class CollectionLayoutArea /// Renders a file browser for the specified collection at the given path. 
/// The collection and path are parsed from the host reference ID in format: {collection}/{path} /// + [Browsable(false)] public static UiControl Collection(LayoutAreaHost host, RenderingContext _) { var split = host.Reference.Id?.ToString()?.Split("/", StringSplitOptions.RemoveEmptyEntries); diff --git a/src/MeshWeaver.ContentCollections/ContentService.cs b/src/MeshWeaver.ContentCollections/ContentService.cs index 6228fc7e9..c5a896a70 100644 --- a/src/MeshWeaver.ContentCollections/ContentService.cs +++ b/src/MeshWeaver.ContentCollections/ContentService.cs @@ -59,8 +59,8 @@ public ContentService(IMessageHub hub, AccessService accessService) if (factory is null) throw new ArgumentException($"Unknown source type {config.SourceType}"); - // Create provider using the factory - var provider = factory.Create(config); + // Create provider using the factory (now properly async) + var provider = await factory.CreateAsync(config, cancellationToken); // Create and initialize the collection var collection = new ContentCollection(config, provider, hub); @@ -104,18 +104,15 @@ public ContentService(IMessageHub hub, AccessService accessService) if (collections.TryGetValue(config.Name, out var existing)) return existing; - else + lock (initializeLock) { - lock (initializeLock) - { - if (collections.TryGetValue(config.Name, out existing)) - return existing; + if (collections.TryGetValue(config.Name, out existing)) + return existing; - // Create a new initialization task - initTask = InstantiateCollectionAsync(config, cancellationToken); - collections[config.Name] = initTask; - return initTask; - } + // Create a new initialization task + initTask = InstantiateCollectionAsync(config, cancellationToken); + collections[config.Name] = initTask; + return initTask; } } diff --git a/src/MeshWeaver.ContentCollections/EmbeddedResourceStreamProviderFactory.cs b/src/MeshWeaver.ContentCollections/EmbeddedResourceStreamProviderFactory.cs index 035aa88c2..516ee1e34 100644 --- a/src/MeshWeaver.ContentCollections/EmbeddedResourceStreamProviderFactory.cs +++ b/src/MeshWeaver.ContentCollections/EmbeddedResourceStreamProviderFactory.cs @@ -5,7 +5,7 @@ namespace MeshWeaver.ContentCollections; /// public class EmbeddedResourceStreamProviderFactory : IStreamProviderFactory { - public IStreamProvider Create(ContentCollectionConfig config) + public Task CreateAsync(ContentCollectionConfig config, CancellationToken cancellationToken = default) { var assemblyName = config.Settings?.GetValueOrDefault("AssemblyName") ?? throw new ArgumentException("AssemblyName required for EmbeddedResource"); @@ -16,6 +16,6 @@ public IStreamProvider Create(ContentCollectionConfig config) .FirstOrDefault(a => a.GetName().Name == assemblyName) ?? throw new InvalidOperationException($"Assembly not found: {assemblyName}"); - return new EmbeddedResourceStreamProvider(assembly, resourcePrefix); + return Task.FromResult(new EmbeddedResourceStreamProvider(assembly, resourcePrefix)); } } diff --git a/src/MeshWeaver.ContentCollections/FileSystemStreamProvider.cs b/src/MeshWeaver.ContentCollections/FileSystemStreamProvider.cs index 9da51a7f9..94df466be 100644 --- a/src/MeshWeaver.ContentCollections/FileSystemStreamProvider.cs +++ b/src/MeshWeaver.ContentCollections/FileSystemStreamProvider.cs @@ -16,7 +16,7 @@ public class FileSystemStreamProvider(string basePath) : IStreamProvider public Task GetStreamAsync(string reference, CancellationToken cancellationToken = default) { - var fullPath = Path.IsPathRooted(reference) ? 
reference : Path.Combine(basePath, reference.TrimStart('/')); + var fullPath = Path.Combine(basePath, reference.TrimStart('/')); if (!File.Exists(fullPath)) { return Task.FromResult(null); diff --git a/src/MeshWeaver.ContentCollections/FileSystemStreamProviderFactory.cs b/src/MeshWeaver.ContentCollections/FileSystemStreamProviderFactory.cs index f3dc7fc4c..479b299ba 100644 --- a/src/MeshWeaver.ContentCollections/FileSystemStreamProviderFactory.cs +++ b/src/MeshWeaver.ContentCollections/FileSystemStreamProviderFactory.cs @@ -5,10 +5,10 @@ /// public class FileSystemStreamProviderFactory : IStreamProviderFactory { - public IStreamProvider Create(ContentCollectionConfig config) + public Task CreateAsync(ContentCollectionConfig config, CancellationToken cancellationToken = default) { var basePath = config.BasePath ?? config.Settings?.GetValueOrDefault("BasePath") ?? ""; - return new FileSystemStreamProvider(basePath); + return Task.FromResult(new FileSystemStreamProvider(basePath)); } } diff --git a/src/MeshWeaver.ContentCollections/HubStreamProviderFactory.cs b/src/MeshWeaver.ContentCollections/HubStreamProviderFactory.cs index 333538391..ff71fd424 100644 --- a/src/MeshWeaver.ContentCollections/HubStreamProviderFactory.cs +++ b/src/MeshWeaver.ContentCollections/HubStreamProviderFactory.cs @@ -11,18 +11,19 @@ public class HubStreamProviderFactory(IMessageHub hub) : IStreamProviderFactory { public const string SourceType = "Hub"; - public IStreamProvider Create(ContentCollectionConfig config) + public async Task CreateAsync(ContentCollectionConfig config, CancellationToken cancellationToken = default) { if (config.Address == null) throw new ArgumentException("Address is required for Hub source type"); var collectionName = config.Settings?.GetValueOrDefault("CollectionName") ?? 
config.Name; - // Query the remote hub for the collection configuration - var response = hub.AwaitResponse( + // Query the remote hub for the collection configuration (now properly async) + var response = await hub.AwaitResponse( new GetContentCollectionRequest([collectionName]), - o => o.WithTarget(config.Address) - ).GetAwaiter().GetResult(); + o => o.WithTarget(config.Address), + cancellationToken + ); var remoteConfig = response.Message.Collections.FirstOrDefault(); if (remoteConfig == null) @@ -33,7 +34,7 @@ public IStreamProvider Create(ContentCollectionConfig config) if (factory == null) throw new InvalidOperationException($"Unknown provider type '{remoteConfig.SourceType}'"); - // Create provider using the factory with the remote config - return factory.Create(remoteConfig); + // Create provider using the factory with the remote config (now properly async) + return await factory.CreateAsync(remoteConfig, cancellationToken); } } diff --git a/src/MeshWeaver.ContentCollections/IStreamProviderFactory.cs b/src/MeshWeaver.ContentCollections/IStreamProviderFactory.cs index ea84c9dd0..bd24bf5d8 100644 --- a/src/MeshWeaver.ContentCollections/IStreamProviderFactory.cs +++ b/src/MeshWeaver.ContentCollections/IStreamProviderFactory.cs @@ -9,6 +9,7 @@ public interface IStreamProviderFactory /// Creates a stream provider from the given configuration /// /// Content collection configuration + /// Cancellation token /// The created stream provider - IStreamProvider Create(ContentCollectionConfig config); + Task CreateAsync(ContentCollectionConfig config, CancellationToken cancellationToken = default); } diff --git a/src/MeshWeaver.Data.Contract/Messages.cs b/src/MeshWeaver.Data.Contract/Messages.cs index 8febdafb1..6d43a6ba0 100644 --- a/src/MeshWeaver.Data.Contract/Messages.cs +++ b/src/MeshWeaver.Data.Contract/Messages.cs @@ -57,7 +57,7 @@ public abstract record StreamMessage(string StreamId); public abstract record JsonChange( string StreamId, long Version, - RawJson Change, + [property: PreventLogging] RawJson Change, ChangeType ChangeType, string? ChangedBy ) : StreamMessage(StreamId); diff --git a/src/MeshWeaver.Data/ChangeItem.cs b/src/MeshWeaver.Data/ChangeItem.cs index 3214a3707..35ab48b4d 100644 --- a/src/MeshWeaver.Data/ChangeItem.cs +++ b/src/MeshWeaver.Data/ChangeItem.cs @@ -1,4 +1,5 @@ using MeshWeaver.Data; +using MeshWeaver.Messaging; namespace MeshWeaver.Data; @@ -12,7 +13,7 @@ public interface IChangeItem public record ChangeItem( - TStream? Value, + [property: PreventLogging] TStream? Value, string? ChangedBy, string? 
StreamId, ChangeType ChangeType, diff --git a/src/MeshWeaver.Data/DataContext.cs b/src/MeshWeaver.Data/DataContext.cs index ea78c5c79..bbdf686bf 100644 --- a/src/MeshWeaver.Data/DataContext.cs +++ b/src/MeshWeaver.Data/DataContext.cs @@ -1,19 +1,23 @@ using System.Collections.Immutable; -using Microsoft.Extensions.DependencyInjection; using MeshWeaver.Domain; using MeshWeaver.Messaging; using MeshWeaver.Messaging.Serialization; using MeshWeaver.Reflection; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; namespace MeshWeaver.Data; public sealed record DataContext : IDisposable { + public const string InitializationGateName = "DataContextInit"; + public ITypeRegistry TypeRegistry { get; } public DataContext(IWorkspace workspace) { Hub = workspace.Hub; + logger = Hub.ServiceProvider.GetRequiredService>(); Workspace = workspace; ReduceManager = Hub.CreateReduceManager(); @@ -26,6 +30,8 @@ public DataContext(IWorkspace workspace) ); } + private readonly ILogger logger; + private Dictionary TypeSourcesByType { get; set; } = new(); public IEnumerable DataSources => DataSourcesById.Values; @@ -73,6 +79,7 @@ Func, ReduceManager> change public void Initialize() { + logger.LogDebug("Starting initialization of DataContext for {Address}", Hub.Address); DataSourcesById = DataSourceBuilders.Select(x => x.Invoke(Hub)).ToImmutableDictionary(x => x.Id); DataSourcesByType = DataSourcesById.Values @@ -87,10 +94,36 @@ public void Initialize() foreach (var typeSource in TypeSources.Values) TypeRegistry.WithType(typeSource.TypeDefinition.Type, typeSource.TypeDefinition.CollectionName); + // Initialize each data source + foreach (var dataSource in DataSourcesById.Values) + { + dataSource.Initialize(); + tasks.Add(dataSource.Initialized); + initialized.Add(dataSource.Reference); + } + + Task.WhenAll(tasks) + .ContinueWith(task => + { + if (task.IsFaulted) + { + logger.LogError(task.Exception, "DataContext initialization failed for {Address}", Hub.Address); + } + else if (task.IsCanceled) + { + logger.LogWarning("DataContext initialization was canceled for {Address}", Hub.Address); + } + else + { + Hub.OpenGate(InitializationGateName); + logger.LogDebug("Finished initialization of DataContext for {Address}", Hub.Address); + } + }, TaskScheduler.Default); } public IEnumerable MappedTypes => DataSourcesByType.Keys; - + private readonly List tasks = new(); + private readonly List initialized = new(); public void Dispose() { foreach (var dataSource in DataSourcesById.Values) diff --git a/src/MeshWeaver.Data/DataExtensions.cs b/src/MeshWeaver.Data/DataExtensions.cs index 238695b19..0772ebae7 100644 --- a/src/MeshWeaver.Data/DataExtensions.cs +++ b/src/MeshWeaver.Data/DataExtensions.cs @@ -90,6 +90,7 @@ Func dataPluginConfiguration .WithType(typeof(ActivityAddress), ActivityAddress.TypeName) .WithType(typeof(ActivityLog), nameof(ActivityLog)) .RegisterDataEvents() + .WithInitializationGate(DataContext.InitializationGateName) ; } @@ -198,10 +199,10 @@ private static IMessageDelivery HandleDataChangeRequest(IMessageHub hub, IMessageDelivery request) { var activity = hub.Address is ActivityAddress ? 
null : new Activity(ActivityCategory.DataUpdate, hub); - hub.GetWorkspace().RequestChange(request.Message with { ChangedBy = request.Message.ChangedBy }, activity, - request); if (activity is not null) { + // Register completion action BEFORE starting work to avoid race condition + // where sub-activities complete and auto-dispose before the completion action is registered activity.Complete(log => { hub.Post(new DataChangeResponse(hub.Version, log), @@ -209,7 +210,9 @@ private static IMessageDelivery HandleDataChangeRequest(IMessageHub hub, }); } - else + hub.GetWorkspace().RequestChange(request.Message with { ChangedBy = request.Message.ChangedBy }, activity, + request); + if (activity is null) hub.Post(new DataChangeResponse(hub.Version, new(ActivityCategory.DataUpdate) { Status = ActivityStatus.Succeeded }), o => o.ResponseFor(request)); return request.Processed(); diff --git a/src/MeshWeaver.Data/GenericUnpartitionedDataSource.cs b/src/MeshWeaver.Data/GenericUnpartitionedDataSource.cs index 6dc3ea206..259fa66ec 100644 --- a/src/MeshWeaver.Data/GenericUnpartitionedDataSource.cs +++ b/src/MeshWeaver.Data/GenericUnpartitionedDataSource.cs @@ -27,6 +27,8 @@ public interface IDataSource : IDisposable ISynchronizationStream? GetStreamForPartition(object? partition); IEnumerable TypeSources { get; } + internal Task Initialized { get; } + internal void Initialize(); } public interface IUnpartitionedDataSource : IDataSource @@ -110,8 +112,16 @@ This with - protected readonly Dictionary?> Streams = new(); + protected readonly Dictionary> Streams = new(); + public Task Initialized + { + get + { + lock (Streams) + return Task.WhenAll(Streams.Values.Select(s => s.Hub.Started)); + } + } public CollectionsReference Reference => GetReference(); protected virtual CollectionsReference GetReference() => @@ -119,8 +129,14 @@ protected virtual CollectionsReference GetReference() => public virtual void Dispose() { - foreach (var stream in Streams.Values) - stream?.Dispose(); + ISynchronizationStream[] streamsToDispose; + lock (Streams) + { + streamsToDispose = Streams.Values.ToArray(); + } + + foreach (var stream in streamsToDispose) + stream.Dispose(); if (changesSubscriptions != null) foreach (var subscription in changesSubscriptions) @@ -129,28 +145,29 @@ public virtual void Dispose() public virtual ISynchronizationStream GetStream(WorkspaceReference reference) { var stream = GetStreamForPartition(reference is IPartitionedWorkspaceReference partitioned ? partitioned.Partition : null); - return stream?.Reduce(reference) ?? throw new InvalidOperationException("Unable to create stream"); + return stream.Reduce(reference) ?? throw new InvalidOperationException("Unable to create stream"); } - public ISynchronizationStream? GetStreamForPartition(object? partition) + public ISynchronizationStream GetStreamForPartition(object? partition) { var identity = new StreamIdentity(new DataSourceAddress(Id.ToString() ?? ""), partition); lock (Streams) { if (Streams.TryGetValue(partition ?? Id, out var ret)) return ret; + Logger.LogDebug("Creating new stream for Id {Id} and Partition {Partition}", Id, partition); Streams[partition ?? Id] = ret = CreateStream(identity); return ret; } } - protected abstract ISynchronizationStream? CreateStream(StreamIdentity identity); + protected abstract ISynchronizationStream CreateStream(StreamIdentity identity); - protected virtual ISynchronizationStream? 
CreateStream(StreamIdentity identity, + protected virtual ISynchronizationStream CreateStream(StreamIdentity identity, Func, StreamConfiguration> config) => SetupDataSourceStream(identity, config); - protected virtual ISynchronizationStream? SetupDataSourceStream(StreamIdentity identity, + protected virtual ISynchronizationStream SetupDataSourceStream(StreamIdentity identity, Func, StreamConfiguration> config) { var reference = GetReference(); @@ -175,7 +192,7 @@ public virtual void Initialize() public record GenericUnpartitionedDataSource(object Id, IWorkspace Workspace) : GenericUnpartitionedDataSource(Id, Workspace) { - public ISynchronizationStream? GetStream() + public ISynchronizationStream GetStream() => GetStreamForPartition(null); } @@ -193,7 +210,7 @@ public TDataSource WithType(Func, TypeSourceWithType public abstract record GenericPartitionedDataSource(object Id, IWorkspace Workspace) : GenericPartitionedDataSource, TPartition>(Id, Workspace) { - public ISynchronizationStream? GetStream() + public ISynchronizationStream GetStream() => GetStreamForPartition(null); } @@ -258,7 +275,7 @@ protected virtual async Task GetInitialValueAsync(ISynchronizationS } - protected override ISynchronizationStream? CreateStream(StreamIdentity identity) + protected override ISynchronizationStream CreateStream(StreamIdentity identity) { return CreateStream(identity, config => config.WithInitialization(GetInitialValueAsync).WithExceptionCallback(LogException)); @@ -270,15 +287,15 @@ private Task LogException(Exception exception) return Task.CompletedTask; } - protected override ISynchronizationStream? SetupDataSourceStream(StreamIdentity identity, Func, StreamConfiguration> config) + protected override ISynchronizationStream SetupDataSourceStream(StreamIdentity identity, Func, StreamConfiguration> config) { var stream = base.SetupDataSourceStream(identity, config); - if (stream == null) return null; var isFirst = true; stream.RegisterForDisposal( - stream.Where(x => isFirst || (x.ChangedBy is not null && !x.ChangedBy.Equals(Id))) + stream .Synchronize() + .Where(x => isFirst || (x.ChangedBy is not null && !x.ChangedBy.Equals(Id))) .Subscribe(change => { if (isFirst) @@ -347,31 +364,31 @@ protected virtual async Task return initial; } - protected override ISynchronizationStream? CreateStream(StreamIdentity identity, Func, StreamConfiguration> config) + protected override ISynchronizationStream CreateStream(StreamIdentity identity, Func, StreamConfiguration> config) { return SetupDataSourceStream(identity, config); } - protected override ISynchronizationStream? 
SetupDataSourceStream(StreamIdentity identity, Func, StreamConfiguration> config) + protected override ISynchronizationStream SetupDataSourceStream(StreamIdentity identity, Func, StreamConfiguration> config) { var stream = base.SetupDataSourceStream(identity, config); - if (stream == null) return null; // Always use async initialization to call GetInitialValueAsync properly var isFirst = true; stream.RegisterForDisposal( - stream.Where(x => isFirst || (x.ChangedBy is not null && !x.ChangedBy.Equals(Id))) - .Synchronize() - .Subscribe(change => - { - if (isFirst) + stream + .Synchronize() + .Where(x => isFirst || (x.ChangedBy is not null && !x.ChangedBy.Equals(Id))) + .Subscribe(change => { - isFirst = false; - return; // Skip processing on first emission (initialization) - } - Synchronize(change); - }) + if (isFirst) + { + isFirst = false; + return; // Skip processing on first emission (initialization) + } + Synchronize(change); + }) ); return stream; } diff --git a/src/MeshWeaver.Data/IWorkspace.cs b/src/MeshWeaver.Data/IWorkspace.cs index 665850108..c87628e10 100644 --- a/src/MeshWeaver.Data/IWorkspace.cs +++ b/src/MeshWeaver.Data/IWorkspace.cs @@ -19,12 +19,12 @@ public interface IWorkspace : IAsyncDisposable ISynchronizationStream GetStream(params Type[] types); ReduceManager ReduceManager { get; } - ISynchronizationStream? GetRemoteStream( + ISynchronizationStream GetRemoteStream( Address owner, WorkspaceReference reference ); ISynchronizationStream? GetStream( - WorkspaceReference reference, + WorkspaceReference reference, Func, StreamConfiguration>? configuration = null); IObservable>? GetRemoteStream(Address address); diff --git a/src/MeshWeaver.Data/Persistence/HubDataSource.cs b/src/MeshWeaver.Data/Persistence/HubDataSource.cs index 623912f8e..f3da2c7a4 100644 --- a/src/MeshWeaver.Data/Persistence/HubDataSource.cs +++ b/src/MeshWeaver.Data/Persistence/HubDataSource.cs @@ -15,9 +15,15 @@ public UnpartitionedHubDataSource WithType( Func, TypeSourceWithType> typeSource ) => WithTypeSource(typeof(T), typeSource.Invoke(new TypeSourceWithType(Workspace, Id))); - protected override ISynchronizationStream? CreateStream(StreamIdentity identity) => + protected override ISynchronizationStream CreateStream(StreamIdentity identity) => CreateStream(identity, x => x); - protected override ISynchronizationStream? CreateStream(StreamIdentity identity, Func, StreamConfiguration> config) => + protected override ISynchronizationStream CreateStream(StreamIdentity identity, Func, StreamConfiguration> config) => Workspace.GetRemoteStream(Address, GetReference()); + + public override void Initialize() + { + base.Initialize(); + GetStream(GetReference()); + } } diff --git a/src/MeshWeaver.Data/Persistence/PartitionedHubDataSource.cs b/src/MeshWeaver.Data/Persistence/PartitionedHubDataSource.cs index 475872fcc..adf9d3eea 100644 --- a/src/MeshWeaver.Data/Persistence/PartitionedHubDataSource.cs +++ b/src/MeshWeaver.Data/Persistence/PartitionedHubDataSource.cs @@ -6,16 +6,16 @@ public record PartitionedHubDataSource(object Id, IWorkspace Workspa : PartitionedDataSource, IPartitionedTypeSource, TPartition>(Id, Workspace) { public override PartitionedHubDataSource WithType(Func partitionFunction, Func? config = null) -=> WithTypeSource( +=> WithTypeSource( typeof(T), (config ?? 
(x => x)).Invoke( new PartitionedTypeSourceWithType(Workspace, partitionFunction, Id) ) ); - - public PartitionedHubDataSource InitializingPartitions(IEnumerable partitions) => + + public PartitionedHubDataSource InitializingPartitions(params IEnumerable partitions) => this with { InitializePartitions = InitializePartitions.Concat(partitions).ToArray() @@ -24,10 +24,10 @@ this with private object[] InitializePartitions { get; init; } = []; - protected override ISynchronizationStream? CreateStream(StreamIdentity identity) + protected override ISynchronizationStream CreateStream(StreamIdentity identity) { if (identity.Partition is not Address partition) - return null; + throw new NotSupportedException($"Partition {identity.Partition} must be of type Address"); var reference = GetReference(); var partitionedReference = new PartitionedWorkspaceReference( partition, diff --git a/src/MeshWeaver.Data/Serialization/JsonSynchronizationStream.cs b/src/MeshWeaver.Data/Serialization/JsonSynchronizationStream.cs index be3b2d5b3..346af4a57 100644 --- a/src/MeshWeaver.Data/Serialization/JsonSynchronizationStream.cs +++ b/src/MeshWeaver.Data/Serialization/JsonSynchronizationStream.cs @@ -42,8 +42,8 @@ TReference reference reduced.RegisterForDisposal( reduced .ToDataChanged(c => reduced.ClientId.Equals(c.ChangedBy)) - .Where(x => x is not null) .Synchronize() + .Where(x => x is not null) .Subscribe(e => { logger.LogDebug("Stream {streamId} sending change notification to owner {owner}", @@ -56,8 +56,8 @@ TReference reference reduced.RegisterForDisposal( reduced .ToDataChangeRequest(c => reduced.ClientId.Equals(c.StreamId)) - .Where(x => x.Creations.Any() || x.Deletions.Any() || x.Updates.Any()) .Synchronize() + .Where(x => x.Creations.Any() || x.Deletions.Any() || x.Updates.Any()) .Subscribe(e => { logger.LogDebug("Stream {streamId} sending change notification to owner {owner}", @@ -128,9 +128,9 @@ fromWorkspace as ISynchronizationStream reduced.RegisterForDisposal( reduced .ToDataChanged(c => isFirst || !reduced.ClientId.Equals(c.ChangedBy)) + .Synchronize() .Where(x => x is not null) .Select(x => x!) - .Synchronize() .Subscribe(e => { if (isFirst) @@ -146,23 +146,31 @@ fromWorkspace as ISynchronizationStream }) ); - // outgoing data changed - reduced.RegisterForDisposal( - reduced - .ToDataChangeRequest(c => reduced.ClientId.Equals(c.ChangedBy)) - .Synchronize() - .Subscribe(e => - { - logger.LogDebug("Issuing change request from stream {subscriber} to owner {owner}", reduced.StreamId, reduced.Owner); - reduced.Host.GetWorkspace().RequestChange(e, null, null); - }) - ); + // NOTE: The following subscription was causing an infinite feedback loop. + // When a client sends a DataChangeRequest, the workspace processes it and updates the stream. + // The stream emits with ChangedBy = ClientId, matching the predicate below, which calls + // RequestChange() again, creating an infinite loop. + // All changes should flow through DataChangeRequest messages, not through stream subscriptions. + // Removed to fix the feedback loop bug. 
+ + // // outgoing data changed + // reduced.RegisterForDisposal( + // reduced + // .ToDataChangeRequest(c => reduced.ClientId.Equals(c.ChangedBy)) + // .Synchronize() + // .Subscribe(e => + // { + // logger.LogDebug("Issuing change request from stream {subscriber} to owner {owner}", reduced.StreamId, reduced.Owner); + // reduced.Host.GetWorkspace().RequestChange(e, null, null); + // }) + // ); return reduced; } private static IObservable ToDataChanged( this ISynchronizationStream stream, Func, bool> predicate) where TChange : JsonChange => stream + .Synchronize() .Where(predicate) .Select(x => { @@ -312,6 +320,7 @@ internal static (InstanceCollection, JsonPatch) UpdateJsonElement(this DataChang internal static IObservable ToDataChangeRequest( this ISynchronizationStream stream, Func, bool> predicate) => stream + .Synchronize() .Where(predicate) .Select(x => x.Updates.ToDataChangeRequest(stream.ClientId)); diff --git a/src/MeshWeaver.Data/Serialization/SynchronizationStream.cs b/src/MeshWeaver.Data/Serialization/SynchronizationStream.cs index 546f48d80..84ef83051 100644 --- a/src/MeshWeaver.Data/Serialization/SynchronizationStream.cs +++ b/src/MeshWeaver.Data/Serialization/SynchronizationStream.cs @@ -77,10 +77,13 @@ public virtual IDisposable Subscribe(IObserver> observer) { try { - return Store.Synchronize().Subscribe(observer); + var subscription = Store.Synchronize().Subscribe(observer); + logger.LogDebug("[SYNC_STREAM] Subscribe for {StreamId}, subscription created", StreamId); + return subscription; } - catch (ObjectDisposedException) + catch (ObjectDisposedException e) { + logger.LogDebug("[SYNC_STREAM] Subscribe failed for {StreamId} - Store is disposed: {Exception}", StreamId, e.Message); return new AnonymousDisposable(() => { }); } } @@ -109,34 +112,39 @@ public ChangeItem? Current public ReduceManager ReduceManager { get; init; } - private void SetCurrent(ChangeItem? value) + private void SetCurrent(IMessageHub hub, ChangeItem? value) { - if (startupDeferrable is not null) + if (isDisposed || value == null) { - logger.LogDebug("Disposing startup deferrable for Stream {StreamId}", StreamId); - startupDeferrable.Dispose(); - startupDeferrable = null; + logger.LogWarning("[SYNC_STREAM] Not setting {StreamId} to {Value} because the stream is disposed or value is null. 
IsDisposed={IsDisposed}", StreamId, value, isDisposed); + return; } - if (isDisposed || value == null) + var valuesEqual = current is not null && Equals(current.Value, value.Value); + + + if (current is not null && valuesEqual) { - logger.LogWarning("Not setting {StreamId} to {Value} because the stream is disposed or value is null.", StreamId, value); + logger.LogDebug("[SYNC_STREAM] Skipping SetCurrent for {StreamId} - same version and equal values", StreamId); return; } - if (current is not null && Equals(current.Value, value.Value)) - return; + current = value; try { - logger.LogDebug("Setting value for {StreamId} to {Value}", StreamId, JsonSerializer.Serialize(value, Host.JsonSerializerOptions)); + logger.LogDebug("[SYNC_STREAM] Emitting OnNext for {StreamId}, Version={Version}, Store.IsDisposed={IsDisposed}, Store.HasObservers={HasObservers}", + StreamId, value.Version, Store.IsDisposed, Store.HasObservers); Store.OnNext(value); + logger.LogDebug("[SYNC_STREAM] OnNext completed for {StreamId}, opening gate", StreamId); + hub.OpenGate(SynchronizationGate); } catch (Exception e) { - logger.LogWarning("Exception setting current value for {Address}: {Exception}", Hub.Address, e); + logger.LogWarning(e, "[SYNC_STREAM] Exception setting current value for {Address}", Hub.Address); } } + private const string SynchronizationGate = nameof(SynchronizationGate); public void Update(Func?> update, Func exceptionCallback) => Hub.Post(new UpdateStreamRequest((stream, _) => Task.FromResult(update.Invoke(stream)), exceptionCallback)); @@ -194,22 +202,14 @@ public SynchronizationStream( this.Reference = Reference; logger = Host.ServiceProvider.GetRequiredService>>(); - logger.LogInformation("Creating Synchronization Stream {StreamId} for Host {Host} and {StreamIdentity} and {Reference}", StreamId, Host.Address, StreamIdentity, Reference); - - Hub = Host.GetHostedHub(new SynchronizationAddress(ClientId), c => ConfigureSynchronizationHub(c)); - if (Configuration.Initialization is null) - startupDeferrable = Hub.Defer(StartupDeferrable); - - - + logger.LogDebug("Creating Synchronization Stream {StreamId} for Host {Host} and {StreamIdentity} and {Reference}", StreamId, Host.Address, StreamIdentity, Reference); + Hub = Host.GetHostedHub(new SynchronizationAddress(ClientId), ConfigureSynchronizationHub); } - private IDisposable? 
startupDeferrable; - private MessageHubConfiguration ConfigureSynchronizationHub(MessageHubConfiguration config) { - return config + config = config .WithTypes( typeof(EntityStore), typeof(JsonElement), @@ -234,7 +234,7 @@ private MessageHubConfiguration ConfigureSynchronizationHub(MessageHubConfigurat { hub.Dispose(); return delivery.Processed(); - }).WithHandler(async (_, request, ct) => + }).WithHandler(async (hub, request, ct) => { var update = request.Message.UpdateAsync; var exceptionCallback = request.Message.ExceptionCallback; @@ -249,56 +249,68 @@ private MessageHubConfiguration ConfigureSynchronizationHub(MessageHubConfigurat // SetCurrent will be called with the computed result // The Message Hub serializes these messages, so only one UpdateStreamRequest // is processed at a time per stream, preventing race conditions - SetCurrent(newChangeItem); + SetCurrent(hub, newChangeItem); } catch (Exception e) { await exceptionCallback.Invoke(e); } return request.Processed(); - }).WithHandler((_, request) => + }).WithHandler((hub, request) => { try { - SetCurrent(request.Message.Value); + SetCurrent(hub, request.Message.Value); } catch (Exception ex) { throw new SynchronizationException("An error occurred during synchronization", ex); } return request.Processed(); - }).WithStartupDeferral(StartupDeferrable) - .WithInitialization((_, ct) => InitializeAsync(ct)); + }) + .WithInitialization(InitializeAsync) + .WithInitializationGate(SynchronizationGate, d => d.Message is SetCurrentRequest || d.Message is DataChangedEvent); - } + // Apply deferred initialization if configured + if (Configuration.DeferredInitialization) + config = config.WithDeferredInitialization(); - private static readonly Predicate StartupDeferrable = x => - x.Message is not InitializeHubRequest - && x.Message is not SetCurrentRequest && - x.Message is not DataChangedEvent { ChangeType: ChangeType.Full }; + return config; + } - private async Task InitializeAsync(CancellationToken ct) + private async Task InitializeAsync(IMessageHub hub, CancellationToken ct) { if (Configuration.Initialization is null) + { + // No custom initialization return; + } var init = await Configuration.Initialization(this, ct); - SetCurrent(new ChangeItem(init, StreamId, Host.Version)); + SetCurrent(hub, new ChangeItem(init, StreamId, Host.Version)); } private void UpdateStream(IMessageDelivery delivery, IMessageHub hub) where TChange : JsonChange { + logger.LogDebug("[SYNC_STREAM] UpdateStream called for {StreamId}, ChangeType={ChangeType}, Version={Version}, MessageId={MessageId}", + StreamId, delivery.Message.ChangeType, delivery.Message.Version, delivery.Id); + if (Hub.Disposal is not null) + { + logger.LogWarning("[SYNC_STREAM] UpdateStream skipped for {StreamId} - hub is disposing", StreamId); return; + } + var currentJson = Get(); if (delivery.Message.ChangeType == ChangeType.Full) { + logger.LogDebug("[SYNC_STREAM] Processing Full change for {StreamId}", StreamId); currentJson = JsonSerializer.Deserialize(delivery.Message.Change.Content); try { - SetCurrent(new ChangeItem( + SetCurrent(hub, new ChangeItem( currentJson.Value.Deserialize(Host.JsonSerializerOptions)!, StreamId, Host.Version)); @@ -306,16 +318,18 @@ private void UpdateStream(IMessageDelivery delivery, IMessageH } catch (Exception ex) { + logger.LogWarning(ex, "[SYNC_STREAM] Failed to process Full change for {StreamId}", StreamId); SyncFailed(delivery, ex); } } else { + logger.LogDebug("[SYNC_STREAM] Processing Patch change for {StreamId}", StreamId); (currentJson, var patch) = 
delivery.Message.UpdateJsonElement(currentJson, hub.JsonSerializerOptions); try { - SetCurrent(this.ToChangeItem(Current!.Value!, + SetCurrent(hub, this.ToChangeItem(Current!.Value!, currentJson.Value, patch, delivery.Message.ChangedBy ?? ClientId)); @@ -323,11 +337,13 @@ private void UpdateStream(IMessageDelivery delivery, IMessageH } catch (Exception ex) { + logger.LogError(ex, "[SYNC_STREAM] Failed to process Patch change for {StreamId}", StreamId); SyncFailed(delivery, ex); } } Set(currentJson); + logger.LogDebug("[SYNC_STREAM] UpdateStream completed for {StreamId}", StreamId); } private void SyncFailed(IMessageDelivery delivery, Exception exception) @@ -398,6 +414,13 @@ public StreamConfiguration ReturnNullWhenNotPresent() internal Func ExceptionCallback { get; init; } = _ => Task.CompletedTask; + /// + /// When true, the stream's hosted hub will not automatically post InitializeHubRequest during construction. + /// Manual initialization is required by posting InitializeHubRequest to the stream's hub. + /// This is useful when the stream initialization depends on properties that are set after stream construction. + /// + internal bool DeferredInitialization { get; init; } + public StreamConfiguration WithInitialization(Func, CancellationToken, Task> init) => this with { Initialization = init }; @@ -406,4 +429,14 @@ public StreamConfiguration WithExceptionCallback(Func public StreamConfiguration WithExceptionCallback(Action exceptionCallback) => this with { ExceptionCallback = ex => { exceptionCallback(ex); return Task.CompletedTask; } }; + + /// + /// Enables deferred initialization for the stream's hosted hub. When enabled, the hub will not automatically + /// post InitializeHubRequest during construction. Manual initialization is required by posting InitializeHubRequest + /// to the stream's hub after the stream is fully constructed. + /// + /// Whether to defer initialization (default: true) + /// Updated configuration + public StreamConfiguration WithDeferredInitialization(bool deferred = true) + => this with { DeferredInitialization = deferred }; } diff --git a/src/MeshWeaver.Data/Workspace.cs b/src/MeshWeaver.Data/Workspace.cs index bc2b67810..21a66b0e1 100644 --- a/src/MeshWeaver.Data/Workspace.cs +++ b/src/MeshWeaver.Data/Workspace.cs @@ -46,14 +46,14 @@ public Workspace(IMessageHub hub, ILogger logger) .Select(x => x.Value?.Collections.SingleOrDefault().Value?.Instances.Values.Cast().ToArray()); } - public ISynchronizationStream? GetRemoteStream( + public ISynchronizationStream GetRemoteStream( Address id, WorkspaceReference reference ) => - (ISynchronizationStream?) 
+ (ISynchronizationStream) GetSynchronizationStreamMethod .MakeGenericMethod(typeof(TReduced), reference.GetType()) - .Invoke(this, [id, reference]); + .Invoke(this, [id, reference])!; private static readonly MethodInfo GetSynchronizationStreamMethod = diff --git a/src/MeshWeaver.Data/WorkspaceExtensions.cs b/src/MeshWeaver.Data/WorkspaceExtensions.cs index 3d2735e37..0eb74951e 100644 --- a/src/MeshWeaver.Data/WorkspaceExtensions.cs +++ b/src/MeshWeaver.Data/WorkspaceExtensions.cs @@ -1,7 +1,7 @@ using System.Data; using System.Reactive.Linq; -using Microsoft.Extensions.DependencyInjection; using MeshWeaver.Messaging; +using Microsoft.Extensions.DependencyInjection; using Microsoft.Extensions.Logging; namespace MeshWeaver.Data; @@ -27,16 +27,30 @@ public static IObservable> GetObservable(this IWorkspa { var logger = workspace.Hub.ServiceProvider.GetRequiredService>(); var stream = workspace.GetStream(typeof(T)); - logger.LogDebug("Retrieved stream {StreamId} for type {Type}: {Identity}, {Reference}", stream.StreamId, typeof(T).Name, stream.StreamIdentity, stream.Reference); + logger.LogDebug("[WORKSPACE] GetObservable called for type {Type}, StreamId={StreamId}, Identity={Identity}, Reference={Reference}", + typeof(T).Name, stream.StreamId, stream.StreamIdentity, stream.Reference); return stream .Synchronize() - .Select(ws => ws.Value?.GetData().ToArray()) - .Where(x => x != null) + .Do(_ => logger.LogDebug("[WORKSPACE] Subscription created for {Type}, StreamId={StreamId}", typeof(T).Name, stream.StreamId)) + .Select(ws => + { + logger.LogDebug("[WORKSPACE] Received change item for {Type}, StreamId={StreamId}, HasValue={HasValue}", + typeof(T).Name, stream.StreamId, ws.Value != null); + return ws.Value?.GetData().ToArray(); + }) + .Where(x => + { + var hasData = x != null; + logger.LogDebug("[WORKSPACE] Filter check for {Type}, StreamId={StreamId}, HasData={HasData}", + typeof(T).Name, stream.StreamId, hasData); + return hasData; + }) .Select(x => { var ret = (IReadOnlyCollection)x!; - logger.LogDebug("Stream {StreamId}: Observable Value for {Type}: {val}", stream.StreamId,typeof(T).Name, string.Join(", ", ret.Select(y => y!.ToString()))); + logger.LogDebug("[WORKSPACE] Emitting collection for {Type}, StreamId={StreamId}, Count={Count}, Items={Items}", + stream.StreamId, typeof(T).Name, ret.Count, string.Join(", ", ret.Select(y => y!.ToString()))); return ret; }); } @@ -50,7 +64,7 @@ public static ChangeItem ApplyChanges( this ISynchronizationStream? stream, EntityStoreAndUpdates storeAndUpdates) => new(storeAndUpdates.Store, - storeAndUpdates.ChangedBy ?? stream!.StreamId, + storeAndUpdates.ChangedBy ?? stream!.StreamId, stream!.StreamId, ChangeType.Patch, stream!.Hub.Version, @@ -66,8 +80,8 @@ public static EntityStore AddInstances(this IWorkspace workspace, EntityStore st if (typeSource == null) throw new DataException($"Type {g.Key.Name} is not mapped to the workspace."); var collection = s.Collections.GetValueOrDefault(typeSource.CollectionName); - - collection = collection == null ? new InstanceCollection(g.ToDictionary(typeSource.TypeDefinition.GetKey)) : collection with{Instances = collection.Instances.SetItems(g.ToDictionary(typeSource.TypeDefinition.GetKey)) }; + + collection = collection == null ? 
new InstanceCollection(g.ToDictionary(typeSource.TypeDefinition.GetKey)) : collection with { Instances = collection.Instances.SetItems(g.ToDictionary(typeSource.TypeDefinition.GetKey)) }; return s with { Collections = s.Collections.SetItem(typeSource.CollectionName, collection) diff --git a/src/MeshWeaver.Data/WorkspaceOperations.cs b/src/MeshWeaver.Data/WorkspaceOperations.cs index 5d57cbe9c..b6ea00238 100644 --- a/src/MeshWeaver.Data/WorkspaceOperations.cs +++ b/src/MeshWeaver.Data/WorkspaceOperations.cs @@ -1,5 +1,6 @@ using System.Collections.Immutable; using System.ComponentModel.DataAnnotations; +using System.Data; using Json.Patch; using MeshWeaver.Messaging; using Microsoft.Extensions.DependencyInjection; @@ -130,6 +131,11 @@ private static void UpdateStreams(this IWorkspace workspace, DataChangeRequest c } var stream = group.Key.DataSource.GetStreamForPartition(group.Key.Partition); + if (stream is null) + throw new DataException($"Data source {group.Key.DataSource.Reference} does not have a stream for partition {group.Key.Partition}"); + if (!stream.Hub.Started.IsCompleted) + throw new DataException($"Data source {group.Key.DataSource.Reference} for partition {group.Key.Partition} is not initialized."); + // Start sub-activity for data update var subActivity = activity?.StartSubActivity(ActivityCategory.DataUpdate); @@ -196,7 +202,7 @@ private static void UpdateStreams(this IWorkspace workspace, DataChangeRequest c throw new NotSupportedException($"Operation {g.Key.Op} not supported"); }); subActivity?.LogInformation("Applying changes to Data Stream {Stream}", stream.StreamIdentity); - logger?.LogInformation("Applying changes to Data Stream {Stream}", stream.StreamIdentity); + logger.LogInformation("Applying changes to Data Stream {Stream}", stream.StreamIdentity); // Complete sub-activity - this would need proper sub-activity tracking to work correctly return stream.ApplyChanges(updates); } diff --git a/src/MeshWeaver.Data/WorkspaceStreams.cs b/src/MeshWeaver.Data/WorkspaceStreams.cs index 1633af6fd..16883772c 100644 --- a/src/MeshWeaver.Data/WorkspaceStreams.cs +++ b/src/MeshWeaver.Data/WorkspaceStreams.cs @@ -188,12 +188,14 @@ internal static ISynchronizationStream CreateReducedStream reducer.Invoke(change, (TReference)reducedStream.Reference, i++ == 0)); if (!reducedStream.Configuration.NullReturn) { selectedInitial = selectedInitial - .Where(x => x is { Value: not null }); + .Where(x => x is { Value: not null }) + .Synchronize(); } diff --git a/src/MeshWeaver.Hosting.AzureBlob/ArticleConfigurationExtensions.cs b/src/MeshWeaver.Hosting.AzureBlob/ArticleConfigurationExtensions.cs index 4985fb001..a6eb17107 100644 --- a/src/MeshWeaver.Hosting.AzureBlob/ArticleConfigurationExtensions.cs +++ b/src/MeshWeaver.Hosting.AzureBlob/ArticleConfigurationExtensions.cs @@ -67,7 +67,7 @@ public class AzureBlobStreamProviderFactory(IServiceProvider serviceProvider) : { public const string SourceType = "AzureBlob"; - public IStreamProvider Create(ContentCollectionConfig config) + public Task CreateAsync(ContentCollectionConfig config, CancellationToken cancellationToken = default) { if (config.Settings == null) throw new ArgumentException("Settings are required for AzureBlob source type"); @@ -81,6 +81,6 @@ public IStreamProvider Create(ContentCollectionConfig config) var clientName = config.Settings.GetValueOrDefault("ClientName", "default"); var blobServiceClient = factory.CreateClient(clientName); - return new AzureBlobStreamProvider(blobServiceClient, containerName); + return 
Task.FromResult(new AzureBlobStreamProvider(blobServiceClient, containerName)); } } diff --git a/src/MeshWeaver.Import/Configuration/AutoEntityBuilder.cs b/src/MeshWeaver.Import/Configuration/AutoEntityBuilder.cs new file mode 100644 index 000000000..d759c25a2 --- /dev/null +++ b/src/MeshWeaver.Import/Configuration/AutoEntityBuilder.cs @@ -0,0 +1,113 @@ +using System.Reflection; + +namespace MeshWeaver.Import.Configuration; + +/// +/// Helper class to automatically build entities from property dictionaries using reflection. +/// +public static class AutoEntityBuilder +{ + /// + /// Creates an entity builder function for a given type using reflection. + /// + /// The type to instantiate + /// A function that builds instances from property dictionaries + public static Func, object> CreateBuilder(Type type) + { + return properties => + { + var instance = Activator.CreateInstance(type); + if (instance == null) + throw new InvalidOperationException($"Failed to create instance of type {type.FullName}"); + + foreach (var (key, value) in properties) + { + var prop = type.GetProperty(key, BindingFlags.Public | BindingFlags.Instance | BindingFlags.IgnoreCase); + if (prop == null || !prop.CanWrite) + continue; + + try + { + var convertedValue = ConvertValue(value, prop.PropertyType); + prop.SetValue(instance, convertedValue); + } + catch + { + // Ignore conversion errors for individual properties + } + } + + return instance; + }; + } + + /// + /// Creates a typed entity builder function. + /// + public static Func, T> CreateBuilder() where T : class + { + var builder = CreateBuilder(typeof(T)); + return properties => (T)builder(properties); + } + + /// + /// Converts a value to the target type with support for common conversions. + /// + private static object? ConvertValue(object? value, Type targetType) + { + if (value == null) + return targetType.IsValueType ? Activator.CreateInstance(targetType) : null; + + // If already correct type + if (targetType.IsInstanceOfType(value)) + return value; + + var underlying = Nullable.GetUnderlyingType(targetType) ?? targetType; + + // Empty strings should be treated as null/default + if (value is string s && string.IsNullOrWhiteSpace(s)) + return targetType.IsValueType ? 
Activator.CreateInstance(targetType) : null; + + // Handle string conversions + if (underlying == typeof(string)) + return value.ToString(); + + // Handle numeric conversions + if (underlying == typeof(int)) + { + if (value is int i) return i; + if (value is decimal dm) return (int)dm; + if (value is double d) return (int)d; + if (int.TryParse(value.ToString(), out var parsed)) return parsed; + } + + if (underlying == typeof(double)) + { + if (value is double d) return d; + if (value is decimal dm) return (double)dm; + if (value is int i) return (double)i; + if (double.TryParse(value.ToString(), out var parsed)) return parsed; + } + + if (underlying == typeof(decimal)) + { + if (value is decimal dm) return dm; + if (value is double d) return (decimal)d; + if (decimal.TryParse(value.ToString(), out var parsed)) return parsed; + } + + if (underlying == typeof(bool)) + { + if (value is bool b) return b; + var str = value.ToString()?.Trim().ToLowerInvariant(); + if (str == "true" || str == "yes" || str == "1") return true; + if (str == "false" || str == "no" || str == "0") return false; + } + + // Fallback to Convert.ChangeType + if (value is IConvertible) + return Convert.ChangeType(value, underlying); + + return value; + } +} diff --git a/src/MeshWeaver.Import/Configuration/ExcelImportConfiguration.cs b/src/MeshWeaver.Import/Configuration/ExcelImportConfiguration.cs index e438913b8..f02db810a 100644 --- a/src/MeshWeaver.Import/Configuration/ExcelImportConfiguration.cs +++ b/src/MeshWeaver.Import/Configuration/ExcelImportConfiguration.cs @@ -6,7 +6,7 @@ namespace MeshWeaver.Import.Configuration; /// /// Configuration describing how to transform an Excel worksheet into typed entities. /// -public class ExcelImportConfiguration +public class ExcelImportConfiguration : ImportConfiguration { public ExcelImportConfiguration() { @@ -26,13 +26,11 @@ public ExcelImportConfiguration(string name, string entityId, string worksheetNa } /// - /// File name of the Excel workbook used for this mapping (key). + /// The fully qualified type name of the entity to import (e.g., "MeshWeaver.Insurance.Domain.PropertyRisk"). + /// Used for automatic entity instantiation. /// - [Key] public required string Name { get; init; } - /// - /// Entity identifier that this configuration applies to (e.g., PricingId, ProjectId, etc.). - /// - public required string EntityId { get; init; } + public string? TypeName { get; set; } + /// /// Name of the worksheet within the Excel file to process. /// diff --git a/src/MeshWeaver.Import/Configuration/ImportBuilder.cs b/src/MeshWeaver.Import/Configuration/ImportBuilder.cs new file mode 100644 index 000000000..dd866954e --- /dev/null +++ b/src/MeshWeaver.Import/Configuration/ImportBuilder.cs @@ -0,0 +1,238 @@ +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using System.Linq.Expressions; +using System.Reflection; +using MeshWeaver.ContentCollections; +using MeshWeaver.Data; +using MeshWeaver.DataSetReader; +using MeshWeaver.DataSetReader.Csv; +using MeshWeaver.DataSetReader.Excel; +using MeshWeaver.Domain; +using Microsoft.Extensions.DependencyInjection; + +namespace MeshWeaver.Import.Configuration; + +public record ImportBuilder +{ + public IWorkspace Workspace { get; } + public IServiceProvider? ServiceProvider { get; init; } + + public ImportBuilder( + IWorkspace workspace, + IServiceProvider? 
serviceProvider = null + ) + { + this.Workspace = workspace; + this.ServiceProvider = serviceProvider; + StreamProviders = InitializeStreamProviders(); + Validations = ImmutableList + .Empty.Add(StandardValidations) + .Add(CategoriesValidation); + if (workspace.MappedTypes.Any()) + ImportFormatBuilders = ImportFormatBuilders.Add( + ImportFormat.Default, + [f => f.WithMappings(m => m.WithAutoMappingsForTypes(workspace.MappedTypes))] + ); + } + + + private readonly ConcurrentDictionary ImportFormats = new(); + + public ImportBuilder WithFormat( + string format, + Func configuration + ) => + this with + { + ImportFormatBuilders = ImportFormatBuilders.SetItem( + format, + ( + ImportFormatBuilders.GetValueOrDefault(format) + ?? ImmutableList>.Empty + ).Add(configuration) + ) + }; + + private ImmutableDictionary< + string, + ImmutableList> + > ImportFormatBuilders { get; init; } = + ImmutableDictionary>>.Empty; + + public ImportFormat? GetFormat(string format) + { + if (ImportFormats.TryGetValue(format, out var ret)) + return ret; + + var builders = ImportFormatBuilders.GetValueOrDefault(format); + if (builders == null) + return null; + + return ImportFormats.GetOrAdd( + format, + builders.Aggregate( + new ImportFormat(format, Workspace, Validations), + (a, b) => b.Invoke(a) + ) + ); + } + + internal ImmutableDictionary DataSetReaders { get; init; } = + ImmutableDictionary + .Empty.Add( + MimeTypes.csv, + (stream, options, _) => DataSetCsvSerializer.ReadAsync(stream, options) + ) + .Add( + MimeTypes.xlsx, + (stream, _, _) => Task.FromResult(new ExcelDataSetReader().Read(stream)) + ) + .Add(MimeTypes.xls, new ExcelDataSetReaderOld().ReadAsync); + + public ImportBuilder WithDataSetReader(string fileType, ReadDataSet dataSetReader) => + this with + { + DataSetReaders = DataSetReaders.SetItem(fileType, dataSetReader) + }; + + internal ImmutableDictionary>> StreamProviders { get; init; } + + private ImmutableDictionary>> InitializeStreamProviders() => + ImmutableDictionary>> + .Empty.Add(typeof(StringStream), CreateMemoryStream) + .Add(typeof(EmbeddedResource), CreateEmbeddedResourceStream) + .Add(typeof(CollectionSource), CreateCollectionStreamAsync); + + private static Task CreateEmbeddedResourceStream(ImportRequest request) + { + var embeddedResource = (EmbeddedResource)request.Source; + var assembly = embeddedResource.Assembly; + var resourceName = $"{assembly.GetName().Name}.{embeddedResource.Resource}"; + var stream = assembly.GetManifestResourceStream(resourceName); + if (stream == null) + { + throw new ArgumentException($"Resource '{resourceName}' not found."); + } + return Task.FromResult(stream); + } + + private static Task CreateMemoryStream(ImportRequest request) + { + var stream = new MemoryStream(); + var writer = new StreamWriter(stream); + writer.Write(((StringStream)request.Source).Content); + writer.Flush(); + stream.Position = 0; + return Task.FromResult(stream); + } + + private async Task CreateCollectionStreamAsync(ImportRequest request) + { + var collectionSource = (CollectionSource)request.Source; + + // Resolve from ContentCollection + if (ServiceProvider == null) + throw new ImportException("ServiceProvider is not available to resolve CollectionSource from ContentCollection"); + + var contentService = ServiceProvider.GetService(); + if (contentService == null) + throw new ImportException("IContentService is not registered. 
Ensure ContentCollections are configured."); + + var stream = await contentService.GetContentAsync(collectionSource.Collection, collectionSource.Path); + if (stream == null) + throw new ImportException($"Could not find content at collection '{collectionSource.Collection}' path '{collectionSource.Path}'"); + + return stream; + } + + public ImportBuilder WithStreamReader( + Type sourceType, + Func> reader + ) => this with { StreamProviders = StreamProviders.SetItem(sourceType, reader) }; + + internal ImmutableList Validations { get; init; } + + public ImportBuilder WithValidation(ValidationFunction validation) => + this with + { + Validations = Validations.Add(validation) + }; + + private bool StandardValidations(object instance, ValidationContext validationContext, Activity activity) + { + var ret = true; + var validationResults = new List(); + Validator.TryValidateObject(instance, validationContext, validationResults, true); + + foreach (var validation in validationResults) + { + activity.LogError(validation.ToString()); + ret = false; + } + return ret; + } + + public static string MissingCategoryErrorMessage = "Category with name {0} was not found."; + + private static readonly ConcurrentDictionary< + Type, + (Type, string, Func)[] + > TypesWithCategoryAttributes = new(); + + private bool CategoriesValidation(object instance, ValidationContext validationContext, Activity activity) + { + var type = instance.GetType(); + var dimensions = TypesWithCategoryAttributes.GetOrAdd( + type, + key => + key.GetProperties() + .Where(x => x.PropertyType == typeof(string)) + .Select(x => new + { + Attr = x.GetCustomAttribute(), + x.Name + }) + .Where(x => x.Attr != null) + .Select(x => + ( + x.Attr!.Type, + x.Name, + CreateGetter(type, x.Name) + ) + ) + .ToArray() + ); + + var ret = true; + foreach (var (dimensionType, propertyName, propGetter) in dimensions) + { + if (!Workspace.DataContext.DataSourcesByType.ContainsKey(dimensionType)) + { + activity.LogError(string.Format(MissingCategoryErrorMessage, dimensionType)); + ret = false; + continue; + } + //if (!string.IsNullOrEmpty(value)) + // TODO V10: Need to restore categories validation here (03.12.2024, Roland Bürgi) + //if (false) + //{ + // activity.LogError( + // string.Format(UnknownValueErrorMessage, propertyName, type.FullName, propGetter(instance)) + // ); + // ret = false; + //} + } + return ret; + } + + private static Func CreateGetter(Type type, string property) + { + var prm = Expression.Parameter(typeof(object)); + var typedPrm = Expression.Convert(prm, type); + var propertyExpression = Expression.Property(typedPrm, property); + return Expression.Lambda>(propertyExpression, prm).Compile(); + } + + +} diff --git a/src/MeshWeaver.Import/Configuration/ImportConfiguration.cs b/src/MeshWeaver.Import/Configuration/ImportConfiguration.cs index ce7e5d922..ca3aa0539 100644 --- a/src/MeshWeaver.Import/Configuration/ImportConfiguration.cs +++ b/src/MeshWeaver.Import/Configuration/ImportConfiguration.cs @@ -1,210 +1,21 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; using System.ComponentModel.DataAnnotations; -using System.Linq.Expressions; -using System.Reflection; -using MeshWeaver.Data; -using MeshWeaver.DataSetReader; -using MeshWeaver.DataSetReader.Csv; -using MeshWeaver.DataSetReader.Excel; -using MeshWeaver.Domain; namespace MeshWeaver.Import.Configuration; -public record ImportConfiguration +/// +/// Base configuration for import operations. +/// Contains common properties shared across different import types. 
+/// +public class ImportConfiguration { - public IWorkspace Workspace { get; } - - public ImportConfiguration( - IWorkspace workspace - ) - { - this.Workspace = workspace; - Validations = ImmutableList - .Empty.Add(StandardValidations) - .Add(CategoriesValidation); - if (workspace.MappedTypes.Any()) - ImportFormatBuilders = ImportFormatBuilders.Add( - ImportFormat.Default, - [f => f.WithMappings(m => m.WithAutoMappingsForTypes(workspace.MappedTypes))] - ); - } - - - private readonly ConcurrentDictionary ImportFormats = new(); - - public ImportConfiguration WithFormat( - string format, - Func configuration - ) => - this with - { - ImportFormatBuilders = ImportFormatBuilders.SetItem( - format, - ( - ImportFormatBuilders.GetValueOrDefault(format) - ?? ImmutableList>.Empty - ).Add(configuration) - ) - }; - - private ImmutableDictionary< - string, - ImmutableList> - > ImportFormatBuilders { get; init; } = - ImmutableDictionary>>.Empty; - - public ImportFormat? GetFormat(string format) - { - if (ImportFormats.TryGetValue(format, out var ret)) - return ret; - - var builders = ImportFormatBuilders.GetValueOrDefault(format); - if (builders == null) - return null; - - return ImportFormats.GetOrAdd( - format, - builders.Aggregate( - new ImportFormat(format, Workspace, Validations), - (a, b) => b.Invoke(a) - ) - ); - } - - internal ImmutableDictionary DataSetReaders { get; init; } = - ImmutableDictionary - .Empty.Add( - MimeTypes.csv, - (stream, options, _) => DataSetCsvSerializer.ReadAsync(stream, options) - ) - .Add( - MimeTypes.xlsx, - (stream, _, _) => Task.FromResult(new ExcelDataSetReader().Read(stream)) - ) - .Add(MimeTypes.xls, new ExcelDataSetReaderOld().ReadAsync); - - public ImportConfiguration WithDataSetReader(string fileType, ReadDataSet dataSetReader) => - this with - { - DataSetReaders = DataSetReaders.SetItem(fileType, dataSetReader) - }; - - internal ImmutableDictionary> StreamProviders { get; init; } = - ImmutableDictionary> - .Empty.Add(typeof(StringStream), CreateMemoryStream) - .Add(typeof(EmbeddedResource), CreateEmbeddedResourceStream); - - private static Stream CreateEmbeddedResourceStream(ImportRequest request) - { - var embeddedResource = (EmbeddedResource)request.Source; - var assembly = embeddedResource.Assembly; - var resourceName = $"{assembly.GetName().Name}.{embeddedResource.Resource}"; - var stream = assembly.GetManifestResourceStream(resourceName); - if (stream == null) - { - throw new ArgumentException($"Resource '{resourceName}' not found."); - } - return stream; - } - - private static Stream CreateMemoryStream(ImportRequest request) - { - var stream = new MemoryStream(); - var writer = new StreamWriter(stream); - writer.Write(((StringStream)request.Source).Content); - writer.Flush(); - stream.Position = 0; - return stream; - } - - public ImportConfiguration WithStreamReader( - Type sourceType, - Func reader - ) => this with { StreamProviders = StreamProviders.SetItem(sourceType, reader) }; - - internal ImmutableList Validations { get; init; } - - public ImportConfiguration WithValidation(ValidationFunction validation) => - this with - { - Validations = Validations.Add(validation) - }; - - private bool StandardValidations(object instance, ValidationContext validationContext, Activity activity) - { - var ret = true; - var validationResults = new List(); - Validator.TryValidateObject(instance, validationContext, validationResults, true); - - foreach (var validation in validationResults) - { - activity.LogError(validation.ToString()); - ret = false; - } - return ret; 
- } - - public static string MissingCategoryErrorMessage = "Category with name {0} was not found."; - - private static readonly ConcurrentDictionary< - Type, - (Type, string, Func)[] - > TypesWithCategoryAttributes = new(); - - private bool CategoriesValidation(object instance, ValidationContext validationContext, Activity activity) - { - var type = instance.GetType(); - var dimensions = TypesWithCategoryAttributes.GetOrAdd( - type, - key => - key.GetProperties() - .Where(x => x.PropertyType == typeof(string)) - .Select(x => new - { - Attr = x.GetCustomAttribute(), - x.Name - }) - .Where(x => x.Attr != null) - .Select(x => - ( - x.Attr!.Type, - x.Name, - CreateGetter(type, x.Name) - ) - ) - .ToArray() - ); - - var ret = true; - foreach (var (dimensionType, propertyName, propGetter) in dimensions) - { - if (!Workspace.DataContext.DataSourcesByType.ContainsKey(dimensionType)) - { - activity.LogError(string.Format(MissingCategoryErrorMessage, dimensionType)); - ret = false; - continue; - } - //if (!string.IsNullOrEmpty(value)) - // TODO V10: Need to restore categories validation here (03.12.2024, Roland Bürgi) - //if (false) - //{ - // activity.LogError( - // string.Format(UnknownValueErrorMessage, propertyName, type.FullName, propGetter(instance)) - // ); - // ret = false; - //} - } - return ret; - } - - private static Func CreateGetter(Type type, string property) - { - var prm = Expression.Parameter(typeof(object)); - var typedPrm = Expression.Convert(prm, type); - var propertyExpression = Expression.Property(typedPrm, property); - return Expression.Lambda>(propertyExpression, prm).Compile(); - } - - + /// + /// Unique identifier for this configuration (e.g., file name). + /// + [Key] + public required string Name { get; init; } + + /// + /// Entity identifier that this configuration applies to (e.g., PricingId, ProjectId, etc.). + /// + public required string EntityId { get; init; } } diff --git a/src/MeshWeaver.Import/ConfiguredExcelImporter.cs b/src/MeshWeaver.Import/ConfiguredExcelImporter.cs index 5436c09cc..141192721 100644 --- a/src/MeshWeaver.Import/ConfiguredExcelImporter.cs +++ b/src/MeshWeaver.Import/ConfiguredExcelImporter.cs @@ -1,4 +1,4 @@ -using ClosedXML.Excel; +using ClosedXML.Excel; using MeshWeaver.Import.Configuration; using MeshWeaver.Utils; @@ -8,22 +8,16 @@ namespace MeshWeaver.Import; /// Imports entities from Excel files using declarative configuration. /// /// The entity type to import -public class ConfiguredExcelImporter where T : class +public class ConfiguredExcelImporter(Func, T> entityBuilder) + where T : class { - private readonly Func, T> entityBuilder; - - public ConfiguredExcelImporter(Func, T> entityBuilder) - { - this.entityBuilder = entityBuilder; - } - public IEnumerable Import(Stream stream, string sourceName, ExcelImportConfiguration config) { using var wb = new XLWorkbook(stream); var ws = string.IsNullOrWhiteSpace(config.WorksheetName) ? 
wb.Worksheets.First() : wb.Worksheet(config.WorksheetName); // Pre-read total cells for allocations (tolerate duplicates/invalid addresses) - var allocationTotals = new Dictionary(StringComparer.OrdinalIgnoreCase); + var allocationTotals = new Dictionary(StringComparer.OrdinalIgnoreCase); foreach (var a in config.Allocations) { if (string.IsNullOrWhiteSpace(a.TotalCell)) @@ -31,7 +25,7 @@ public IEnumerable Import(Stream stream, string sourceName, ExcelImportConfig // Only accept simple A1-style addresses; skip anything suspicious if (!IsValidCellAddress(a.TotalCell)) continue; - var totalVal = GetCellDecimal(ws, a.TotalCell) ?? 0m; + var totalVal = GetCellDouble(ws, a.TotalCell) ?? 0; // Last write wins if duplicates exist; avoids ArgumentException allocationTotals[a.TotalCell] = totalVal; } @@ -49,10 +43,10 @@ public IEnumerable Import(Stream stream, string sourceName, ExcelImportConfig rows = rows.Where(r => !IsIgnoredByExpressions(r, config)); // Pre-calc denominators for each allocation (sum of weights over data rows) - var allocationDenominators = new Dictionary(); + var allocationDenominators = new Dictionary(); foreach (var alloc in config.Allocations) { - decimal denom = 0m; + var denom = 0.0; foreach (var row in rows) { denom += SumWeightColumns(row, alloc.WeightColumns); @@ -198,43 +192,43 @@ private static bool EvaluatePropertyNull(IXLRow row, ExcelImportConfiguration co return s; } - private static decimal SumColumns(IXLRow row, IEnumerable columnLetters) - => columnLetters.Select(c => GetCellDecimal(row.Worksheet, c + row.RowNumber()) ?? 0m).Sum(); + private static double SumColumns(IXLRow row, IEnumerable columnLetters) + => columnLetters.Select(c => GetCellDouble(row.Worksheet, c + row.RowNumber()) ?? 0).Sum(); - private static decimal SumWeightColumns(IXLRow row, IEnumerable columnLetters) + private static double SumWeightColumns(IXLRow row, IEnumerable columnLetters) { - decimal sum = 0m; + var sum = 0.0; foreach (var col in columnLetters) { - var val = GetCellDecimal(row.Worksheet, col + row.RowNumber()); + var val = GetCellDouble(row.Worksheet, col + row.RowNumber()); if (val.HasValue) sum += val.Value; } return sum; } - private static decimal DiffColumns(IXLRow row, IEnumerable columnLetters) + private static double DiffColumns(IXLRow row, IEnumerable columnLetters) { var cols = columnLetters.Take(2).ToArray(); - var a = GetCellDecimal(row.Worksheet, cols.ElementAtOrDefault(0) + row.RowNumber()) ?? 0m; - var b = GetCellDecimal(row.Worksheet, cols.ElementAtOrDefault(1) + row.RowNumber()) ?? 0m; + var a = GetCellDouble(row.Worksheet, cols.ElementAtOrDefault(0) + row.RowNumber()) ?? 0; + var b = GetCellDouble(row.Worksheet, cols.ElementAtOrDefault(1) + row.RowNumber()) ?? 0; return b - a; } - private static decimal? GetCellDecimal(IXLWorksheet ws, string cellAddress) + private static double? 
GetCellDouble(IXLWorksheet ws, string cellAddress) { var cell = ws.Cell(cellAddress); cell = ResolveMergedAnchor(cell); if (cell.DataType == XLDataType.Number) { - if (cell.TryGetValue(out decimal dec)) return dec; - if (cell.TryGetValue(out double dbl)) return (decimal)dbl; + if (cell.TryGetValue(out double dec)) return dec; + if (cell.TryGetValue(out double dbl)) return (double)dbl; if (cell.TryGetValue(out int i)) return i; } var str = GetStringSafe(cell); // Try invariant culture first, then current culture, allow currency/thousands - if (decimal.TryParse(str, System.Globalization.NumberStyles.Number | System.Globalization.NumberStyles.AllowCurrencySymbol, System.Globalization.CultureInfo.InvariantCulture, out var parsed)) + if (double.TryParse(str, System.Globalization.NumberStyles.Number | System.Globalization.NumberStyles.AllowCurrencySymbol, System.Globalization.CultureInfo.InvariantCulture, out var parsed)) return parsed; - if (decimal.TryParse(str, System.Globalization.NumberStyles.Number | System.Globalization.NumberStyles.AllowCurrencySymbol, System.Globalization.CultureInfo.CurrentCulture, out parsed)) + if (double.TryParse(str, System.Globalization.NumberStyles.Number | System.Globalization.NumberStyles.AllowCurrencySymbol, System.Globalization.CultureInfo.CurrentCulture, out parsed)) return parsed; return null; } diff --git a/src/MeshWeaver.Import/ExcelImportExtensions.cs b/src/MeshWeaver.Import/ExcelImportExtensions.cs index 97cfee11f..79b032b75 100644 --- a/src/MeshWeaver.Import/ExcelImportExtensions.cs +++ b/src/MeshWeaver.Import/ExcelImportExtensions.cs @@ -14,7 +14,7 @@ public static class ExcelImportExtensions /// Function to build an entity from property dictionary /// Enumerable of imported entities public static IEnumerable ImportExcel( - this ImportConfiguration configuration, + this ImportBuilder configuration, Stream stream, ExcelImportConfiguration excelConfig, Func, T> entityBuilder) where T : class @@ -33,7 +33,7 @@ public static IEnumerable ImportExcel( /// Function to build an entity from property dictionary /// Enumerable of imported entities public static IEnumerable ImportExcel( - this ImportConfiguration configuration, + this ImportBuilder configuration, string filePath, ExcelImportConfiguration excelConfig, Func, T> entityBuilder) where T : class diff --git a/src/MeshWeaver.Import/Implementation/ImportManager.cs b/src/MeshWeaver.Import/Implementation/ImportManager.cs index 859c075cf..7c21163ae 100644 --- a/src/MeshWeaver.Import/Implementation/ImportManager.cs +++ b/src/MeshWeaver.Import/Implementation/ImportManager.cs @@ -10,7 +10,7 @@ namespace MeshWeaver.Import.Implementation; public class ImportManager { - public ImportConfiguration Configuration { get; } + public ImportBuilder Configuration { get; } public IWorkspace Workspace { get; } public IMessageHub Hub { get; } @@ -22,7 +22,7 @@ public ImportManager(IWorkspace workspace, IMessageHub hub) Workspace = workspace; Hub = hub; - Configuration = hub.Configuration.GetListOfLambdas().Aggregate(new ImportConfiguration(workspace), (c, l) => l.Invoke(c)); + Configuration = hub.Configuration.GetListOfLambdas().Aggregate(new ImportBuilder(workspace, hub.ServiceProvider), (c, l) => l.Invoke(c)); // Don't initialize the import hub in constructor - do it lazily to avoid timing issues logger?.LogDebug("ImportManager constructor completed for hub {HubAddress}", hub.Address); @@ -83,18 +83,17 @@ private void FinishWithException(IMessageDelivery request, Exception e, private async Task ImportImpl(IMessageDelivery 
request, CancellationToken cancellationToken) { var activity = new Activity(ActivityCategory.Import, Hub, autoClose: false); + var importActivity = activity.StartSubActivity(ActivityCategory.Import); try { activity.LogInformation("Starting import {ActivityId} for request {RequestId}", activity.Id, request.Id); - var importActivity = activity.StartSubActivity(ActivityCategory.Import); var imported = await ImportInstancesAsync(request.Message, importActivity, cancellationToken); importActivity.Complete(log => { if (log.HasErrors()) { - Hub.Post(new ImportResponse(Hub.Version, log), o => o.ResponseFor(request)); return; } @@ -107,6 +106,7 @@ private async Task ImportImpl(IMessageDelivery request, Cancellat request ); activity.LogInformation("Finished import {ActivityId} for request {RequestId}", activity.Id, request.Id); + Hub.Post(new ImportResponse(Hub.Version, log), o => o.ResponseFor(request)); }); @@ -114,6 +114,9 @@ private async Task ImportImpl(IMessageDelivery request, Cancellat } catch (Exception e) { + importActivity.LogError(e.Message); + importActivity.Complete(); + activity.LogError("Import {ImportId} for {RequestId} failed with exception: {Exception}", activity.Id, request.Id, e.Message); FinishWithException(request, e, activity); } @@ -124,11 +127,116 @@ public async Task ImportInstancesAsync( Activity? activity, CancellationToken cancellationToken) { + // If ExcelImportConfiguration is provided, use ConfiguredExcelImporter directly + if (importRequest.Configuration is ExcelImportConfiguration excelConfig) + { + return await ImportWithConfiguredExcelImporter(importRequest, excelConfig, activity); + } + var (dataSet, format) = await ReadDataSetAsync(importRequest, activity, cancellationToken); var imported = await format.Import(importRequest, dataSet, activity, cancellationToken); return imported!; } + private async Task ImportWithConfiguredExcelImporter( + ImportRequest importRequest, + ExcelImportConfiguration config, + Activity? 
activity) + { + activity?.LogInformation("Using ConfiguredExcelImporter with TypeName: {TypeName}", config.TypeName); + + // Get the stream provider + var sourceType = importRequest.Source.GetType(); + if (!Configuration.StreamProviders.TryGetValue(sourceType, out var streamProvider)) + throw new ImportException($"Unknown stream type: {sourceType.FullName}"); + + var stream = await streamProvider.Invoke(importRequest); + if (stream == null) + throw new ImportException($"Could not open stream: {importRequest.Source}"); + + // Get the source name for tracking + var sourceName = importRequest.Source switch + { + CollectionSource cs => cs.Path, + _ => "unknown" + }; + + // Resolve the entity type from TypeName + if (string.IsNullOrWhiteSpace(config.TypeName)) + throw new ImportException("TypeName is required in ExcelImportConfiguration"); + + var entityType = ResolveType(config.TypeName); + if (entityType == null) + throw new ImportException($"Could not resolve type: {config.TypeName}"); + + activity?.LogInformation("Resolved entity type: {EntityType}", entityType.FullName); + + // Create entity builder using AutoEntityBuilder.CreateBuilder() generic method + var builderGenericMethod = typeof(AutoEntityBuilder).GetMethods() + .FirstOrDefault(m => m.Name == nameof(AutoEntityBuilder.CreateBuilder) && m.IsGenericMethod); + if (builderGenericMethod == null) + throw new ImportException("Could not find AutoEntityBuilder.CreateBuilder method"); + + var builderMethod = builderGenericMethod.MakeGenericMethod(entityType); + var entityBuilder = builderMethod.Invoke(null, null); + if (entityBuilder == null) + throw new ImportException("Failed to create entity builder"); + + // Create ConfiguredExcelImporter using reflection (since it's generic) + var importerType = typeof(ConfiguredExcelImporter<>).MakeGenericType(entityType); + var importer = Activator.CreateInstance(importerType, entityBuilder); + if (importer == null) + throw new ImportException($"Failed to create ConfiguredExcelImporter<{entityType.Name}>"); + + // Call the Import method + var importMethod = importerType.GetMethod(nameof(ConfiguredExcelImporter.Import), new[] { typeof(Stream), typeof(string), typeof(ExcelImportConfiguration) }); + if (importMethod == null) + throw new ImportException("Could not find Import method on ConfiguredExcelImporter"); + + var importedEntities = importMethod.Invoke(importer, new object[] { stream, sourceName, config }) as System.Collections.IEnumerable; + if (importedEntities == null) + throw new ImportException("Import returned null"); + + // Convert to EntityStore + var entities = importedEntities.Cast().ToArray(); + activity?.LogInformation("Imported {Count} entities of type {TypeName}", entities.Length, config.TypeName); + + var entityStore = new EntityStore(); + var instanceDict = new Dictionary(); + + foreach (var entity in entities) + { + var id = GetEntityId(entity, entityType); + instanceDict[id] = entity; + } + + var collection = new InstanceCollection(instanceDict); + var collectionName = entityType.FullName ?? entityType.Name; + + entityStore = entityStore.WithCollection(collectionName, collection); + return entityStore; + } + + private string GetEntityId(object entity, Type entityType) + { + // Try to get Id property + var idProp = entityType.GetProperty("Id"); + if (idProp != null) + { + var idValue = idProp.GetValue(entity); + if (idValue != null) + return idValue.ToString() ?? 
Guid.NewGuid().ToString(); + } + + // Fall back to hash code or GUID + return entity.GetHashCode().ToString(); + } + + private Type? ResolveType(string typeName) + { + return Hub.TypeRegistry.GetType(typeName); + } + private async Task<(IDataSet dataSet, ImportFormat format)> ReadDataSetAsync(ImportRequest importRequest, Activity? activity, CancellationToken cancellationToken) @@ -137,7 +245,7 @@ public async Task ImportInstancesAsync( if (!Configuration.StreamProviders.TryGetValue(sourceType, out var streamProvider)) throw new ImportException($"Unknown stream type: {sourceType.FullName}"); - var stream = streamProvider.Invoke(importRequest); + var stream = await streamProvider.Invoke(importRequest); if (stream == null) throw new ImportException($"Could not open stream: {importRequest.Source}"); @@ -156,7 +264,9 @@ public async Task ImportInstancesAsync( cancellationToken ); activity?.LogInformation("Read data set with {Tables} tables. Will import in format {Format}", dataSet.Tables.Count, format); + format ??= importRequest.Format; + if (format == null) throw new ImportException("Format not specified."); diff --git a/src/MeshWeaver.Import/Implementation/ImportUnpartitionedDataSource.cs b/src/MeshWeaver.Import/Implementation/ImportUnpartitionedDataSource.cs index 173b7e332..422cc7166 100644 --- a/src/MeshWeaver.Import/Implementation/ImportUnpartitionedDataSource.cs +++ b/src/MeshWeaver.Import/Implementation/ImportUnpartitionedDataSource.cs @@ -30,11 +30,11 @@ protected override async Task GetInitialValueAsync(ISynchronization } private ImmutableList< - Func + Func > Configurations { get; init; } = - ImmutableList>.Empty; + ImmutableList>.Empty; public ImportUnpartitionedDataSource WithImportConfiguration( - Func config + Func config ) => this with { Configurations = Configurations.Add(config) }; } diff --git a/src/MeshWeaver.Import/ImportRegistryExtensions.cs b/src/MeshWeaver.Import/ImportRegistryExtensions.cs index caecc9f83..c6d91afe2 100644 --- a/src/MeshWeaver.Import/ImportRegistryExtensions.cs +++ b/src/MeshWeaver.Import/ImportRegistryExtensions.cs @@ -17,7 +17,7 @@ public static MessageHubConfiguration AddImport(this MessageHubConfiguration con public static MessageHubConfiguration AddImport( this MessageHubConfiguration configuration, - Func importConfiguration + Func importConfiguration ) { var lambdas = configuration.GetListOfLambdas(); @@ -30,6 +30,14 @@ Func importConfiguration .AddData() .WithServices(x => x.AddScoped()) .AddHandlers() + .WithTypes( + typeof(ImportRequest), + typeof(ImportResponse), + typeof(Source), + typeof(StringStream), + typeof(CollectionSource), + typeof(EmbeddedResource) + ) .WithInitialization(h => h.ServiceProvider.GetRequiredService()) ; @@ -108,11 +116,10 @@ EmbeddedResource source return ret; } - - internal static ImmutableList> GetListOfLambdas( + internal static ImmutableList> GetListOfLambdas( this MessageHubConfiguration config ) => - config.Get>>() ?? []; + config.Get>>() ?? 
[]; } public record EmbeddedResource(Assembly Assembly, string Resource) : Source; diff --git a/src/MeshWeaver.Import/ImportRequest.cs b/src/MeshWeaver.Import/ImportRequest.cs index 952c718f1..c22824384 100644 --- a/src/MeshWeaver.Import/ImportRequest.cs +++ b/src/MeshWeaver.Import/ImportRequest.cs @@ -1,4 +1,5 @@ -using MeshWeaver.Data; +using System.Text.Json.Serialization; +using MeshWeaver.Data; using MeshWeaver.DataSetReader; using MeshWeaver.Import.Configuration; using MeshWeaver.Messaging; @@ -9,18 +10,34 @@ namespace MeshWeaver.Import; /// This is a request entity triggering import when executing in a data hub /// using the Import Plugin. See also AddImport method. /// -/// Content of the source to be imported, e.g. a string (shipping the entire content) or a file name (together with StreamType = File) -public record ImportRequest(Source Source) : IRequest +public record ImportRequest : IRequest { public ImportRequest(string content) : this(new StringStream(content)) { } - public string MimeType { get; init; } = - MimeTypes.MapFileExtension( - Source is StreamSource stream ? Path.GetExtension(stream.Name) : "" + /// + /// This is a request entity triggering import when executing in a data hub + /// using the Import Plugin. See also AddImport method. + /// + /// Content of the source to be imported, e.g. a string (shipping the entire content) or a file name (together with StreamType = File) + [JsonConstructor] + public ImportRequest(Source Source) + { + this.Source = Source; + MimeType = MimeTypes.MapFileExtension( + Source is CollectionSource stream ? Path.GetExtension(stream.Path) : "" ) ?? ""; + } + + public string MimeType { get; init; } public string Format { get; init; } = ImportFormat.Default; + + /// + /// Optional import configuration. When provided, this configuration will be used instead of the Format string. + /// + public ImportConfiguration? Configuration { get; init; } + public object? TargetDataSource { get; init; } public UpdateOptions UpdateOptions { get; init; } = UpdateOptions.Default; public DataSetReaderOptions DataSetReaderOptions { get; init; } = new(); @@ -39,6 +56,13 @@ public ImportRequest(string content) public bool SaveLog { get; init; } + /// Content of the source to be imported, e.g. 
a string (shipping the entire content) or a file name (together with StreamType = File) + public Source Source { get; init; } + + public void Deconstruct(out Source Source) + { + Source = this.Source; + } } public record ImportResponse(long Version, ActivityLog Log); @@ -47,6 +71,6 @@ public abstract record Source { } public record StringStream(string Content) : Source; -public record StreamSource(string Name, Stream Stream) : Source; +public record CollectionSource(string Collection, string Path) : Source; //public record FileStream(string FileName) : Source; diff --git a/src/MeshWeaver.Import/MeshWeaver.Import.csproj b/src/MeshWeaver.Import/MeshWeaver.Import.csproj index e9296714c..75e65cbc7 100644 --- a/src/MeshWeaver.Import/MeshWeaver.Import.csproj +++ b/src/MeshWeaver.Import/MeshWeaver.Import.csproj @@ -10,5 +10,6 @@ + diff --git a/src/MeshWeaver.Layout/Client/LayoutClientExtensions.cs b/src/MeshWeaver.Layout/Client/LayoutClientExtensions.cs index 5dd83f799..91fb2b98d 100644 --- a/src/MeshWeaver.Layout/Client/LayoutClientExtensions.cs +++ b/src/MeshWeaver.Layout/Client/LayoutClientExtensions.cs @@ -13,7 +13,7 @@ namespace MeshWeaver.Layout.Client; public static class LayoutClientExtensions { - public static void UpdatePointer(this ISynchronizationStream stream, + public static void UpdatePointer(this ISynchronizationStream stream, object? value, string? dataContext, JsonPointerReference? reference, ModelParameter? model = null) @@ -68,14 +68,14 @@ public static void UpdatePointer(this ISynchronizationStream stream } public static IObservable DataBind(this ISynchronizationStream stream, - JsonPointerReference reference, - string? dataContext = null, - Func? conversion = null, + JsonPointerReference reference, + string? dataContext = null, + Func? conversion = null, T? defaultValue = default(T)) => stream.GetStream(JsonPointer.Parse(GetPointer(reference.Pointer, dataContext ?? ""))) - .Select(x => - conversion is not null - ? conversion.Invoke(x, defaultValue) + .Select(x => + conversion is not null + ? conversion.Invoke(x, defaultValue) : stream.Hub.ConvertSingle(x, null, defaultValue!)) .Where(x => x is not null) .Select(x => (T)x!) @@ -139,7 +139,7 @@ private static string GetPointer(string pointer, string? dataContext) return $"{dataContext}/{pointer.TrimEnd('/')}"; } - public static T? ConvertSingle(this IMessageHub hub, object? value, Func? conversion, T? defaultValue = default(T)) + public static T? ConvertSingle(this IMessageHub hub, object? value, Func? conversion, T? defaultValue = default(T)) { conversion ??= null; if (conversion != null) @@ -169,7 +169,7 @@ private static string GetPointer(string pointer, string? dataContext) // This is a nullable type - check if it has a value var underlyingValue = valueType.GetProperty("Value")?.GetValue(value); var hasValue = (bool)(valueType.GetProperty("HasValue")?.GetValue(value) ?? false); - + if (hasValue && underlyingValue != null) { // Use the underlying value for conversion @@ -182,7 +182,7 @@ private static string GetPointer(string pointer, string? dataContext) } } } - + // Not a nullable type, proceed with normal numeric conversion return ConvertNumericValue(value); } @@ -190,13 +190,13 @@ private static string GetPointer(string pointer, string? dataContext) private static T? ConvertNumericValue(object? value) { var targetType = Nullable.GetUnderlyingType(typeof(T)) ?? 
typeof(T); - + // Handle numeric conversions more safely if (IsNumericType(targetType)) { return ConvertNumericSafely(value, targetType); } - + // Fall back to Convert.ChangeType for non-numeric types return (T?)Convert.ChangeType(value, typeof(T)); } @@ -205,8 +205,8 @@ private static bool IsNumericType(Type type) { return Type.GetTypeCode(type) switch { - TypeCode.Byte or TypeCode.SByte or TypeCode.UInt16 or TypeCode.UInt32 or TypeCode.UInt64 or - TypeCode.Int16 or TypeCode.Int32 or TypeCode.Int64 or + TypeCode.Byte or TypeCode.SByte or TypeCode.UInt16 or TypeCode.UInt32 or TypeCode.UInt64 or + TypeCode.Int16 or TypeCode.Int32 or TypeCode.Int64 or TypeCode.Decimal or TypeCode.Double or TypeCode.Single => true, _ => false }; @@ -219,27 +219,27 @@ TypeCode.Int16 or TypeCode.Int32 or TypeCode.Int64 or { if (double.IsNaN(d) || double.IsInfinity(d)) throw new OverflowException($"Cannot convert {d} to {targetType.Name}"); - + // For integer targets, check if the value is within range and truncate if (IsIntegerType(targetType)) { return ConvertDoubleToInteger(d, targetType); } } - - // Handle special float values + + // Handle special float values if (value is float f) { if (float.IsNaN(f) || float.IsInfinity(f)) throw new OverflowException($"Cannot convert {f} to {targetType.Name}"); - + // For integer targets, check if the value is within range and truncate if (IsIntegerType(targetType)) { return ConvertDoubleToInteger(f, targetType); } } - + // Use Convert.ChangeType for other numeric conversions return value is null ? default : (T?)Convert.ChangeType(value, targetType); } @@ -248,7 +248,7 @@ private static bool IsIntegerType(Type type) { return Type.GetTypeCode(type) switch { - TypeCode.Byte or TypeCode.SByte or TypeCode.UInt16 or TypeCode.UInt32 or TypeCode.UInt64 or + TypeCode.Byte or TypeCode.SByte or TypeCode.UInt16 or TypeCode.UInt32 or TypeCode.UInt64 or TypeCode.Int16 or TypeCode.Int32 or TypeCode.Int64 => true, _ => false }; @@ -259,28 +259,28 @@ private static T ConvertDoubleToInteger(double value, Type targetType) // Check bounds and truncate the value return Type.GetTypeCode(targetType) switch { - TypeCode.Int32 => value > int.MaxValue || value < int.MinValue + TypeCode.Int32 => value > int.MaxValue || value < int.MinValue ? throw new OverflowException($"Value {value} is out of range for Int32") : (T)(object)(int)Math.Truncate(value), - TypeCode.Int16 => value > short.MaxValue || value < short.MinValue + TypeCode.Int16 => value > short.MaxValue || value < short.MinValue ? throw new OverflowException($"Value {value} is out of range for Int16") : (T)(object)(short)Math.Truncate(value), - TypeCode.Int64 => value > long.MaxValue || value < long.MinValue + TypeCode.Int64 => value > long.MaxValue || value < long.MinValue ? throw new OverflowException($"Value {value} is out of range for Int64") : (T)(object)(long)Math.Truncate(value), - TypeCode.Byte => value > byte.MaxValue || value < byte.MinValue + TypeCode.Byte => value > byte.MaxValue || value < byte.MinValue ? throw new OverflowException($"Value {value} is out of range for Byte") : (T)(object)(byte)Math.Truncate(value), - TypeCode.SByte => value > sbyte.MaxValue || value < sbyte.MinValue + TypeCode.SByte => value > sbyte.MaxValue || value < sbyte.MinValue ? throw new OverflowException($"Value {value} is out of range for SByte") : (T)(object)(sbyte)Math.Truncate(value), - TypeCode.UInt16 => value > ushort.MaxValue || value < ushort.MinValue + TypeCode.UInt16 => value > ushort.MaxValue || value < ushort.MinValue ? 
throw new OverflowException($"Value {value} is out of range for UInt16") : (T)(object)(ushort)Math.Truncate(value), - TypeCode.UInt32 => value > uint.MaxValue || value < uint.MinValue + TypeCode.UInt32 => value > uint.MaxValue || value < uint.MinValue ? throw new OverflowException($"Value {value} is out of range for UInt32") : (T)(object)(uint)Math.Truncate(value), - TypeCode.UInt64 => value > ulong.MaxValue || value < 0 + TypeCode.UInt64 => value > ulong.MaxValue || value < 0 ? throw new OverflowException($"Value {value} is out of range for UInt64") : (T)(object)(ulong)Math.Truncate(value), _ => throw new InvalidOperationException($"Unsupported integer type: {targetType.Name}") diff --git a/src/MeshWeaver.Layout/Composition/LayoutAreaHost.cs b/src/MeshWeaver.Layout/Composition/LayoutAreaHost.cs index 819330862..e4b02d869 100644 --- a/src/MeshWeaver.Layout/Composition/LayoutAreaHost.cs +++ b/src/MeshWeaver.Layout/Composition/LayoutAreaHost.cs @@ -48,16 +48,16 @@ public LayoutAreaHost(IWorkspace workspace, var context = new RenderingContext(reference.Area) { Layout = reference.Layout }; LayoutDefinition = uiControlService.LayoutDefinition; configuration ??= c => c; - var delayedStart = new TaskCompletionSource(); + // Create stream with deferred initialization to avoid circular dependency + // where initialization lambda uses 'this' before Stream property is assigned Stream = new SynchronizationStream( new(workspace.Hub.Address, reference), workspace.Hub, reference, workspace.ReduceManager.ReduceTo(), - c => configuration.Invoke(c) + c => configuration.Invoke(c.WithDeferredInitialization()) .WithInitialization(async (_, _) => { - await delayedStart.Task; return ( await LayoutDefinition .RenderAsync(this, context, new EntityStore() @@ -72,6 +72,10 @@ await LayoutDefinition return Task.CompletedTask; })); Reference = reference; + + // Manually trigger initialization now that Stream property is assigned + // This resolves the circular dependency where initialization lambda uses 'this' + Stream.Hub.Post(new InitializeHubRequest()); Stream.RegisterForDisposal(this); Stream.RegisterForDisposal( Stream.Hub.Register( @@ -87,7 +91,6 @@ await LayoutDefinition ); logger = Stream.Hub.ServiceProvider.GetRequiredService>(); - delayedStart.SetResult(); } diff --git a/src/MeshWeaver.Messaging.Contract/PreventLoggingAttribute.cs b/src/MeshWeaver.Messaging.Contract/PreventLoggingAttribute.cs new file mode 100644 index 000000000..aa62361d2 --- /dev/null +++ b/src/MeshWeaver.Messaging.Contract/PreventLoggingAttribute.cs @@ -0,0 +1,10 @@ +namespace MeshWeaver.Messaging; + +/// +/// Marks a property or field to be excluded from serialization when logging. +/// This is useful for properties with large payloads that would clutter logs. 
+/// +[AttributeUsage(AttributeTargets.Property | AttributeTargets.Field, Inherited = true)] +public class PreventLoggingAttribute : Attribute +{ +} diff --git a/src/MeshWeaver.Messaging.Hub/DeferralContainer.cs b/src/MeshWeaver.Messaging.Hub/DeferralContainer.cs deleted file mode 100644 index bd460cc0f..000000000 --- a/src/MeshWeaver.Messaging.Hub/DeferralContainer.cs +++ /dev/null @@ -1,31 +0,0 @@ -using MeshWeaver.Utils; - -namespace MeshWeaver.Messaging; - -public class DeferralContainer : IAsyncDisposable -{ - private readonly LinkedList deferralChain = new(); - - public DeferralContainer(SyncDelivery asyncDelivery, SyncDelivery failure) - { - deferralChain.AddFirst(new DeferralItem(_ => false, asyncDelivery, failure)); - } - - public IDisposable Defer(Predicate deferredFilter) - { - var deferralItem = deferralChain.First!; - - var deliveryLink = new DeferralItem(deferredFilter, deferralItem.Value.DeliverMessage, deferralItem.Value.Failure); - deferralChain.AddFirst(deliveryLink); - return new AnonymousDisposable(deliveryLink.Release); - } - - public IMessageDelivery DeliverMessage(IMessageDelivery delivery) => - deferralChain.First!.Value.DeliverMessage(delivery); - - public async ValueTask DisposeAsync() - { - foreach (var deferralItem in deferralChain) - await deferralItem.DisposeAsync(); - } -} diff --git a/src/MeshWeaver.Messaging.Hub/DeferralItem.cs b/src/MeshWeaver.Messaging.Hub/DeferralItem.cs deleted file mode 100644 index 468b8afd6..000000000 --- a/src/MeshWeaver.Messaging.Hub/DeferralItem.cs +++ /dev/null @@ -1,98 +0,0 @@ -using System.Threading.Tasks.Dataflow; - -namespace MeshWeaver.Messaging; - -public record DeferralItem : IAsyncDisposable, IDisposable -{ - private readonly SyncDelivery syncDelivery; - private readonly SyncDelivery failure; - private readonly ActionBlock executionBuffer; - private readonly BufferBlock deferral = new(); - private bool isReleased; - - public DeferralItem(Predicate Filter, SyncDelivery syncDelivery, SyncDelivery failure) - { - this.syncDelivery = syncDelivery; - this.failure = failure; - executionBuffer = new ActionBlock(d => syncDelivery(d)); - this.Filter = Filter; - } - - public IMessageDelivery Failure( - IMessageDelivery delivery - ) - => failure.Invoke(delivery); - - public IMessageDelivery DeliverMessage( - IMessageDelivery delivery - ) - { - if (Filter(delivery)) - { - deferral.Post(delivery); - return null!; - } - - try - { - // TODO V10: Add logging here. (30.07.2024, Roland Bürgi) - var ret = syncDelivery.Invoke(delivery); - if(ret is null) - return null!; - if (ret.State == MessageDeliveryState.Failed) - return failure(ret); - return ret; - } - catch (Exception e) - { - // TODO V10: Add logging here. 
(30.07.2024, Roland Bürgi) - var ret = delivery.Failed(e.Message); - failure.Invoke(ret); - return ret; - } - } - - private bool isLinked; - private readonly object locker = new object(); - - public void Dispose() - { - bool shouldLink; - lock (locker) - { - if (isLinked) - return; - isLinked = true; - shouldLink = true; - } - - // Link OUTSIDE the lock to avoid deadlock - if (shouldLink) - deferral.LinkTo(executionBuffer, new DataflowLinkOptions { PropagateCompletion = true }); - } - - public void Release() - { - bool shouldLink; - lock (locker) - { - if (isReleased) - return; - isReleased = true; - shouldLink = true; - } - - // Link OUTSIDE the lock to avoid deadlock - if (shouldLink) - deferral.LinkTo(executionBuffer); - } - - public async ValueTask DisposeAsync() - { - Dispose(); - deferral.Complete(); - await executionBuffer.Completion; - } - - public Predicate Filter { get; init; } -} diff --git a/src/MeshWeaver.Messaging.Hub/IMessageHub.cs b/src/MeshWeaver.Messaging.Hub/IMessageHub.cs index 0af3e4b4f..b558f1e6f 100644 --- a/src/MeshWeaver.Messaging.Hub/IMessageHub.cs +++ b/src/MeshWeaver.Messaging.Hub/IMessageHub.cs @@ -14,21 +14,32 @@ public interface IMessageHub : IMessageHandlerRegistry, IDisposable IServiceProvider ServiceProvider { get; } Task> AwaitResponse(IRequest request) => - AwaitResponse(request, new CancellationTokenSource(DefaultTimeout).Token); + AwaitResponse(request, new CancellationTokenSource(Configuration.RequestTimeout).Token); - Task> AwaitResponse(IMessageDelivery> request, CancellationToken cancellationToken); + async Task> AwaitResponse(IMessageDelivery> request, CancellationToken cancellationToken) + => (IMessageDelivery)(await AwaitResponse(request, o => o, o => o, cancellationToken))!; - Task> AwaitResponse(IRequest request, CancellationToken cancellationToken); + Task> AwaitResponse(IRequest request, + CancellationToken cancellationToken) + => AwaitResponse(request, x => x, x => x, cancellationToken)!; - Task> AwaitResponse(IRequest request, Func options, CancellationToken cancellationToken = default); - Task AwaitResponse(IRequest request, Func, TResult> selector) + async Task> AwaitResponse(IRequest request, + Func options, CancellationToken cancellationToken = default) + => (await AwaitResponse(request, options, o => o, cancellationToken))!; + Task AwaitResponse(IRequest request, + Func, TResult> selector) => AwaitResponse(request, x => x, selector); - Task AwaitResponse(IRequest request, Func, TResult> selector, CancellationToken cancellationToken) + Task AwaitResponse(IRequest request, + Func, TResult> selector, CancellationToken cancellationToken) => AwaitResponse(request, x => x, selector, cancellationToken); - Task AwaitResponse(IRequest request, Func options, Func, TResult> selector, CancellationToken cancellationToken = default); + async Task AwaitResponse(IRequest request, Func options, + Func, TResult> selector, CancellationToken cancellationToken = default) + => (TResult?)await AwaitResponse((object)request, options, o => selector((IMessageDelivery)o), cancellationToken); + + Task AwaitResponse(object request, Func options, Func selector, CancellationToken cancellationToken = default); Task RegisterCallback(IMessageDelivery> request, AsyncDelivery callback, CancellationToken cancellationToken = default) => RegisterCallback((IMessageDelivery)request, (r, c) => callback((IMessageDelivery)r, c), @@ -97,7 +108,13 @@ IMessageHub GetHostedHub(TAddress address, Func disposeAction); JsonSerializerOptions JsonSerializerOptions { get; } 
MessageHubRunLevel RunLevel { get; } - IDisposable Defer(Predicate deferredFilter); + + /// + /// Opens a named initialization gate, allowing all deferred messages to be processed. + /// + /// The name of the gate to open + /// True if the gate was found and opened, false if already opened or not found + bool OpenGate(string name); internal Task HandleMessageAsync( @@ -107,12 +124,6 @@ CancellationToken cancellationToken Task? Disposal { get; } ITypeRegistry TypeRegistry { get; } -#if DEBUG - - internal static TimeSpan DefaultTimeout => TimeSpan.FromSeconds(3000); -#else - internal static TimeSpan DefaultTimeout => TimeSpan.FromSeconds(30); - -#endif + internal void Start(); } diff --git a/src/MeshWeaver.Messaging.Hub/IMessageService.cs b/src/MeshWeaver.Messaging.Hub/IMessageService.cs index 747062dfb..878e3862f 100644 --- a/src/MeshWeaver.Messaging.Hub/IMessageService.cs +++ b/src/MeshWeaver.Messaging.Hub/IMessageService.cs @@ -1,12 +1,10 @@ -using System.Text.Json; - -namespace MeshWeaver.Messaging; +namespace MeshWeaver.Messaging; internal interface IMessageService : IAsyncDisposable { Address Address { get; } - public IDisposable Defer(Predicate deferredFilter); IMessageDelivery RouteMessageAsync(IMessageDelivery message, CancellationToken cancellationToken); IMessageDelivery? Post(TMessage message, PostOptions opt); - internal void Start(); + void Start(); + bool OpenGate(string name); } diff --git a/src/MeshWeaver.Messaging.Hub/InitializeHubRequest.cs b/src/MeshWeaver.Messaging.Hub/InitializeHubRequest.cs index d8517441a..9519f3ff7 100644 --- a/src/MeshWeaver.Messaging.Hub/InitializeHubRequest.cs +++ b/src/MeshWeaver.Messaging.Hub/InitializeHubRequest.cs @@ -4,4 +4,4 @@ /// Request to initialize a message hub during startup. /// Used to defer messages until initialization is complete. 
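// Illustrative sketch (not part of this change set): with deferred initialization enabled,
// InitializeHubRequest is no longer posted from the hub constructor; the owning component
// posts it once construction has finished, in the spirit of the LayoutAreaHost change above.
//
//     var configured = config.WithDeferredInitialization(); // hub waits for manual trigger
//     ...
//     hub.Post(new InitializeHubRequest());                 // kicks off initialization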
/// -public record InitializeHubRequest(IDisposable Deferral); +public record InitializeHubRequest(); diff --git a/src/MeshWeaver.Messaging.Hub/MessageHub.cs b/src/MeshWeaver.Messaging.Hub/MessageHub.cs index 67c179e25..ba0592e19 100644 --- a/src/MeshWeaver.Messaging.Hub/MessageHub.cs +++ b/src/MeshWeaver.Messaging.Hub/MessageHub.cs @@ -22,6 +22,8 @@ public void InvokeAsync(Func action, Func> callbacks = new(); private readonly HashSet pendingCallbackCancellations = new(); @@ -33,10 +35,18 @@ public void InvokeAsync(Func action, Func rules = new(); private readonly Lock messageHandlerRegistrationLock = new(); private readonly Lock typeRegistryLock = new(); - public MessageHub( IServiceProvider serviceProvider, HostedHubsCollection hostedHubs, @@ -80,9 +90,10 @@ public MessageHub( } Register(ExecuteRequest); Register(HandleCallbacks); - messageService.Start(); - Post(new InitializeHubRequest(Defer(Configuration.StartupDeferral))); + if (!configuration.DeferredInitialization) + Post(new InitializeHubRequest()); + } private IMessageDelivery HandlePingRequest(IMessageDelivery request) @@ -99,11 +110,11 @@ private async Task HandleInitialize(IMessageDelivery IMessageHub.HandleMessageAsync( IMessageDelivery delivery, CancellationToken cancellationToken @@ -339,110 +357,59 @@ private IMessageDelivery FinishDelivery(IMessageDelivery delivery) - public Task> AwaitResponse( - IMessageDelivery> request, CancellationToken cancellationToken) - { - var tcs = new TaskCompletionSource>(cancellationToken); - var callbackTask = RegisterCallback( - request.Id, - d => - { - tcs.SetResult((IMessageDelivery)d); - return d.Processed(); - }, - cancellationToken - ); - return callbackTask.ContinueWith(_ => tcs.Task.Result, cancellationToken); - } - public Task> AwaitResponse( - IRequest request, - CancellationToken cancellationToken - ) => AwaitResponse(request, x => x, x => x, cancellationToken); - - public Task> AwaitResponse( - IRequest request, - Func options - ) => - AwaitResponse( - request, - options, - new CancellationTokenSource(IMessageHub.DefaultTimeout).Token - ); - public Task> AwaitResponse( - IRequest request, - Func options, - CancellationToken cancellationToken - ) => AwaitResponse(request, options, x => x, cancellationToken); - public Task AwaitResponse( - IRequest request, - Func options, - Func, TResult> selector, - CancellationToken cancellationToken - ) - { - var id = Guid.NewGuid().AsString(); - var ret = AwaitResponse( - id, - selector, - cancellationToken - ); - Post(request, o => options.Invoke(o).WithMessageId(id)); - return ret; - } - public Task AwaitResponse( - IMessageDelivery request, - Func, TResult> selector, - CancellationToken cancellationToken - ) + public Task AwaitResponse(object r, Func options, Func selector, CancellationToken cancellationToken = default) { - var response = RegisterCallback( - request.Id, - d => d, - cancellationToken - ); - var task = response - .ContinueWith(t => - InnerCallback(request.Id, t.Result, selector), + // Check if r is already a delivery (in which case it's already posted) + if (r is IMessageDelivery existingDelivery) + { + var response = RegisterCallback( + existingDelivery.Id, + d => d, cancellationToken ); - return task; - } - public Task AwaitResponse( - string id, - Func, TResult> selector, - CancellationToken cancellationToken - ) - { - var response = RegisterCallback( - id, - d => d, - cancellationToken - ); - var task = response.ContinueWith(async t => - { - // Await the task to propagate the original exception - var result = await t; - 
return InnerCallback(id, result, selector); - }, cancellationToken).Unwrap(); + return response.ContinueWith(t => + { + var ret = t.Result; + return InnerCallback(existingDelivery.Id, ret, selector); + }, cancellationToken); + } + + // For new messages, we need to generate the ID first, register callback, THEN post + // to avoid race condition where response arrives before callback is registered + var messageId = Guid.NewGuid().AsString(); + var response2 = RegisterCallback(messageId, d => d, cancellationToken); + + // Now post the message with the pre-generated ID + var request = Post(r, opts => { + var configured = options(opts); + return configured.WithMessageId(messageId); + })!; + + var task = response2 + .ContinueWith(t => + { + var ret = t.Result; + return InnerCallback(request.Id, ret, selector); + }, + cancellationToken + ); return task; } - private TResult InnerCallback( + private object? InnerCallback( string id, IMessageDelivery response, - Func, TResult> selector) + Func selector) { try { - if (response is IMessageDelivery tResponse) - return selector.Invoke(tResponse); - throw new DeliveryFailureException($"Response for {id} was of unexpected type: {response}"); + return selector.Invoke(response); } catch (DeliveryFailureException) { @@ -569,8 +536,9 @@ CancellationToken cancellationToken { if (!callbacks.Remove(requestIdString, out myCallbacks)) { - logger.LogDebug("No callbacks found for {Id}", requestIdString); - return delivery; + logger.LogDebug("No callbacks found for response message {MessageType} (ID: {MessageId}) - treating as processed", + delivery.Message.GetType().Name, delivery.Id); + return delivery.Processed(); } } @@ -587,7 +555,7 @@ CancellationToken cancellationToken logger.LogTrace("MESSAGE_FLOW: HUB_CALLBACKS_COMPLETE | {MessageType} | Hub: {Address} | MessageId: {MessageId}", delivery.Message.GetType().Name, Address, delivery.Id); - return delivery; + return delivery.Processed(); } Address IMessageHub.Address => Address; @@ -700,6 +668,7 @@ public void Dispose() private void DisposeImpl() { + while (disposeActions.TryTake(out var disposeAction)) disposeAction.Invoke(this); @@ -891,8 +860,7 @@ private void CancelCallbacks() private readonly ConcurrentBag> asyncDisposeActions = new(); private readonly ConcurrentBag> disposeActions = new(); - public IDisposable Defer(Predicate deferredFilter) => - messageService.Defer(deferredFilter); + private readonly ConcurrentDictionary<(string Conext, Type Type), object?> properties = new(); diff --git a/src/MeshWeaver.Messaging.Hub/MessageHubConfiguration.cs b/src/MeshWeaver.Messaging.Hub/MessageHubConfiguration.cs index 8e991a0fb..99313adbc 100644 --- a/src/MeshWeaver.Messaging.Hub/MessageHubConfiguration.cs +++ b/src/MeshWeaver.Messaging.Hub/MessageHubConfiguration.cs @@ -9,6 +9,8 @@ namespace MeshWeaver.Messaging; public record MessageHubConfiguration { + public const string InitializeGateName = "Initialize"; + public Address Address { get; } protected readonly IServiceProvider? ParentServiceProvider; public MessageHubConfiguration(IServiceProvider? parentServiceProvider, Address address) @@ -20,10 +22,26 @@ public MessageHubConfiguration(IServiceProvider? 
parentServiceProvider, Address DeliveryPipeline = [UserServiceDeliveryPipeline]; } - internal Predicate StartupDeferral { get; init; } = x => x.Message is not InitializeHubRequest; - - public MessageHubConfiguration WithStartupDeferral(Predicate startupDeferral) - => this with { StartupDeferral = startupDeferral }; + /// + /// Named initialization gates that are created during hub initialization and can be opened by name. + /// The key is the gate name, the value is the predicate that determines which messages are allowed during initialization. + /// All other messages are deferred until the gate is opened. + /// The Initialize gate doesn't allow any additional messages - it's just a marker for when BuildupActions complete. + /// + internal ImmutableDictionary> InitializationGates { get; init; } = ImmutableDictionary>.Empty + .Add(InitializeGateName, d => d.Message is InitializeHubRequest); // Initialize gate doesn't allow any messages - just marks completion of BuildupActions + + /// + /// Adds a named initialization gate that will be created during hub initialization. + /// This ensures the gate is in place before any messages are processed. + /// Only messages matching the predicate will be allowed through during initialization. + /// All other messages will be deferred until the gate is opened via OpenGate(). + /// + /// Unique name for this initialization gate + /// Predicate that determines which messages are allowed during initialization (e.g. InitializeHubRequest, SetCurrentRequest) + /// Updated configuration + public MessageHubConfiguration WithInitializationGate(string name, Predicate? allowDuringInit = null) + => this with { InitializationGates = InitializationGates.SetItem(name, allowDuringInit ?? (_ => false)) }; public IMessageHub? ParentHub { @@ -185,9 +203,37 @@ private SyncPipelineConfig UserServicePostPipeline(SyncPipelineConfig syncPipeli }); } internal ImmutableList> DeliveryPipeline { get; set; } - internal long StartupTimeout { get; init; } - - public MessageHubConfiguration WithStartupTimeout(long timeout) => this with { StartupTimeout = timeout }; + internal TimeSpan? StartupTimeout { get; init; } //= new(0, 0, 30); // Default 10 seconds + internal TimeSpan RequestTimeout { get; init; } = new(0, 0, 30); + + /// + /// When true, the hub will not automatically post InitializeHubRequest during construction. + /// Manual initialization is required by posting InitializeHubRequest to the hub. + /// + internal bool DeferredInitialization { get; init; } + + /// + /// Sets the timeout allowed for startup + /// + /// + /// + public MessageHubConfiguration WithStartupTimeout(TimeSpan timeout) => this with { StartupTimeout = timeout }; + + /// + /// Sets the timeout for callbacks (AwaitResponse) + /// + /// + /// + public MessageHubConfiguration WithRequestTimeout(TimeSpan timeout) => this with { RequestTimeout = timeout }; + + /// + /// Enables deferred initialization. When enabled, the hub will not automatically post InitializeHubRequest + /// during construction. Manual initialization is required by posting InitializeHubRequest to the hub. + /// This is useful when the hub needs to be fully constructed before initialization can proceed. 
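// Illustrative configuration sketch (not part of this change set): combining the new gate
// and timeout APIs on MessageHubConfiguration. The gate name "DataLoaded" and the idea of
// opening it from a data-source callback are assumptions made for the example.
MessageHubConfiguration ConfigureWithGate(MessageHubConfiguration config) => config
    .WithInitializationGate("DataLoaded", d => d.Message is InitializeHubRequest)
    .WithStartupTimeout(TimeSpan.FromSeconds(30))
    .WithRequestTimeout(TimeSpan.FromSeconds(10));
// Later, once the guarded resource is ready, deferred on-target messages are released in
// FIFO order through the main buffer:
//     hub.OpenGate("DataLoaded"); // false if the gate was already opened or never registered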
+ /// + /// Whether to defer initialization (default: true) + /// Updated configuration + public MessageHubConfiguration WithDeferredInitialization(bool deferred = true) => this with { DeferredInitialization = deferred }; public MessageHubConfiguration AddDeliveryPipeline(Func pipeline) => this with { DeliveryPipeline = DeliveryPipeline.Add(pipeline) }; private AsyncPipelineConfig UserServiceDeliveryPipeline(AsyncPipelineConfig asyncPipeline) diff --git a/src/MeshWeaver.Messaging.Hub/MessageHubExtensions.cs b/src/MeshWeaver.Messaging.Hub/MessageHubExtensions.cs index e3355f35b..413e33d78 100644 --- a/src/MeshWeaver.Messaging.Hub/MessageHubExtensions.cs +++ b/src/MeshWeaver.Messaging.Hub/MessageHubExtensions.cs @@ -1,4 +1,5 @@ using System.Collections.Immutable; +using System.Text.Json; using System.Text.Json.Nodes; using MeshWeaver.Domain; using MeshWeaver.Messaging.Serialization; @@ -67,4 +68,56 @@ public static Address GetAddress(this IMessageHub hub, string address) return (Address)Activator.CreateInstance(type, [string.Join('/', split.Skip(1))])!; } + + /// + /// Sends a request deserialized from JSON and awaits the response. + /// This is useful when working with JSON-based messaging without direct type references. + /// + /// The message hub + /// The request object (deserialized from JSON) + /// Post options + /// Cancellation token + /// The response message + public static async Task AwaitResponse( + this IMessageHub hub, + object request, + Func options, + CancellationToken cancellationToken = default) + { + // If the request is a JsonElement, we need to deserialize it to the concrete type first + if (request is JsonElement jsonElement) + { + // Get the type discriminator from the JSON + if (!jsonElement.TryGetProperty("$type", out var typeElement)) + throw new InvalidOperationException("JSON request must have a '$type' property"); + + var typeName = typeElement.GetString(); + if (string.IsNullOrEmpty(typeName)) + throw new InvalidOperationException("'$type' property cannot be empty"); + + // Find the type in the type registry + var concreteType = hub.GetTypeRegistry().GetType(typeName); + if (concreteType == null) + concreteType = typeof(JsonElement); + + // Deserialize to the concrete type + request = JsonSerializer.Deserialize(jsonElement.GetRawText(), concreteType, hub.JsonSerializerOptions)!; + } + + // Find the IRequest interface to get the response type + var requestType = request.GetType(); + var requestInterface = requestType.GetInterfaces() + .FirstOrDefault(i => i.IsGenericType && i.GetGenericTypeDefinition() == typeof(IRequest<>)); + + + + // Create the result selector lambda: (IMessageDelivery d) => d.Message + var deliveryParam = System.Linq.Expressions.Expression.Parameter(typeof(IMessageDelivery), "d"); + var messageProperty = System.Linq.Expressions.Expression.Property(deliveryParam, "Message"); + var lambda = System.Linq.Expressions.Expression.Lambda>(messageProperty, deliveryParam); + var resultSelector = lambda.Compile(); + + var resultProperty = await hub.AwaitResponse(request, options, resultSelector, cancellationToken); + return resultProperty; + } } diff --git a/src/MeshWeaver.Messaging.Hub/MessageService.cs b/src/MeshWeaver.Messaging.Hub/MessageService.cs index 0f341ff5e..ada54bbe0 100644 --- a/src/MeshWeaver.Messaging.Hub/MessageService.cs +++ b/src/MeshWeaver.Messaging.Hub/MessageService.cs @@ -1,9 +1,9 @@ -using System.Diagnostics; +using System.Collections.Concurrent; +using System.Diagnostics; using System.Reflection; using System.Text.Json; using 
System.Text.Json.Nodes; using System.Threading.Tasks.Dataflow; -using Json.More; using Microsoft.Extensions.Logging; // ReSharper disable InconsistentlySynchronizedField @@ -14,46 +14,110 @@ public class MessageService : IMessageService private readonly ILogger logger; private readonly IMessageHub hub; private readonly BufferBlock>> buffer = new(); + private readonly BufferBlock>> deferredBuffer = new(); private readonly ActionBlock>> deliveryAction; private readonly BufferBlock> executionBuffer = new(); private readonly ActionBlock> executionBlock = new(f => f.Invoke(default)); private readonly HierarchicalRouting hierarchicalRouting; private readonly SyncDelivery postPipeline; private readonly AsyncDelivery deliveryPipeline; - private readonly DeferralContainer deferralContainer; private readonly CancellationTokenSource hangDetectionCts = new(); + private readonly ConcurrentDictionary> gates; + private readonly Lock gateStateLock = new(); + private readonly TaskCompletionSource startupCompletionSource = new(); + //private volatile int pendingStartupMessages; + private JsonSerializerOptions? loggingSerializerOptions; + private JsonSerializerOptions LoggingSerializerOptions => + loggingSerializerOptions ??= hub.CreateLoggingSerializerOptions(); public MessageService( Address address, ILogger logger, IMessageHub hub, IMessageHub? parentHub - ) + ) { Address = address; ParentHub = parentHub; this.logger = logger; this.hub = hub; - deferralContainer = new DeferralContainer(ScheduleExecution, ReportFailure); - deliveryAction = - new(x => x.Invoke()); - postPipeline = hub.Configuration.PostPipeline.Aggregate(new SyncPipelineConfig(hub, d => d), (p, c) => c.Invoke(p)).SyncDelivery; + deliveryAction = new(x => x.Invoke()); + postPipeline = hub.Configuration.PostPipeline + .Aggregate(new SyncPipelineConfig(hub, d => d), (p, c) => c.Invoke(p)).SyncDelivery; hierarchicalRouting = new HierarchicalRouting(hub, parentHub); - deliveryPipeline = hub.Configuration.DeliveryPipeline.Aggregate(new AsyncPipelineConfig(hub, (d, _) => Task.FromResult(deferralContainer.DeliverMessage(d))), (p, c) => c.Invoke(p)).AsyncDelivery; + deliveryPipeline = hub.Configuration.DeliveryPipeline + .Aggregate(new AsyncPipelineConfig(hub, (d, _) => Task.FromResult(ScheduleExecution(d))), + (p, c) => c.Invoke(p)).AsyncDelivery; + // Store gate names from configuration for tracking which gates are still open + gates = new(hub.Configuration.InitializationGates); + if (hub.Configuration.StartupTimeout is not null) + startupTimer = new(NotifyStartupFailure, null, hub.Configuration.StartupTimeout.Value, Timeout.InfiniteTimeSpan); } + + + private readonly Timer? startupTimer; + + void IMessageService.Start() { // Ensure the execution buffer is linked before we start processing executionBuffer.LinkTo(executionBlock, new DataflowLinkOptions { PropagateCompletion = true }); - // Link the delivery buffer to the action block immediately to avoid race conditions + // Link only the main buffer to the action block initially + // The deferred buffer will be linked when all gates are opened buffer.LinkTo(deliveryAction, new DataflowLinkOptions { PropagateCompletion = true }); + } + + private void NotifyStartupFailure(object? 
_) + { + // TODO V10: See that we respond to each message (31.10.2025, Roland Buergi) + throw new DeliveryFailureException( + $"Message hub {Address} failed to initialize in {hub.Configuration.StartupTimeout}"); + } + + public bool OpenGate(string name) + { + lock (gateStateLock) + { + if (gates.TryRemove(name, out _)) + { + logger.LogDebug("Opening initialization gate '{Name}' for hub {Address}. Closed gates {Gates}", name, + Address, gates.Keys); + + // If this was the last gate, link deferred buffer to main buffer and mark hub as started + // Use lock to ensure atomicity with ScheduleNotify checking gates.IsEmpty + if (gates.IsEmpty) + { + if (hub.RunLevel < MessageHubRunLevel.Started) + { + startupTimer?.Dispose(); + hub.Start(); + + // Link deferred buffer to main buffer to preserve FIFO order + // This creates a chain: deferredBuffer → buffer → deliveryAction + // All deferred messages will flow through the main buffer, ensuring they are + // processed before any new messages that arrive after the gate opens + logger.LogDebug("Linking deferred buffer to main buffer for hub {Address}", Address); + deferredBuffer.LinkTo(buffer, new DataflowLinkOptions { PropagateCompletion = false }); + + logger.LogInformation("Message hub {address} fully initialized (all gates opened)", Address); + } + } + + return true; + } + } + logger.LogDebug("Initialization gate '{Name}' not found in hub {Address} (may have already been opened)", name, + Address); + return false; } + + private IMessageDelivery ReportFailure(IMessageDelivery delivery) { logger.LogWarning("An exception occurred processing {MessageType} (ID: {MessageId}) in {Address}", @@ -64,7 +128,7 @@ private IMessageDelivery ReportFailure(IMessageDelivery delivery) { try { - var message = delivery.Properties.TryGetValue("Error", out var error) ? error?.ToString() : $"Message delivery failed in address {Address}d}}"; + var message = delivery.Properties.TryGetValue("Error", out var error) ? error.ToString() : $"Message delivery failed in address {Address}d}}"; Post(new DeliveryFailure(delivery, message), new PostOptions(Address).ResponseFor(delivery)); } catch (Exception ex) @@ -85,8 +149,6 @@ private IMessageDelivery ReportFailure(IMessageDelivery delivery) public Address Address { get; } public IMessageHub? 
ParentHub { get; } - public IDisposable Defer(Predicate deferredFilter) => - deferralContainer.Defer(deferredFilter); IMessageDelivery IMessageService.RouteMessageAsync(IMessageDelivery delivery, CancellationToken cancellationToken) => ScheduleNotify(delivery, cancellationToken); @@ -97,19 +159,25 @@ private IMessageDelivery ScheduleNotify(IMessageDelivery delivery, CancellationT delivery.Message.GetType().Name, Address, delivery.Id, delivery.Target); logger.LogDebug("Buffering message {MessageType} (ID: {MessageId}) in {Address}", - delivery.Message.GetType().Name, delivery.Id, Address); // Reset hang detection timer on activity (if not debugging and not already triggered) + delivery.Message.GetType().Name, delivery.Id, Address); logger.LogTrace("MESSAGE_FLOW: POSTING_TO_DELIVERY_PIPELINE | {MessageType} | Hub: {Address} | MessageId: {MessageId}", delivery.Message.GetType().Name, Address, delivery.Id); + + // Always buffer to the main buffer - deferral logic will be handled in NotifyAsync + // based on whether the message is actually targeted at this hub buffer.Post(() => NotifyAsync(delivery, cancellationToken)); + logger.LogTrace("MESSAGE_FLOW: SCHEDULE_NOTIFY_END | {MessageType} | Hub: {Address} | MessageId: {MessageId} | Result: Forwarded", delivery.Message.GetType().Name, Address, delivery.Id); return delivery.Forwarded(); } + private async Task NotifyAsync(IMessageDelivery delivery, CancellationToken cancellationToken) { var name = GetMessageType(delivery); - logger.LogDebug("MESSAGE_FLOW: NOTIFY_START | {MessageType} | Hub: {Address} | MessageId: {MessageId} | Target: {Target}", + logger.LogDebug( + "MESSAGE_FLOW: NOTIFY_START | {MessageType} | Hub: {Address} | MessageId: {MessageId} | Target: {Target}", name, Address, delivery.Id, delivery.Target); if (delivery.State != MessageDeliveryState.Submitted) @@ -119,17 +187,23 @@ private async Task NotifyAsync(IMessageDelivery delivery, Canc // For all other messages, wait for parent to be ready before routing if (ParentHub is not null) { - if (delivery.Target is HostedAddress ha && hub.Address.Equals(ha.Address) && ha.Host.Equals(ParentHub.Address)) + if (delivery.Target is HostedAddress ha && hub.Address.Equals(ha.Address) && + ha.Host.Equals(ParentHub.Address)) delivery = delivery.WithTarget(ha.Address); } - // Add current address to routing path - delivery = delivery.AddToRoutingPath(hub.Address); + // Add current address to routing path (only if not already present to handle deferred messages) + if (!delivery.RoutingPath.Contains(hub.Address)) + delivery = delivery.AddToRoutingPath(hub.Address); var isOnTarget = delivery.Target is null || delivery.Target.Equals(hub.Address); + + // Only defer messages that are targeted at this hub + // Messages being routed through should not be deferred if (isOnTarget) { + delivery = UnpackIfNecessary(delivery); logger.LogTrace("MESSAGE_FLOW: Unpacking message | {MessageType} | Hub: {Address} | MessageId: {MessageId}", name, Address, delivery.Id); @@ -140,15 +214,53 @@ private async Task NotifyAsync(IMessageDelivery delivery, Canc - logger.LogTrace("MESSAGE_FLOW: ROUTING_TO_HIERARCHICAL | {MessageType} | Hub: {Address} | MessageId: {MessageId} | Target: {Target}", + logger.LogTrace( + "MESSAGE_FLOW: ROUTING_TO_HIERARCHICAL | {MessageType} | Hub: {Address} | MessageId: {MessageId} | Target: {Target}", name, Address, delivery.Id, delivery.Target); delivery = await hierarchicalRouting.RouteMessageAsync(delivery, cancellationToken); - logger.LogTrace("MESSAGE_FLOW: HIERARCHICAL_ROUTING_RESULT | 
{MessageType} | Hub: {Address} | MessageId: {MessageId} | Result: {State}", + logger.LogTrace( + "MESSAGE_FLOW: HIERARCHICAL_ROUTING_RESULT | {MessageType} | Hub: {Address} | MessageId: {MessageId} | Result: {State}", name, Address, delivery.Id, delivery.State); if (isOnTarget) { - logger.LogTrace("MESSAGE_FLOW: ROUTING_TO_LOCAL_EXECUTION | {MessageType} | Hub: {Address} | MessageId: {MessageId}", + // Check if we need to defer this message - must check inside lock to avoid race with OpenGate + bool shouldDefer = !gates.IsEmpty; + if (shouldDefer) + { + lock (gateStateLock) + { + shouldDefer = !gates.IsEmpty; + if (shouldDefer) + { + // Check all gate predicates + foreach (var (gateName, allowDuringInit) in gates) + { + if (allowDuringInit(delivery)) + { + logger.LogDebug( + "Allowing message {MessageType} (ID: {MessageId}) through gate '{GateName}' for hub {Address}", + delivery.Message.GetType().Name, delivery.Id, gateName, Address); + shouldDefer = false; + break; + } + } + } + + // If we still need to defer, post to deferred buffer and return + if (shouldDefer) + { + logger.LogDebug("Deferring on-target message {MessageType} (ID: {MessageId}) in {Address}", + delivery.Message.GetType().Name, delivery.Id, Address); + deferredBuffer.Post(() => ProcessDeferredMessage(delivery, cancellationToken)); + return delivery.Forwarded(); + } + } + + } + + logger.LogTrace( + "MESSAGE_FLOW: ROUTING_TO_LOCAL_EXECUTION | {MessageType} | Hub: {Address} | MessageId: {MessageId}", name, Address, delivery.Id); return await deliveryPipeline.Invoke(delivery, cancellationToken); } @@ -172,6 +284,39 @@ private static string ExtractJsonType(string rawJsonContent) return "Unknown"; } + /// + /// Process a deferred message, bypassing the deferral check to prevent infinite loops + /// + private async Task ProcessDeferredMessage(IMessageDelivery delivery, CancellationToken cancellationToken) + { + logger.LogDebug("Processing deferred message {MessageType} (ID: {MessageId}) in {Address}", + delivery.Message.GetType().Name, delivery.Id, Address); + + // Add to routing path if not already present + if (!delivery.RoutingPath.Contains(hub.Address)) + delivery = delivery.AddToRoutingPath(hub.Address); + + var isOnTarget = delivery.Target is null || delivery.Target.Equals(hub.Address); + + // Skip deferral check - we're reprocessing after gates opened + if (isOnTarget) + { + delivery = UnpackIfNecessary(delivery); + + if (delivery.State == MessageDeliveryState.Failed) + return ReportFailure(delivery); + } + + delivery = await hierarchicalRouting.RouteMessageAsync(delivery, cancellationToken); + + if (isOnTarget) + { + return await deliveryPipeline.Invoke(delivery, cancellationToken); + } + + return delivery; + } + private readonly CancellationTokenSource cancellationTokenSource = new(); private IMessageDelivery ScheduleExecution(IMessageDelivery delivery) { @@ -196,6 +341,8 @@ private IMessageDelivery ScheduleExecution(IMessageDelivery delivery) { delivery = await hub.HandleMessageAsync(delivery, cancellationTokenSource.Token); + if (delivery.State == MessageDeliveryState.Ignored) + ReportFailure(delivery.WithProperty("Error", $"No handler found for delivery {delivery.Message.GetType().FullName}")); } else { @@ -254,7 +401,7 @@ private IMessageDelivery ScheduleExecution(IMessageDelivery delivery) var ret = PostImpl(message, opt); if (!ExcludedFromLogging.Contains(message.GetType())) logger.LogInformation("Posting message {Delivery} (ID: {MessageId}) in {Address}", - JsonSerializer.Serialize(ret, 
hub.JsonSerializerOptions), ret.Id, Address); + JsonSerializer.Serialize(ret, LoggingSerializerOptions), ret.Id, Address); return ret; } } @@ -271,11 +418,6 @@ private IMessageDelivery UnpackIfNecessary(IMessageDelivery delivery) return delivery.Failed($"Deserialization failed: {ex.Message}"); } - if (delivery.Message is JsonElement je) - { - return delivery.Failed($"Could not deserialize message {je.ToJsonString()}"); - } - return delivery; } private IMessageDelivery DeserializeDelivery(IMessageDelivery delivery) @@ -290,7 +432,16 @@ private IMessageDelivery DeserializeDelivery(IMessageDelivery delivery) } private IMessageDelivery PostImpl(object message, PostOptions opt) - => (IMessageDelivery)PostImplMethod.MakeGenericMethod(message.GetType()).Invoke(this, new[] { message, opt })!; + { + if (message is JsonElement je) + message = new RawJson(je.ToString()); + if (message is JsonNode jn) + message = new RawJson(jn.ToString()); + + return (IMessageDelivery)PostImplMethod.MakeGenericMethod(message.GetType()) + .Invoke(this, [message, opt])!; + + } private static readonly MethodInfo PostImplMethod = typeof(MessageService).GetMethod(nameof(PostImplGeneric), BindingFlags.Instance | BindingFlags.NonPublic)!; @@ -300,11 +451,6 @@ private IMessageDelivery PostImplGeneric(TMessage message, PostOptions if (message == null) throw new ArgumentNullException(nameof(message)); - if (typeof(TMessage) != message.GetType()) - return (IMessageDelivery)PostImplMethod - .MakeGenericMethod(message.GetType()) - .Invoke(this, [message, opt])!; - var delivery = new MessageDelivery(message, opt, hub.JsonSerializerOptions) { Id = opt.MessageId @@ -321,6 +467,11 @@ public async ValueTask DisposeAsync() { var totalStopwatch = Stopwatch.StartNew(); logger.LogInformation("Starting disposal of message service in {Address}", Address); + // Open all remaining initialization gates to release any buffered messages + foreach (var gateName in gates.Keys.ToArray()) + { + OpenGate(gateName); + } // Dispose hang detection timer first var hangDetectionStopwatch = Stopwatch.StartNew(); @@ -342,6 +493,7 @@ public async ValueTask DisposeAsync() var bufferStopwatch = Stopwatch.StartNew(); logger.LogDebug("Completing buffers for message service in {Address}", Address); buffer.Complete(); + deferredBuffer.Complete(); executionBuffer.Complete(); logger.LogDebug("Buffers completed in {elapsed}ms for {Address}", bufferStopwatch.ElapsedMilliseconds, Address); @@ -363,24 +515,13 @@ public async ValueTask DisposeAsync() { logger.LogError(ex, "Error during delivery completion after {elapsed}ms in {Address}", deliveryStopwatch.ElapsedMilliseconds, Address); - } // Don't wait for execution completion during disposal as this disposal itself + } + + // Don't wait for execution completion during disposal as this disposal itself // runs as an execution and might cause deadlocks waiting for itself logger.LogDebug("Skipping execution completion wait during disposal for {Address}", Address); - // Wait for startup processing to complete before disposing deferrals - var deferralsStopwatch = Stopwatch.StartNew(); - try - { - logger.LogDebug("Awaiting finishing deferrals in {Address}", Address); - await deferralContainer.DisposeAsync(); - logger.LogDebug("Deferrals completed successfully in {elapsed}ms for {Address}", - deferralsStopwatch.ElapsedMilliseconds, Address); - } - catch (Exception ex) - { - logger.LogError(ex, "Error during deferrals disposal after {elapsed}ms in {Address}", - deferralsStopwatch.ElapsedMilliseconds, Address); - } // Complete 
the startup task if it's still pending + // Complete the startup task if it's still pending try { if (!startupCompletionSource.Task.IsCompleted) diff --git a/src/MeshWeaver.Messaging.Hub/Serialization/LoggingTypeInfoResolver.cs b/src/MeshWeaver.Messaging.Hub/Serialization/LoggingTypeInfoResolver.cs new file mode 100644 index 000000000..e9d44b02d --- /dev/null +++ b/src/MeshWeaver.Messaging.Hub/Serialization/LoggingTypeInfoResolver.cs @@ -0,0 +1,51 @@ +using System.Reflection; +using System.Text.Json; +using System.Text.Json.Serialization.Metadata; + +namespace MeshWeaver.Messaging.Serialization; + +/// +/// A custom JSON type info resolver that filters out properties marked with [PreventLogging] attribute. +/// This resolver wraps an existing resolver and removes properties that should not appear in logs. +/// +public class LoggingTypeInfoResolver : IJsonTypeInfoResolver +{ + private readonly IJsonTypeInfoResolver _innerResolver; + + public LoggingTypeInfoResolver(IJsonTypeInfoResolver innerResolver) + { + _innerResolver = innerResolver ?? throw new ArgumentNullException(nameof(innerResolver)); + } + + public JsonTypeInfo? GetTypeInfo(Type type, JsonSerializerOptions options) + { + var typeInfo = _innerResolver.GetTypeInfo(type, options); + + if (typeInfo?.Kind == JsonTypeInfoKind.Object && typeInfo.Properties.Count > 0) + { + // Find properties to remove (can't modify during enumeration) + var propertiesToRemove = typeInfo.Properties + .Where(ShouldExcludeFromLogging) + .ToList(); + + // Remove properties marked with [PreventLogging] + foreach (var property in propertiesToRemove) + { + typeInfo.Properties.Remove(property); + } + } + + return typeInfo; + } + + private static bool ShouldExcludeFromLogging(JsonPropertyInfo propertyInfo) + { + // Check if the underlying property/field has [PreventLogging] attribute + if (propertyInfo.AttributeProvider is MemberInfo memberInfo) + { + return memberInfo.GetCustomAttribute(inherit: true) != null; + } + + return false; + } +} diff --git a/src/MeshWeaver.Messaging.Hub/SerializationExtensions.cs b/src/MeshWeaver.Messaging.Hub/SerializationExtensions.cs index 24ee2fac9..12d76242e 100644 --- a/src/MeshWeaver.Messaging.Hub/SerializationExtensions.cs +++ b/src/MeshWeaver.Messaging.Hub/SerializationExtensions.cs @@ -2,6 +2,7 @@ using System.Text.Json; using System.Text.Json.Nodes; using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; using MeshWeaver.Domain; using MeshWeaver.Messaging.Serialization; using Microsoft.Extensions.DependencyInjection; @@ -87,4 +88,24 @@ private static SerializationConfiguration CreateSerializationConfiguration(IMess }); } + /// + /// Creates a JsonSerializerOptions configured for logging purposes. + /// This wraps the hub's standard serializer options with a LoggingTypeInfoResolver + /// that filters out properties marked with [PreventLogging] attribute. + /// + public static JsonSerializerOptions CreateLoggingSerializerOptions(this IMessageHub hub) + { + var baseOptions = hub.JsonSerializerOptions; + + // Create new options that copy settings from base options + var loggingOptions = new JsonSerializerOptions(baseOptions); + + // Wrap the existing TypeInfoResolver with LoggingTypeInfoResolver + loggingOptions.TypeInfoResolver = new LoggingTypeInfoResolver( + baseOptions.TypeInfoResolver ?? 
new DefaultJsonTypeInfoResolver() + ); + + return loggingOptions; + } + } diff --git a/templates/MeshWeaverApp1.Portal/Program.cs b/templates/MeshWeaverApp1.Portal/Program.cs index da4d79c3a..713616e20 100644 --- a/templates/MeshWeaverApp1.Portal/Program.cs +++ b/templates/MeshWeaverApp1.Portal/Program.cs @@ -15,7 +15,7 @@ .ConfigureWebPortal() .ConfigurePortalMesh() .UseMonolithMesh() - .ConfigureServices(services => services.AddContentCollections()) + .ConfigureHub(hub => hub.AddContentCollections()) ); var app = builder.Build(); diff --git a/test/MeshWeaver.AI.Test/ContentPluginTest.cs b/test/MeshWeaver.AI.Test/ContentPluginTest.cs new file mode 100644 index 000000000..e724aad90 --- /dev/null +++ b/test/MeshWeaver.AI.Test/ContentPluginTest.cs @@ -0,0 +1,320 @@ +using System; +using System.IO; +using System.Linq; +using System.Text; +using System.Threading.Tasks; +using ClosedXML.Excel; +using FluentAssertions; +using MeshWeaver.AI.Plugins; +using MeshWeaver.ContentCollections; +using MeshWeaver.Fixture; +using MeshWeaver.Messaging; +using Xunit; + +namespace MeshWeaver.AI.Test; + +/// +/// Tests for ContentPlugin functionality, specifically the GetContent method with Excel support +/// +public class ContentPluginTest(ITestOutputHelper output) : HubTestBase(output), IAsyncLifetime +{ + private const string TestCollectionName = "test-collection"; + private const string TestExcelFileName = "test.xlsx"; + private const string TestTextFileName = "test.txt"; + private readonly string collectionBasePath = Path.Combine(Path.GetTempPath(), $"CollectionPluginTest_{Guid.NewGuid()}"); + + /// + /// Initialize the test + /// + public override async ValueTask InitializeAsync() + { + await base.InitializeAsync(); + + // Create directory for test files + Directory.CreateDirectory(collectionBasePath); + + // Create test Excel file with empty cells at the start + CreateTestExcelFile(); + + // Create test text file + await CreateTestTextFile(); + } /// + /// Dispose the test + /// + public override async ValueTask DisposeAsync() + { + // Clean up test files and directory + if (Directory.Exists(collectionBasePath)) + { + try + { + Directory.Delete(collectionBasePath, true); + } + catch + { + // Ignore cleanup errors + } + } + + await base.DisposeAsync(); + } + + /// + /// Creates a test Excel file with multiple worksheets and empty cells at the start of rows + /// + private void CreateTestExcelFile() + { + using var wb = new XLWorkbook(); + + // Create first worksheet with data including null cells at the start + var ws1 = wb.Worksheets.Add("Sheet1"); + + // Header row with empty cells at start + ws1.Cell(1, 1).Value = ""; // Empty cell + ws1.Cell(1, 2).Value = ""; // Empty cell + ws1.Cell(1, 3).Value = "ID"; + ws1.Cell(1, 4).Value = "Name"; + ws1.Cell(1, 5).Value = "Value"; + + // Data rows with empty cells + ws1.Cell(2, 1).Value = ""; // Empty + ws1.Cell(2, 2).Value = ""; // Empty + ws1.Cell(2, 3).Value = "1"; + ws1.Cell(2, 4).Value = "Item A"; + ws1.Cell(2, 5).Value = "100"; + + ws1.Cell(3, 1).Value = ""; // Empty + ws1.Cell(3, 2).Value = ""; // Empty + ws1.Cell(3, 3).Value = "2"; + ws1.Cell(3, 4).Value = ""; // Empty cell in middle + ws1.Cell(3, 5).Value = "200"; + + // Add more rows for testing row limiting + for (int i = 4; i <= 30; i++) + { + ws1.Cell(i, 1).Value = ""; + ws1.Cell(i, 2).Value = ""; + ws1.Cell(i, 3).Value = i - 1; + ws1.Cell(i, 4).Value = $"Item {(char)('A' + i - 2)}"; + ws1.Cell(i, 5).Value = i * 100; + } + + // Create second worksheet with minimal data + var ws2 = 
wb.Worksheets.Add("Sheet2"); + ws2.Cell(1, 1).Value = "Column1"; + ws2.Cell(1, 2).Value = "Column2"; + ws2.Cell(2, 1).Value = "Value1"; + ws2.Cell(2, 2).Value = "Value2"; + + var filePath = Path.Combine(collectionBasePath!, TestExcelFileName); + wb.SaveAs(filePath); + } + + /// + /// Creates a test text file with multiple lines + /// + private async Task CreateTestTextFile() + { + var sb = new StringBuilder(); + for (int i = 1; i <= 50; i++) + { + sb.AppendLine($"Line {i}"); + } + + var filePath = Path.Combine(collectionBasePath!, TestTextFileName); + await File.WriteAllTextAsync(filePath, sb.ToString()); + } + + /// + /// Tests that GetContent preserves null values in Excel files with empty cells at the start of rows + /// + [Fact] + public async Task GetFile_ExcelWithEmptyCellsAtStart_ShouldPreserveNulls() + { + // arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // act + var result = await plugin.GetContent(TestExcelFileName, TestCollectionName, cancellationToken: TestContext.Current.CancellationToken); + + // assert + result.Should().NotBeNullOrEmpty(); + result.Should().Contain("## Sheet: Sheet1"); + result.Should().Contain("## Sheet: Sheet2"); + + // Verify markdown table structure with column headers + result.Should().Contain("| Row | A | B | C | D | E |"); + + // Verify that empty cells at the start show as empty in the table + var lines = result.Split('\n', StringSplitOptions.RemoveEmptyEntries); + var headerLine = lines.FirstOrDefault(l => l.Contains("ID") && l.Contains("Name")); + headerLine.Should().NotBeNull(); + headerLine.Should().Contain("| 1 | | | ID | Name | Value |"); + + var secondDataLine = lines.FirstOrDefault(l => l.Contains("Item A")); + secondDataLine.Should().NotBeNull(); + secondDataLine.Should().Contain("| 2 | | | 1 | Item A | 100 |"); + + // Verify empty cell in the middle (row 3 has empty Name column) + var thirdDataLine = lines.FirstOrDefault(l => l.Contains("| 3 |")); + thirdDataLine.Should().NotBeNull(); + thirdDataLine.Should().Contain("| 3 | | | 2 | | 200 |"); + } + + /// + /// Tests that GetContent with numberOfRows parameter limits Excel file output + /// + [Fact] + public async Task GetFile_ExcelWithNumberOfRows_ShouldLimitRows() + { + // arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + const int rowLimit = 5; + + // act + var result = await plugin.GetContent(TestExcelFileName, TestCollectionName, numberOfRows: rowLimit, cancellationToken: TestContext.Current.CancellationToken); + + // assert + result.Should().NotBeNullOrEmpty(); + result.Should().Contain("## Sheet: Sheet1"); + + // Count the number of data rows in the markdown table (excluding header and separator) + var lines = result.Split('\n', StringSplitOptions.RemoveEmptyEntries); + var dataLines = lines + .SkipWhile(l => !l.Contains("| Row |")) + .Skip(2) // Skip header and separator + .TakeWhile(l => l.StartsWith("|") && !l.Contains("## Sheet:")) + .ToList(); + + dataLines.Count.Should().Be(rowLimit); + + // Verify it still has the markdown table structure + result.Should().Contain("| Row | A | B | C | D | E |"); + } + + /// + /// Tests that GetContent with numberOfRows parameter limits text file output + /// + [Fact] + public async Task GetFile_TextWithNumberOfRows_ShouldLimitRows() + { + // arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + const int rowLimit = 10; + + // act + var result = await plugin.GetContent(TestTextFileName, TestCollectionName, numberOfRows: rowLimit, cancellationToken: 
TestContext.Current.CancellationToken); + + // assert + result.Should().NotBeNullOrEmpty(); + + var lines = result.Split('\n', StringSplitOptions.RemoveEmptyEntries) + .Select(l => l.Trim()) + .ToArray(); + lines.Length.Should().Be(rowLimit); + + lines[0].Should().Be("Line 1"); + lines[9].Should().Be("Line 10"); + } + + /// + /// Tests that GetContent without numberOfRows parameter reads entire text file + /// + [Fact] + public async Task GetFile_TextWithoutNumberOfRows_ShouldReadEntireFile() + { + // arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // act + var result = await plugin.GetContent(TestTextFileName, TestCollectionName, cancellationToken: TestContext.Current.CancellationToken); + + // assert + result.Should().NotBeNullOrEmpty(); + + var lines = result.Split('\n', StringSplitOptions.RemoveEmptyEntries) + .Select(l => l.Trim()) + .ToArray(); + lines.Length.Should().Be(50); + + lines[0].Should().Be("Line 1"); + lines[49].Should().Be("Line 50"); + } + + /// + /// Tests that GetContent without numberOfRows parameter reads entire Excel file + /// + [Fact] + public async Task GetFile_ExcelWithoutNumberOfRows_ShouldReadEntireFile() + { + // arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // act + var result = await plugin.GetContent(TestExcelFileName, TestCollectionName, cancellationToken: TestContext.Current.CancellationToken); + + // assert + result.Should().NotBeNullOrEmpty(); + result.Should().Contain("## Sheet: Sheet1"); + result.Should().Contain("## Sheet: Sheet2"); + + // Should have all 30 rows from Sheet1 in the markdown table + var lines = result.Split('\n', StringSplitOptions.RemoveEmptyEntries); + var sheet1DataLines = lines + .SkipWhile(l => !l.Contains("## Sheet: Sheet1")) + .SkipWhile(l => !l.Contains("| Row |")) + .Skip(2) // Skip header and separator + .TakeWhile(l => l.StartsWith("|") && !l.Contains("## Sheet:")) + .ToList(); + + sheet1DataLines.Count.Should().Be(30); + } + + /// + /// Tests that GetContent handles non-existent collection + /// + [Fact] + public async Task GetFile_NonExistentCollection_ShouldReturnErrorMessage() + { + // arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // act + var result = await plugin.GetContent("test.xlsx", "non-existent-collection", cancellationToken: TestContext.Current.CancellationToken); + + // assert + result.Should().Contain("Collection 'non-existent-collection' not found"); + } + + /// + /// Tests that GetContent handles non-existent file + /// + [Fact] + public async Task GetFile_NonExistentFile_ShouldReturnErrorMessage() + { + // arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // act + var result = await plugin.GetContent("non-existent.xlsx", TestCollectionName, cancellationToken: TestContext.Current.CancellationToken); + + // assert + result.Should().Contain("File 'non-existent.xlsx' not found"); + } + + /// + /// Configuration for test client + /// + protected override MessageHubConfiguration ConfigureClient(MessageHubConfiguration configuration) + { + return base.ConfigureClient(configuration) + .AddFileSystemContentCollection(TestCollectionName, _ => collectionBasePath); + } +} diff --git a/test/MeshWeaver.Data.Test/SerializationAndSchemaTest.cs b/test/MeshWeaver.Data.Test/SerializationAndSchemaTest.cs index 0b961d75f..22de55c78 100644 --- a/test/MeshWeaver.Data.Test/SerializationAndSchemaTest.cs +++ b/test/MeshWeaver.Data.Test/SerializationAndSchemaTest.cs @@ -245,8 +245,8 @@ public 
async Task GetSchemaRequest_ShouldHandleEnumTypes() ); // assert var schemaResponse = response.Message.Should().BeOfType().Which; var schemaJson = JsonDocument.Parse(schemaResponse.Schema); - var properties = FindPropertiesInSchema(schemaJson); - + var properties = FindPropertiesInSchema(schemaJson); + var statusProperty = properties.GetProperty("status"); // Check if the status property has enum values (the key enum feature) @@ -371,7 +371,9 @@ public async Task GetDomainTypesRequest_ShouldIncludeAllRegisteredTypes() public async Task DataSerialization_ShouldPreserveComplexObjects() { // arrange - var client = GetClient(); var testData = new SerializationTestData( + var client = GetClient(); + + var testData = new SerializationTestData( name: "Serialization Test", nullableNumber: null, createdAt: DateTime.UtcNow, @@ -382,7 +384,7 @@ public async Task DataSerialization_ShouldPreserveComplexObjects() // act var response = await client.AwaitResponse( - DataChangeRequest.Update(new object[] { testData }), + DataChangeRequest.Update([testData]), o => o.WithTarget(new ClientAddress()), new CancellationTokenSource(TimeSpan.FromSeconds(10)).Token ); diff --git a/test/MeshWeaver.Data.TestDomain/TestHubSetup.cs b/test/MeshWeaver.Data.TestDomain/TestHubSetup.cs index 82e025175..f0e98e6e3 100644 --- a/test/MeshWeaver.Data.TestDomain/TestHubSetup.cs +++ b/test/MeshWeaver.Data.TestDomain/TestHubSetup.cs @@ -52,6 +52,15 @@ this MessageHubConfiguration configuration public const string CashflowImportFormat = nameof(CashflowImportFormat); + public static MessageHubConfiguration ConfigureImportRouter(this MessageHubConfiguration config) + => config.WithRoutes(forward => + forward + .RouteAddressToHostedHub(c => c.ConfigureReferenceDataModel()) + .RouteAddressToHostedHub(c => + c.ConfigureTransactionalModel((TransactionalDataAddress)c.Address)) + .RouteAddressToHostedHub(c => c.ConfigureComputedModel()) + .RouteAddressToHostedHub(c => c.ConfigureImportHub()) + ); public static MessageHubConfiguration ConfigureImportHub( this MessageHubConfiguration config ) => @@ -60,9 +69,11 @@ this MessageHubConfiguration config data.AddPartitionedHubSource( c => c.WithType(td => new TransactionalDataAddress(td.Year, td.BusinessUnit)) + .InitializingPartitions(new TransactionalDataAddress(2024, "1"), new TransactionalDataAddress(2024, "2")) ) .AddPartitionedHubSource( c => c.WithType(cd => new(cd.Year, cd.BusinessUnit)) + .InitializingPartitions(new ComputedDataAddress(2024, "1"), new ComputedDataAddress(2024, "2")) ) .AddHubSource( new ReferenceDataAddress(), @@ -79,7 +90,7 @@ this MessageHubConfiguration config format => format.WithAutoMappings().WithImportFunction(ImportFunction) ) ); - + private static EntityStore ImportFunction( ImportRequest request, diff --git a/test/MeshWeaver.Data.TestDomain/TransactionalData.cs b/test/MeshWeaver.Data.TestDomain/TransactionalData.cs index 81f9a3f5e..c4f24aa52 100644 --- a/test/MeshWeaver.Data.TestDomain/TransactionalData.cs +++ b/test/MeshWeaver.Data.TestDomain/TransactionalData.cs @@ -22,5 +22,5 @@ public record BusinessUnit([property: Key] string SystemName, string DisplayName public record ImportAddress(int Year) : Address(nameof(ImportAddress), Year.ToString()); public record ReferenceDataAddress() : Address(nameof(ReferenceDataAddress), "1"); -public record ComputedDataAddress(int Year, string BusinessUnit) : Address(nameof(ComputedDataAddress), $"{Year}/{BusinessUnit}"); -public record TransactionalDataAddress(int Year, string BusinessUnit) : 
Address(nameof(TransactionalData), $"{Year}/{BusinessUnit}"); +public record ComputedDataAddress(int Year, string BusinessUnit) : Address(nameof(ComputedDataAddress), $"{Year}-{BusinessUnit}"); +public record TransactionalDataAddress(int Year, string BusinessUnit) : Address(nameof(TransactionalData), $"{Year}-{BusinessUnit}"); diff --git a/test/MeshWeaver.Hosting.Monolith.Test/NotebookConnectionTest.cs b/test/MeshWeaver.Hosting.Monolith.Test/NotebookConnectionTest.cs index c2a0f5ae5..f143fdf31 100644 --- a/test/MeshWeaver.Hosting.Monolith.Test/NotebookConnectionTest.cs +++ b/test/MeshWeaver.Hosting.Monolith.Test/NotebookConnectionTest.cs @@ -86,8 +86,9 @@ public async Task LayoutAreas() var control = await stream .GetControlStream(area.ToString()!) - .Timeout(5.Seconds()) - .FirstAsync(x => x != null); + .Where(x => x != null) + .Timeout(10.Seconds()) + .FirstAsync(); var md = control.Should().BeOfType() diff --git a/test/MeshWeaver.Import.Test/CollectionPluginImportTest.cs b/test/MeshWeaver.Import.Test/CollectionPluginImportTest.cs new file mode 100644 index 000000000..88a37519b --- /dev/null +++ b/test/MeshWeaver.Import.Test/CollectionPluginImportTest.cs @@ -0,0 +1,231 @@ +using System; +using System.IO; +using System.Linq; +using System.Reactive.Linq; +using System.Threading.Tasks; +using FluentAssertions; +using FluentAssertions.Extensions; +using MeshWeaver.AI.Plugins; +using MeshWeaver.ContentCollections; +using MeshWeaver.Data; +using MeshWeaver.Data.TestDomain; +using MeshWeaver.Fixture; +using MeshWeaver.Messaging; +using Microsoft.Extensions.DependencyInjection; +using Xunit; + +namespace MeshWeaver.Import.Test; + +public class CollectionPluginImportTest(ITestOutputHelper output) : HubTestBase(output) +{ + private readonly string _testFilesPath = Path.Combine(AppContext.BaseDirectory, "TestFiles", "CollectionPluginImport"); + + protected override MessageHubConfiguration ConfigureRouter(MessageHubConfiguration configuration) + { + // Ensure test directory and file exist + Directory.CreateDirectory(_testFilesPath); + var csvContent = @"@@LineOfBusiness +SystemName,DisplayName +1,LoB 1 +2,LoB 2 +3,LoB 3"; + File.WriteAllText(Path.Combine(_testFilesPath, "test-data.csv"), csvContent); + + return base.ConfigureRouter(configuration) + .WithTypes(typeof(ImportAddress), typeof(ImportRequest), typeof(CollectionSource)) + .AddContentCollections() + .AddFileSystemContentCollection("TestCollection", _ => _testFilesPath) + .ConfigureImportRouter(); + } + + [Fact] + public async Task CollectionPlugin_Import_ShouldImportSuccessfully() + { + // Arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // Act + var result = await plugin.Import( + path: "test-data.csv", + collection: "TestCollection", + address: new ImportAddress(2024), + format: null, // Use default format + cancellationToken: TestContext.Current.CancellationToken + ); + + // Assert + result.Should().Contain("succeeded", "import should succeed"); + result.Should().NotContain("Error", "there should be no errors"); + + // Verify data was imported + var referenceDataHub = Router.GetHostedHub(new ReferenceDataAddress()); + var workspace = referenceDataHub.ServiceProvider.GetRequiredService(); + + var allData = await workspace.GetObservable() + .Timeout(10.Seconds()) + .FirstAsync(x => x.Count >= 3); + + allData.Should().HaveCount(3); + var items = allData.OrderBy(x => x.SystemName).ToList(); + items[0].DisplayName.Should().Be("LoB 1"); + items[1].DisplayName.Should().Be("LoB 2"); + 
items[2].DisplayName.Should().Be("LoB 3"); + } + + [Fact] + public async Task CollectionPlugin_Import_WithNonExistentFile_ShouldFail() + { + // Arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // Act + var result = await plugin.Import( + path: "non-existent.csv", + collection: "TestCollection", + address: new ImportAddress(2024), + format: null, + cancellationToken: TestContext.Current.CancellationToken + ); + + // Assert + result.Should().Contain("failed", "import should fail for non-existent file"); + } + + [Fact] + public async Task CollectionPlugin_Import_WithNonExistentCollection_ShouldFail() + { + // Arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // Act + var result = await plugin.Import( + path: "test-data.csv", + collection: "NonExistentCollection", + address: new ImportAddress(2024), + format: null, + cancellationToken: TestContext.Current.CancellationToken + ); + + // Assert + result.Should().Contain("Error", "import should fail for non-existent collection"); + } + + [Fact] + public async Task CollectionPlugin_Import_WithMissingCollection_ShouldReturnError() + { + // Arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // Act + var result = await plugin.Import( + path: "test-data.csv", + collection: null, + address: "ImportAddress/2024", + format: null, + cancellationToken: TestContext.Current.CancellationToken + ); + + // Assert + result.Should().Contain("Collection name is required"); + } + + [Fact] + public async Task CollectionPlugin_Import_WithMissingAddress_ShouldReturnError() + { + // Arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // Act + var result = await plugin.Import( + path: "test-data.csv", + collection: "TestCollection", + address: null, + format: null, + cancellationToken: TestContext.Current.CancellationToken + ); + + // Assert + result.Should().Contain("Target address is required"); + } + + [Fact] + public async Task CollectionPlugin_Import_WithCustomFormat_ShouldImportSuccessfully() + { + // Arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // Act + var result = await plugin.Import( + path: "test-data.csv", + collection: "TestCollection", + address: new ImportAddress(2024), + format: "Default", // Explicit format + cancellationToken: TestContext.Current.CancellationToken + ); + + // Assert + result.Should().Contain("succeeded", "import should succeed with explicit format"); + + // Verify data was imported + var referenceDataHub = Router.GetHostedHub(new ReferenceDataAddress()); + var workspace = referenceDataHub.ServiceProvider.GetRequiredService(); + + var allData = await workspace.GetObservable() + .Timeout(10.Seconds()) + .FirstAsync(x => x.Count >= 3); + + allData.Should().HaveCount(3); + } + + [Fact] + public async Task CollectionPlugin_Import_WithConfiguration_ShouldImportWithoutFormat() + { + // Arrange + var client = GetClient(); + var plugin = new ContentPlugin(client); + + // Create a configuration JSON that is not registered as a format + var configurationJson = @"{ + ""$type"": ""MeshWeaver.Import.Configuration.ImportConfiguration"", + ""name"": ""test-config-not-registered"", + ""entityId"": ""2024"" + }"; + + // Act + var result = await plugin.Import( + path: "test-data.csv", + collection: "TestCollection", + address: new ImportAddress(2024), + format: null, + configuration: configurationJson, + cancellationToken: TestContext.Current.CancellationToken + ); + + // Assert + // The import 
+        // The import should succeed even though "test-config-not-registered" is not a registered format
+        // This demonstrates that when configuration is provided, it bypasses format resolution
+        result.Should().Contain("succeeded", "import should succeed with configuration even if not registered as format");
+        result.Should().NotContain("Error", "there should be no errors");
+        result.Should().NotContain("Unknown format", "should not try to resolve configuration as format");
+
+        // Verify data was imported
+        var referenceDataHub = Router.GetHostedHub(new ReferenceDataAddress());
+        var workspace = referenceDataHub.ServiceProvider.GetRequiredService<IWorkspace>();
+
+        var allData = await workspace.GetObservable<LineOfBusiness>()
+            .Timeout(10.Seconds())
+            .FirstAsync(x => x.Count >= 3);
+
+        allData.Should().HaveCount(3);
+        var items = allData.OrderBy(x => x.SystemName).ToList();
+        items[0].DisplayName.Should().Be("LoB 1");
+        items[1].DisplayName.Should().Be("LoB 2");
+        items[2].DisplayName.Should().Be("LoB 3");
+    }
+}
diff --git a/test/MeshWeaver.Import.Test/CollectionSourceImportTest.cs b/test/MeshWeaver.Import.Test/CollectionSourceImportTest.cs
new file mode 100644
index 000000000..46d7e9f94
--- /dev/null
+++ b/test/MeshWeaver.Import.Test/CollectionSourceImportTest.cs
@@ -0,0 +1,147 @@
+using System;
+using System.IO;
+using System.Linq;
+using System.Reactive.Linq;
+using System.Threading;
+using System.Threading.Tasks;
+using FluentAssertions;
+using FluentAssertions.Extensions;
+using MeshWeaver.ContentCollections;
+using MeshWeaver.Data;
+using MeshWeaver.Data.TestDomain;
+using MeshWeaver.Fixture;
+using MeshWeaver.Messaging;
+using Microsoft.Extensions.DependencyInjection;
+using Xunit;
+
+namespace MeshWeaver.Import.Test;
+
+public class CollectionSourceImportTest(ITestOutputHelper output) : HubTestBase(output)
+{
+    private readonly string _testFilesPath = Path.Combine(AppContext.BaseDirectory, "TestFiles", "CollectionSource");
+
+    protected override MessageHubConfiguration ConfigureRouter(MessageHubConfiguration configuration)
+    {
+        // Ensure test directory and file exist
+        Directory.CreateDirectory(_testFilesPath);
+        var csvContent = @"@@LineOfBusiness
+SystemName,DisplayName
+1,LoB 1
+2,LoB 2
+3,LoB 3";
+        File.WriteAllText(Path.Combine(_testFilesPath, "test-data.csv"), csvContent);
+
+        return base.ConfigureRouter(configuration)
+            .AddContentCollections()
+            .AddFileSystemContentCollection("TestCollection", _ => _testFilesPath)
+            .ConfigureImportRouter();
+    }
+
+    [Fact]
+    public async Task ImportFromCollectionSource_ShouldResolveAndImportSuccessfully()
+    {
+        // Arrange
+        var client = GetClient();
+
+        // Create ImportRequest with CollectionSource - stream will be resolved automatically
+        var importRequest = new ImportRequest(new CollectionSource("TestCollection", "test-data.csv"));
+
+        // Act
+        var importResponse = await client.AwaitResponse(
+            importRequest,
+            o => o.WithTarget(new ImportAddress(2024)),
+            TestContext.Current.CancellationToken
+        );
+
+        // Assert
+        importResponse.Message.Log.Status.Should().Be(ActivityStatus.Succeeded);
+
+        // Verify data was imported
+        var referenceDataHub = Router.GetHostedHub(new ReferenceDataAddress());
+        var workspace = referenceDataHub.ServiceProvider.GetRequiredService<IWorkspace>();
+
+        var allData = await workspace.GetObservable<LineOfBusiness>()
+            .Timeout(10.Seconds())
+            .FirstAsync(x => x.Count >= 3);
+
+        allData.Should().HaveCount(3);
+        var items = allData.OrderBy(x => x.SystemName).ToList();
+        items[0].DisplayName.Should().Be("LoB 1");
+        items[1].DisplayName.Should().Be("LoB 2");
items[2].DisplayName.Should().Be("LoB 3"); + } + + [Fact] + public async Task ImportFromCollectionSource_WithSubfolder_ShouldResolveAndImportSuccessfully() + { + // Arrange + var client = GetClient(); + + // Test with path without leading slash (file is in root of collection) + var importRequest = new ImportRequest(new CollectionSource("TestCollection", "test-data.csv")); + + // Act + var token = CancellationTokenSource.CreateLinkedTokenSource(TestContext.Current.CancellationToken, + new CancellationTokenSource(5.Seconds()).Token).Token; + var importResponse = await client.AwaitResponse( + importRequest, + o => o.WithTarget(new ImportAddress(2024)), + token + ); + + // Assert + importResponse.Message.Log.Status.Should().Be(ActivityStatus.Succeeded); + + // Verify data was imported + var referenceDataHub = Router.GetHostedHub(new ReferenceDataAddress()); + var workspace = referenceDataHub.ServiceProvider.GetRequiredService(); + + var allData = await workspace.GetObservable() + .Timeout(10.Seconds()) + .FirstAsync(x => x.Count >= 3); + + allData.Should().HaveCount(3); + } + + [Fact] + public async Task ImportFromCollectionSource_NonExistentFile_ShouldFail() + { + // Arrange + var client = GetClient(); + + var importRequest = new ImportRequest(new CollectionSource("TestCollection", "non-existent.csv")); + + // Act + var importResponse = await client.AwaitResponse( + importRequest, + o => o.WithTarget(new ImportAddress(2024)), + TestContext.Current.CancellationToken + ); + + // Assert + importResponse.Message.Log.Status.Should().Be(ActivityStatus.Failed); + var errors = importResponse.Message.Log.Errors(); + errors.Should().Contain(m => m.Message.Contains("Could not find content")); + } + + [Fact] + public async Task ImportFromCollectionSource_NonExistentCollection_ShouldFail() + { + // Arrange + var client = GetClient(); + + var importRequest = new ImportRequest(new CollectionSource("NonExistentCollection", "test-data.csv")); + + // Act + var importResponse = await client.AwaitResponse( + importRequest, + o => o.WithTarget(new ImportAddress(2024)), + TestContext.Current.CancellationToken + ); + + // Assert + importResponse.Message.Log.Status.Should().Be(ActivityStatus.Failed); + var errors = importResponse.Message.Log.Errors(); + errors.Should().Contain(m => m.Message.Contains("Collection") && m.Message.Contains("not found")); + } +} diff --git a/test/MeshWeaver.Import.Test/ImportTest.cs b/test/MeshWeaver.Import.Test/ImportTest.cs index 014b21db4..bd15cbd2f 100644 --- a/test/MeshWeaver.Import.Test/ImportTest.cs +++ b/test/MeshWeaver.Import.Test/ImportTest.cs @@ -23,14 +23,7 @@ public class ImportTest(ITestOutputHelper output) : HubTestBase(output) protected override MessageHubConfiguration ConfigureRouter( MessageHubConfiguration configuration) { - return base.ConfigureRouter(configuration) - .WithRoutes(forward => - forward - .RouteAddressToHostedHub(c => c.ConfigureReferenceDataModel()) - .RouteAddressToHostedHub(c => c.ConfigureTransactionalModel((TransactionalDataAddress)c.Address)) - .RouteAddressToHostedHub(c => c.ConfigureComputedModel()) - .RouteAddressToHostedHub(c => c.ConfigureImportHub()) - ); + return base.ConfigureRouter(configuration).ConfigureImportRouter(); } @@ -53,7 +46,6 @@ public async Task DistributedImportTest() { // arrange var client = GetClient(); - var timeout = 20.Seconds(); var importRequest = new ImportRequest(VanillaDistributedCsv) { Format = TestHubSetup.CashflowImportFormat, @@ -80,11 +72,12 @@ public async Task DistributedImportTest() 
importResponse.Message.Log.Status.Should().Be(ActivityStatus.Succeeded); Logger.LogInformation("DistributedImportTest {TestId}: Getting transactional workspace", testId); - var transactionalItems1 = await (GetWorkspace( - Router.GetHostedHub(new TransactionalDataAddress(2024, "1")) - )) + var workspace = GetWorkspace( + Router.GetHostedHub(new TransactionalDataAddress(2024, "1")) + ); + var transactionalItems1 = await workspace .GetObservable() - .Timeout(timeout) + .Timeout(5.Seconds()) .FirstAsync(x => x.Count > 1); Logger.LogInformation("DistributedImportTest {TestId}: Got {Count} transactional items", testId, transactionalItems1.Count); @@ -93,7 +86,7 @@ public async Task DistributedImportTest() Router.GetHostedHub(new ComputedDataAddress(2024, "1")) )) .GetObservable() - .Timeout(timeout) + .Timeout(5.Seconds()) .FirstAsync(x => x is { Count: > 0 }); Logger.LogInformation("DistributedImportTest {TestId}: Got {Count} computed items", testId, computedItems1.Count); diff --git a/test/MeshWeaver.Import.Test/MeshWeaver.Import.Test.csproj b/test/MeshWeaver.Import.Test/MeshWeaver.Import.Test.csproj index 5e81e56a8..1365efc1c 100644 --- a/test/MeshWeaver.Import.Test/MeshWeaver.Import.Test.csproj +++ b/test/MeshWeaver.Import.Test/MeshWeaver.Import.Test.csproj @@ -14,5 +14,6 @@ + diff --git a/test/MeshWeaver.Layout.Test/DataChangeStreamUpdateTest.cs b/test/MeshWeaver.Layout.Test/DataChangeStreamUpdateTest.cs index aef0934e3..75b2639b9 100644 --- a/test/MeshWeaver.Layout.Test/DataChangeStreamUpdateTest.cs +++ b/test/MeshWeaver.Layout.Test/DataChangeStreamUpdateTest.cs @@ -10,7 +10,6 @@ using MeshWeaver.Fixture; using MeshWeaver.Layout.Composition; using MeshWeaver.Messaging; -using Xunit; namespace MeshWeaver.Layout.Test; @@ -28,7 +27,7 @@ DateTime UpdatedAt /// /// Initial test data for seeding /// - public static readonly TestTaskItem[] InitialData = + public static readonly TestTaskItem[] InitialData = [ new("task-1", "First Task", "Pending", DateTime.UtcNow.AddDays(-1), DateTime.UtcNow.AddDays(-1)), new("task-2", "Second Task", "InProgress", DateTime.UtcNow.AddHours(-2), DateTime.UtcNow.AddHours(-2)), @@ -46,8 +45,6 @@ DateTime UpdatedAt /// public class DataChangeStreamUpdateTest(ITestOutputHelper output) : HubTestBase(output) { - private const string TaskListArea = nameof(TaskListArea); - private const string TaskCountArea = nameof(TaskCountArea); /// /// Step 1: Configure host with TestTaskItem entity type and initial data @@ -68,12 +65,12 @@ protected override MessageHubConfiguration ConfigureHost(MessageHubConfiguration .AddLayout(layout => layout // Step 2: Create layout area that subscribes to stream and shows property we'll change - .WithView(TaskListArea, TaskListView) - .WithView(TaskCountArea, TaskCountView) + .WithView(nameof(TaskListView), TaskListView) + .WithView(nameof(TaskCountView), TaskCountView) ); } - protected override MessageHubConfiguration ConfigureClient(MessageHubConfiguration configuration) + protected override MessageHubConfiguration ConfigureClient(MessageHubConfiguration configuration) => base.ConfigureClient(configuration).AddLayoutClient(d => d); /// @@ -109,7 +106,7 @@ private static IObservable TaskCountView(LayoutAreaHost host, Renderi private static UiControl CreateTaskListMarkdown(IReadOnlyCollection taskItems) { var markdown = "# Task List\n\n"; - + if (!taskItems.Any()) { markdown += "*No tasks found.*"; @@ -121,7 +118,7 @@ private static UiControl CreateTaskListMarkdown(IReadOnlyCollection "⏳", - "InProgress" => "🔄", + "InProgress" => "🔄", "Completed" 
=> "✅", _ => "❓" }; @@ -141,7 +138,7 @@ private static UiControl CreateTaskListMarkdown(IReadOnlyCollection taskItems) { var markdown = "# Task Count\n\n"; - + if (!taskItems.Any()) { markdown += "*No tasks found.*"; @@ -154,7 +151,7 @@ private static UiControl CreateTaskCountMarkdown(IReadOnlyCollection( new HostAddress(), - new LayoutAreaReference(TaskListArea) + new LayoutAreaReference(nameof(TaskListView)) ); // Verify initial data is loaded in layout area var initialControl = await stream - .GetControlStream(TaskListArea) + .GetControlStream(nameof(TaskListView)) .Timeout(10.Seconds()) .FirstAsync(x => x != null && x.ToString().Contains("First Task")); @@ -213,18 +210,18 @@ public async Task DataChangeRequest_ShouldUpdateLayoutAreaViews() Output.WriteLine($"🎯 Target task found: '{taskToUpdate.Title}' with status '{taskToUpdate.Status}'"); // Step 4: Emit DataChangeRequest to change the status - var updatedTask = taskToUpdate with - { - Status = "InProgress", - UpdatedAt = DateTime.UtcNow + var updatedTask = taskToUpdate with + { + Status = "InProgress", + UpdatedAt = DateTime.UtcNow }; var changeRequest = new DataChangeRequest().WithUpdates(updatedTask); - + Output.WriteLine($"📤 Sending DataChangeRequest to change status: {taskToUpdate.Status} → {updatedTask.Status}"); var updatedControlTask = stream - .GetControlStream(TaskListArea) + .GetControlStream(nameof(TaskListView)) .Skip(1) .Where(x => x != null && x.ToString().Contains("Status:** InProgress")) .Timeout(10.Seconds()) @@ -245,11 +242,11 @@ public async Task DataChangeRequest_ShouldUpdateLayoutAreaViews() // Additional verification: Check that task count view also updates var countStream = workspace.GetRemoteStream( new HostAddress(), - new LayoutAreaReference(TaskCountArea) + new LayoutAreaReference(nameof(TaskCountView)) ); var updatedCountControl = await countStream - .GetControlStream(TaskCountArea) + .GetControlStream(nameof(TaskCountView)) .Where(x => x != null && x.ToString().Contains("🔄 **InProgress:** 2")) // Should now have 2 InProgress tasks .Timeout(10.Seconds()) .FirstAsync(); @@ -274,12 +271,12 @@ public async Task MultipleDataChanges_ShouldUpdateLayoutAreaViews() var stream = workspace.GetRemoteStream( new HostAddress(), - new LayoutAreaReference(TaskCountArea) + new LayoutAreaReference(nameof(TaskCountView)) ); // Wait for initial data await stream - .GetControlStream(TaskCountArea) + .GetControlStream(nameof(TaskCountView)) .Timeout(5.Seconds()) .FirstAsync(x => x != null && x.ToString().Contains("Total Tasks")); @@ -290,24 +287,28 @@ await stream .FirstAsync(); // Update multiple tasks simultaneously - var updatedTasks = tasksData.Select(task => task with - { - Status = "Completed", - UpdatedAt = DateTime.UtcNow + var updatedTasks = tasksData.Select(task => task with + { + Status = "Completed", + UpdatedAt = DateTime.UtcNow }).Cast().ToArray(); var changeRequest = new DataChangeRequest().WithUpdates(updatedTasks); - + Output.WriteLine($"📤 Sending DataChangeRequest to complete all {updatedTasks.Length} tasks"); - client.Post(changeRequest, o => o.WithTarget(new HostAddress())); - // Verify all tasks are now completed - var allCompletedControl = await stream - .GetControlStream(TaskCountArea) + // Set up the completion watch BEFORE posting the change to avoid race condition + var allCompletedTask = stream + .GetControlStream(nameof(TaskCountView)) .Where(x => x != null && x.ToString().Contains("✅ **Completed:** 3")) .Timeout(10.Seconds()) .FirstAsync(); + client.Post(changeRequest, o => o.WithTarget(new 
HostAddress())); + + // Verify all tasks are now completed + var allCompletedControl = await allCompletedTask; + allCompletedControl.Should().NotBeNull(); var content = allCompletedControl.ToString(); content.Should().Contain("✅ **Completed:** 3"); @@ -328,12 +329,12 @@ public async Task CreateNewTask_ShouldUpdateLayoutAreaViews() var stream = workspace.GetRemoteStream( new HostAddress(), - new LayoutAreaReference(TaskCountArea) + new LayoutAreaReference(nameof(TaskCountView)) ); // Wait for initial data (should show 3 tasks) await stream - .GetControlStream(TaskCountArea) + .GetControlStream(nameof(TaskCountView)) .Timeout(5.Seconds()) .FirstAsync(x => x != null && x.ToString().Contains("Total Tasks:** 3")); @@ -347,13 +348,13 @@ await stream ); var createRequest = new DataChangeRequest().WithCreations(newTask); - + Output.WriteLine($"📤 Creating new task: '{newTask.Title}'"); client.Post(createRequest, o => o.WithTarget(new HostAddress())); // Verify task count increased var updatedControl = await stream - .GetControlStream(TaskCountArea) + .GetControlStream(nameof(TaskCountView)) .Where(x => x != null && x.ToString().Contains("Total Tasks:** 4")) .Timeout(10.Seconds()) .FirstAsync(); @@ -361,7 +362,7 @@ await stream updatedControl.Should().NotBeNull(); var content = updatedControl.ToString(); content.Should().Contain("Total Tasks:** 4"); - + Output.WriteLine("✅ New task creation updated layout area correctly"); } @@ -376,12 +377,12 @@ public async Task DeleteTask_ShouldUpdateLayoutAreaViews() var stream = workspace.GetRemoteStream( new HostAddress(), - new LayoutAreaReference(TaskListArea) + new LayoutAreaReference(nameof(TaskListView)) ); // Wait for initial data await stream - .GetControlStream(TaskListArea) + .GetControlStream(nameof(TaskListView)) .Timeout(5.Seconds()) .FirstAsync(x => x != null && x.ToString().Contains("First Task")); @@ -393,13 +394,13 @@ await stream var taskToDelete = tasksData.First(t => t.Id == "task-1"); var deleteRequest = new DataChangeRequest().WithDeletions(taskToDelete); - + Output.WriteLine($"📤 Deleting task: '{taskToDelete.Title}'"); client.Post(deleteRequest, o => o.WithTarget(new HostAddress())); // Verify task is no longer in the list var updatedControl = await stream - .GetControlStream(TaskListArea) + .GetControlStream(nameof(TaskListView)) .Where(x => x != null && !x.ToString().Contains("First Task")) .Timeout(10.Seconds()) .FirstAsync(); @@ -408,7 +409,7 @@ await stream var content = updatedControl.ToString(); content.Should().NotContain("First Task"); content.Should().Contain("Second Task"); // Other tasks should still be there - + Output.WriteLine("✅ Task deletion updated layout area correctly"); } } diff --git a/test/MeshWeaver.Serialization.Test/SerializationTest.cs b/test/MeshWeaver.Serialization.Test/SerializationTest.cs index cde5e4b04..31c46c2ec 100644 --- a/test/MeshWeaver.Serialization.Test/SerializationTest.cs +++ b/test/MeshWeaver.Serialization.Test/SerializationTest.cs @@ -300,21 +300,21 @@ public void TestGenericPolymorphicTypeSerialization() public async Task TestSerializationFailureHandling() { Output.WriteLine("Testing serialization failure handling..."); - + // This test verifies that when no handler exists for a request message type, // AwaitResponse should throw DeliveryFailureException instead of hanging - + var client = Router.GetHostedHub(new ClientAddress(), ConfigureClient); - + // Send an UnknownRequest to the host // The host has no handler for this type at all // This should result in a DeliveryFailure being sent back 
to the client
         var unknownRequest = new GetDataRequest(new EntityReference("collection", "id"));
         Output.WriteLine("Sending UnknownRequest to host (no handler exists for this type)...");
-        
+
         // AwaitResponse should now throw DeliveryFailureException due to no handler being found
-        var exception = await Assert.ThrowsAsync<DeliveryFailureException>(() =>
+        var exception = await Assert.ThrowsAsync<DeliveryFailureException>(() =>
             client.AwaitResponse(
                 unknownRequest,
                 o => o.WithTarget(new HostAddress()),
@@ -326,10 +326,10 @@ public async Task TestSerializationFailureHandling()
         exception.Should().NotBeNull();
         exception.Message.Should().NotBeEmpty();
         Output.WriteLine($"Exception message: {exception.Message}");
-        
+
         // The message should indicate no handler was found
         var message = exception.Message.ToLowerInvariant();
-        message.Should().Contain("could not deserialize");
+        message.Should().Contain("no handler found");
     }
 }
diff --git a/test/appsettings.json b/test/appsettings.json
index 7077e231d..98fad356c 100644
--- a/test/appsettings.json
+++ b/test/appsettings.json
@@ -3,8 +3,8 @@
     "LogLevel": {
       "Default": "Information",
       "MeshWeaver.Import": "Warning",
-      "MeshWeaver.Data": "Warning",
       "MeshWeaver.Messaging": "Warning",
+      "MeshWeaver.Data": "Warning",
       "Microsoft": "Warning",
       "System": "Warning"
     },