diff --git a/2025/solid/.gitignore b/2025/solid/.gitignore
new file mode 100644
index 0000000..015bd64
--- /dev/null
+++ b/2025/solid/.gitignore
@@ -0,0 +1,3 @@
+output/
+logs/
+sales_report.json
\ No newline at end of file
diff --git a/2025/solid/README.md b/2025/solid/README.md
new file mode 100644
index 0000000..d33dd14
--- /dev/null
+++ b/2025/solid/README.md
@@ -0,0 +1,97 @@
+# Sales Report — Multi-Paradigm Code Showcase
+
+**Purpose:** Demonstrates how the same business logic (a simple sales performance report)
+can be implemented using multiple programming paradigms and architectural styles in Python.
+
+---
+
+## Project Overview
+
+All implementations compute the same core metrics from `sales_data.csv`:
+
+- Number of customers
+- Average order value (pre-tax)
+- Percentage of returns
+- Total sales in period (pre-tax)
+
+Each version differs only in *structure* and *paradigm emphasis*.
+
+| Paradigm | File | Highlights |
+|-----------|------|------------|
+| Procedural / Baseline | `messy_report.py` | Imperative style, easy to follow but not modular |
+| Object-Oriented (SOLID) | Legacy: `class_based_report.py`<br>Enhanced: `class_based_report_v2.py` | Classes and interfaces; SRP and OCP applied with improved logging/robustness |
+| Functional | Legacy: `functional_report.py`<br>Enhanced: `functional_report_v2.py` | Pure transformations with v2 adding structured logging and stronger error handling |
+| Declarative Pipeline | `declarative_report.py` | Type-checked pipelines using Pandera |
+| Config-Driven | `config_report.py` | YAML configuration defines logic dynamically |
+| Asynchronous | `async_report.py` | Concurrent metric computation and async I/O |
+| Async Streaming (No Pandas) | `async_no_pandas_report.py` | True non-blocking CSV streaming with aiofiles |
+| Dataflow / DAG | `report_dataflow.py` | Declarative dependency graph with explicit dataflow |
+| Actor Model | `report_actor_model.py` | Cooperative message-passing actors with isolated state |
+| Reactive | `reactive_report.py` | RxPY stream-based reporting |
+| Logic / Relational | `logic_report.py` | Relational facts and symbolic reasoning via Kanren |
+
+> The legacy functional and class-based versions match the walkthrough in the video. The `_v2` editions layer in richer logging, error handling, and filesystem conventions while preserving the same outputs.
+
+---
+
+## Usage
+
+### 1. Setup Environment
+
+This project uses [uv](https://docs.astral.sh/uv/) for dependency management. Install it if you have not already, then run:
+
+```bash
+uv sync
+```
+
+### 2. Run Examples
+
+#### Option A — Helper Script
+
+From this directory you can execute one or more implementations with the helper script:
+
+```bash
+./run_reports.sh --list # view available keys
+./run_reports.sh --run functional --run logic
+./run_reports.sh --run-all
+```
+
+The script uses `uv run` automatically when `uv` is installed (recommended). Use `--dry-run` to preview the commands without executing them.
+
+#### Option B — Manual Commands
+
+Use `uv run` to execute any of the implementations inside the managed environment:
+
+```bash
+uv run python logic_report.py
+uv run python functional_report.py
+uv run python functional_report_v2.py
+uv run python async_report.py
+uv run python async_no_pandas_report.py
+uv run python report_dataflow.py
+uv run python report_actor_model.py
+uv run python class_based_report.py
+uv run python class_based_report_v2.py
+uv run python reactive_report.py
+# etc.
+```
+
+### 3. Validate Outputs
+
+After running one or more implementations, verify the generated JSON payloads agree:
+
+```bash
+uv run python verify_reports.py # compare every file to the first baseline
+uv run python verify_reports.py --verbose
+uv run python verify_reports.py --baseline sales_report.json
+```
+
+The script checks for consistent keys and values (within a configurable tolerance) across every report implementation. A non-zero exit code indicates a mismatch.
+
+## Educational Goal
+
+This project illustrates how:
+
+- The same logic can map into multiple thought models (OOP, FP, Async, Logic).
+- Paradigm choice affects extensibility, readability, and reasoning complexity.
+- Abstraction boundaries (metrics, config, I/O) remain constant across paradigms.
diff --git a/2025/solid/async_no_pandas_report.py b/2025/solid/async_no_pandas_report.py
new file mode 100644
index 0000000..bfe0926
--- /dev/null
+++ b/2025/solid/async_no_pandas_report.py
@@ -0,0 +1,157 @@
+"""
+===============================================================================
+Sales Report - True Async Streaming Implementation
+===============================================================================
+Fully asynchronous, non-blocking reporting pipeline:
+ * Reads CSV line by line using aiofiles (no pandas blocking)
+ * Parses each record asynchronously using csv.DictReader
+ * Aggregates metrics incrementally (no in-memory dataset)
+ * Writes JSON output asynchronously
+
+This version is a true example of async I/O — no fake async via thread executors.
+Perfect for large files or integration into larger async systems.
+===============================================================================
+"""
+
+import aiofiles
+import asyncio
+import csv
+import json
+import logging
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+# ------------------------------------------------------------------------------
+# Logging Configuration
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] [async-true] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+INPUT_FILE = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_async_true.json"
+REPORT_START = datetime(2024, 1, 1)
+REPORT_END = datetime(2024, 12, 31)
+
+
+# ------------------------------------------------------------------------------
+# Async Data Processing
+# ------------------------------------------------------------------------------
+
+
async def parse_and_aggregate_csv(
    file_path: Path,
    start: datetime | None = None,
    end: datetime | None = None,
) -> dict[str, Any]:
    """
    Asynchronously read and aggregate sales data from a CSV file.

    Metrics are accumulated incrementally per line, so the full dataset is
    never held in memory (only running sums/counters and the customer set).

    Args:
        file_path: CSV file with at least ``date``, ``name`` and ``price`` columns.
        start: Inclusive lower bound for the sale date (no bound if None).
        end: Inclusive upper bound for the sale date (no bound if None).

    Returns:
        Report dictionary with customer count, average order value,
        return percentage, total sales and the reporting window.
    """

    customers: set[str] = set()
    total_sales = 0.0
    # Running sum/count instead of a list keeps memory O(1), matching the
    # module's "no in-memory dataset" promise.
    positive_sum = 0.0
    positive_count = 0
    total_records = 0
    total_returns = 0

    logger.info("Reading asynchronously from %s", file_path)

    async with aiofiles.open(file_path, mode="r", encoding="utf-8") as afp:
        # Parse the header with csv.reader so quoted field names (possibly
        # containing commas) are handled correctly; str.split(",") is not safe.
        header_line = await afp.readline()
        fieldnames = next(csv.reader([header_line]))

        # Process the remainder of the file line by line.
        async for line in afp:
            if not line.strip():
                continue

            values = next(csv.reader([line]))
            record = dict(zip(fieldnames, values))

            try:
                # csv.reader already strips surrounding quotes, so values can
                # be converted directly (a quoted price no longer fails).
                sale_date = datetime.fromisoformat(record["date"])
                price = float(record["price"])
                name = record["name"]
            except (KeyError, ValueError):
                # Skip badly formed lines safely; narrow exceptions so real
                # bugs are not swallowed.
                continue

            # Parentheses make the precedence explicit: skip records outside
            # either boundary of the reporting window.
            if (start and sale_date < start) or (end and sale_date > end):
                continue

            total_records += 1
            customers.add(name)
            total_sales += price

            if price > 0:
                positive_sum += price
                positive_count += 1
            elif price < 0:
                total_returns += 1

    avg_order_value = positive_sum / positive_count if positive_count else 0.0
    percentage_returns = (
        (total_returns / total_records) * 100 if total_records else 0.0
    )

    logger.info(
        "Processed %d records (%d customers, %.2f avg order)",
        total_records,
        len(customers),
        avg_order_value,
    )

    return {
        "number_of_customers": len(customers),
        "average_order_value (pre-tax)": round(avg_order_value, 2),
        "percentage_of_returns": round(percentage_returns, 2),
        "total_sales_in_period (pre-tax)": round(total_sales, 2),
        "report_start": start.strftime("%Y-%m-%d") if start else "N/A",
        "report_end": end.strftime("%Y-%m-%d") if end else "N/A",
    }
+
+
async def write_report(report: dict[str, Any], file_path: Path) -> None:
    """Persist the report dictionary as pretty-printed JSON, asynchronously."""

    payload = json.dumps(report, indent=2)
    async with aiofiles.open(file_path, mode="w", encoding="utf-8") as handle:
        await handle.write(payload)
    logger.info("Async streaming report written to %s", file_path)
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+
async def main() -> None:
    """Drive the streaming pipeline: aggregate the CSV, then write JSON."""

    logger.info("Starting true asynchronous sales report pipeline...")

    try:
        report = await parse_and_aggregate_csv(INPUT_FILE, REPORT_START, REPORT_END)
        await write_report(report, OUTPUT_FILE)
        logger.info("Report generation completed successfully.")
    except Exception as exc:
        # Log the full traceback, then exit non-zero for shell callers.
        logger.exception("Async streaming report failed: %s", exc)
        raise SystemExit(1) from exc
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/2025/solid/async_report.py b/2025/solid/async_report.py
new file mode 100644
index 0000000..3231ae5
--- /dev/null
+++ b/2025/solid/async_report.py
@@ -0,0 +1,188 @@
+"""
+===============================================================================
+Sales Report - Asynchronous Implementation
+===============================================================================
+Uses asyncio to concurrently compute metrics and handle async I/O operations.
+
+Highlights:
+ * Concurrent execution of metric computations
+ * Async-friendly structure (could be extended for DB/API operations)
+===============================================================================
+"""
+
+import asyncio
+import json
+import logging
+from collections.abc import Awaitable
+from dataclasses import dataclass
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+import aiofiles # Async file I/O
+import pandas as pd
+
+# ------------------------------------------------------------------------------
+# Logging Configuration
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] [async] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+INPUT_FILE = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_async.json"
+REPORT_START = datetime(2024, 1, 1)
+REPORT_END = datetime(2024, 12, 31)
+
+# ------------------------------------------------------------------------------
+# Data and Config
+# ------------------------------------------------------------------------------
+
+
@dataclass
class ReportConfig:
    """Configuration for asynchronous report generation."""

    input_file: Path  # CSV source read by read_sales()
    output_file: Path  # destination JSON file for write_report()
    start_date: datetime | None = None  # inclusive window start; None = unbounded
    end_date: datetime | None = None  # inclusive window end; None = unbounded
+
+
+# ------------------------------------------------------------------------------
+# Async Helpers
+# ------------------------------------------------------------------------------
+
+
async def read_sales(file: Path) -> pd.DataFrame:
    """Load the sales CSV without blocking the event loop.

    pandas I/O is synchronous, so the read is delegated to the default
    thread-pool executor.
    """
    logger.info("Reading data asynchronously from %s", file)

    def _read() -> pd.DataFrame:
        return pd.read_csv(file, parse_dates=["date"])  # pyright: ignore[reportUnknownMemberType]

    frame = await asyncio.get_running_loop().run_in_executor(None, _read)
    logger.info("Data loaded successfully: %d rows", len(frame))
    return frame
+
+
async def filter_sales(
    df: pd.DataFrame, start: datetime | None, end: datetime | None
) -> pd.DataFrame:
    """Restrict *df* to rows whose date lies within [start, end].

    The pandas filtering itself runs in the default executor so the event
    loop stays responsive.
    """

    def _apply_window() -> pd.DataFrame:
        subset = df
        if start:
            subset = subset.loc[subset["date"] >= pd.Timestamp(start)]
        if end:
            subset = subset.loc[subset["date"] <= pd.Timestamp(end)]
        return subset

    loop = asyncio.get_running_loop()
    windowed = await loop.run_in_executor(None, _apply_window)
    logger.info("Filtered dataset to %d records", len(windowed))
    return windowed
+
+
+# ------------------------------------------------------------------------------
+# Async Metric Coroutines
+# ------------------------------------------------------------------------------
+
+
async def customer_count_metric(df: pd.DataFrame) -> dict[str, Any]:
    """Count distinct customer names (short sleep simulates async work)."""
    await asyncio.sleep(0.01)
    distinct_customers = df["name"].nunique()
    return {"number_of_customers": distinct_customers}
+
+
async def average_order_value_metric(df: pd.DataFrame) -> dict[str, Any]:
    """Average of strictly positive order prices; 0.0 when there are none."""
    await asyncio.sleep(0.01)
    orders = df.loc[df["price"] > 0, "price"]
    mean_value = 0.0 if orders.empty else orders.mean()
    return {"average_order_value (pre-tax)": round(mean_value, 2)}
+
+
async def return_percentage_metric(df: pd.DataFrame) -> dict[str, Any]:
    """Share of transactions with a negative price, as a percentage."""
    await asyncio.sleep(0.01)
    row_count = len(df)
    return_count = int((df["price"] < 0).sum())
    pct = (return_count / row_count) * 100 if row_count else 0.0
    return {"percentage_of_returns": round(pct, 2)}
+
+
async def total_sales_metric(df: pd.DataFrame) -> dict[str, Any]:
    """Sum of every price in the frame, returns included (pre-tax)."""
    await asyncio.sleep(0.01)
    grand_total = df["price"].sum()
    return {"total_sales_in_period (pre-tax)": round(grand_total, 2)}
+
+
+# ------------------------------------------------------------------------------
+# Report Assembly
+# ------------------------------------------------------------------------------
+
+
async def compute_metrics(
    df: pd.DataFrame, start: datetime | None, end: datetime | None
) -> dict[str, Any]:
    """Fan the four metric coroutines out concurrently and merge the results."""

    logger.info("Computing metrics concurrently...")
    partial_results = await asyncio.gather(
        customer_count_metric(df),
        average_order_value_metric(df),
        return_percentage_metric(df),
        total_sales_metric(df),
    )

    merged: dict[str, Any] = {}
    for partial in partial_results:
        merged.update(partial)

    # Record the reporting window alongside the metrics.
    merged["report_start"] = start.strftime("%Y-%m-%d") if start else "N/A"
    merged["report_end"] = end.strftime("%Y-%m-%d") if end else "N/A"

    return merged
+
+
async def write_report(report: dict[str, Any], path: Path) -> None:
    """Serialize the report to pretty-printed JSON via async file I/O."""
    serialized = json.dumps(report, indent=2)
    async with aiofiles.open(path, "w", encoding="utf-8") as handle:
        await handle.write(serialized)
    logger.info("Async report written to: %s", path)
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+
async def main() -> None:
    """Wire the async pipeline together: read → filter → compute → write."""
    config = ReportConfig(
        input_file=INPUT_FILE,
        output_file=OUTPUT_FILE,
        start_date=REPORT_START,
        end_date=REPORT_END,
    )

    try:
        raw = await read_sales(config.input_file)
        windowed = await filter_sales(raw, config.start_date, config.end_date)
        report = await compute_metrics(windowed, config.start_date, config.end_date)
        await write_report(report, config.output_file)
        logger.info("Async report generation completed successfully.")
    except Exception as e:
        # Log traceback and exit non-zero so shell callers can detect failure.
        logger.exception("Failed during async report generation: %s", e)
        raise SystemExit(1) from e
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
diff --git a/2025/solid/class_based_report_v2.py b/2025/solid/class_based_report_v2.py
new file mode 100644
index 0000000..a37331d
--- /dev/null
+++ b/2025/solid/class_based_report_v2.py
@@ -0,0 +1,217 @@
+"""
+===============================================================================
+Sales Report - Class-Based Implementation
+===============================================================================
+Generates a summarized sales performance report using an object-oriented (OOP)
+approach that follows SOLID principles.
+
+Structure:
+ * CsvSalesReader - Responsible for reading raw CSV data.
+ * DateRangeFilter - Handles date range filtering.
+ * Metric classes - Each computes a single metric.
+ * SalesReportGenerator - Coordinates the workflow.
+ * JSONReportWriter - Persists report results to JSON.
+===============================================================================
+"""
+
+import json
+import logging
+from dataclasses import dataclass
+from datetime import datetime
+from pathlib import Path
+from typing import Protocol
+
+import pandas as pd
+
+# ------------------------------------------------------------------------------
+# Logging Configuration
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+INPUT_FILE = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_class.json"
+REPORT_START = datetime(2024, 1, 1)
+REPORT_END = datetime(2024, 12, 31)
+
+# ------------------------------------------------------------------------------
+# Config and Protocols
+# ------------------------------------------------------------------------------
+
+
@dataclass
class ReportConfig:
    """Configuration object controlling report generation parameters."""

    input_file: Path  # CSV source consumed by the SalesReader
    output_file: Path  # JSON destination used by JSONReportWriter
    start_date: datetime | None = None  # inclusive window start; None = unbounded
    end_date: datetime | None = None  # inclusive window end; None = unbounded
+
+
class SalesReader(Protocol):
    """Interface for reading sales data sources (CSV, DB, etc.)."""

    # Load the raw sales rows for *file* into a DataFrame
    # (see CsvSalesReader for the CSV-backed implementation).
    def read(self, file: Path) -> pd.DataFrame: ...
+
+
class Metric(Protocol):
    """Interface for computing a single metric."""

    # Returns a small dict (metric name -> value) so the generator can
    # merge the outputs of many metrics into one report dictionary.
    def compute(self, df: pd.DataFrame) -> dict[str, object]: ...
+
+
+# ------------------------------------------------------------------------------
+# Core Classes
+# ------------------------------------------------------------------------------
+
+
class CsvSalesReader:
    """Reads sales input data from a CSV file."""

    def read(self, file: Path) -> pd.DataFrame:
        """Parse *file*, coercing the ``date`` column to datetimes."""
        logger.info("Reading data from %s", file)
        frame = pd.read_csv(file, parse_dates=["date"])  # pyright: ignore[reportUnknownMemberType]
        return frame
+
+
class DateRangeFilter:
    """Filters sales data between a given start and end date (inclusive)."""

    def apply(
        self, df: pd.DataFrame, start: datetime | None, end: datetime | None
    ) -> pd.DataFrame:
        """Return a copy of *df* restricted to the [start, end] window."""
        logger.info("Applying date filter %s - %s", start or "N/A", end or "N/A")
        result = df
        if start:
            result = result.loc[result["date"] >= pd.Timestamp(start)].copy()
        if end:
            result = result.loc[result["date"] <= pd.Timestamp(end)].copy()
        return result
+
+
+# ------------------------------------------------------------------------------
+# Metrics Implementations
+# ------------------------------------------------------------------------------
+
+
class CustomerCountMetric:
    """Counts distinct customers in the dataset."""

    def compute(self, df: pd.DataFrame) -> dict[str, object]:
        """Return the number of unique values in the ``name`` column."""
        unique_customers = df["name"].nunique()
        return {"number_of_customers": unique_customers}
+
+
class AverageOrderValueMetric:
    """Computes the average positive order value (pre-tax)."""

    def compute(self, df: pd.DataFrame) -> dict[str, object]:
        """Average of strictly positive prices; 0.0 when no sales exist."""
        positive = df.loc[df["price"] > 0, "price"]
        average = 0.0 if positive.empty else positive.mean()
        return {"average_order_value (pre-tax)": round(average, 2)}
+
+
class ReturnPercentageMetric:
    """Calculates what percentage of sales were returns."""

    def compute(self, df: pd.DataFrame) -> dict[str, object]:
        """Share of rows with a negative price, expressed as a percentage."""
        total = len(df)
        if total == 0:
            return {"percentage_of_returns": round(0, 2)}
        returns_count = int((df["price"] < 0).sum())
        return {"percentage_of_returns": round(returns_count / total * 100, 2)}
+
+
class TotalSalesMetric:
    """Computes the total value of all transactions (pre-tax)."""

    def compute(self, df: pd.DataFrame) -> dict[str, object]:
        """Sum every price in the frame, returns included."""
        grand_total = df["price"].sum()
        return {"total_sales_in_period (pre-tax)": round(grand_total, 2)}
+
+
+# ------------------------------------------------------------------------------
+# Generators and Writers
+# ------------------------------------------------------------------------------
+
+
class SalesReportGenerator:
    """Coordinates the report creation process."""

    def __init__(self, reader: SalesReader, filterer: DateRangeFilter, metrics: list[Metric]):
        # Collaborators are injected so data sources, filters and metrics
        # can be swapped without modifying this class.
        self.reader = reader
        self.filterer = filterer
        self.metrics = metrics

    def generate(self, config: ReportConfig) -> dict[str, object]:
        """Executes the complete reporting process: read → filter → compute."""

        data = self.reader.read(config.input_file)
        data = self.filterer.apply(data, config.start_date, config.end_date)

        logger.info("Computing metrics...")
        report: dict[str, object] = {}
        for metric in self.metrics:
            report |= metric.compute(data)

        start, end = config.start_date, config.end_date
        report["report_start"] = start.strftime("%Y-%m-%d") if start else "N/A"
        report["report_end"] = end.strftime("%Y-%m-%d") if end else "N/A"
        return report
+
+
class JSONReportWriter:
    """Writes the report dictionary to a JSON file."""

    def write(self, report: dict[str, object], output_file: Path) -> None:
        """Serialize *report* to *output_file*, creating parent dirs as needed."""
        output_file.parent.mkdir(parents=True, exist_ok=True)
        output_file.write_text(json.dumps(report, indent=2), encoding="utf-8")
        logger.info("Report written to %s", output_file)
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+
def main() -> None:
    """Assembles and executes the OOP-style reporting workflow."""

    config = ReportConfig(
        input_file=INPUT_FILE,
        output_file=OUTPUT_FILE,
        start_date=REPORT_START,
        end_date=REPORT_END,
    )

    # Metric list is the extension point: add a class, append it here.
    metrics: list[Metric] = [
        CustomerCountMetric(),
        AverageOrderValueMetric(),
        ReturnPercentageMetric(),
        TotalSalesMetric(),
    ]
    generator = SalesReportGenerator(CsvSalesReader(), DateRangeFilter(), metrics)
    writer = JSONReportWriter()

    try:
        result = generator.generate(config)
        writer.write(result, config.output_file)
    except Exception as e:
        logger.exception("Failed to generate report: %s", e)
        raise SystemExit(1) from e
+
+
+if __name__ == "__main__":
+ main()
diff --git a/2025/solid/config_report.py b/2025/solid/config_report.py
new file mode 100644
index 0000000..3164368
--- /dev/null
+++ b/2025/solid/config_report.py
@@ -0,0 +1,169 @@
+"""
+===============================================================================
+Sales Report - Config-Driven (Declarative) Implementation
+===============================================================================
+Loads pipeline configuration from an external YAML/JSON file and runs the
+same metric logic dynamically, controlled entirely by configuration content.
+
+Highlights:
+ * Decouples logic from configuration
+ * Allows multiple report variants without code modification
+===============================================================================
+"""
+
import json
import logging
from collections.abc import Callable
from dataclasses import dataclass
from datetime import date, datetime
from pathlib import Path
from typing import Any

import pandas as pd
import yaml
+
+# ------------------------------------------------------------------------------
+# Logging Setup
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] [config] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+CONFIG_FILE = BASE_DIR / "report_config.yaml"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+
+# ------------------------------------------------------------------------------
+# Metric Functions
+# ------------------------------------------------------------------------------
+
+
def customer_count(df: pd.DataFrame) -> dict[str, Any]:
    """Counts distinct customers."""

    unique_names = df["name"].nunique()
    return {"number_of_customers": unique_names}
+
+
def average_order_value(df: pd.DataFrame) -> dict[str, Any]:
    """Calculates average order value."""

    positive = df.loc[df["price"] > 0, "price"]
    mean_price = 0.0 if positive.empty else positive.mean()
    return {"average_order_value (pre-tax)": round(mean_price, 2)}
+
+
def return_percentage(df: pd.DataFrame) -> dict[str, Any]:
    """Calculates percentage of returns."""

    total_rows = len(df)
    if not total_rows:
        return {"percentage_of_returns": round(0.0, 2)}
    negative_rows = int((df["price"] < 0).sum())
    return {"percentage_of_returns": round(negative_rows / total_rows * 100, 2)}
+
+
def total_sales(df: pd.DataFrame) -> dict[str, Any]:
    """Calculates total sales."""

    period_total = df["price"].sum()
    return {"total_sales_in_period (pre-tax)": round(period_total, 2)}
+
+
# Maps the metric keys referenced in the YAML config to their implementations.
# Adding a metric only requires a new entry here; run_pipeline stays unchanged.
METRIC_REGISTRY: dict[str, Callable[[pd.DataFrame], dict[str, Any]]] = {
    "customer_count": customer_count,
    "average_order_value": average_order_value,
    "return_percentage": return_percentage,
    "total_sales": total_sales,
}
+
+# ------------------------------------------------------------------------------
+# Config Schema
+# ------------------------------------------------------------------------------
+
+
@dataclass
class ReportConfig:
    """Typed view of the YAML report configuration."""

    dataset: Path  # resolved path to the input CSV
    start_date: datetime | None  # inclusive window start (None = unbounded)
    end_date: datetime | None  # inclusive window end (None = unbounded)
    metrics: list[str]  # keys into METRIC_REGISTRY, computed in order
    output_file: Path  # resolved path for the JSON output
+
+
+# ------------------------------------------------------------------------------
+# Core Functions
+# ------------------------------------------------------------------------------
+
+
def _as_datetime(value: Any) -> datetime | None:
    """Coerce a YAML scalar (str, date, datetime or None) to datetime."""
    if value is None or isinstance(value, datetime):
        return value
    if isinstance(value, date):
        # PyYAML parses unquoted ISO dates (e.g. 2024-01-01) into
        # datetime.date objects, not strings.
        return datetime(value.year, value.month, value.day)
    return datetime.fromisoformat(str(value))


def load_config(file: Path) -> ReportConfig:
    """Loads YAML config and converts to ReportConfig dataclass.

    Relative ``dataset`` / ``output`` paths are resolved against BASE_DIR.

    Raises:
        KeyError: if a required key (dataset, start_date, end_date,
            metrics, output) is missing from the YAML document.
    """
    with open(file, encoding="utf-8") as f:
        data = yaml.safe_load(f)

    return ReportConfig(
        dataset=BASE_DIR / data["dataset"],
        # datetime.fromisoformat() only accepts str, but safe_load may
        # return date/datetime objects for ISO scalars — coerce either form
        # instead of raising TypeError on a normal config file.
        start_date=_as_datetime(data["start_date"]),
        end_date=_as_datetime(data["end_date"]),
        metrics=data["metrics"],
        output_file=BASE_DIR / data["output"],
    )
+
+
def run_pipeline(config: ReportConfig) -> dict[str, Any]:
    """Executes report pipeline as declared in YAML config."""
    logger.info("Loading dataset %s", config.dataset)
    frame = pd.read_csv(config.dataset, parse_dates=["date"])  # pyright: ignore[reportUnknownMemberType]

    # Restrict the frame to the configured reporting window.
    if config.start_date:
        frame = frame[frame["date"] >= pd.Timestamp(config.start_date)]
    if config.end_date:
        frame = frame[frame["date"] <= pd.Timestamp(config.end_date)]
    logger.info("Records after filtering: %d", len(frame))

    # Look up and run each metric declared in the config, in order.
    report: dict[str, Any] = {}
    for key in config.metrics:
        metric_fn = METRIC_REGISTRY.get(key)
        if metric_fn is None:
            logger.warning("Unknown metric '%s' skipped.", key)
            continue
        report.update(metric_fn(frame))

    report["report_start"] = config.start_date.strftime("%Y-%m-%d") if config.start_date else "N/A"
    report["report_end"] = config.end_date.strftime("%Y-%m-%d") if config.end_date else "N/A"
    return report
+
+
def write_report(report: dict[str, Any], output: Path) -> None:
    """Writes report to JSON file."""
    output.parent.mkdir(parents=True, exist_ok=True)
    serialized = json.dumps(report, indent=2)
    output.write_text(serialized, encoding="utf-8")
    logger.info("Config-based report written to %s", output)
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+
def main() -> None:
    """Load the YAML config, run the declared pipeline, persist the report."""
    config = load_config(CONFIG_FILE)
    logger.info("Running config-driven report using %s", CONFIG_FILE)
    try:
        report = run_pipeline(config)
        write_report(report, config.output_file)
        logger.info("Report generated successfully.")
    except Exception as e:
        logger.exception("Failed to generate report: %s", e)
        raise SystemExit(1) from e
+
+
+if __name__ == "__main__":
+ main()
diff --git a/2025/solid/declarative_report.py b/2025/solid/declarative_report.py
new file mode 100644
index 0000000..78b469a
--- /dev/null
+++ b/2025/solid/declarative_report.py
@@ -0,0 +1,169 @@
+"""
+===============================================================================
+Sales Report - Declarative (Pipeline) Implementation
+===============================================================================
+A declarative, type-safe data pipeline using Pandera for runtime validation
+and pure function composition.
+
+Stages:
+ 1. fetch_data - Extract + validate CSV data
+ 2. process_data - Transform by filtering rows to time window
+ 3. build_report - Compute key metrics declaratively
+ 4. write_report - Serialize results to JSON
+===============================================================================
+"""
+
+import json
+import logging
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Protocol
+
+import pandas as pd
+import pandera.pandas as pa
+from pandera.typing import DataFrame
+
+# ------------------------------------------------------------------------------
+# Logging configuration
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Schema definition
+# ------------------------------------------------------------------------------
+
+
class SalesSchema(pa.DataFrameModel):
    """Defines the expected structure and types for the input sales data."""

    date: datetime = pa.Field(coerce=True)  # transaction date; coerced on validate
    name: str  # customer name (used for distinct-customer count)
    address: str  # customer address
    item: str  # purchased item description
    price: float  # pre-tax amount; negative values are treated as returns
    tax: float  # tax amount for the transaction
+
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+DATA_PATH = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+REPORT_PATH = OUTPUT_DIR / "sales_report_declarative.json"
+REPORT_START_DATE = datetime(2024, 1, 1)
+REPORT_END_DATE = datetime(2024, 12, 31)
+
+
+# ------------------------------------------------------------------------------
+# Data Pipeline Steps
+# ------------------------------------------------------------------------------
+
+
def fetch_data(_: None) -> DataFrame[SalesSchema]:
    """Step 1: Read and validate input CSV file."""

    logger.info("Reading input data from %s", DATA_PATH)
    raw = pd.read_csv(DATA_PATH, parse_dates=["date"])  # pyright: ignore[reportUnknownMemberType]
    checked = SalesSchema.validate(raw)
    logger.info("Data loaded successfully: %d rows", len(checked))
    return checked
+
+
def process_data(df: DataFrame[SalesSchema]) -> DataFrame[SalesSchema]:
    """Step 2: Filter the dataset to report date range."""

    logger.info(
        "Filtering records between %s and %s",
        REPORT_START_DATE.strftime("%Y-%m-%d"),
        REPORT_END_DATE.strftime("%Y-%m-%d"),
    )
    in_window = (df["date"] >= REPORT_START_DATE) & (df["date"] <= REPORT_END_DATE)
    subset = df.loc[in_window].copy()
    logger.info("Filtered dataset contains %d records", len(subset))
    # Re-validate so downstream steps keep the schema guarantee.
    return SalesSchema.validate(subset)
+
+
def build_report(df: DataFrame[SalesSchema]) -> dict[str, Any]:
    """Step 3: Compute key report metrics."""

    logger.info("Computing report metrics...")

    row_count = len(df)
    positive = df.loc[df["price"] > 0, "price"]
    avg_order = float(positive.mean()) if not positive.empty else 0.0
    returns_share = (
        (len(df.loc[df["price"] < 0]) / row_count) * 100 if row_count > 0 else 0.0
    )

    summary = {
        "number_of_customers": int(df["name"].nunique()),
        "average_order_value (pre-tax)": round(avg_order, 2),
        "percentage_of_returns": round(returns_share, 2),
        "total_sales_in_period (pre-tax)": round(float(df["price"].sum()), 2),
        "report_start": REPORT_START_DATE.strftime("%Y-%m-%d"),
        "report_end": REPORT_END_DATE.strftime("%Y-%m-%d"),
    }
    logger.info("Completed metric calculations.")
    return summary
+
+
def write_report(report: dict[str, Any]) -> None:
    """Step 4: Write final JSON report to disk.

    Args:
        report: Fully-computed metrics dictionary to serialize.
    """

    REPORT_PATH.write_text(json.dumps(report, indent=2), encoding="utf-8")
    # Use the module logger rather than print() so the confirmation message
    # matches the timestamped log format used by every other pipeline step.
    logger.info("Report written to: %s", REPORT_PATH)
+
+
+# ------------------------------------------------------------------------------
+# Declarative Pipeline Orchestrator
+# ------------------------------------------------------------------------------
+
+
class PipelineStep(Protocol):
    """Formal protocol for pipeline-compatible callables."""

    # Each step accepts the previous step's output (None for the first
    # step) and returns the payload for the next step.
    def __call__(self, __data: Any) -> Any: ...


def pipeline(*steps: PipelineStep):
    """
    Declaratively compose a pipeline of sequential transformation steps.

    Each step takes the output of the previous one.
    The first receives `None` if it doesn't need input.
    """

    def run(payload: Any = None) -> Any:
        for stage in steps:
            payload = stage(payload)
        return payload

    return run
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
if __name__ == "__main__":
    logger.info("Starting declarative report pipeline...")
    try:
        # Compose the four stages and execute them in order.
        pipeline(
            fetch_data,
            process_data,
            build_report,
            write_report,
        )(None)
        logger.info("Report generation completed successfully.")
    except Exception as e:
        logger.exception("Failed to generate report: %s", e)
        raise SystemExit(1) from e
diff --git a/2025/solid/functional_report_v2.py b/2025/solid/functional_report_v2.py
new file mode 100644
index 0000000..7709ea8
--- /dev/null
+++ b/2025/solid/functional_report_v2.py
@@ -0,0 +1,179 @@
+"""
+===============================================================================
+Sales Report - Functional Implementation
+===============================================================================
+A lightweight, flat functional approach to compute and write a
+sales performance report.
+
+Structure:
+ * "Pure" functions for reading, filtering, and metrics
+ * Simple data flow: read → filter → compute → write
+===============================================================================
+"""
+
+import json
+import logging
+from collections.abc import Callable
+from dataclasses import dataclass, field
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+import pandas as pd
+
+# ------------------------------------------------------------------------------
+# Logging Configuration
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+INPUT_FILE = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_functional.json"
+REPORT_START = datetime(2024, 1, 1)
+REPORT_END = datetime(2024, 12, 31)
+
+# ------------------------------------------------------------------------------
+# Type aliases and config
+# ------------------------------------------------------------------------------
+
+type MetricFn = Callable[[pd.DataFrame], dict[str, Any]]
+
+
+@dataclass
+class ReportConfig:
+    """Encapsulates configuration for the functional report process."""
+
+    # Path to the input CSV file.
+    input_file: Path
+    # Optional inclusive window bounds; None disables that bound.
+    start_date: datetime | None = None
+    end_date: datetime | None = None
+    # Metric callables applied in order; each returns a partial report dict.
+    metrics: list[MetricFn] = field(default_factory=list)
+
+
+# ------------------------------------------------------------------------------
+# Functional Components
+# ------------------------------------------------------------------------------
+
+
+def read_sales(file: Path) -> pd.DataFrame:
+ """Reads the sales CSV data into a pandas DataFrame."""
+
+ logger.info("Reading data from %s", file)
+ return pd.read_csv(file, parse_dates=["date"]) # pyright: ignore[reportUnknownMemberType]
+
+
+def filter_sales(df: pd.DataFrame, start: datetime | None, end: datetime | None) -> pd.DataFrame:
+ """Applies optional start/end-date filters to the dataframe."""
+
+ if start:
+ df = df[df["date"] >= pd.Timestamp(start)]
+ if end:
+ df = df[df["date"] <= pd.Timestamp(end)]
+
+ logger.info("Filtered dataset contains %d records", len(df))
+ return df
+
+
+# ------------------------------------------------------------------------------
+# Metrics Functions
+# ------------------------------------------------------------------------------
+
+
+def customer_count_metric(df: pd.DataFrame) -> dict[str, Any]:
+ """Counts distinct customers."""
+
+ return {"number_of_customers": df["name"].nunique()}
+
+
+def average_order_value_metric(df: pd.DataFrame) -> dict[str, Any]:
+ """Computes the average value of positive (non-return) transactions."""
+
+ sales = df[df["price"] > 0]["price"]
+ avg = sales.mean() if not sales.empty else 0.0
+ return {"average_order_value (pre-tax)": round(avg, 2)}
+
+
+def return_percentage_metric(df: pd.DataFrame) -> dict[str, Any]:
+ """Calculates the percentage of rows representing returns."""
+
+ returns = df[df["price"] < 0]
+ pct = (len(returns) / len(df)) * 100 if len(df) > 0 else 0
+ return {"percentage_of_returns": round(pct, 2)}
+
+
+def total_sales_metric(df: pd.DataFrame) -> dict[str, Any]:
+ """Computes total sales before tax."""
+
+ return {"total_sales_in_period (pre-tax)": round(df["price"].sum(), 2)}
+
+
+# ------------------------------------------------------------------------------
+# Reporting Pipeline
+# ------------------------------------------------------------------------------
+
+
+def generate_report_data(df: pd.DataFrame, config: ReportConfig) -> dict[str, Any]:
+ """Executes all metrics and produces a report dictionary."""
+
+ logger.info("Computing metrics...")
+ result: dict[str, Any] = {}
+ for metric in config.metrics:
+ result.update(metric(df))
+ result["report_start"] = config.start_date.strftime("%Y-%m-%d") if config.start_date else "N/A"
+ result["report_end"] = config.end_date.strftime("%Y-%m-%d") if config.end_date else "N/A"
+
+ logger.info("Metrics computed successfully.")
+ return result
+
+
+def write_report(data: dict[str, Any], file: Path) -> None:
+ """Writes the final report JSON to disk."""
+
+ file.parent.mkdir(parents=True, exist_ok=True)
+ file.write_text(json.dumps(data, indent=2), encoding="utf-8")
+ logger.info("Report written to %s", file)
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+
+def main() -> None:
+ """Runs the full functional data pipeline."""
+
+ config = ReportConfig(
+ input_file=INPUT_FILE,
+ start_date=REPORT_START,
+ end_date=REPORT_END,
+ metrics=[
+ customer_count_metric,
+ average_order_value_metric,
+ return_percentage_metric,
+ total_sales_metric,
+ ],
+ )
+
+ try:
+ df = read_sales(config.input_file)
+ df = filter_sales(df, config.start_date, config.end_date)
+ report = generate_report_data(df, config)
+ write_report(report, OUTPUT_FILE)
+ except Exception as e:
+ logger.exception("Failed to generate report: %s", e)
+ raise SystemExit(1) from e
+
+
+if __name__ == "__main__":
+ main()
diff --git a/2025/solid/logic_report.py b/2025/solid/logic_report.py
new file mode 100644
index 0000000..7f4dd21
--- /dev/null
+++ b/2025/solid/logic_report.py
@@ -0,0 +1,232 @@
+"""
+===============================================================================
+Sales Report — Logic-Based Version
+===============================================================================
+Implements the same declarative logic reporting system, but without relying on
+numeric constraint solvers. Instead, we encode the idea of "positive" and
+"negative" prices as pure logic facts.
+
+All reasoning is relational and declarative; no imperative filtering is used.
+===============================================================================
+"""
+
+import json
+import logging
+from collections.abc import Callable, Sequence
+from dataclasses import dataclass
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Literal, cast
+
+import pandas as pd
+from kanren import Relation, Var, var
+from kanren import conde as _conde # pyright: ignore[reportUnknownVariableType]
+from kanren import facts as _facts # pyright: ignore[reportUnknownVariableType]
+from kanren import run as _run # pyright: ignore[reportUnknownVariableType]
+
+type Goal = Any
+type LogicVar = Var | Any
+type Sign = Literal["positive", "negative", "zero"]
+type ProfitRecord = tuple[str, float]
+type MetricValue = float | int | str
+type ReportDict = dict[str, MetricValue]
+
+CondeFunc = Callable[..., Goal]
+RunFunc = Callable[..., Sequence[Any] | tuple[Any, ...]]
+
+conde: CondeFunc = cast(CondeFunc, _conde)
+facts: Callable[..., None] = cast(Callable[..., None], _facts)
+run: RunFunc = cast(RunFunc, _run)
+
+# -----------------------------------------------------------------------------
+# Logging
+# -----------------------------------------------------------------------------
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] [logic‑pure] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# -----------------------------------------------------------------------------
+# Constants
+# -----------------------------------------------------------------------------
+BASE_DIR = Path(__file__).parent
+DATA_PATH = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_logic.json"
+REPORT_START = datetime(2024, 1, 1)
+REPORT_END = datetime(2024, 12, 31)
+
+
+# -----------------------------------------------------------------------------
+# Data representation
+# -----------------------------------------------------------------------------
+@dataclass
+class Sale:
+    """A single sales transaction row loaded from the CSV."""
+
+    # Customer name.
+    name: str
+    # Item sold.
+    item: str
+    # Transaction date (parsed from the CSV `date` column).
+    date: datetime
+    # Pre-tax price; negative values are treated as returns (see return_rule).
+    price: float
+    # Tax amount for this transaction.
+    tax: float
+
+
+# -----------------------------------------------------------------------------
+# Load CSV data
+# -----------------------------------------------------------------------------
+def load_sales(file_path: Path) -> list[Sale]:
+ df = pd.read_csv(file_path, parse_dates=["date"]) # pyright: ignore
+ df = df.loc[(df["date"] >= REPORT_START) & (df["date"] <= REPORT_END)]
+ logger.info("Loaded %d sales within reporting window.", len(df))
+ return [
+ Sale(row["name"], row["item"], row["date"], row["price"], row["tax"])
+ for _, row in df.iterrows()
+ ]
+
+
+# -----------------------------------------------------------------------------
+# Logic Relations
+# -----------------------------------------------------------------------------
+
+
+def declare_relations(sales: list[Sale]) -> tuple[Relation, Relation]:
+ """
+ Declare logic base facts for sales and price sign.
+
+ sign_rel(price_sign, price)
+ 'price_sign' is one of {"positive", "negative", "zero"}.
+ """
+ sale_rel = Relation()
+ sign_rel = Relation()
+
+ # Declare base sale facts
+ for s in sales:
+ facts(sale_rel, (s.name, s.item, s.date, s.price, s.tax))
+
+ # Build pure logic 'sign' facts — not procedural checks within the logic
+ unique_prices: list[float] = sorted({float(s.price) for s in sales})
+ for price_value in unique_prices:
+ # Outside the logic system, we merely declare new *facts*,
+ # analogous to telling Prolog that sign(positive, 10.0). is true.
+ if price_value > 0:
+ facts(sign_rel, (cast(Sign, "positive"), price_value))
+ elif price_value < 0:
+ facts(sign_rel, (cast(Sign, "negative"), price_value))
+ else:
+ facts(sign_rel, (cast(Sign, "zero"), price_value))
+
+ logger.info("Declared %d sale facts and %d sign facts.", len(sales), len(unique_prices))
+ return sale_rel, sign_rel
+
+
+# -----------------------------------------------------------------------------
+# Logic predicates (pure relations)
+# -----------------------------------------------------------------------------
+def sign_rule(sign_rel: Relation, sign: Sign, price: Any) -> Goal:
+ """Link a price to its logical sign fact."""
+ return cast(Goal, sign_rel(sign, price))
+
+
+def profit_rule(sale_rel: Relation, sign_rel: Relation, name: Any, price: Any) -> Goal:
+ """Logical predicate for profitable sales (positive price)."""
+ return conde(
+ [
+ sale_rel(name, var(), var(), price, var()),
+ sign_rule(sign_rel, cast(Sign, "positive"), price),
+ ]
+ )
+
+
+def return_rule(sale_rel: Relation, sign_rel: Relation, name: Any, price: Any) -> Goal:
+ """Logical predicate for returned sales (negative price)."""
+ return conde(
+ [
+ sale_rel(name, var(), var(), price, var()),
+ sign_rule(sign_rel, cast(Sign, "negative"), price),
+ ]
+ )
+
+
+# -----------------------------------------------------------------------------
+# Logic computations — querying the pure logic model
+# -----------------------------------------------------------------------------
+def compute_metrics(sale_rel: Relation, sign_rel: Relation) -> ReportDict:
+    """Compute metrics via logical queries only."""
+
+    # Fresh logic variables reused across both queries.
+    name: LogicVar = cast(LogicVar, var())
+    price: LogicVar = cast(LogicVar, var())
+
+    # run(0, ...) asks the engine for *all* solutions.
+    # NOTE(review): solutions are reified (name, price) pairs, so two distinct
+    # sales by the same customer at the same price may collapse into a single
+    # result — confirm against kanren's `run` semantics before trusting the
+    # average/percentage figures.
+    profit_raw = cast(
+        Sequence[tuple[Any, Any]],
+        run(0, (name, price), profit_rule(sale_rel, sign_rel, name, price)),
+    )
+    return_raw = cast(
+        Sequence[tuple[Any, Any]],
+        run(0, (name, price), return_rule(sale_rel, sign_rel, name, price)),
+    )
+
+    def _filter_records(records: Sequence[tuple[Any, Any]]) -> list[ProfitRecord]:
+        # Defensive narrowing: keep only well-typed (str, number) pairs.
+        filtered: list[ProfitRecord] = []
+        for raw_name, raw_price in records:
+            if isinstance(raw_name, str) and isinstance(raw_price, (int, float)):
+                filtered.append((raw_name, float(raw_price)))
+        return filtered
+
+    profit_records = _filter_records(profit_raw)
+    return_records = _filter_records(return_raw)
+
+    # Summarize results outside logic
+    profit_customers = {name_value for (name_value, _) in profit_records}
+    return_customers = {name_value for (name_value, _) in return_records}
+    profit_prices = [price_value for (_, price_value) in profit_records]
+    return_prices = [price_value for (_, price_value) in return_records]
+
+    # NOTE(review): customers whose only transactions are zero-priced appear in
+    # neither set, so they are excluded from the customer count (the pandas
+    # variants count every distinct name) — confirm this is intended.
+    num_customers = len(profit_customers | return_customers)
+    avg_order_value = sum(profit_prices) / len(profit_prices) if profit_prices else 0.0
+    # Percentage denominator likewise excludes zero-priced rows.
+    pct_returns = (
+        len(return_prices) / (len(profit_prices) + len(return_prices)) * 100
+        if (profit_prices or return_prices)
+        else 0.0
+    )
+    total_sales = sum(profit_prices) + sum(return_prices)
+
+    return {
+        "number_of_customers": num_customers,
+        "average_order_value (pre-tax)": round(avg_order_value, 2),
+        "percentage_of_returns": round(pct_returns, 2),
+        "total_sales_in_period (pre-tax)": round(total_sales, 2),
+        "report_start": REPORT_START.strftime("%Y-%m-%d"),
+        "report_end": REPORT_END.strftime("%Y-%m-%d"),
+    }
+
+
+# -----------------------------------------------------------------------------
+# Writer
+# -----------------------------------------------------------------------------
+def write_report(report: ReportDict) -> None:
+ OUTPUT_FILE.write_text(json.dumps(report, indent=2), encoding="utf-8")
+ logger.info("Logic report written to %s", OUTPUT_FILE)
+
+
+# -----------------------------------------------------------------------------
+# Entrypoint
+# -----------------------------------------------------------------------------
+def main() -> None:
+ logger.info("Running logic sales report ...")
+ try:
+ sales = load_sales(DATA_PATH)
+ if not sales:
+ logger.warning("No sales data in reporting window.")
+ return
+ sale_rel, sign_rel = declare_relations(sales)
+ report = compute_metrics(sale_rel, sign_rel)
+ write_report(report)
+ logger.info("Logic report generated successfully.")
+ except Exception as exc:
+ logger.exception("Logic computation failed: %s", exc)
+ raise SystemExit(1) from exc
+
+
+if __name__ == "__main__":
+ main()
diff --git a/2025/solid/pyproject.toml b/2025/solid/pyproject.toml
index 9f9cf8a..8c87fae 100644
--- a/2025/solid/pyproject.toml
+++ b/2025/solid/pyproject.toml
@@ -5,4 +5,10 @@ description = "Code refactoring using the SOLID principles"
requires-python = ">=3.13"
dependencies = [
"pandas>=2.3.1",
+ "ruff>=0.14.4",
+ "pandera[pandas]>=0.26.1",
+ "rx>=3.2.0",
+ "pyyaml>=6.0.3",
+ "minikanren>=1.0.5",
+ "aiofiles>=25.1.0",
]
diff --git a/2025/solid/reactive_report.py b/2025/solid/reactive_report.py
new file mode 100644
index 0000000..3523ec9
--- /dev/null
+++ b/2025/solid/reactive_report.py
@@ -0,0 +1,143 @@
+"""
+===============================================================================
+Sales Report - Reactive (Stream-Based) Implementation
+===============================================================================
+Implements a reactive data pipeline using RxPY (Reactive Extensions for Python).
+
+In a reactive system:
+ * Data flows through Observables — streams that emit values.
+ * Transformation steps are implemented as operators (map/filter/aggregate).
+ * The pipeline is executed by subscribing to the observable chain.
+
+Stages (mirroring previous paradigms):
+ 1. fetch_data - Load data from CSV into stream
+ 2. process_data - Filter by date range
+ 3. build_report - Compute aggregate metrics
+ 4. write_report - Persist computed summary to JSON
+===============================================================================
+"""
+
+import json
+import logging
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+import pandas as pd
+from rx import just
+from rx import operators as ops
+
+# ------------------------------------------------------------------------------
+# Logging Configuration
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] [reactive] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+INPUT_FILE = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_reactive.json"
+REPORT_START = datetime(2024, 1, 1)
+REPORT_END = datetime(2024, 12, 31)
+
+# ------------------------------------------------------------------------------
+# Reactive Pipeline Steps
+# ------------------------------------------------------------------------------
+
+
+def fetch_data(file: Path) -> pd.DataFrame:
+ """Reads data from CSV into a pandas DataFrame."""
+
+ logger.info("Reading data from %s", file)
+ df = pd.read_csv(file, parse_dates=["date"]) # pyright: ignore[reportUnknownMemberType]
+ logger.info("Data loaded successfully: %d rows", len(df))
+ return df
+
+
+def process_data(df: pd.DataFrame) -> pd.DataFrame:
+ """Filters DataFrame by start and end date."""
+
+ logger.info(
+ "Filtering dataset from %s to %s",
+ REPORT_START.strftime("%Y-%m-%d"),
+ REPORT_END.strftime("%Y-%m-%d"),
+ )
+ filtered = df.loc[(df["date"] >= REPORT_START) & (df["date"] <= REPORT_END)].copy()
+ logger.info("Filtered dataset contains %d rows", len(filtered))
+ return filtered
+
+
+def build_report(df: pd.DataFrame) -> dict[str, Any]:
+ """Computes high-level report metrics from the dataset."""
+ logger.info("Computing metrics...")
+
+ number_of_customers = int(df["name"].nunique())
+ positive_sales = df.loc[df["price"] > 0, "price"]
+ avg_order_value = positive_sales.mean() if not positive_sales.empty else 0.0
+ returns = df.loc[df["price"] < 0]
+ pct_returns = (len(returns) / len(df)) * 100 if len(df) else 0.0
+ total_sales = df["price"].sum()
+
+ report = {
+ "number_of_customers": number_of_customers,
+ "average_order_value (pre-tax)": round(avg_order_value, 2),
+ "percentage_of_returns": round(pct_returns, 2),
+ "total_sales_in_period (pre-tax)": round(float(total_sales), 2),
+ "report_start": REPORT_START.strftime("%Y-%m-%d"),
+ "report_end": REPORT_END.strftime("%Y-%m-%d"),
+ }
+
+ logger.info("Metric computation complete.")
+ return report
+
+
+def write_report(report: dict[str, Any]) -> None:
+ """Writes final report to disk."""
+ OUTPUT_FILE.write_text(json.dumps(report, indent=2), encoding="utf-8")
+ logger.info("Report written to %s", OUTPUT_FILE)
+
+
+# ------------------------------------------------------------------------------
+# Reactive Pipeline Composition
+# ------------------------------------------------------------------------------
+
+
+def reactive_report_pipeline():
+ """Builds and executes the reactive data pipeline using RxPY."""
+
+ return (
+ just(INPUT_FILE)
+ .pipe(
+ ops.map(fetch_data), # pyright: ignore[reportUnknownMemberType]
+ ops.map(process_data), # pyright: ignore[reportUnknownMemberType]
+ ops.map(build_report), # pyright: ignore[reportUnknownMemberType]
+ )
+ .subscribe(
+ on_next=write_report,
+ on_error=lambda e: logger.exception("Pipeline failed: %s", e), # pyright: ignore[reportUnknownArgumentType, reportUnknownLambdaType]
+ on_completed=lambda: logger.info("Pipeline completed successfully."),
+ )
+ )
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+if __name__ == "__main__":
+ logger.info("Starting reactive sales report pipeline...")
+ try:
+ reactive_report_pipeline()
+ except Exception as e:
+ logger.exception("Unhandled error: %s", e)
+ raise SystemExit(1) from e
diff --git a/2025/solid/report_actor_model.py b/2025/solid/report_actor_model.py
new file mode 100644
index 0000000..7eac4a6
--- /dev/null
+++ b/2025/solid/report_actor_model.py
@@ -0,0 +1,268 @@
+"""
+===============================================================================
+Sales Report - Actor Model Implementation
+===============================================================================
+Implements the report workflow using the Actor Model:
+ * Each actor runs concurrently and communicates via message passing.
+ * Actors never share state; they exchange messages through asyncio queues.
+ * Demonstrates isolated concurrency with cooperative scheduling.
+
+Actors:
+ - ReaderActor : Reads CSV rows asynchronously
+ - FilterActor : Filters rows by date range
+ - MetricsActor : Computes aggregate metrics incrementally
+ - WriterActor : Writes final report to disk
+
+Highlights:
+ * Concurrency through message passing (no shared memory)
+ * Fault isolation between components
+ * Natural fit for distributed or reactive architectures
+===============================================================================
+"""
+
+import asyncio
+import csv
+import json
+import logging
+from dataclasses import dataclass
+from datetime import datetime
+from pathlib import Path
+from typing import Any
+
+import aiofiles
+
+# ------------------------------------------------------------------------------
+# Logging Configuration
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] [actor] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+INPUT_FILE = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_actor.json"
+REPORT_START_DATE = datetime(2024, 1, 1)
+REPORT_END_DATE = datetime(2024, 12, 31)
+
+# ------------------------------------------------------------------------------
+# Actor Base Class
+# ------------------------------------------------------------------------------
+
+
+class Actor:
+ """Base actor defining inbox and cooperative message loop."""
+
+ def __init__(self, name: str):
+ self.name = name
+ self.inbox: asyncio.Queue[Any] = asyncio.Queue()
+ self._task: asyncio.Task[Any] | None = None
+
+ async def send(self, message: Any) -> None:
+ """Send a message to this actor's inbox."""
+ await self.inbox.put(message)
+
+ async def handle(self, message: Any) -> None:
+ """Override in subclasses to define message behavior."""
+ raise NotImplementedError
+
+ async def run(self) -> None:
+ """Continuously process messages until STOP signal."""
+ logger.info("%s started.", self.name)
+ while True:
+ message = await self.inbox.get()
+ if message == "STOP":
+ logger.info("%s stopped.", self.name)
+ return
+ try:
+ await self.handle(message)
+ except Exception as exc:
+ logger.exception("[%s] Error handling message: %s", self.name, exc)
+
+ def start(self) -> None:
+ """Launch the actor event loop in background."""
+ self._task = asyncio.create_task(self.run(), name=f"{self.name}-task")
+
+ async def stop(self) -> None:
+ """Send stop signal and await task completion."""
+ await self.send("STOP")
+ if self._task:
+ await self._task
+
+
+# ------------------------------------------------------------------------------
+# Domain Structures
+# ------------------------------------------------------------------------------
+
+@dataclass
+class SalesRecord:
+    """Minimal projection of a CSV sales row used by the actor pipeline."""
+
+    # Customer name.
+    name: str
+    # Transaction date.
+    date: datetime
+    # Pre-tax price; negative values are counted as returns by MetricsActor.
+    price: float
+
+
+# ------------------------------------------------------------------------------
+# Actor Implementations
+# ------------------------------------------------------------------------------
+
+
+class ReaderActor(Actor):
+ """Reads CSV rows and sends them downstream."""
+
+ def __init__(self, name: str, file_path: Path, next_actor: Actor):
+ super().__init__(name)
+ self.file_path = file_path
+ self.next_actor = next_actor
+
+ async def handle(self, _: Any) -> None:
+ logger.info("[%s] Reading file: %s", self.name, self.file_path)
+ async with aiofiles.open(self.file_path, "r", encoding="utf-8") as afp:
+ header_line = await afp.readline()
+ headers = [h.strip('"') for h in header_line.strip().split(",")]
+ reader = csv.DictReader([], fieldnames=headers)
+
+ async for line in afp:
+ values = next(csv.reader([line]))
+ row = dict(zip(reader.fieldnames, values))
+ try:
+ record = SalesRecord(
+ name=row["name"].strip('"'),
+ date=datetime.fromisoformat(row["date"].strip('"')),
+ price=float(row["price"]),
+ )
+ await self.next_actor.send(record)
+ except Exception as exc:
+ logger.warning("[%s] Skipping malformed row: %s (%s)", self.name, row, exc)
+
+ await self.next_actor.send("EOF")
+
+
+class FilterActor(Actor):
+ """Filters records that fall outside a date range."""
+
+ def __init__(self, name: str, start_date: datetime, end_date: datetime, next_actor: Actor):
+ super().__init__(name)
+ self.start_date = start_date
+ self.end_date = end_date
+ self.next_actor = next_actor
+
+ async def handle(self, message: Any) -> None:
+ if message == "EOF":
+ await self.next_actor.send("EOF")
+ return
+ record: SalesRecord = message
+ if self.start_date <= record.date <= self.end_date:
+ await self.next_actor.send(record)
+
+
+class MetricsActor(Actor):
+ """Accumulates sales metrics incrementally."""
+
+ def __init__(self, name: str, next_actor: Actor):
+ super().__init__(name)
+ self.next_actor = next_actor
+ self.num_customers: set[str] = set()
+ self.positive_prices: list[float] = []
+ self.returns_count: int = 0
+ self.total_records: int = 0
+ self.total_sales: float = 0.0
+
+ async def handle(self, message: Any) -> None:
+ if message == "EOF":
+ await self._finalize()
+ await self.next_actor.send("EOF")
+ return
+
+ record: SalesRecord = message
+ self.total_records += 1
+ self.num_customers.add(record.name)
+ self.total_sales += record.price
+
+ if record.price > 0:
+ self.positive_prices.append(record.price)
+ elif record.price < 0:
+ self.returns_count += 1
+
+ async def _finalize(self) -> None:
+ avg_order_value = (
+ sum(self.positive_prices) / len(self.positive_prices)
+ if self.positive_prices
+ else 0.0
+ )
+ pct_returns = (
+ (self.returns_count / self.total_records) * 100 if self.total_records else 0.0
+ )
+ report = {
+ "number_of_customers": len(self.num_customers),
+ "average_order_value (pre-tax)": round(avg_order_value, 2),
+ "percentage_of_returns": round(pct_returns, 2),
+ "total_sales_in_period (pre-tax)": round(self.total_sales, 2),
+ "report_start": REPORT_START_DATE.strftime("%Y-%m-%d"),
+ "report_end": REPORT_END_DATE.strftime("%Y-%m-%d"),
+ }
+ logger.info("[%s] Metrics computed. Forwarding to writer...", self.name)
+ await self.next_actor.send(report)
+
+
+class WriterActor(Actor):
+ """Receives final report and writes it to disk."""
+
+ def __init__(self, name: str, output_file: Path):
+ super().__init__(name)
+ self.output_file = output_file
+
+ async def handle(self, message: Any) -> None:
+ if message == "EOF":
+ # Writer receives EOF last—it can stop silently
+ return
+ report: dict[str, Any] = message
+ await asyncio.to_thread(self._write_json, report)
+ logger.info("[%s] Report successfully written to %s", self.name, self.output_file)
+
+ def _write_json(self, report: dict[str, Any]) -> None:
+ self.output_file.write_text(json.dumps(report, indent=2), encoding="utf-8")
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+
+async def main() -> None:
+ """Assemble and execute the concurrent actor system."""
+
+ logger.info("Starting Actor Model Sales Report...")
+
+ # Instantiate the actor chain
+ writer = WriterActor("WriterActor", OUTPUT_FILE)
+ metrics = MetricsActor("MetricsActor", next_actor=writer)
+ filterer = FilterActor("FilterActor", REPORT_START_DATE, REPORT_END_DATE, next_actor=metrics)
+ reader = ReaderActor("ReaderActor", INPUT_FILE, next_actor=filterer)
+
+ actors = [reader, filterer, metrics, writer]
+
+ # Start all actors
+ for a in actors:
+ a.start()
+
+ # Kick off processing
+ await reader.send("START")
+
+ # Wait for all to gracefully shut down
+ for a in actors:
+ await a.stop()
+
+ logger.info("Actor Model Sales Report completed successfully.")
+
+
+if __name__ == "__main__":
+ asyncio.run(main())
\ No newline at end of file
diff --git a/2025/solid/report_config.yaml b/2025/solid/report_config.yaml
new file mode 100644
index 0000000..51a7a0f
--- /dev/null
+++ b/2025/solid/report_config.yaml
@@ -0,0 +1,9 @@
+dataset: "sales_data.csv"
+start_date: "2024-01-01"
+end_date: "2024-12-31"
+metrics:
+ - customer_count
+ - average_order_value
+ - return_percentage
+ - total_sales
+output: "output/sales_report_config.json"
\ No newline at end of file
diff --git a/2025/solid/report_dataflow.py b/2025/solid/report_dataflow.py
new file mode 100644
index 0000000..e51282c
--- /dev/null
+++ b/2025/solid/report_dataflow.py
@@ -0,0 +1,189 @@
+"""
+===============================================================================
+Sales Report - Dataflow (DAG) Implementation
+===============================================================================
+Implements the report workflow as a declarative Dataflow Graph (Directed
+Acyclic Graph). Each node represents a data transformation or side effect.
+
+Execution is topologically ordered based on declared dependencies —
+the flow of data, not the order of code, determines execution.
+
+Highlights:
+ * Explicit dependency graph (no implicit control flow)
+ * Reusable dataflow engine
+ * Great for illustrating data-oriented design patterns
+===============================================================================
+"""
+
+import json
+import logging
+from datetime import datetime
+from pathlib import Path
+from typing import Any, Callable
+
+import pandas as pd
+
+# ------------------------------------------------------------------------------
+# Logging
+# ------------------------------------------------------------------------------
+
+logging.basicConfig(
+ level=logging.INFO,
+ format="%(asctime)s [%(levelname)s] [dataflow] %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+logger = logging.getLogger(__name__)
+
+# ------------------------------------------------------------------------------
+# Constants
+# ------------------------------------------------------------------------------
+
+BASE_DIR = Path(__file__).parent
+INPUT_FILE = BASE_DIR / "sales_data.csv"
+OUTPUT_DIR = BASE_DIR / "output"
+OUTPUT_DIR.mkdir(exist_ok=True)
+OUTPUT_FILE = OUTPUT_DIR / "sales_report_dataflow.json"
+REPORT_START = datetime(2024, 1, 1)
+REPORT_END = datetime(2024, 12, 31)
+
+# ------------------------------------------------------------------------------
+# Dataflow Engine
+# ------------------------------------------------------------------------------
+
+
+class Node:
+ """Represents a single transformation node in the DAG."""
+
+ def __init__(
+ self,
+ name: str,
+ func: Callable[..., Any],
+ dependencies: list[str] | None = None,
+ ):
+ self.name = name
+ self.func = func
+ self.dependencies = dependencies or []
+ self.result: Any = None
+
+
+class DAG:
+ """Lightweight dataflow DAG executor."""
+
+ def __init__(self):
+ self.nodes: dict[str, Node] = {}
+
+ def add(self, name: str, func: Callable[..., Any], deps: list[str] | None = None):
+ if name in self.nodes:
+ raise ValueError(f"Node '{name}' already exists.")
+ self.nodes[name] = Node(name, func, deps)
+
+ def _resolve_order(self) -> list[Node]:
+ """Topologically sort nodes based on declared dependencies."""
+ visited: set[str] = set()
+ order: list[Node] = []
+
+ def visit(n: str):
+ if n in visited:
+ return
+ node = self.nodes.get(n)
+ if not node:
+ raise ValueError(f"Node '{n}' not found in DAG.")
+ for dep in node.dependencies:
+ visit(dep)
+ visited.add(n)
+ order.append(node)
+
+ for name in self.nodes:
+ visit(name)
+
+ return order
+
+ def run(self):
+ """Execute nodes in topological order, passing results between them."""
+ logger.info("Executing dataflow pipeline...")
+ order = self._resolve_order()
+ results: dict[str, Any] = {}
+
+ for node in order:
+ kwargs = {dep: results[dep] for dep in node.dependencies}
+ logger.info("→ Running %s ...", node.name)
+ node.result = node.func(**kwargs)
+ results[node.name] = node.result
+
+ logger.info("Dataflow pipeline completed successfully.")
+ return results
+
+
+# ------------------------------------------------------------------------------
+# Step Functions (pure transformations)
+# ------------------------------------------------------------------------------
+
+
def read_data() -> pd.DataFrame:
    """Load the raw sales CSV, parsing the ``date`` column as datetimes."""
    logger.info("Reading CSV from %s", INPUT_FILE)
    frame = pd.read_csv(INPUT_FILE, parse_dates=["date"])
    logger.info("Loaded %d records.", len(frame))
    return frame
+
+
def filter_data(read_data: pd.DataFrame) -> pd.DataFrame:
    """Keep only rows whose date falls inside the reporting window (inclusive)."""
    logger.info("Filtering dates between %s and %s", REPORT_START, REPORT_END)
    in_window = read_data["date"].between(
        pd.Timestamp(REPORT_START), pd.Timestamp(REPORT_END)
    )
    filtered = read_data[in_window].copy()
    logger.info("Filtered %d records.", len(filtered))
    return filtered
+
+
def compute_metrics(filter_data: pd.DataFrame) -> dict[str, Any]:
    """Compute the four report metrics plus the reporting window bounds."""
    logger.info("Computing metrics declaratively...")

    prices = filter_data["price"]
    sold = prices[prices > 0]      # positive prices are sales
    refunded = prices[prices < 0]  # negative prices are returns

    customer_count = int(filter_data["name"].nunique())
    avg_order = sold.mean() if not sold.empty else 0.0
    total_rows = len(filter_data)
    returns_pct = (len(refunded) / total_rows) * 100 if total_rows else 0.0
    period_total = prices.sum()

    return {
        "number_of_customers": customer_count,
        "average_order_value (pre-tax)": round(avg_order, 2),
        "percentage_of_returns": round(returns_pct, 2),
        "total_sales_in_period (pre-tax)": round(period_total, 2),
        "report_start": REPORT_START.strftime("%Y-%m-%d"),
        "report_end": REPORT_END.strftime("%Y-%m-%d"),
    }
+
+
def write_report(compute_metrics: dict[str, Any]) -> None:
    """Persist the computed metrics as pretty-printed JSON."""
    payload = json.dumps(compute_metrics, indent=2)
    OUTPUT_FILE.write_text(payload, encoding="utf-8")
    logger.info("Report written to %s", OUTPUT_FILE)
+
+
+# ------------------------------------------------------------------------------
+# Entrypoint
+# ------------------------------------------------------------------------------
+
+
def main() -> None:
    """Construct the dataflow DAG declaratively and execute it."""
    logger.info("Starting Dataflow (DAG) Sales Report...")

    # (name, function, dependencies) — the graph in tabular form.
    steps = [
        ("read_data", read_data, None),
        ("filter_data", filter_data, ["read_data"]),
        ("compute_metrics", compute_metrics, ["filter_data"]),
        ("write_report", write_report, ["compute_metrics"]),
    ]

    dag = DAG()
    for node_name, fn, deps in steps:
        dag.add(node_name, fn, deps=deps)

    try:
        dag.run()
    except Exception as e:
        logger.exception("Dataflow pipeline failed: %s", e)
        raise SystemExit(1) from e


if __name__ == "__main__":
    main()
\ No newline at end of file
diff --git a/2025/solid/run_reports.sh b/2025/solid/run_reports.sh
new file mode 100755
index 0000000..05f90e5
--- /dev/null
+++ b/2025/solid/run_reports.sh
@@ -0,0 +1,551 @@
#!/usr/bin/env bash

# Abort on errors (-e), undefined variables (-u), and failures anywhere in a
# pipeline (-o pipefail).
set -euo pipefail

# ============================================================================
# Configuration
# ============================================================================

# Resolve the directory containing this script and run everything from there,
# so relative paths (scripts, logs, output) work regardless of the caller's CWD.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
cd "$SCRIPT_DIR"

# Logging configuration
# Every value below can be overridden via the environment; defaults otherwise.
LOG_DIR="${LOG_DIR:-$SCRIPT_DIR/logs}"
LOG_FILE="${LOG_FILE:-$LOG_DIR/run_reports_$(date +%Y%m%d_%H%M%S).log}"
VERBOSE="${VERBOSE:-false}"
QUIET="${QUIET:-false}"

# Execution configuration
CONTINUE_ON_ERROR="${CONTINUE_ON_ERROR:-false}"
TIMEOUT="${TIMEOUT:-0}" # 0 = no timeout
MAX_JOBS="${MAX_JOBS:-1}" # 1 = sequential

# Report mappings (can be moved to config file)
# Maps a short CLI key to the Python script implementing that paradigm.
declare -A REPORTS=(
    ["messy"]="messy_report.py"
    ["class-based"]="class_based_report.py"
    ["class-based-v2"]="class_based_report_v2.py"
    ["functional"]="functional_report.py"
    ["functional-v2"]="functional_report_v2.py"
    ["declarative"]="declarative_report.py"
    ["config"]="config_report.py"
    ["async"]="async_report.py"
    ["async-no-pandas"]="async_no_pandas_report.py"
    ["dataflow"]="report_dataflow.py"
    ["actor"]="report_actor_model.py"
    ["reactive"]="reactive_report.py"
    ["logic"]="logic_report.py"
)

# Execution tracking
# Per-report timing and exit codes, plus aggregate counters for the summary.
declare -A REPORT_START_TIMES
declare -A REPORT_END_TIMES
declare -A REPORT_EXIT_CODES
SUCCESS_COUNT=0
FAILURE_COUNT=0
START_TIME=0
END_TIME=0
+
+# ============================================================================
+# Logging Functions
+# ============================================================================
+
init_logging() {
    # Make sure the log directory exists and is writable before any log()
    # call tries to append to it.
    mkdir -p "$LOG_DIR"
    [ -w "$LOG_DIR" ] || {
        echo "Error: Cannot write to log directory: $LOG_DIR" >&2
        exit 1
    }
}
+
log() {
    # Format one log entry and route it: ERROR/WARN also hit the terminal
    # (unless --quiet); every level is appended to the log file.
    local level="$1"
    shift
    local entry
    entry="[$(date +'%Y-%m-%d %H:%M:%S')] [$level] $*"

    if [ "$QUIET" = false ]; then
        # INFO and DEBUG stay file-only to keep the terminal uncluttered.
        if [ "$level" = "ERROR" ] || [ "$level" = "WARN" ]; then
            echo "$entry" >&2
        fi
    fi

    # The log file always receives every entry.
    echo "$entry" >> "$LOG_FILE"
}
+
# Convenience wrappers: one per severity level.
log_info() { log "INFO" "$@"; }
log_warn() { log "WARN" "$@"; }
log_error() { log "ERROR" "$@"; }
# DEBUG messages are recorded only when VERBOSE=true.
log_debug() {
    if [ "$VERBOSE" = true ]; then
        log "DEBUG" "$@"
    fi
}
+
+# ============================================================================
+# Signal Handling
+# ============================================================================
+
# Handle SIGINT/SIGTERM: stop child jobs, emit a partial summary, and exit
# with the conventional interrupted-by-signal code (130 = 128 + SIGINT).
cleanup() {
    local exit_code="${1:-130}"
    log_warn "Received interrupt signal. Cleaning up..."

    # Kill any running background jobs
    # (-r stops xargs from invoking kill with no arguments; '|| true' keeps
    # set -e from aborting the handler itself)
    jobs -p | xargs -r kill 2>/dev/null || true

    # Print partial summary
    # Only if execution had actually started (START_TIME is set in main).
    if [ $START_TIME -gt 0 ]; then
        END_TIME=$(date +%s)
        print_summary
    fi

    exit "$exit_code"
}

trap 'cleanup 130' SIGINT SIGTERM
+
+# ============================================================================
+# Validation Functions
+# ============================================================================
+
validate_report_key() {
    # Confirm a key is well-formed, known, and maps to a readable script.
    local key="$1"
    local script

    # Check key format (alphanumeric and hyphens only)
    [[ "$key" =~ ^[a-z0-9-]+$ ]] || {
        log_error "Invalid report key format: '$key' (must be lowercase alphanumeric with hyphens)"
        return 1
    }

    # Check key exists in REPORTS array
    [ -n "${REPORTS[$key]:-}" ] || {
        log_error "Unknown report key: '$key'"
        return 1
    }

    script="${REPORTS[$key]}"

    # Check script file exists
    [ -f "$script" ] || {
        log_error "Report script not found: '$script' for key '$key'"
        return 1
    }

    # Check script is readable
    [ -r "$script" ] || {
        log_error "Report script is not readable: '$script'"
        return 1
    }

    return 0
}
+
validate_environment() {
    # Sanity-check the toolchain before launching any reports.
    log_debug "Validating environment..."

    # A python interpreter is mandatory.
    command -v python >/dev/null 2>&1 || {
        log_error "Python interpreter not found"
        return 1
    }

    # Record the interpreter version for debugging.
    local python_version
    python_version=$(python --version 2>&1 | awk '{print $2}')
    log_debug "Python version: $python_version"

    # A lockfile without uv is survivable — warn and fall back.
    if [ -f "uv.lock" ] && ! command -v uv >/dev/null 2>&1; then
        log_warn "uv.lock found but uv not available. Using system Python."
    fi

    return 0
}
+
validate_all_report_keys() {
    # Validate every requested key, reporting all offenders at once.
    local key
    local invalid_keys=()

    for key in "$@"; do
        validate_report_key "$key" || invalid_keys+=("$key")
    done

    if [ ${#invalid_keys[@]} -gt 0 ]; then
        log_error "Invalid report keys: ${invalid_keys[*]}"
        return 1
    fi

    return 0
}
+
+# ============================================================================
+# Execution Functions
+# ============================================================================
+
determine_runner() {
    # Prefer the uv-managed environment when both uv and a lockfile exist;
    # otherwise fall back to whatever 'python' is on PATH.
    if command -v uv >/dev/null 2>&1 && [ -f "$SCRIPT_DIR/uv.lock" ]; then
        RUNNER=(uv run python)
        log_debug "Using uv runner"
        return
    fi
    RUNNER=(python)
    log_debug "Using system Python"
}
+
# Execute one report script, capturing timing, exit code, and all of its
# output into the log file. Honors DRY_RUN, QUIET, VERBOSE, and TIMEOUT.
# NOTE(review): callers invoke this inside an 'if !' condition, which disables
# errexit for the whole body — the '|| exit_code=$?' captures rely on that.
run_report_with_timeout() {
    local key="$1"
    local script="${REPORTS[$key]}"
    local cmd=("${RUNNER[@]}" "$script")
    # NOTE(review): 'pid' is declared but never assigned or used here —
    # confirm before removing.
    local pid
    local exit_code=0

    REPORT_START_TIMES[$key]=$(date +%s)
    log_info "Starting report: $key ($script)"
    log_debug "Command: ${cmd[*]}"

    # Show minimal progress on terminal (only if not quiet)
    if [ "$QUIET" = false ] && [ "$DRY_RUN" = false ]; then
        echo -n "Running $key... " >&2
    fi

    if [ "$DRY_RUN" = true ]; then
        log_info "[DRY RUN] Would execute: ${cmd[*]}"
        if [ "$QUIET" = false ]; then
            echo "[DRY RUN]" >&2
        fi
        # Record a synthetic success so the summary still covers dry runs.
        REPORT_EXIT_CODES[$key]=0
        REPORT_END_TIMES[$key]=$(date +%s)
        return 0
    fi

    # Add separator in log file for this report's output
    {
        echo ""
        echo "--- Report: $key ($script) ---"
        echo "Started at: $(date +'%Y-%m-%d %H:%M:%S')"
        echo ""
    } >> "$LOG_FILE"

    # Run with timeout if specified
    # By default, Python output only goes to log file (not terminal) to reduce clutter
    # Use --verbose to see Python output on terminal
    if [ "$TIMEOUT" -gt 0 ]; then
        if [ "$VERBOSE" = true ] && [ "$QUIET" = false ]; then
            # Show output on terminal and append to log file (verbose mode)
            # PIPESTATUS[0] recovers the report's own exit code, which tee's
            # status would otherwise mask.
            timeout "$TIMEOUT" "${cmd[@]}" 2>&1 | tee -a "$LOG_FILE"
            exit_code=${PIPESTATUS[0]}
        else
            # Only append to log file (default behavior)
            timeout "$TIMEOUT" "${cmd[@]}" >> "$LOG_FILE" 2>&1 || exit_code=$?
        fi

        # GNU timeout exits with 124 when the command exceeded the limit.
        if [ $exit_code -eq 124 ]; then
            log_error "Report '$key' timed out after ${TIMEOUT}s"
            exit_code=124
        fi
    else
        if [ "$VERBOSE" = true ] && [ "$QUIET" = false ]; then
            # Show output on terminal and append to log file (verbose mode)
            "${cmd[@]}" 2>&1 | tee -a "$LOG_FILE"
            exit_code=${PIPESTATUS[0]}
        else
            # Only append to log file (default behavior)
            "${cmd[@]}" >> "$LOG_FILE" 2>&1 || exit_code=$?
        fi
    fi

    # Add separator in log file for end of report output
    {
        echo ""
        echo "--- End of report: $key (exit code: $exit_code) ---"
        echo ""
    } >> "$LOG_FILE"

    REPORT_END_TIMES[$key]=$(date +%s)
    REPORT_EXIT_CODES[$key]=$exit_code

    local duration=$((REPORT_END_TIMES[$key] - REPORT_START_TIMES[$key]))

    # Show minimal status on terminal
    if [ "$QUIET" = false ]; then
        if [ $exit_code -eq 0 ]; then
            echo "✓" >&2
        else
            echo "✗" >&2
        fi
    fi

    if [ $exit_code -eq 0 ]; then
        log_info "Completed report: $key (duration: ${duration}s)"
        # '|| true' guards the arithmetic post-increment, whose expression
        # evaluates to 0 (falsey) when the counter starts at 0 under set -e.
        ((SUCCESS_COUNT++)) || true
        return 0
    else
        log_error "Failed report: $key (exit code: $exit_code, duration: ${duration}s)"
        ((FAILURE_COUNT++)) || true
        return $exit_code
    fi
}
+
run_report() {
    # Validate then execute one report. The return code tells the caller's
    # loop whether to keep going.
    local key="$1"

    validate_report_key "$key" || return 1

    if run_report_with_timeout "$key"; then
        return 0
    fi

    # The report failed: abort unless the user asked to press on.
    if [ "$CONTINUE_ON_ERROR" = false ]; then
        log_error "Stopping execution due to failure (use --continue-on-error to continue)"
        return 1
    fi
    return 0
}
+
+# ============================================================================
+# Summary & Reporting
+# ============================================================================
+
# Assemble the run summary once as a string, then emit it to the log file
# and (unless --quiet) the terminal. Also called from the signal handler,
# so it must work with partially-populated tracking arrays.
print_summary() {
    local total=$((SUCCESS_COUNT + FAILURE_COUNT))
    local duration=0

    # Wall-clock duration is only meaningful once both timestamps were set.
    if [ $START_TIME -gt 0 ] && [ $END_TIME -gt 0 ]; then
        duration=$((END_TIME - START_TIME))
    fi

    # Build summary text
    local summary=""
    summary+=$'\n'
    summary+="========================================"$'\n'
    summary+="Execution Summary"$'\n'
    summary+="========================================"$'\n'
    summary+="Total reports: $total"$'\n'
    summary+="Successful: $SUCCESS_COUNT"$'\n'
    summary+="Failed: $FAILURE_COUNT"$'\n'

    if [ $duration -gt 0 ]; then
        summary+="Total duration: ${duration}s"$'\n'
    fi

    # List each failed report with its exit code and runtime.
    if [ $FAILURE_COUNT -gt 0 ]; then
        summary+=$'\n'
        summary+="Failed reports:"$'\n'
        for key in "${!REPORT_EXIT_CODES[@]}"; do
            if [ "${REPORT_EXIT_CODES[$key]}" -ne 0 ]; then
                local duration_key=$((REPORT_END_TIMES[$key] - REPORT_START_TIMES[$key]))
                summary+=" - $key (exit code: ${REPORT_EXIT_CODES[$key]}, duration: ${duration_key}s)"$'\n'
            fi
        done
    fi

    summary+="========================================"$'\n'
    summary+="Log file: $LOG_FILE"$'\n'
    summary+="========================================"$'\n'

    # Write to log file
    echo "$summary" >> "$LOG_FILE"

    # Also show on terminal (unless quiet)
    if [ "$QUIET" = false ]; then
        echo "$summary"
    fi
}
+
+# ============================================================================
+# Usage & Help
+# ============================================================================
+
usage() {
    # Print CLI help. The heredoc delimiter is quoted ('EOF') so nothing
    # inside is expanded. Value-taking options show their argument
    # placeholder (the original text had lost them, e.g. "--run [--run ]").
    cat <<'EOF'
Usage: ./run_reports.sh [OPTIONS]

Run one or more sales-report implementations from this directory.

Options:
  --list                Show available report keys.
  --run KEY             Run a report by key (repeat --run KEY for several).
  --run-all             Run every report sequentially.
  --dry-run             Print the commands without executing them.
  --continue-on-error   Continue execution even if a report fails.
  --timeout SECONDS     Set timeout per report (0 = no timeout).
  --max-jobs N          Run up to N reports in parallel (default: 1).
  --verbose             Enable verbose/debug logging.
  --quiet               Suppress stdout (errors still shown).
  --log-dir PATH        Specify log directory (default: ./logs).
  -h, --help            Show this help message.

Environment Variables:
  CONTINUE_ON_ERROR     Continue on error (true/false)
  TIMEOUT               Timeout per report in seconds
  MAX_JOBS              Maximum parallel jobs
  VERBOSE               Enable verbose logging
  QUIET                 Suppress stdout
  LOG_DIR               Log directory path

Examples:
  ./run_reports.sh --list
  ./run_reports.sh --run functional --run logic
  ./run_reports.sh --run-all
  ./run_reports.sh --run-all --continue-on-error --timeout 300
  ./run_reports.sh --run-all --max-jobs 4 --verbose

Reports execute inside a uv environment when available; otherwise the
system python interpreter is used.
EOF
}
+
list_reports() {
    # Emit "key script" pairs, flagging keys whose script is absent; the
    # loop's output is piped through sort for stable, alphabetical listing.
    local key script status
    printf "Available reports:\n"
    for key in "${!REPORTS[@]}"; do
        script="${REPORTS[$key]}"
        status=""
        if [ ! -f "$script" ]; then
            status=" [MISSING]"
        fi
        printf " %-20s %s%s\n" "$key" "$script" "$status"
    done | sort
}
+
+# ============================================================================
+# Main
+# ============================================================================
+
main() {
    # Initialize logging first so parse errors can still be recorded.
    init_logging
    log_info "Starting report execution"
    log_debug "Script directory: $SCRIPT_DIR"
    log_debug "Log file: $LOG_FILE"

    # Parse arguments
    RUN_ALL=false
    DRY_RUN=false
    REQUESTED_KEYS=()
    local log_dir_overridden=false

    while (($# > 0)); do
        case "$1" in
            --list)
                list_reports
                exit 0
                ;;
            --run)
                # Check BEFORE shifting: with the old 'shift || ...' pattern,
                # a value-taking flag given as the LAST token shifted fine and
                # then "$1" was unset — crashing with a set -u error instead
                # of the friendly message.
                [ $# -ge 2 ] || { log_error "--run requires a report key"; exit 1; }
                shift
                REQUESTED_KEYS+=("$1")
                ;;
            --run-all)
                RUN_ALL=true
                ;;
            --dry-run)
                DRY_RUN=true
                ;;
            --continue-on-error)
                CONTINUE_ON_ERROR=true
                ;;
            --timeout)
                [ $# -ge 2 ] || { log_error "--timeout requires a value"; exit 1; }
                shift
                TIMEOUT="$1"
                if ! [[ "$TIMEOUT" =~ ^[0-9]+$ ]]; then
                    log_error "Invalid timeout value: $TIMEOUT (must be a number)"
                    exit 1
                fi
                ;;
            --max-jobs)
                [ $# -ge 2 ] || { log_error "--max-jobs requires a value"; exit 1; }
                shift
                MAX_JOBS="$1"
                if ! [[ "$MAX_JOBS" =~ ^[0-9]+$ ]] || [ "$MAX_JOBS" -lt 1 ]; then
                    log_error "Invalid max-jobs value: $MAX_JOBS (must be >= 1)"
                    exit 1
                fi
                ;;
            --verbose)
                VERBOSE=true
                ;;
            --quiet)
                QUIET=true
                ;;
            --log-dir)
                [ $# -ge 2 ] || { log_error "--log-dir requires a path"; exit 1; }
                shift
                LOG_DIR="$1"
                log_dir_overridden=true
                ;;
            -h|--help)
                usage
                exit 0
                ;;
            *)
                log_error "Unknown option: $1"
                echo ""
                usage
                exit 1
                ;;
        esac
        shift
    done

    # Honor --log-dir: LOG_FILE was derived from the original LOG_DIR before
    # parsing, so re-point it and create the new directory now. (Previously
    # the flag had no effect because init_logging had already run.)
    if [ "$log_dir_overridden" = true ]; then
        LOG_FILE="$LOG_DIR/run_reports_$(date +%Y%m%d_%H%M%S).log"
        init_logging
        log_info "Logging to $LOG_FILE"
    fi

    # Validate inputs
    if [ "$RUN_ALL" = false ] && [ "${#REQUESTED_KEYS[@]}" -eq 0 ]; then
        log_error "No reports specified."
        echo ""
        usage
        exit 1
    fi

    # --run-all expands to every known key in stable sorted order.
    if [ "$RUN_ALL" = true ]; then
        mapfile -t REQUESTED_KEYS < <(printf "%s\n" "${!REPORTS[@]}" | sort)
    fi

    # Validate environment and report keys
    if ! validate_environment; then
        exit 1
    fi

    if ! validate_all_report_keys "${REQUESTED_KEYS[@]}"; then
        exit 1
    fi

    # Choose between 'uv run python' and plain 'python'.
    determine_runner

    # Execute reports sequentially; STATUS remembers any failure even when
    # --continue-on-error keeps the loop going.
    START_TIME=$(date +%s)
    STATUS=0

    for key in "${REQUESTED_KEYS[@]}"; do
        if ! run_report "$key"; then
            STATUS=1
            if [ "$CONTINUE_ON_ERROR" = false ]; then
                break
            fi
        fi
    done

    END_TIME=$(date +%s)

    # Print summary
    print_summary

    # Exit non-zero if anything failed.
    if [ $FAILURE_COUNT -gt 0 ]; then
        exit 1
    fi

    exit "$STATUS"
}

# Run main function
main "$@"
diff --git a/2025/solid/uv.lock b/2025/solid/uv.lock
index 37b43f4..e48f078 100644
--- a/2025/solid/uv.lock
+++ b/2025/solid/uv.lock
@@ -1,57 +1,157 @@
version = 1
-revision = 2
+revision = 1
requires-python = ">=3.13"
+[[package]]
+name = "aiofiles"
+version = "25.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/41/c3/534eac40372d8ee36ef40df62ec129bee4fdb5ad9706e58a29be53b2c970/aiofiles-25.1.0.tar.gz", hash = "sha256:a8d728f0a29de45dc521f18f07297428d56992a742f0cd2701ba86e44d23d5b2", size = 46354 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bc/8a/340a1555ae33d7354dbca4faa54948d76d89a27ceef032c8c3bc661d003e/aiofiles-25.1.0-py3-none-any.whl", hash = "sha256:abe311e527c862958650f9438e859c1fa7568a141b22abcd015e120e86a85695", size = 14668 },
+]
+
+[[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 },
+]
+
+[[package]]
+name = "cons"
+version = "0.4.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "logical-unification" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ae/20/0eca1dcdbac64a570e60df66119847f94cdd513178d9c222c15101ca1022/cons-0.4.7.tar.gz", hash = "sha256:0a96cd2abd6a9f494816c1272cf5583a960041750c2d7a48eeeccd47ce369dfd", size = 8690 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/a5/9f/bffa3362895e5437d9d12e3bbd242f86d91af1d7cd26f6e14ebb6376581b/cons-0.4.7-py3-none-any.whl", hash = "sha256:e38ee12cf703559ea744c94f725bee0e2329f32daf0249b49db1b0437cc6cb94", size = 8603 },
+]
+
+[[package]]
+name = "etuples"
+version = "0.3.10"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cons" },
+ { name = "multipledispatch" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/42/c0/ba049efa7d216221713cffc303641bd73bbb309ff0e4e2a623f32af2a4ea/etuples-0.3.10.tar.gz", hash = "sha256:26fde81d7e822837146231bfce4d6ba67eab5d7ed55bc58ba7437c2568051167", size = 21493 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/39/19/bf11636df040a9f9c3fd6959aedea5b5cfddd751272732278fb04ee0a78c/etuples-0.3.10-py3-none-any.whl", hash = "sha256:4408c7940ef06af52dbbea0954a8a1817ed5750ce905ff48091ac3cd3aeb720b", size = 12201 },
+]
+
+[[package]]
+name = "logical-unification"
+version = "0.4.7"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "multipledispatch" },
+ { name = "toolz" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b0/5d/37673e494a4eed550785ad1268df0202e69aa081bcbf7c0aafd0a853b0fc/logical_unification-0.4.7.tar.gz", hash = "sha256:3d73b263a870827b3f52d89c94f3336afd7fcaecf1e0c67fa18e73025399775c", size = 13513 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/b1/d0/337b3c49cbe742ab5c118d14730fbc7b14b57d1a130d4f39efaa9ec04226/logical_unification-0.4.7-py3-none-any.whl", hash = "sha256:077f49e32693bc66a418f08c1de540f55b5a20f237ffb80ea85d99bfc6139c3b", size = 13469 },
+]
+
+[[package]]
+name = "minikanren"
+version = "1.0.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "cons" },
+ { name = "etuples" },
+ { name = "logical-unification" },
+ { name = "multipledispatch" },
+ { name = "toolz" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ab/3d/bbab3c19771efbfafc52de98db8ad7cf3c2c444bbbd7241c2b06e9f305bc/minikanren-1.0.5.tar.gz", hash = "sha256:c030e3e9a3fa5f372f84b66966776a8dc63b16b98768b78be0401982b892e00d", size = 21699 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/bb/02/5e9ae831946db26f172e03e896fe83b07c5ca643df2b32c1b81557f0e77f/minikanren-1.0.5-py3-none-any.whl", hash = "sha256:22c24f4fdf009a56e30655787af45c90f0704bcc24e8d3e651378675b4bccb21", size = 24072 },
+]
+
+[[package]]
+name = "multipledispatch"
+version = "1.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/fe/3e/a62c3b824c7dec33c4a1578bcc842e6c30300051033a4e5975ed86cc2536/multipledispatch-1.0.0.tar.gz", hash = "sha256:5c839915465c68206c3e9c473357908216c28383b425361e5d144594bf85a7e0", size = 12385 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/51/c0/00c9809d8b9346eb238a6bbd5f83e846a4ce4503da94a4c08cb7284c325b/multipledispatch-1.0.0-py3-none-any.whl", hash = "sha256:0c53cd8b077546da4e48869f49b13164bebafd0c2a5afceb6bb6a316e7fb46e4", size = 12818 },
+]
+
+[[package]]
+name = "mypy-extensions"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963 },
+]
+
[[package]]
name = "numpy"
version = "2.3.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306, upload-time = "2025-07-24T21:32:07.553Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/1c/c0/c6bb172c916b00700ed3bf71cb56175fd1f7dbecebf8353545d0b5519f6c/numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3", size = 20949074, upload-time = "2025-07-24T20:43:07.813Z" },
- { url = "https://files.pythonhosted.org/packages/20/4e/c116466d22acaf4573e58421c956c6076dc526e24a6be0903219775d862e/numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b", size = 14177311, upload-time = "2025-07-24T20:43:29.335Z" },
- { url = "https://files.pythonhosted.org/packages/78/45/d4698c182895af189c463fc91d70805d455a227261d950e4e0f1310c2550/numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6", size = 5106022, upload-time = "2025-07-24T20:43:37.999Z" },
- { url = "https://files.pythonhosted.org/packages/9f/76/3e6880fef4420179309dba72a8c11f6166c431cf6dee54c577af8906f914/numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089", size = 6640135, upload-time = "2025-07-24T20:43:49.28Z" },
- { url = "https://files.pythonhosted.org/packages/34/fa/87ff7f25b3c4ce9085a62554460b7db686fef1e0207e8977795c7b7d7ba1/numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2", size = 14278147, upload-time = "2025-07-24T20:44:10.328Z" },
- { url = "https://files.pythonhosted.org/packages/1d/0f/571b2c7a3833ae419fe69ff7b479a78d313581785203cc70a8db90121b9a/numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f", size = 16635989, upload-time = "2025-07-24T20:44:34.88Z" },
- { url = "https://files.pythonhosted.org/packages/24/5a/84ae8dca9c9a4c592fe11340b36a86ffa9fd3e40513198daf8a97839345c/numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee", size = 16053052, upload-time = "2025-07-24T20:44:58.872Z" },
- { url = "https://files.pythonhosted.org/packages/57/7c/e5725d99a9133b9813fcf148d3f858df98511686e853169dbaf63aec6097/numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6", size = 18577955, upload-time = "2025-07-24T20:45:26.714Z" },
- { url = "https://files.pythonhosted.org/packages/ae/11/7c546fcf42145f29b71e4d6f429e96d8d68e5a7ba1830b2e68d7418f0bbd/numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b", size = 6311843, upload-time = "2025-07-24T20:49:24.444Z" },
- { url = "https://files.pythonhosted.org/packages/aa/6f/a428fd1cb7ed39b4280d057720fed5121b0d7754fd2a9768640160f5517b/numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56", size = 12782876, upload-time = "2025-07-24T20:49:43.227Z" },
- { url = "https://files.pythonhosted.org/packages/65/85/4ea455c9040a12595fb6c43f2c217257c7b52dd0ba332c6a6c1d28b289fe/numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2", size = 10192786, upload-time = "2025-07-24T20:49:59.443Z" },
- { url = "https://files.pythonhosted.org/packages/80/23/8278f40282d10c3f258ec3ff1b103d4994bcad78b0cba9208317f6bb73da/numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab", size = 21047395, upload-time = "2025-07-24T20:45:58.821Z" },
- { url = "https://files.pythonhosted.org/packages/1f/2d/624f2ce4a5df52628b4ccd16a4f9437b37c35f4f8a50d00e962aae6efd7a/numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2", size = 14300374, upload-time = "2025-07-24T20:46:20.207Z" },
- { url = "https://files.pythonhosted.org/packages/f6/62/ff1e512cdbb829b80a6bd08318a58698867bca0ca2499d101b4af063ee97/numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a", size = 5228864, upload-time = "2025-07-24T20:46:30.58Z" },
- { url = "https://files.pythonhosted.org/packages/7d/8e/74bc18078fff03192d4032cfa99d5a5ca937807136d6f5790ce07ca53515/numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286", size = 6737533, upload-time = "2025-07-24T20:46:46.111Z" },
- { url = "https://files.pythonhosted.org/packages/19/ea/0731efe2c9073ccca5698ef6a8c3667c4cf4eea53fcdcd0b50140aba03bc/numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8", size = 14352007, upload-time = "2025-07-24T20:47:07.1Z" },
- { url = "https://files.pythonhosted.org/packages/cf/90/36be0865f16dfed20f4bc7f75235b963d5939707d4b591f086777412ff7b/numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a", size = 16701914, upload-time = "2025-07-24T20:47:32.459Z" },
- { url = "https://files.pythonhosted.org/packages/94/30/06cd055e24cb6c38e5989a9e747042b4e723535758e6153f11afea88c01b/numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91", size = 16132708, upload-time = "2025-07-24T20:47:58.129Z" },
- { url = "https://files.pythonhosted.org/packages/9a/14/ecede608ea73e58267fd7cb78f42341b3b37ba576e778a1a06baffbe585c/numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5", size = 18651678, upload-time = "2025-07-24T20:48:25.402Z" },
- { url = "https://files.pythonhosted.org/packages/40/f3/2fe6066b8d07c3685509bc24d56386534c008b462a488b7f503ba82b8923/numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5", size = 6441832, upload-time = "2025-07-24T20:48:37.181Z" },
- { url = "https://files.pythonhosted.org/packages/0b/ba/0937d66d05204d8f28630c9c60bc3eda68824abde4cf756c4d6aad03b0c6/numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450", size = 12927049, upload-time = "2025-07-24T20:48:56.24Z" },
- { url = "https://files.pythonhosted.org/packages/e9/ed/13542dd59c104d5e654dfa2ac282c199ba64846a74c2c4bcdbc3a0f75df1/numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a", size = 10262935, upload-time = "2025-07-24T20:49:13.136Z" },
- { url = "https://files.pythonhosted.org/packages/c9/7c/7659048aaf498f7611b783e000c7268fcc4dcf0ce21cd10aad7b2e8f9591/numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a", size = 20950906, upload-time = "2025-07-24T20:50:30.346Z" },
- { url = "https://files.pythonhosted.org/packages/80/db/984bea9d4ddf7112a04cfdfb22b1050af5757864cfffe8e09e44b7f11a10/numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b", size = 14185607, upload-time = "2025-07-24T20:50:51.923Z" },
- { url = "https://files.pythonhosted.org/packages/e4/76/b3d6f414f4eca568f469ac112a3b510938d892bc5a6c190cb883af080b77/numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125", size = 5114110, upload-time = "2025-07-24T20:51:01.041Z" },
- { url = "https://files.pythonhosted.org/packages/9e/d2/6f5e6826abd6bca52392ed88fe44a4b52aacb60567ac3bc86c67834c3a56/numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19", size = 6642050, upload-time = "2025-07-24T20:51:11.64Z" },
- { url = "https://files.pythonhosted.org/packages/c4/43/f12b2ade99199e39c73ad182f103f9d9791f48d885c600c8e05927865baf/numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f", size = 14296292, upload-time = "2025-07-24T20:51:33.488Z" },
- { url = "https://files.pythonhosted.org/packages/5d/f9/77c07d94bf110a916b17210fac38680ed8734c236bfed9982fd8524a7b47/numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5", size = 16638913, upload-time = "2025-07-24T20:51:58.517Z" },
- { url = "https://files.pythonhosted.org/packages/9b/d1/9d9f2c8ea399cc05cfff8a7437453bd4e7d894373a93cdc46361bbb49a7d/numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58", size = 16071180, upload-time = "2025-07-24T20:52:22.827Z" },
- { url = "https://files.pythonhosted.org/packages/4c/41/82e2c68aff2a0c9bf315e47d61951099fed65d8cb2c8d9dc388cb87e947e/numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0", size = 18576809, upload-time = "2025-07-24T20:52:51.015Z" },
- { url = "https://files.pythonhosted.org/packages/14/14/4b4fd3efb0837ed252d0f583c5c35a75121038a8c4e065f2c259be06d2d8/numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2", size = 6366410, upload-time = "2025-07-24T20:56:44.949Z" },
- { url = "https://files.pythonhosted.org/packages/11/9e/b4c24a6b8467b61aced5c8dc7dcfce23621baa2e17f661edb2444a418040/numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b", size = 12918821, upload-time = "2025-07-24T20:57:06.479Z" },
- { url = "https://files.pythonhosted.org/packages/0e/0f/0dc44007c70b1007c1cef86b06986a3812dd7106d8f946c09cfa75782556/numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910", size = 10477303, upload-time = "2025-07-24T20:57:22.879Z" },
- { url = "https://files.pythonhosted.org/packages/8b/3e/075752b79140b78ddfc9c0a1634d234cfdbc6f9bbbfa6b7504e445ad7d19/numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e", size = 21047524, upload-time = "2025-07-24T20:53:22.086Z" },
- { url = "https://files.pythonhosted.org/packages/fe/6d/60e8247564a72426570d0e0ea1151b95ce5bd2f1597bb878a18d32aec855/numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45", size = 14300519, upload-time = "2025-07-24T20:53:44.053Z" },
- { url = "https://files.pythonhosted.org/packages/4d/73/d8326c442cd428d47a067070c3ac6cc3b651a6e53613a1668342a12d4479/numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b", size = 5228972, upload-time = "2025-07-24T20:53:53.81Z" },
- { url = "https://files.pythonhosted.org/packages/34/2e/e71b2d6dad075271e7079db776196829019b90ce3ece5c69639e4f6fdc44/numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2", size = 6737439, upload-time = "2025-07-24T20:54:04.742Z" },
- { url = "https://files.pythonhosted.org/packages/15/b0/d004bcd56c2c5e0500ffc65385eb6d569ffd3363cb5e593ae742749b2daa/numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0", size = 14352479, upload-time = "2025-07-24T20:54:25.819Z" },
- { url = "https://files.pythonhosted.org/packages/11/e3/285142fcff8721e0c99b51686426165059874c150ea9ab898e12a492e291/numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0", size = 16702805, upload-time = "2025-07-24T20:54:50.814Z" },
- { url = "https://files.pythonhosted.org/packages/33/c3/33b56b0e47e604af2c7cd065edca892d180f5899599b76830652875249a3/numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2", size = 16133830, upload-time = "2025-07-24T20:55:17.306Z" },
- { url = "https://files.pythonhosted.org/packages/6e/ae/7b1476a1f4d6a48bc669b8deb09939c56dd2a439db1ab03017844374fb67/numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf", size = 18652665, upload-time = "2025-07-24T20:55:46.665Z" },
- { url = "https://files.pythonhosted.org/packages/14/ba/5b5c9978c4bb161034148ade2de9db44ec316fab89ce8c400db0e0c81f86/numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1", size = 6514777, upload-time = "2025-07-24T20:55:57.66Z" },
- { url = "https://files.pythonhosted.org/packages/eb/46/3dbaf0ae7c17cdc46b9f662c56da2054887b8d9e737c1476f335c83d33db/numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b", size = 13111856, upload-time = "2025-07-24T20:56:17.318Z" },
- { url = "https://files.pythonhosted.org/packages/c1/9e/1652778bce745a67b5fe05adde60ed362d38eb17d919a540e813d30f6874/numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631", size = 10544226, upload-time = "2025-07-24T20:56:34.509Z" },
+sdist = { url = "https://files.pythonhosted.org/packages/37/7d/3fec4199c5ffb892bed55cff901e4f39a58c81df9c44c280499e92cad264/numpy-2.3.2.tar.gz", hash = "sha256:e0486a11ec30cdecb53f184d496d1c6a20786c81e55e41640270130056f8ee48", size = 20489306 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1c/c0/c6bb172c916b00700ed3bf71cb56175fd1f7dbecebf8353545d0b5519f6c/numpy-2.3.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c8d9727f5316a256425892b043736d63e89ed15bbfe6556c5ff4d9d4448ff3b3", size = 20949074 },
+ { url = "https://files.pythonhosted.org/packages/20/4e/c116466d22acaf4573e58421c956c6076dc526e24a6be0903219775d862e/numpy-2.3.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:efc81393f25f14d11c9d161e46e6ee348637c0a1e8a54bf9dedc472a3fae993b", size = 14177311 },
+ { url = "https://files.pythonhosted.org/packages/78/45/d4698c182895af189c463fc91d70805d455a227261d950e4e0f1310c2550/numpy-2.3.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:dd937f088a2df683cbb79dda9a772b62a3e5a8a7e76690612c2737f38c6ef1b6", size = 5106022 },
+ { url = "https://files.pythonhosted.org/packages/9f/76/3e6880fef4420179309dba72a8c11f6166c431cf6dee54c577af8906f914/numpy-2.3.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:11e58218c0c46c80509186e460d79fbdc9ca1eb8d8aee39d8f2dc768eb781089", size = 6640135 },
+ { url = "https://files.pythonhosted.org/packages/34/fa/87ff7f25b3c4ce9085a62554460b7db686fef1e0207e8977795c7b7d7ba1/numpy-2.3.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5ad4ebcb683a1f99f4f392cc522ee20a18b2bb12a2c1c42c3d48d5a1adc9d3d2", size = 14278147 },
+ { url = "https://files.pythonhosted.org/packages/1d/0f/571b2c7a3833ae419fe69ff7b479a78d313581785203cc70a8db90121b9a/numpy-2.3.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:938065908d1d869c7d75d8ec45f735a034771c6ea07088867f713d1cd3bbbe4f", size = 16635989 },
+ { url = "https://files.pythonhosted.org/packages/24/5a/84ae8dca9c9a4c592fe11340b36a86ffa9fd3e40513198daf8a97839345c/numpy-2.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:66459dccc65d8ec98cc7df61307b64bf9e08101f9598755d42d8ae65d9a7a6ee", size = 16053052 },
+ { url = "https://files.pythonhosted.org/packages/57/7c/e5725d99a9133b9813fcf148d3f858df98511686e853169dbaf63aec6097/numpy-2.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a7af9ed2aa9ec5950daf05bb11abc4076a108bd3c7db9aa7251d5f107079b6a6", size = 18577955 },
+ { url = "https://files.pythonhosted.org/packages/ae/11/7c546fcf42145f29b71e4d6f429e96d8d68e5a7ba1830b2e68d7418f0bbd/numpy-2.3.2-cp313-cp313-win32.whl", hash = "sha256:906a30249315f9c8e17b085cc5f87d3f369b35fedd0051d4a84686967bdbbd0b", size = 6311843 },
+ { url = "https://files.pythonhosted.org/packages/aa/6f/a428fd1cb7ed39b4280d057720fed5121b0d7754fd2a9768640160f5517b/numpy-2.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:c63d95dc9d67b676e9108fe0d2182987ccb0f11933c1e8959f42fa0da8d4fa56", size = 12782876 },
+ { url = "https://files.pythonhosted.org/packages/65/85/4ea455c9040a12595fb6c43f2c217257c7b52dd0ba332c6a6c1d28b289fe/numpy-2.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:b05a89f2fb84d21235f93de47129dd4f11c16f64c87c33f5e284e6a3a54e43f2", size = 10192786 },
+ { url = "https://files.pythonhosted.org/packages/80/23/8278f40282d10c3f258ec3ff1b103d4994bcad78b0cba9208317f6bb73da/numpy-2.3.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:4e6ecfeddfa83b02318f4d84acf15fbdbf9ded18e46989a15a8b6995dfbf85ab", size = 21047395 },
+ { url = "https://files.pythonhosted.org/packages/1f/2d/624f2ce4a5df52628b4ccd16a4f9437b37c35f4f8a50d00e962aae6efd7a/numpy-2.3.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:508b0eada3eded10a3b55725b40806a4b855961040180028f52580c4729916a2", size = 14300374 },
+ { url = "https://files.pythonhosted.org/packages/f6/62/ff1e512cdbb829b80a6bd08318a58698867bca0ca2499d101b4af063ee97/numpy-2.3.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:754d6755d9a7588bdc6ac47dc4ee97867271b17cee39cb87aef079574366db0a", size = 5228864 },
+ { url = "https://files.pythonhosted.org/packages/7d/8e/74bc18078fff03192d4032cfa99d5a5ca937807136d6f5790ce07ca53515/numpy-2.3.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:a9f66e7d2b2d7712410d3bc5684149040ef5f19856f20277cd17ea83e5006286", size = 6737533 },
+ { url = "https://files.pythonhosted.org/packages/19/ea/0731efe2c9073ccca5698ef6a8c3667c4cf4eea53fcdcd0b50140aba03bc/numpy-2.3.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:de6ea4e5a65d5a90c7d286ddff2b87f3f4ad61faa3db8dabe936b34c2275b6f8", size = 14352007 },
+ { url = "https://files.pythonhosted.org/packages/cf/90/36be0865f16dfed20f4bc7f75235b963d5939707d4b591f086777412ff7b/numpy-2.3.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3ef07ec8cbc8fc9e369c8dcd52019510c12da4de81367d8b20bc692aa07573a", size = 16701914 },
+ { url = "https://files.pythonhosted.org/packages/94/30/06cd055e24cb6c38e5989a9e747042b4e723535758e6153f11afea88c01b/numpy-2.3.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:27c9f90e7481275c7800dc9c24b7cc40ace3fdb970ae4d21eaff983a32f70c91", size = 16132708 },
+ { url = "https://files.pythonhosted.org/packages/9a/14/ecede608ea73e58267fd7cb78f42341b3b37ba576e778a1a06baffbe585c/numpy-2.3.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:07b62978075b67eee4065b166d000d457c82a1efe726cce608b9db9dd66a73a5", size = 18651678 },
+ { url = "https://files.pythonhosted.org/packages/40/f3/2fe6066b8d07c3685509bc24d56386534c008b462a488b7f503ba82b8923/numpy-2.3.2-cp313-cp313t-win32.whl", hash = "sha256:c771cfac34a4f2c0de8e8c97312d07d64fd8f8ed45bc9f5726a7e947270152b5", size = 6441832 },
+ { url = "https://files.pythonhosted.org/packages/0b/ba/0937d66d05204d8f28630c9c60bc3eda68824abde4cf756c4d6aad03b0c6/numpy-2.3.2-cp313-cp313t-win_amd64.whl", hash = "sha256:72dbebb2dcc8305c431b2836bcc66af967df91be793d63a24e3d9b741374c450", size = 12927049 },
+ { url = "https://files.pythonhosted.org/packages/e9/ed/13542dd59c104d5e654dfa2ac282c199ba64846a74c2c4bcdbc3a0f75df1/numpy-2.3.2-cp313-cp313t-win_arm64.whl", hash = "sha256:72c6df2267e926a6d5286b0a6d556ebe49eae261062059317837fda12ddf0c1a", size = 10262935 },
+ { url = "https://files.pythonhosted.org/packages/c9/7c/7659048aaf498f7611b783e000c7268fcc4dcf0ce21cd10aad7b2e8f9591/numpy-2.3.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:448a66d052d0cf14ce9865d159bfc403282c9bc7bb2a31b03cc18b651eca8b1a", size = 20950906 },
+ { url = "https://files.pythonhosted.org/packages/80/db/984bea9d4ddf7112a04cfdfb22b1050af5757864cfffe8e09e44b7f11a10/numpy-2.3.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:546aaf78e81b4081b2eba1d105c3b34064783027a06b3ab20b6eba21fb64132b", size = 14185607 },
+ { url = "https://files.pythonhosted.org/packages/e4/76/b3d6f414f4eca568f469ac112a3b510938d892bc5a6c190cb883af080b77/numpy-2.3.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:87c930d52f45df092f7578889711a0768094debf73cfcde105e2d66954358125", size = 5114110 },
+ { url = "https://files.pythonhosted.org/packages/9e/d2/6f5e6826abd6bca52392ed88fe44a4b52aacb60567ac3bc86c67834c3a56/numpy-2.3.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:8dc082ea901a62edb8f59713c6a7e28a85daddcb67454c839de57656478f5b19", size = 6642050 },
+ { url = "https://files.pythonhosted.org/packages/c4/43/f12b2ade99199e39c73ad182f103f9d9791f48d885c600c8e05927865baf/numpy-2.3.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:af58de8745f7fa9ca1c0c7c943616c6fe28e75d0c81f5c295810e3c83b5be92f", size = 14296292 },
+ { url = "https://files.pythonhosted.org/packages/5d/f9/77c07d94bf110a916b17210fac38680ed8734c236bfed9982fd8524a7b47/numpy-2.3.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed5527c4cf10f16c6d0b6bee1f89958bccb0ad2522c8cadc2efd318bcd545f5", size = 16638913 },
+ { url = "https://files.pythonhosted.org/packages/9b/d1/9d9f2c8ea399cc05cfff8a7437453bd4e7d894373a93cdc46361bbb49a7d/numpy-2.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:095737ed986e00393ec18ec0b21b47c22889ae4b0cd2d5e88342e08b01141f58", size = 16071180 },
+ { url = "https://files.pythonhosted.org/packages/4c/41/82e2c68aff2a0c9bf315e47d61951099fed65d8cb2c8d9dc388cb87e947e/numpy-2.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:b5e40e80299607f597e1a8a247ff8d71d79c5b52baa11cc1cce30aa92d2da6e0", size = 18576809 },
+ { url = "https://files.pythonhosted.org/packages/14/14/4b4fd3efb0837ed252d0f583c5c35a75121038a8c4e065f2c259be06d2d8/numpy-2.3.2-cp314-cp314-win32.whl", hash = "sha256:7d6e390423cc1f76e1b8108c9b6889d20a7a1f59d9a60cac4a050fa734d6c1e2", size = 6366410 },
+ { url = "https://files.pythonhosted.org/packages/11/9e/b4c24a6b8467b61aced5c8dc7dcfce23621baa2e17f661edb2444a418040/numpy-2.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:b9d0878b21e3918d76d2209c924ebb272340da1fb51abc00f986c258cd5e957b", size = 12918821 },
+ { url = "https://files.pythonhosted.org/packages/0e/0f/0dc44007c70b1007c1cef86b06986a3812dd7106d8f946c09cfa75782556/numpy-2.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:2738534837c6a1d0c39340a190177d7d66fdf432894f469728da901f8f6dc910", size = 10477303 },
+ { url = "https://files.pythonhosted.org/packages/8b/3e/075752b79140b78ddfc9c0a1634d234cfdbc6f9bbbfa6b7504e445ad7d19/numpy-2.3.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:4d002ecf7c9b53240be3bb69d80f86ddbd34078bae04d87be81c1f58466f264e", size = 21047524 },
+ { url = "https://files.pythonhosted.org/packages/fe/6d/60e8247564a72426570d0e0ea1151b95ce5bd2f1597bb878a18d32aec855/numpy-2.3.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:293b2192c6bcce487dbc6326de5853787f870aeb6c43f8f9c6496db5b1781e45", size = 14300519 },
+ { url = "https://files.pythonhosted.org/packages/4d/73/d8326c442cd428d47a067070c3ac6cc3b651a6e53613a1668342a12d4479/numpy-2.3.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:0a4f2021a6da53a0d580d6ef5db29947025ae8b35b3250141805ea9a32bbe86b", size = 5228972 },
+ { url = "https://files.pythonhosted.org/packages/34/2e/e71b2d6dad075271e7079db776196829019b90ce3ece5c69639e4f6fdc44/numpy-2.3.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:9c144440db4bf3bb6372d2c3e49834cc0ff7bb4c24975ab33e01199e645416f2", size = 6737439 },
+ { url = "https://files.pythonhosted.org/packages/15/b0/d004bcd56c2c5e0500ffc65385eb6d569ffd3363cb5e593ae742749b2daa/numpy-2.3.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f92d6c2a8535dc4fe4419562294ff957f83a16ebdec66df0805e473ffaad8bd0", size = 14352479 },
+ { url = "https://files.pythonhosted.org/packages/11/e3/285142fcff8721e0c99b51686426165059874c150ea9ab898e12a492e291/numpy-2.3.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cefc2219baa48e468e3db7e706305fcd0c095534a192a08f31e98d83a7d45fb0", size = 16702805 },
+ { url = "https://files.pythonhosted.org/packages/33/c3/33b56b0e47e604af2c7cd065edca892d180f5899599b76830652875249a3/numpy-2.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:76c3e9501ceb50b2ff3824c3589d5d1ab4ac857b0ee3f8f49629d0de55ecf7c2", size = 16133830 },
+ { url = "https://files.pythonhosted.org/packages/6e/ae/7b1476a1f4d6a48bc669b8deb09939c56dd2a439db1ab03017844374fb67/numpy-2.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:122bf5ed9a0221b3419672493878ba4967121514b1d7d4656a7580cd11dddcbf", size = 18652665 },
+ { url = "https://files.pythonhosted.org/packages/14/ba/5b5c9978c4bb161034148ade2de9db44ec316fab89ce8c400db0e0c81f86/numpy-2.3.2-cp314-cp314t-win32.whl", hash = "sha256:6f1ae3dcb840edccc45af496f312528c15b1f79ac318169d094e85e4bb35fdf1", size = 6514777 },
+ { url = "https://files.pythonhosted.org/packages/eb/46/3dbaf0ae7c17cdc46b9f662c56da2054887b8d9e737c1476f335c83d33db/numpy-2.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:087ffc25890d89a43536f75c5fe8770922008758e8eeeef61733957041ed2f9b", size = 13111856 },
+ { url = "https://files.pythonhosted.org/packages/c1/9e/1652778bce745a67b5fe05adde60ed362d38eb17d919a540e813d30f6874/numpy-2.3.2-cp314-cp314t-win_arm64.whl", hash = "sha256:092aeb3449833ea9c0bf0089d70c29ae480685dd2377ec9cdbbb620257f84631", size = 10544226 },
+]
+
+[[package]]
+name = "packaging"
+version = "25.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469 },
]
[[package]]
@@ -64,21 +164,111 @@ dependencies = [
{ name = "pytz" },
{ name = "tzdata" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/d1/6f/75aa71f8a14267117adeeed5d21b204770189c0a0025acbdc03c337b28fc/pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2", size = 4487493, upload-time = "2025-07-07T19:20:04.079Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/d1/6f/75aa71f8a14267117adeeed5d21b204770189c0a0025acbdc03c337b28fc/pandas-2.3.1.tar.gz", hash = "sha256:0a95b9ac964fe83ce317827f80304d37388ea77616b1425f0ae41c9d2d0d7bb2", size = 4487493 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/32/ed/ff0a67a2c5505e1854e6715586ac6693dd860fbf52ef9f81edee200266e7/pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22", size = 11531393 },
+ { url = "https://files.pythonhosted.org/packages/c7/db/d8f24a7cc9fb0972adab0cc80b6817e8bef888cfd0024eeb5a21c0bb5c4a/pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a", size = 10668750 },
+ { url = "https://files.pythonhosted.org/packages/0f/b0/80f6ec783313f1e2356b28b4fd8d2148c378370045da918c73145e6aab50/pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928", size = 11342004 },
+ { url = "https://files.pythonhosted.org/packages/e9/e2/20a317688435470872885e7fc8f95109ae9683dec7c50be29b56911515a5/pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9", size = 12050869 },
+ { url = "https://files.pythonhosted.org/packages/55/79/20d746b0a96c67203a5bee5fb4e00ac49c3e8009a39e1f78de264ecc5729/pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12", size = 12750218 },
+ { url = "https://files.pythonhosted.org/packages/7c/0f/145c8b41e48dbf03dd18fdd7f24f8ba95b8254a97a3379048378f33e7838/pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb", size = 13416763 },
+ { url = "https://files.pythonhosted.org/packages/b2/c0/54415af59db5cdd86a3d3bf79863e8cc3fa9ed265f0745254061ac09d5f2/pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956", size = 10987482 },
+ { url = "https://files.pythonhosted.org/packages/48/64/2fd2e400073a1230e13b8cd604c9bc95d9e3b962e5d44088ead2e8f0cfec/pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a", size = 12029159 },
+ { url = "https://files.pythonhosted.org/packages/d8/0a/d84fd79b0293b7ef88c760d7dca69828d867c89b6d9bc52d6a27e4d87316/pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9", size = 11393287 },
+ { url = "https://files.pythonhosted.org/packages/50/ae/ff885d2b6e88f3c7520bb74ba319268b42f05d7e583b5dded9837da2723f/pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275", size = 11309381 },
+ { url = "https://files.pythonhosted.org/packages/85/86/1fa345fc17caf5d7780d2699985c03dbe186c68fee00b526813939062bb0/pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab", size = 11883998 },
+ { url = "https://files.pythonhosted.org/packages/81/aa/e58541a49b5e6310d89474333e994ee57fea97c8aaa8fc7f00b873059bbf/pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96", size = 12704705 },
+ { url = "https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444", size = 13189044 },
+]
+
+[[package]]
+name = "pandera"
+version = "0.26.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "packaging" },
+ { name = "pydantic" },
+ { name = "typeguard" },
+ { name = "typing-extensions" },
+ { name = "typing-inspect" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/ff/0b/bb312b98a92b00ff48e869e2769ce5ca6c7bc4ec793a429d450dc3c9bba2/pandera-0.26.1.tar.gz", hash = "sha256:81a55a6429770d31b3bf4c3e8e1096a38296bd3009f9eca5780fad3c3c17fd82", size = 560263 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/db/3b/91622e08086a6be44d2c0f34947d94c5282b53d217003d3ba390ee2d174b/pandera-0.26.1-py3-none-any.whl", hash = "sha256:1ff5b70556ce2f85c6b27e8fbe835a1761972f4d05f6548b4686b0db26ecb73b", size = 292907 },
+]
+
+[package.optional-dependencies]
+pandas = [
+ { name = "numpy" },
+ { name = "pandas" },
+]
+
+[[package]]
+name = "pydantic"
+version = "2.12.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "annotated-types" },
+ { name = "pydantic-core" },
+ { name = "typing-extensions" },
+ { name = "typing-inspection" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/96/ad/a17bc283d7d81837c061c49e3eaa27a45991759a1b7eae1031921c6bd924/pydantic-2.12.4.tar.gz", hash = "sha256:0f8cb9555000a4b5b617f66bfd2566264c4984b27589d3b845685983e8ea85ac", size = 821038 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/82/2f/e68750da9b04856e2a7ec56fc6f034a5a79775e9b9a81882252789873798/pydantic-2.12.4-py3-none-any.whl", hash = "sha256:92d3d202a745d46f9be6df459ac5a064fdaa3c1c4cd8adcfa332ccf3c05f871e", size = 463400 },
+]
+
+[[package]]
+name = "pydantic-core"
+version = "2.41.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/32/ed/ff0a67a2c5505e1854e6715586ac6693dd860fbf52ef9f81edee200266e7/pandas-2.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9026bd4a80108fac2239294a15ef9003c4ee191a0f64b90f170b40cfb7cf2d22", size = 11531393, upload-time = "2025-07-07T19:19:12.245Z" },
- { url = "https://files.pythonhosted.org/packages/c7/db/d8f24a7cc9fb0972adab0cc80b6817e8bef888cfd0024eeb5a21c0bb5c4a/pandas-2.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6de8547d4fdb12421e2d047a2c446c623ff4c11f47fddb6b9169eb98ffba485a", size = 10668750, upload-time = "2025-07-07T19:19:14.612Z" },
- { url = "https://files.pythonhosted.org/packages/0f/b0/80f6ec783313f1e2356b28b4fd8d2148c378370045da918c73145e6aab50/pandas-2.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:782647ddc63c83133b2506912cc6b108140a38a37292102aaa19c81c83db2928", size = 11342004, upload-time = "2025-07-07T19:19:16.857Z" },
- { url = "https://files.pythonhosted.org/packages/e9/e2/20a317688435470872885e7fc8f95109ae9683dec7c50be29b56911515a5/pandas-2.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba6aff74075311fc88504b1db890187a3cd0f887a5b10f5525f8e2ef55bfdb9", size = 12050869, upload-time = "2025-07-07T19:19:19.265Z" },
- { url = "https://files.pythonhosted.org/packages/55/79/20d746b0a96c67203a5bee5fb4e00ac49c3e8009a39e1f78de264ecc5729/pandas-2.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e5635178b387bd2ba4ac040f82bc2ef6e6b500483975c4ebacd34bec945fda12", size = 12750218, upload-time = "2025-07-07T19:19:21.547Z" },
- { url = "https://files.pythonhosted.org/packages/7c/0f/145c8b41e48dbf03dd18fdd7f24f8ba95b8254a97a3379048378f33e7838/pandas-2.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f3bf5ec947526106399a9e1d26d40ee2b259c66422efdf4de63c848492d91bb", size = 13416763, upload-time = "2025-07-07T19:19:23.939Z" },
- { url = "https://files.pythonhosted.org/packages/b2/c0/54415af59db5cdd86a3d3bf79863e8cc3fa9ed265f0745254061ac09d5f2/pandas-2.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:1c78cf43c8fde236342a1cb2c34bcff89564a7bfed7e474ed2fffa6aed03a956", size = 10987482, upload-time = "2025-07-07T19:19:42.699Z" },
- { url = "https://files.pythonhosted.org/packages/48/64/2fd2e400073a1230e13b8cd604c9bc95d9e3b962e5d44088ead2e8f0cfec/pandas-2.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8dfc17328e8da77be3cf9f47509e5637ba8f137148ed0e9b5241e1baf526e20a", size = 12029159, upload-time = "2025-07-07T19:19:26.362Z" },
- { url = "https://files.pythonhosted.org/packages/d8/0a/d84fd79b0293b7ef88c760d7dca69828d867c89b6d9bc52d6a27e4d87316/pandas-2.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ec6c851509364c59a5344458ab935e6451b31b818be467eb24b0fe89bd05b6b9", size = 11393287, upload-time = "2025-07-07T19:19:29.157Z" },
- { url = "https://files.pythonhosted.org/packages/50/ae/ff885d2b6e88f3c7520bb74ba319268b42f05d7e583b5dded9837da2723f/pandas-2.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:911580460fc4884d9b05254b38a6bfadddfcc6aaef856fb5859e7ca202e45275", size = 11309381, upload-time = "2025-07-07T19:19:31.436Z" },
- { url = "https://files.pythonhosted.org/packages/85/86/1fa345fc17caf5d7780d2699985c03dbe186c68fee00b526813939062bb0/pandas-2.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f4d6feeba91744872a600e6edbbd5b033005b431d5ae8379abee5bcfa479fab", size = 11883998, upload-time = "2025-07-07T19:19:34.267Z" },
- { url = "https://files.pythonhosted.org/packages/81/aa/e58541a49b5e6310d89474333e994ee57fea97c8aaa8fc7f00b873059bbf/pandas-2.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:fe37e757f462d31a9cd7580236a82f353f5713a80e059a29753cf938c6775d96", size = 12704705, upload-time = "2025-07-07T19:19:36.856Z" },
- { url = "https://files.pythonhosted.org/packages/d5/f9/07086f5b0f2a19872554abeea7658200824f5835c58a106fa8f2ae96a46c/pandas-2.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5db9637dbc24b631ff3707269ae4559bce4b7fd75c1c4d7e13f40edc42df4444", size = 13189044, upload-time = "2025-07-07T19:19:39.999Z" },
+ { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403 },
+ { url = "https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206 },
+ { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307 },
+ { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258 },
+ { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917 },
+ { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186 },
+ { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164 },
+ { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146 },
+ { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788 },
+ { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133 },
+ { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852 },
+ { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679 },
+ { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766 },
+ { url = "https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005 },
+ { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622 },
+ { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725 },
+ { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040 },
+ { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691 },
+ { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897 },
+ { url = "https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302 },
+ { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877 },
+ { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680 },
+ { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960 },
+ { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102 },
+ { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039 },
+ { url = "https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126 },
+ { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489 },
+ { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288 },
+ { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255 },
+ { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760 },
+ { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092 },
+ { url = "https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385 },
+ { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832 },
+ { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585 },
+ { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078 },
+ { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914 },
+ { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560 },
+ { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244 },
+ { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955 },
+ { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906 },
+ { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607 },
+ { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769 },
]
[[package]]
@@ -88,27 +278,98 @@ source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "six" },
]
-sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" },
+ { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892 },
]
[[package]]
name = "pytz"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" },
+ { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225 },
+]
+
+[[package]]
+name = "pyyaml"
+version = "6.0.3"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669 },
+ { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252 },
+ { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081 },
+ { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159 },
+ { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626 },
+ { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613 },
+ { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115 },
+ { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427 },
+ { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090 },
+ { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246 },
+ { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814 },
+ { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809 },
+ { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454 },
+ { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355 },
+ { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175 },
+ { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228 },
+ { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194 },
+ { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429 },
+ { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912 },
+ { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108 },
+ { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641 },
+ { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901 },
+ { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132 },
+ { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261 },
+ { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272 },
+ { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923 },
+ { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062 },
+ { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341 },
+]
+
+[[package]]
+name = "ruff"
+version = "0.14.4"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/df/55/cccfca45157a2031dcbb5a462a67f7cf27f8b37d4b3b1cd7438f0f5c1df6/ruff-0.14.4.tar.gz", hash = "sha256:f459a49fe1085a749f15414ca76f61595f1a2cc8778ed7c279b6ca2e1fd19df3", size = 5587844 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/17/b9/67240254166ae1eaa38dec32265e9153ac53645a6c6670ed36ad00722af8/ruff-0.14.4-py3-none-linux_armv6l.whl", hash = "sha256:e6604613ffbcf2297cd5dcba0e0ac9bd0c11dc026442dfbb614504e87c349518", size = 12606781 },
+ { url = "https://files.pythonhosted.org/packages/46/c8/09b3ab245d8652eafe5256ab59718641429f68681ee713ff06c5c549f156/ruff-0.14.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d99c0b52b6f0598acede45ee78288e5e9b4409d1ce7f661f0fa36d4cbeadf9a4", size = 12946765 },
+ { url = "https://files.pythonhosted.org/packages/14/bb/1564b000219144bf5eed2359edc94c3590dd49d510751dad26202c18a17d/ruff-0.14.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:9358d490ec030f1b51d048a7fd6ead418ed0826daf6149e95e30aa67c168af33", size = 11928120 },
+ { url = "https://files.pythonhosted.org/packages/a3/92/d5f1770e9988cc0742fefaa351e840d9aef04ec24ae1be36f333f96d5704/ruff-0.14.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81b40d27924f1f02dfa827b9c0712a13c0e4b108421665322218fc38caf615c2", size = 12370877 },
+ { url = "https://files.pythonhosted.org/packages/e2/29/e9282efa55f1973d109faf839a63235575519c8ad278cc87a182a366810e/ruff-0.14.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f5e649052a294fe00818650712083cddc6cc02744afaf37202c65df9ea52efa5", size = 12408538 },
+ { url = "https://files.pythonhosted.org/packages/8e/01/930ed6ecfce130144b32d77d8d69f5c610e6d23e6857927150adf5d7379a/ruff-0.14.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa082a8f878deeba955531f975881828fd6afd90dfa757c2b0808aadb437136e", size = 13141942 },
+ { url = "https://files.pythonhosted.org/packages/6a/46/a9c89b42b231a9f487233f17a89cbef9d5acd538d9488687a02ad288fa6b/ruff-0.14.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:1043c6811c2419e39011890f14d0a30470f19d47d197c4858b2787dfa698f6c8", size = 14544306 },
+ { url = "https://files.pythonhosted.org/packages/78/96/9c6cf86491f2a6d52758b830b89b78c2ae61e8ca66b86bf5a20af73d20e6/ruff-0.14.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a9f3a936ac27fb7c2a93e4f4b943a662775879ac579a433291a6f69428722649", size = 14210427 },
+ { url = "https://files.pythonhosted.org/packages/71/f4/0666fe7769a54f63e66404e8ff698de1dcde733e12e2fd1c9c6efb689cb5/ruff-0.14.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:95643ffd209ce78bc113266b88fba3d39e0461f0cbc8b55fb92505030fb4a850", size = 13658488 },
+ { url = "https://files.pythonhosted.org/packages/ee/79/6ad4dda2cfd55e41ac9ed6d73ef9ab9475b1eef69f3a85957210c74ba12c/ruff-0.14.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:456daa2fa1021bc86ca857f43fe29d5d8b3f0e55e9f90c58c317c1dcc2afc7b5", size = 13354908 },
+ { url = "https://files.pythonhosted.org/packages/b5/60/f0b6990f740bb15c1588601d19d21bcc1bd5de4330a07222041678a8e04f/ruff-0.14.4-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:f911bba769e4a9f51af6e70037bb72b70b45a16db5ce73e1f72aefe6f6d62132", size = 13587803 },
+ { url = "https://files.pythonhosted.org/packages/c9/da/eaaada586f80068728338e0ef7f29ab3e4a08a692f92eb901a4f06bbff24/ruff-0.14.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:76158a7369b3979fa878612c623a7e5430c18b2fd1c73b214945c2d06337db67", size = 12279654 },
+ { url = "https://files.pythonhosted.org/packages/66/d4/b1d0e82cf9bf8aed10a6d45be47b3f402730aa2c438164424783ac88c0ed/ruff-0.14.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f3b8f3b442d2b14c246e7aeca2e75915159e06a3540e2f4bed9f50d062d24469", size = 12357520 },
+ { url = "https://files.pythonhosted.org/packages/04/f4/53e2b42cc82804617e5c7950b7079d79996c27e99c4652131c6a1100657f/ruff-0.14.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c62da9a06779deecf4d17ed04939ae8b31b517643b26370c3be1d26f3ef7dbde", size = 12719431 },
+ { url = "https://files.pythonhosted.org/packages/a2/94/80e3d74ed9a72d64e94a7b7706b1c1ebaa315ef2076fd33581f6a1cd2f95/ruff-0.14.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5a443a83a1506c684e98acb8cb55abaf3ef725078be40237463dae4463366349", size = 13464394 },
+ { url = "https://files.pythonhosted.org/packages/54/1a/a49f071f04c42345c793d22f6cf5e0920095e286119ee53a64a3a3004825/ruff-0.14.4-py3-none-win32.whl", hash = "sha256:643b69cb63cd996f1fc7229da726d07ac307eae442dd8974dbc7cf22c1e18fff", size = 12493429 },
+ { url = "https://files.pythonhosted.org/packages/bc/22/e58c43e641145a2b670328fb98bc384e20679b5774258b1e540207580266/ruff-0.14.4-py3-none-win_amd64.whl", hash = "sha256:26673da283b96fe35fa0c939bf8411abec47111644aa9f7cfbd3c573fb125d2c", size = 13635380 },
+ { url = "https://files.pythonhosted.org/packages/30/bd/4168a751ddbbf43e86544b4de8b5c3b7be8d7167a2a5cb977d274e04f0a1/ruff-0.14.4-py3-none-win_arm64.whl", hash = "sha256:dd09c292479596b0e6fec8cd95c65c3a6dc68e9ad17b8f2382130f87ff6a75bb", size = 12663065 },
+]
+
+[[package]]
+name = "rx"
+version = "3.2.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/34/b5/e0f602453b64b0a639d56f3c05ab27202a4eec993eb64d66c077c821b621/Rx-3.2.0.tar.gz", hash = "sha256:b657ca2b45aa485da2f7dcfd09fac2e554f7ac51ff3c2f8f2ff962ecd963d91c", size = 105540 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/e2/a9/efeaeca4928a9a56d04d609b5730994d610c82cf4d9dd7aa173e6ef4233e/Rx-3.2.0-py3-none-any.whl", hash = "sha256:922c5f4edb3aa1beaa47bf61d65d5380011ff6adcd527f26377d05cb73ed8ec8", size = 199245 },
]
[[package]]
name = "six"
version = "1.17.0"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" },
+ { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050 },
]
[[package]]
@@ -116,17 +377,86 @@ name = "solid"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
+ { name = "aiofiles" },
+ { name = "minikanren" },
{ name = "pandas" },
+ { name = "pandera", extra = ["pandas"] },
+ { name = "pyyaml" },
+ { name = "ruff" },
+ { name = "rx" },
]
[package.metadata]
-requires-dist = [{ name = "pandas", specifier = ">=2.3.1" }]
+requires-dist = [
+ { name = "aiofiles", specifier = ">=25.1.0" },
+ { name = "minikanren", specifier = ">=1.0.5" },
+ { name = "pandas", specifier = ">=2.3.1" },
+ { name = "pandera", extras = ["pandas"], specifier = ">=0.26.1" },
+ { name = "pyyaml", specifier = ">=6.0.3" },
+ { name = "ruff", specifier = ">=0.14.4" },
+ { name = "rx", specifier = ">=3.2.0" },
+]
+
+[[package]]
+name = "toolz"
+version = "1.1.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093 },
+]
+
+[[package]]
+name = "typeguard"
+version = "4.4.4"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/1b/a9/e3aee762739c1d7528da1c3e06d518503f8b6c439c35549b53735ba52ead/typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e", size = 34874 },
+]
+
+[[package]]
+name = "typing-extensions"
+version = "4.15.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614 },
+]
+
+[[package]]
+name = "typing-inspect"
+version = "0.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "mypy-extensions" },
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/dc/74/1789779d91f1961fa9438e9a8710cdae6bd138c80d7303996933d117264a/typing_inspect-0.9.0.tar.gz", hash = "sha256:b23fc42ff6f6ef6954e4852c1fb512cdd18dbea03134f91f856a95ccc9461f78", size = 13825 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/65/f3/107a22063bf27bdccf2024833d3445f4eea42b2e598abfbd46f6a63b6cb0/typing_inspect-0.9.0-py3-none-any.whl", hash = "sha256:9ee6fc59062311ef8547596ab6b955e1b8aa46242d854bfc78f4f6b0eff35f9f", size = 8827 },
+]
+
+[[package]]
+name = "typing-inspection"
+version = "0.4.2"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+ { name = "typing-extensions" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949 }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611 },
+]
[[package]]
name = "tzdata"
version = "2025.2"
source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380 }
wheels = [
- { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" },
+ { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839 },
]
diff --git a/2025/solid/verify_reports.py b/2025/solid/verify_reports.py
new file mode 100644
index 0000000..b0922db
--- /dev/null
+++ b/2025/solid/verify_reports.py
@@ -0,0 +1,148 @@
+import argparse
+import json
+import math
+import sys
+from collections.abc import Iterable
+from pathlib import Path
+from typing import Any, cast
+
+MetricMap = dict[str, Any]
+
+
+def parse_args(argv: Iterable[str]) -> argparse.Namespace:
+ parser = argparse.ArgumentParser(
+ description="Compare JSON outputs from the sales-report implementations."
+ )
+ parser.add_argument(
+ "--outputs-dir",
+ type=Path,
+ default=Path(__file__).parent / "output",
+ help="Directory containing the JSON outputs.",
+ )
+ parser.add_argument(
+ "--baseline",
+ type=Path,
+ default=None,
+ help=(
+ "Specific baseline JSON file to compare against. "
+ "Defaults to the first file found in outputs-dir."
+ ),
+ )
+ parser.add_argument(
+ "--tolerance",
+ type=float,
+ default=1e-6,
+ help="Absolute tolerance for comparing numeric values.",
+ )
+ parser.add_argument(
+ "--verbose",
+ action="store_true",
+ help="Print per-file comparison details even when all metrics match.",
+ )
+ return parser.parse_args(list(argv))
+
+
+def load_metrics(path: Path) -> MetricMap:
+ with path.open("r", encoding="utf-8") as fh:
+ data = json.load(fh)
+
+ if not isinstance(data, dict):
+ raise TypeError(f"Expected JSON object in {path}, found {type(data).__name__}")
+
+ if not all(isinstance(key, str) for key in data):
+ raise TypeError(f"Expected string keys in {path}")
+
+ return cast(MetricMap, data)
+
+
+def is_close(a: Any, b: Any, tolerance: float) -> bool:
+ if isinstance(a, (int, float)) and isinstance(b, (int, float)):
+ return math.isclose(float(a), float(b), rel_tol=0.0, abs_tol=tolerance)
+ return a == b
+
+
+def compare_metrics(
+ baseline_metrics: MetricMap,
+ candidate_metrics: MetricMap,
+ tolerance: float,
+) -> tuple[bool, str]:
+ baseline_keys = set(baseline_metrics)
+ candidate_keys = set(candidate_metrics)
+
+ missing = baseline_keys - candidate_keys
+ extra = candidate_keys - baseline_keys
+
+ if missing or extra:
+ parts = []
+ if missing:
+ parts.append(f"missing keys: {sorted(missing)}")
+ if extra:
+ parts.append(f"unexpected keys: {sorted(extra)}")
+ return False, "; ".join(parts)
+
+ mismatches = []
+ for key in sorted(baseline_keys):
+ baseline_value = baseline_metrics[key]
+ candidate_value = candidate_metrics[key]
+ if not is_close(baseline_value, candidate_value, tolerance):
+ mismatches.append(
+ f"{key!r}: {candidate_value!r} != {baseline_value!r}"
+ )
+
+ if mismatches:
+ return False, "; ".join(mismatches)
+ return True, ""
+
+
+def main(argv: Iterable[str]) -> int:
+ args = parse_args(argv)
+
+ outputs_dir: Path = args.outputs_dir
+ if not outputs_dir.exists() or not outputs_dir.is_dir():
+ print(f"Outputs directory does not exist: {outputs_dir}", file=sys.stderr)
+ return 2
+
+ json_files = sorted(outputs_dir.glob("*.json"))
+ if not json_files:
+ print(f"No JSON files found in {outputs_dir}", file=sys.stderr)
+ return 2
+
+ baseline_arg = args.baseline
+ if baseline_arg is not None:
+ if not baseline_arg.exists():
+ print(f"Baseline file not found: {baseline_arg}", file=sys.stderr)
+ return 2
+ baseline_path = baseline_arg
+ else:
+ baseline_path = json_files[0]
+
+ baseline_metrics = load_metrics(baseline_path)
+ print(f"Baseline: {baseline_path.name}")
+
+ failures: list[tuple[Path, str]] = []
+ for path in json_files:
+ if path == baseline_path:
+ if args.verbose:
+ print(f"✔ {path.name} (baseline)")
+ continue
+
+ metrics = load_metrics(path)
+ ok, message = compare_metrics(baseline_metrics, metrics, args.tolerance)
+ if ok:
+ if args.verbose:
+ print(f"✔ {path.name}")
+ else:
+ print(f"✖ {path.name}: {message}")
+ failures.append((path, message))
+
+ if failures:
+ print(f"\n{len(failures)} file(s) differ from baseline.", file=sys.stderr)
+ return 1
+
+ print("\nAll report outputs match.")
+ return 0
+
+
+# Script entry point: propagate main()'s status code to the shell.
+if __name__ == "__main__":
+    raise SystemExit(main(sys.argv[1:]))
+